code
string, lengths 130 to 281k
| code_dependency
string, lengths 182 to 306k
|
|---|---|
public class class_name {
/**
 * Builds a sub-series of {@code series} starting at {@code beginIndex} and
 * covering at most {@code duration} (measured against tick end times).
 *
 * @param series the source time series
 * @param beginIndex index of the first tick of the sub-series
 * @param duration maximum time span of the sub-series
 * @return a new BaseTimeSeries view over the selected tick range
 */
public static TimeSeries subseries(TimeSeries series, int beginIndex, Duration duration) {
    // Interval covered by the sub-series: [start, start + duration)
    ZonedDateTime intervalStart = series.getTick(beginIndex).getEndTime();
    ZonedDateTime intervalEnd = intervalStart.plus(duration);
    // Count consecutive ticks (from beginIndex) whose end time lies inside the interval.
    int tickCount = 0;
    int lastIndex = series.getEndIndex();
    int i = beginIndex;
    while (i <= lastIndex) {
        ZonedDateTime endTime = series.getTick(i).getEndTime();
        boolean insideInterval = !endTime.isBefore(intervalStart) && endTime.isBefore(intervalEnd);
        if (!insideInterval) {
            // First tick outside the interval ends the scan.
            break;
        }
        tickCount++;
        i++;
    }
    return new BaseTimeSeries(series, beginIndex, beginIndex + tickCount - 1);
} }
|
public class class_name {
/**
 * Builds a sub-series of {@code series} starting at {@code beginIndex} and
 * covering at most {@code duration} (measured against tick end times).
 * Copy annotated with control/data-dependency markers.
 */
public static TimeSeries subseries(TimeSeries series, int beginIndex, Duration duration) {
// Calculating the sub-series interval
ZonedDateTime beginInterval = series.getTick(beginIndex).getEndTime();
ZonedDateTime endInterval = beginInterval.plus(duration);
// Checking ticks belonging to the sub-series (starting at the provided index)
int subseriesNbTicks = 0;
int endIndex = series.getEndIndex();
for (int i = beginIndex; i <= endIndex; i++) {
// For each tick...
ZonedDateTime tickTime = series.getTick(i).getEndTime();
if (tickTime.isBefore(beginInterval) || !tickTime.isBefore(endInterval)) {
// Tick out of the interval
break;
}
// Tick in the interval
// --> Incrementing the number of ticks in the subseries
subseriesNbTicks++; // depends on control dependency: [for], data = [none]
}
return new BaseTimeSeries(series, beginIndex, beginIndex + subseriesNbTicks - 1);
} }
|
public class class_name {
private static void processBiGram() {
//移除二元模型出现频率为1的情况
Iterator<String> iter = BIGRAM.keySet().iterator();
while(iter.hasNext()){
String key = iter.next();
if(BIGRAM.get(key) < 2){
iter.remove();
}
}
try(BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("src/main/resources/bigram.txt"),"utf-8"))){
for(Entry<String, Integer> item : BIGRAM.entrySet()){
writer.write(item.getKey()+" "+item.getValue()+"\n");
}
}catch(Exception e){
LOGGER.info("保存bigram模型失败:", e);
}
} }
|
public class class_name {
/**
 * Prunes bigram entries seen only once, then writes the model to
 * src/main/resources/bigram.txt. Copy annotated with dependency markers.
 */
private static void processBiGram() {
// Remove bigram entries whose frequency is 1
Iterator<String> iter = BIGRAM.keySet().iterator();
while(iter.hasNext()){
String key = iter.next();
if(BIGRAM.get(key) < 2){
iter.remove(); // depends on control dependency: [if], data = [none]
}
}
try(BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("src/main/resources/bigram.txt"),"utf-8"))){
for(Entry<String, Integer> item : BIGRAM.entrySet()){
writer.write(item.getKey()+" "+item.getValue()+"\n");
}
}catch(Exception e){
LOGGER.info("保存bigram模型失败:", e);
}
} }
|
public class class_name {
/**
 * Appends the given lookup attributes to this request, lazily creating the
 * backing list on first use, and returns {@code this} for call chaining.
 */
public LookupEventsRequest withLookupAttributes(LookupAttribute... lookupAttributes) {
    if (this.lookupAttributes == null) {
        // Lazily create the backing list, pre-sized for the incoming elements.
        setLookupAttributes(new com.amazonaws.internal.SdkInternalList<LookupAttribute>(lookupAttributes.length));
    }
    for (int i = 0; i < lookupAttributes.length; i++) {
        this.lookupAttributes.add(lookupAttributes[i]);
    }
    return this;
} }
|
public class class_name {
/**
 * Appends the given lookup attributes to this request (creating the backing
 * list if needed) and returns {@code this}. Copy annotated with dependency markers.
 */
public LookupEventsRequest withLookupAttributes(LookupAttribute... lookupAttributes) {
if (this.lookupAttributes == null) {
setLookupAttributes(new com.amazonaws.internal.SdkInternalList<LookupAttribute>(lookupAttributes.length)); // depends on control dependency: [if], data = [none]
}
for (LookupAttribute ele : lookupAttributes) {
this.lookupAttributes.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
/**
 * Downgrades the children of every &lt;flaechen&gt; element: the values of the
 * first &lt;anzahl_balkone&gt; and first &lt;anzahl_terrassen&gt; are summed, all
 * occurrences of both elements are removed, and the sum (when positive) is
 * written into an &lt;anzahl_balkon_terrassen&gt; element — created if absent,
 * or filled in if present but blank.
 *
 * @param doc the OpenImmo document to modify in place
 * @throws JaxenException on XPath evaluation failure
 */
protected void downgradeFlaechenChildElements(Document doc) throws JaxenException {
    List nodes = XmlUtils.newXPath(
            "/io:openimmo/io:anbieter/io:immobilie/io:flaechen",
            doc).selectNodes(doc);
    for (Object item : nodes) {
        Element parentNode = (Element) item;
        // The balkone/terrassen handling was duplicated; both now share one helper.
        double sum = firstValueAndRemoveAll(doc, parentNode, "anzahl_balkone");
        sum += firstValueAndRemoveAll(doc, parentNode, "anzahl_terrassen");
        if (sum > 0) {
            Element node = (Element) XmlUtils.newXPath("io:anzahl_balkon_terrassen", doc)
                    .selectSingleNode(parentNode);
            if (node == null) {
                node = doc.createElementNS(StringUtils.EMPTY, "anzahl_balkon_terrassen");
                node.setTextContent(String.valueOf(sum));
                parentNode.appendChild(node);
            } else if (StringUtils.isBlank(node.getTextContent())) {
                // Existing element with no value: fill in the computed sum.
                node.setTextContent(String.valueOf(sum));
            }
        }
    }
}

/**
 * Removes every {@code <elementName>} child of {@code parentNode} and returns
 * the numeric value of the FIRST occurrence (0 if absent or unparsable).
 */
private double firstValueAndRemoveAll(Document doc, Element parentNode, String elementName) throws JaxenException {
    double result = 0;
    boolean passedFirst = false;
    List childNodes = XmlUtils.newXPath("io:" + elementName, doc)
            .selectNodes(parentNode);
    for (Object childItem : childNodes) {
        Node node = (Node) childItem;
        if (!passedFirst) {
            passedFirst = true;
            String value = StringUtils.trimToNull(node.getTextContent());
            try {
                result = (value != null) ? Double.parseDouble(value) : 0;
            } catch (NumberFormatException ex) {
                LOGGER.warn("Can't parse <" + elementName + ">" + value + "</" + elementName + "> into a numeric value!");
                LOGGER.warn("> " + ex.getLocalizedMessage(), ex);
            }
        }
        // All occurrences are removed, not just the first one read.
        parentNode.removeChild(node);
    }
    return result;
} }
|
public class class_name {
/**
 * Replaces the first &lt;anzahl_balkone&gt; / &lt;anzahl_terrassen&gt; values of each
 * &lt;flaechen&gt; element with a combined &lt;anzahl_balkon_terrassen&gt; element,
 * removing all occurrences of both source elements.
 * Copy annotated with control/data-dependency markers.
 */
protected void downgradeFlaechenChildElements(Document doc) throws JaxenException {
List nodes = XmlUtils.newXPath(
"/io:openimmo/io:anbieter/io:immobilie/io:flaechen",
doc).selectNodes(doc);
for (Object item : nodes) {
Element parentNode = (Element) item;
boolean passedAnzahlBalkone = false;
boolean passedAnzahlTerrassen = false;
double sum = 0;
List childNodes = XmlUtils.newXPath("io:anzahl_balkone", doc)
.selectNodes(parentNode);
for (Object childItem : childNodes) {
Node node = (Node) childItem;
if (!passedAnzahlBalkone) {
passedAnzahlBalkone = true; // depends on control dependency: [if], data = [none]
String value = StringUtils.trimToNull(node.getTextContent());
try {
sum += (value != null) ? Double.parseDouble(value) : 0; // depends on control dependency: [try], data = [none]
} catch (NumberFormatException ex) {
LOGGER.warn("Can't parse <anzahl_balkone>" + value + "</anzahl_balkone> into a numeric value!");
LOGGER.warn("> " + ex.getLocalizedMessage(), ex);
}
}
parentNode.removeChild(node);
}
childNodes = XmlUtils.newXPath("io:anzahl_terrassen", doc)
.selectNodes(parentNode);
for (Object childItem : childNodes) {
Node node = (Node) childItem;
if (!passedAnzahlTerrassen) {
passedAnzahlTerrassen = true;
String value = StringUtils.trimToNull(node.getTextContent());
try {
sum += (value != null) ? Double.parseDouble(value) : 0;
} catch (NumberFormatException ex) {
LOGGER.warn("Can't parse <anzahl_terrassen>" + value + "</anzahl_terrassen> into a numeric value!");
LOGGER.warn("> " + ex.getLocalizedMessage(), ex);
} // depends on control dependency: [catch], data = [none]
}
parentNode.removeChild(node); // depends on control dependency: [for], data = [none]
}
if (sum > 0) {
Element node = (Element) XmlUtils.newXPath("io:anzahl_balkon_terrassen", doc)
.selectSingleNode(parentNode);
if (node == null) {
node = doc.createElementNS(StringUtils.EMPTY, "anzahl_balkon_terrassen"); // depends on control dependency: [if], data = [none]
node.setTextContent(String.valueOf(sum)); // depends on control dependency: [if], data = [none]
parentNode.appendChild(node); // depends on control dependency: [if], data = [(node]
} else if (StringUtils.isBlank(node.getTextContent())) {
node.setTextContent(String.valueOf(sum)); // depends on control dependency: [if], data = [none]
}
}
}
} }
|
public class class_name {
/**
 * Converts the stored positions into a primitive n x 3 array of x/y/z
 * coordinates. Missing (null) entries are preserved as null rows.
 *
 * @return an array with one {x, y, z} row per element, null rows for gaps
 */
public double[][] getPositionsAsArray() {
    double[][] result = new double[size()][3];
    for (int index = 0; index < size(); index++) {
        if (get(index) == null) {
            // Preserve gaps: a missing position becomes a null row.
            result[index] = null;
        } else {
            result[index][0] = get(index).x;
            result[index][1] = get(index).y;
            result[index][2] = get(index).z;
        }
    }
    return result;
} }
|
public class class_name {
/**
 * Converts the stored positions into a primitive n x 3 array; null entries
 * become null rows. Copy annotated with control/data-dependency markers.
 */
public double[][] getPositionsAsArray(){
double[][] posAsArr = new double[size()][3];
for(int i = 0; i < size(); i++){
if(get(i)!=null){
posAsArr[i][0] = get(i).x; // depends on control dependency: [if], data = [none]
posAsArr[i][1] = get(i).y; // depends on control dependency: [if], data = [none]
posAsArr[i][2] = get(i).z; // depends on control dependency: [if], data = [none]
}
else{
posAsArr[i] = null; // depends on control dependency: [if], data = [none]
}
}
return posAsArr;
} }
|
public class class_name {
/**
 * Element-wise minimum of {@code x} and {@code y} written into {@code z}.
 * With {@code dimensions == null} the arrays must all share one shape and a
 * pairwise OldMin op is run; otherwise a BroadcastMin op is run along the
 * given dimensions.
 * NOTE(review): calling this with zero varargs passes an empty array (not
 * null) and therefore takes the broadcast path — confirm that is intended.
 */
public static INDArray min(INDArray x, INDArray y, INDArray z, int... dimensions) {
if(dimensions == null) {
Preconditions.checkArgument(Arrays.equals(x.shape(),y.shape()),getFormattedShapeErrorMessageXy(x,y));
Preconditions.checkArgument(Arrays.equals(x.shape(),z.shape()),getFormattedShapeErrorMessageXResult(x,z));
return Nd4j.getExecutioner().execAndReturn(new OldMin(x,y,z,x.length()));
}
return Nd4j.getExecutioner().execAndReturn(new BroadcastMin(x,y,z,dimensions));
} }
|
public class class_name {
/**
 * Element-wise minimum of x and y into z; null dimensions = same-shape
 * pairwise op, otherwise broadcast along the given dimensions.
 * Copy annotated with control/data-dependency markers.
 */
public static INDArray min(INDArray x, INDArray y, INDArray z, int... dimensions) {
if(dimensions == null) {
Preconditions.checkArgument(Arrays.equals(x.shape(),y.shape()),getFormattedShapeErrorMessageXy(x,y)); // depends on control dependency: [if], data = [none]
Preconditions.checkArgument(Arrays.equals(x.shape(),z.shape()),getFormattedShapeErrorMessageXResult(x,z)); // depends on control dependency: [if], data = [none]
return Nd4j.getExecutioner().execAndReturn(new OldMin(x,y,z,x.length())); // depends on control dependency: [if], data = [none]
}
return Nd4j.getExecutioner().execAndReturn(new BroadcastMin(x,y,z,dimensions));
} }
|
public class class_name {
/**
 * Resolves every class that is directly annotated with the given annotation
 * type, by delegating to {@code resolve} with a matching predicate.
 *
 * @param annotation the annotation type to look for
 * @return the set of matching classes
 * @throws IOException if the underlying resolution fails
 */
@Override
public Set<Class<?>> resolveByAnnotation(
        final Class<? extends Annotation> annotation) throws IOException {
    // Matcher accepting exactly those classes carrying 'annotation'.
    Matcher<Class<?>> matcher = new Matcher<Class<?>>() {
        public boolean matches(Class<?> clazz) {
            for (Annotation candidate : clazz.getAnnotations()) {
                if (candidate.annotationType().equals(annotation)) {
                    return true;
                }
            }
            return false;
        }
    };
    return resolve(matcher);
} }
|
public class class_name {
/**
 * Resolves every class directly annotated with the given annotation type.
 * Copy annotated with control/data-dependency markers.
 */
@Override
public Set<Class<?>> resolveByAnnotation(
final Class<? extends Annotation> annotation) throws IOException {
Matcher<Class<?>> matcher = new Matcher<Class<?>>() {
public boolean matches(Class<?> clazz) {
Annotation[] annotations = clazz.getAnnotations();
for (Annotation a : annotations) {
if (a.annotationType().equals(annotation)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
}
};
return resolve(matcher);
} }
|
public class class_name {
/**
 * Deprecated shim: forwards to the eight-argument overload, resolving the
 * timestamp from the injected clock when {@code useLocalTimestamp} is set
 * and passing 0 otherwise.
 */
@Deprecated
public static String replaceShorthand(char c, Map<String, String> headers,
    TimeZone timeZone, boolean needRounding, int unit, int roundDown,
    boolean useLocalTimestamp) {
    // 0 when the local clock is not requested; the delegate receives it as-is.
    long ts = useLocalTimestamp ? clock.currentTimeMillis() : 0L;
    return replaceShorthand(c, headers, timeZone, needRounding, unit,
        roundDown, false, ts);
} }
|
public class class_name {
/**
 * Deprecated shim forwarding to the eight-argument overload.
 * Copy annotated with control/data-dependency markers.
 */
@Deprecated
public static String replaceShorthand(char c, Map<String, String> headers,
TimeZone timeZone, boolean needRounding, int unit, int roundDown,
boolean useLocalTimestamp) {
long ts = 0;
if (useLocalTimestamp) {
ts = clock.currentTimeMillis(); // depends on control dependency: [if], data = [none]
}
return replaceShorthand(c, headers, timeZone, needRounding, unit,
roundDown, false, ts);
} }
|
public class class_name {
/**
 * Records the selected navigation group, refreshes the navigation groups,
 * and notifies the optional update callback when one is registered.
 */
public void onGroupSelected(String id) {
    navGroupId = id;
    updateNavGroups();
    // The update callback is optional; skip notification when absent.
    if (onUpdateCommand == null) {
        return;
    }
    onUpdateCommand.execute();
} }
|
public class class_name {
/**
 * Records the selected navigation group, refreshes the groups, and fires the
 * optional update callback. Copy annotated with dependency markers.
 */
public void onGroupSelected(String id) {
navGroupId = id;
updateNavGroups();
if (onUpdateCommand != null) {
onUpdateCommand.execute(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Marshalls the request's Names, Limit and NextToken fields into the given
 * protocol marshaller. The call order defines the serialized field order.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DescribeGameSessionQueuesRequest describeGameSessionQueuesRequest, ProtocolMarshaller protocolMarshaller) {
if (describeGameSessionQueuesRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeGameSessionQueuesRequest.getNames(), NAMES_BINDING);
protocolMarshaller.marshall(describeGameSessionQueuesRequest.getLimit(), LIMIT_BINDING);
protocolMarshaller.marshall(describeGameSessionQueuesRequest.getNextToken(), NEXTTOKEN_BINDING);
} catch (Exception e) {
// Wrap any marshalling failure, preserving the original cause.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
/**
 * Marshalls the request's Names, Limit and NextToken fields.
 * Copy annotated with control/data-dependency markers.
 */
public void marshall(DescribeGameSessionQueuesRequest describeGameSessionQueuesRequest, ProtocolMarshaller protocolMarshaller) {
if (describeGameSessionQueuesRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeGameSessionQueuesRequest.getNames(), NAMES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeGameSessionQueuesRequest.getLimit(), LIMIT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeGameSessionQueuesRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Advances {@code highestContiguousMsgId} by consuming contiguous ids from
 * {@code nonContiguousMsgIds}, fast-forwarding after a completed sync.
 * NOTE(review): elements are removed from {@code nonContiguousMsgIds} while a
 * for-each iteration over it is in progress — this assumes the set's iterator
 * tolerates concurrent removal (e.g. a concurrent skip-list set); confirm.
 */
private void reconcile() {
// If nonContiguousMsgIds is empty then nothing to reconcile.
if (nonContiguousMsgIds.isEmpty()) {
return;
}
// This happens when a passive is started after Active has moved on and
// passive starts to see msgIDs starting from a number > 0.
// Once the sync is completed, fast forward highestContiguousMsgId.
// Post sync completion assuming platform will send all msgIds beyond highestContiguousMsgId.
if (highestContiguousMsgId == -1L && isSyncCompleted) {
// NOTE(review): last() yields the LARGEST pending id despite the variable
// being named "min" — verify the intent (all smaller ids are then dropped).
Long min = nonContiguousMsgIds.last();
LOGGER.info("Setting highestContiguousMsgId to {} from -1", min);
highestContiguousMsgId = min;
nonContiguousMsgIds.removeIf(msgId -> msgId <= min);
}
for (long msgId : nonContiguousMsgIds) {
if (msgId <= highestContiguousMsgId) {
nonContiguousMsgIds.remove(msgId);
} else if (msgId > highestContiguousMsgId + 1) {
// Gap found: everything beyond is still non-contiguous, stop here.
break;
} else {
// the order is important..
highestContiguousMsgId = msgId;
nonContiguousMsgIds.remove(msgId);
}
}
} }
|
public class class_name {
/**
 * Advances highestContiguousMsgId by consuming contiguous ids from
 * nonContiguousMsgIds. Copy annotated with dependency markers.
 */
private void reconcile() {
// If nonContiguousMsgIds is empty then nothing to reconcile.
if (nonContiguousMsgIds.isEmpty()) {
return; // depends on control dependency: [if], data = [none]
}
// This happens when a passive is started after Active has moved on and
// passive starts to see msgIDs starting from a number > 0.
// Once the sync is completed, fast forward highestContiguousMsgId.
// Post sync completion assuming platform will send all msgIds beyond highestContiguousMsgId.
if (highestContiguousMsgId == -1L && isSyncCompleted) {
Long min = nonContiguousMsgIds.last();
LOGGER.info("Setting highestContiguousMsgId to {} from -1", min); // depends on control dependency: [if], data = [none]
highestContiguousMsgId = min; // depends on control dependency: [if], data = [none]
nonContiguousMsgIds.removeIf(msgId -> msgId <= min); // depends on control dependency: [if], data = [none]
}
for (long msgId : nonContiguousMsgIds) {
if (msgId <= highestContiguousMsgId) {
nonContiguousMsgIds.remove(msgId); // depends on control dependency: [if], data = [(msgId]
} else if (msgId > highestContiguousMsgId + 1) {
break;
} else {
// the order is important..
highestContiguousMsgId = msgId; // depends on control dependency: [if], data = [none]
nonContiguousMsgIds.remove(msgId); // depends on control dependency: [if], data = [(msgId]
}
}
} }
|
public class class_name {
/**
 * Runs the full dependency-collapsing pipeline over {@code list} in a fixed
 * order, optionally applying CC processing, and sorts the result. Each stage
 * is traced via {@code printListSorted} when DEBUG is enabled.
 */
@Override
protected void collapseDependencies(List<TypedDependency> list, boolean CCprocess) {
    debugPrint("collapseDependencies: CCproc: " + CCprocess, list);
    correctDependencies(list);
    debugPrint("After correctDependencies:", list);
    eraseMultiConj(list);
    debugPrint("After collapse multi conj:", list);
    collapse2WP(list);
    debugPrint("After collapse2WP:", list);
    collapseFlatMWP(list);
    debugPrint("After collapseFlatMWP:", list);
    collapse2WPbis(list);
    debugPrint("After collapse2WPbis:", list);
    collapse3WP(list);
    debugPrint("After collapse3WP:", list);
    collapsePrepAndPoss(list);
    debugPrint("After PrepAndPoss:", list);
    collapseConj(list);
    debugPrint("After conj:", list);
    collapseReferent(list);
    debugPrint("After collapse referent:", list);
    if (CCprocess) {
        treatCC(list);
        debugPrint("After treatCC:", list);
    }
    removeDep(list);
    debugPrint("After remove dep:", list);
    Collections.sort(list);
    debugPrint("After all collapse:", list);
}

/** Prints the list under the given header, but only when DEBUG tracing is on. */
private void debugPrint(String message, List<TypedDependency> list) {
    if (DEBUG) {
        printListSorted(message, list);
    }
} }
|
public class class_name {
/**
 * Runs the dependency-collapsing pipeline in a fixed order, with optional CC
 * processing, then sorts the list. Copy annotated with dependency markers.
 */
@Override
protected void collapseDependencies(List<TypedDependency> list, boolean CCprocess) {
if (DEBUG) {
printListSorted("collapseDependencies: CCproc: " + CCprocess, list);
 // depends on control dependency: [if], data = [none]
}
correctDependencies(list);
if (DEBUG) {
printListSorted("After correctDependencies:", list);
 // depends on control dependency: [if], data = [none]
}
eraseMultiConj(list);
if (DEBUG) {
printListSorted("After collapse multi conj:", list);
 // depends on control dependency: [if], data = [none]
}
collapse2WP(list);
if (DEBUG) {
printListSorted("After collapse2WP:", list);
 // depends on control dependency: [if], data = [none]
}
collapseFlatMWP(list);
if (DEBUG) {
printListSorted("After collapseFlatMWP:", list);
 // depends on control dependency: [if], data = [none]
}
collapse2WPbis(list);
if (DEBUG) {
printListSorted("After collapse2WPbis:", list);
 // depends on control dependency: [if], data = [none]
}
collapse3WP(list);
if (DEBUG) {
printListSorted("After collapse3WP:", list);
 // depends on control dependency: [if], data = [none]
}
collapsePrepAndPoss(list);
if (DEBUG) {
printListSorted("After PrepAndPoss:", list);
 // depends on control dependency: [if], data = [none]
}
collapseConj(list);
if (DEBUG) {
printListSorted("After conj:", list);
 // depends on control dependency: [if], data = [none]
}
collapseReferent(list);
if (DEBUG) {
printListSorted("After collapse referent:", list);
 // depends on control dependency: [if], data = [none]
}
if (CCprocess) {
treatCC(list);
 // depends on control dependency: [if], data = [none]
if (DEBUG) {
printListSorted("After treatCC:", list);
 // depends on control dependency: [if], data = [none]
}
}
removeDep(list);
if (DEBUG) {
printListSorted("After remove dep:", list);
 // depends on control dependency: [if], data = [none]
}
Collections.sort(list);
if (DEBUG) {
printListSorted("After all collapse:", list);
 // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Fills in any unset configuration values with their built-in defaults.
 * Explicitly configured values are never overwritten; each setting is
 * defaulted independently of the others.
 */
private void setDefaults() {
    if (avroNamespacePrefix == null) {
        setAvroNamespacePrefix(DEFAULT_AVRO_NAMESPACE_PREFIX);
    }
    if (xsdInput == null) {
        setXsdInput(DEFAULT_INPUT_FOLDER_PATH);
    }
    if (output == null) {
        setOutput(DEFAULT_OUTPUT_FOLDER_PATH);
    }
} }
|
public class class_name {
/**
 * Fills in unset configuration values with built-in defaults.
 * Copy annotated with control/data-dependency markers.
 */
private void setDefaults() {
if (xsdInput == null) {
setXsdInput(DEFAULT_INPUT_FOLDER_PATH); // depends on control dependency: [if], data = [none]
}
if (output == null) {
setOutput(DEFAULT_OUTPUT_FOLDER_PATH); // depends on control dependency: [if], data = [none]
}
if (avroNamespacePrefix == null) {
setAvroNamespacePrefix(DEFAULT_AVRO_NAMESPACE_PREFIX); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Renders the name as a URL-encoded string: components are emitted in
 * reverse storage order, separated by commas.
 *
 * @return the URL-encoded representation of this name
 */
public String toUrl() {
    // StringBuilder: this buffer is method-local, so StringBuffer's
    // synchronization is pure overhead.
    StringBuilder buffer = new StringBuilder(DEFAULT_BUFFER_SIZE);
    for (int i = names.size() - 1; i >= 0; i--) {
        LdapRdn rdn = (LdapRdn) names.get(i);
        buffer.append(rdn.encodeUrl());
        if (i > 0) {
            buffer.append(",");
        }
    }
    return buffer.toString();
} }
|
public class class_name {
/**
 * Renders the name as a URL-encoded, comma-separated string in reverse
 * storage order. Copy annotated with dependency markers.
 */
public String toUrl() {
StringBuffer buffer = new StringBuffer(DEFAULT_BUFFER_SIZE);
for (int i = names.size() - 1; i >= 0; i--) {
LdapRdn n = (LdapRdn) names.get(i);
buffer.append(n.encodeUrl()); // depends on control dependency: [for], data = [none]
if (i > 0) {
buffer.append(","); // depends on control dependency: [if], data = [none]
}
}
return buffer.toString();
} }
|
public class class_name {
/**
 * Creates a Hamming-distance association scorer for the given descriptor type.
 *
 * @param tupleType descriptor class; currently only {@code TupleDesc_B} is supported
 * @return a Hamming-distance scorer for the type
 * @throws IllegalArgumentException if the type is not supported
 */
public static <D>
ScoreAssociation<D> scoreHamming( Class<D> tupleType ) {
    if( tupleType == TupleDesc_B.class ) {
        // Safe: the class-token check above guarantees D == TupleDesc_B.
        // (Replaces the previous raw-type cast with a scoped suppression.)
        @SuppressWarnings("unchecked")
        ScoreAssociation<D> score = (ScoreAssociation<D>) new ScoreAssociateHamming_B();
        return score;
    }
    throw new IllegalArgumentException("Hamming distance not yet supported for type "+tupleType.getSimpleName());
} }
|
public class class_name {
/**
 * Creates a Hamming-distance scorer for the given descriptor type; only
 * TupleDesc_B is supported. Copy annotated with dependency markers.
 */
public static <D>
ScoreAssociation<D> scoreHamming( Class<D> tupleType ) {
if( tupleType == TupleDesc_B.class ) {
return (ScoreAssociation)new ScoreAssociateHamming_B(); // depends on control dependency: [if], data = [none]
}
throw new IllegalArgumentException("Hamming distance not yet supported for type "+tupleType.getSimpleName());
} }
|
public class class_name {
/**
 * Tests whether every character of {@code str} satisfies {@code isLetter}.
 * Vacuously true for the empty string (matching the original behavior).
 */
public static boolean isAlphabetic(String str)
{
    boolean allLetters = true;
    // Stop scanning as soon as one non-letter is seen.
    for (int i = 0; i < str.length() && allLetters; i++) {
        allLetters = isLetter(str.charAt(i));
    }
    return allLetters;
} }
|
public class class_name {
/**
 * True iff every character satisfies isLetter; vacuously true for the empty
 * string. Copy annotated with dependency markers.
 */
public static boolean isAlphabetic(String str)
{
for (char c : str.toCharArray()) {
if (!isLetter(c)) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} }
|
public class class_name {
/**
 * Dispatches gauge events by type name after delegating to the superclass.
 * RECALC refreshes cached range values and redraws; SECTIONS reloads the
 * section list; VISIBILITY re-applies node visibility and resizes.
 */
@Override protected void handleEvents(final String EVENT_TYPE) {
    super.handleEvents(EVENT_TYPE);
    if (EVENT_TYPE == null) {
        // No recognizable event: nothing to do (same as matching no branch).
        return;
    }
    switch (EVENT_TYPE) {
        case "RECALC":
            minValue  = gauge.getMinValue();
            maxValue  = gauge.getMaxValue();
            range     = gauge.getRange();
            angleStep = ANGLE_RANGE / range;
            redraw();
            setBar(gauge.getCurrentValue());
            break;
        case "SECTIONS":
            sections = gauge.getSections();
            break;
        case "VISIBILITY":
            Helper.enableNode(valueBkgText, gauge.isValueVisible());
            Helper.enableNode(valueText, gauge.isValueVisible());
            Helper.enableNode(titleText, !gauge.getTitle().isEmpty());
            sectionsVisible  = gauge.getSectionsVisible();
            thresholdVisible = gauge.isThresholdVisible();
            resize();
            redraw();
            break;
        default:
            break;
    }
} }
|
public class class_name {
/**
 * Dispatches gauge events (RECALC / SECTIONS / VISIBILITY) after delegating
 * to the superclass. Copy annotated with dependency markers.
 */
@Override protected void handleEvents(final String EVENT_TYPE) {
super.handleEvents(EVENT_TYPE);
if ("RECALC".equals(EVENT_TYPE)) {
minValue = gauge.getMinValue(); // depends on control dependency: [if], data = [none]
maxValue = gauge.getMaxValue(); // depends on control dependency: [if], data = [none]
range = gauge.getRange(); // depends on control dependency: [if], data = [none]
angleStep = ANGLE_RANGE / range; // depends on control dependency: [if], data = [none]
redraw(); // depends on control dependency: [if], data = [none]
setBar(gauge.getCurrentValue()); // depends on control dependency: [if], data = [none]
} else if ("SECTIONS".equals(EVENT_TYPE)) {
sections = gauge.getSections(); // depends on control dependency: [if], data = [none]
} else if ("VISIBILITY".equals(EVENT_TYPE)) {
Helper.enableNode(valueBkgText, gauge.isValueVisible()); // depends on control dependency: [if], data = [none]
Helper.enableNode(valueText, gauge.isValueVisible()); // depends on control dependency: [if], data = [none]
Helper.enableNode(titleText, !gauge.getTitle().isEmpty()); // depends on control dependency: [if], data = [none]
sectionsVisible = gauge.getSectionsVisible(); // depends on control dependency: [if], data = [none]
thresholdVisible = gauge.isThresholdVisible(); // depends on control dependency: [if], data = [none]
resize(); // depends on control dependency: [if], data = [none]
redraw(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Applies the given gradient: splits it per layer, optionally divides by the
 * minibatch size, runs per-layer pre-apply (e.g. clipping), then applies all
 * updater blocks inside the updater working-memory workspace. External
 * gradients (not the flattened view) take the external-update path.
 * The exact statement order is load-bearing; do not reorder.
 */
public synchronized void update(Gradient gradient, int iteration, int epoch, int batchSize, LayerWorkspaceMgr workspaceMgr) {
//First: check if gradient is standard or external...
//In a MultiLayerNetwork, the INDArray returned by .gradient() is always the standard full view array
// hence should be the same object under normal circumstances
boolean isExternal = gradient.gradient() != getFlattenedGradientsView();
//Split up the gradients on a per-layer basis, for pre-apply
Map<String, Gradient> layerGradients = new HashMap<>();
Trainable[] layers = getOrderedLayers();
if (layers.length == 1 && isSingleLayerUpdater()) {
layerGradients.put(layers[0].getConfig().getLayerName(), gradient);
} else {
// Gradient keys are "<layerName>_<paramName>"; split on the LAST underscore.
for (Map.Entry<String, INDArray> gradientPair : gradient.gradientForVariable().entrySet()) {
String key = gradientPair.getKey();
int idx = key.lastIndexOf('_');
if (idx == -1)
throw new IllegalStateException(
"Invalid key: Gradient key does not have layer separator: \"" + key + "\"");
String layerName = key.substring(0, idx);
Gradient g = layerGradients.get(layerName);
if (g == null) {
g = new DefaultGradient();
layerGradients.put(layerName, g);
}
String newKey = key.substring(idx + 1);
g.setGradientFor(newKey, gradientPair.getValue());
}
}
if(isMiniBatch()){
divideByMinibatch(isExternal, gradient, batchSize);
}
//PRE apply (gradient clipping, etc): done on a per-layer basis
for (Map.Entry<String, Gradient> entry : layerGradients.entrySet()) {
String layerName = entry.getKey();
Trainable layer = layersByName.get(layerName);
preApply(layer, layerGradients.get(layerName), iteration);
}
//Apply the updaters in blocks. This also applies LR and momentum schedules, L1 and L2
if(getClass() != LayerUpdater.class){
//OK for LayerUpdater as this is part of layerwise pretraining
workspaceMgr.assertNotOpen(ArrayType.UPDATER_WORKING_MEM, "Updater working memory");
}
for (UpdaterBlock ub : updaterBlocks) {
if (ub.skipDueToPretrainConfig(this instanceof LayerUpdater)) {
//Should skip some updater blocks sometimes
//For example, VAE decoder params while doing supervised backprop
continue;
}
try(MemoryWorkspace ws = workspaceMgr.notifyScopeEntered(ArrayType.UPDATER_WORKING_MEM)){
if (isExternal) {
//RL4J etc type case: calculate gradients in 1 net, update them in another
ub.updateExternalGradient(iteration, epoch, gradient.gradient(), getParams());
} else {
//Standard case
ub.update(iteration, epoch);
}
}
}
} }
|
public class class_name {
/**
 * Applies the given gradient: per-layer split, optional minibatch division,
 * per-layer pre-apply, then updater-block application inside the updater
 * workspace. Copy annotated with control/data-dependency markers.
 */
public synchronized void update(Gradient gradient, int iteration, int epoch, int batchSize, LayerWorkspaceMgr workspaceMgr) {
//First: check if gradient is standard or external...
//In a MultiLayerNetwork, the INDArray returned by .gradient() is always the standard full view array
// hence should be the same object under normal circumstances
boolean isExternal = gradient.gradient() != getFlattenedGradientsView();
//Split up the gradients on a per-layer basis, for pre-apply
Map<String, Gradient> layerGradients = new HashMap<>();
Trainable[] layers = getOrderedLayers();
if (layers.length == 1 && isSingleLayerUpdater()) {
layerGradients.put(layers[0].getConfig().getLayerName(), gradient); // depends on control dependency: [if], data = [none]
} else {
for (Map.Entry<String, INDArray> gradientPair : gradient.gradientForVariable().entrySet()) {
String key = gradientPair.getKey();
int idx = key.lastIndexOf('_');
if (idx == -1)
throw new IllegalStateException(
"Invalid key: Gradient key does not have layer separator: \"" + key + "\"");
String layerName = key.substring(0, idx);
Gradient g = layerGradients.get(layerName);
if (g == null) {
g = new DefaultGradient(); // depends on control dependency: [if], data = [none]
layerGradients.put(layerName, g); // depends on control dependency: [if], data = [none]
}
String newKey = key.substring(idx + 1);
g.setGradientFor(newKey, gradientPair.getValue()); // depends on control dependency: [for], data = [gradientPair]
}
}
if(isMiniBatch()){
divideByMinibatch(isExternal, gradient, batchSize); // depends on control dependency: [if], data = [none]
}
//PRE apply (gradient clipping, etc): done on a per-layer basis
for (Map.Entry<String, Gradient> entry : layerGradients.entrySet()) {
String layerName = entry.getKey();
Trainable layer = layersByName.get(layerName);
preApply(layer, layerGradients.get(layerName), iteration); // depends on control dependency: [for], data = [none]
}
//Apply the updaters in blocks. This also applies LR and momentum schedules, L1 and L2
if(getClass() != LayerUpdater.class){
//OK for LayerUpdater as this is part of layerwise pretraining
workspaceMgr.assertNotOpen(ArrayType.UPDATER_WORKING_MEM, "Updater working memory"); // depends on control dependency: [if], data = [none]
}
for (UpdaterBlock ub : updaterBlocks) {
if (ub.skipDueToPretrainConfig(this instanceof LayerUpdater)) {
//Should skip some updater blocks sometimes
//For example, VAE decoder params while doing supervised backprop
continue;
}
try(MemoryWorkspace ws = workspaceMgr.notifyScopeEntered(ArrayType.UPDATER_WORKING_MEM)){
if (isExternal) {
//RL4J etc type case: calculate gradients in 1 net, update them in another
ub.updateExternalGradient(iteration, epoch, gradient.gradient(), getParams()); // depends on control dependency: [if], data = [none]
} else {
//Standard case
ub.update(iteration, epoch); // depends on control dependency: [if], data = [none]
}
}
}
} }
|
public class class_name {
/**
 * Evaluates the ACL for the given attribute: the first matching rule wins
 * (allow unless it is a deny rule); with no match — or a null attribute —
 * the default policy applies.
 */
boolean isAllowed(String attribute) {
    if (attribute == null) {
        // Nothing to match against; fall back to the default policy.
        return defaultAllow;
    }
    for (AclMatch rule : acl) {
        if (rule.matches(attribute)) {
            return !rule.isDeny();
        }
    }
    return defaultAllow;
} }
|
public class class_name {
/**
 * First matching ACL rule wins; no match (or null attribute) falls back to
 * the default policy. Copy annotated with dependency markers.
 */
boolean isAllowed(String attribute) {
if (attribute != null) {
for (AclMatch rule : acl) {
if (rule.matches(attribute)) {
return !rule.isDeny(); // depends on control dependency: [if], data = [none]
}
}
}
return defaultAllow;
} }
|
public class class_name {
/**
 * Replaces the attribute values with a defensive copy of the given
 * collection; a null argument clears the field.
 */
public void setAttributeValues(java.util.Collection<AttributeValue> attributeValues) {
    this.attributeValues = (attributeValues == null)
            ? null
            : new java.util.ArrayList<AttributeValue>(attributeValues);
} }
|
public class class_name {
/**
 * Stores a defensive copy of the given values; null clears the field.
 * Copy annotated with dependency markers.
 */
public void setAttributeValues(java.util.Collection<AttributeValue> attributeValues) {
if (attributeValues == null) {
this.attributeValues = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.attributeValues = new java.util.ArrayList<AttributeValue>(attributeValues);
} }
|
public class class_name {
/**
 * Log probability density: log f(x) = -z - exp(-z) - log(beta) with
 * z = (x - mu) / beta.
 *
 * @param x evaluation point
 * @param mu location parameter
 * @param beta scale parameter
 * @return the log density at {@code x}
 */
public static double logpdf(double x, double mu, double beta) {
    // The density vanishes at -infinity, so its log is -infinity.
    if (Double.NEGATIVE_INFINITY == x) {
        return Double.NEGATIVE_INFINITY;
    }
    // Standardize, then evaluate the closed-form log-density.
    final double standardized = (x - mu) / beta;
    return -standardized - FastMath.exp(-standardized) - FastMath.log(beta);
} }
|
public class class_name {
/**
 * Log density: -z - exp(-z) - log(beta) with z = (x - mu) / beta; -infinity
 * at x = -infinity. Copy annotated with dependency markers.
 */
public static double logpdf(double x, double mu, double beta) {
if(x == Double.NEGATIVE_INFINITY) {
return Double.NEGATIVE_INFINITY; // depends on control dependency: [if], data = [none]
}
final double z = (x - mu) / beta;
return -z - FastMath.exp(-z) - FastMath.log(beta);
} }
|
public class class_name {
/**
 * Formats the given date using the shared {@code rfcDateFormat}.
 * The lock is presumably needed because the shared formatter (likely a
 * SimpleDateFormat) is not thread-safe — confirm before removing it.
 *
 * @param val the date to format
 * @return the formatted date string
 * @throws RuntimeException wrapping any TimezonesException from the default-zone lookup
 */
public static String rfcDate(final Date val) {
synchronized (rfcDateFormat) {
try {
// Re-apply the default zone on each call; the shared formatter's zone
// may have been changed elsewhere (can't tell from here).
rfcDateFormat.setTimeZone(Timezones.getDefaultTz());
} catch (TimezonesException tze) {
// Checked lookup failure escalated to unchecked, cause preserved.
throw new RuntimeException(tze);
}
return rfcDateFormat.format(val);
}
} }
|
public class class_name {
/**
 * Formats the date with the shared (locked) rfcDateFormat after re-applying
 * the default time zone. Copy annotated with dependency markers.
 */
public static String rfcDate(final Date val) {
synchronized (rfcDateFormat) {
try {
rfcDateFormat.setTimeZone(Timezones.getDefaultTz()); // depends on control dependency: [try], data = [none]
} catch (TimezonesException tze) {
throw new RuntimeException(tze);
} // depends on control dependency: [catch], data = [none]
return rfcDateFormat.format(val);
}
} }
|
public class class_name {
/**
 * Determines a file name for the resource. If the resolved name already has
 * an extension it is returned as-is; otherwise an extension is derived from
 * the resource's mime type (its registered extension first, then a pattern
 * match on the type string), falling back to {@code extFallback}, and
 * appended unless the name already ends with it (case-insensitive).
 */
public static String getFilename(Resource resource, String extFallback) {
    String filename = getResourceName(resource);
    if (HAS_FILE_EXT_PATTERN.matcher(filename).matches()) {
        // Name already carries an extension; use it unchanged.
        return filename;
    }
    // Try to derive an extension suffix from the resource's mime type.
    String suffix = null;
    MimeType mimeType = getMimeType(resource);
    if (mimeType != null) {
        String ext = mimeType.getExtension();
        if (StringUtils.isNotBlank(ext)) {
            suffix = ext;
        } else {
            Matcher matcher = FILE_EXT_FROM_MIME.matcher(mimeType.toString());
            if (matcher.matches()) {
                suffix = "." + matcher.group(1);
            }
        }
    }
    // Fall back to the caller-supplied extension when the mime type gave nothing.
    if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(extFallback)) {
        suffix = "." + extFallback;
    }
    // Append the suffix unless the name already ends with it (case-insensitive).
    if (StringUtils.isNotBlank(suffix)
            && !filename.toLowerCase().endsWith(suffix.toLowerCase())) {
        filename += suffix;
    }
    return filename;
} }
|
public class class_name {
public static String getFilename(Resource resource, String extFallback) {
String filename = getResourceName(resource);
if (!HAS_FILE_EXT_PATTERN.matcher(filename).matches()) {
String filenameEnd = null;
MimeType mimeType = getMimeType(resource);
if (mimeType != null) {
String ext = mimeType.getExtension();
if (StringUtils.isNotBlank(ext)) {
filenameEnd = ext; // depends on control dependency: [if], data = [none]
} else {
Matcher matcher = FILE_EXT_FROM_MIME.matcher(mimeType.toString());
if (matcher.matches()) {
filenameEnd = "." + matcher.group(1); // depends on control dependency: [if], data = [none]
}
}
}
if (StringUtils.isBlank(filenameEnd) && StringUtils.isNotBlank(extFallback)) {
filenameEnd = "." + extFallback; // depends on control dependency: [if], data = [none]
}
if (StringUtils.isNotBlank(filenameEnd) &&
!filename.toLowerCase().endsWith(filenameEnd.toLowerCase())) {
filename += filenameEnd; // depends on control dependency: [if], data = [none]
}
}
return filename;
} }
|
public class class_name {
@SuppressWarnings("checkstyle:hiddenfield")
private <T extends Time> void subscribe(final Class<T> eventClass, final Set<EventHandler<T>> handlers) {
for (final EventHandler<T> handler : handlers) {
LOG.log(Level.FINEST, "Subscribe: event {0} handler {1}", new Object[] {eventClass.getName(), handler});
this.handlers.subscribe(eventClass, handler);
}
} }
|
public class class_name {
@SuppressWarnings("checkstyle:hiddenfield")
private <T extends Time> void subscribe(final Class<T> eventClass, final Set<EventHandler<T>> handlers) {
for (final EventHandler<T> handler : handlers) {
LOG.log(Level.FINEST, "Subscribe: event {0} handler {1}", new Object[] {eventClass.getName(), handler}); // depends on control dependency: [for], data = [handler]
this.handlers.subscribe(eventClass, handler); // depends on control dependency: [for], data = [handler]
}
} }
|
public class class_name {
@Override
public synchronized Connection getConnection() {
if (availableConnections == null) {
throw new DbSqlException("Connection pool is not initialized");
}
if (!availableConnections.isEmpty()) {
int lastIndex = availableConnections.size() - 1;
ConnectionData existingConnection = availableConnections.get(lastIndex);
availableConnections.remove(lastIndex);
// If conn on available list is closed (e.g., it timed out), then remove it from available list
// and repeat the process of obtaining a conn. Also wake up threads that were waiting for a
// conn because maxConnection limit was reached.
long now = System.currentTimeMillis();
boolean isValid = isConnectionValid(existingConnection, now);
if (!isValid) {
if (log.isDebugEnabled()) {
log.debug("Pooled connection not valid, resetting");
}
notifyAll(); // freed up a spot for anybody waiting
return getConnection();
} else {
if (log.isDebugEnabled()) {
log.debug("Returning valid pooled connection");
}
busyConnections.add(existingConnection);
existingConnection.lastUsed = now;
return existingConnection.connection;
}
}
if (log.isDebugEnabled()) {
log.debug("No more available connections");
}
// no available connections
if (((availableConnections.size() + busyConnections.size()) < maxConnections) && !connectionPending) {
makeBackgroundConnection();
} else if (!waitIfBusy) {
throw new DbSqlException("Connection limit reached: " + maxConnections);
}
// wait for either a new conn to be established (if you called makeBackgroundConnection) or for
// an existing conn to be freed up.
try {
wait();
} catch (InterruptedException ie) {
// ignore
}
// someone freed up a conn, so try again.
return getConnection();
} }
|
public class class_name {
@Override
public synchronized Connection getConnection() {
if (availableConnections == null) {
throw new DbSqlException("Connection pool is not initialized");
}
if (!availableConnections.isEmpty()) {
int lastIndex = availableConnections.size() - 1;
ConnectionData existingConnection = availableConnections.get(lastIndex);
availableConnections.remove(lastIndex); // depends on control dependency: [if], data = [none]
// If conn on available list is closed (e.g., it timed out), then remove it from available list
// and repeat the process of obtaining a conn. Also wake up threads that were waiting for a
// conn because maxConnection limit was reached.
long now = System.currentTimeMillis();
boolean isValid = isConnectionValid(existingConnection, now);
if (!isValid) {
if (log.isDebugEnabled()) {
log.debug("Pooled connection not valid, resetting"); // depends on control dependency: [if], data = [none]
}
notifyAll(); // freed up a spot for anybody waiting // depends on control dependency: [if], data = [none]
return getConnection(); // depends on control dependency: [if], data = [none]
} else {
if (log.isDebugEnabled()) {
log.debug("Returning valid pooled connection"); // depends on control dependency: [if], data = [none]
}
busyConnections.add(existingConnection); // depends on control dependency: [if], data = [none]
existingConnection.lastUsed = now; // depends on control dependency: [if], data = [none]
return existingConnection.connection; // depends on control dependency: [if], data = [none]
}
}
if (log.isDebugEnabled()) {
log.debug("No more available connections"); // depends on control dependency: [if], data = [none]
}
// no available connections
if (((availableConnections.size() + busyConnections.size()) < maxConnections) && !connectionPending) {
makeBackgroundConnection(); // depends on control dependency: [if], data = [none]
} else if (!waitIfBusy) {
throw new DbSqlException("Connection limit reached: " + maxConnections);
}
// wait for either a new conn to be established (if you called makeBackgroundConnection) or for
// an existing conn to be freed up.
try {
wait(); // depends on control dependency: [try], data = [none]
} catch (InterruptedException ie) {
// ignore
} // depends on control dependency: [catch], data = [none]
// someone freed up a conn, so try again.
return getConnection();
} }
|
public class class_name {
public JobResult buildJobConfiguration(String jobname){
HttpPost postRequest = new HttpPost(baseUrl + "job/" + jobname);
StringEntity postEntity = null;
try {
postEntity = new StringEntity(BUILD_ACTION);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
postEntity.setContentType("application/x-www-form-urlencoded");
postRequest.addHeader("Accept", "application/xml");
postRequest.setEntity(postEntity);
return jobResult(postRequest);
} }
|
public class class_name {
public JobResult buildJobConfiguration(String jobname){
HttpPost postRequest = new HttpPost(baseUrl + "job/" + jobname);
StringEntity postEntity = null;
try {
postEntity = new StringEntity(BUILD_ACTION); // depends on control dependency: [try], data = [none]
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
postEntity.setContentType("application/x-www-form-urlencoded");
postRequest.addHeader("Accept", "application/xml");
postRequest.setEntity(postEntity);
return jobResult(postRequest);
} }
|
public class class_name {
@SafeVarargs
public final Set<Class<?>> scan(Class<? extends Annotation>... annotationTypes)
throws ClassNotFoundException {
List<String> packages = getPackages();
if (packages.isEmpty()) {
return Collections.emptySet();
}
ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(
false);
scanner.setEnvironment(this.context.getEnvironment());
scanner.setResourceLoader(this.context);
for (Class<? extends Annotation> annotationType : annotationTypes) {
scanner.addIncludeFilter(new AnnotationTypeFilter(annotationType));
}
Set<Class<?>> entitySet = new HashSet<>();
for (String basePackage : packages) {
if (StringUtils.hasText(basePackage)) {
for (BeanDefinition candidate : scanner
.findCandidateComponents(basePackage)) {
entitySet.add(ClassUtils.forName(candidate.getBeanClassName(),
this.context.getClassLoader()));
}
}
}
return entitySet;
} }
|
public class class_name {
@SafeVarargs
public final Set<Class<?>> scan(Class<? extends Annotation>... annotationTypes)
throws ClassNotFoundException {
List<String> packages = getPackages();
if (packages.isEmpty()) {
return Collections.emptySet();
}
ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(
false);
scanner.setEnvironment(this.context.getEnvironment());
scanner.setResourceLoader(this.context);
for (Class<? extends Annotation> annotationType : annotationTypes) {
scanner.addIncludeFilter(new AnnotationTypeFilter(annotationType));
}
Set<Class<?>> entitySet = new HashSet<>();
for (String basePackage : packages) {
if (StringUtils.hasText(basePackage)) {
for (BeanDefinition candidate : scanner
.findCandidateComponents(basePackage)) {
entitySet.add(ClassUtils.forName(candidate.getBeanClassName(),
this.context.getClassLoader())); // depends on control dependency: [for], data = [none]
}
}
}
return entitySet;
} }
|
public class class_name {
@Override
public Object getArgumentValue(Object object, Service service) throws ClassCastException {
if (object instanceof String) {
Provider provider = Providers.getProvider((String) object);
if (provider != null) {
return provider;
}
}
return null;
// throw new
// ClassCastException("Must supply the name of a provider as a string");
} }
|
public class class_name {
@Override
public Object getArgumentValue(Object object, Service service) throws ClassCastException {
if (object instanceof String) {
Provider provider = Providers.getProvider((String) object);
if (provider != null) {
return provider; // depends on control dependency: [if], data = [none]
}
}
return null;
// throw new
// ClassCastException("Must supply the name of a provider as a string");
} }
|
public class class_name {
public boolean contentEqualsIgnoreCase(CharSequence string) {
if (string == null || string.length() != length()) {
return false;
}
if (string.getClass() == AsciiString.class) {
AsciiString rhs = (AsciiString) string;
for (int i = arrayOffset(), j = rhs.arrayOffset(); i < length(); ++i, ++j) {
if (!equalsIgnoreCase(value[i], rhs.value[j])) {
return false;
}
}
return true;
}
for (int i = arrayOffset(), j = 0; i < length(); ++i, ++j) {
if (!equalsIgnoreCase(b2c(value[i]), string.charAt(j))) {
return false;
}
}
return true;
} }
|
public class class_name {
public boolean contentEqualsIgnoreCase(CharSequence string) {
if (string == null || string.length() != length()) {
return false; // depends on control dependency: [if], data = [none]
}
if (string.getClass() == AsciiString.class) {
AsciiString rhs = (AsciiString) string;
for (int i = arrayOffset(), j = rhs.arrayOffset(); i < length(); ++i, ++j) {
if (!equalsIgnoreCase(value[i], rhs.value[j])) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true; // depends on control dependency: [if], data = [none]
}
for (int i = arrayOffset(), j = 0; i < length(); ++i, ++j) {
if (!equalsIgnoreCase(b2c(value[i]), string.charAt(j))) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} }
|
public class class_name {
private void ensureFill() throws JedisConnectionException {
if (count >= limit) {
try {
limit = in.read(buf);
count = 0;
if (limit == -1) {
throw new JedisConnectionException("Unexpected end of stream.");
}
} catch (IOException e) {
throw new JedisConnectionException(e);
}
}
} }
|
public class class_name {
private void ensureFill() throws JedisConnectionException {
if (count >= limit) {
try {
limit = in.read(buf); // depends on control dependency: [try], data = [none]
count = 0; // depends on control dependency: [try], data = [none]
if (limit == -1) {
throw new JedisConnectionException("Unexpected end of stream.");
}
} catch (IOException e) {
throw new JedisConnectionException(e);
} // depends on control dependency: [catch], data = [none]
}
} }
|
public class class_name {
private List<IndexSearchDescriptor> findPrefixes(IndexSearchDescriptor desc, List<IndexSearchDescriptor> descriptors) {
List<IndexSearchDescriptor> result = new ArrayList<>();
for (IndexSearchDescriptor item : descriptors) {
if (isPrefixOf(item, desc)) {
result.add(item);
}
}
return result;
} }
|
public class class_name {
private List<IndexSearchDescriptor> findPrefixes(IndexSearchDescriptor desc, List<IndexSearchDescriptor> descriptors) {
List<IndexSearchDescriptor> result = new ArrayList<>();
for (IndexSearchDescriptor item : descriptors) {
if (isPrefixOf(item, desc)) {
result.add(item); // depends on control dependency: [if], data = [none]
}
}
return result;
} }
|
public class class_name {
public SqlApplicationConfiguration withInputs(Input... inputs) {
if (this.inputs == null) {
setInputs(new java.util.ArrayList<Input>(inputs.length));
}
for (Input ele : inputs) {
this.inputs.add(ele);
}
return this;
} }
|
public class class_name {
public SqlApplicationConfiguration withInputs(Input... inputs) {
if (this.inputs == null) {
setInputs(new java.util.ArrayList<Input>(inputs.length)); // depends on control dependency: [if], data = [none]
}
for (Input ele : inputs) {
this.inputs.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
public URI construct(Map<String, String> options) {
// make a copy that can be modified
Map<String, String> uriData = Maps.newLinkedHashMap(options);
try {
// scheme should always be present, but default if necessary
String scheme = defaults.get(SCHEME);
if (uriData.containsKey(SCHEME)) {
scheme = uriData.remove(SCHEME);
}
StringBuilder builder = new StringBuilder();
builder.append(scheme).append(":");
if (!pattern.isOpaque()) {
// Build the URI(String,String,String,int,String,String,String)
StringBuilder authBuilder = new StringBuilder();
String user = constructUserInfo(uriData, defaults);
String host = removeNonDefault(HOST, uriData, defaults);
int port = constructPort(uriData, defaults);
if (user != null) {
authBuilder.append(user).append("@");
}
if (host != null) {
authBuilder.append(host);
}
if (port >= 0) {
authBuilder.append(":").append(port);
}
String auth = authBuilder.toString();
if (!auth.isEmpty()) {
builder.append("//").append(auth);
}
}
builder.append(constructPath(uriData, patternPath));
String query = constructQuery(uriData, defaults);
if (query != null) {
builder.append("?").append(query);
}
return new URI(builder.toString());
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("Could not build URI", ex);
}
} }
|
public class class_name {
public URI construct(Map<String, String> options) {
// make a copy that can be modified
Map<String, String> uriData = Maps.newLinkedHashMap(options);
try {
// scheme should always be present, but default if necessary
String scheme = defaults.get(SCHEME);
if (uriData.containsKey(SCHEME)) {
scheme = uriData.remove(SCHEME); // depends on control dependency: [if], data = [none]
}
StringBuilder builder = new StringBuilder();
builder.append(scheme).append(":");
if (!pattern.isOpaque()) {
// Build the URI(String,String,String,int,String,String,String)
StringBuilder authBuilder = new StringBuilder();
String user = constructUserInfo(uriData, defaults);
String host = removeNonDefault(HOST, uriData, defaults);
int port = constructPort(uriData, defaults);
if (user != null) {
authBuilder.append(user).append("@"); // depends on control dependency: [if], data = [(user]
}
if (host != null) {
authBuilder.append(host); // depends on control dependency: [if], data = [(host]
}
if (port >= 0) {
authBuilder.append(":").append(port); // depends on control dependency: [if], data = [(port]
}
String auth = authBuilder.toString();
if (!auth.isEmpty()) {
builder.append("//").append(auth);
}
}
builder.append(constructPath(uriData, patternPath)); // depends on control dependency: [if], data = [none]
String query = constructQuery(uriData, defaults);
if (query != null) {
builder.append("?").append(query); // depends on control dependency: [if], data = [(query]
}
return new URI(builder.toString()); // depends on control dependency: [if], data = [none]
} catch (URISyntaxException ex) { // depends on control dependency: [if], data = [none]
throw new IllegalArgumentException("Could not build URI", ex);
}
} }
|
public class class_name {
public boolean before(final Interval<C> that) {
if(this.hasNone() || that.hasNone()) {
return false;
}
return this.dimension == that.dimension &&
this.range.upperEndpoint().compareTo(that.range.lowerEndpoint()) < 0;
} }
|
public class class_name {
public boolean before(final Interval<C> that) {
if(this.hasNone() || that.hasNone()) {
return false; // depends on control dependency: [if], data = [none]
}
return this.dimension == that.dimension &&
this.range.upperEndpoint().compareTo(that.range.lowerEndpoint()) < 0;
} }
|
public class class_name {
private String fixDateForJdk(String format) {
if (Constants.JRE_IS_MINIMUM_JAVA7) {
return format;
}
// JDK 6 - fix year formatting
// a. lower case Y to y
if (format.contains("Y")) {
format = format.replace("Y", "y");
}
// gotta love regex
// use lookahead to match isolated y/yyy with yyyy
format = format.replaceAll("((?<!y)(?:y|yyy)(?!y))", "yyyy");
return format;
} }
|
public class class_name {
private String fixDateForJdk(String format) {
if (Constants.JRE_IS_MINIMUM_JAVA7) {
return format; // depends on control dependency: [if], data = [none]
}
// JDK 6 - fix year formatting
// a. lower case Y to y
if (format.contains("Y")) {
format = format.replace("Y", "y"); // depends on control dependency: [if], data = [none]
}
// gotta love regex
// use lookahead to match isolated y/yyy with yyyy
format = format.replaceAll("((?<!y)(?:y|yyy)(?!y))", "yyyy");
return format;
} }
|
public class class_name {
@Override
public AMQP.BasicProperties fromMessageProperties(MessageProperties source, String charset) {
final TraceeFilterConfiguration filterConfiguration = backend.getConfiguration(profile);
if (!backend.isEmpty() && filterConfiguration.shouldProcessContext(AsyncDispatch)) {
final Map<String, String> filteredParams = filterConfiguration.filterDeniedParams(backend.copyToMap(), AsyncDispatch);
source.getHeaders().put(TPIC_HEADER, filteredParams);
}
return super.fromMessageProperties(source, charset);
} }
|
public class class_name {
@Override
public AMQP.BasicProperties fromMessageProperties(MessageProperties source, String charset) {
final TraceeFilterConfiguration filterConfiguration = backend.getConfiguration(profile);
if (!backend.isEmpty() && filterConfiguration.shouldProcessContext(AsyncDispatch)) {
final Map<String, String> filteredParams = filterConfiguration.filterDeniedParams(backend.copyToMap(), AsyncDispatch);
source.getHeaders().put(TPIC_HEADER, filteredParams); // depends on control dependency: [if], data = [none]
}
return super.fromMessageProperties(source, charset);
} }
|
public class class_name {
public Synset getLeastCommonSubsumer(Synset synset1, Synset synset2) {
Preconditions.checkNotNull(synset1);
Preconditions.checkNotNull(synset2);
if (synset1.equals(synset2)) {
return synset1;
}
List<Synset> path = shortestPath(synset1, synset2);
if (path.isEmpty()) {
return null;
}
int node1Height = synset1.depth();
int node2Height = synset2.depth();
int minHeight = Math.min(node1Height, node2Height);
int maxHeight = Integer.MIN_VALUE;
Synset lcs = null;
for (Synset s : path) {
if (s.equals(synset1) || s.equals(synset2)) {
continue;
}
int height = s.depth();
if (height < minHeight && height > maxHeight) {
maxHeight = height;
lcs = s;
}
}
if (lcs == null) {
if (node1Height < node2Height) {
return synset1;
}
return synset2;
}
return lcs;
} }
|
public class class_name {
public Synset getLeastCommonSubsumer(Synset synset1, Synset synset2) {
Preconditions.checkNotNull(synset1);
Preconditions.checkNotNull(synset2);
if (synset1.equals(synset2)) {
return synset1; // depends on control dependency: [if], data = [none]
}
List<Synset> path = shortestPath(synset1, synset2);
if (path.isEmpty()) {
return null; // depends on control dependency: [if], data = [none]
}
int node1Height = synset1.depth();
int node2Height = synset2.depth();
int minHeight = Math.min(node1Height, node2Height);
int maxHeight = Integer.MIN_VALUE;
Synset lcs = null;
for (Synset s : path) {
if (s.equals(synset1) || s.equals(synset2)) {
continue;
}
int height = s.depth();
if (height < minHeight && height > maxHeight) {
maxHeight = height; // depends on control dependency: [if], data = [none]
lcs = s; // depends on control dependency: [if], data = [none]
}
}
if (lcs == null) {
if (node1Height < node2Height) {
return synset1; // depends on control dependency: [if], data = [none]
}
return synset2; // depends on control dependency: [if], data = [none]
}
return lcs;
} }
|
public class class_name {
public static Rational binomial(Rational n, BigInteger m) {
if (m.compareTo(BigInteger.ZERO) == 0) {
return Rational.ONE;
}
Rational bin = n;
for (BigInteger i = BigInteger.valueOf(2); i.compareTo(m) != 1; i = i.add(BigInteger.ONE)) {
bin = bin.multiply(n.subtract(i.subtract(BigInteger.ONE))).divide(i);
}
return bin;
} }
|
public class class_name {
public static Rational binomial(Rational n, BigInteger m) {
if (m.compareTo(BigInteger.ZERO) == 0) {
return Rational.ONE; // depends on control dependency: [if], data = [none]
}
Rational bin = n;
for (BigInteger i = BigInteger.valueOf(2); i.compareTo(m) != 1; i = i.add(BigInteger.ONE)) {
bin = bin.multiply(n.subtract(i.subtract(BigInteger.ONE))).divide(i); // depends on control dependency: [for], data = [i]
}
return bin;
} }
|
public class class_name {
public static String trimTo(final String aString, final String aDefault) {
if (aString == null) {
return aDefault;
}
final String trimmed = aString.trim();
return trimmed.length() == 0 ? aDefault : trimmed;
} }
|
public class class_name {
public static String trimTo(final String aString, final String aDefault) {
if (aString == null) {
return aDefault; // depends on control dependency: [if], data = [none]
}
final String trimmed = aString.trim();
return trimmed.length() == 0 ? aDefault : trimmed;
} }
|
public class class_name {
public synchronized void logConfigReport() {
ConfigStatus reportStatus = mConfigCheckReport.getConfigStatus();
if (reportStatus.equals(ConfigStatus.PASSED)) {
LOG.info(CONSISTENT_CONFIGURATION_INFO);
} else if (reportStatus.equals(ConfigStatus.WARN)) {
LOG.warn("{}\nWarnings: {}", INCONSISTENT_CONFIGURATION_INFO,
mConfigCheckReport.getConfigWarns().values().stream()
.map(Object::toString).limit(LOG_CONF_SIZE).collect(Collectors.joining(", ")));
} else {
LOG.error("{}\nErrors: {}\nWarnings: {}", INCONSISTENT_CONFIGURATION_INFO,
mConfigCheckReport.getConfigErrors().values().stream()
.map(Object::toString).limit(LOG_CONF_SIZE).collect(Collectors.joining(", ")),
mConfigCheckReport.getConfigWarns().values().stream()
.map(Object::toString).limit(LOG_CONF_SIZE).collect(Collectors.joining(", ")));
}
} }
|
public class class_name {
public synchronized void logConfigReport() {
ConfigStatus reportStatus = mConfigCheckReport.getConfigStatus();
if (reportStatus.equals(ConfigStatus.PASSED)) {
LOG.info(CONSISTENT_CONFIGURATION_INFO); // depends on control dependency: [if], data = [none]
} else if (reportStatus.equals(ConfigStatus.WARN)) {
LOG.warn("{}\nWarnings: {}", INCONSISTENT_CONFIGURATION_INFO,
mConfigCheckReport.getConfigWarns().values().stream()
.map(Object::toString).limit(LOG_CONF_SIZE).collect(Collectors.joining(", "))); // depends on control dependency: [if], data = [none]
} else {
LOG.error("{}\nErrors: {}\nWarnings: {}", INCONSISTENT_CONFIGURATION_INFO,
mConfigCheckReport.getConfigErrors().values().stream()
.map(Object::toString).limit(LOG_CONF_SIZE).collect(Collectors.joining(", ")),
mConfigCheckReport.getConfigWarns().values().stream()
.map(Object::toString).limit(LOG_CONF_SIZE).collect(Collectors.joining(", "))); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
final boolean isMaster() {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
JmfTr.entry(this, tc, "isMaster");
if (master == compatibilityWrapperOrSelf) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
JmfTr.exit(this, tc, "isMaster", Boolean.TRUE);
return Boolean.TRUE;
}
else {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
JmfTr.exit(this, tc, "isMaster", Boolean.FALSE);
return Boolean.FALSE;
}
} }
|
public class class_name {
final boolean isMaster() {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
JmfTr.entry(this, tc, "isMaster");
if (master == compatibilityWrapperOrSelf) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
JmfTr.exit(this, tc, "isMaster", Boolean.TRUE);
return Boolean.TRUE; // depends on control dependency: [if], data = [none]
}
else {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
JmfTr.exit(this, tc, "isMaster", Boolean.FALSE);
return Boolean.FALSE; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public <RET> RET save(final Object iPojo, final String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) {
checkOpenness();
if (iPojo == null)
return (RET) iPojo;
else if (OMultiValue.isMultiValue(iPojo)) {
// MULTI VALUE OBJECT: STORE SINGLE POJOS
for (Object pojo : OMultiValue.getMultiValueIterable(iPojo)) {
save(pojo, iClusterName);
}
return (RET) iPojo;
} else {
OSerializationThreadLocal.INSTANCE.get().clear();
// GET THE ASSOCIATED DOCUMENT
final Object proxiedObject = OObjectEntitySerializer.serializeObject(iPojo, this);
final ODocument record = getRecordByUserObject(proxiedObject, true);
try {
record.setInternalStatus(ORecordElement.STATUS.MARSHALLING);
if (!saveOnlyDirty || record.isDirty()) {
// REGISTER BEFORE TO SERIALIZE TO AVOID PROBLEMS WITH CIRCULAR DEPENDENCY
// registerUserObject(iPojo, record);
deleteOrphans((((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler())));
ODocument savedRecord = underlying
.save(record, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).setDoc(savedRecord);
((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).updateLoadedFieldMap(proxiedObject, false);
// RE-REGISTER FOR NEW RECORDS SINCE THE ID HAS CHANGED
registerUserObject(proxiedObject, record);
}
} finally {
record.setInternalStatus(ORecordElement.STATUS.LOADED);
}
return (RET) proxiedObject;
}
} }
|
public class class_name {
public <RET> RET save(final Object iPojo, final String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) {
checkOpenness();
if (iPojo == null)
return (RET) iPojo;
else if (OMultiValue.isMultiValue(iPojo)) {
// MULTI VALUE OBJECT: STORE SINGLE POJOS
for (Object pojo : OMultiValue.getMultiValueIterable(iPojo)) {
save(pojo, iClusterName);
// depends on control dependency: [for], data = [pojo]
}
return (RET) iPojo;
// depends on control dependency: [if], data = [none]
} else {
OSerializationThreadLocal.INSTANCE.get().clear();
// depends on control dependency: [if], data = [none]
// GET THE ASSOCIATED DOCUMENT
final Object proxiedObject = OObjectEntitySerializer.serializeObject(iPojo, this);
final ODocument record = getRecordByUserObject(proxiedObject, true);
try {
record.setInternalStatus(ORecordElement.STATUS.MARSHALLING);
// depends on control dependency: [try], data = [none]
if (!saveOnlyDirty || record.isDirty()) {
// REGISTER BEFORE TO SERIALIZE TO AVOID PROBLEMS WITH CIRCULAR DEPENDENCY
// registerUserObject(iPojo, record);
deleteOrphans((((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler())));
// depends on control dependency: [if], data = [none]
ODocument savedRecord = underlying
.save(record, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).setDoc(savedRecord);
// depends on control dependency: [if], data = [none]
((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).updateLoadedFieldMap(proxiedObject, false);
// depends on control dependency: [if], data = [none]
// RE-REGISTER FOR NEW RECORDS SINCE THE ID HAS CHANGED
registerUserObject(proxiedObject, record);
// depends on control dependency: [if], data = [none]
}
} finally {
record.setInternalStatus(ORecordElement.STATUS.LOADED);
}
return (RET) proxiedObject;
// depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public CreateClusterRequest withApplication(ApplicationConfig application) {
if (this.applications == null) {
this.applications = new ArrayList<ApplicationConfig>();
}
this.applications.add(application);
return this;
} }
|
public class class_name {
public CreateClusterRequest withApplication(ApplicationConfig application) {
if (this.applications == null) {
this.applications = new ArrayList<ApplicationConfig>(); // depends on control dependency: [if], data = [none]
}
this.applications.add(application);
return this;
} }
|
public class class_name {
public void configure(String rootUrl, Settings settings) {
Data<Read, ReadGroupSet, Reference> data = dataSources.get(rootUrl);
if (data == null) {
data = new Data<Read, ReadGroupSet, Reference>(settings, null);
dataSources.put(rootUrl, data);
} else {
data.settings = settings;
}
} }
|
public class class_name {
public void configure(String rootUrl, Settings settings) {
Data<Read, ReadGroupSet, Reference> data = dataSources.get(rootUrl);
if (data == null) {
data = new Data<Read, ReadGroupSet, Reference>(settings, null); // depends on control dependency: [if], data = [null)]
dataSources.put(rootUrl, data); // depends on control dependency: [if], data = [none]
} else {
data.settings = settings; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public Config setWanReplicationConfigs(Map<String, WanReplicationConfig> wanReplicationConfigs) {
this.wanReplicationConfigs.clear();
this.wanReplicationConfigs.putAll(wanReplicationConfigs);
for (final Entry<String, WanReplicationConfig> entry : this.wanReplicationConfigs.entrySet()) {
entry.getValue().setName(entry.getKey());
}
return this;
} }
|
public class class_name {
public Config setWanReplicationConfigs(Map<String, WanReplicationConfig> wanReplicationConfigs) {
this.wanReplicationConfigs.clear();
this.wanReplicationConfigs.putAll(wanReplicationConfigs);
for (final Entry<String, WanReplicationConfig> entry : this.wanReplicationConfigs.entrySet()) {
entry.getValue().setName(entry.getKey()); // depends on control dependency: [for], data = [entry]
}
return this;
} }
|
public class class_name {
private static JFreeChart createChart(
String title, String xAxis, String yAxis1, XYDataset dataset1, String yAxis2,
XYDataset dataset2
) {
JFreeChart chart =
ChartFactory.createXYLineChart(title, xAxis, yAxis1, dataset1, PlotOrientation.VERTICAL,
true, true, false);
final XYPlot plot = (XYPlot) chart.getPlot();
plot.setBackgroundPaint(Color.lightGray);
plot.setDomainGridlinePaint(Color.white);
plot.setRangeGridlinePaint(Color.white);
plot.setAxisOffset(new RectangleInsets(5.0, 5.0, 5.0, 5.0));
plot.setDomainCrosshairVisible(true);
plot.setRangeCrosshairVisible(true);
final XYItemRenderer r = plot.getRenderer();
int count = Math.min(dataset1.getSeriesCount(), dataset2.getSeriesCount());
if (r instanceof XYLineAndShapeRenderer) {
XYLineAndShapeRenderer renderer = (XYLineAndShapeRenderer) r;
renderer.setBaseStroke(LINE);
renderer.setAutoPopulateSeriesPaint(false);
renderer.setBaseShapesVisible(false);
renderer.setBaseShapesFilled(false);
renderer.setDrawSeriesLineAsPath(true);
for (int i = 0; i < count; i++) {
renderer.setSeriesPaint(i, COLORS.get(i % COLORS.size()));
}
}
chart.setBackgroundPaint(Color.white);
// chart two
{
final NumberAxis axis2 = new NumberAxis(yAxis2);
axis2.setAutoRangeIncludesZero(false);
plot.setRangeAxis(1, axis2);
plot.setDataset(1, dataset2);
plot.mapDatasetToRangeAxis(1, 1);
final StandardXYItemRenderer renderer = new StandardXYItemRenderer();
renderer.setAutoPopulateSeriesPaint(false);
renderer.setAutoPopulateSeriesStroke(false);
renderer.setBaseShapesVisible(false);
renderer.setBaseShapesFilled(false);
renderer.setDrawSeriesLineAsPath(true);
renderer.setBaseStroke(DASHED);
for (int i = 0; i < count; i++) {
renderer.setSeriesPaint(i, COLORS.get(i % COLORS.size()));
}
plot.setRenderer(1, renderer);
}
return chart;
} }
|
public class class_name {
private static JFreeChart createChart(
String title, String xAxis, String yAxis1, XYDataset dataset1, String yAxis2,
XYDataset dataset2
) {
JFreeChart chart =
ChartFactory.createXYLineChart(title, xAxis, yAxis1, dataset1, PlotOrientation.VERTICAL,
true, true, false);
final XYPlot plot = (XYPlot) chart.getPlot();
plot.setBackgroundPaint(Color.lightGray);
plot.setDomainGridlinePaint(Color.white);
plot.setRangeGridlinePaint(Color.white);
plot.setAxisOffset(new RectangleInsets(5.0, 5.0, 5.0, 5.0));
plot.setDomainCrosshairVisible(true);
plot.setRangeCrosshairVisible(true);
final XYItemRenderer r = plot.getRenderer();
int count = Math.min(dataset1.getSeriesCount(), dataset2.getSeriesCount());
if (r instanceof XYLineAndShapeRenderer) {
XYLineAndShapeRenderer renderer = (XYLineAndShapeRenderer) r;
renderer.setBaseStroke(LINE); // depends on control dependency: [if], data = [none]
renderer.setAutoPopulateSeriesPaint(false); // depends on control dependency: [if], data = [none]
renderer.setBaseShapesVisible(false); // depends on control dependency: [if], data = [none]
renderer.setBaseShapesFilled(false); // depends on control dependency: [if], data = [none]
renderer.setDrawSeriesLineAsPath(true); // depends on control dependency: [if], data = [none]
for (int i = 0; i < count; i++) {
renderer.setSeriesPaint(i, COLORS.get(i % COLORS.size())); // depends on control dependency: [for], data = [i]
}
}
chart.setBackgroundPaint(Color.white);
// chart two
{
final NumberAxis axis2 = new NumberAxis(yAxis2);
axis2.setAutoRangeIncludesZero(false);
plot.setRangeAxis(1, axis2);
plot.setDataset(1, dataset2);
plot.mapDatasetToRangeAxis(1, 1);
final StandardXYItemRenderer renderer = new StandardXYItemRenderer();
renderer.setAutoPopulateSeriesPaint(false);
renderer.setAutoPopulateSeriesStroke(false);
renderer.setBaseShapesVisible(false);
renderer.setBaseShapesFilled(false);
renderer.setDrawSeriesLineAsPath(true);
renderer.setBaseStroke(DASHED);
for (int i = 0; i < count; i++) {
renderer.setSeriesPaint(i, COLORS.get(i % COLORS.size())); // depends on control dependency: [for], data = [i]
}
plot.setRenderer(1, renderer);
}
return chart;
} }
|
public class class_name {
public static boolean closeEnoughToBond(IAtom atom1, IAtom atom2, double distanceFudgeFactor) {
if (!atom1.equals(atom2)) {
double distanceBetweenAtoms = atom1.getPoint3d().distance(atom2.getPoint3d());
double bondingDistance = atom1.getCovalentRadius() + atom2.getCovalentRadius();
if (distanceBetweenAtoms <= (distanceFudgeFactor * bondingDistance)) {
return true;
}
}
return false;
} }
|
public class class_name {
public static boolean closeEnoughToBond(IAtom atom1, IAtom atom2, double distanceFudgeFactor) {
if (!atom1.equals(atom2)) {
double distanceBetweenAtoms = atom1.getPoint3d().distance(atom2.getPoint3d());
double bondingDistance = atom1.getCovalentRadius() + atom2.getCovalentRadius();
if (distanceBetweenAtoms <= (distanceFudgeFactor * bondingDistance)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} }
|
public class class_name {
public void marshall(Resolver resolver, ProtocolMarshaller protocolMarshaller) {
if (resolver == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(resolver.getTypeName(), TYPENAME_BINDING);
protocolMarshaller.marshall(resolver.getFieldName(), FIELDNAME_BINDING);
protocolMarshaller.marshall(resolver.getDataSourceName(), DATASOURCENAME_BINDING);
protocolMarshaller.marshall(resolver.getResolverArn(), RESOLVERARN_BINDING);
protocolMarshaller.marshall(resolver.getRequestMappingTemplate(), REQUESTMAPPINGTEMPLATE_BINDING);
protocolMarshaller.marshall(resolver.getResponseMappingTemplate(), RESPONSEMAPPINGTEMPLATE_BINDING);
protocolMarshaller.marshall(resolver.getKind(), KIND_BINDING);
protocolMarshaller.marshall(resolver.getPipelineConfig(), PIPELINECONFIG_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
public void marshall(Resolver resolver, ProtocolMarshaller protocolMarshaller) {
if (resolver == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(resolver.getTypeName(), TYPENAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(resolver.getFieldName(), FIELDNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(resolver.getDataSourceName(), DATASOURCENAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(resolver.getResolverArn(), RESOLVERARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(resolver.getRequestMappingTemplate(), REQUESTMAPPINGTEMPLATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(resolver.getResponseMappingTemplate(), RESPONSEMAPPINGTEMPLATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(resolver.getKind(), KIND_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(resolver.getPipelineConfig(), PIPELINECONFIG_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public List<JAXBElement<? extends AbstractSurfaceType>> get_Surface() {
if (_Surface == null) {
_Surface = new ArrayList<JAXBElement<? extends AbstractSurfaceType>>();
}
return this._Surface;
} }
|
public class class_name {
public List<JAXBElement<? extends AbstractSurfaceType>> get_Surface() {
if (_Surface == null) {
_Surface = new ArrayList<JAXBElement<? extends AbstractSurfaceType>>(); // depends on control dependency: [if], data = [none]
}
return this._Surface;
} }
|
public class class_name {
public T marshal(S object) {
try {
final T result = (T) marshalHandle.invoke(object);
return result;
} catch (Throwable ex) {
if (ex.getCause() instanceof RuntimeException) {
throw (RuntimeException) ex.getCause();
}
throw new BindingException(ex.getMessage(), ex.getCause());
}
} }
|
public class class_name {
public T marshal(S object) {
try {
final T result = (T) marshalHandle.invoke(object);
return result;
// depends on control dependency: [try], data = [none]
} catch (Throwable ex) {
if (ex.getCause() instanceof RuntimeException) {
throw (RuntimeException) ex.getCause();
}
throw new BindingException(ex.getMessage(), ex.getCause());
}
// depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
@Override
public void leavingState(ParsingContext ctx) throws CommandFormatException {
final String stateId = ctx.getState().getId();
//System.out.println("left " + stateId + " '" + ctx.getCharacter() + "'");
if(ArgumentValueState.ID.equals(stateId) ||
CompositeState.OBJECT.equals(stateId) || CompositeState.LIST.equals(stateId)) {
if (!currentState.isOnSeparator()) {
if (stack != null && stack.peek() != null) {
stack.peek().addChild(currentState);
currentState = stack.pop();
if (!currentState.isComposite()) {
if (stack.peek() != null) {
stack.peek().addChild(currentState);
currentState = stack.pop();
}
}
}
}
} else if(QuotesState.ID.equals(stateId)) {
flag ^= QUOTES;
currentState.quoted();
} else if(EscapeCharacterState.ID.equals(stateId)) {
flag ^= ESCAPE;
}
} }
|
public class class_name {
@Override
public void leavingState(ParsingContext ctx) throws CommandFormatException {
final String stateId = ctx.getState().getId();
//System.out.println("left " + stateId + " '" + ctx.getCharacter() + "'");
if(ArgumentValueState.ID.equals(stateId) ||
CompositeState.OBJECT.equals(stateId) || CompositeState.LIST.equals(stateId)) {
if (!currentState.isOnSeparator()) {
if (stack != null && stack.peek() != null) {
stack.peek().addChild(currentState); // depends on control dependency: [if], data = [none]
currentState = stack.pop(); // depends on control dependency: [if], data = [none]
if (!currentState.isComposite()) {
if (stack.peek() != null) {
stack.peek().addChild(currentState); // depends on control dependency: [if], data = [none]
currentState = stack.pop(); // depends on control dependency: [if], data = [none]
}
}
}
}
} else if(QuotesState.ID.equals(stateId)) {
flag ^= QUOTES;
currentState.quoted();
} else if(EscapeCharacterState.ID.equals(stateId)) {
flag ^= ESCAPE;
}
} }
|
public class class_name {
@Override
public boolean offer(T t) {
if (t == null) {
throw new IllegalArgumentException();
}
boolean ret = false;
synchronized (lock) {
if (t instanceof QueueItem && ((QueueItem) t).isExpedited()) {
if (numberOfUsedExpeditedSlots.get() < expeditedBuffer.length) {
expeditedInsert(t);
numberOfUsedExpeditedSlots.getAndIncrement();
ret = true;
}
} else {
if (numberOfUsedSlots.get() < buffer.length) {
insert(t);
numberOfUsedSlots.getAndIncrement();
ret = true;
}
}
}
if (ret) {
notifyGet_();
return true;
}
return false;
} }
|
public class class_name {
@Override
public boolean offer(T t) {
if (t == null) {
throw new IllegalArgumentException();
}
boolean ret = false;
synchronized (lock) {
if (t instanceof QueueItem && ((QueueItem) t).isExpedited()) {
if (numberOfUsedExpeditedSlots.get() < expeditedBuffer.length) {
expeditedInsert(t); // depends on control dependency: [if], data = [none]
numberOfUsedExpeditedSlots.getAndIncrement(); // depends on control dependency: [if], data = [none]
ret = true; // depends on control dependency: [if], data = [none]
}
} else {
if (numberOfUsedSlots.get() < buffer.length) {
insert(t); // depends on control dependency: [if], data = [none]
numberOfUsedSlots.getAndIncrement(); // depends on control dependency: [if], data = [none]
ret = true; // depends on control dependency: [if], data = [none]
}
}
}
if (ret) {
notifyGet_(); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} }
|
public class class_name {
protected void addMessage(String message)
{
messages.add(message);
messageCount++;
if (messageCount > messagesQueueMax)
{
messages.remove();
messageCount--;
}
} }
|
public class class_name {
protected void addMessage(String message)
{
messages.add(message);
messageCount++;
if (messageCount > messagesQueueMax)
{
messages.remove();
// depends on control dependency: [if], data = [none]
messageCount--;
// depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@POST
@Path("close")
public void close(String id) {
Statement statement = StatementHolder.get().get(id);
try {
statement.close();
StatementHolder.get().remove(id);
} catch (SQLException e) {
// TODO
throw new RuntimeException(e);
}
} }
|
public class class_name {
@POST
@Path("close")
public void close(String id) {
Statement statement = StatementHolder.get().get(id);
try {
statement.close(); // depends on control dependency: [try], data = [none]
StatementHolder.get().remove(id); // depends on control dependency: [try], data = [none]
} catch (SQLException e) {
// TODO
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
protected void initialize(Class<T> mappedClass) {
this.mappedClass = mappedClass;
this.mappedFields = new HashMap<String, Field>();
this.mappedProperties = new HashSet<String>();
Field fields[] = mappedClass.getFields();
for (Field field : fields) {
int mod = field.getModifiers();
if (Modifier.isPublic(mod) && !Modifier.isStatic(mod)) {
String fieldNameV1 = field.getName().toLowerCase();
String fieldNameV2 = underscoreName(field.getName());
String fieldNameV3 = underscoreNameR(field.getName());
this.mappedFields.put(fieldNameV1, field);
if (!fieldNameV2.equals(fieldNameV1)) {
this.mappedFields.put(fieldNameV2, field);
}
if (!fieldNameV3.equals(fieldNameV1) && !fieldNameV3.equals(fieldNameV2)) {
this.mappedFields.put(fieldNameV3, field);
}
this.mappedProperties.add(fieldNameV2);
/*
this.mappedFields.put(field.getName().toLowerCase(), field);
String underscoredName = underscoreName(field.getName());
if (!field.getName().toLowerCase().equals(underscoredName)) {
this.mappedFields.put(underscoredName, field);
}
this.mappedProperties.add(underscoredName);
*/
}
}
} }
|
public class class_name {
protected void initialize(Class<T> mappedClass) {
this.mappedClass = mappedClass;
this.mappedFields = new HashMap<String, Field>();
this.mappedProperties = new HashSet<String>();
Field fields[] = mappedClass.getFields();
for (Field field : fields) {
int mod = field.getModifiers();
if (Modifier.isPublic(mod) && !Modifier.isStatic(mod)) {
String fieldNameV1 = field.getName().toLowerCase();
String fieldNameV2 = underscoreName(field.getName());
String fieldNameV3 = underscoreNameR(field.getName());
this.mappedFields.put(fieldNameV1, field); // depends on control dependency: [if], data = [none]
if (!fieldNameV2.equals(fieldNameV1)) {
this.mappedFields.put(fieldNameV2, field); // depends on control dependency: [if], data = [none]
}
if (!fieldNameV3.equals(fieldNameV1) && !fieldNameV3.equals(fieldNameV2)) {
this.mappedFields.put(fieldNameV3, field); // depends on control dependency: [if], data = [none]
}
this.mappedProperties.add(fieldNameV2); // depends on control dependency: [if], data = [none]
/*
this.mappedFields.put(field.getName().toLowerCase(), field);
String underscoredName = underscoreName(field.getName());
if (!field.getName().toLowerCase().equals(underscoredName)) {
this.mappedFields.put(underscoredName, field);
}
this.mappedProperties.add(underscoredName);
*/
}
}
} }
|
public class class_name {
private String nextQuotedValue(ByteString runTerminator) throws IOException {
StringBuilder builder = null;
while (true) {
long index = source.indexOfElement(runTerminator);
if (index == -1L) throw syntaxError("Unterminated string");
// If we've got an escape character, we're going to need a string builder.
if (buffer.getByte(index) == '\\') {
if (builder == null) builder = new StringBuilder();
builder.append(buffer.readUtf8(index));
buffer.readByte(); // '\'
builder.append(readEscapeCharacter());
continue;
}
// If it isn't the escape character, it's the quote. Return the string.
if (builder == null) {
String result = buffer.readUtf8(index);
buffer.readByte(); // Consume the quote character.
return result;
} else {
builder.append(buffer.readUtf8(index));
buffer.readByte(); // Consume the quote character.
return builder.toString();
}
}
} }
|
public class class_name {
private String nextQuotedValue(ByteString runTerminator) throws IOException {
StringBuilder builder = null;
while (true) {
long index = source.indexOfElement(runTerminator);
if (index == -1L) throw syntaxError("Unterminated string");
// If we've got an escape character, we're going to need a string builder.
if (buffer.getByte(index) == '\\') {
if (builder == null) builder = new StringBuilder();
builder.append(buffer.readUtf8(index)); // depends on control dependency: [if], data = [none]
buffer.readByte(); // '\' // depends on control dependency: [if], data = [none]
builder.append(readEscapeCharacter()); // depends on control dependency: [if], data = [none]
continue;
}
// If it isn't the escape character, it's the quote. Return the string.
if (builder == null) {
String result = buffer.readUtf8(index);
buffer.readByte(); // Consume the quote character. // depends on control dependency: [if], data = [none]
return result; // depends on control dependency: [if], data = [none]
} else {
builder.append(buffer.readUtf8(index)); // depends on control dependency: [if], data = [none]
buffer.readByte(); // Consume the quote character. // depends on control dependency: [if], data = [none]
return builder.toString(); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
private void mergeQueryParams(Hashtable tmpQueryParams)
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "");
}
if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
checkRequestObjectInUse();
}
SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
if (tmpQueryParams != null)
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "tmpQueryParams.size() " + tmpQueryParams.size());
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "tmpQueryParams " + tmpQueryParams);
}
Enumeration enumeration = tmpQueryParams.keys();
while (enumeration.hasMoreElements())
{
Object key = enumeration.nextElement();
// Check for QueryString parms with the same name
// pre-append to postdata values if necessary
if (reqData.getParameters() != null && reqData.getParameters().containsKey(key))
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "_paramaters contains key " + key);
}
String postVals[] = (String[]) reqData.getParameters().get(key);
String queryVals[] = (String[]) tmpQueryParams.get(key);
String newVals[] = new String[postVals.length + queryVals.length];
int newValsIndex = 0;
for (int i = 0; i < queryVals.length; i++)
{
newVals[newValsIndex++] = queryVals[i];
}
for (int i = 0; i < postVals.length; i++)
{
newVals[newValsIndex++] = postVals[i];
}
reqData.getParameters().put(key, newVals);
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "put key " + key + " into _parameters.");
}
}
else
{
if (reqData.getParameters() == null) // PK14900
reqData.setParameters(new Hashtable());// PK14900
reqData.getParameters().put(key, tmpQueryParams.get(key));
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "put key " + key + " into _parameters. ");
}
}
}
}
} }
|
public class class_name {
private void mergeQueryParams(Hashtable tmpQueryParams)
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", ""); // depends on control dependency: [if], data = [none]
}
if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
checkRequestObjectInUse(); // depends on control dependency: [if], data = [none]
}
SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
if (tmpQueryParams != null)
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "tmpQueryParams.size() " + tmpQueryParams.size()); // depends on control dependency: [if], data = [none]
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "tmpQueryParams " + tmpQueryParams); // depends on control dependency: [if], data = [none]
}
Enumeration enumeration = tmpQueryParams.keys();
while (enumeration.hasMoreElements())
{
Object key = enumeration.nextElement();
// Check for QueryString parms with the same name
// pre-append to postdata values if necessary
if (reqData.getParameters() != null && reqData.getParameters().containsKey(key))
{
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "_paramaters contains key " + key); // depends on control dependency: [if], data = [none]
}
String postVals[] = (String[]) reqData.getParameters().get(key);
String queryVals[] = (String[]) tmpQueryParams.get(key);
String newVals[] = new String[postVals.length + queryVals.length];
int newValsIndex = 0;
for (int i = 0; i < queryVals.length; i++)
{
newVals[newValsIndex++] = queryVals[i]; // depends on control dependency: [for], data = [i]
}
for (int i = 0; i < postVals.length; i++)
{
newVals[newValsIndex++] = postVals[i]; // depends on control dependency: [for], data = [i]
}
reqData.getParameters().put(key, newVals); // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "put key " + key + " into _parameters."); // depends on control dependency: [if], data = [none]
}
}
else
{
if (reqData.getParameters() == null) // PK14900
reqData.setParameters(new Hashtable());// PK14900
reqData.getParameters().put(key, tmpQueryParams.get(key)); // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) //306998.15
{
logger.logp(Level.FINE, CLASS_NAME,"mergeQueryParams", "put key " + key + " into _parameters. "); // depends on control dependency: [if], data = [none]
}
}
}
}
} }
|
public class class_name {
@SuppressWarnings("deprecation")
static OpenSslApplicationProtocolNegotiator toNegotiator(ApplicationProtocolConfig config) {
if (config == null) {
return NONE_PROTOCOL_NEGOTIATOR;
}
switch (config.protocol()) {
case NONE:
return NONE_PROTOCOL_NEGOTIATOR;
case ALPN:
case NPN:
case NPN_AND_ALPN:
switch (config.selectedListenerFailureBehavior()) {
case CHOOSE_MY_LAST_PROTOCOL:
case ACCEPT:
switch (config.selectorFailureBehavior()) {
case CHOOSE_MY_LAST_PROTOCOL:
case NO_ADVERTISE:
return new OpenSslDefaultApplicationProtocolNegotiator(
config);
default:
throw new UnsupportedOperationException(
new StringBuilder("OpenSSL provider does not support ")
.append(config.selectorFailureBehavior())
.append(" behavior").toString());
}
default:
throw new UnsupportedOperationException(
new StringBuilder("OpenSSL provider does not support ")
.append(config.selectedListenerFailureBehavior())
.append(" behavior").toString());
}
default:
throw new Error();
}
} }
|
public class class_name {
@SuppressWarnings("deprecation")
static OpenSslApplicationProtocolNegotiator toNegotiator(ApplicationProtocolConfig config) {
if (config == null) {
return NONE_PROTOCOL_NEGOTIATOR; // depends on control dependency: [if], data = [none]
}
switch (config.protocol()) {
case NONE:
return NONE_PROTOCOL_NEGOTIATOR;
case ALPN:
case NPN:
case NPN_AND_ALPN:
switch (config.selectedListenerFailureBehavior()) {
case CHOOSE_MY_LAST_PROTOCOL:
case ACCEPT:
switch (config.selectorFailureBehavior()) {
case CHOOSE_MY_LAST_PROTOCOL:
case NO_ADVERTISE:
return new OpenSslDefaultApplicationProtocolNegotiator(
config);
default:
throw new UnsupportedOperationException(
new StringBuilder("OpenSSL provider does not support ")
.append(config.selectorFailureBehavior())
.append(" behavior").toString());
}
default:
throw new UnsupportedOperationException(
new StringBuilder("OpenSSL provider does not support ")
.append(config.selectedListenerFailureBehavior())
.append(" behavior").toString());
}
default:
throw new Error();
}
} }
|
public class class_name {
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (CollectionUtils.isNotEmpty(annotations)) {
Set<? extends Element> routeElements = roundEnv.getElementsAnnotatedWith(Route.class);
try {
logger.info(">>> Found routes, start... <<<");
this.parseRoutes(routeElements);
} catch (Exception e) {
logger.error(e);
}
return true;
}
return false;
} }
|
public class class_name {
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (CollectionUtils.isNotEmpty(annotations)) {
Set<? extends Element> routeElements = roundEnv.getElementsAnnotatedWith(Route.class); // depends on control dependency: [if], data = [none]
try {
logger.info(">>> Found routes, start... <<<"); // depends on control dependency: [try], data = [none]
this.parseRoutes(routeElements); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.error(e);
} // depends on control dependency: [catch], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} }
|
public class class_name {
public void removeConnectionPool(String url) {
try {
removeConnectionPool(new URL(url));
} catch (MalformedURLException e) {
log.error("url exception", e);
throw new IllegalArgumentException(e);
}
} }
|
public class class_name {
public void removeConnectionPool(String url) {
try {
removeConnectionPool(new URL(url)); // depends on control dependency: [try], data = [none]
} catch (MalformedURLException e) {
log.error("url exception", e);
throw new IllegalArgumentException(e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
private void initSize(TypedArray attrs) {
int index = R.styleable.ActionButton_size;
if (attrs.hasValue(index)) {
this.size = attrs.getDimension(index, size);
} else {
this.size = dpToPx(type.getSize());
}
LOGGER.trace("Initialized Action Button size: {}", getSize());
} }
|
public class class_name {
private void initSize(TypedArray attrs) {
int index = R.styleable.ActionButton_size;
if (attrs.hasValue(index)) {
this.size = attrs.getDimension(index, size); // depends on control dependency: [if], data = [none]
} else {
this.size = dpToPx(type.getSize()); // depends on control dependency: [if], data = [none]
}
LOGGER.trace("Initialized Action Button size: {}", getSize());
} }
|
public class class_name {
public void notifyStatusChanged(String status) {
/* If the new status message is shorter than the previous one, then
* first change the status message all spaces to clear out the previous
* message.
*/
if (status.length() < this.status.length()) {
this.status = this.status.replaceAll(".", " ");
this.printProgressBar();
}
this.status = status;
this.printProgressBar();
} }
|
public class class_name {
public void notifyStatusChanged(String status) {
/* If the new status message is shorter than the previous one, then
* first change the status message all spaces to clear out the previous
* message.
*/
if (status.length() < this.status.length()) {
this.status = this.status.replaceAll(".", " "); // depends on control dependency: [if], data = [none]
this.printProgressBar(); // depends on control dependency: [if], data = [none]
}
this.status = status;
this.printProgressBar();
} }
|
public class class_name {
public static Representation create(final File file, final boolean autoDecompress)
throws IllegalArgumentException {
try {
String name = file.getName();
final Representation representation;
if (autoDecompress) {
byte[] bytes = ByteStreams.toByteArray(IO.read(file.getAbsolutePath()));
representation = new Representation(new ByteArrayInputStream(bytes));
if (name.endsWith(".gz") || name.endsWith(".xz") || name.endsWith(".7z")) {
name = name.substring(0, name.length() - 3);
} else if (name.endsWith(".bz2") || name.endsWith(".lz4")) {
name = name.substring(0, name.length() - 4);
}
} else {
representation = new Representation(IO.buffer(new FileInputStream(file)));
}
representation.metadata.set(NFO.FILE_SIZE, file.length());
representation.metadata.set(NFO.FILE_NAME, name);
representation.metadata.set(NFO.FILE_LAST_MODIFIED, new Date(file.lastModified()));
representation.metadata.set(NIE.MIME_TYPE, Data.extensionToMimeType(name));
return representation;
} catch (final FileNotFoundException ex) {
throw new IllegalArgumentException("Not a file: " + file.getAbsolutePath());
} catch (final IOException e) {
throw new IllegalArgumentException("IOException on file: " + file.getAbsolutePath());
}
} }
|
public class class_name {
public static Representation create(final File file, final boolean autoDecompress)
throws IllegalArgumentException {
try {
String name = file.getName();
final Representation representation;
if (autoDecompress) {
byte[] bytes = ByteStreams.toByteArray(IO.read(file.getAbsolutePath()));
representation = new Representation(new ByteArrayInputStream(bytes));
if (name.endsWith(".gz") || name.endsWith(".xz") || name.endsWith(".7z")) {
name = name.substring(0, name.length() - 3); // depends on control dependency: [if], data = [none]
} else if (name.endsWith(".bz2") || name.endsWith(".lz4")) {
name = name.substring(0, name.length() - 4);
}
} else {
representation = new Representation(IO.buffer(new FileInputStream(file)));
}
representation.metadata.set(NFO.FILE_SIZE, file.length());
representation.metadata.set(NFO.FILE_NAME, name);
representation.metadata.set(NFO.FILE_LAST_MODIFIED, new Date(file.lastModified()));
representation.metadata.set(NIE.MIME_TYPE, Data.extensionToMimeType(name));
return representation;
} catch (final FileNotFoundException ex) {
throw new IllegalArgumentException("Not a file: " + file.getAbsolutePath());
} catch (final IOException e) {
throw new IllegalArgumentException("IOException on file: " + file.getAbsolutePath());
}
} }
|
public class class_name {
public int getOffset() {
if (dir_ < 0 && offsets_ != null && !offsets_.isEmpty()) {
// CollationIterator.previousCE() decrements the CEs length
// while it pops CEs from its internal buffer.
int i = iter_.getCEsLength();
if (otherHalf_ != 0) {
// Return the trailing CE offset while we are in the middle of a 64-bit CE.
++i;
}
assert (i < offsets_.size());
return offsets_.elementAti(i);
}
return iter_.getOffset();
} }
|
public class class_name {
public int getOffset() {
if (dir_ < 0 && offsets_ != null && !offsets_.isEmpty()) {
// CollationIterator.previousCE() decrements the CEs length
// while it pops CEs from its internal buffer.
int i = iter_.getCEsLength();
if (otherHalf_ != 0) {
// Return the trailing CE offset while we are in the middle of a 64-bit CE.
++i; // depends on control dependency: [if], data = [none]
}
assert (i < offsets_.size()); // depends on control dependency: [if], data = [none]
return offsets_.elementAti(i); // depends on control dependency: [if], data = [none]
}
return iter_.getOffset();
} }
|
public class class_name {
@Override
public Processor newProcessor(Processor sourceProcessor) {
PredictionCombinerProcessor newProcessor = new PredictionCombinerProcessor();
PredictionCombinerProcessor originProcessor = (PredictionCombinerProcessor) sourceProcessor;
if (originProcessor.getOutputStream() != null) {
newProcessor.setOutputStream(originProcessor.getOutputStream());
}
newProcessor.setSizeEnsemble(originProcessor.getSizeEnsemble());
return newProcessor;
} }
|
public class class_name {
@Override
public Processor newProcessor(Processor sourceProcessor) {
PredictionCombinerProcessor newProcessor = new PredictionCombinerProcessor();
PredictionCombinerProcessor originProcessor = (PredictionCombinerProcessor) sourceProcessor;
if (originProcessor.getOutputStream() != null) {
newProcessor.setOutputStream(originProcessor.getOutputStream()); // depends on control dependency: [if], data = [(originProcessor.getOutputStream()]
}
newProcessor.setSizeEnsemble(originProcessor.getSizeEnsemble());
return newProcessor;
} }
|
public class class_name {
    /**
     * ANTLR-generated recursive-descent rule for the top-level {@code queryStmnt}
     * production of the druidG grammar — a SQL-like query language translated
     * into a Druid {@link QueryMeta}.
     *
     * <p>Machine-generated control flow: each {@code altNNN}/{@code LANNN_k}
     * pair implements one LL(2) lookahead decision for an optional or repeated
     * grammar element, and the embedded {@code // druidG.g:...} comments record
     * the grammar location each section was generated from. The hand-written
     * semantic actions (the statements touching {@code qMeta}) progressively
     * promote the result between query flavors (group-by, select, time-series,
     * top-N, search, time-boundary) as clauses are recognized.
     *
     * @return the populated {@link QueryMeta} describing the parsed query
     * @throws RecognitionException on a syntax error the generated parser
     *     cannot recover from inside this rule
     */
    public final QueryMeta queryStmnt() throws RecognitionException {
        QueryMeta qMeta = null;
        Token id=null;
        Token dir=null;
        Token l=null;
        Token s1=null;
        Token s2=null;
        Token s=null;
        QueryMeta fromQuery =null;
        Pair<Granularity, List<Pair<Integer, Integer>>> gran =null;
        Having h =null;
        PostAggItem p =null;
        // NOTE(review): starts as an aggregating group-by meta; demoted to
        // Select later if no aggregations were parsed from the select list.
        qMeta = GroupByQueryMeta.promote(new QueryMeta());
        ((BaseAggQueryMeta)qMeta).aggregations = new ArrayList<>();
        qMeta.intervals = new ArrayList<>();
        try {
            // druidG.g:150:2: ( SELECT ( ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* ) | ( WS '*' ) )? WS FROM ( ( WS id= ID ) | ( WS LPARAN (fromQuery= queryStmnt ) RPARAN ) ) ( WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )? )? )
            // druidG.g:150:4: SELECT ( ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* ) | ( WS '*' ) )? WS FROM ( ( WS id= ID ) | ( WS LPARAN (fromQuery= queryStmnt ) RPARAN ) ) ( WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )? )?
            {
            match(input,SELECT,FOLLOW_SELECT_in_queryStmnt1180);
            // Optional select list: either explicit items or '*' (token 90 per the grammar comment).
            // druidG.g:151:7: ( ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* ) | ( WS '*' ) )?
            int alt101=3;
            int LA101_0 = input.LA(1);
            if ( (LA101_0==WS) ) {
                int LA101_1 = input.LA(2);
                if ( (LA101_1==90) ) {
                    alt101=2;
                }
                else if ( (LA101_1==COUNT||LA101_1==DOUBLE_SUM||(LA101_1 >= HYPER_UNIQUE && LA101_1 <= ID)||LA101_1==JAVASCRIPT||LA101_1==LONG_SUM||LA101_1==MAX||LA101_1==MIN||LA101_1==UNIQUE) ) {
                    alt101=1;
                }
            }
            switch (alt101) {
                case 1 :
                    // druidG.g:152:8: ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* )
                    {
                    // druidG.g:152:8: ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* )
                    // druidG.g:153:9: WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )*
                    {
                    match(input,WS,FOLLOW_WS_in_queryStmnt1208);
                    pushFollow(FOLLOW_selectItems_in_queryStmnt1210);
                    selectItems(qMeta);
                    state._fsp--;
                    // Comma-separated additional select items (token 91 is ',' per the grammar comment).
                    // druidG.g:153:31: ( ( WS )? ',' ( WS )? selectItems[qMeta] )*
                    loop100:
                    while (true) {
                        int alt100=2;
                        int LA100_0 = input.LA(1);
                        if ( (LA100_0==WS) ) {
                            int LA100_1 = input.LA(2);
                            if ( (LA100_1==91) ) {
                                alt100=1;
                            }
                        }
                        else if ( (LA100_0==91) ) {
                            alt100=1;
                        }
                        switch (alt100) {
                            case 1 :
                                // druidG.g:153:32: ( WS )? ',' ( WS )? selectItems[qMeta]
                                {
                                // druidG.g:153:32: ( WS )?
                                int alt98=2;
                                int LA98_0 = input.LA(1);
                                if ( (LA98_0==WS) ) {
                                    alt98=1;
                                }
                                switch (alt98) {
                                    case 1 :
                                        // druidG.g:153:32: WS
                                        {
                                        match(input,WS,FOLLOW_WS_in_queryStmnt1214);
                                        }
                                        break;
                                }
                                match(input,91,FOLLOW_91_in_queryStmnt1217);
                                // druidG.g:153:40: ( WS )?
                                int alt99=2;
                                int LA99_0 = input.LA(1);
                                if ( (LA99_0==WS) ) {
                                    alt99=1;
                                }
                                switch (alt99) {
                                    case 1 :
                                        // druidG.g:153:40: WS
                                        {
                                        match(input,WS,FOLLOW_WS_in_queryStmnt1219);
                                        }
                                        break;
                                }
                                pushFollow(FOLLOW_selectItems_in_queryStmnt1222);
                                selectItems(qMeta);
                                state._fsp--;
                                }
                                break;
                            default :
                                break loop100;
                        }
                    }
                    }
                    }
                    break;
                case 2 :
                    // druidG.g:156:8: ( WS '*' )
                    {
                    // druidG.g:156:8: ( WS '*' )
                    // druidG.g:156:9: WS '*'
                    {
                    match(input,WS,FOLLOW_WS_in_queryStmnt1253);
                    match(input,90,FOLLOW_90_in_queryStmnt1255);
                    }
                    }
                    break;
            }
            match(input,WS,FOLLOW_WS_in_queryStmnt1270);
            match(input,FROM,FOLLOW_FROM_in_queryStmnt1272);
            // FROM source: either a plain datasource ID or a parenthesized nested sub-query.
            // druidG.g:159:4: ( ( WS id= ID ) | ( WS LPARAN (fromQuery= queryStmnt ) RPARAN ) )
            int alt102=2;
            int LA102_0 = input.LA(1);
            if ( (LA102_0==WS) ) {
                int LA102_1 = input.LA(2);
                if ( (LA102_1==ID) ) {
                    alt102=1;
                }
                else if ( (LA102_1==LPARAN) ) {
                    alt102=2;
                }
                else {
                    int nvaeMark = input.mark();
                    try {
                        input.consume();
                        NoViableAltException nvae =
                            new NoViableAltException("", 102, 1, input);
                        throw nvae;
                    } finally {
                        input.rewind(nvaeMark);
                    }
                }
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 102, 0, input);
                throw nvae;
            }
            switch (alt102) {
                case 1 :
                    // druidG.g:160:13: ( WS id= ID )
                    {
                    // druidG.g:160:13: ( WS id= ID )
                    // druidG.g:160:14: WS id= ID
                    {
                    match(input,WS,FOLLOW_WS_in_queryStmnt1293);
                    id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1297);
                    qMeta.dataSource = (id!=null?id.getText():null);
                    }
                    }
                    break;
                case 2 :
                    // druidG.g:162:11: ( WS LPARAN (fromQuery= queryStmnt ) RPARAN )
                    {
                    // druidG.g:162:11: ( WS LPARAN (fromQuery= queryStmnt ) RPARAN )
                    // druidG.g:162:12: WS LPARAN (fromQuery= queryStmnt ) RPARAN
                    {
                    match(input,WS,FOLLOW_WS_in_queryStmnt1334);
                    match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1336);
                    // druidG.g:162:22: (fromQuery= queryStmnt )
                    // druidG.g:162:23: fromQuery= queryStmnt
                    {
                    pushFollow(FOLLOW_queryStmnt_in_queryStmnt1341);
                    fromQuery=queryStmnt();
                    state._fsp--;
                    }
                    match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1344);
                    qMeta.queryDataSource = fromQuery;
                    }
                    }
                    break;
            }
            // No aggregations parsed from the select list -> plain Select query.
            if (((BaseAggQueryMeta)qMeta).aggregations.isEmpty()) {
                qMeta = SelectQueryMeta.promote(qMeta);
            }
            // druidG.g:168:2: ( WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )? )?
            int alt127=2;
            int LA127_0 = input.LA(1);
            if ( (LA127_0==WS) ) {
                int LA127_1 = input.LA(2);
                if ( (LA127_1==WHERE) ) {
                    alt127=1;
                }
            }
            switch (alt127) {
                case 1 :
                    // druidG.g:169:4: WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )?
                    {
                    match(input,WS,FOLLOW_WS_in_queryStmnt1363);
                    match(input,WHERE,FOLLOW_WHERE_in_queryStmnt1365);
                    match(input,WS,FOLLOW_WS_in_queryStmnt1367);
                    pushFollow(FOLLOW_whereClause_in_queryStmnt1369);
                    whereClause(qMeta);
                    state._fsp--;
                    // druidG.g:170:4: ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? )
                    // druidG.g:171:5: ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )?
                    {
                    // Optional BREAK BY: sets the query granularity (and possible micro-intervals).
                    // druidG.g:171:5: ( WS BREAK WS BY WS gran= granularityClause )?
                    int alt103=2;
                    int LA103_0 = input.LA(1);
                    if ( (LA103_0==WS) ) {
                        int LA103_1 = input.LA(2);
                        if ( (LA103_1==BREAK) ) {
                            alt103=1;
                        }
                    }
                    switch (alt103) {
                        case 1 :
                            // druidG.g:171:6: WS BREAK WS BY WS gran= granularityClause
                            {
                            match(input,WS,FOLLOW_WS_in_queryStmnt1387);
                            match(input,BREAK,FOLLOW_BREAK_in_queryStmnt1389);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1391);
                            match(input,BY,FOLLOW_BY_in_queryStmnt1393);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1395);
                            pushFollow(FOLLOW_granularityClause_in_queryStmnt1399);
                            gran=granularityClause();
                            state._fsp--;
                            qMeta.granularity = gran.a;
                            if (gran.b != null) {
                                qMeta.microIntervals.addAll(gran.b);
                            }
                            }
                            break;
                    }
                    // Optional GROUP BY: promotes to a group-by query and validates each dimension/alias.
                    // druidG.g:178:5: ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )?
                    int alt108=2;
                    int LA108_0 = input.LA(1);
                    if ( (LA108_0==WS) ) {
                        int LA108_1 = input.LA(2);
                        if ( (LA108_1==GROUP) ) {
                            alt108=1;
                        }
                    }
                    switch (alt108) {
                        case 1 :
                            // druidG.g:178:6: WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )?
                            {
                            match(input,WS,FOLLOW_WS_in_queryStmnt1416);
                            match(input,GROUP,FOLLOW_GROUP_in_queryStmnt1418);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1420);
                            match(input,BY,FOLLOW_BY_in_queryStmnt1422);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1424);
                            qMeta = GroupByQueryMeta.promote(qMeta);
                            if (((GroupByQueryMeta)qMeta).fetchDimensions == null) {
                                System.err.println("No dimensions !! ");
                            }
                            // druidG.g:185:10: (id= ID ( ( WS )? ',' ( WS )? id= ID )* )
                            // druidG.g:185:11: id= ID ( ( WS )? ',' ( WS )? id= ID )*
                            {
                            id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1450);
                            if (!((GroupByQueryMeta)qMeta).checkDimOrAlias((id!=null?id.getText():null))) {
                                System.err.println("Dimension/Alias " + (id!=null?id.getText():null) + " not valid..");
                            }
                            // druidG.g:191:14: ( ( WS )? ',' ( WS )? id= ID )*
                            loop106:
                            while (true) {
                                int alt106=2;
                                int LA106_0 = input.LA(1);
                                if ( (LA106_0==WS) ) {
                                    int LA106_1 = input.LA(2);
                                    if ( (LA106_1==91) ) {
                                        alt106=1;
                                    }
                                }
                                else if ( (LA106_0==91) ) {
                                    alt106=1;
                                }
                                switch (alt106) {
                                    case 1 :
                                        // druidG.g:191:15: ( WS )? ',' ( WS )? id= ID
                                        {
                                        // druidG.g:191:15: ( WS )?
                                        int alt104=2;
                                        int LA104_0 = input.LA(1);
                                        if ( (LA104_0==WS) ) {
                                            alt104=1;
                                        }
                                        switch (alt104) {
                                            case 1 :
                                                // druidG.g:191:15: WS
                                                {
                                                match(input,WS,FOLLOW_WS_in_queryStmnt1482);
                                                }
                                                break;
                                        }
                                        match(input,91,FOLLOW_91_in_queryStmnt1485);
                                        // druidG.g:191:23: ( WS )?
                                        int alt105=2;
                                        int LA105_0 = input.LA(1);
                                        if ( (LA105_0==WS) ) {
                                            alt105=1;
                                        }
                                        switch (alt105) {
                                            case 1 :
                                                // druidG.g:191:23: WS
                                                {
                                                match(input,WS,FOLLOW_WS_in_queryStmnt1487);
                                                }
                                                break;
                                        }
                                        id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1492);
                                        if (!((GroupByQueryMeta)qMeta).checkDimOrAlias((id!=null?id.getText():null))) {
                                            System.err.println("Dimension/Alias " + (id!=null?id.getText():null) + " not valid..");
                                        }
                                        }
                                        break;
                                    default :
                                        break loop106;
                                }
                            }
                            }
                            // druidG.g:199:10: ( WS HAVING WS h= havingClause )?
                            int alt107=2;
                            int LA107_0 = input.LA(1);
                            if ( (LA107_0==WS) ) {
                                int LA107_1 = input.LA(2);
                                if ( (LA107_1==HAVING) ) {
                                    alt107=1;
                                }
                            }
                            switch (alt107) {
                                case 1 :
                                    // druidG.g:199:11: WS HAVING WS h= havingClause
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1549);
                                    match(input,HAVING,FOLLOW_HAVING_in_queryStmnt1551);
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1553);
                                    pushFollow(FOLLOW_havingClause_in_queryStmnt1557);
                                    h=havingClause();
                                    state._fsp--;
                                    ((GroupByQueryMeta)qMeta).having = h;
                                    }
                                    break;
                            }
                            }
                            break;
                    }
                    qMeta = QueryUtils.checkAndPromoteToTimeSeries(qMeta);
                    // Optional ORDER BY: single-dimension queries become TopN, otherwise a LimitSpec is used.
                    // druidG.g:203:5: ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )?
                    int alt110=2;
                    int LA110_0 = input.LA(1);
                    if ( (LA110_0==WS) ) {
                        int LA110_1 = input.LA(2);
                        if ( (LA110_1==ORDER) ) {
                            alt110=1;
                        }
                    }
                    switch (alt110) {
                        case 1 :
                            // druidG.g:203:6: WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )?
                            {
                            match(input,WS,FOLLOW_WS_in_queryStmnt1601);
                            match(input,ORDER,FOLLOW_ORDER_in_queryStmnt1603);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1605);
                            match(input,BY,FOLLOW_BY_in_queryStmnt1607);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1609);
                            // druidG.g:203:24: (id= ID )
                            // druidG.g:203:25: id= ID
                            {
                            id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1614);
                            }
                            if (((PlainDimQueryMeta)qMeta).fetchDimensions.size() != 1) {
                                ((GroupByQueryMeta)qMeta).limitSpec = new LimitSpec();
                            } else {// If fetchDimensions = 1 then TopN is more optimal.
                                qMeta = TopNQueryMeta.promote(qMeta);
                                ((TopNQueryMeta)qMeta).metric = (id!=null?id.getText():null);
                            }
                            // druidG.g:214:9: ( WS dir= ( ASC | DESC ) )?
                            int alt109=2;
                            int LA109_0 = input.LA(1);
                            if ( (LA109_0==WS) ) {
                                int LA109_1 = input.LA(2);
                                if ( (LA109_1==ASC||LA109_1==DESC) ) {
                                    alt109=1;
                                }
                            }
                            switch (alt109) {
                                case 1 :
                                    // druidG.g:214:10: WS dir= ( ASC | DESC )
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1647);
                                    dir=input.LT(1);
                                    if ( input.LA(1)==ASC||input.LA(1)==DESC ) {
                                        input.consume();
                                        state.errorRecovery=false;
                                    }
                                    else {
                                        MismatchedSetException mse = new MismatchedSetException(null,input);
                                        throw mse;
                                    }
                                    if (qMeta instanceof GroupByQueryMeta && ((GroupByQueryMeta)qMeta).limitSpec != null) {
                                        if (dir != null && (dir!=null?dir.getText():null) != null) {
                                            ((GroupByQueryMeta)qMeta).limitSpec.addColumn((id!=null?id.getText():null), (dir!=null?dir.getText():null));
                                        } else {
                                            ((GroupByQueryMeta)qMeta).limitSpec.addColumn((id!=null?id.getText():null), "ASC");
                                        }
                                    }
                                    }
                                    break;
                            }
                            // At this point if the qMeta is not TopN and is still GroupBy then do the following(default is ascending sort).
                            if (qMeta instanceof GroupByQueryMeta && ((GroupByQueryMeta)qMeta).limitSpec != null) {
                                if (!((GroupByQueryMeta)qMeta).limitSpec.columns.containsKey((id!=null?id.getText():null))) {
                                    ((GroupByQueryMeta)qMeta).limitSpec.addColumn((id!=null?id.getText():null), "ASC");
                                }
                            }
                            }
                            break;
                    }
                    // Optional LIMIT: threshold/limit target depends on the query flavor decided above.
                    // druidG.g:235:5: ( WS LIMIT WS (l= LONG ) )?
                    int alt111=2;
                    int LA111_0 = input.LA(1);
                    if ( (LA111_0==WS) ) {
                        int LA111_1 = input.LA(2);
                        if ( (LA111_1==LIMIT) ) {
                            alt111=1;
                        }
                    }
                    switch (alt111) {
                        case 1 :
                            // druidG.g:236:6: WS LIMIT WS (l= LONG )
                            {
                            match(input,WS,FOLLOW_WS_in_queryStmnt1701);
                            match(input,LIMIT,FOLLOW_LIMIT_in_queryStmnt1703);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1705);
                            // druidG.g:236:18: (l= LONG )
                            // druidG.g:236:19: l= LONG
                            {
                            l=(Token)match(input,LONG,FOLLOW_LONG_in_queryStmnt1710);
                            }
                            if (qMeta instanceof SelectQueryMeta) {
                                ((SelectQueryMeta)qMeta).pagingSpec.threshold = Integer.valueOf((l!=null?l.getText():null));
                            } else if (qMeta instanceof TopNQueryMeta) {
                                ((TopNQueryMeta)qMeta).threshold = Integer.valueOf((l!=null?l.getText():null));
                            } else if (((PlainDimQueryMeta)qMeta).fetchDimensions.size() != 1) {
                                if (((GroupByQueryMeta)qMeta).limitSpec != null) {
                                    ((GroupByQueryMeta)qMeta).limitSpec.limit = Long.valueOf((l!=null?l.getText():null));
                                }
                            }
                            }
                            break;
                    }
                    // druidG.g:250:7: ( WS THEN WS p= postAggItem )?
                    int alt112=2;
                    int LA112_0 = input.LA(1);
                    if ( (LA112_0==WS) ) {
                        int LA112_1 = input.LA(2);
                        if ( (LA112_1==THEN) ) {
                            alt112=1;
                        }
                    }
                    switch (alt112) {
                        case 1 :
                            // druidG.g:250:8: WS THEN WS p= postAggItem
                            {
                            match(input,WS,FOLLOW_WS_in_queryStmnt1748);
                            match(input,THEN,FOLLOW_THEN_in_queryStmnt1750);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1752);
                            pushFollow(FOLLOW_postAggItem_in_queryStmnt1756);
                            p=postAggItem();
                            state._fsp--;
                            QueryUtils.setPostAggregation(qMeta, p);
                            }
                            break;
                    }
                    }
                    // Optional WHICH CONTAINS (...) SORT (...): promotes to a search query.
                    // druidG.g:252:4: ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )?
                    int alt122=2;
                    int LA122_0 = input.LA(1);
                    if ( (LA122_0==WS) ) {
                        int LA122_1 = input.LA(2);
                        if ( (LA122_1==WHICH) ) {
                            alt122=1;
                        }
                    }
                    switch (alt122) {
                        case 1 :
                            // druidG.g:252:5: WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN
                            {
                            match(input,WS,FOLLOW_WS_in_queryStmnt1771);
                            match(input,WHICH,FOLLOW_WHICH_in_queryStmnt1773);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1775);
                            match(input,CONTAINS,FOLLOW_CONTAINS_in_queryStmnt1777);
                            qMeta = SearchQueryMeta.promote(qMeta);
                            // druidG.g:252:68: ( WS )?
                            int alt113=2;
                            int LA113_0 = input.LA(1);
                            if ( (LA113_0==WS) ) {
                                alt113=1;
                            }
                            switch (alt113) {
                                case 1 :
                                    // druidG.g:252:68: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1781);
                                    }
                                    break;
                            }
                            match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1784);
                            // druidG.g:252:79: ( WS )?
                            int alt114=2;
                            int LA114_0 = input.LA(1);
                            if ( (LA114_0==WS) ) {
                                alt114=1;
                            }
                            switch (alt114) {
                                case 1 :
                                    // druidG.g:252:79: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1786);
                                    }
                                    break;
                            }
                            // druidG.g:252:83: (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* )
                            // druidG.g:252:84: s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )*
                            {
                            s1=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1792);
                            ((SearchQueryMeta)qMeta).type = "insensitive_contains";((SearchQueryMeta)qMeta).addValue((s1!=null?s1.getText():null));
                            // druidG.g:252:208: ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )*
                            loop117:
                            while (true) {
                                int alt117=2;
                                int LA117_0 = input.LA(1);
                                if ( (LA117_0==WS) ) {
                                    int LA117_1 = input.LA(2);
                                    if ( (LA117_1==91) ) {
                                        alt117=1;
                                    }
                                }
                                else if ( (LA117_0==91) ) {
                                    alt117=1;
                                }
                                switch (alt117) {
                                    case 1 :
                                        // druidG.g:252:209: ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING
                                        {
                                        // druidG.g:252:209: ( WS )?
                                        int alt115=2;
                                        int LA115_0 = input.LA(1);
                                        if ( (LA115_0==WS) ) {
                                            alt115=1;
                                        }
                                        switch (alt115) {
                                            case 1 :
                                                // druidG.g:252:209: WS
                                                {
                                                match(input,WS,FOLLOW_WS_in_queryStmnt1796);
                                                }
                                                break;
                                        }
                                        match(input,91,FOLLOW_91_in_queryStmnt1799);
                                        // druidG.g:252:217: ( WS )?
                                        int alt116=2;
                                        int LA116_0 = input.LA(1);
                                        if ( (LA116_0==WS) ) {
                                            alt116=1;
                                        }
                                        switch (alt116) {
                                            case 1 :
                                                // druidG.g:252:217: WS
                                                {
                                                match(input,WS,FOLLOW_WS_in_queryStmnt1801);
                                                }
                                                break;
                                        }
                                        s2=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1806);
                                        ((SearchQueryMeta)qMeta).type = "fragment";((SearchQueryMeta)qMeta).addValue((s2!=null?s2.getText():null));
                                        }
                                        break;
                                    default :
                                        break loop117;
                                }
                            }
                            }
                            // druidG.g:252:337: ( WS )?
                            int alt118=2;
                            int LA118_0 = input.LA(1);
                            if ( (LA118_0==WS) ) {
                                alt118=1;
                            }
                            switch (alt118) {
                                case 1 :
                                    // druidG.g:252:337: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1813);
                                    }
                                    break;
                            }
                            match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1816);
                            match(input,WS,FOLLOW_WS_in_queryStmnt1822);
                            match(input,SORT,FOLLOW_SORT_in_queryStmnt1824);
                            // druidG.g:253:13: ( WS )?
                            int alt119=2;
                            int LA119_0 = input.LA(1);
                            if ( (LA119_0==WS) ) {
                                alt119=1;
                            }
                            switch (alt119) {
                                case 1 :
                                    // druidG.g:253:13: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1826);
                                    }
                                    break;
                            }
                            match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1829);
                            // druidG.g:253:24: ( WS )?
                            int alt120=2;
                            int LA120_0 = input.LA(1);
                            if ( (LA120_0==WS) ) {
                                alt120=1;
                            }
                            switch (alt120) {
                                case 1 :
                                    // druidG.g:253:24: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1831);
                                    }
                                    break;
                            }
                            // druidG.g:253:28: (s= SINGLE_QUOTE_STRING )
                            // druidG.g:253:29: s= SINGLE_QUOTE_STRING
                            {
                            s=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1837);
                            }
                            ((SearchQueryMeta)qMeta).setSort((s!=null?s.getText():null));
                            // druidG.g:253:97: ( WS )?
                            int alt121=2;
                            int LA121_0 = input.LA(1);
                            if ( (LA121_0==WS) ) {
                                alt121=1;
                            }
                            switch (alt121) {
                                case 1 :
                                    // druidG.g:253:97: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1842);
                                    }
                                    break;
                            }
                            match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1845);
                            }
                            break;
                    }
                    // Optional HINT('...'): post-processes the query meta through the hint processor.
                    // druidG.g:255:4: ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )?
                    int alt126=2;
                    int LA126_0 = input.LA(1);
                    if ( (LA126_0==WS) ) {
                        int LA126_1 = input.LA(2);
                        if ( (LA126_1==HINT) ) {
                            alt126=1;
                        }
                    }
                    switch (alt126) {
                        case 1 :
                            // druidG.g:255:5: WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN
                            {
                            match(input,WS,FOLLOW_WS_in_queryStmnt1857);
                            match(input,HINT,FOLLOW_HINT_in_queryStmnt1859);
                            // druidG.g:255:13: ( WS )?
                            int alt123=2;
                            int LA123_0 = input.LA(1);
                            if ( (LA123_0==WS) ) {
                                alt123=1;
                            }
                            switch (alt123) {
                                case 1 :
                                    // druidG.g:255:13: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1861);
                                    }
                                    break;
                            }
                            match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1864);
                            // druidG.g:255:24: ( WS )?
                            int alt124=2;
                            int LA124_0 = input.LA(1);
                            if ( (LA124_0==WS) ) {
                                alt124=1;
                            }
                            switch (alt124) {
                                case 1 :
                                    // druidG.g:255:24: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1866);
                                    }
                                    break;
                            }
                            s=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1871);
                            qMeta = HintProcessor.process(qMeta, (s!=null?s.getText():null));
                            // druidG.g:255:99: ( WS )?
                            int alt125=2;
                            int LA125_0 = input.LA(1);
                            if ( (LA125_0==WS) ) {
                                alt125=1;
                            }
                            switch (alt125) {
                                case 1 :
                                    // druidG.g:255:99: WS
                                    {
                                    match(input,WS,FOLLOW_WS_in_queryStmnt1875);
                                    }
                                    break;
                            }
                            match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1878);
                            }
                            break;
                    }
                    }
                    break;
            }
            // No time intervals parsed -> fall back to a time-boundary query.
            if (qMeta.intervals == null || qMeta.intervals.isEmpty()) {
                qMeta = TimeBoundaryQueryMeta.promote(qMeta);
            }
            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {
            // do for sure before leaving
        }
        return qMeta;
    } }
|
public class class_name {
public final QueryMeta queryStmnt() throws RecognitionException {
QueryMeta qMeta = null;
Token id=null;
Token dir=null;
Token l=null;
Token s1=null;
Token s2=null;
Token s=null;
QueryMeta fromQuery =null;
Pair<Granularity, List<Pair<Integer, Integer>>> gran =null;
Having h =null;
PostAggItem p =null;
qMeta = GroupByQueryMeta.promote(new QueryMeta());
((BaseAggQueryMeta)qMeta).aggregations = new ArrayList<>();
qMeta.intervals = new ArrayList<>();
try {
// druidG.g:150:2: ( SELECT ( ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* ) | ( WS '*' ) )? WS FROM ( ( WS id= ID ) | ( WS LPARAN (fromQuery= queryStmnt ) RPARAN ) ) ( WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )? )? )
// druidG.g:150:4: SELECT ( ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* ) | ( WS '*' ) )? WS FROM ( ( WS id= ID ) | ( WS LPARAN (fromQuery= queryStmnt ) RPARAN ) ) ( WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )? )?
{
match(input,SELECT,FOLLOW_SELECT_in_queryStmnt1180);
// druidG.g:151:7: ( ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* ) | ( WS '*' ) )?
int alt101=3;
int LA101_0 = input.LA(1);
if ( (LA101_0==WS) ) {
int LA101_1 = input.LA(2);
if ( (LA101_1==90) ) {
alt101=2; // depends on control dependency: [if], data = [none]
}
else if ( (LA101_1==COUNT||LA101_1==DOUBLE_SUM||(LA101_1 >= HYPER_UNIQUE && LA101_1 <= ID)||LA101_1==JAVASCRIPT||LA101_1==LONG_SUM||LA101_1==MAX||LA101_1==MIN||LA101_1==UNIQUE) ) {
alt101=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt101) {
case 1 :
// druidG.g:152:8: ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* )
{
// druidG.g:152:8: ( WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )* )
// druidG.g:153:9: WS selectItems[qMeta] ( ( WS )? ',' ( WS )? selectItems[qMeta] )*
{
match(input,WS,FOLLOW_WS_in_queryStmnt1208);
pushFollow(FOLLOW_selectItems_in_queryStmnt1210);
selectItems(qMeta);
state._fsp--;
// druidG.g:153:31: ( ( WS )? ',' ( WS )? selectItems[qMeta] )*
loop100:
while (true) {
int alt100=2;
int LA100_0 = input.LA(1);
if ( (LA100_0==WS) ) {
int LA100_1 = input.LA(2);
if ( (LA100_1==91) ) {
alt100=1; // depends on control dependency: [if], data = [none]
}
}
else if ( (LA100_0==91) ) {
alt100=1; // depends on control dependency: [if], data = [none]
}
switch (alt100) {
case 1 :
// druidG.g:153:32: ( WS )? ',' ( WS )? selectItems[qMeta]
{
// druidG.g:153:32: ( WS )?
int alt98=2;
int LA98_0 = input.LA(1);
if ( (LA98_0==WS) ) {
alt98=1; // depends on control dependency: [if], data = [none]
}
switch (alt98) {
case 1 :
// druidG.g:153:32: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1214);
}
break;
}
match(input,91,FOLLOW_91_in_queryStmnt1217);
// druidG.g:153:40: ( WS )?
int alt99=2;
int LA99_0 = input.LA(1);
if ( (LA99_0==WS) ) {
alt99=1; // depends on control dependency: [if], data = [none]
}
switch (alt99) {
case 1 :
// druidG.g:153:40: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1219);
}
break;
}
pushFollow(FOLLOW_selectItems_in_queryStmnt1222);
selectItems(qMeta);
state._fsp--;
}
break;
default :
break loop100;
}
}
}
}
break;
case 2 :
// druidG.g:156:8: ( WS '*' )
{
// druidG.g:156:8: ( WS '*' )
// druidG.g:156:9: WS '*'
{
match(input,WS,FOLLOW_WS_in_queryStmnt1253);
match(input,90,FOLLOW_90_in_queryStmnt1255);
}
}
break;
}
match(input,WS,FOLLOW_WS_in_queryStmnt1270);
match(input,FROM,FOLLOW_FROM_in_queryStmnt1272);
// druidG.g:159:4: ( ( WS id= ID ) | ( WS LPARAN (fromQuery= queryStmnt ) RPARAN ) )
int alt102=2;
int LA102_0 = input.LA(1);
if ( (LA102_0==WS) ) {
int LA102_1 = input.LA(2);
if ( (LA102_1==ID) ) {
alt102=1; // depends on control dependency: [if], data = [none]
}
else if ( (LA102_1==LPARAN) ) {
alt102=2; // depends on control dependency: [if], data = [none]
}
else {
int nvaeMark = input.mark();
try {
input.consume(); // depends on control dependency: [try], data = [none]
NoViableAltException nvae =
new NoViableAltException("", 102, 1, input);
throw nvae;
} finally {
input.rewind(nvaeMark);
}
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 102, 0, input);
throw nvae;
}
switch (alt102) {
case 1 :
// druidG.g:160:13: ( WS id= ID )
{
// druidG.g:160:13: ( WS id= ID )
// druidG.g:160:14: WS id= ID
{
match(input,WS,FOLLOW_WS_in_queryStmnt1293);
id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1297);
qMeta.dataSource = (id!=null?id.getText():null);
}
}
break;
case 2 :
// druidG.g:162:11: ( WS LPARAN (fromQuery= queryStmnt ) RPARAN )
{
// druidG.g:162:11: ( WS LPARAN (fromQuery= queryStmnt ) RPARAN )
// druidG.g:162:12: WS LPARAN (fromQuery= queryStmnt ) RPARAN
{
match(input,WS,FOLLOW_WS_in_queryStmnt1334);
match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1336);
// druidG.g:162:22: (fromQuery= queryStmnt )
// druidG.g:162:23: fromQuery= queryStmnt
{
pushFollow(FOLLOW_queryStmnt_in_queryStmnt1341);
fromQuery=queryStmnt();
state._fsp--;
}
match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1344);
qMeta.queryDataSource = fromQuery;
}
}
break;
}
if (((BaseAggQueryMeta)qMeta).aggregations.isEmpty()) {
qMeta = SelectQueryMeta.promote(qMeta); // depends on control dependency: [if], data = [none]
}
// druidG.g:168:2: ( WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )? )?
int alt127=2;
int LA127_0 = input.LA(1);
if ( (LA127_0==WS) ) {
int LA127_1 = input.LA(2);
if ( (LA127_1==WHERE) ) {
alt127=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt127) {
case 1 :
// druidG.g:169:4: WS WHERE WS whereClause[qMeta] ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? ) ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )? ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )?
{
match(input,WS,FOLLOW_WS_in_queryStmnt1363);
match(input,WHERE,FOLLOW_WHERE_in_queryStmnt1365);
match(input,WS,FOLLOW_WS_in_queryStmnt1367);
pushFollow(FOLLOW_whereClause_in_queryStmnt1369);
whereClause(qMeta);
state._fsp--;
// druidG.g:170:4: ( ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )? )
// druidG.g:171:5: ( WS BREAK WS BY WS gran= granularityClause )? ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )? ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )? ( WS LIMIT WS (l= LONG ) )? ( WS THEN WS p= postAggItem )?
{
// druidG.g:171:5: ( WS BREAK WS BY WS gran= granularityClause )?
int alt103=2;
int LA103_0 = input.LA(1);
if ( (LA103_0==WS) ) {
int LA103_1 = input.LA(2);
if ( (LA103_1==BREAK) ) {
alt103=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt103) {
case 1 :
// druidG.g:171:6: WS BREAK WS BY WS gran= granularityClause
{
match(input,WS,FOLLOW_WS_in_queryStmnt1387);
match(input,BREAK,FOLLOW_BREAK_in_queryStmnt1389);
match(input,WS,FOLLOW_WS_in_queryStmnt1391);
match(input,BY,FOLLOW_BY_in_queryStmnt1393);
match(input,WS,FOLLOW_WS_in_queryStmnt1395);
pushFollow(FOLLOW_granularityClause_in_queryStmnt1399);
gran=granularityClause();
state._fsp--;
qMeta.granularity = gran.a;
if (gran.b != null) {
qMeta.microIntervals.addAll(gran.b); // depends on control dependency: [if], data = [(gran.b]
}
}
break;
}
// druidG.g:178:5: ( WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )? )?
int alt108=2;
int LA108_0 = input.LA(1);
if ( (LA108_0==WS) ) {
int LA108_1 = input.LA(2);
if ( (LA108_1==GROUP) ) {
alt108=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt108) {
case 1 :
// druidG.g:178:6: WS GROUP WS BY WS (id= ID ( ( WS )? ',' ( WS )? id= ID )* ) ( WS HAVING WS h= havingClause )?
{
match(input,WS,FOLLOW_WS_in_queryStmnt1416);
match(input,GROUP,FOLLOW_GROUP_in_queryStmnt1418);
match(input,WS,FOLLOW_WS_in_queryStmnt1420);
match(input,BY,FOLLOW_BY_in_queryStmnt1422);
match(input,WS,FOLLOW_WS_in_queryStmnt1424);
qMeta = GroupByQueryMeta.promote(qMeta);
if (((GroupByQueryMeta)qMeta).fetchDimensions == null) {
System.err.println("No dimensions !! "); // depends on control dependency: [if], data = [none]
}
// druidG.g:185:10: (id= ID ( ( WS )? ',' ( WS )? id= ID )* )
// druidG.g:185:11: id= ID ( ( WS )? ',' ( WS )? id= ID )*
{
id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1450);
if (!((GroupByQueryMeta)qMeta).checkDimOrAlias((id!=null?id.getText():null))) {
System.err.println("Dimension/Alias " + (id!=null?id.getText():null) + " not valid.."); // depends on control dependency: [if], data = [none]
}
// druidG.g:191:14: ( ( WS )? ',' ( WS )? id= ID )*
loop106:
while (true) {
int alt106=2;
int LA106_0 = input.LA(1);
if ( (LA106_0==WS) ) {
int LA106_1 = input.LA(2);
if ( (LA106_1==91) ) {
alt106=1; // depends on control dependency: [if], data = [none]
}
}
else if ( (LA106_0==91) ) {
alt106=1; // depends on control dependency: [if], data = [none]
}
switch (alt106) {
case 1 :
// druidG.g:191:15: ( WS )? ',' ( WS )? id= ID
{
// druidG.g:191:15: ( WS )?
int alt104=2;
int LA104_0 = input.LA(1);
if ( (LA104_0==WS) ) {
alt104=1; // depends on control dependency: [if], data = [none]
}
switch (alt104) {
case 1 :
// druidG.g:191:15: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1482);
}
break;
}
match(input,91,FOLLOW_91_in_queryStmnt1485);
// druidG.g:191:23: ( WS )?
int alt105=2;
int LA105_0 = input.LA(1);
if ( (LA105_0==WS) ) {
alt105=1; // depends on control dependency: [if], data = [none]
}
switch (alt105) {
case 1 :
// druidG.g:191:23: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1487);
}
break;
}
id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1492);
if (!((GroupByQueryMeta)qMeta).checkDimOrAlias((id!=null?id.getText():null))) {
System.err.println("Dimension/Alias " + (id!=null?id.getText():null) + " not valid.."); // depends on control dependency: [if], data = [none]
}
}
break;
default :
break loop106;
}
}
}
// druidG.g:199:10: ( WS HAVING WS h= havingClause )?
int alt107=2;
int LA107_0 = input.LA(1);
if ( (LA107_0==WS) ) {
int LA107_1 = input.LA(2);
if ( (LA107_1==HAVING) ) {
alt107=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt107) {
case 1 :
// druidG.g:199:11: WS HAVING WS h= havingClause
{
match(input,WS,FOLLOW_WS_in_queryStmnt1549);
match(input,HAVING,FOLLOW_HAVING_in_queryStmnt1551);
match(input,WS,FOLLOW_WS_in_queryStmnt1553);
pushFollow(FOLLOW_havingClause_in_queryStmnt1557);
h=havingClause();
state._fsp--;
((GroupByQueryMeta)qMeta).having = h;
}
break;
}
}
break;
}
qMeta = QueryUtils.checkAndPromoteToTimeSeries(qMeta);
// druidG.g:203:5: ( WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )? )?
int alt110=2;
int LA110_0 = input.LA(1);
if ( (LA110_0==WS) ) {
int LA110_1 = input.LA(2);
if ( (LA110_1==ORDER) ) {
alt110=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt110) {
case 1 :
// druidG.g:203:6: WS ORDER WS BY WS (id= ID ) ( WS dir= ( ASC | DESC ) )?
{
match(input,WS,FOLLOW_WS_in_queryStmnt1601);
match(input,ORDER,FOLLOW_ORDER_in_queryStmnt1603);
match(input,WS,FOLLOW_WS_in_queryStmnt1605);
match(input,BY,FOLLOW_BY_in_queryStmnt1607);
match(input,WS,FOLLOW_WS_in_queryStmnt1609);
// druidG.g:203:24: (id= ID )
// druidG.g:203:25: id= ID
{
id=(Token)match(input,ID,FOLLOW_ID_in_queryStmnt1614);
}
if (((PlainDimQueryMeta)qMeta).fetchDimensions.size() != 1) {
((GroupByQueryMeta)qMeta).limitSpec = new LimitSpec(); // depends on control dependency: [if], data = [none]
} else {// If fetchDimensions = 1 then TopN is more optimal.
qMeta = TopNQueryMeta.promote(qMeta); // depends on control dependency: [if], data = [none]
((TopNQueryMeta)qMeta).metric = (id!=null?id.getText():null); // depends on control dependency: [if], data = [none]
}
// druidG.g:214:9: ( WS dir= ( ASC | DESC ) )?
int alt109=2;
int LA109_0 = input.LA(1);
if ( (LA109_0==WS) ) {
int LA109_1 = input.LA(2);
if ( (LA109_1==ASC||LA109_1==DESC) ) {
alt109=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt109) {
case 1 :
// druidG.g:214:10: WS dir= ( ASC | DESC )
{
match(input,WS,FOLLOW_WS_in_queryStmnt1647);
dir=input.LT(1);
if ( input.LA(1)==ASC||input.LA(1)==DESC ) {
input.consume(); // depends on control dependency: [if], data = [none]
state.errorRecovery=false; // depends on control dependency: [if], data = [none]
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
throw mse;
}
if (qMeta instanceof GroupByQueryMeta && ((GroupByQueryMeta)qMeta).limitSpec != null) {
if (dir != null && (dir!=null?dir.getText():null) != null) {
((GroupByQueryMeta)qMeta).limitSpec.addColumn((id!=null?id.getText():null), (dir!=null?dir.getText():null)); // depends on control dependency: [if], data = [(dir]
} else {
((GroupByQueryMeta)qMeta).limitSpec.addColumn((id!=null?id.getText():null), "ASC"); // depends on control dependency: [if], data = [none]
}
}
}
break;
}
// At this point if the qMeta is not TopN and is still GroupBy then do the following(default is ascending sort).
if (qMeta instanceof GroupByQueryMeta && ((GroupByQueryMeta)qMeta).limitSpec != null) {
if (!((GroupByQueryMeta)qMeta).limitSpec.columns.containsKey((id!=null?id.getText():null))) {
((GroupByQueryMeta)qMeta).limitSpec.addColumn((id!=null?id.getText():null), "ASC"); // depends on control dependency: [if], data = [none]
}
}
}
break;
}
// druidG.g:235:5: ( WS LIMIT WS (l= LONG ) )?
int alt111=2;
int LA111_0 = input.LA(1);
if ( (LA111_0==WS) ) {
int LA111_1 = input.LA(2);
if ( (LA111_1==LIMIT) ) {
alt111=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt111) {
case 1 :
// druidG.g:236:6: WS LIMIT WS (l= LONG )
{
match(input,WS,FOLLOW_WS_in_queryStmnt1701);
match(input,LIMIT,FOLLOW_LIMIT_in_queryStmnt1703);
match(input,WS,FOLLOW_WS_in_queryStmnt1705);
// druidG.g:236:18: (l= LONG )
// druidG.g:236:19: l= LONG
{
l=(Token)match(input,LONG,FOLLOW_LONG_in_queryStmnt1710);
}
if (qMeta instanceof SelectQueryMeta) {
((SelectQueryMeta)qMeta).pagingSpec.threshold = Integer.valueOf((l!=null?l.getText():null)); // depends on control dependency: [if], data = [none]
} else if (qMeta instanceof TopNQueryMeta) {
((TopNQueryMeta)qMeta).threshold = Integer.valueOf((l!=null?l.getText():null)); // depends on control dependency: [if], data = [none]
} else if (((PlainDimQueryMeta)qMeta).fetchDimensions.size() != 1) {
if (((GroupByQueryMeta)qMeta).limitSpec != null) {
((GroupByQueryMeta)qMeta).limitSpec.limit = Long.valueOf((l!=null?l.getText():null)); // depends on control dependency: [if], data = [null)]
}
}
}
break;
}
// druidG.g:250:7: ( WS THEN WS p= postAggItem )?
int alt112=2;
int LA112_0 = input.LA(1);
if ( (LA112_0==WS) ) {
int LA112_1 = input.LA(2);
if ( (LA112_1==THEN) ) {
alt112=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt112) {
case 1 :
// druidG.g:250:8: WS THEN WS p= postAggItem
{
match(input,WS,FOLLOW_WS_in_queryStmnt1748);
match(input,THEN,FOLLOW_THEN_in_queryStmnt1750);
match(input,WS,FOLLOW_WS_in_queryStmnt1752);
pushFollow(FOLLOW_postAggItem_in_queryStmnt1756);
p=postAggItem();
state._fsp--;
QueryUtils.setPostAggregation(qMeta, p);
}
break;
}
}
// druidG.g:252:4: ( WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN )?
int alt122=2;
int LA122_0 = input.LA(1);
if ( (LA122_0==WS) ) {
int LA122_1 = input.LA(2);
if ( (LA122_1==WHICH) ) {
alt122=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt122) {
case 1 :
// druidG.g:252:5: WS WHICH WS CONTAINS ( WS )? LPARAN ( WS )? (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* ) ( WS )? RPARAN WS SORT ( WS )? LPARAN ( WS )? (s= SINGLE_QUOTE_STRING ) ( WS )? RPARAN
{
match(input,WS,FOLLOW_WS_in_queryStmnt1771);
match(input,WHICH,FOLLOW_WHICH_in_queryStmnt1773);
match(input,WS,FOLLOW_WS_in_queryStmnt1775);
match(input,CONTAINS,FOLLOW_CONTAINS_in_queryStmnt1777);
qMeta = SearchQueryMeta.promote(qMeta);
// druidG.g:252:68: ( WS )?
int alt113=2;
int LA113_0 = input.LA(1);
if ( (LA113_0==WS) ) {
alt113=1; // depends on control dependency: [if], data = [none]
}
switch (alt113) {
case 1 :
// druidG.g:252:68: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1781);
}
break;
}
match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1784);
// druidG.g:252:79: ( WS )?
int alt114=2;
int LA114_0 = input.LA(1);
if ( (LA114_0==WS) ) {
alt114=1; // depends on control dependency: [if], data = [none]
}
switch (alt114) {
case 1 :
// druidG.g:252:79: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1786);
}
break;
}
// druidG.g:252:83: (s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )* )
// druidG.g:252:84: s1= SINGLE_QUOTE_STRING ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )*
{
s1=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1792);
((SearchQueryMeta)qMeta).type = "insensitive_contains";((SearchQueryMeta)qMeta).addValue((s1!=null?s1.getText():null));
// druidG.g:252:208: ( ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING )*
loop117:
while (true) {
int alt117=2;
int LA117_0 = input.LA(1);
if ( (LA117_0==WS) ) {
int LA117_1 = input.LA(2);
if ( (LA117_1==91) ) {
alt117=1; // depends on control dependency: [if], data = [none]
}
}
else if ( (LA117_0==91) ) {
alt117=1; // depends on control dependency: [if], data = [none]
}
switch (alt117) {
case 1 :
// druidG.g:252:209: ( WS )? ',' ( WS )? s2= SINGLE_QUOTE_STRING
{
// druidG.g:252:209: ( WS )?
int alt115=2;
int LA115_0 = input.LA(1);
if ( (LA115_0==WS) ) {
alt115=1; // depends on control dependency: [if], data = [none]
}
switch (alt115) {
case 1 :
// druidG.g:252:209: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1796);
}
break;
}
match(input,91,FOLLOW_91_in_queryStmnt1799);
// druidG.g:252:217: ( WS )?
int alt116=2;
int LA116_0 = input.LA(1);
if ( (LA116_0==WS) ) {
alt116=1; // depends on control dependency: [if], data = [none]
}
switch (alt116) {
case 1 :
// druidG.g:252:217: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1801);
}
break;
}
s2=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1806);
((SearchQueryMeta)qMeta).type = "fragment";((SearchQueryMeta)qMeta).addValue((s2!=null?s2.getText():null));
}
break;
default :
break loop117;
}
}
}
// druidG.g:252:337: ( WS )?
int alt118=2;
int LA118_0 = input.LA(1);
if ( (LA118_0==WS) ) {
alt118=1; // depends on control dependency: [if], data = [none]
}
switch (alt118) {
case 1 :
// druidG.g:252:337: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1813);
}
break;
}
match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1816);
match(input,WS,FOLLOW_WS_in_queryStmnt1822);
match(input,SORT,FOLLOW_SORT_in_queryStmnt1824);
// druidG.g:253:13: ( WS )?
int alt119=2;
int LA119_0 = input.LA(1);
if ( (LA119_0==WS) ) {
alt119=1; // depends on control dependency: [if], data = [none]
}
switch (alt119) {
case 1 :
// druidG.g:253:13: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1826);
}
break;
}
match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1829);
// druidG.g:253:24: ( WS )?
int alt120=2;
int LA120_0 = input.LA(1);
if ( (LA120_0==WS) ) {
alt120=1; // depends on control dependency: [if], data = [none]
}
switch (alt120) {
case 1 :
// druidG.g:253:24: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1831);
}
break;
}
// druidG.g:253:28: (s= SINGLE_QUOTE_STRING )
// druidG.g:253:29: s= SINGLE_QUOTE_STRING
{
s=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1837);
}
((SearchQueryMeta)qMeta).setSort((s!=null?s.getText():null));
// druidG.g:253:97: ( WS )?
int alt121=2;
int LA121_0 = input.LA(1);
if ( (LA121_0==WS) ) {
alt121=1; // depends on control dependency: [if], data = [none]
}
switch (alt121) {
case 1 :
// druidG.g:253:97: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1842);
}
break;
}
match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1845);
}
break;
}
// druidG.g:255:4: ( WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN )?
int alt126=2;
int LA126_0 = input.LA(1);
if ( (LA126_0==WS) ) {
int LA126_1 = input.LA(2);
if ( (LA126_1==HINT) ) {
alt126=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt126) {
case 1 :
// druidG.g:255:5: WS HINT ( WS )? LPARAN ( WS )? s= SINGLE_QUOTE_STRING ( WS )? RPARAN
{
match(input,WS,FOLLOW_WS_in_queryStmnt1857);
match(input,HINT,FOLLOW_HINT_in_queryStmnt1859);
// druidG.g:255:13: ( WS )?
int alt123=2;
int LA123_0 = input.LA(1);
if ( (LA123_0==WS) ) {
alt123=1; // depends on control dependency: [if], data = [none]
}
switch (alt123) {
case 1 :
// druidG.g:255:13: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1861);
}
break;
}
match(input,LPARAN,FOLLOW_LPARAN_in_queryStmnt1864);
// druidG.g:255:24: ( WS )?
int alt124=2;
int LA124_0 = input.LA(1);
if ( (LA124_0==WS) ) {
alt124=1; // depends on control dependency: [if], data = [none]
}
switch (alt124) {
case 1 :
// druidG.g:255:24: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1866);
}
break;
}
s=(Token)match(input,SINGLE_QUOTE_STRING,FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1871);
qMeta = HintProcessor.process(qMeta, (s!=null?s.getText():null));
// druidG.g:255:99: ( WS )?
int alt125=2;
int LA125_0 = input.LA(1);
if ( (LA125_0==WS) ) {
alt125=1; // depends on control dependency: [if], data = [none]
}
switch (alt125) {
case 1 :
// druidG.g:255:99: WS
{
match(input,WS,FOLLOW_WS_in_queryStmnt1875);
}
break;
}
match(input,RPARAN,FOLLOW_RPARAN_in_queryStmnt1878);
}
break;
}
}
break;
}
if (qMeta.intervals == null || qMeta.intervals.isEmpty()) {
qMeta = TimeBoundaryQueryMeta.promote(qMeta); // depends on control dependency: [if], data = [none]
}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return qMeta;
} }
|
public class class_name {
    /**
     * Executes an inbound HTTP request against the apiman engine and streams the
     * back-end API's response (or a policy failure / error) to the servlet response.
     * Blocks the calling servlet thread until the asynchronous engine has finished.
     *
     * @param req    the inbound servlet request
     * @param resp   the servlet response the result is written to
     * @param action the request type stamped onto the parsed {@link ApiRequest}
     */
    protected void doAction(final HttpServletRequest req, final HttpServletResponse resp, String action) {
        // Read and parse the request; any parse failure is reported immediately.
        ApiRequest srequest;
        try {
            srequest = readRequest(req);
            srequest.setType(action);
        } catch (Exception e) {
            writeError(null, resp, e);
            return;
        }
        // The latch keeps this thread alive until the async engine has fully written the response.
        final CountDownLatch latch = new CountDownLatch(1);
        final ApiRequest finalRequest = srequest;
        // Now execute the request via the apiman engine
        IApiRequestExecutor executor = getEngine().executor(srequest, new IAsyncResultHandler<IEngineResult>() {
            @Override
            public void handle(IAsyncResult<IEngineResult> asyncResult) {
                if (asyncResult.isSuccess()) {
                    IEngineResult engineResult = asyncResult.getResult();
                    if (engineResult.isResponse()) {
                        try {
                            writeResponse(resp, engineResult.getApiResponse());
                            final ServletOutputStream outputStream = resp.getOutputStream();
                            engineResult.bodyHandler(new IAsyncHandler<IApimanBuffer>() {
                                @Override
                                public void handle(IApimanBuffer chunk) {
                                    try {
                                        if (chunk instanceof ByteBuffer) {
                                            // Fast path: write the native buffer directly, avoiding a copy.
                                            byte [] buffer = (byte []) chunk.getNativeBuffer();
                                            outputStream.write(buffer, 0, chunk.length());
                                        } else {
                                            outputStream.write(chunk.getBytes());
                                        }
                                    } catch (IOException e) {
                                        // This will get caught by the API connector, which will abort the
                                        // connection to the back-end API.
                                        throw new RuntimeException(e);
                                    }
                                }
                            });
                            engineResult.endHandler(new IAsyncHandler<Void>() {
                                @Override
                                public void handle(Void result) {
                                    try {
                                        resp.flushBuffer();
                                    } catch (IOException e) {
                                        // This will get caught by the API connector, which will abort the
                                        // connection to the back-end API.
                                        throw new RuntimeException(e);
                                    } finally {
                                        // Release the servlet thread whether or not the flush succeeded.
                                        latch.countDown();
                                    }
                                }
                            });
                        } catch (IOException e) {
                            // this would mean we couldn't get the output stream from the response, so we
                            // need to abort the engine result (which will let the back-end connection
                            // close down).
                            engineResult.abort(e);
                            latch.countDown();
                            throw new RuntimeException(e);
                        }
                    } else {
                        writeFailure(finalRequest, resp, engineResult.getPolicyFailure());
                        latch.countDown();
                    }
                } else {
                    writeError(finalRequest, resp, asyncResult.getError());
                    latch.countDown();
                }
            }
        });
        executor.streamHandler(new IAsyncHandler<ISignalWriteStream>() {
            @Override
            public void handle(ISignalWriteStream connectorStream) {
                try {
                    // Pump the inbound request body to the back-end connector in 2KB chunks.
                    final InputStream is = req.getInputStream();
                    ByteBuffer buffer = new ByteBuffer(2048);
                    int numBytes = buffer.readFrom(is);
                    while (numBytes != -1) {
                        connectorStream.write(buffer);
                        numBytes = buffer.readFrom(is);
                    }
                    connectorStream.end();
                } catch (Throwable e) {
                    connectorStream.abort(e);
                }
            }
        });
        executor.execute();
        try {
            latch.await();
        } catch (InterruptedException e) {
            // FIX: the original swallowed the interrupt entirely; restore the flag
            // so the container/caller can observe that this thread was interrupted.
            Thread.currentThread().interrupt();
        }
    }
}
|
public class class_name {
    // Dependency-annotated variant of doAction: the trailing "// depends on ..." markers
    // record each statement's control/data dependencies; the code is otherwise identical
    // to the unannotated copy of this method earlier in the file.
    protected void doAction(final HttpServletRequest req, final HttpServletResponse resp, String action) {
        // Read the request.
        ApiRequest srequest;
        try {
            srequest = readRequest(req); // depends on control dependency: [try], data = [none]
            srequest.setType(action); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            writeError(null, resp, e);
            return;
        } // depends on control dependency: [catch], data = [none]
        final CountDownLatch latch = new CountDownLatch(1);
        final ApiRequest finalRequest = srequest;
        // Now execute the request via the apiman engine
        IApiRequestExecutor executor = getEngine().executor(srequest, new IAsyncResultHandler<IEngineResult>() {
            @Override
            public void handle(IAsyncResult<IEngineResult> asyncResult) {
                if (asyncResult.isSuccess()) {
                    IEngineResult engineResult = asyncResult.getResult();
                    if (engineResult.isResponse()) {
                        try {
                            writeResponse(resp, engineResult.getApiResponse()); // depends on control dependency: [try], data = [none]
                            final ServletOutputStream outputStream = resp.getOutputStream();
                            engineResult.bodyHandler(new IAsyncHandler<IApimanBuffer>() {
                                @Override
                                public void handle(IApimanBuffer chunk) {
                                    try {
                                        if (chunk instanceof ByteBuffer) {
                                            byte [] buffer = (byte []) chunk.getNativeBuffer();
                                            outputStream.write(buffer, 0, chunk.length()); // depends on control dependency: [if], data = [none]
                                        } else {
                                            outputStream.write(chunk.getBytes()); // depends on control dependency: [if], data = [none]
                                        }
                                    } catch (IOException e) {
                                        // This will get caught by the API connector, which will abort the
                                        // connection to the back-end API.
                                        throw new RuntimeException(e);
                                    } // depends on control dependency: [catch], data = [none]
                                }
                            }); // depends on control dependency: [try], data = [none]
                            engineResult.endHandler(new IAsyncHandler<Void>() {
                                @Override
                                public void handle(Void result) {
                                    try {
                                        resp.flushBuffer(); // depends on control dependency: [try], data = [none]
                                    } catch (IOException e) {
                                        // This will get caught by the API connector, which will abort the
                                        // connection to the back-end API.
                                        throw new RuntimeException(e);
                                    } finally { // depends on control dependency: [catch], data = [none]
                                        latch.countDown();
                                    }
                                }
                            }); // depends on control dependency: [try], data = [none]
                        } catch (IOException e) {
                            // this would mean we couldn't get the output stream from the response, so we
                            // need to abort the engine result (which will let the back-end connection
                            // close down).
                            engineResult.abort(e);
                            latch.countDown();
                            throw new RuntimeException(e);
                        } // depends on control dependency: [catch], data = [none]
                    } else {
                        writeFailure(finalRequest, resp, engineResult.getPolicyFailure()); // depends on control dependency: [if], data = [none]
                        latch.countDown(); // depends on control dependency: [if], data = [none]
                    }
                } else {
                    writeError(finalRequest, resp, asyncResult.getError()); // depends on control dependency: [if], data = [none]
                    latch.countDown(); // depends on control dependency: [if], data = [none]
                }
            }
        });
        executor.streamHandler(new IAsyncHandler<ISignalWriteStream>() {
            @Override
            public void handle(ISignalWriteStream connectorStream) {
                try {
                    final InputStream is = req.getInputStream();
                    ByteBuffer buffer = new ByteBuffer(2048);
                    int numBytes = buffer.readFrom(is);
                    while (numBytes != -1) {
                        connectorStream.write(buffer); // depends on control dependency: [while], data = [none]
                        numBytes = buffer.readFrom(is); // depends on control dependency: [while], data = [none]
                    }
                    connectorStream.end(); // depends on control dependency: [try], data = [none]
                } catch (Throwable e) {
                    connectorStream.abort(e);
                } // depends on control dependency: [catch], data = [none]
            }
        });
        executor.execute();
        try { latch.await(); } catch (InterruptedException e) { } // depends on control dependency: [try], data = [none] // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Appends the given flagged resources to this result, lazily creating the
     * backing list (sized to the argument count) on first use.
     *
     * @param flaggedResources the resources flagged by the Trusted Advisor check
     * @return this object, for call chaining
     */
    public TrustedAdvisorCheckResult withFlaggedResources(TrustedAdvisorResourceDetail... flaggedResources) {
        if (this.flaggedResources == null) {
            setFlaggedResources(new com.amazonaws.internal.SdkInternalList<TrustedAdvisorResourceDetail>(flaggedResources.length));
        }
        // Append each element in argument order.
        for (int i = 0; i < flaggedResources.length; i++) {
            this.flaggedResources.add(flaggedResources[i]);
        }
        return this;
    }
}
|
public class class_name {
    // Dependency-annotated variant of withFlaggedResources; the trailing markers record
    // each statement's control/data dependencies. Code is identical to the plain copy.
    public TrustedAdvisorCheckResult withFlaggedResources(TrustedAdvisorResourceDetail... flaggedResources) {
        if (this.flaggedResources == null) {
            setFlaggedResources(new com.amazonaws.internal.SdkInternalList<TrustedAdvisorResourceDetail>(flaggedResources.length)); // depends on control dependency: [if], data = [none]
        }
        for (TrustedAdvisorResourceDetail ele : flaggedResources) {
            this.flaggedResources.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Builds an identity index array: a new long[] of the given length whose
     * element at position i is i.
     *
     * @param length the number of indices to generate (0 yields an empty array)
     * @return a fresh array containing 0, 1, ..., length-1
     */
    public static long[] getLongIndexArray(int length) {
        final long[] indices = new long[length];
        int position = 0;
        while (position < indices.length) {
            indices[position] = position;
            position++;
        }
        return indices;
    }
}
|
public class class_name {
    // Dependency-annotated variant of getLongIndexArray; the trailing markers record
    // each statement's control/data dependencies. Code is identical to the plain copy.
    public static long[] getLongIndexArray(int length) {
        long[] index = new long[length];
        for (int i=0; i<index.length; i++) {
            index[i] = i; // depends on control dependency: [for], data = [i]
        }
        return index;
    } }
|
public class class_name {
    /**
     * Serializes the given properties as a small hand-built JSON client-config
     * snippet. Returns "{}" when there are no properties; otherwise each property
     * becomes a tab-indented {@code "key": "value"} line, comma-separated.
     * NOTE(review): keys/values are not JSON-escaped, and iteration order follows
     * {@link Properties#stringPropertyNames()} (unspecified for multiple keys).
     *
     * @param props the client configuration properties (must not be null)
     * @return the serialized config string
     */
    public static String writeSingleClientConfigAvro(Properties props) {
        // TODO: Use a dedicated json lib. We shouldn't be manually manipulating json...
        // FIX: build with StringBuilder instead of repeated String concatenation in a
        // loop (accidentally O(n^2)), and use primitive boolean over boxed Boolean.
        StringBuilder avroConfig = new StringBuilder();
        boolean firstProp = true;
        for (String key : props.stringPropertyNames()) {
            if (firstProp) {
                firstProp = false;
            } else {
                avroConfig.append(",\n");
            }
            avroConfig.append("\t\t\"").append(key).append("\": \"").append(props.getProperty(key)).append("\"");
        }
        if (avroConfig.length() == 0) {
            return "{}";
        }
        return "{\n" + avroConfig + "\n\t}";
    }
}
|
public class class_name {
    // Dependency-annotated variant of writeSingleClientConfigAvro; the trailing markers
    // record each statement's control/data dependencies. Code is identical to the plain copy.
    public static String writeSingleClientConfigAvro(Properties props) {
        // TODO: Use a dedicated json lib. We shouldn't be manually manipulating json...
        String avroConfig = "";
        Boolean firstProp = true;
        for(String key: props.stringPropertyNames()) {
            if(firstProp) {
                firstProp = false; // depends on control dependency: [if], data = [none]
            } else {
                avroConfig = avroConfig + ",\n"; // depends on control dependency: [if], data = [none]
            }
            avroConfig = avroConfig + "\t\t\"" + key + "\": \"" + props.getProperty(key) + "\""; // depends on control dependency: [for], data = [key]
        }
        if(avroConfig.isEmpty()) {
            return "{}";
        } else {
            return "{\n" + avroConfig + "\n\t}"; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Resolves the error page resource for the given HTTP status code.
     * An absolute errorpagesPath is looked up directly; a relative one is searched
     * for by climbing from the requested resource towards the root. When no custom
     * page exists, the configured default error pages location is consulted.
     *
     * @param request the current Sling request
     * @param status  the HTTP status code to find a page for
     * @return the matching error page resource, or null if none was found
     */
    @Override
    public Resource getErrorpage(SlingHttpServletRequest request, int status) {
        final ResourceResolver resolver = request.getResourceResolver();
        Resource errorpage = null;
        if (StringUtils.isNotBlank(errorpagesPath)) {
            if (errorpagesPath.startsWith("/")) {
                // absolute configured path: a single direct lookup is enough
                errorpage = resolver.getResource(errorpagesPath + "/" + status);
            } else {
                String candidatePath = request.getRequestPathInfo().getResourcePath();
                Resource candidate = resolver.resolve(request, candidatePath);
                // climb to the nearest existing ancestor of the requested path
                while (ResourceUtil.isNonExistingResource(candidate)) {
                    final int lastSlash = candidatePath.lastIndexOf('/');
                    candidatePath = (lastSlash > 0) ? candidatePath.substring(0, lastSlash) : "/";
                    candidate = resolver.resolve(request, candidatePath);
                }
                // walk up the tree until a status-specific error page turns up
                while (errorpage == null && candidate != null) {
                    candidatePath = candidate.getPath();
                    if ("/".equals(candidatePath)) {
                        candidatePath = "";
                    }
                    errorpage = resolver.getResource(candidatePath + "/" + errorpagesPath + "/" + status);
                    if (errorpage == null) {
                        candidate = candidate.getParent();
                    }
                }
            }
        }
        if (errorpage == null && StringUtils.isNotBlank(defaultErrorpages)) {
            // no custom page found anywhere: fall back to the configured default
            errorpage = resolver.getResource(defaultErrorpages + "/" + status);
        }
        return errorpage;
    }
}
|
public class class_name {
    // Dependency-annotated variant of getErrorpage; the trailing markers record each
    // statement's control/data dependencies. Code is identical to the plain copy.
    @Override
    public Resource getErrorpage(SlingHttpServletRequest request, int status) {
        Resource errorpage = null;
        ResourceResolver resolver = request.getResourceResolver();
        if (StringUtils.isNotBlank(errorpagesPath)) {
            if (errorpagesPath.startsWith("/")) {
                // if the configured path is an absolute path use this path only
                errorpage = resolver.getResource(errorpagesPath + "/" + status); // depends on control dependency: [if], data = [none]
            } else {
                String path = request.getRequestPathInfo().getResourcePath();
                Resource resource = resolver.resolve(request, path);
                // skip non existing resource paths in the requested path
                while (ResourceUtil.isNonExistingResource(resource)) {
                    int lastSlash = path.lastIndexOf('/');
                    if (lastSlash > 0) {
                        path = path.substring(0, lastSlash); // depends on control dependency: [if], data = [none]
                    } else {
                        path = "/"; // depends on control dependency: [if], data = [none]
                    }
                    resource = resolver.resolve(request, path); // depends on control dependency: [while], data = [none]
                }
                // scan upwards for an appropriate error page
                while (errorpage == null && resource != null) {
                    path = resource.getPath(); // depends on control dependency: [while], data = [none]
                    if ("/".equals(path)) {
                        path = ""; // depends on control dependency: [if], data = [none]
                    }
                    errorpage = resolver.getResource(path + "/" + errorpagesPath + "/" + status); // depends on control dependency: [while], data = [none]
                    if (errorpage == null) {
                        resource = resource.getParent(); // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
        if (errorpage == null && StringUtils.isNotBlank(defaultErrorpages)) {
            // use the default page if no custom error page found
            errorpage = resolver.getResource(defaultErrorpages + "/" + status); // depends on control dependency: [if], data = [none]
        }
        return errorpage;
    } }
|
public class class_name {
    /**
     * Extracts the "isFt" boolean from the service's extension elements.
     * Defaults to false; when several JAXBElements are present the last one wins
     * (matching the original scan-everything behavior).
     *
     * @param bw the BW service element to inspect
     * @return the wrapped boolean value, or false when no JAXBElement is present
     */
    private Boolean isIsFt(Bw bw) {
        Boolean result = Boolean.FALSE;
        for (final Object element : bw.getRest()) {
            if (!element.getClass().equals(JAXBElement.class)) {
                continue;
            }
            // should never be other than Boolean according to
            // com.tibco.xmlns.applicationmanagement.BWServiceType
            @SuppressWarnings("unchecked")
            final JAXBElement<Boolean> wrapped = (JAXBElement<Boolean>) element;
            result = wrapped.getValue();
        }
        return result;
    }
}
|
public class class_name {
    // Dependency-annotated variant of isIsFt; the trailing markers record each
    // statement's control/data dependencies. Code is identical to the plain copy.
    private Boolean isIsFt(Bw bw) {
        Boolean value = false; // default to false (?)
        for (Object o : bw.getRest()) {
            if (o.getClass().equals(JAXBElement.class)) {
                // should never be other than Boolean according to
                // com.tibco.xmlns.applicationmanagement.BWServiceType
                @SuppressWarnings("unchecked")
                JAXBElement<Boolean> e = (JAXBElement<Boolean>) o;
                value = e.getValue(); // depends on control dependency: [if], data = [none]
            }
        }
        return value;
    } }
|
public class class_name {
    /**
     * Swaps {@code yyn} consecutive entries of {@code fmap}: the run starting at
     * {@code yyp1} with the run starting at {@code yyp2}, one element pair at a time.
     */
    private void fvswap(int[] fmap, int yyp1, int yyp2, int yyn) {
        for (int remaining = yyn; remaining > 0; remaining--) {
            // post-increment: swap the current pair, then advance both cursors
            fswap(fmap, yyp1++, yyp2++);
        }
    }
}
|
public class class_name {
    // Dependency-annotated variant of fvswap; the trailing markers record each
    // statement's control/data dependencies. Code is identical to the plain copy.
    private void fvswap(int[] fmap, int yyp1, int yyp2, int yyn) {
        while (yyn > 0) {
            fswap(fmap, yyp1, yyp2); // depends on control dependency: [while], data = [none]
            yyp1++; yyp2++; yyn--; // depends on control dependency: [while], data = [none] // depends on control dependency: [while], data = [none] // depends on control dependency: [while], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Unbinds the given context from the validation session. A no-op when no
     * session exists or the context is not a ValidationObject.
     */
    @Override
    public void unbind(Object context) {
        if (m_validationSession == null) {
            return;
        }
        if (context instanceof ValidationObject) {
            m_validationSession.unbind((ValidationObject) context);
        }
    }
}
|
public class class_name {
    // Dependency-annotated variant of unbind; the trailing marker records the
    // statement's control/data dependencies. Code is identical to the plain copy.
    @Override
    public void unbind(Object context) {
        if (m_validationSession != null && context instanceof ValidationObject) {
            m_validationSession.unbind((ValidationObject) context); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Parses raw track 3 data. When the trimmed input matches the track 3 pattern,
     * the raw track data (group 1) and discretionary data (group 2) are extracted;
     * otherwise a Track3 with null raw data and empty discretionary data is returned.
     *
     * @param rawTrackData the raw track data, possibly null or padded
     * @return a Track3 value, never null
     */
    public static Track3 from(final String rawTrackData)
    {
        final Matcher matcher = track3Pattern.matcher(trimToEmpty(rawTrackData));
        if (!matcher.matches())
        {
            // no match: same defaults the original assigned in its else branch
            return new Track3(null, "");
        }
        return new Track3(getGroup(matcher, 1), getGroup(matcher, 2));
    }
}
|
public class class_name {
    // Dependency-annotated variant of Track3.from; the trailing markers record each
    // statement's control/data dependencies. Code is identical to the plain copy.
    public static Track3 from(final String rawTrackData)
    {
        final Matcher matcher = track3Pattern.matcher(trimToEmpty(rawTrackData));
        final String rawTrack3Data;
        final String discretionaryData;
        if (matcher.matches())
        {
            rawTrack3Data = getGroup(matcher, 1); // depends on control dependency: [if], data = [none]
            discretionaryData = getGroup(matcher, 2); // depends on control dependency: [if], data = [none]
        }
        else
        {
            rawTrack3Data = null; // depends on control dependency: [if], data = [none]
            discretionaryData = ""; // depends on control dependency: [if], data = [none]
        }
        return new Track3(rawTrack3Data, discretionaryData);
    } }
|
public class class_name {
    /**
     * Writes a WIDTH x HEIGHT rectangle of pixels into IMAGE at (X, Y).
     * For TYPE_INT_ARGB / TYPE_INT_RGB images the raster is written directly
     * (no color-model conversion); all other image types go through
     * {@link BufferedImage#setRGB}.
     */
    public void setRGB(final BufferedImage IMAGE, final int X, final int Y, final int WIDTH, final int HEIGHT, final int[] PIXELS) {
        final int imageType = IMAGE.getType();
        final boolean directRaster =
                imageType == BufferedImage.TYPE_INT_ARGB || imageType == BufferedImage.TYPE_INT_RGB;
        if (directRaster) {
            IMAGE.getRaster().setDataElements(X, Y, WIDTH, HEIGHT, PIXELS);
        } else {
            IMAGE.setRGB(X, Y, WIDTH, HEIGHT, PIXELS, 0, WIDTH);
        }
    }
}
|
public class class_name {
    // Dependency-annotated variant of setRGB; the trailing markers record each
    // statement's control/data dependencies. Code is identical to the plain copy.
    public void setRGB(final BufferedImage IMAGE, final int X, final int Y, final int WIDTH, final int HEIGHT, final int[] PIXELS) {
        int type = IMAGE.getType();
        if (type == BufferedImage.TYPE_INT_ARGB || type == BufferedImage.TYPE_INT_RGB) {
            IMAGE.getRaster().setDataElements(X, Y, WIDTH, HEIGHT, PIXELS); // depends on control dependency: [if], data = [none]
        } else {
            IMAGE.setRGB(X, Y, WIDTH, HEIGHT, PIXELS, 0, WIDTH); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Returns the available balance. When no explicit value is present it is
     * derived as: total - frozen - loaned + borrowed - withdrawing - depositing.
     */
    public BigDecimal getAvailable() {
        if (available != null) {
            return available;
        }
        // Derive from the component balances, step by step.
        BigDecimal derived = total.subtract(frozen);
        derived = derived.subtract(loaned);
        derived = derived.add(borrowed);
        derived = derived.subtract(withdrawing);
        return derived.subtract(depositing);
    }
}
|
public class class_name {
    // Dependency-annotated variant of getAvailable; the trailing markers record each
    // statement's control/data dependencies. Code is identical to the plain copy.
    public BigDecimal getAvailable() {
        if (available == null) {
            return total
                    .subtract(frozen)
                    .subtract(loaned)
                    .add(borrowed)
                    .subtract(withdrawing)
                    .subtract(depositing); // depends on control dependency: [if], data = [none]
        } else {
            return available; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Converts a wildcard expression ('*' = any run of characters, '?' = exactly
     * one character) into a regular expression string. Returns null for a null
     * expression or when the mask contains a character outside the supported
     * alphabet.
     *
     * FIX: the original only checked its convertingError flag at the TOP of the
     * next loop iteration, so a mask ENDING in an illegal character fell through
     * the loop and returned a partial regexp instead of null. We now abort
     * immediately when an illegal character is seen.
     *
     * @param pWildcardExpression the wildcard mask, possibly null
     * @return the equivalent regular expression, or null on invalid input
     */
    private String convertWildcardExpressionToRegularExpression(final String pWildcardExpression) {
        if (pWildcardExpression == null) {
            if (mDebugging) {
                out.println(DebugUtil.getPrefixDebugMessage(this) + "wildcard expression is null - also returning null as regexp!");
            }
            return null;
        }
        StringBuilder regexpBuffer = new StringBuilder();
        for (int i = 0; i < pWildcardExpression.length(); i++) {
            char stringMaskChar = pWildcardExpression.charAt(i);
            // Free-range character '*'
            if (isFreeRangeCharacter(stringMaskChar)) {
                regexpBuffer.append("(([a-�A-�0-9]|.|_|-)*)");
            }
            // Free-pass character '?'
            else if (isFreePassCharacter(stringMaskChar)) {
                regexpBuffer.append("([a-�A_�0-9]|.|_|-)");
            }
            // Valid characters
            else if (isInAlphabet(stringMaskChar)) {
                regexpBuffer.append(stringMaskChar);
            }
            // Invalid character - abort immediately (see FIX note above)
            else {
                if (mDebugging) {
                    out.println(DebugUtil.getPrefixDebugMessage(this)
                            + "one or more characters in string mask are not legal characters - returning null as regexp!");
                }
                return null;
            }
        }
        return regexpBuffer.toString();
    }
}
|
public class class_name {
    // Dependency-annotated variant of convertWildcardExpressionToRegularExpression;
    // the "// depends on ..." marker lines record each statement's control/data
    // dependencies. Code is identical to the plain copy.
    private String convertWildcardExpressionToRegularExpression(final String pWildcardExpression) {
        if (pWildcardExpression == null) {
            if (mDebugging) {
                out.println(DebugUtil.getPrefixDebugMessage(this) + "wildcard expression is null - also returning null as regexp!");
                // depends on control dependency: [if], data = [none]
            }
            return null;
            // depends on control dependency: [if], data = [none]
        }
        StringBuilder regexpBuffer = new StringBuilder();
        boolean convertingError = false;
        for (int i = 0; i < pWildcardExpression.length(); i++) {
            if (convertingError) {
                return null;
                // depends on control dependency: [if], data = [none]
            }
            // Free-range character '*'
            char stringMaskChar = pWildcardExpression.charAt(i);
            if (isFreeRangeCharacter(stringMaskChar)) {
                regexpBuffer.append("(([a-�A-�0-9]|.|_|-)*)");
                // depends on control dependency: [if], data = [none]
            }
            // Free-pass character '?'
            else if (isFreePassCharacter(stringMaskChar)) {
                regexpBuffer.append("([a-�A_�0-9]|.|_|-)");
                // depends on control dependency: [if], data = [none]
            }
            // Valid characters
            else if (isInAlphabet(stringMaskChar)) {
                regexpBuffer.append(stringMaskChar);
                // depends on control dependency: [if], data = [none]
            }
            // Invalid character - aborting
            else {
                if (mDebugging) {
                    out.println(DebugUtil.getPrefixDebugMessage(this)
                            + "one or more characters in string mask are not legal characters - returning null as regexp!");
                    // depends on control dependency: [if], data = [none]
                }
                convertingError = true;
                // depends on control dependency: [if], data = [none]
            }
        }
        return regexpBuffer.toString();
    } }
|
public class class_name {
    /**
     * Loads a custom taglet class from the taglet path, instantiates and
     * initializes it, and registers it under its name (replacing any existing
     * taglet with that name). Failures of any kind are reported through
     * messages.error rather than thrown.
     *
     * @param classname   fully-qualified name of the taglet class to register
     * @param fileManager the file manager used to build the taglet class loader
     * @param tagletPath  path-separator-delimited search path, may be null
     */
    public void addCustomTag(String classname, JavaFileManager fileManager, String tagletPath) {
        try {
            // Configure the TAGLET_PATH location once, from the supplied taglet path.
            if (!fileManager.hasLocation(TAGLET_PATH)) {
                List<File> paths = new ArrayList<>();
                if (tagletPath != null) {
                    for (String pathname : tagletPath.split(File.pathSeparator)) {
                        paths.add(new File(pathname));
                    }
                }
                if (fileManager instanceof StandardJavaFileManager) {
                    ((StandardJavaFileManager) fileManager).setLocation(TAGLET_PATH, paths);
                }
            }
            ClassLoader tagClassLoader = fileManager.getClassLoader(TAGLET_PATH);
            Class<? extends jdk.javadoc.doclet.Taglet> customTagClass =
                    tagClassLoader.loadClass(classname).asSubclass(jdk.javadoc.doclet.Taglet.class);
            jdk.javadoc.doclet.Taglet instance = customTagClass.getConstructor().newInstance();
            instance.init(docEnv, doclet);
            Taglet newLegacy = new UserTaglet(instance);
            String tname = newLegacy.getName();
            // NOTE(review): remove-then-put kept from the original -- in an
            // insertion-ordered map this moves the replacement to the end; confirm intent.
            if (customTags.get(tname) != null) {
                customTags.remove(tname);
            }
            customTags.put(tname, newLegacy);
            messages.notice("doclet.Notice_taglet_registered", classname);
        } catch (Exception exc) {
            messages.error("doclet.Error_taglet_not_registered", exc.getClass().getName(), classname);
        }
    }
}
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    /**
     * Loads, instantiates and registers a custom taglet class from the taglet
     * path, replacing any previously registered taglet with the same name.
     */
    public void addCustomTag(String classname, JavaFileManager fileManager, String tagletPath) {
        try {
            ClassLoader tagClassLoader;
            if (!fileManager.hasLocation(TAGLET_PATH)) {
                List<File> paths = new ArrayList<>();
                if (tagletPath != null) {
                    for (String pathname : tagletPath.split(File.pathSeparator)) {
                        paths.add(new File(pathname)); // depends on control dependency: [for], data = [pathname]
                    }
                }
                if (fileManager instanceof StandardJavaFileManager) {
                    ((StandardJavaFileManager) fileManager).setLocation(TAGLET_PATH, paths); // depends on control dependency: [if], data = [none]
                }
            }
            tagClassLoader = fileManager.getClassLoader(TAGLET_PATH); // depends on control dependency: [try], data = [none]
            Class<? extends jdk.javadoc.doclet.Taglet> customTagClass =
                tagClassLoader.loadClass(classname).asSubclass(jdk.javadoc.doclet.Taglet.class);
            jdk.javadoc.doclet.Taglet instance = customTagClass.getConstructor().newInstance();
            instance.init(docEnv, doclet);
            Taglet newLegacy = new UserTaglet(instance);
            String tname = newLegacy.getName();
            Taglet t = customTags.get(tname);
            if (t != null) {
                customTags.remove(tname);
            }
            customTags.put(tname, newLegacy); // depends on control dependency: [try], data = [none]
            messages.notice("doclet.Notice_taglet_registered", classname); // depends on control dependency: [try], data = [none]
        } catch (Exception exc) {
            messages.error("doclet.Error_taglet_not_registered", exc.getClass().getName(), classname);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Appends a single byte to the last buffer of the given array, growing the
     * array with a freshly allocated buffer when the last one is already full.
     *
     * @param buffers existing buffer array; a null array is logged and null returned
     * @param value   the byte to append
     * @param bnfObj  supplies new-buffer allocation and sizing
     * @return the (possibly expanded) buffer array, or null when buffers was null
     */
    static public WsByteBuffer[] putByte(WsByteBuffer[] buffers, byte value, BNFHeadersImpl bnfObj) {
        if (null == buffers) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Null buffers sent to putByte");
            }
            return null;
        }
        // Work against the last buffer in the array.
        WsByteBuffer target = buffers[buffers.length - 1];
        try {
            target.put(value);
            return buffers;
        } catch (BufferOverflowException boe) {
            // no FFDC required
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "putByte overflow: " + target);
            }
            // Last buffer is full: finish it, write the byte into a new buffer,
            // and append that buffer to the array.
            target.flip();
            WsByteBuffer fresh = bnfObj.allocateBuffer(bnfObj.getOutgoingBufferSize());
            fresh.put(value);
            return WsByteBufferUtils.expandBufferArray(buffers, fresh);
        }
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    static public WsByteBuffer[] putByte(WsByteBuffer[] buffers, byte value, BNFHeadersImpl bnfObj) {
        // verify input buffer information
        if (null == buffers) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Null buffers sent to putByte"); // depends on control dependency: [if], data = [none]
            }
            return null; // depends on control dependency: [if], data = [none]
        }
        // get the last buffer
        WsByteBuffer buffer = buffers[buffers.length - 1];
        try {
            buffer.put(value); // depends on control dependency: [try], data = [none]
        } catch (BufferOverflowException boe) {
            // no FFDC required
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "putByte overflow: " + buffer); // depends on control dependency: [if], data = [none]
            }
            // allocate another buffer and put the byte into it
            buffer.flip();
            buffer = bnfObj.allocateBuffer(bnfObj.getOutgoingBufferSize());
            buffer.put(value);
            return WsByteBufferUtils.expandBufferArray(buffers, buffer);
        } // depends on control dependency: [catch], data = [none]
        return buffers;
    } }
|
public class class_name {
    /**
     * Binds this decoder to the given decompressor and resets decode state.
     * Re-initialization is allowed: any previous state is torn down first.
     *
     * @param pAPEDecompress decompressor to use; must not be null
     * @throws JMACException when pAPEDecompress is null
     */
    public void Initialize(IAPEDecompress pAPEDecompress) {
        // Tear down any previous initialization before starting over.
        if (m_bInitialized) {
            Uninitialize();
        }
        if (pAPEDecompress == null) {
            Uninitialize();
            throw new JMACException("Error Initializing UnMAC");
        }
        m_pAPEDecompress = pAPEDecompress;
        // Force a seek on the first decode by marking no frame as decoded yet.
        m_LastDecodedFrameIndex = -1;
        m_pPrepare = new Prepare();
        m_pAPEDecompressCore = new APEDecompressCore(pAPEDecompress);
        m_bInitialized = true;
        m_wfeInput = m_pAPEDecompress.getApeInfoWaveFormatEx();
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public void Initialize(IAPEDecompress pAPEDecompress) {
        //uninitialize if it is currently initialized
        if (m_bInitialized)
            Uninitialize();
        if (pAPEDecompress == null) {
            Uninitialize(); // depends on control dependency: [if], data = [none]
            throw new JMACException("Error Initializing UnMAC");
        }
        //set the member pointer to the IAPEDecompress class
        m_pAPEDecompress = pAPEDecompress;
        //set the last decode frame to -1 so it forces a seek on start
        m_LastDecodedFrameIndex = -1;
        m_pAPEDecompressCore = new APEDecompressCore(pAPEDecompress);
        m_pPrepare = new Prepare();
        //set the initialized flag to TRUE
        m_bInitialized = true;
        m_wfeInput = m_pAPEDecompress.getApeInfoWaveFormatEx();
    } }
|
public class class_name {
    /**
     * Serializes the given page and writes it at its fixed offset in the
     * backing random-access file, then clears the page's dirty flag.
     * The offset is shifted past the header's reserved pages and computed in
     * long arithmetic to avoid int overflow for large files.
     *
     * @param pageID logical page number
     * @param page   the page to persist
     * @throws RuntimeException wrapping any IOException from the file access
     */
    @Override
    public void writePage(int pageID, P page) {
        try {
            countWrite(); // presumably records one write access for statistics -- confirm
            byte[] array = pageToByteArray(page);
            long offset = ((long) (header.getReservedPages() + pageID)) * (long) pageSize;
            assert offset >= 0 : header.getReservedPages() + " " + pageID + " " + pageSize + " " + offset;
            file.seek(offset);
            file.write(array);
            page.setDirty(false); // persisted successfully -> page is clean again
        }
        catch(IOException e) {
            throw new RuntimeException("Error writing to page file.", e);
        }
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    @Override
    public void writePage(int pageID, P page) {
        try {
            countWrite(); // depends on control dependency: [try], data = [none]
            byte[] array = pageToByteArray(page);
            long offset = ((long) (header.getReservedPages() + pageID)) * (long) pageSize;
            assert offset >= 0 : header.getReservedPages() + " " + pageID + " " + pageSize + " " + offset;
            file.seek(offset); // depends on control dependency: [try], data = [none]
            file.write(array); // depends on control dependency: [try], data = [none]
            page.setDirty(false); // depends on control dependency: [try], data = [none]
        }
        catch(IOException e) {
            throw new RuntimeException("Error writing to page file.", e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Normalizes a request path: backslashes become forward slashes, a leading
     * slash is ensured, and "//", "/./" and "/../" sequences are collapsed.
     * Returns null for a null input or when a "/../" would climb above the root.
     * Note: a trailing "/." or "/.." (with no following slash) is deliberately
     * left untouched, matching the original behavior.
     *
     * @param path raw path to normalize; may be null
     * @return the normalized path, or null as described above
     */
    protected String normalize( String path ) {
        if (path == null) {
            return null;
        }
        if ("/.".equals(path)) {
            return "/";
        }
        String result = path.replace('\\', '/');
        if (!result.startsWith("/")) {
            result = "/" + result;
        }
        // Collapse duplicate slashes.
        int idx;
        while ((idx = result.indexOf("//")) >= 0) {
            result = result.substring(0, idx) + result.substring(idx + 1);
        }
        // Drop "/./" segments.
        while ((idx = result.indexOf("/./")) >= 0) {
            result = result.substring(0, idx) + result.substring(idx + 2);
        }
        // Resolve each "/../" by removing the preceding segment.
        while ((idx = result.indexOf("/../")) >= 0) {
            if (idx == 0) {
                return (null); // trying to go outside our context
            }
            int parent = result.lastIndexOf('/', idx - 1);
            result = result.substring(0, parent) + result.substring(idx + 3);
        }
        return result;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    protected String normalize( String path ) {
        if (path == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        // Create a place for the normalized path
        String normalized = path;
        if (normalized.equals("/.")) {
            return "/"; // depends on control dependency: [if], data = [none]
        }
        // Normalize the slashes and add leading slash if necessary
        if (normalized.indexOf('\\') >= 0) {
            normalized = normalized.replace('\\', '/'); // depends on control dependency: [if], data = [none]
        }
        if (!normalized.startsWith("/")) {
            normalized = "/" + normalized; // depends on control dependency: [if], data = [none]
        }
        // Resolve occurrences of "//" in the normalized path
        while (true) {
            int index = normalized.indexOf("//");
            if (index < 0) {
                break;
            }
            normalized = normalized.substring(0, index) + normalized.substring(index + 1);
        }
        // Resolve occurrences of "/./" in the normalized path
        while (true) {
            int index = normalized.indexOf("/./");
            if (index < 0) {
                break;
            }
            normalized = normalized.substring(0, index) + normalized.substring(index + 2);
        }
        // Resolve occurrences of "/../" in the normalized path
        while (true) {
            int index = normalized.indexOf("/../");
            if (index < 0) {
                break;
            }
            if (index == 0) {
                return (null); // Trying to go outside our context // depends on control dependency: [if], data = [none]
            }
            int index2 = normalized.lastIndexOf('/', index - 1);
            normalized = normalized.substring(0, index2) + normalized.substring(index + 3);
        }
        // Return the normalized path that we have completed
        return (normalized);
    } }
|
public class class_name {
    /**
     * Lists the immediate children of the given folder path.
     *
     * @param path folder to list
     * @return a map of child path to file wrapped in a CFolderContent; null when
     *         nothing exists at path; an empty content object for an empty folder
     * @throws CInvalidFileTypeException when path designates a blob, not a folder
     * @throws CStorageException on storage errors
     */
    public CFolderContent listFolder( CPath path )
        throws CStorageException
    {
        JSONArray array = listObjectsWithinFolder( path, "/" );
        CFile file;
        if ( array == null || array.length() == 0 ) {
            // Empty listing: distinguish empty folder / missing path / blob.
            file = getFile( path );
            if ( file == null ) { // Nothing at that path
                return null;
            }
            if ( file.isBlob() ) { // It is a blob : error !
                throw new CInvalidFileTypeException( path, false );
            }
            return new CFolderContent( Collections.EMPTY_MAP );
        }
        Map<CPath, CFile> ret = new HashMap<CPath, CFile>();
        boolean detailed;
        JSONObject obj;
        for ( int i = 0; i < array.length(); i++ ) {
            obj = array.getJSONObject( i );
            if ( obj.has( "subdir" ) ) {
                // "subdir" marks a non-empty sub-directory. Providers that use
                // directory markers emit a second, detailed entry for the same
                // folder; providers that do not emit only this rough entry.
                file = new CFolder( new CPath( obj.getString( "subdir" ) ) );
                detailed = false;
            } else {
                detailed = true;
                if ( !CONTENT_TYPE_DIRECTORY.equals( obj.getString( "content_type" ) ) ) {
                    file = new CBlob( new CPath( obj.getString( "name" ) ),
                                      obj.getLong( "bytes" ),
                                      obj.getString( "content_type" ),
                                      parseLastModified( obj ),
                                      null ); // we do not have this detailed information
                } else {
                    file = new CFolder( new CPath( obj.getString( "name" ) ),
                                        parseLastModified( obj ),
                                        null ); // we do not have this detailed information
                }
            }
            // A detailed entry is always stored; a rough "subdir" entry must not
            // overwrite an already stored detailed entry for the same child.
            // FIX: the original tested ret.containsKey(path) -- the parent folder,
            // which is never a key of ret -- so rough entries could clobber
            // detailed ones; test the child's own path instead.
            if ( detailed || !ret.containsKey( file.getPath() ) ) {
                ret.put( file.getPath(), file );
            }
        }
        return new CFolderContent( ret );
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    // NOTE(review): "ret.containsKey( path )" below tests the parent folder path,
    // which is never a key of ret -- likely should be file.getPath(); left as-is here.
    public CFolderContent listFolder( CPath path )
        throws CStorageException
    {
        JSONArray array = listObjectsWithinFolder( path, "/" );
        CFile file;
        if ( array == null || array.length() == 0 ) {
            // List is empty ; can be caused by a really empty folder,
            // a non existing folder, or a blob
            // Distinguish the different cases :
            file = getFile( path );
            if ( file == null ) { // Nothing at that path
                return null; // depends on control dependency: [if], data = [none]
            }
            if ( file.isBlob() ) { // It is a blob : error !
                throw new CInvalidFileTypeException( path, false );
            }
            return new CFolderContent( Collections.EMPTY_MAP );
        }
        Map<CPath, CFile> ret = new HashMap<CPath, CFile>();
        boolean detailed;
        JSONObject obj;
        for ( int i = 0; i < array.length(); i++ ) {
            obj = array.getJSONObject( i );
            if ( obj.has( "subdir" ) ) {
                // indicates a non empty sub directory
                // There are two cases here : provider uses directory-markers, or not.
                // - if yes, another entry should exist in json with more detailed informations.
                // - if not, this will be the only entry that indicates a sub folder,
                // so we keep this file, but we'll memorize it only if it is not already present
                // in returned value.
                file = new CFolder( new CPath( obj.getString( "subdir" ) ) );
                detailed = false;
            } else {
                detailed = true;
                if ( !CONTENT_TYPE_DIRECTORY.equals( obj.getString( "content_type" ) ) ) {
                    file = new CBlob( new CPath( obj.getString( "name" ) ),
                                      obj.getLong( "bytes" ),
                                      obj.getString( "content_type" ),
                                      parseLastModified( obj ),
                                      null ); // we do not have this detailed information // depends on control dependency: [if], data = [none]
                } else {
                    file = new CFolder( new CPath( obj.getString( "name" ) ),
                                        parseLastModified( obj ),
                                        null ); // we do not have this detailed information // depends on control dependency: [if], data = [none]
                }
            }
            if ( detailed || !ret.containsKey( path ) ) {
                // If we got a detailed file, we always store it
                // If we got only rough description, we keep it only if no detailed info already exists
                ret.put( file.getPath(), file );
            }
        }
        return new CFolderContent( ret );
    } }
|
public class class_name {
    /**
     * Walks the edit-distance graph from firstVertex to lastVertex, at each step
     * preferring a diagonal edge (both indices advance), then a vertical edge
     * (left index advances: delete), then a horizontal edge (insert), and sums
     * the traversed weights. Diagonal steps are charged the fixed
     * weightSubstitute instead of the edge's own weight.
     * NOTE(review): if a vertex had no horizontal edge and no diagonal/vertical
     * one, edgeToWalk would be null and dereferencing it would NPE -- presumably
     * the graph always offers a step toward lastVertex; confirm.
     *
     * @return the accumulated path length of this greedy traversal
     */
    public double getWorstCaseRetainStructure() {
        double pathLength = 0;
        Edge verticalEdge;
        Edge horizontalEdge;
        Edge diagonalEdge;
        GraphVertexTuple vertex = firstVertex;
        while (vertex != lastVertex) {
            verticalEdge = null;
            horizontalEdge = null;
            diagonalEdge = null;
            List adjacentEdges = editDistanceGraph.outgoingEdgesOf(vertex);
            // in this loop gather all available edges outgoing from a vertex
            // and
            // assign them to the corresponding variable '...Edge'.
            for (Object o : adjacentEdges) {
                Edge edge = (Edge) o;
                GraphVertexTuple oppositeVertex = (GraphVertexTuple) edge.oppositeVertex(vertex);
                int left = vertex.getLeft();
                int right = vertex.getRight();
                int oppositeLeft = oppositeVertex.getLeft();
                int oppositeRight = oppositeVertex.getRight();
                // first check if edge is a diagonal
                if ((oppositeLeft == left + 1) && (oppositeRight == right + 1)) {
                    diagonalEdge = edge;
                    break;
                }
                // then check if this edge is a vertical (which means to delete
                // a node)
                else if ((oppositeLeft == left + 1) && (oppositeRight == right)) {
                    verticalEdge = edge;
                } else
                    horizontalEdge = edge; // it is a horizontal edge (which
                // means to add a node)
            }
            // Pick the preferred step: diagonal > vertical > horizontal.
            Edge edgeToWalk;
            double weight = 0;
            if (diagonalEdge != null) {
                edgeToWalk = diagonalEdge;
                weight = weightSubstitute;
            } else if (verticalEdge != null) {
                edgeToWalk = verticalEdge;
                weight = edgeToWalk.getWeight();
            } else {
                edgeToWalk = horizontalEdge;
                weight = edgeToWalk.getWeight();
            }
            pathLength += weight;
            vertex = (GraphVertexTuple) edgeToWalk.oppositeVertex(vertex);
        }
        return pathLength;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public double getWorstCaseRetainStructure() {
        double pathLength = 0;
        Edge verticalEdge;
        Edge horizontalEdge;
        Edge diagonalEdge;
        GraphVertexTuple vertex = firstVertex;
        while (vertex != lastVertex) {
            verticalEdge = null;
            // depends on control dependency: [while], data = [none]
            horizontalEdge = null;
            // depends on control dependency: [while], data = [none]
            diagonalEdge = null;
            // depends on control dependency: [while], data = [none]
            List adjacentEdges = editDistanceGraph.outgoingEdgesOf(vertex);
            // in this loop gather all available edges outgoing from a vertex
            // and
            // assign them to the corresponding variable '...Edge'.
            for (Object o : adjacentEdges) {
                Edge edge = (Edge) o;
                GraphVertexTuple oppositeVertex = (GraphVertexTuple) edge.oppositeVertex(vertex);
                int left = vertex.getLeft();
                int right = vertex.getRight();
                int oppositeLeft = oppositeVertex.getLeft();
                int oppositeRight = oppositeVertex.getRight();
                // first check if edge is a diagonal
                if ((oppositeLeft == left + 1) && (oppositeRight == right + 1)) {
                    diagonalEdge = edge;
                    // depends on control dependency: [if], data = [none]
                    break;
                }
                // then check if this edge is a vertical (which means to delete
                // a node)
                else if ((oppositeLeft == left + 1) && (oppositeRight == right)) {
                    verticalEdge = edge;
                    // depends on control dependency: [if], data = [none]
                } else
                    horizontalEdge = edge; // it is a horizontal edge (which
                // means to add a node)
            }
            Edge edgeToWalk;
            double weight = 0;
            if (diagonalEdge != null) {
                edgeToWalk = diagonalEdge;
                // depends on control dependency: [if], data = [none]
                weight = weightSubstitute;
                // depends on control dependency: [if], data = [none]
            } else if (verticalEdge != null) {
                edgeToWalk = verticalEdge;
                // depends on control dependency: [if], data = [none]
                weight = edgeToWalk.getWeight();
                // depends on control dependency: [if], data = [none]
            } else {
                edgeToWalk = horizontalEdge;
                // depends on control dependency: [if], data = [none]
                weight = edgeToWalk.getWeight();
                // depends on control dependency: [if], data = [none]
            }
            pathLength += weight;
            // depends on control dependency: [while], data = [none]
            vertex = (GraphVertexTuple) edgeToWalk.oppositeVertex(vertex);
            // depends on control dependency: [while], data = [(vertex]
        }
        return pathLength;
    } }
|
public class class_name {
    /**
     * Writes an HTML summary ("summary.win.ties.losses.html" in the given
     * directory) with pairwise wins/losses/ties per performance measure between
     * the experiment's algorithms, plus a per-measure row average.
     *
     * @param path output directory prefix (the file name is appended to it)
     */
    public void computeWinsTiesLossesHTML(String path) {
        List<Algorithm> alg = this.streams.get(0).algorithm;
        int algorithmSize = this.streams.get(0).algorithm.size();
        // FIX: build the page in a StringBuilder instead of repeated String +=
        // (the original was accidentally quadratic in the table size).
        StringBuilder html = new StringBuilder();
        html.append("<TABLE BORDER=1 WIDTH=\"100%\" ALIGN=CENTER>\n");
        html.append("<CAPTION> Experiment");
        html.append("<TR> <TD ROWSPAN = 2>Algorithm <TD TD ROWSPAN = 2>PM");
        // Header row: one three-column group per opponent algorithm (all but the first).
        for (int i = 1; i < algorithmSize; i++) {
            html.append("<TD COLSPAN = 3>").append(alg.get(i).name);
        }
        html.append("<TD>AVG");
        html.append("<TR>");
        for (int i = 1; i < algorithmSize; i++) {
            html.append("<TD>").append("Wins").append("<TD>").append("Losses").append("<TD>").append("Ties");
        }
        for (int i = 0; i < algorithmSize; i++) {
            html.append("<TR><TD ROWSPAN = ").append(alg.get(i).measures.size()).append(">").append(alg.get(i).name);
            List<Measure> measureRow[] = alg.get(i).getMeasuresPerData(streams);
            int cont = 0;
            while (cont != this.streams.get(0).algorithm.get(i).measures.size()) {
                String name = alg.get(i).measures.get(cont).getName();
                html.append("<TD>").append(name);
                double sum = 0.0;
                for (int j = 1; j < algorithmSize; j++) {
                    List<Measure> measureCol[] = alg.get(j).getMeasuresPerData(streams);
                    int win = 0, losses = 0, ties = 0;
                    for (int k = 0; k < measureCol.length; k++) {
                        double alg1 = measureRow[k].get(cont).getValue();
                        double alg2 = measureCol[k].get(cont).getValue();
                        if (j == 1) {
                            // Row average accumulated once per dataset (first opponent pass only).
                            sum += measureRow[k].get(cont).getValue();
                        }
                        if (measureRow[k].get(cont).isType()) {
                            // "Type" measures compare rounded values; higher is better.
                            if (Algorithm.Round(alg1) > Algorithm.Round(alg2)) {
                                win++;
                            } else if (Algorithm.Round(alg1) < Algorithm.Round(alg2)) {
                                losses++;
                            } else {
                                ties++;
                            }
                        } else {
                            // Otherwise lower raw values are better.
                            if (alg1 < alg2) {
                                win++;
                            } else if (alg1 > alg2) {
                                losses++;
                            } else {
                                ties++;
                            }
                        }
                    }
                    // Only upper-triangular comparisons are printed; the rest stay
                    // blank to avoid duplicating symmetric results.
                    if (i < j) {
                        html.append("<TD>").append(win);
                        html.append("<TD>").append(losses);
                        html.append("<TD>").append(ties);
                    } else {
                        html.append("<TD> ");
                        html.append("<TD> ");
                        html.append("<TD> ");
                    }
                }
                sum = (double) sum / measureRow.length;
                html.append("<TD>").append(Algorithm.format(sum));
                html.append("<TR>");
                cont++;
            }
        }
        html.append("</TABLE>");
        // FIX: try-with-resources so the writer is closed even when write() fails
        // (the original leaked the BufferedWriter on IOException).
        try (BufferedWriter out = new BufferedWriter(new FileWriter(path + "summary.win.ties.losses.html"))) {
            out.write(html.toString());
        } catch (IOException e) {
            System.out.println("Error saving summary.win.ties.losses.html");
        }
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public void computeWinsTiesLossesHTML(String path) {
        List<Algorithm> alg = this.streams.get(0).algorithm;
        int algorithmSize = this.streams.get(0).algorithm.size();
        String tablaSalida = "";
        tablaSalida += "<TABLE BORDER=1 WIDTH=\"100%\" ALIGN=CENTER>\n";
        tablaSalida += "<CAPTION> Experiment";
        tablaSalida += "<TR> <TD ROWSPAN = 2>Algorithm <TD TD ROWSPAN = 2>PM";
        for (int i = 1; i < algorithmSize; i++) {
            tablaSalida += "<TD COLSPAN = 3>" + alg.get(i).name; // depends on control dependency: [for], data = [i]
        }
        tablaSalida += "<TD>AVG";
        tablaSalida += "<TR>";
        for (int i = 1; i < algorithmSize; i++) {
            tablaSalida += "<TD>" + "Wins" + "<TD>" + "Losses" + "<TD>" + "Ties"; // depends on control dependency: [for], data = [none]
        }
        for (int i = 0; i < algorithmSize; i++) {
            tablaSalida += "<TR><TD ROWSPAN = " + alg.get(i).measures.size() + ">" + alg.get(i).name; // depends on control dependency: [for], data = [i]
            List<Measure> measureRow[] = alg.get(i).getMeasuresPerData(streams);
            int cont = 0;
            while (cont != this.streams.get(0).algorithm.get(i).measures.size()) {
                //String name = measureRow[i].get(cont).getName();
                String name = alg.get(i).measures.get(cont).getName();
                tablaSalida += "<TD>" + name; // depends on control dependency: [while], data = [none]
                double sum = 0.0;
                for (int j = 1; j < algorithmSize; j++) {
                    List<Measure> measureCol[] = alg.get(j).getMeasuresPerData(streams);
                    int win = 0, losses = 0, ties = 0;
                    for (int k = 0; k < measureCol.length; k++) {
                        double alg1 = measureRow[k].get(cont).getValue();
                        double alg2 = measureCol[k].get(cont).getValue();
                        if (j == 1) {
                            sum += measureRow[k].get(cont).getValue(); // depends on control dependency: [if], data = [none]
                        }
                        if (measureRow[k].get(cont).isType()) {
                            if (Algorithm.Round(alg1) > Algorithm.Round(alg2)) {
                                win++; // depends on control dependency: [if], data = [none]
                            } else if (Algorithm.Round(alg1) < Algorithm.Round(alg2)) {
                                losses++; // depends on control dependency: [if], data = [none]
                            } else {
                                ties++; // depends on control dependency: [if], data = [none]
                            }
                        } else {
                            if (alg1 < alg2) {
                                win++; // depends on control dependency: [if], data = [none]
                            } else if (alg1 > alg2) {
                                losses++; // depends on control dependency: [if], data = [none]
                            } else {
                                ties++; // depends on control dependency: [if], data = [none]
                            }
                        }
                    }
                    if (i < j) {
                        tablaSalida += "<TD>" + win; // depends on control dependency: [if], data = [none]
                        tablaSalida += "<TD>" + losses; // depends on control dependency: [if], data = [none]
                        tablaSalida += "<TD>" + ties; // depends on control dependency: [if], data = [none]
                    } else {
                        tablaSalida += "<TD> ";
                        tablaSalida += "<TD> ";
                        tablaSalida += "<TD> "; // depends on control dependency: [if], data = [none]
                    }
                }
                sum = (double) sum / measureRow.length; // depends on control dependency: [while], data = [none]
                tablaSalida += "<TD>" + Algorithm.format(sum); // depends on control dependency: [while], data = [none]
                tablaSalida += "<TR>";
                cont++; // depends on control dependency: [while], data = [none]
            }
        }
        tablaSalida += "</TABLE>";
        // PrintStream salida = null;
        // try {
        // salida = new PrintStream(new BufferedOutputStream(new FileOutputStream(path + "summary.win.ties.losses.html")));
        // } catch (FileNotFoundException ex) {
        // Logger.getLogger(Summary.class.getName()).log(Level.SEVERE, null, ex);
        // }
        // System.setOut(salida);
        // System.out.println(tablaSalida);
        // salida.close();
        try {
            BufferedWriter out = new BufferedWriter(new FileWriter(path + "summary.win.ties.losses.html"));
            out.write(tablaSalida); //Replace with the string // depends on control dependency: [try], data = [none]
            //you are trying to write
            out.close(); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            System.out.println("Error saving summary.win.ties.losses.html");
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Turns this request into a POST: the collected parameters are moved into a
     * form-style body ("name=value" pairs joined with '&') and then cleared.
     * NOTE(review): names/values are concatenated as-is; presumably they are
     * already URL-encoded by the caller -- confirm before reusing elsewhere.
     */
    public void convertToPost() {
        this.method = "POST";
        StringBuilder form = new StringBuilder();
        for (Parameter param : parameters) {
            if (form.length() > 0) {
                form.append("&");
            }
            form.append(param.getName()).append("=").append(param.getValue());
        }
        this.body = form.toString();
        this.parameters = new ArrayList<>();
        this.sortedParams = null;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public void convertToPost() {
        this.method = "POST";
        //add parameters, if available
        StringBuilder bodyBuilder = new StringBuilder();
        boolean first = true;
        for (Parameter param : parameters) {
            if (!first) {
                bodyBuilder.append("&"); // depends on control dependency: [if], data = [none]
            }
            bodyBuilder.append(param.getName()); // depends on control dependency: [for], data = [param]
            bodyBuilder.append("="); // depends on control dependency: [for], data = [none]
            bodyBuilder.append(param.getValue()); // depends on control dependency: [for], data = [param]
            first = false; // depends on control dependency: [for], data = [none]
        }
        this.body = bodyBuilder.toString();
        this.parameters = new ArrayList<>();
        this.sortedParams = null;
    } }
|
public class class_name {
    /**
     * Unwraps the payload of every record, preserving order.
     *
     * @param records records to unwrap
     * @return a new mutable list containing each record's payload
     */
    public static <P> List<P> extractPayloads(final List<? extends Record<P>> records) {
        final List<P> result = new ArrayList<>(records.size());
        for (final Record<P> item : records) {
            result.add(item.getPayload());
        }
        return result;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public static <P> List<P> extractPayloads(final List<? extends Record<P>> records) {
        List<P> payloads = new ArrayList<>();
        for (Record<P> record : records) {
            payloads.add(record.getPayload()); // depends on control dependency: [for], data = [record]
        }
        return payloads;
    } }
|
public class class_name {
    /**
     * Encodes and signs the transaction with this object's credentials,
     * including the chain id in the signature when one is configured.
     *
     * @param rawTransaction transaction to sign
     * @return hex string of the signed message
     */
    public String sign(RawTransaction rawTransaction) {
        final byte[] signed = (chainId > ChainId.NONE)
                ? TransactionEncoder.signMessage(rawTransaction, chainId, credentials)
                : TransactionEncoder.signMessage(rawTransaction, credentials);
        return Numeric.toHexString(signed);
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public String sign(RawTransaction rawTransaction) {
        byte[] signedMessage;
        if (chainId > ChainId.NONE) {
            signedMessage = TransactionEncoder.signMessage(rawTransaction, chainId, credentials); // depends on control dependency: [if], data = [none]
        } else {
            signedMessage = TransactionEncoder.signMessage(rawTransaction, credentials); // depends on control dependency: [if], data = [none]
        }
        return Numeric.toHexString(signedMessage);
    } }
|
public class class_name {
    /**
     * Iterates the result rows: over the materialized row array when present,
     * otherwise by lazily converting the protobuf rows.
     *
     * @return an iterator over the rows
     */
    public Iterator<Row> iterator()
    {
        if (this.rows == null)
        {
            return ConvertibleIteratorUtils.iterateAsRow(this.pbRows.iterator(), this.pbColumnDescriptions);
        }
        return Arrays.asList(this.rows).iterator();
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public Iterator<Row> iterator()
    {
        if (this.rows != null)
        {
            return Arrays.asList(this.rows).iterator(); // depends on control dependency: [if], data = [(this.rows]
        }
        else
        {
            return ConvertibleIteratorUtils.iterateAsRow(this.pbRows.iterator(), this.pbColumnDescriptions); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Parses "scope::filename" option values into a map from file name to
     * scope name; a value without the "::" separator maps its file name to a
     * null scope.
     *
     * @param optionValues raw option strings
     * @return map of file name to scope name (null when no scope was given)
     */
    private Map<String, String> parseResources(List<String> optionValues) {
        Map<String, String> resources = new HashMap<>();
        for (String file : optionValues) {
            // FIX: split at the first "::" only, so a file name that itself
            // contains "::" is no longer silently truncated to its first part.
            String[] parts = file.split("::", 2);
            String fileName;
            String scopeName = null;
            if (parts.length == 2) {
                scopeName = parts[0];
                fileName = parts[1];
            } else {
                fileName = parts[0];
            }
            resources.put(fileName, scopeName);
        }
        return resources;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    private Map<String, String> parseResources(List<String> optionValues) {
        Map<String, String> resources = new HashMap<>();
        for (String file : optionValues) {
            String[] parts = file.split("::");
            String fileName;
            String scopeName = null;
            if (parts.length == 2) {
                scopeName = parts[0]; // depends on control dependency: [if], data = [none]
                fileName = parts[1]; // depends on control dependency: [if], data = [none]
            } else {
                fileName = parts[0]; // depends on control dependency: [if], data = [none]
            }
            resources.put(fileName, scopeName); // depends on control dependency: [for], data = [file]
        }
        return resources;
    } }
|
public class class_name {
    /**
     * Tests whether the given version lies inside this range, honoring the
     * configured inclusive/exclusive endpoints; an absent endpoint (null) is
     * treated as unbounded on that side.
     *
     * @param version the version to test
     * @return true when version is within the range
     */
    public final boolean includes(Version version) {
        if (minimum != null) {
            final int cmpMin = minimum.compareTo(version);
            // Below the lower bound, or on an excluded lower endpoint.
            if (cmpMin > 0 || (cmpMin == 0 && !minimumIncluded)) {
                return false;
            }
        }
        if (maximum != null) {
            final int cmpMax = maximum.compareTo(version);
            // Above the upper bound, or on an excluded upper endpoint.
            if (cmpMax < 0 || (cmpMax == 0 && !maximumIncluded)) {
                return false;
            }
        }
        return true;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public final boolean includes(Version version) {
        if (minimum != null) {
            int minimumComparison = minimum.compareTo(version);
            if (minimumComparison > 0) {
                return false; // depends on control dependency: [if], data = [none]
            }
            if ((!minimumIncluded) && (minimumComparison == 0)) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        if (maximum != null) {
            int maximumComparison = maximum.compareTo(version);
            if (maximumComparison < 0) {
                return false; // depends on control dependency: [if], data = [none]
            }
            if ((!maximumIncluded) && (maximumComparison == 0)) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        return true;
    } }
|
public class class_name {
    /**
     * Appends the given egress permissions to this security group's list,
     * creating the backing list on first use (sized to the incoming array).
     *
     * @param ipPermissionsEgress permissions to add
     * @return this SecurityGroup, to allow method chaining
     */
    public SecurityGroup withIpPermissionsEgress(IpPermission... ipPermissionsEgress) {
        if (this.ipPermissionsEgress == null) {
            setIpPermissionsEgress(new com.amazonaws.internal.SdkInternalList<IpPermission>(ipPermissionsEgress.length));
        }
        for (int idx = 0; idx < ipPermissionsEgress.length; idx++) {
            this.ipPermissionsEgress.add(ipPermissionsEgress[idx]);
        }
        return this;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public SecurityGroup withIpPermissionsEgress(IpPermission... ipPermissionsEgress) {
        if (this.ipPermissionsEgress == null) {
            setIpPermissionsEgress(new com.amazonaws.internal.SdkInternalList<IpPermission>(ipPermissionsEgress.length)); // depends on control dependency: [if], data = [none]
        }
        for (IpPermission ele : ipPermissionsEgress) {
            this.ipPermissionsEgress.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Reports a bad-identifier parse error and synthesizes a unique placeholder
     * name. When the next token is not punctuation it is consumed and folded
     * into the placeholder; otherwise no token is consumed.
     *
     * @param expected description of what was expected, for the error report
     * @return a unique "BAD_IDENTIFIER…" or "NO_IDENTIFIER…" placeholder
     */
    @Override
    protected String badIdentifier(String expected) {
        error(expected);
        final String suffix = "@" + (nextId++);
        if (peekPunctuation()) {
            return "NO_IDENTIFIER" + suffix;
        }
        final Token consumed = getNextToken();
        return "BAD_IDENTIFIER" + "_" + friendlyName(consumed) + suffix;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    @Override
    protected String badIdentifier(String expected) {
        error(expected);
        final String id = "@" + (nextId++);
        if (!peekPunctuation()) {
            final Token token = getNextToken();
            return "BAD_IDENTIFIER" + "_" + friendlyName(token) + id; // depends on control dependency: [if], data = [none]
        } else {
            return "NO_IDENTIFIER" + id; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Registers a penalizing validation under the given key and, when the
     * validation failed, folds its penalty into the penalized value.
     *
     * @param key identifier under which the validation is stored
     * @param penalizingValidation validation outcome (with penalty) to record
     */
    public void addPenalizingValidation(Object key, PenalizingValidation penalizingValidation){
        initMapOnce();
        penalties.put(key, penalizingValidation);
        if (penalizingValidation.passed()) {
            return; // no penalty to apply
        }
        assignedPenalties = true;
        final double penalty = penalizingValidation.getPenalty();
        // A penalty worsens the value: add when minimizing, subtract when maximizing.
        penalizedValue += minimizing ? penalty : -penalty;
    } }
|
public class class_name {
    // NOTE(review): dependency-annotated duplicate of the preceding block;
    // code kept byte-identical, annotation comments preserved as data.
    public void addPenalizingValidation(Object key, PenalizingValidation penalizingValidation){
        initMapOnce();
        penalties.put(key, penalizingValidation);
        // update penalized value
        if(!penalizingValidation.passed()){
            assignedPenalties = true; // depends on control dependency: [if], data = [none]
            double p = penalizingValidation.getPenalty();
            penalizedValue += minimizing ? p : -p; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Resolves the effective API version for an item. A non-empty version
     * carried by the item itself (when it implements HasMetadata) wins;
     * otherwise the supplied fallback is used; otherwise null. Results are
     * normalized with trimVersion.
     *
     * @param item candidate resource, possibly a HasMetadata
     * @param apiVersion fallback version, may be null or empty
     * @return the trimmed version, or null when none is available
     */
    public static <T> String apiVersion(T item, String apiVersion) {
        if (item instanceof HasMetadata) {
            final String ownVersion = ((HasMetadata) item).getApiVersion();
            if (Utils.isNotNullOrEmpty(ownVersion)) {
                return trimVersion(ownVersion);
            }
        }
        if (apiVersion != null && !apiVersion.isEmpty()) {
            return trimVersion(apiVersion);
        }
        return null;
    } }
|
public class class_name {
// Resolves the effective API version: prefers a non-empty version carried by
// the item itself (when it implements HasMetadata), then the supplied fallback,
// and returns null when neither is available. Versions pass through trimVersion.
// NOTE(review): the "// depends on control dependency" trailers are machine-generated
// dependence annotations, not human documentation; left intact.
public static <T> String apiVersion(T item, String apiVersion) {
if (item instanceof HasMetadata && Utils.isNotNullOrEmpty(((HasMetadata) item).getApiVersion())) {
return trimVersion(((HasMetadata) item).getApiVersion()); // depends on control dependency: [if], data = [none]
} else if (apiVersion != null && !apiVersion.isEmpty()) {
return trimVersion(apiVersion); // depends on control dependency: [if], data = [(apiVersion]
}
return null;
} }
|
public class class_name {
    /**
     * Computes the length of every range, preserving iteration order.
     *
     * @param ranges ranges to measure; must not be null
     * @return a new list holding length(range) for each input range
     */
    public static List<Long> lengths(final Iterable<Range<Long>> ranges) {
        checkNotNull(ranges);
        final List<Long> result = new ArrayList<Long>();
        for (final Range<Long> range : ranges) {
            result.add(length(range));
        }
        return result;
    } }
|
public class class_name {
// Maps each range to its length (via the length helper), preserving iteration
// order. Throws from checkNotNull if the ranges iterable itself is null.
// NOTE(review): the "// depends on control dependency" trailer is a machine-generated
// dependence annotation, not human documentation; left intact.
public static List<Long> lengths(final Iterable<Range<Long>> ranges) {
checkNotNull(ranges);
List<Long> lengths = new ArrayList<Long>();
for (Range<Long> range : ranges) {
lengths.add(length(range)); // depends on control dependency: [for], data = [range]
}
return lengths;
} }
|
public class class_name {
    /**
     * Unregisters a default configuration resource by name. When the resource
     * was actually registered, every known Configuration that loads defaults is
     * asked to reload so the removal takes effect. Synchronized: guards the
     * shared default-resource list and registry.
     *
     * @param name resource name to remove
     */
    public static synchronized void removeDefaultResource(String name) {
        if (!defaultResources.contains(name)) {
            return; // nothing registered under this name
        }
        defaultResources.remove(name);
        for (Configuration conf : REGISTRY.keySet()) {
            if (conf.loadDefaults) {
                conf.reloadConfiguration();
            }
        }
    } }
|
public class class_name {
// Unregisters a default configuration resource by name; when it was present,
// every registered Configuration that loads defaults is told to reload so the
// removal takes effect. Synchronized to guard the shared registry state.
// NOTE(review): the "// depends on control dependency" trailers are machine-generated
// dependence annotations, not human documentation; left intact.
public static synchronized void removeDefaultResource(String name) {
if(defaultResources.contains(name)) {
defaultResources.remove(name); // depends on control dependency: [if], data = [none]
for(Configuration conf : REGISTRY.keySet()) {
if(conf.loadDefaults) {
conf.reloadConfiguration(); // depends on control dependency: [if], data = [none]
}
}
}
} }
|
public class class_name {
// Finalizes the hash and returns the 20-byte digest. The shape (0x80 padding
// marker, zero fill, 64-bit bit-length trailer, five 32-bit words h0..h4)
// matches SHA-1-style Merkle-Damgard finalization — presumably SHA-1; confirm
// against the transform() implementation.
public byte[] digest() {
byte[] result = new byte[20];
// Append the mandatory 0x80 padding marker after the message bytes.
finalBuffer.put((byte)0x80);
if(finalBuffer.remaining() < 8) {
// Not enough room in this block for the 8-byte length field: zero-fill,
// process the block, then start a fresh one.
while(finalBuffer.remaining() > 0) {
finalBuffer.put((byte)0);
}
finalBuffer.position(0);
transform(finalBuffer);
finalBuffer.position(0);
}
// Zero-fill until exactly 8 bytes remain for the length field
// (one byte per iteration, loop exits at remaining() == 8).
while(finalBuffer.remaining() > 8) {
finalBuffer.put((byte)0);
}
// Message length is recorded in BITS (bytes << 3).
finalBuffer.putLong(length << 3);
finalBuffer.position(0);
transform(finalBuffer);
// Reuse the buffer to serialize the five 32-bit state words as the digest.
finalBuffer.position(0);
finalBuffer.putInt(h0);
finalBuffer.putInt(h1);
finalBuffer.putInt(h2);
finalBuffer.putInt(h3);
finalBuffer.putInt(h4);
finalBuffer.position(0);
for(int i = 0 ; i < 20 ; i++) {
result[i] = finalBuffer.get();
}
return result;
} }
|
public class class_name {
// Finalizes the hash and returns the 20-byte digest (0x80 padding marker, zero
// fill, 64-bit bit-length trailer, five 32-bit words h0..h4) — SHA-1-style
// Merkle-Damgard finalization; presumably SHA-1, confirm against transform().
// NOTE(review): the "// depends on control dependency" trailers are machine-generated
// dependence annotations, not human documentation; left intact.
public byte[] digest() {
byte[] result = new byte[20];
// Mandatory 0x80 padding marker after the message bytes.
finalBuffer.put((byte)0x80);
if(finalBuffer.remaining() < 8) {
// No room left for the 8-byte length field: zero-fill, process this
// block, then start a fresh one.
while(finalBuffer.remaining() > 0) {
finalBuffer.put((byte)0); // depends on control dependency: [while], data = [0)]
}
finalBuffer.position(0); // depends on control dependency: [if], data = [none]
transform(finalBuffer); // depends on control dependency: [if], data = [none]
finalBuffer.position(0); // depends on control dependency: [if], data = [none]
}
// Zero-fill until exactly 8 bytes remain for the length field.
while(finalBuffer.remaining() > 8) {
finalBuffer.put((byte)0); // depends on control dependency: [while], data = [none]
}
// Message length is recorded in BITS (bytes << 3).
finalBuffer.putLong(length << 3);
finalBuffer.position(0);
transform(finalBuffer);
// Reuse the buffer to serialize the five 32-bit state words as the digest.
finalBuffer.position(0);
finalBuffer.putInt(h0);
finalBuffer.putInt(h1);
finalBuffer.putInt(h2);
finalBuffer.putInt(h3);
finalBuffer.putInt(h4);
finalBuffer.position(0);
for(int i = 0 ; i < 20 ; i++) {
result[i] = finalBuffer.get(); // depends on control dependency: [for], data = [i]
}
return result;
} }
|
public class class_name {
    /**
     * Lists the datastreams of an object, either as full profiles (XML) or as a
     * plain listing rendered as XML or HTML depending on the requested format.
     * Exceptions are translated into an error response via handleException.
     *
     * @param pid object identifier
     * @param dateTime optional as-of timestamp; null/unparseable means "now"
     * @param format requested response format (defaults to HTML)
     * @param flash whether error responses should be Flash-friendly
     * @param profiles when true, emit full datastream profiles as XML
     * @param dsState optional datastream state filter (profiles mode only)
     * @param validateChecksum whether profile serialization validates checksums
     * @return the HTTP response carrying the serialized listing
     */
    @GET
    public Response listDatastreams(@PathParam(RestParam.PID) String pid,
                                    @QueryParam(RestParam.AS_OF_DATE_TIME) String dateTime,
                                    @QueryParam(RestParam.FORMAT) @DefaultValue(HTML) String format,
                                    @QueryParam(RestParam.FLASH) @DefaultValue("false") boolean flash,
                                    @QueryParam(RestParam.PROFILES) @DefaultValue("false") boolean profiles,
                                    @QueryParam(RestParam.DS_STATE) String dsState,
                                    @QueryParam(RestParam.VALIDATE_CHECKSUM) @DefaultValue("false") boolean validateChecksum
                                    ) {
        try {
            final Date asOfDateTime = DateUtility.parseDateOrNull(dateTime);
            final Context context = getContext();
            MediaType mediaType = RestHelper.getContentType(format);
            Reader body;
            if (profiles) {
                // Full datastream profiles are always served as XML.
                mediaType = MediaType.TEXT_XML_TYPE;
                final Datastream[] streams = m_management.getDatastreams(context, pid, asOfDateTime, dsState);
                final ReadableCharArrayWriter profileXml = new ReadableCharArrayWriter(2048);
                getSerializer(context).datastreamProfilesToXML(pid, streams, asOfDateTime, validateChecksum, profileXml);
                profileXml.close();
                body = profileXml.toReader();
            } else {
                mediaType = RestHelper.getContentType(format);
                final DatastreamDef[] defs =
                        m_access.listDatastreams(context, pid, asOfDateTime);
                ReadableCharArrayWriter listingXml = new ReadableCharArrayWriter(1024);
                getSerializer(context).dataStreamsToXML(pid, asOfDateTime, defs, listingXml);
                listingXml.close();
                if (TEXT_HTML.isCompatible(mediaType)) {
                    // Render the raw XML listing into HTML via XSLT.
                    final Reader rawXml = listingXml.toReader();
                    listingXml = new ReadableCharArrayWriter(1024);
                    transform(rawXml, "access/listDatastreams.xslt", listingXml);
                    listingXml.close();
                }
                body = listingXml.toReader();
            }
            return Response.ok(body, mediaType).build();
        } catch (Exception ex) {
            return handleException(ex, flash);
        }
    } }
|
public class class_name {
// Lists the datastreams of an object, either as full profiles (XML) or as a
// plain listing rendered as XML or HTML depending on the requested format.
// Exceptions are translated into an error response via handleException.
// NOTE(review): the "// depends on control dependency" trailers are machine-generated
// dependence annotations, not human documentation; left intact.
@GET
public Response listDatastreams(@PathParam(RestParam.PID) String pid,
@QueryParam(RestParam.AS_OF_DATE_TIME) String dateTime,
@QueryParam(RestParam.FORMAT) @DefaultValue(HTML) String format,
@QueryParam(RestParam.FLASH) @DefaultValue("false") boolean flash,
@QueryParam(RestParam.PROFILES) @DefaultValue("false") boolean profiles,
@QueryParam(RestParam.DS_STATE) String dsState,
@QueryParam(RestParam.VALIDATE_CHECKSUM) @DefaultValue("false") boolean validateChecksum
) {
try {
Date asOfDateTime = DateUtility.parseDateOrNull(dateTime);
Context context = getContext();
MediaType mime = RestHelper.getContentType(format);
Reader output;
if (profiles){
// Full datastream profiles are always served as XML.
mime=MediaType.TEXT_XML_TYPE; // depends on control dependency: [if], data = [none]
final Datastream[] datastreams = m_management.getDatastreams(context, pid, asOfDateTime, dsState);
ReadableCharArrayWriter xml = new ReadableCharArrayWriter(2048);
getSerializer(context).datastreamProfilesToXML(pid, datastreams, asOfDateTime, validateChecksum, xml); // depends on control dependency: [if], data = [none]
xml.close(); // depends on control dependency: [if], data = [none]
output = xml.toReader(); // depends on control dependency: [if], data = [none]
} else {
mime = RestHelper.getContentType(format); // depends on control dependency: [if], data = [none]
DatastreamDef[] dsDefs =
m_access.listDatastreams(context, pid, asOfDateTime);
ReadableCharArrayWriter xml = new ReadableCharArrayWriter(1024);
getSerializer(context).dataStreamsToXML(pid, asOfDateTime, dsDefs, xml); // depends on control dependency: [if], data = [none]
xml.close(); // depends on control dependency: [if], data = [none]
if (TEXT_HTML.isCompatible(mime)) {
// Render the raw XML listing into HTML via XSLT.
Reader reader = xml.toReader();
xml = new ReadableCharArrayWriter(1024); // depends on control dependency: [if], data = [none]
transform(reader, "access/listDatastreams.xslt", xml); // depends on control dependency: [if], data = [none]
xml.close(); // depends on control dependency: [if], data = [none]
}
output = xml.toReader(); // depends on control dependency: [if], data = [none]
}
return Response.ok(output, mime).build(); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
return handleException(ex, flash);
} // depends on control dependency: [catch], data = [none]
} }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.