text
stringlengths 7
1.01M
|
|---|
package net.sf.jabref.gui.actions;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.Set;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.InputMap;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
import javax.swing.undo.UndoableEdit;
import net.sf.jabref.Globals;
import net.sf.jabref.gui.BasePanel;
import net.sf.jabref.gui.JabRefFrame;
import net.sf.jabref.gui.keyboard.KeyBinding;
import net.sf.jabref.gui.undo.NamedCompound;
import net.sf.jabref.gui.undo.UndoableFieldChange;
import net.sf.jabref.logic.l10n.Localization;
import net.sf.jabref.model.entry.BibEntry;
import com.jgoodies.forms.builder.ButtonBarBuilder;
import com.jgoodies.forms.builder.FormBuilder;
import com.jgoodies.forms.layout.FormLayout;
/**
* An Action for launching mass field.
*
* Functionality:
* * Defaults to selected entries, or all entries if none are selected.
* * Input field name
* * Either set field, or clear field.
*/
public class MassSetFieldAction extends MnemonicAwareAction {

    private final JabRefFrame frame;

    // The dialog and its widgets are created lazily on first use (see actionPerformed).
    private JDialog diag;
    private JRadioButton all;
    private JRadioButton selected;
    private JRadioButton clear;
    private JRadioButton set;
    private JRadioButton rename;
    private JComboBox<String> field;
    private JTextField text;
    private JTextField renameTo;
    // true until the user confirms the dialog with OK.
    private boolean canceled = true;
    private JCheckBox overwrite;

    public MassSetFieldAction(JabRefFrame frame) {
        putValue(Action.NAME, Localization.menuTitle("Set/clear/rename fields") + "...");
        this.frame = frame;
    }

    /**
     * Builds the modal set/clear/rename dialog: field-name combo box, entry-scope
     * radio buttons, the set/clear/rename mode group, and OK/Cancel handling.
     * Called at most once; the dialog is reused across invocations.
     */
    private void createDialog() {
        diag = new JDialog(frame, Localization.lang("Set/clear/rename fields"), true);
        field = new JComboBox<>();
        field.setEditable(true);
        text = new JTextField();
        text.setEnabled(false);
        renameTo = new JTextField();
        renameTo.setEnabled(false);

        JButton ok = new JButton(Localization.lang("OK"));
        JButton cancel = new JButton(Localization.lang("Cancel"));

        all = new JRadioButton(Localization.lang("All entries"));
        selected = new JRadioButton(Localization.lang("Selected entries"));
        clear = new JRadioButton(Localization.lang("Clear fields"));
        set = new JRadioButton(Localization.lang("Set fields"));
        rename = new JRadioButton(Localization.lang("Rename field to") + ":");
        rename.setToolTipText(Localization.lang("Move contents of a field into a field with a different name"));

        // Offer every field name currently visible in the database as a choice.
        Set<String> allFields = frame.getCurrentBasePanel().getDatabase().getAllVisibleFields();
        for (String f : allFields) {
            field.addItem(f);
        }

        set.addChangeListener(e ->
                // Entering a text is only relevant if we are setting, not clearing:
                text.setEnabled(set.isSelected()));
        clear.addChangeListener(e ->
                // Overwrite protection makes no sense if we are clearing the field:
                overwrite.setEnabled(!clear.isSelected()));
        rename.addChangeListener(e ->
                // Entering a text is only relevant if we are renaming:
                renameTo.setEnabled(rename.isSelected()));

        overwrite = new JCheckBox(Localization.lang("Overwrite existing field values"), true);

        ButtonGroup bg = new ButtonGroup();
        bg.add(all);
        bg.add(selected);
        bg = new ButtonGroup();
        bg.add(clear);
        bg.add(set);
        bg.add(rename);

        FormBuilder builder = FormBuilder.create().layout(new FormLayout(
                "left:pref, 4dlu, fill:100dlu:grow", "pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref"));
        builder.addSeparator(Localization.lang("Field name")).xyw(1, 1, 3);
        builder.add(Localization.lang("Field name")).xy(1, 3);
        builder.add(field).xy(3, 3);
        builder.addSeparator(Localization.lang("Include entries")).xyw(1, 5, 3);
        builder.add(all).xyw(1, 7, 3);
        builder.add(selected).xyw(1, 9, 3);
        builder.addSeparator(Localization.lang("New field value")).xyw(1, 11, 3);
        builder.add(set).xy(1, 13);
        builder.add(text).xy(3, 13);
        builder.add(clear).xyw(1, 15, 3);
        builder.add(rename).xy(1, 17);
        builder.add(renameTo).xy(3, 17);
        builder.add(overwrite).xyw(1, 19, 3);

        ButtonBarBuilder bb = new ButtonBarBuilder();
        bb.addGlue();
        bb.addButton(ok);
        bb.addButton(cancel);
        bb.addGlue();
        builder.getPanel().setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
        bb.getPanel().setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
        diag.getContentPane().add(builder.getPanel(), BorderLayout.CENTER);
        diag.getContentPane().add(bb.getPanel(), BorderLayout.SOUTH);
        diag.pack();

        ok.addActionListener(e -> {
            // Check that any field name is set
            String fieldText = (String) field.getSelectedItem();
            if ((fieldText == null) || fieldText.trim().isEmpty()) {
                JOptionPane.showMessageDialog(diag, Localization.lang("You must enter at least one field name"), "",
                        JOptionPane.ERROR_MESSAGE);
                return; // Do not close the dialog.
            }
            // Check if the user tries to rename multiple fields:
            if (rename.isSelected()) {
                String[] fields = getFieldNames(fieldText);
                if (fields.length > 1) {
                    JOptionPane.showMessageDialog(diag, Localization.lang("You can only rename one field at a time"),
                            "", JOptionPane.ERROR_MESSAGE);
                    return; // Do not close the dialog.
                }
            }
            canceled = false;
            diag.dispose();
        });

        Action cancelAction = new AbstractAction() {

            @Override
            public void actionPerformed(ActionEvent e) {
                canceled = true;
                diag.dispose();
            }
        };
        cancel.addActionListener(cancelAction);

        // Key bindings:
        ActionMap am = builder.getPanel().getActionMap();
        InputMap im = builder.getPanel().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW);
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.CLOSE_DIALOG), "close");
        am.put("close", cancelAction);
    }

    /**
     * Preselects the scope radio buttons before showing the dialog.
     *
     * @param selection true if any entries are currently selected; enables and
     *                  preselects "Selected entries", otherwise "All entries".
     */
    private void prepareDialog(boolean selection) {
        selected.setEnabled(selection);
        if (selection) {
            selected.setSelected(true);
        } else {
            all.setSelected(true);
        }
        // Make sure one of the following ones is selected:
        if (!set.isSelected() && !clear.isSelected() && !rename.isSelected()) {
            set.setSelected(true);
        }
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        BasePanel bp = frame.getCurrentBasePanel();
        if (bp == null) {
            return;
        }
        List<BibEntry> entries = bp.getSelectedEntries();
        // Lazy creation of the dialog:
        if (diag == null) {
            createDialog();
        }
        canceled = true;
        prepareDialog(!entries.isEmpty());
        if (diag != null) {
            diag.setLocationRelativeTo(frame);
            diag.setVisible(true); // modal: blocks until the dialog is closed
        }
        if (canceled) {
            return;
        }

        Collection<BibEntry> entryList;
        // If all entries should be treated, change the entries array:
        if (all.isSelected()) {
            entryList = bp.getDatabase().getEntries();
        } else {
            entryList = entries;
        }
        String toSet = text.getText();
        if (toSet.isEmpty()) {
            toSet = null; // an empty value means "clear the field"
        }
        // Normalize with Locale.ROOT so field names are lower-cased the same way
        // regardless of the user's default locale (e.g. the Turkish dotless i).
        String[] fields = getFieldNames(((String) field.getSelectedItem()).trim().toLowerCase(Locale.ROOT));
        NamedCompound ce = new NamedCompound(Localization.lang("Set field"));
        if (rename.isSelected()) {
            if (fields.length > 1) {
                JOptionPane.showMessageDialog(diag, Localization.lang("You can only rename one field at a time"), "",
                        JOptionPane.ERROR_MESSAGE);
                return; // Do not close the dialog.
            } else {
                ce.addEdit(MassSetFieldAction.massRenameField(entryList, fields[0], renameTo.getText(),
                        overwrite.isSelected()));
            }
        } else {
            for (String field1 : fields) {
                ce.addEdit(MassSetFieldAction.massSetField(entryList, field1,
                        set.isSelected() ? toSet : null,
                        overwrite.isSelected()));
            }
        }
        ce.end();
        bp.getUndoManager().addEdit(ce);
        bp.markBaseChanged();
    }

    /**
     * Set a given field to a given value for all entries in a Collection. This method DOES NOT update any UndoManager,
     * but returns a relevant CompoundEdit that should be registered by the caller.
     *
     * @param entries The entries to set the field for.
     * @param field The name of the field to set.
     * @param text The value to set. This value can be null, indicating that the field should be cleared.
     * @param overwriteValues Indicate whether the value should be set even if an entry already has the field set.
     * @return A CompoundEdit for the entire operation.
     */
    private static UndoableEdit massSetField(Collection<BibEntry> entries, String field, String text,
            boolean overwriteValues) {
        NamedCompound ce = new NamedCompound(Localization.lang("Set field"));
        for (BibEntry entry : entries) {
            Optional<String> oldVal = entry.getField(field);
            // If we are not allowed to overwrite values, check if there is a
            // nonempty value already for this entry:
            if (!overwriteValues && (oldVal.isPresent()) && !oldVal.get().isEmpty()) {
                continue;
            }
            if (text == null) {
                entry.clearField(field);
            } else {
                entry.setField(field, text);
            }
            ce.addEdit(new UndoableFieldChange(entry, field, oldVal.orElse(null), text));
        }
        ce.end();
        return ce;
    }

    /**
     * Move contents from one field to another for a Collection of entries.
     *
     * @param entries The entries to do this operation for.
     * @param field The field to move contents from.
     * @param newField The field to move contents into.
     * @param overwriteValues If true, overwrites any existing values in the new field. If false, makes no change for
     *            entries with existing value in the new field.
     * @return A CompoundEdit for the entire operation.
     */
    private static UndoableEdit massRenameField(Collection<BibEntry> entries, String field, String newField,
            boolean overwriteValues) {
        NamedCompound ce = new NamedCompound(Localization.lang("Rename field"));
        for (BibEntry entry : entries) {
            Optional<String> valToMove = entry.getField(field);
            // If there is no value, do nothing:
            if ((!valToMove.isPresent()) || valToMove.get().isEmpty()) {
                continue;
            }
            // If we are not allowed to overwrite values, check if there is a
            // non-empty value already for this entry for the new field:
            Optional<String> valInNewField = entry.getField(newField);
            if (!overwriteValues && (valInNewField.isPresent()) && !valInNewField.get().isEmpty()) {
                continue;
            }
            entry.setField(newField, valToMove.get());
            ce.addEdit(new UndoableFieldChange(entry, newField, valInNewField.orElse(null), valToMove.get()));
            entry.clearField(field);
            ce.addEdit(new UndoableFieldChange(entry, field, valToMove.get(), null));
        }
        ce.end();
        return ce;
    }

    /**
     * Splits a user-entered string into individual field names.
     *
     * @param s raw input; field names may be separated by whitespace, ';' or ','
     * @return the individual field names, without empty entries
     */
    private static String[] getFieldNames(String s) {
        // BUGFIX: split on RUNS of separators ("+") and trim first, so input like
        // "author, title" or "author;;title" no longer yields empty-string field
        // names (the old regex "[\\s;,]" produced "" entries that were then
        // set/cleared as if they were real fields).
        return s.trim().split("[\\s;,]+");
    }
}
|
/**
* Copyright © 2018, Ondrej Benkovsky
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those
* of the authors and should not be interpreted as representing official policies,
* either expressed or implied, of the FreeBSD Project.
*/
package com.github.structlogging;
/**
 * Simple immutable holder for a single string value.
 */
public class Test {

    /** The stored value; fixed at construction time. */
    private final String ahoj;

    /**
     * Creates a holder for the given value.
     *
     * @param ahoj the value to store
     */
    public Test(final String ahoj) {
        this.ahoj = ahoj;
    }

    /**
     * @return the value passed at construction
     */
    public String getAhoj() {
        return ahoj;
    }

    @Override
    public String toString() {
        return "Test{ahoj='" + ahoj + "'}";
    }
}
|
package de.autoDrive.NetworkServer.service;
import de.autoDrive.NetworkServer.entity.MapInfo;
import de.autoDrive.NetworkServer.entity.MapTileData;
import de.autoDrive.NetworkServer.helper.ImageHelper;
import de.autoDrive.NetworkServer.repository.MapInfoRepository;
import de.autoDrive.NetworkServer.repository.MapTileDataRepository;
import org.imgscalr.Scalr;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Optional;
import static de.autoDrive.NetworkServer.rest.MapTileInfo.DIMENSIONS;
import static de.autoDrive.NetworkServer.rest.MapTileInfo.TILE_SIZE;
@Service
@Transactional
public class MapService {

    private static final Logger LOG = LoggerFactory.getLogger(MapService.class);

    @Autowired
    private MapTileDataRepository mapTileDataRepository;

    @Autowired
    private MapInfoRepository mapInfoRepository;

    /**
     * Looks up a single stored tile.
     *
     * @return the tile's image bytes, or null when no such tile exists
     */
    public byte[] getMap(String name, int level, int x, int y) throws IOException {
        Optional<MapTileData> tile =
                mapTileDataRepository.findFirstByZoomLevelAndXAndYAndName(level, x, y, name);
        return tile.map(MapTileData::getData).orElse(null);
    }

    /**
     * Scales the named source image to the dimension of the given zoom level and
     * persists it as a grid of PNG tiles (edge tiles may be smaller than TILE_SIZE).
     */
    public void createTiles(String name, Integer level) throws IOException {
        LOG.info("createTiles {}, level: {}", name, level);
        // The scaled image is square, so one side length covers width and height.
        int sideLength = DIMENSIONS[level];
        MapInfo mapInfo = mapInfoRepository.findFirstByName(name).orElseGet(() -> {
            MapInfo created = new MapInfo();
            created.setName(name);
            mapInfoRepository.save(created);
            return created;
        });
        BufferedImage scaledImage = getScaleImage(name, sideLength);
        for (int tileX = 0; tileX < sideLength; tileX += TILE_SIZE) {
            for (int tileY = 0; tileY < sideLength; tileY += TILE_SIZE) {
                // Clamp the cutout so it never reaches past the image edges.
                int cutWidth = Math.min(TILE_SIZE, sideLength - tileX);
                int cutHeight = Math.min(TILE_SIZE, sideLength - tileY);
                BufferedImage cutoutImage = scaledImage.getSubimage(tileX, tileY, cutWidth, cutHeight);
                byte[] byteArray = toByteArrayAutoClosable(cutoutImage);
                LOG.info("level: {}, x: {}, y: {}, size: {}", level, tileX, tileY, byteArray.length);
                MapTileData mapTile = new MapTileData();
                mapTile.setData(byteArray);
                mapTile.setName(name);
                mapTile.setX(tileX);
                mapTile.setY(tileY);
                mapTile.setZoomLevel(level);
                mapTile.setMapInfo(mapInfo);
                mapTileDataRepository.save(mapTile);
            }
            // Flush once per column of tiles rather than per tile.
            mapTileDataRepository.flush();
        }
    }

    /**
     * Loads /images/&lt;name&gt;.png and scales it to a square of the given dimension.
     */
    public BufferedImage getScaleImage(String name, int dimension) {
        BufferedImage source = ImageHelper.loadImage("/images/" + name + ".png");
        return Scalr.resize(source, Scalr.Method.QUALITY, Scalr.Mode.AUTOMATIC, dimension, dimension);
    }

    /**
     * Encodes the image as PNG; the stream is closed by try-with-resources.
     */
    private byte[] toByteArrayAutoClosable(BufferedImage image) throws IOException {
        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            ImageIO.write(image, "png", out);
            return out.toByteArray();
        }
    }

    /**
     * Asynchronously ensures tiles exist for the given map and zoom level by
     * checking tile (0,0) and generating the whole level when it is missing.
     */
    @Async
    public void runTileCheck(Integer zoomLevel, String name) throws IOException {
        LOG.info("runTileCheck {}, zoomLevel: {}", name, zoomLevel);
        byte[] data = getMap(name, zoomLevel, 0, 0);
        if (data == null) {
            try {
                createTiles(name, zoomLevel);
            } catch (IOException e) {
                LOG.error(e.getMessage(), e);
            }
        }
    }
}
|
package org.blueo.log.monitor.vo;
import java.util.List;
import org.blueo.log.monitor.core.LogMonitor;
import org.blueo.log.monitor.core.LogMonitorDaily;
import com.google.common.collect.Lists;
/**
details sample:
name command arg
server=/app/thorin/server/log/server.yyyy-MM-dd.log |grep 'ERROR'
pricer=/app/thorin/pricer/log/pricer.yyyy-MM-dd.log |grep 'ERROR'
*/
public class LogJob {

    private String name;
    // Newline-separated lines of the form "name=command"; parsed by start().
    private String details;
    private List<LogMonitor> logMonitors = Lists.newArrayList();

    public LogJob() {
    }

    public LogJob(String name, String details) {
        this.name = name;
        this.details = details;
    }

    /**
     * (Re)starts this job: stops and discards all monitors from a previous start,
     * then creates and starts one LogMonitorDaily per line of {@code details}.
     * Each line must be "name=command"; the command part may contain '=' because
     * the split is limited to two parts.
     */
    public void start() {
        // BUGFIX: the old code called logMonitors.clear() inside the for-each loop
        // over logMonitors itself, which makes the fail-fast iterator throw
        // ConcurrentModificationException for any non-empty list. Stop every
        // monitor first, then clear the list once.
        for (LogMonitor logMonitor : logMonitors) {
            logMonitor.stop();
        }
        logMonitors.clear();
        String[] lines = details.split("\\r?\\n");
        for (String line : lines) {
            // Limit 2 keeps any '=' inside the command part intact.
            String[] split = line.split("=", 2);
            LogMonitor logMonitor = new LogMonitorDaily(split[0], split[1]);
            logMonitor.start();
            logMonitors.add(logMonitor);
        }
    }

    /**
     * @return the concatenated content of all monitors, in monitor order
     */
    public List<String> getContent() {
        List<String> content = Lists.newArrayList();
        for (LogMonitor logMonitor : logMonitors) {
            content.addAll(logMonitor.getContent());
        }
        return content;
    }

    // -----------------------------
    // ----- Get Set ToString HashCode Equals
    // -----------------------------
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("LogJob [name=");
        builder.append(name);
        builder.append(", details=");
        builder.append(details);
        builder.append(", logMonitors=");
        builder.append(logMonitors);
        builder.append("]");
        return builder.toString();
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDetails() {
        return details;
    }

    public void setDetails(String details) {
        this.details = details;
    }

    public List<LogMonitor> getLogMonitors() {
        return logMonitors;
    }

    public void setLogMonitors(List<LogMonitor> logMonitors) {
        this.logMonitors = logMonitors;
    }
}
|
package parser.command.commandlist.turtles;
import java.util.ArrayList;
import java.util.List;
import parser.ParserException;
import parser.Validator;
import parser.command.Command;
import parser.command.CommandList;
import parser.command.commandlist.syntax.ListStartCommand;
/**
 * TELL command: makes the listed turtle IDs current in the turtle container and
 * returns the last ID in the list.
 */
public class TellCommand extends Command {

    @Override
    public double evaluate() throws ParserException {
        List<Integer> turtleIds = new ArrayList<>();
        int branchCount = myTree.get(0).getNumBranches();
        // The last branch is not an ID argument, hence branchCount - 1.
        for (int index = 0; index < branchCount - 1; index++) {
            Double value = (Double) myTree.get(0).get(index).evaluate();
            // IDs are taken as non-negative integers (truncate, then abs).
            turtleIds.add(Math.abs(value.intValue()));
        }
        myParser.getTurtleContainer().setCurrent(turtleIds);
        // The value of TELL is the last ID given.
        return turtleIds.get(turtleIds.size() - 1);
    }

    @Override
    public CommandList build() throws ParserException {
        CommandList remaining = myTree.buildNext().getRemainder();
        // TELL takes a [ ... ] list with at least two elements.
        Validator.assertType(myTree.get(0), myCommand, ListStartCommand.class);
        Validator.assertAtLeastNumArguments(myTree.get(0), myCommand, 2, true);
        return remaining;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.core.iterators;
import java.io.IOException;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
/**
 * Environment made available to an Accumulo server-side iterator. Provides
 * access to configuration, the scope the iterator runs in, and file readers.
 */
public interface IteratorEnvironment {
/**
 * Reserves and returns a key/value reader over the named map file.
 *
 * @param mapFileName name/path of the map file to read
 * @throws IOException if the file cannot be opened
 */
SortedKeyValueIterator<Key,Value> reserveMapFileReader(String mapFileName) throws IOException;
/** Returns the Accumulo configuration for this environment. */
AccumuloConfiguration getConfig();
/** Returns the scope (e.g. scan or compaction) in which the iterator runs. */
IteratorScope getIteratorScope();
/**
 * Returns whether the current operation is a full major compaction.
 * NOTE(review): presumably only meaningful in major-compaction scope — confirm
 * against the implementing classes.
 */
boolean isFullMajorCompaction();
/** Registers an additional iterator as a side channel of this environment. */
void registerSideChannel(SortedKeyValueIterator<Key,Value> iter);
}
|
package sc.jbp.modules.sys.dao;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import sc.jbp.modules.sys.entity.SysRoleEntity;
import org.apache.ibatis.annotations.Mapper;
/**
 * Role management mapper. Inherits all CRUD operations for SysRoleEntity
 * from MyBatis-Plus {@code BaseMapper}; no custom queries are defined here.
 * <p>
 * tzen@e-veb.com
 */
@Mapper
public interface SysRoleDao extends BaseMapper<SysRoleEntity> {
}
|
// This file is auto-generated, don't edit it. Thanks.
package com.aliyun.dingtalkworkbench_1_0.models;
import com.aliyun.tea.*;
/**
 * Auto-generated Tea model for the DingTalk workbench "query component scopes"
 * response body. Fields map to JSON keys via {@code @NameInMap}; setters return
 * {@code this} for chaining.
 */
public class QueryComponentScopesResponseBody extends TeaModel {
// scopes
// NOTE(review): presumably the user/department IDs the component is visible to —
// confirm against the DingTalk API documentation.
@NameInMap("userVisibleScopes")
public java.util.List<String> userVisibleScopes;
@NameInMap("deptVisibleScopes")
public java.util.List<Long> deptVisibleScopes;
/**
 * Builds an instance from a generic map, as produced by the Tea framework.
 */
public static QueryComponentScopesResponseBody build(java.util.Map<String, ?> map) throws Exception {
QueryComponentScopesResponseBody self = new QueryComponentScopesResponseBody();
return TeaModel.build(map, self);
}
public QueryComponentScopesResponseBody setUserVisibleScopes(java.util.List<String> userVisibleScopes) {
this.userVisibleScopes = userVisibleScopes;
return this;
}
public java.util.List<String> getUserVisibleScopes() {
return this.userVisibleScopes;
}
public QueryComponentScopesResponseBody setDeptVisibleScopes(java.util.List<Long> deptVisibleScopes) {
this.deptVisibleScopes = deptVisibleScopes;
return this;
}
public java.util.List<Long> getDeptVisibleScopes() {
return this.deptVisibleScopes;
}
}
|
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.turbine.aggregator;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.junit.Test;
import rx.Observable;
import rx.observables.GroupedObservable;
import rx.observers.TestSubscriber;
import rx.schedulers.TestScheduler;
import rx.subjects.TestSubject;
import com.netflix.turbine.HystrixStreamSource;
public class StreamAggregatorTest {
/**
* Submit 3 events containing `rollingCountSuccess` of => 327, 370, 358
*
* We should receive a GroupedObservable of key "CinematchGetPredictions" with deltas => 327, 43, -12, -358 (onComplete)
*/
@Test
public void testNumberValue_OneInstanceOneGroup() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> source = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    // Count each command group seen and project out the success counter.
    StreamAggregator.aggregateGroupedStreams(source).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        groupCount.incrementAndGet();
        return commandGroup.map(data -> data.get("rollingCountSuccess"));
    }).subscribe(subscriber);
    source.onNext(getCinematchCommandInstanceStream(12345, testScheduler), 5);
    source.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    subscriber.assertNoErrors();
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // exactly one instance group is expected
    assertEquals(1, groupCount.get());
    // the expected aggregated values for rollingCountSuccess
    subscriber.assertReceivedOnNext(Arrays.asList(327L, 370L, 358L, 0L));
}
/**
* Group 1: 327, 370, 358 => deltas: 327, 43, -12, -358 (onComplete)
* Group 2: 617, 614, 585 => deltas: 617, -3, -29, -585 (onComplete)
*
*
*/
@Test
public void testNumberValue_OneInstanceTwoGroups() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> source = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    // Count each command group seen and project out the success counter.
    StreamAggregator.aggregateGroupedStreams(source).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        groupCount.incrementAndGet();
        return commandGroup.map(data -> data.get("rollingCountSuccess"));
    }).subscribe(subscriber);
    source.onNext(getSubscriberAndCinematchCommandInstanceStream(12345, testScheduler), 0);
    source.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // two command groups are expected
    assertEquals(2, groupCount.get());
    // the expected deltas for rollingCountSuccess (two groups interleaved)
    subscriber.assertReceivedOnNext(Arrays.asList(327L, 617L, 370L, 614L, 358L, 585L, 0L, 0L));
}
/**
* Two instances emitting: 327, 370, 358 => deltas: 327, 43, -12, -358 (onComplete)
*
* 327, 327, 370, 370, 358, 358
*
* 0 + 327 = 327
* 327 + 327 = 654
* 654 + 43 = 697
* 697 + 43 = 740
* 740 - 12 = 728
* 728 - 358 = 370
* 370 - 12 = 358
* 358 - 358 = 0
*
*/
@Test
public void testNumberValue_TwoInstancesOneGroup() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> source = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    // Count each command group seen and project out the success counter.
    StreamAggregator.aggregateGroupedStreams(source).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        groupCount.incrementAndGet();
        return commandGroup.map(data -> data.get("rollingCountSuccess"));
    }).subscribe(subscriber);
    // Two instances emitting the same command stream, aggregated together.
    source.onNext(getCinematchCommandInstanceStream(12345, testScheduler), 0);
    source.onNext(getCinematchCommandInstanceStream(23456, testScheduler), 0);
    source.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // a single command group is expected
    assertEquals(1, groupCount.get());
    // the expected running sums for rollingCountSuccess across both instances
    subscriber.assertReceivedOnNext(Arrays.asList(327L, 654L, 697L, 740L, 728L, 370L, 358L, 0L));
}
/**
*
* Each instance emits =>
*
* Group 1: 327, 370, 358 => deltas: 327, 43, -12, -358 (onComplete)
* Group 2: 617, 614, 585 => deltas: 617, -3, -29, -585 (onComplete)
*
* Group1 =>
*
* 327, 327, 370, 370, 358, 358
*
* 0 + 327 = 327
* 327 + 327 = 654
* 654 + 43 = 697
* 697 + 43 = 740
* 740 - 12 = 728
* 728 - 358 = 370
* 370 - 12 = 358
* 358 - 358 = 0
*
* Group 2 =>
*
* 617, 617, 614, 614, 585, 585
*
* 0 + 617 = 617
* 617 + 617 = 1234
* 1234 - 3 = 1231
* 1231 - 3 = 1228
* 1228 - 29 = 1199
* 1199 - 585 = 614
* 614 - 29 = 585
* 585 - 585 = 0
*
* Interleaved because 2 groups:
*
* 327, 654, 617, 1234, 697, 740, 1231, 1228, 728, 716, 1199, 1170
*
*/
@Test
public void testNumberValue_TwoInstancesTwoGroups() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> source = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    // Count each command group seen and project out the success counter.
    StreamAggregator.aggregateGroupedStreams(source).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        groupCount.incrementAndGet();
        return commandGroup.map(data -> data.get("rollingCountSuccess"));
    }).subscribe(subscriber);
    // Two instances, each emitting two command groups, with staggered delays.
    source.onNext(getSubscriberAndCinematchCommandInstanceStream(12345, testScheduler), 0);
    source.onNext(getSubscriberAndCinematchCommandInstanceStream(23456, testScheduler), 5);
    source.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // two command groups are expected
    assertEquals(2, groupCount.get());
    // the expected interleaved running sums for rollingCountSuccess
    subscriber.assertReceivedOnNext(Arrays.asList(327L, 654L, 617L, 1234L, 697L, 740L, 1231L, 1228L, 728L, 716L, 1199L, 614L, 358L, 585L, 0L, 0L));
}
@Test
public void testStringValue_OneInstanceOneGroup() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> source = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    // Count each command group seen and project the aggregated string as JSON.
    StreamAggregator.aggregateGroupedStreams(source).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        groupCount.incrementAndGet();
        return commandGroup.map(data -> ((AggregateString) data.get("isCircuitBreakerOpen")).toJson());
    }).subscribe(subscriber);
    source.onNext(getCinematchCommandInstanceStream(12345, testScheduler), 5);
    source.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // a single command group is expected
    assertEquals(1, groupCount.get());
    // the expected aggregated isCircuitBreakerOpen JSON snapshots
    subscriber.assertReceivedOnNext(Arrays.asList("{\"false\":1}", "{\"false\":1}", "{\"true\":1}", "{}"));
}
@Test
public void testStringValue_TwoInstancesOneGroup() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> source = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    // Count each command group seen and project the aggregated string as JSON.
    StreamAggregator.aggregateGroupedStreams(source).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        groupCount.incrementAndGet();
        return commandGroup.map(data -> ((AggregateString) data.get("isCircuitBreakerOpen")).toJson());
    }).subscribe(subscriber);
    // Two instances emitting the same command stream, aggregated together.
    source.onNext(getCinematchCommandInstanceStream(12345, testScheduler), 0);
    source.onNext(getCinematchCommandInstanceStream(23456, testScheduler), 0);
    source.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // a single command group is expected
    assertEquals(1, groupCount.get());
    // the expected aggregated isCircuitBreakerOpen JSON snapshots for both instances
    subscriber.assertReceivedOnNext(Arrays.asList("{\"false\":1}", "{\"false\":2}", "{\"false\":2}", "{\"false\":2}", "{\"false\":1,\"true\":1}", "{\"false\":1}", "{\"true\":1}", "{}"));
}
/**
 * Test that an instance dropping correctly removes the data.
 *
 * The first instance completes at 31ms; once it does, its contribution to
 * the aggregated AggregateString must disappear.
 *
 * Fix: JUnit's assertEquals takes (expected, actual) — the two list
 * comparisons below previously passed the actual value first, producing
 * misleading failure messages.
 */
@Test
public void testInstanceRemovalStringValue() {
    TestScheduler scheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> stream = TestSubject.create(scheduler);
    AtomicInteger numGroups = new AtomicInteger();
    TestSubscriber<AggregateString> ts = new TestSubscriber<>();
    StreamAggregator.aggregateGroupedStreams(stream).<AggregateString> flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        numGroups.incrementAndGet();
        return commandGroup.map(data -> {
            return ((AggregateString) data.get("isCircuitBreakerOpen"));
        });
    }).subscribe(ts);
    // first instance onCompletes at 31ms, the second at 100ms
    stream.onNext(getCinematchCommandInstanceStream(12345, scheduler, 31), 0);
    stream.onNext(getCinematchCommandInstanceStream(23456, scheduler, 100), 0);
    stream.onCompleted(100);
    scheduler.advanceTimeTo(30, TimeUnit.MILLISECONDS);
    // assert we have two groups aggregated
    List<AggregateString> onNextAt30 = ts.getOnNextEvents();
    List<String> jsonAt30 = ts.getOnNextEvents().stream().map(as -> as.toJson()).collect(Collectors.toList());
    System.out.println("OnNext at 30ms -> " + jsonAt30);
    // we should have 2 instances now
    System.out.println("Instances at 30: " + onNextAt30.get(onNextAt30.size() - 1).instances());
    assertEquals(2, onNextAt30.get(onNextAt30.size() - 1).instances().size());
    // the expected deltas for rollingCountSuccess (2 instances of same data grouped together)
    assertEquals(Arrays.asList("{\"false\":1}", "{\"false\":2}", "{\"false\":2}", "{\"false\":2}", "{\"false\":1,\"true\":1}", "{\"true\":2}"), jsonAt30);
    // advance past the first stream so it onCompletes and removes itself
    scheduler.advanceTimeTo(31, TimeUnit.MILLISECONDS);
    // we should now see only 1 value
    List<AggregateString> onNextAt31 = ts.getOnNextEvents();
    List<String> jsonAt31 = ts.getOnNextEvents().stream().map(as -> as.toJson()).collect(Collectors.toList());
    System.out.println("OnNext at 31ms -> " + jsonAt31);
    // we should only have 1 instance now
    System.out.println("Instances at 31: " + onNextAt31.get(onNextAt31.size() - 1).instances());
    assertEquals(1, onNextAt31.get(onNextAt31.size() - 1).instances().size());
    assertEquals(Arrays.asList("{\"false\":1}", "{\"false\":2}", "{\"false\":2}", "{\"false\":2}", "{\"false\":1,\"true\":1}", "{\"true\":2}", "{\"true\":1}"), jsonAt31);
    // complete
    scheduler.advanceTimeTo(100, TimeUnit.MILLISECONDS);
    ts.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + ts.getOnErrorEvents());
    if (ts.getOnErrorEvents().size() > 0) {
        ts.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + ts.getOnNextEvents());
    assertEquals(0, ts.getOnErrorEvents().size());
    // we expect 1 command
    assertEquals(1, numGroups.get());
}
/**
 * Verifies that every expected field in the aggregated data map is present
 * with its aggregated type, and that the stream keeps emitting the command
 * name for each event. The field checks are extracted into a helper so the
 * test body reads as setup / subscribe / assert.
 */
@Test
public void testFields() {
    TestScheduler scheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> stream = TestSubject.create(scheduler);
    AtomicInteger numGroups = new AtomicInteger();
    TestSubscriber<Object> ts = new TestSubscriber<>();
    StreamAggregator.aggregateGroupedStreams(stream).flatMap(commandGroup -> {
        numGroups.incrementAndGet();
        return commandGroup.map(data -> {
            validateAllCommandFields(data);
            return data.get("name");
        });
    }).subscribe(ts);
    stream.onNext(getCinematchCommandInstanceStream(12345, scheduler), 0);
    stream.onNext(getCinematchCommandInstanceStream(23456, scheduler), 0);
    stream.onCompleted(100);
    scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    ts.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + ts.getOnErrorEvents());
    if (ts.getOnErrorEvents().size() > 0) {
        ts.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + ts.getOnNextEvents());
    assertEquals(0, ts.getOnErrorEvents().size());
    // we expect 1 command
    assertEquals(1, numGroups.get());
    // the expected deltas for rollingCountSuccess (2 instances of same data grouped together)
    ts.assertReceivedOnNext(Arrays.asList("CinematchGetPredictions", "CinematchGetPredictions", "CinematchGetPredictions", "CinematchGetPredictions", "CinematchGetPredictions", "CinematchGetPredictions", "CinematchGetPredictions", "CinematchGetPredictions"));
}

/**
 * Asserts presence and aggregated type for every field the aggregator is
 * expected to produce for a command data map. Throws IllegalStateException
 * (via the validate* helpers) on the first mismatch.
 */
private void validateAllCommandFields(Map<String, Object> data) {
    validateNumber(data, "reportingHosts");
    validateString(data, "type");
    validateString(data, "name");
    validateAggregateString(data, "group");
    // currentTime is expected to be stripped during aggregation
    validateNull(data, "currentTime");
    validateAggregateString(data, "isCircuitBreakerOpen");
    validateNumber(data, "errorPercentage");
    validateNumber(data, "errorCount");
    validateNumber(data, "requestCount");
    validateNumber(data, "rollingCountCollapsedRequests");
    validateNumber(data, "rollingCountExceptionsThrown");
    validateNumber(data, "rollingCountFailure");
    validateNumber(data, "rollingCountFallbackFailure");
    validateNumber(data, "rollingCountFallbackRejection");
    validateNumber(data, "rollingCountFallbackSuccess");
    validateNumber(data, "rollingCountResponsesFromCache");
    validateNumber(data, "rollingCountSemaphoreRejected");
    validateNumber(data, "rollingCountShortCircuited");
    validateNumber(data, "rollingCountSuccess");
    validateNumber(data, "rollingCountThreadPoolRejected");
    validateNumber(data, "rollingCountTimeout");
    validateNumber(data, "currentConcurrentExecutionCount");
    validateNumber(data, "latencyExecute_mean");
    validateNumberList(data, "latencyExecute");
    validateNumber(data, "latencyTotal_mean");
    validateNumberList(data, "latencyTotal");
    validateAggregateString(data, "propertyValue_circuitBreakerRequestVolumeThreshold");
    validateAggregateString(data, "propertyValue_circuitBreakerSleepWindowInMilliseconds");
    validateAggregateString(data, "propertyValue_circuitBreakerErrorThresholdPercentage");
    validateAggregateString(data, "propertyValue_circuitBreakerForceOpen");
    validateAggregateString(data, "propertyValue_executionIsolationStrategy");
    validateAggregateString(data, "propertyValue_executionIsolationThreadTimeoutInMilliseconds");
    validateAggregateString(data, "propertyValue_executionIsolationThreadInterruptOnTimeout");
    validateAggregateString(data, "propertyValue_executionIsolationSemaphoreMaxConcurrentRequests");
    validateAggregateString(data, "propertyValue_fallbackIsolationSemaphoreMaxConcurrentRequests");
    validateAggregateString(data, "propertyValue_requestCacheEnabled");
    validateAggregateString(data, "propertyValue_requestLogEnabled");
    validateAggregateString(data, "propertyValue_metricsRollingStatisticalWindowInMilliseconds");
}
/**
 * The aggregated reportingHosts field tracks how many instances are
 * currently contributing data to the command group.
 */
@Test
public void testFieldReportingHosts() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> instanceStream = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    StreamAggregator.aggregateGroupedStreams(instanceStream)
            .flatMap(commandGroup -> {
                groupCount.incrementAndGet();
                // emit just the reportingHosts count for each aggregated event
                return commandGroup.map(data -> data.get("reportingHosts"));
            })
            .subscribe(subscriber);
    instanceStream.onNext(getCinematchCommandInstanceStream(12345, testScheduler), 0);
    instanceStream.onNext(getCinematchCommandInstanceStream(23456, testScheduler), 0);
    instanceStream.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // we expect 1 command
    assertEquals(1, groupCount.get());
    // host count ramps 1 -> 2, holds, then drains back to 0 as instances complete
    subscriber.assertReceivedOnNext(Arrays.asList(1L, 2L, 2L, 2L, 2L, 1L, 1L, 0L));
}
/**
 * The aggregated propertyValue_circuitBreakerForceOpen field is an
 * AggregateString whose toString() reports a count per distinct value seen.
 */
@Test
public void testField_propertyValue_circuitBreakerForceOpen() {
    TestScheduler testScheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> instanceStream = TestSubject.create(testScheduler);
    AtomicInteger groupCount = new AtomicInteger();
    TestSubscriber<Object> subscriber = new TestSubscriber<>();
    StreamAggregator.aggregateGroupedStreams(instanceStream)
            .flatMap(commandGroup -> {
                groupCount.incrementAndGet();
                // String.valueOf(..) renders the AggregateString via toString()
                return commandGroup.map(data -> String.valueOf(data.get("propertyValue_circuitBreakerForceOpen")));
            })
            .subscribe(subscriber);
    instanceStream.onNext(getCinematchCommandInstanceStream(12345, testScheduler), 0);
    instanceStream.onNext(getCinematchCommandInstanceStream(23456, testScheduler), 0);
    instanceStream.onCompleted(100);
    testScheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    subscriber.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + subscriber.getOnErrorEvents());
    if (!subscriber.getOnErrorEvents().isEmpty()) {
        subscriber.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + subscriber.getOnNextEvents());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    // we expect 1 command
    assertEquals(1, groupCount.get());
    subscriber.assertReceivedOnNext(Arrays.asList("AggregateString => {\"false\":1}",
            "AggregateString => {\"false\":2}",
            "AggregateString => {\"false\":2}",
            "AggregateString => {\"false\":2}",
            "AggregateString => {\"false\":2}",
            "AggregateString => {\"false\":1}",
            "AggregateString => {\"false\":1}",
            "AggregateString => {}"));
}
/**
 * Four instances feed the same command stream. After each instance joins
 * (the ramp-up of counts 1..4, i.e. the first 8 events skipped below),
 * every aggregated propertyValue_circuitBreakerForceOpen value must report
 * a count of 4 until a stream completes.
 */
@Test
public void testFieldOnStream() {
    TestScheduler scheduler = new TestScheduler();
    TestSubscriber<Object> ts = new TestSubscriber<>();
    // 20 events per instance, 10 per group
    // 80 events total
    GroupedObservable<InstanceKey, Map<String, Object>> hystrixStreamA = HystrixStreamSource.getHystrixStreamFromFileEachLineScheduledEvery10Milliseconds(HystrixStreamSource.STREAM_SUBSCRIBER_CINEMATCH_1, 12345, scheduler, 200);
    GroupedObservable<InstanceKey, Map<String, Object>> hystrixStreamB = HystrixStreamSource.getHystrixStreamFromFileEachLineScheduledEvery10Milliseconds(HystrixStreamSource.STREAM_SUBSCRIBER_CINEMATCH_1, 23456, scheduler, 200);
    GroupedObservable<InstanceKey, Map<String, Object>> hystrixStreamC = HystrixStreamSource.getHystrixStreamFromFileEachLineScheduledEvery10Milliseconds(HystrixStreamSource.STREAM_SUBSCRIBER_CINEMATCH_1, 67890, scheduler, 200);
    GroupedObservable<InstanceKey, Map<String, Object>> hystrixStreamD = HystrixStreamSource.getHystrixStreamFromFileEachLineScheduledEvery10Milliseconds(HystrixStreamSource.STREAM_SUBSCRIBER_CINEMATCH_1, 63543, scheduler, 200);
    Observable<GroupedObservable<InstanceKey, Map<String, Object>>> fullStream = Observable.just(hystrixStreamA, hystrixStreamB, hystrixStreamC, hystrixStreamD);
    StreamAggregator.aggregateGroupedStreams(fullStream).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        return commandGroup;
    }).doOnNext(data -> {
        System.out.println("data => " + data.get("propertyValue_circuitBreakerForceOpen") + " " + data.get("name"));
    }).skip(8).doOnNext(v -> {
        // assert the count is always 4 (4 instances) on AggregateString values
        AggregateString as = (AggregateString) (v.get("propertyValue_circuitBreakerForceOpen"));
        if (!"AggregateString => {\"false\":4}".equals(as.toString())) {
            // after the initial 1, 2, 3, 4 counting on each instance we should receive 4 always thereafter
            // and we skip the first 8 to get past those
            throw new IllegalStateException("Expect the count to always be 4 but was " + as.toString());
        }
    }).subscribe(ts);
    // only got to 199 so we don't trigger completion
    scheduler.advanceTimeBy(199, TimeUnit.MILLISECONDS);
    System.out.println("---------> OnErrorEvents: " + ts.getOnErrorEvents());
    if (ts.getOnErrorEvents().size() > 0) {
        ts.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + ts.getOnNextEvents());
    assertEquals(0, ts.getOnErrorEvents().size());
}
/** Asserts a non-null {@code NumberList} value for {@code key}. */
private void validateNumberList(Map<String, Object> data, String key) {
    validateType(data, key, NumberList.class);
}

/** Asserts that {@code data} holds no value for {@code key}. */
private void validateNull(Map<String, Object> data, String key) {
    Object o = data.get(key);
    if (o != null) {
        throw new IllegalStateException("Did not expect value for key: " + key);
    }
}

/** Asserts a non-null {@code AggregateString} value for {@code key}. */
private void validateAggregateString(Map<String, Object> data, String key) {
    validateType(data, key, AggregateString.class);
}

/** Asserts a non-null {@code String} value for {@code key}. */
private void validateString(Map<String, Object> data, String key) {
    validateType(data, key, String.class);
}

/** Asserts a non-null {@code Number} value for {@code key}. */
private void validateNumber(Map<String, Object> data, String key) {
    validateType(data, key, Number.class);
}

/**
 * Shared implementation for the type-specific validators above: the value
 * must be present and an instance of {@code expected}.
 *
 * The error messages intentionally match the original per-type validators
 * (including the "to be a &lt;Type&gt;" phrasing) so failure output is unchanged.
 */
private void validateType(Map<String, Object> data, String key, Class<?> expected) {
    Object o = data.get(key);
    if (o == null) {
        throw new IllegalStateException("Expected value: " + key);
    }
    if (!expected.isInstance(o)) {
        throw new IllegalStateException("Expected value of '" + key + "' to be a " + expected.getSimpleName() + " but was: " + o.getClass().getSimpleName());
    }
}
/**
 * This looks for the latency values which look like this:
 *
 * {"0":0,"25":0,"50":4,"75":11,"90":14,"95":17,"99":31,"99.5":43,"100":71}
 * {"0":0,"25":0,"50":3,"75":12,"90":17,"95":24,"99":48,"99.5":363,"100":390}
 * {"0":0,"25":0,"50":3,"75":12,"90":17,"95":24,"99":48,"99.5":363,"100":390}
 *
 * The inner values need to be summed.
 *
 * With a single instance the aggregated NumberList equals that instance's
 * own values, then drops to all zeros once the instance completes.
 */
@Test
public void testArrayMapValue_OneInstanceOneGroup() {
    TestScheduler scheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> stream = TestSubject.create(scheduler);
    AtomicInteger numGroups = new AtomicInteger();
    TestSubscriber<Object> ts = new TestSubscriber<>();
    StreamAggregator.aggregateGroupedStreams(stream).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        numGroups.incrementAndGet();
        // emit the aggregated latency distribution as JSON
        return commandGroup.map(data -> {
            return ((NumberList) data.get("latencyTotal")).toJson();
        });
    }).subscribe(ts);
    // single instance, scheduled with a 5ms delay
    stream.onNext(getCinematchCommandInstanceStream(12345, scheduler), 5);
    stream.onCompleted(100);
    scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    ts.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + ts.getOnErrorEvents());
    if (ts.getOnErrorEvents().size() > 0) {
        ts.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + ts.getOnNextEvents());
    assertEquals(0, ts.getOnErrorEvents().size());
    // we expect a single instance
    assertEquals(1, numGroups.get());
    // the expected deltas for rollingCountSuccess
    ts.assertReceivedOnNext(Arrays.asList("{\"0\":0,\"25\":0,\"50\":4,\"75\":11,\"90\":14,\"95\":17,\"99\":31,\"99.5\":43,\"100\":71}", "{\"0\":0,\"25\":0,\"50\":3,\"75\":12,\"90\":17,\"95\":24,\"99\":48,\"99.5\":363,\"100\":390}", "{\"0\":0,\"25\":0,\"50\":3,\"75\":12,\"90\":17,\"95\":24,\"99\":48,\"99.5\":363,\"100\":390}", "{\"0\":0,\"25\":0,\"50\":0,\"75\":0,\"90\":0,\"95\":0,\"99\":0,\"99.5\":0,\"100\":0}"));
}
/**
 * This looks for the latency values which look like this:
 *
 * {"0":0,"25":0,"50":4,"75":11,"90":14,"95":17,"99":31,"99.5":43,"100":71}
 * {"0":0,"25":0,"50":3,"75":12,"90":17,"95":24,"99":48,"99.5":363,"100":390}
 * {"0":0,"25":0,"50":3,"75":12,"90":17,"95":24,"99":48,"99.5":363,"100":390}
 *
 * The inner values need to be summed.
 *
 * With two instances the per-bucket values are summed across instances as
 * each instance's events arrive, then decrease as each instance completes.
 */
@Test
public void testArrayMapValue_TwoInstanceOneGroup() {
    TestScheduler scheduler = new TestScheduler();
    TestSubject<GroupedObservable<InstanceKey, Map<String, Object>>> stream = TestSubject.create(scheduler);
    AtomicInteger numGroups = new AtomicInteger();
    TestSubscriber<Object> ts = new TestSubscriber<>();
    StreamAggregator.aggregateGroupedStreams(stream).flatMap(commandGroup -> {
        System.out.println("======> Got group for command: " + commandGroup.getKey());
        numGroups.incrementAndGet();
        // emit the aggregated latency distribution as JSON
        return commandGroup.map(data -> {
            return ((NumberList) data.get("latencyTotal")).toJson();
        });
    }).subscribe(ts);
    // second instance staggered 5ms after the first
    stream.onNext(getCinematchCommandInstanceStream(12345, scheduler), 0);
    stream.onNext(getCinematchCommandInstanceStream(23456, scheduler), 5);
    stream.onCompleted(100);
    scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
    ts.awaitTerminalEvent();
    System.out.println("---------> OnErrorEvents: " + ts.getOnErrorEvents());
    if (ts.getOnErrorEvents().size() > 0) {
        ts.getOnErrorEvents().get(0).printStackTrace();
    }
    System.out.println("---------> OnNextEvents: " + ts.getOnNextEvents());
    assertEquals(0, ts.getOnErrorEvents().size());
    // we expect a single instance
    assertEquals(1, numGroups.get());
    // the expected deltas for rollingCountSuccess
    ts.assertReceivedOnNext(Arrays.asList(
            "{\"0\":0,\"25\":0,\"50\":4,\"75\":11,\"90\":14,\"95\":17,\"99\":31,\"99.5\":43,\"100\":71}",
            "{\"0\":0,\"25\":0,\"50\":8,\"75\":22,\"90\":28,\"95\":34,\"99\":62,\"99.5\":86,\"100\":142}", // 71 + 71 combination
            "{\"0\":0,\"25\":0,\"50\":7,\"75\":23,\"90\":31,\"95\":41,\"99\":79,\"99.5\":406,\"100\":461}", // 71 + 390 combination
            "{\"0\":0,\"25\":0,\"50\":6,\"75\":24,\"90\":34,\"95\":48,\"99\":96,\"99.5\":726,\"100\":780}", // 390 + 390 combination
            "{\"0\":0,\"25\":0,\"50\":6,\"75\":24,\"90\":34,\"95\":48,\"99\":96,\"99.5\":726,\"100\":780}", // 390 + 390 combination
            "{\"0\":0,\"25\":0,\"50\":3,\"75\":12,\"90\":17,\"95\":24,\"99\":48,\"99.5\":363,\"100\":390}", // 780 - 390
            "{\"0\":0,\"25\":0,\"50\":3,\"75\":12,\"90\":17,\"95\":24,\"99\":48,\"99.5\":363,\"100\":390}", // 780 - 390
            "{\"0\":0,\"25\":0,\"50\":0,\"75\":0,\"90\":0,\"95\":0,\"99\":0,\"99.5\":0,\"100\":0}"));
}
/** Cinematch stream for one instance using the default completion window. */
private GroupedObservable<InstanceKey, Map<String, Object>> getCinematchCommandInstanceStream(int instanceId, TestScheduler scheduler) {
    // 30ms max time before onComplete
    final int defaultOnCompleteTimeMs = 30;
    return getCinematchCommandInstanceStream(instanceId, scheduler, defaultOnCompleteTimeMs);
}
// `rollingCountSuccess` of => 327, 370, 358
/** Cinematch stream for one instance; {@code time} is when its stream onCompletes. */
private GroupedObservable<InstanceKey, Map<String, Object>> getCinematchCommandInstanceStream(int instanceId, TestScheduler scheduler, int time) {
    final GroupedObservable<InstanceKey, Map<String, Object>> instanceStream =
            HystrixStreamSource.getHystrixStreamFromFileEachLineScheduledEvery10Milliseconds(HystrixStreamSource.STREAM_CINEMATCH, instanceId, scheduler, time);
    return instanceStream;
}
// `rollingCountSuccess` of => 617, 614, 585
/** Subscriber stream for one instance; completes after 30ms. */
private GroupedObservable<InstanceKey, Map<String, Object>> getSubscriberCommandInstanceStream(int instanceId, TestScheduler scheduler) {
    final GroupedObservable<InstanceKey, Map<String, Object>> instanceStream =
            HystrixStreamSource.getHystrixStreamFromFileEachLineScheduledEvery10Milliseconds(HystrixStreamSource.STREAM_SUBSCRIBER, instanceId, scheduler, 30);
    return instanceStream;
}
// `rollingCountSuccess` of => 327, 617, 370, 614, 358, 585
/** Combined subscriber+cinematch stream for one instance; completes after 60ms. */
private GroupedObservable<InstanceKey, Map<String, Object>> getSubscriberAndCinematchCommandInstanceStream(int instanceId, TestScheduler scheduler) {
    final GroupedObservable<InstanceKey, Map<String, Object>> instanceStream =
            HystrixStreamSource.getHystrixStreamFromFileEachLineScheduledEvery10Milliseconds(HystrixStreamSource.STREAM_SUBSCRIBER_CINEMATCH_1, instanceId, scheduler, 60);
    return instanceStream;
}
/** Fresh insertion-ordered map pre-populated with the fixed test instance key. */
private Map<String, Object> newMapInitializedWithInstanceKey() {
    final Map<String, Object> map = new LinkedHashMap<>();
    map.put("InstanceKey", InstanceKey.create(98765));
    return map;
}
/**
 * A delta computed against an empty previous map equals the current values,
 * and summing that delta onto a fresh state reproduces them.
 * Long literals use uppercase L (lowercase l is easily misread as 1).
 */
@Test
public void testDeltaNumberNew() {
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    mCurrent.put("a", 1);
    mCurrent.put("b", 2);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(Collections.emptyMap(), mCurrent);
    // deltas are widened to long by the aggregator
    assertEquals(1L, d.get("a"));
    assertEquals(2L, d.get("b"));
    Map<String, Object> s = StreamAggregator.sumOfDelta(newMapInitializedWithInstanceKey(), d);
    assertEquals(1L, s.get("a"));
    assertEquals(2L, s.get("b"));
}
/**
 * Deltas between two populated maps may be positive or negative;
 * summing the delta onto the previous state yields the current values.
 */
@Test
public void testDeltaNumber1() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    mPrevious.put("a", 1);
    mPrevious.put("b", 2);
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    mCurrent.put("a", 3);
    mCurrent.put("b", 1);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, mCurrent);
    assertEquals(2L, d.get("a"));
    assertEquals(-1L, d.get("b"));
    Map<String, Object> s = StreamAggregator.sumOfDelta(mPrevious, d);
    assertEquals(3L, s.get("a"));
    assertEquals(1L, s.get("b"));
}
/**
 * Both fields decreasing produces negative deltas; summing restores the
 * current values.
 */
@Test
public void testDeltaNumber2() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    mPrevious.put("a", 4);
    mPrevious.put("b", 3);
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    mCurrent.put("a", 2);
    mCurrent.put("b", 2);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, mCurrent);
    assertEquals(-2L, d.get("a"));
    assertEquals(-1L, d.get("b"));
    Map<String, Object> s = StreamAggregator.sumOfDelta(mPrevious, d);
    assertEquals(2L, s.get("a"));
    assertEquals(2L, s.get("b"));
}
/**
 * A current map with no values yields deltas that fully negate the previous
 * values, driving the summed state to zero.
 */
@Test
public void testDeltaNumberRemove() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    mPrevious.put("a", 4);
    mPrevious.put("b", 3);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, Collections.emptyMap());
    assertEquals(-4L, d.get("a"));
    assertEquals(-3L, d.get("b"));
    Map<String, Object> s = StreamAggregator.sumOfDelta(mPrevious, d);
    assertEquals(0L, s.get("a"));
    assertEquals(0L, s.get("b"));
}
/**
 * Same as testDeltaNumberRemove, except the "empty" current map still
 * carries the InstanceKey entry — removal must behave identically.
 */
@Test
public void testDeltaNumberRemoveWithEmptyMapHavingInstanceKey() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    mPrevious.put("a", 4);
    mPrevious.put("b", 3);
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, mCurrent);
    assertEquals(-4L, d.get("a"));
    assertEquals(-3L, d.get("b"));
    Map<String, Object> s = StreamAggregator.sumOfDelta(mPrevious, d);
    assertEquals(0L, s.get("a"));
    assertEquals(0L, s.get("b"));
}
/**
 * Boolean values are represented as String[] deltas and aggregate into an
 * AggregateString counting occurrences per distinct value.
 */
@Test
public void testDeltaBooleanNew() {
    Map<String, Object> current = newMapInitializedWithInstanceKey();
    current.put("a", Boolean.TRUE);
    current.put("b", Boolean.FALSE);
    Map<String, Object> delta = StreamAggregator.previousAndCurrentToDelta(Collections.emptyMap(), current);
    // a new value shows up as a single-element delta array
    assertArrayEquals(new String[] { "true" }, (String[]) delta.get("a"));
    assertArrayEquals(new String[] { "false" }, (String[]) delta.get("b"));
    Map<String, Object> summed = StreamAggregator.sumOfDelta(new LinkedHashMap<>(), delta);
    assertEquals("AggregateString => {\"true\":1}", summed.get("a").toString());
    assertEquals("AggregateString => {\"false\":1}", summed.get("b").toString());
}
/**
 * A boolean transition produces a two-element delta array
 * [previousValue, currentValue]; applying it to existing state replaces the
 * old entry for that instance rather than adding a second one.
 */
@Test
public void testDeltaBoolean1() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    mPrevious.put("a", Boolean.TRUE);
    mPrevious.put("b", Boolean.FALSE);
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    mCurrent.put("a", Boolean.TRUE);
    mCurrent.put("b", Boolean.TRUE);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, mCurrent);
    String[] as = (String[]) d.get("a");
    String[] bs = (String[]) d.get("b");
    // delta array is [previous, current]
    assertArrayEquals(new String[] { "true", "true" }, as);
    assertArrayEquals(new String[] { "false", "true" }, bs);
    Map<String, Object> state = newMapInitializedWithInstanceKey();
    state.put("a", AggregateString.create("true", InstanceKey.create(98765)));
    state.put("b", AggregateString.create("false", InstanceKey.create(98765)));
    Map<String, Object> s = StreamAggregator.sumOfDelta(state, d);
    assertEquals("AggregateString => {\"true\":1}", s.get("a").toString()); // same instanceId so count == 1
    // b flipped false -> true: the instance's old "false" entry is replaced
    assertEquals("AggregateString => {\"true\":1}", s.get("b").toString());
}
/**
 * Removing a boolean value produces a delta of [previousValue, null];
 * applying it removes the instance's entry, leaving an empty aggregate.
 */
@Test
public void testDeltaBooleanRemove() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    mPrevious.put("a", Boolean.TRUE);
    mPrevious.put("b", Boolean.FALSE);
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, mCurrent);
    String[] as = (String[]) d.get("a");
    String[] bs = (String[]) d.get("b");
    // null current value marks the removal
    assertArrayEquals(new String[] { "true", null }, as);
    assertArrayEquals(new String[] { "false", null }, bs);
    Map<String, Object> state = newMapInitializedWithInstanceKey();
    state.put("a", AggregateString.create("true", InstanceKey.create(98765)));
    state.put("b", AggregateString.create("false", InstanceKey.create(98765)));
    Map<String, Object> s = StreamAggregator.sumOfDelta(state, d);
    assertEquals("AggregateString => {}", s.get("a").toString()); // value removed for this instance, so the aggregate is empty
    assertEquals("AggregateString => {}", s.get("b").toString());
}
/**
 * A nested Map value (latency-style bucket map) becomes a NumberList delta;
 * summing a fresh delta onto empty state reproduces the bucket values.
 */
@Test
public void testDeltaNumberListNew() {
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    Map<String, Object> v = new HashMap<>();
    v.put("100", 99);
    mCurrent.put("a", v);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(Collections.emptyMap(), mCurrent);
    assertEquals(99L, ((NumberList) d.get("a")).get("100").longValue());
    Map<String, Object> s = StreamAggregator.sumOfDelta(newMapInitializedWithInstanceKey(), d);
    assertEquals(99L, ((NumberList) s.get("a")).get("100").longValue());
}
/**
 * A decreasing bucket value yields a negative per-bucket delta; summing it
 * onto the previous state gives the current bucket value.
 */
@Test
public void testDeltaNumberList1() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    Map<String, Object> v = new HashMap<>();
    v.put("100", 99);
    mPrevious.put("a", v);
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    Map<String, Object> v2 = new HashMap<>();
    v2.put("100", 97);
    mCurrent.put("a", v2);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, mCurrent);
    assertEquals(-2L, ((NumberList) d.get("a")).get("100").longValue());
    // build the aggregate state as "previous applied to empty", then apply the delta
    Map<String, Object> initial = StreamAggregator.previousAndCurrentToDelta(Collections.emptyMap(), mPrevious);
    Map<String, Object> s = StreamAggregator.sumOfDelta(initial, d);
    assertEquals(97L, ((NumberList) s.get("a")).get("100").longValue());
}
/**
 * An increasing bucket value yields a positive per-bucket delta; summing it
 * onto the previous state gives the current bucket value.
 */
@Test
public void testDeltaNumberList2() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    Map<String, Number> v = new HashMap<>();
    v.put("100", 90);
    mPrevious.put("a", v);
    Map<String, Object> mCurrent = newMapInitializedWithInstanceKey();
    Map<String, Number> v2 = new HashMap<>();
    v2.put("100", 99);
    mCurrent.put("a", v2);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, mCurrent);
    assertEquals(9L, ((NumberList) d.get("a")).get("100").longValue());
    Map<String, Object> initial = StreamAggregator.previousAndCurrentToDelta(Collections.emptyMap(), mPrevious);
    Map<String, Object> s = StreamAggregator.sumOfDelta(initial, d);
    assertEquals(99L, ((NumberList) s.get("a")).get("100").longValue());
}
/**
 * Removing a bucketed value produces fully-negating per-bucket deltas,
 * driving every bucket in the summed state back to zero.
 */
@Test
public void testDeltaNumberListRemove() {
    Map<String, Object> mPrevious = newMapInitializedWithInstanceKey();
    Map<String, Number> v = new HashMap<>();
    v.put("100", 99);
    mPrevious.put("a", v);
    Map<String, Object> d = StreamAggregator.previousAndCurrentToDelta(mPrevious, Collections.emptyMap());
    System.out.println("d: " + d);
    assertEquals(-99L, ((NumberList) d.get("a")).get("100").longValue());
    Map<String, Object> initial = StreamAggregator.previousAndCurrentToDelta(Collections.emptyMap(), mPrevious);
    Map<String, Object> s = StreamAggregator.sumOfDelta(initial, d);
    assertEquals(0L, ((NumberList) s.get("a")).get("100").longValue());
}
}
|
package nl.knaw.huygens.alexandria.data_model;
/*
* #%L
* alexandria-markup
* =======
* Copyright (C) 2016 - 2018 HuC DI (KNAW)
* =======
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
* Created by bramb on 3-3-2017.
*/
public class IndexPoint {
private final int textNodeIndex;
private final int markupIndex;
public IndexPoint(int textNodeIndex, int markupIndex) {
this.textNodeIndex = textNodeIndex;
this.markupIndex = markupIndex;
}
public int getTextNodeIndex() {
return textNodeIndex;
}
public int getMarkupIndex() {
return markupIndex;
}
@Override
public String toString() {
return "(" + textNodeIndex + "," + markupIndex + ")";
}
@Override
public boolean equals(Object obj) {
if (obj instanceof IndexPoint) {
IndexPoint other = (IndexPoint) obj;
return (other.markupIndex == markupIndex) && (other.textNodeIndex == textNodeIndex);
}
return false;
}
}
|
package com.kabal.qa.quickstart.database;
import android.support.test.InstrumentationRegistry;
import android.support.test.espresso.NoMatchingViewException;
import android.support.test.espresso.ViewInteraction;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.test.suitebuilder.annotation.LargeTest;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Random;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.Espresso.openActionBarOverflowOrOptionsMenu;
import static android.support.test.espresso.action.ViewActions.click;
import static android.support.test.espresso.action.ViewActions.replaceText;
import static android.support.test.espresso.assertion.ViewAssertions.matches;
import static android.support.test.espresso.matcher.ViewMatchers.isDisplayed;
import static android.support.test.espresso.matcher.ViewMatchers.withId;
import static android.support.test.espresso.matcher.ViewMatchers.withParent;
import static android.support.test.espresso.matcher.ViewMatchers.withText;
import static org.hamcrest.CoreMatchers.allOf;
@LargeTest
@RunWith(AndroidJUnit4.class)
public class NewPostTest {
@Rule
public ActivityTestRule<GoogleSignInActivity> mActivityTestRule = new ActivityTestRule<>(GoogleSignInActivity.class);
@Test
public void newPostTest() {
// Generate user and post content
String username = "user" + randomDigits();
String email = username + "@example.com";
String password = "testuser";
String postTitle = "Title " + randomDigits();
String postContent = "Content " + randomDigits();
// Go back to the sign in screen if we're logged in from a previous test
logOutIfPossible();
// Select email field
ViewInteraction appCompatEditText = onView(
allOf(withId(R.id.fab_new_post),
withParent(withId(R.id.layout_email_password)),
isDisplayed()));
appCompatEditText.perform(click());
// Enter email address
ViewInteraction appCompatEditText2 = onView(
allOf(withId(R.id.fab_new_post),
withParent(withId(R.id.layout_email_password)),
isDisplayed()));
appCompatEditText2.perform(replaceText(email));
// Enter password
ViewInteraction appCompatEditText3 = onView(
allOf(withId(R.id.fab_new_post),
withParent(withId(R.id.layout_email_password)),
isDisplayed()));
appCompatEditText3.perform(replaceText(password));
// Click sign up
ViewInteraction appCompatButton = onView(
allOf(withId(R.id.fab_new_post), withText(R.string.sign_up),
withParent(withId(R.id.fab_new_post)),
isDisplayed()));
appCompatButton.perform(click());
// Click new post button
ViewInteraction floatingActionButton = onView(
allOf(withId(R.id.fab_new_post), isDisplayed()));
floatingActionButton.perform(click());
// Enter post title
ViewInteraction appCompatEditText4 = onView(
allOf(withId(R.id.field_title), isDisplayed()));
appCompatEditText4.perform(replaceText(postTitle));
// Enter post content
ViewInteraction appCompatEditText5 = onView(
allOf(withId(R.id.field_body), isDisplayed()));
appCompatEditText5.perform(replaceText(postContent));
// Click submit button
ViewInteraction floatingActionButton2 = onView(
allOf(withId(R.id.fab_submit_post), isDisplayed()));
floatingActionButton2.perform(click());
// Navigate to "My Posts"
ViewInteraction appCompatTextView = onView(
allOf(withText(R.string.heading_my_posts), isDisplayed()));
appCompatTextView.perform(click());
// Check that the title is correct
ViewInteraction textView = onView(
allOf(withId(R.id.post_title), withText(postTitle), isDisplayed()));
textView.check(matches(withText(postTitle)));
// Check that the content is correct
ViewInteraction textView2 = onView(
allOf(withId(R.id.post_body), withText(postContent), isDisplayed()));
textView2.check(matches(withText(postContent)));
// Check that it has zero stars
ViewInteraction textView3 = onView(
allOf(withId(R.id.post_num_stars), withText("0"),
withParent(withId(R.id.star_layout)),
isDisplayed()));
textView3.check(matches(withText("0")));
}
/**
 * Click the 'Log Out' overflow menu if it exists (which would mean we're signed in).
 *
 * Best-effort only: if the menu item cannot be found (i.e. we are not signed in),
 * the resulting NoMatchingViewException is deliberately swallowed.
 */
private void logOutIfPossible() {
    try {
        openActionBarOverflowOrOptionsMenu(InstrumentationRegistry.getTargetContext());
        onView(withText(R.string.menu_logout)).perform(click());
    } catch (NoMatchingViewException ignored) {
        // Ignored on purpose: we only want to log out opportunistically; the
        // exception simply means there was nothing to log out of.
    }
}
/**
 * Generate a random string of digits.
 *
 * Note: nextInt(99999999) yields a value in [0, 99999998], so the returned
 * string is 1 to 8 characters long and is NOT zero-padded.
 */
private String randomDigits() {
    // ThreadLocalRandom avoids allocating and re-seeding a new Random per call.
    return String.valueOf(java.util.concurrent.ThreadLocalRandom.current().nextInt(99999999));
}
}
|
package stacks_and_queues.java;
/**
 * Checked exception raised on a stack overflow condition; always carries the
 * fixed message {@code "Stack overflow."}.
 */
class StackOverFlowException extends Exception {

    // Exception implements Serializable; declare an explicit version id so
    // serialization compatibility is not tied to the compiler-generated one.
    private static final long serialVersionUID = 1L;

    /** Creates the exception with the default {@code "Stack overflow."} message. */
    StackOverFlowException() {
        super("Stack overflow.");
    }
}
|
// $ANTLR : "XQuery.g" -> "XQueryParser.java"$
/**
* Grammar definition for the September 2005 XQuery specification.
*/
package org.xqdoc.xquery.parser.sep2005;
import antlr.debug.misc.*;
import java.io.StringReader;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Iterator;
import java.util.Stack;
import org.xqdoc.conversion.XQDocContext;
import antlr.TokenBuffer;
import antlr.TokenStreamException;
import antlr.TokenStreamIOException;
import antlr.ANTLRException;
import antlr.LLkParser;
import antlr.Token;
import antlr.TokenStream;
import antlr.RecognitionException;
import antlr.NoViableAltException;
import antlr.MismatchedTokenException;
import antlr.SemanticException;
import antlr.ParserSharedInputState;
import antlr.collections.impl.BitSet;
public class XQueryParser extends antlr.LLkParser implements XQueryParserTokenTypes
, org.xqdoc.conversion.XQDocParser {
// NOTE(review): neither stack is read or written in this part of the file —
// presumably used by rules defined further down; confirm before changing.
protected Stack globalStack= new Stack();
protected Stack elementStack= new Stack();
// Lexer reference kept so the parser can drain lexer.whiteSpaceBag while
// reconstructing verbatim source text (see the match(int) override).
protected XQueryLexer lexer;
// Sink that accumulates the xqDoc documentation being built while parsing.
protected XQDocContext context;
// When one of these flags is set, match(int) appends every consumed token's
// text to functionBody / functionSignature respectively.
boolean buildFuncBodyFlag = false;
boolean buildFuncSigFlag = false;
// Guards against appending the same token twice (keys are "line-column").
HashSet tokenSet = new HashSet();
// Verbatim text of the function body / signature currently being captured.
StringBuffer functionBody = new StringBuffer();
StringBuffer functionSignature = new StringBuffer();
/**
 * Creates a parser over the given lexer, remembering the lexer itself so its
 * whitespace buffer can be consulted when capturing source text.
 */
public XQueryParser(XQueryLexer lexer) {
this((TokenStream)lexer);
this.lexer= lexer;
}
/**
 * Consumes the next token, first (when capture is enabled) appending the
 * token's text — plus any whitespace the lexer banked in whiteSpaceBag —
 * to the buffer for the function body or signature being reconstructed.
 *
 * @param t expected token type, forwarded to the generated parser's match
 * @throws MismatchedTokenException if the lookahead is not of type {@code t}
 * @throws TokenStreamException on token stream failure
 */
public void match (int t) throws MismatchedTokenException, TokenStreamException {
    if (buildFuncBodyFlag) {
        appendCurrentToken(functionBody);
    } else if (buildFuncSigFlag) {
        appendCurrentToken(functionSignature);
    }
    super.match(t);
}

/**
 * Appends the current lookahead token (and any pending whitespace) to
 * {@code target}, at most once per source position.
 */
private void appendCurrentToken(StringBuffer target) throws TokenStreamException {
    // "line-column" identifies a token position; syntactic predicates can
    // replay tokens, so positions already captured are skipped.
    String key = LT(1).getLine() + "-" + LT(1).getColumn();
    if (!tokenSet.contains(key)) {
        tokenSet.add(key);
        if (lexer.whiteSpaceBag.length() > 0) {
            target.append(lexer.whiteSpaceBag);
            lexer.whiteSpaceBag = new StringBuffer();
        }
        target.append(LT(1).getText());
    }
}
/** Injects the xqDoc context that accumulates the documentation output. */
public void setContext(XQDocContext context) {
this.context = context;
}
/** Generated constructor: parse from a token buffer with lookahead depth k. */
protected XQueryParser(TokenBuffer tokenBuf, int k) {
super(tokenBuf,k);
tokenNames = _tokenNames;
}
/** Generated constructor: token-buffer input with LL(1) lookahead. */
public XQueryParser(TokenBuffer tokenBuf) {
this(tokenBuf,1);
}
/** Generated constructor: parse from a token stream with lookahead depth k. */
protected XQueryParser(TokenStream lexer, int k) {
super(lexer,k);
tokenNames = _tokenNames;
}
/** Generated constructor: token-stream input with LL(1) lookahead. */
public XQueryParser(TokenStream lexer) {
this(lexer,1);
}
/** Generated constructor: resume from shared parser input state. */
public XQueryParser(ParserSharedInputState state) {
super(state,1);
tokenNames = _tokenNames;
}
/**
 * Entry rule: parses an optional module followed by end-of-file.
 * Generated by ANTLR from XQuery.g — the long case list is simply the
 * FIRST set of the module rule; an empty input (EOF) is also accepted.
 */
public final void xpath() throws RecognitionException, TokenStreamException {
{
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case XQDOC_COMMENT:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
module();
break;
}
case EOF:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(Token.EOF_TYPE);
}
/**
 * Parses one module: an optional version declaration, then either a library
 * module (predicted by an optional xqdoc comment followed by
 * "module namespace") or a main module. Uses ANTLR mark/rewind guessing
 * (inputState.guessing) for the syntactic predicates.
 */
public final void module() throws RecognitionException, TokenStreamException {
{
// Predicate: does the input start with "xquery version"?
boolean synPredMatched6 = false;
if (((LA(1)==LITERAL_xquery))) {
int _m6 = mark();
synPredMatched6 = true;
inputState.guessing++;
try {
{
match(LITERAL_xquery);
match(LITERAL_version);
}
}
catch (RecognitionException pe) {
synPredMatched6 = false;
}
rewind(_m6);
inputState.guessing--;
}
if ( synPredMatched6 ) {
versionDecl();
}
else if ((_tokenSet_0.member(LA(1)))) {
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
{
// Predicate: optional xqdoc comment followed by "module namespace"
// means this is a library module; otherwise parse a main module.
boolean synPredMatched11 = false;
if (((LA(1)==LITERAL_module||LA(1)==XQDOC_COMMENT))) {
int _m11 = mark();
synPredMatched11 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_module:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
{
match(LITERAL_module);
match(LITERAL_namespace);
}
}
}
catch (RecognitionException pe) {
synPredMatched11 = false;
}
rewind(_m11);
inputState.guessing--;
}
if ( synPredMatched11 ) {
libraryModule();
}
else if ((_tokenSet_0.member(LA(1)))) {
mainModule();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses: xquery version "..." [encoding "..."] ;
 */
public final void versionDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_xquery);
match(LITERAL_version);
match(STRING_LITERAL);
{
switch ( LA(1)) {
case LITERAL_encoding:
{
match(LITERAL_encoding);
match(STRING_LITERAL);
break;
}
case SEMICOLON:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
separator();
}
/**
 * Consumes one XQDOC_COMMENT token and (outside of predicate guessing)
 * hands its raw text to the xqDoc context.
 */
public final void xqdocComment() throws RecognitionException, TokenStreamException {
Token  x = null;
x = LT(1);
match(XQDOC_COMMENT);
if ( inputState.guessing==0 ) {
context.setXQDocBuffer(x.getText());
}
}
/**
 * Parses a library module: optional xqdoc comment, module declaration, prolog.
 */
public final void libraryModule() throws RecognitionException, TokenStreamException {
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_module:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
moduleDecl();
prolog();
}
/**
 * Parses a main module: optional xqdoc comment, prolog, query body.
 * Outside of guessing, tells the context to start a main-module section.
 */
public final void mainModule() throws RecognitionException, TokenStreamException {
{
if ((LA(1)==XQDOC_COMMENT)) {
xqdocComment();
}
else if ((_tokenSet_0.member(LA(1)))) {
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
if ( inputState.guessing==0 ) {
context.buildMainModuleSection();
}
prolog();
queryBody();
}
/** Matches the ";" that terminates prolog declarations. */
public final void separator() throws RecognitionException, TokenStreamException {
match(SEMICOLON);
}
/**
 * Parses the query prolog: zero or more declarations/imports, each followed
 * by a separator. Each alternative is selected via an ANTLR syntactic
 * predicate (mark / guess-parse / rewind); the deeply nested else-chains
 * below are generated code and their order is significant — do not reorder.
 */
public final void prolog() throws RecognitionException, TokenStreamException {
{
_loop60:
do {
if ((LA(1)==LITERAL_declare||LA(1)==LITERAL_import||LA(1)==XQDOC_COMMENT)) {
{
// alt: declare boundary-space (token 12)
boolean synPredMatched23 = false;
if (((LA(1)==LITERAL_declare))) {
int _m23 = mark();
synPredMatched23 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(12);
}
}
catch (RecognitionException pe) {
synPredMatched23 = false;
}
rewind(_m23);
inputState.guessing--;
}
if ( synPredMatched23 ) {
boundarySpaceDecl();
}
else {
// alt: declare default collation
boolean synPredMatched25 = false;
if (((LA(1)==LITERAL_declare))) {
int _m25 = mark();
synPredMatched25 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_default);
match(LITERAL_collation);
}
}
catch (RecognitionException pe) {
synPredMatched25 = false;
}
rewind(_m25);
inputState.guessing--;
}
if ( synPredMatched25 ) {
defaultCollationDecl();
}
else {
// alt: declare base-uri (token 15)
boolean synPredMatched27 = false;
if (((LA(1)==LITERAL_declare))) {
int _m27 = mark();
synPredMatched27 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(15);
}
}
catch (RecognitionException pe) {
synPredMatched27 = false;
}
rewind(_m27);
inputState.guessing--;
}
if ( synPredMatched27 ) {
baseUriDecl();
}
else {
// alt: declare construction
boolean synPredMatched29 = false;
if (((LA(1)==LITERAL_declare))) {
int _m29 = mark();
synPredMatched29 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_construction);
}
}
catch (RecognitionException pe) {
synPredMatched29 = false;
}
rewind(_m29);
inputState.guessing--;
}
if ( synPredMatched29 ) {
constructionDecl();
}
else {
// alt: declare ordering
boolean synPredMatched31 = false;
if (((LA(1)==LITERAL_declare))) {
int _m31 = mark();
synPredMatched31 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_ordering);
}
}
catch (RecognitionException pe) {
synPredMatched31 = false;
}
rewind(_m31);
inputState.guessing--;
}
if ( synPredMatched31 ) {
orderingModeDecl();
}
else {
// alt: declare default order
boolean synPredMatched33 = false;
if (((LA(1)==LITERAL_declare))) {
int _m33 = mark();
synPredMatched33 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_default);
match(LITERAL_order);
}
}
catch (RecognitionException pe) {
synPredMatched33 = false;
}
rewind(_m33);
inputState.guessing--;
}
if ( synPredMatched33 ) {
emptyOrderingDecl();
}
else {
// alt: declare copy-namespaces (token 19)
boolean synPredMatched35 = false;
if (((LA(1)==LITERAL_declare))) {
int _m35 = mark();
synPredMatched35 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(19);
}
}
catch (RecognitionException pe) {
synPredMatched35 = false;
}
rewind(_m35);
inputState.guessing--;
}
if ( synPredMatched35 ) {
copyNamespacesDecl();
}
else {
// alt: import schema
boolean synPredMatched37 = false;
if (((LA(1)==LITERAL_import))) {
int _m37 = mark();
synPredMatched37 = true;
inputState.guessing++;
try {
{
match(LITERAL_import);
match(LITERAL_schema);
}
}
catch (RecognitionException pe) {
synPredMatched37 = false;
}
rewind(_m37);
inputState.guessing--;
}
if ( synPredMatched37 ) {
schemaImport();
}
else {
// alt: import module (no leading xqdoc comment)
boolean synPredMatched39 = false;
if (((LA(1)==LITERAL_import||LA(1)==XQDOC_COMMENT))) {
int _m39 = mark();
synPredMatched39 = true;
inputState.guessing++;
try {
{
match(LITERAL_import);
match(LITERAL_module);
}
}
catch (RecognitionException pe) {
synPredMatched39 = false;
}
rewind(_m39);
inputState.guessing--;
}
if ( synPredMatched39 ) {
moduleImport();
}
else {
// alt: [xqdoc comment] import module
boolean synPredMatched42 = false;
if (((LA(1)==LITERAL_import||LA(1)==XQDOC_COMMENT))) {
int _m42 = mark();
synPredMatched42 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_import:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_import);
match(LITERAL_module);
}
}
catch (RecognitionException pe) {
synPredMatched42 = false;
}
rewind(_m42);
inputState.guessing--;
}
if ( synPredMatched42 ) {
moduleImport();
}
else {
// alt: declare namespace
boolean synPredMatched44 = false;
if (((LA(1)==LITERAL_declare))) {
int _m44 = mark();
synPredMatched44 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_namespace);
}
}
catch (RecognitionException pe) {
synPredMatched44 = false;
}
rewind(_m44);
inputState.guessing--;
}
if ( synPredMatched44 ) {
namespaceDecl();
}
else {
// alt: declare default element|function (old-style setter)
boolean synPredMatched47 = false;
if (((LA(1)==LITERAL_declare))) {
int _m47 = mark();
synPredMatched47 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_default);
{
switch ( LA(1)) {
case LITERAL_element:
{
match(LITERAL_element);
break;
}
case LITERAL_function:
{
match(LITERAL_function);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
}
catch (RecognitionException pe) {
synPredMatched47 = false;
}
rewind(_m47);
inputState.guessing--;
}
if ( synPredMatched47 ) {
setterOld();
}
else {
// alt: declare variable (no leading xqdoc comment)
boolean synPredMatched49 = false;
if (((LA(1)==LITERAL_declare||LA(1)==XQDOC_COMMENT))) {
int _m49 = mark();
synPredMatched49 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_variable);
}
}
catch (RecognitionException pe) {
synPredMatched49 = false;
}
rewind(_m49);
inputState.guessing--;
}
if ( synPredMatched49 ) {
varDecl();
}
else {
// alt: [xqdoc comment] declare variable
boolean synPredMatched52 = false;
if (((LA(1)==LITERAL_declare||LA(1)==XQDOC_COMMENT))) {
int _m52 = mark();
synPredMatched52 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_declare:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_declare);
match(LITERAL_variable);
}
}
catch (RecognitionException pe) {
synPredMatched52 = false;
}
rewind(_m52);
inputState.guessing--;
}
if ( synPredMatched52 ) {
varDecl();
}
else {
// alt: declare function (no leading xqdoc comment)
boolean synPredMatched54 = false;
if (((LA(1)==LITERAL_declare||LA(1)==XQDOC_COMMENT))) {
int _m54 = mark();
synPredMatched54 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_function);
}
}
catch (RecognitionException pe) {
synPredMatched54 = false;
}
rewind(_m54);
inputState.guessing--;
}
if ( synPredMatched54 ) {
functionDecl();
}
else {
// alt: [xqdoc comment] declare function
boolean synPredMatched57 = false;
if (((LA(1)==LITERAL_declare||LA(1)==XQDOC_COMMENT))) {
int _m57 = mark();
synPredMatched57 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_declare:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_declare);
match(LITERAL_function);
}
}
catch (RecognitionException pe) {
synPredMatched57 = false;
}
rewind(_m57);
inputState.guessing--;
}
if ( synPredMatched57 ) {
functionDecl();
}
else {
// alt: declare option
boolean synPredMatched59 = false;
if (((LA(1)==LITERAL_declare))) {
int _m59 = mark();
synPredMatched59 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_option);
}
}
catch (RecognitionException pe) {
synPredMatched59 = false;
}
rewind(_m59);
inputState.guessing--;
}
if ( synPredMatched59 ) {
optionDecl();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}}}}}}}}}}}}}}}
}
separator();
}
else {
break _loop60;
}
} while (true);
}
}
/**
 * Parses the main-module query body while capturing its verbatim source via
 * the match(int) override, then records it in the context as a pseudo
 * function named "xqDoc-main" in the "local" namespace.
 */
public final void queryBody() throws RecognitionException, TokenStreamException {
if ( inputState.guessing==0 ) {
buildFuncBodyFlag=true;
lexer.whiteSpaceBag = new StringBuffer();
}
expr();
if ( inputState.guessing==0 ) {
buildFuncBodyFlag=false;
context.setFunctionName("local", "xqDoc-main");
context.setFunctionSignature(null);
context.setFunctionBody(functionBody.toString());
context.buildFunctionSection();
functionBody = new StringBuffer();
}
}
/**
 * Parses: module namespace prefix = "uri" ;
 * Registers the prefix/URI pair and starts a library-module section.
 */
public final void moduleDecl() throws RecognitionException, TokenStreamException {
String prefix = null;
String uri = null;
match(LITERAL_module);
match(LITERAL_namespace);
prefix=ncnameOrKeyword();
match(EQ);
uri=strippedStringLiteral();
separator();
if ( inputState.guessing==0 ) {
context.buildLibraryModuleSection(uri);
context.addPrefixAndURI(prefix, uri);
}
}
/**
 * Returns the text of an NCNAME, or (because XQuery keywords are not
 * reserved as names) delegates to reservedKeywords() for any of the keyword
 * tokens listed below.
 *
 * @return the name text, or null if still inside predicate guessing
 */
public final String ncnameOrKeyword() throws RecognitionException, TokenStreamException {
String name;
Token  n1 = null;
name= null;
switch ( LA(1)) {
case NCNAME:
{
n1 = LT(1);
match(NCNAME);
if ( inputState.guessing==0 ) {
name= n1.getText();
}
break;
}
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case LITERAL_lax:
case LITERAL_strict:
{
name=reservedKeywords();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
return name;
}
/**
 * Matches a STRING_LITERAL token and returns its text with the surrounding
 * quote characters removed. Literals of length two or less (i.e. nothing but
 * the quotes) yield the empty string; during predicate guessing the result
 * is null.
 */
public final String strippedStringLiteral() throws RecognitionException, TokenStreamException {
    String strippedLiteral = null;
    Token literal = LT(1);
    match(STRING_LITERAL);
    if ( inputState.guessing==0 ) {
        String raw = literal.getText();
        // Drop the first and last character (the quotes).
        strippedLiteral = (raw.length() <= 2) ? "" : raw.substring(1, raw.length() - 1);
    }
    return strippedLiteral;
}
/** Parses: declare boundary-space (preserve | strip). Token 12 is the "boundary-space" keyword. */
public final void boundarySpaceDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(12);
{
switch ( LA(1)) {
case LITERAL_preserve:
{
match(LITERAL_preserve);
break;
}
case LITERAL_strip:
{
match(LITERAL_strip);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/** Parses: declare default collation "uri". */
public final void defaultCollationDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(LITERAL_default);
match(LITERAL_collation);
match(STRING_LITERAL);
}
/** Parses: declare base-uri "uri". Token 15 is the "base-uri" keyword. */
public final void baseUriDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(15);
match(STRING_LITERAL);
}
/** Parses: declare construction (preserve | strip). */
public final void constructionDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(LITERAL_construction);
{
switch ( LA(1)) {
case LITERAL_preserve:
{
match(LITERAL_preserve);
break;
}
case LITERAL_strip:
{
match(LITERAL_strip);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/** Parses: declare ordering (ordered | unordered). */
public final void orderingModeDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(LITERAL_ordering);
{
switch ( LA(1)) {
case LITERAL_ordered:
{
match(LITERAL_ordered);
break;
}
case LITERAL_unordered:
{
match(LITERAL_unordered);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/** Parses: declare default order empty (greatest | least). */
public final void emptyOrderingDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(LITERAL_default);
match(LITERAL_order);
match(LITERAL_empty);
{
switch ( LA(1)) {
case LITERAL_greatest:
{
match(LITERAL_greatest);
break;
}
case LITERAL_least:
{
match(LITERAL_least);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses: declare copy-namespaces (preserve | no-preserve), (inherit | no-inherit).
 * Tokens 19, 34 and 37 are the "copy-namespaces", "no-preserve" and
 * "no-inherit" keywords respectively.
 */
public final void copyNamespacesDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(19);
{
switch ( LA(1)) {
case LITERAL_preserve:
{
match(LITERAL_preserve);
break;
}
case 34:
{
match(34);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(COMMA);
{
switch ( LA(1)) {
case LITERAL_inherit:
{
match(LITERAL_inherit);
break;
}
case 37:
{
match(37);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses: import schema [schemaPrefix] "uri" [at "loc" (, "loc")*]
 */
public final void schemaImport() throws RecognitionException, TokenStreamException {
match(LITERAL_import);
match(LITERAL_schema);
{
switch ( LA(1)) {
case LITERAL_namespace:
case LITERAL_default:
{
schemaPrefix();
break;
}
case STRING_LITERAL:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(STRING_LITERAL);
{
switch ( LA(1)) {
case LITERAL_at:
{
match(LITERAL_at);
match(STRING_LITERAL);
{
_loop101:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
match(STRING_LITERAL);
}
else {
break _loop101;
}
} while (true);
}
break;
}
case SEMICOLON:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses: [xqdocComment] import module [namespace prefix =] "uri"
 *         [at "loc" (, "loc")*]
 * Registers the prefix/URI pair (when a prefix was given) and records the
 * import in the xqDoc context.
 */
public final void moduleImport() throws RecognitionException, TokenStreamException {
String prefix = null;
String uri = null;
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_import:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_import);
match(LITERAL_module);
{
switch ( LA(1)) {
case LITERAL_namespace:
{
match(LITERAL_namespace);
prefix=ncnameOrKeyword();
match(EQ);
break;
}
case STRING_LITERAL:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
uri=strippedStringLiteral();
{
switch ( LA(1)) {
case LITERAL_at:
{
// "at" location hints are matched but intentionally discarded.
match(LITERAL_at);
match(STRING_LITERAL);
{
_loop110:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
match(STRING_LITERAL);
}
else {
break _loop110;
}
} while (true);
}
break;
}
case SEMICOLON:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
if ( inputState.guessing==0 ) {
if (prefix != null) {
context.addPrefixAndURI(prefix, uri);
}
context.buildImportSection(uri);
}
}
/**
 * Parses: declare namespace prefix = "uri" and registers the mapping.
 */
public final void namespaceDecl() throws RecognitionException, TokenStreamException {
String prefix = null;
String uri = null;
match(LITERAL_declare);
match(LITERAL_namespace);
prefix=ncnameOrKeyword();
match(EQ);
uri=strippedStringLiteral();
if ( inputState.guessing==0 ) {
context.addPrefixAndURI(prefix, uri);
}
}
/**
 * Parses the old-style default-namespace setters:
 *   declare default element namespace "uri"   (URI ignored here)
 *   declare default function namespace "uri"  (recorded in the context)
 */
public final void setterOld() throws RecognitionException, TokenStreamException {
String uri = null;
{
match(LITERAL_declare);
match(LITERAL_default);
{
switch ( LA(1)) {
case LITERAL_element:
{
{
match(LITERAL_element);
match(LITERAL_namespace);
match(STRING_LITERAL);
}
break;
}
case LITERAL_function:
{
{
match(LITERAL_function);
match(LITERAL_namespace);
uri=strippedStringLiteral();
}
if ( inputState.guessing==0 ) {
context.setDefaultModuleFunctionNamespace(uri);
}
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
}
/**
 * Parses: [xqdocComment] declare variable $QName [as type] (:= expr | external)
 * and records the variable's local name (prefix stripped) in the context.
 */
public final void varDecl() throws RecognitionException, TokenStreamException {
String varName= null;
String localName=null;
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_declare:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_declare);
match(LITERAL_variable);
match(DOLLAR);
varName=qName();
{
switch ( LA(1)) {
case LITERAL_as:
{
typeDeclaration();
break;
}
case COLON:
case LITERAL_external:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
{
switch ( LA(1)) {
case COLON:
{
// ":=" is lexed as COLON followed by EQ here.
{
match(COLON);
match(EQ);
exprSingle();
}
break;
}
case LITERAL_external:
{
match(LITERAL_external);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
if ( inputState.guessing==0 ) {
// Strip the namespace prefix (if any) to get the local name.
String[] tmp = varName.split(":", 2);
localName = varName;
if (tmp.length > 1) {
localName = tmp[1];
}
context.buildVariableSection(localName);
}
}
/**
 * Parses: [xqdocComment] declare function QName(paramList?) [as type]
 *         (functionBody | external)
 * While the parameter list and return type are parsed, buildFuncSigFlag makes
 * the match(int) override accumulate the verbatim signature text; the
 * signature and name are then handed to the xqDoc context, and the signature
 * is also seeded into functionBody so the captured body includes its header.
 */
public final void functionDecl() throws RecognitionException, TokenStreamException {
String name= null;
String localName = null;
String prefix = null;
{
switch ( LA(1)) {
case XQDOC_COMMENT:
{
xqdocComment();
break;
}
case LITERAL_declare:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_declare);
match(LITERAL_function);
name=qName();
if ( inputState.guessing==0 ) {
// Start capturing the signature text from "(" onwards.
buildFuncSigFlag = true;
lexer.whiteSpaceBag = new StringBuffer();
}
match(LPAREN);
{
switch ( LA(1)) {
case DOLLAR:
{
paramList();
break;
}
case RPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RPAREN);
{
switch ( LA(1)) {
case LITERAL_as:
{
returnType();
break;
}
case LITERAL_external:
case LCURLY:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
if ( inputState.guessing==0 ) {
buildFuncSigFlag = false;
// Split "prefix:localName"; prefix stays null for unqualified names.
String[] tmp = name.split(":", 2);
localName = name;
if (tmp.length > 1) {
prefix = tmp[0];
localName = tmp[1];
}
context.setFunctionName(prefix, localName);
context.setFunctionSignature("declare function " + localName + functionSignature.toString());
functionBody.append("declare function " + name + functionSignature);
functionSignature = new StringBuffer();
}
{
switch ( LA(1)) {
case LCURLY:
{
functionBody();
break;
}
case LITERAL_external:
{
match(LITERAL_external);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/** Parses: declare option QName "value". */
public final void optionDecl() throws RecognitionException, TokenStreamException {
match(LITERAL_declare);
match(LITERAL_option);
qName();
match(STRING_LITERAL);
}
/**
 * Parses any single prolog "setter" declaration followed by its separator.
 * Each alternative is disambiguated with a syntactic predicate, mirroring
 * the corresponding alternatives in prolog(); order is significant.
 */
public final void setter() throws RecognitionException, TokenStreamException {
{
{
// alt: declare boundary-space (token 12)
boolean synPredMatched65 = false;
if (((LA(1)==LITERAL_declare))) {
int _m65 = mark();
synPredMatched65 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(12);
}
}
catch (RecognitionException pe) {
synPredMatched65 = false;
}
rewind(_m65);
inputState.guessing--;
}
if ( synPredMatched65 ) {
boundarySpaceDecl();
}
else {
// alt: declare default collation
boolean synPredMatched67 = false;
if (((LA(1)==LITERAL_declare))) {
int _m67 = mark();
synPredMatched67 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_default);
match(LITERAL_collation);
}
}
catch (RecognitionException pe) {
synPredMatched67 = false;
}
rewind(_m67);
inputState.guessing--;
}
if ( synPredMatched67 ) {
defaultCollationDecl();
}
else {
// alt: declare base-uri (token 15)
boolean synPredMatched69 = false;
if (((LA(1)==LITERAL_declare))) {
int _m69 = mark();
synPredMatched69 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(15);
}
}
catch (RecognitionException pe) {
synPredMatched69 = false;
}
rewind(_m69);
inputState.guessing--;
}
if ( synPredMatched69 ) {
baseUriDecl();
}
else {
// alt: declare construction
boolean synPredMatched71 = false;
if (((LA(1)==LITERAL_declare))) {
int _m71 = mark();
synPredMatched71 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_construction);
}
}
catch (RecognitionException pe) {
synPredMatched71 = false;
}
rewind(_m71);
inputState.guessing--;
}
if ( synPredMatched71 ) {
constructionDecl();
}
else {
// alt: declare ordering
boolean synPredMatched73 = false;
if (((LA(1)==LITERAL_declare))) {
int _m73 = mark();
synPredMatched73 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_ordering);
}
}
catch (RecognitionException pe) {
synPredMatched73 = false;
}
rewind(_m73);
inputState.guessing--;
}
if ( synPredMatched73 ) {
orderingModeDecl();
}
else {
// alt: declare default order
boolean synPredMatched75 = false;
if (((LA(1)==LITERAL_declare))) {
int _m75 = mark();
synPredMatched75 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(LITERAL_default);
match(LITERAL_order);
}
}
catch (RecognitionException pe) {
synPredMatched75 = false;
}
rewind(_m75);
inputState.guessing--;
}
if ( synPredMatched75 ) {
emptyOrderingDecl();
}
else {
// alt: declare copy-namespaces (token 19)
boolean synPredMatched77 = false;
if (((LA(1)==LITERAL_declare))) {
int _m77 = mark();
synPredMatched77 = true;
inputState.guessing++;
try {
{
match(LITERAL_declare);
match(19);
}
}
catch (RecognitionException pe) {
synPredMatched77 = false;
}
rewind(_m77);
inputState.guessing--;
}
if ( synPredMatched77 ) {
copyNamespacesDecl();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}}}}}
}
separator();
}
}
/**
 * Parses a QName: either "prefix:local" (predicted by trying
 * ncnameOrKeyword COLON ncnameOrKeyword) or a single NCName/keyword.
 *
 * @return the full name text ("prefix:local" or just the local name),
 *         or null while inside predicate guessing
 */
public final String qName() throws RecognitionException, TokenStreamException {
String name;
name= null;
String name2;
boolean synPredMatched437 = false;
if (((_tokenSet_1.member(LA(1))))) {
int _m437 = mark();
synPredMatched437 = true;
inputState.guessing++;
try {
{
ncnameOrKeyword();
match(COLON);
ncnameOrKeyword();
}
}
catch (RecognitionException pe) {
synPredMatched437 = false;
}
rewind(_m437);
inputState.guessing--;
}
if ( synPredMatched437 ) {
name=ncnameOrKeyword();
match(COLON);
name2=ncnameOrKeyword();
if ( inputState.guessing==0 ) {
name= name + ':' + name2;
}
}
else if ((_tokenSet_1.member(LA(1)))) {
name=ncnameOrKeyword();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
return name;
}
/**
 * Parses the prefix part of an "import schema":
 *   namespace prefix =    |    default element namespace
 * The parsed prefix is discarded (matched for syntax only).
 */
public final void schemaPrefix() throws RecognitionException, TokenStreamException {
String tmpStr = null;
switch ( LA(1)) {
case LITERAL_namespace:
{
{
match(LITERAL_namespace);
tmpStr=ncnameOrKeyword();
match(EQ);
}
break;
}
case LITERAL_default:
{
{
match(LITERAL_default);
match(LITERAL_element);
match(LITERAL_namespace);
}
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/** Parses: as sequenceType. */
public final void typeDeclaration() throws RecognitionException, TokenStreamException {
match(LITERAL_as);
sequenceType();
}
/**
 * Parses one ExprSingle: a FLWOR, quantified, typeswitch, if, or try/catch
 * expression — each predicted by a short syntactic lookahead — falling back
 * to orExpr. Generated code; the predicate order is significant.
 */
public final void exprSingle() throws RecognitionException, TokenStreamException {
// alt: (for | let) $ ... => FLWOR expression
boolean synPredMatched138 = false;
if (((LA(1)==LITERAL_for||LA(1)==LITERAL_let))) {
int _m138 = mark();
synPredMatched138 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case LITERAL_for:
{
match(LITERAL_for);
break;
}
case LITERAL_let:
{
match(LITERAL_let);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(DOLLAR);
}
}
catch (RecognitionException pe) {
synPredMatched138 = false;
}
rewind(_m138);
inputState.guessing--;
}
if ( synPredMatched138 ) {
flworExpr();
}
else {
// alt: (some | every) $ ... => quantified expression
boolean synPredMatched141 = false;
if (((LA(1)==LITERAL_some||LA(1)==LITERAL_every))) {
int _m141 = mark();
synPredMatched141 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case LITERAL_some:
{
match(LITERAL_some);
break;
}
case LITERAL_every:
{
match(LITERAL_every);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(DOLLAR);
}
}
catch (RecognitionException pe) {
synPredMatched141 = false;
}
rewind(_m141);
inputState.guessing--;
}
if ( synPredMatched141 ) {
quantifiedExpr();
}
else {
// alt: typeswitch ( ...
boolean synPredMatched143 = false;
if (((LA(1)==LITERAL_typeswitch))) {
int _m143 = mark();
synPredMatched143 = true;
inputState.guessing++;
try {
{
match(LITERAL_typeswitch);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched143 = false;
}
rewind(_m143);
inputState.guessing--;
}
if ( synPredMatched143 ) {
typeswitchExpr();
}
else {
// alt: if ( ...
boolean synPredMatched145 = false;
if (((LA(1)==LITERAL_if))) {
int _m145 = mark();
synPredMatched145 = true;
inputState.guessing++;
try {
{
match(LITERAL_if);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched145 = false;
}
rewind(_m145);
inputState.guessing--;
}
if ( synPredMatched145 ) {
ifExpr();
}
else {
// alt: try { ...
boolean synPredMatched147 = false;
if (((LA(1)==LITERAL_try))) {
int _m147 = mark();
synPredMatched147 = true;
inputState.guessing++;
try {
{
match(LITERAL_try);
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched147 = false;
}
rewind(_m147);
inputState.guessing--;
}
if ( synPredMatched147 ) {
tryCatchExpr();
}
else if ((_tokenSet_2.member(LA(1)))) {
orExpr();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}}}
}
/** Parses a comma-separated list of one or more function parameters. */
public final void paramList() throws RecognitionException, TokenStreamException {
param();
{
_loop127:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
param();
}
else {
break _loop127;
}
} while (true);
}
}
/** Parses a function return type: "as" followed by a sequence type. */
public final void returnType() throws RecognitionException, TokenStreamException {
match(LITERAL_as);
sequenceType();
}
/**
 * Parses a function body: "{" expr "}".
 * When not speculatively parsing, it first arms lexer-side capture of the
 * body text (buildFuncBodyFlag, fresh whiteSpaceBag), and afterwards hands
 * the captured text to the context and resets the capture buffer.
 */
public final void functionBody() throws RecognitionException, TokenStreamException {
if ( inputState.guessing==0 ) {
buildFuncBodyFlag=true;
lexer.whiteSpaceBag = new StringBuffer();
}
match(LCURLY);
expr();
match(RCURLY);
if ( inputState.guessing==0 ) {
buildFuncBodyFlag=false;
// Put the separator back on the end of the body, since the
// separator is outside of the functionBody rule.
functionBody.append(";");
context.setFunctionBody(functionBody.toString());
context.buildFunctionSection();
functionBody = new StringBuffer();
}
}
/**
 * Parses a sequence type. A syntactic predicate first tests for
 * token 142 followed by "(" (presumably the "empty-sequence()" form —
 * token 142 has no symbolic name here; confirm against the token table);
 * otherwise an item type is parsed, optionally followed by an occurrence
 * indicator ("+", "*", or "?").
 */
public final void sequenceType() throws RecognitionException, TokenStreamException {
boolean synPredMatched390 = false;
if (((LA(1)==142))) {
int _m390 = mark();
synPredMatched390 = true;
inputState.guessing++;
try {
{
match(142);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched390 = false;
}
rewind(_m390);
inputState.guessing--;
}
if ( synPredMatched390 ) {
match(142);
match(LPAREN);
match(RPAREN);
}
else if ((_tokenSet_1.member(LA(1)))) {
itemType();
{
if ((LA(1)==PLUS||LA(1)==STAR||LA(1)==QUESTION)) {
occurrenceIndicator();
}
// A follow-set token means the occurrence indicator was omitted.
else if ((_tokenSet_3.member(LA(1)))) {
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/** Parses an expression: one or more exprSingle separated by commas. */
public final void expr() throws RecognitionException, TokenStreamException {
exprSingle();
{
_loop134:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
exprSingle();
}
else {
break _loop134;
}
} while (true);
}
}
/**
 * Parses one function parameter: "$" qName, with an optional "as"
 * type declaration. The parsed name is captured locally but not used here.
 */
public final void param() throws RecognitionException, TokenStreamException {
String name = null;
match(DOLLAR);
name=qName();
{
switch ( LA(1)) {
case LITERAL_as:
{
typeDeclaration();
break;
}
// "," or ")" follows an untyped parameter.
case COMMA:
case RPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses an enclosed expression: "{" expr "}".
 * When not speculatively parsing, it saves the current element stack on
 * the global stack and switches the lexer out of element-content mode for
 * the duration of the expression, restoring both afterwards.
 */
public final void enclosedExpr() throws RecognitionException, TokenStreamException {
match(LCURLY);
if ( inputState.guessing==0 ) {
globalStack.push(elementStack);
elementStack= new Stack();
lexer.inElementContent= false;
}
expr();
match(RCURLY);
if ( inputState.guessing==0 ) {
elementStack= (Stack) globalStack.pop();
lexer.inElementContent= true;
}
}
/**
 * Parses a FLWOR expression: one or more for/let clauses, an optional
 * where clause, an optional order-by clause, then "return" exprSingle.
 */
public final void flworExpr() throws RecognitionException, TokenStreamException {
{
int _cnt151=0;
_loop151:
do {
switch ( LA(1)) {
case LITERAL_for:
{
forClause();
break;
}
case LITERAL_let:
{
letClause();
break;
}
default:
{
// At least one for/let clause is required before exiting the loop.
if ( _cnt151>=1 ) { break _loop151; } else {throw new NoViableAltException(LT(1), getFilename());}
}
}
_cnt151++;
} while (true);
}
{
switch ( LA(1)) {
case LITERAL_where:
{
whereClause();
break;
}
// These tokens may directly follow when the where clause is omitted.
case LITERAL_order:
case LITERAL_return:
case LITERAL_stable:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
{
switch ( LA(1)) {
case LITERAL_order:
case LITERAL_stable:
{
orderByClause();
break;
}
case LITERAL_return:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_return);
exprSingle();
}
/**
 * Parses a quantified expression: ("some" | "every"), one or more
 * comma-separated in-variable bindings, then "satisfies" exprSingle.
 */
public final void quantifiedExpr() throws RecognitionException, TokenStreamException {
{
switch ( LA(1)) {
case LITERAL_some:
{
match(LITERAL_some);
break;
}
case LITERAL_every:
{
match(LITERAL_every);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
quantifiedInVarBinding();
{
_loop181:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
quantifiedInVarBinding();
}
else {
break _loop181;
}
} while (true);
}
match(LITERAL_satisfies);
exprSingle();
}
/**
 * Parses a typeswitch expression: "typeswitch" "(" expr ")", one or more
 * case clauses, then "default" with an optional "$" variable name,
 * followed by "return" exprSingle. The default-branch variable name is
 * captured locally but not used here.
 */
public final void typeswitchExpr() throws RecognitionException, TokenStreamException {
String varName=null;
match(LITERAL_typeswitch);
match(LPAREN);
expr();
match(RPAREN);
{
int _cnt186=0;
_loop186:
do {
if ((LA(1)==LITERAL_case)) {
caseClause();
}
else {
// At least one case clause is required before exiting the loop.
if ( _cnt186>=1 ) { break _loop186; } else {throw new NoViableAltException(LT(1), getFilename());}
}
_cnt186++;
} while (true);
}
match(LITERAL_default);
{
switch ( LA(1)) {
case DOLLAR:
{
match(DOLLAR);
varName=qName();
break;
}
case LITERAL_return:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_return);
exprSingle();
}
/** Parses a conditional: "if" "(" expr ")" "then" exprSingle "else" exprSingle. */
public final void ifExpr() throws RecognitionException, TokenStreamException {
match(LITERAL_if);
match(LPAREN);
expr();
match(RPAREN);
match(LITERAL_then);
exprSingle();
match(LITERAL_else);
exprSingle();
}
/**
 * Parses a try/catch expression:
 * "try" "{" expr "}" "catch" "(" "$" qName ")" "{" expr "}".
 * The catch-variable name is captured locally but not used here.
 */
public final void tryCatchExpr() throws RecognitionException, TokenStreamException {
String tmpStr = null;
match(LITERAL_try);
match(LCURLY);
expr();
match(RCURLY);
match(LITERAL_catch);
match(LPAREN);
match(DOLLAR);
tmpStr=qName();
match(RPAREN);
match(LCURLY);
expr();
match(RCURLY);
}
/** Parses one or more andExpr operands separated by the "or" keyword. */
public final void orExpr() throws RecognitionException, TokenStreamException {
andExpr();
{
_loop193:
do {
if ((LA(1)==LITERAL_or)) {
match(LITERAL_or);
andExpr();
}
else {
break _loop193;
}
} while (true);
}
}
/** Parses a for clause: "for" followed by one or more comma-separated in-variable bindings. */
public final void forClause() throws RecognitionException, TokenStreamException {
match(LITERAL_for);
inVarBinding();
{
_loop156:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
inVarBinding();
}
else {
break _loop156;
}
} while (true);
}
}
/** Parses a let clause: "let" followed by one or more comma-separated let-variable bindings. */
public final void letClause() throws RecognitionException, TokenStreamException {
match(LITERAL_let);
letVarBinding();
{
_loop163:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
letVarBinding();
}
else {
break _loop163;
}
} while (true);
}
}
/** Parses a where clause: "where" followed by a single expression. */
public final void whereClause() throws RecognitionException, TokenStreamException {
match(LITERAL_where);
exprSingle();
}
/** Parses an order-by clause: optional "stable", then "order" "by" and an order-spec list. */
public final void orderByClause() throws RecognitionException, TokenStreamException {
{
switch ( LA(1)) {
case LITERAL_stable:
{
match(LITERAL_stable);
break;
}
case LITERAL_order:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_order);
match(LITERAL_by);
orderSpecList();
}
/**
 * Parses a for-clause variable binding: "$" qName, optional "as" type
 * declaration, optional "at" positional variable, then "in" exprSingle.
 * The variable name is captured locally but not used here.
 */
public final void inVarBinding() throws RecognitionException, TokenStreamException {
String name=null;
match(DOLLAR);
name=qName();
{
switch ( LA(1)) {
case LITERAL_as:
{
typeDeclaration();
break;
}
// "at" or "in" follows when no type declaration is present.
case LITERAL_at:
case LITERAL_in:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
{
switch ( LA(1)) {
case LITERAL_at:
{
positionalVar();
break;
}
case LITERAL_in:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_in);
exprSingle();
}
/**
 * Parses a positional variable: "at" "$" qName.
 * The variable name is captured locally but not used here.
 */
public final void positionalVar() throws RecognitionException, TokenStreamException {
String name=null;
match(LITERAL_at);
match(DOLLAR);
name=qName();
}
/**
 * Parses a let-clause variable binding: "$" qName, optional "as" type
 * declaration, then ":" "=" exprSingle (":=" is lexed as two tokens here).
 * The variable name is captured locally but not used here.
 */
public final void letVarBinding() throws RecognitionException, TokenStreamException {
String name=null;
match(DOLLAR);
name=qName();
{
switch ( LA(1)) {
case LITERAL_as:
{
typeDeclaration();
break;
}
case COLON:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(COLON);
match(EQ);
exprSingle();
}
/** Parses one or more order specifications separated by commas. */
public final void orderSpecList() throws RecognitionException, TokenStreamException {
orderSpec();
{
_loop171:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
orderSpec();
}
else {
break _loop171;
}
} while (true);
}
}
/** Parses one order specification: an expression followed by its order modifier. */
public final void orderSpec() throws RecognitionException, TokenStreamException {
exprSingle();
orderModifier();
}
/**
 * Parses an order modifier: optional "ascending"/"descending", optional
 * "empty" ("greatest" | "least"), and optional "collation" STRING_LITERAL.
 * Each part may be absent; follow-set tokens (e.g. "," or "return")
 * select the empty alternative.
 */
public final void orderModifier() throws RecognitionException, TokenStreamException {
{
switch ( LA(1)) {
case LITERAL_ascending:
{
match(LITERAL_ascending);
break;
}
case LITERAL_descending:
{
match(LITERAL_descending);
break;
}
case LITERAL_collation:
case LITERAL_empty:
case COMMA:
case LITERAL_return:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
{
switch ( LA(1)) {
case LITERAL_empty:
{
match(LITERAL_empty);
{
switch ( LA(1)) {
case LITERAL_greatest:
{
match(LITERAL_greatest);
break;
}
case LITERAL_least:
{
match(LITERAL_least);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
break;
}
case LITERAL_collation:
case COMMA:
case LITERAL_return:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
{
switch ( LA(1)) {
case LITERAL_collation:
{
match(LITERAL_collation);
match(STRING_LITERAL);
break;
}
case COMMA:
case LITERAL_return:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses a quantifier binding: "$" qName, optional "as" type declaration,
 * then "in" exprSingle. The variable name is captured locally but not
 * used here.
 */
public final void quantifiedInVarBinding() throws RecognitionException, TokenStreamException {
String name=null;
match(DOLLAR);
name=qName();
{
switch ( LA(1)) {
case LITERAL_as:
{
typeDeclaration();
break;
}
case LITERAL_in:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LITERAL_in);
exprSingle();
}
/**
 * Parses a typeswitch case clause: "case", an optional "$" qName "as"
 * variable binding, a sequence type, then "return" exprSingle.
 * The long case list selects the no-variable alternative: it enumerates
 * every token that can start a sequence type (keywords double as names
 * in this grammar). The variable name is captured locally but not used
 * here.
 */
public final void caseClause() throws RecognitionException, TokenStreamException {
String varName=null;
match(LITERAL_case);
{
switch ( LA(1)) {
case DOLLAR:
{
match(DOLLAR);
varName=qName();
match(LITERAL_as);
break;
}
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
sequenceType();
match(LITERAL_return);
exprSingle();
}
/** Parses one or more comparisonExpr operands separated by the "and" keyword. */
public final void andExpr() throws RecognitionException, TokenStreamException {
comparisonExpr();
{
_loop196:
do {
if ((LA(1)==LITERAL_and)) {
match(LITERAL_and);
comparisonExpr();
}
else {
break _loop196;
}
} while (true);
}
}
/**
 * Parses a comparison: a rangeExpr optionally followed by a comparison
 * operator and a second rangeExpr. Handles value comparisons
 * ("eq","ne","lt","le","gt","ge"), node identity ("is"), and — via
 * syntactic predicates in the default branch — the two-token node-order
 * operators "&lt;&lt;" and "&gt;&gt;" before falling back to general
 * comparisons (=, !=, &lt;, &lt;=, &gt;, &gt;=). A follow-set token means
 * no comparison is present.
 */
public final void comparisonExpr() throws RecognitionException, TokenStreamException {
rangeExpr();
{
switch ( LA(1)) {
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
{
{
{
switch ( LA(1)) {
case LITERAL_eq:
{
match(LITERAL_eq);
break;
}
case LITERAL_ne:
{
match(LITERAL_ne);
break;
}
case LITERAL_lt:
{
match(LITERAL_lt);
break;
}
case LITERAL_le:
{
match(LITERAL_le);
break;
}
case LITERAL_gt:
{
match(LITERAL_gt);
break;
}
case LITERAL_ge:
{
match(LITERAL_ge);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
rangeExpr();
}
break;
}
case LITERAL_is:
{
{
{
match(LITERAL_is);
}
rangeExpr();
}
break;
}
// Follow set: no comparison operator present.
case EOF:
case LITERAL_default:
case LITERAL_collation:
case LITERAL_order:
case SEMICOLON:
case LITERAL_empty:
case COMMA:
case RPAREN:
case RCURLY:
case LITERAL_for:
case LITERAL_let:
case LITERAL_return:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case RPPAREN:
{
break;
}
default:
// Disambiguate "<<" from "<" (and ">>" from ">") by speculative lookahead.
boolean synPredMatched200 = false;
if (((LA(1)==LT))) {
int _m200 = mark();
synPredMatched200 = true;
inputState.guessing++;
try {
{
match(LT);
match(LT);
}
}
catch (RecognitionException pe) {
synPredMatched200 = false;
}
rewind(_m200);
inputState.guessing--;
}
if ( synPredMatched200 ) {
match(LT);
match(LT);
rangeExpr();
}
else {
boolean synPredMatched202 = false;
if (((LA(1)==GT))) {
int _m202 = mark();
synPredMatched202 = true;
inputState.guessing++;
try {
{
match(GT);
match(GT);
}
}
catch (RecognitionException pe) {
synPredMatched202 = false;
}
rewind(_m202);
inputState.guessing--;
}
if ( synPredMatched202 ) {
match(GT);
match(GT);
rangeExpr();
}
else if ((_tokenSet_4.member(LA(1)))) {
{
{
switch ( LA(1)) {
case EQ:
{
match(EQ);
break;
}
case NEQ:
{
match(NEQ);
break;
}
case GT:
{
match(GT);
break;
}
case GTEQ:
{
match(GTEQ);
break;
}
case LT:
{
match(LT);
break;
}
case LTEQ:
{
match(LTEQ);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
rangeExpr();
}
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}
}
}
/**
 * Parses a range expression: an additiveExpr optionally followed by
 * "to" and a second additiveExpr. The long case list is the follow set
 * for the no-"to" alternative.
 */
public final void rangeExpr() throws RecognitionException, TokenStreamException {
additiveExpr();
{
switch ( LA(1)) {
case LITERAL_to:
{
match(LITERAL_to);
additiveExpr();
break;
}
case EOF:
case EQ:
case LITERAL_default:
case LITERAL_collation:
case LITERAL_order:
case SEMICOLON:
case LITERAL_empty:
case COMMA:
case RPAREN:
case RCURLY:
case LITERAL_for:
case LITERAL_let:
case LITERAL_return:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case GT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case NEQ:
case GTEQ:
case LTEQ:
case LITERAL_is:
case RPPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/** Parses one or more multiplicativeExpr operands separated by "+" or "-". */
public final void additiveExpr() throws RecognitionException, TokenStreamException {
multiplicativeExpr();
{
_loop214:
do {
if ((LA(1)==PLUS||LA(1)==MINUS)) {
{
switch ( LA(1)) {
case PLUS:
{
match(PLUS);
break;
}
case MINUS:
{
match(MINUS);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
multiplicativeExpr();
}
else {
break _loop214;
}
} while (true);
}
}
/**
 * Parses one or more unionExpr operands separated by "*", "div", "idiv",
 * or "mod". The loop guard relies on those operator tokens occupying a
 * contiguous range from STAR to LITERAL_mod in the token vocabulary.
 */
public final void multiplicativeExpr() throws RecognitionException, TokenStreamException {
unionExpr();
{
_loop218:
do {
if (((LA(1) >= STAR && LA(1) <= LITERAL_mod))) {
{
switch ( LA(1)) {
case STAR:
{
match(STAR);
break;
}
case LITERAL_div:
{
match(LITERAL_div);
break;
}
case LITERAL_idiv:
{
match(LITERAL_idiv);
break;
}
case LITERAL_mod:
{
match(LITERAL_mod);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
unionExpr();
}
else {
break _loop218;
}
} while (true);
}
}
/**
 * Parses one or more intersectExceptExpr operands separated by the
 * "union" keyword or the UNION symbol (presumably "|" — confirm against
 * the lexer).
 */
public final void unionExpr() throws RecognitionException, TokenStreamException {
intersectExceptExpr();
{
_loop222:
do {
if ((LA(1)==LITERAL_union||LA(1)==UNION)) {
{
switch ( LA(1)) {
case LITERAL_union:
{
match(LITERAL_union);
break;
}
case UNION:
{
match(UNION);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
intersectExceptExpr();
}
else {
break _loop222;
}
} while (true);
}
}
/** Parses one or more instanceofExpr operands separated by "intersect" or "except". */
public final void intersectExceptExpr() throws RecognitionException, TokenStreamException {
instanceofExpr();
{
_loop226:
do {
if ((LA(1)==LITERAL_intersect||LA(1)==LITERAL_except)) {
{
switch ( LA(1)) {
case LITERAL_intersect:
{
match(LITERAL_intersect);
break;
}
case LITERAL_except:
{
match(LITERAL_except);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
instanceofExpr();
}
else {
break _loop226;
}
} while (true);
}
}
/**
 * Parses a treatExpr optionally followed by "instance" "of" and a
 * sequence type. The long case list is the follow set for the
 * no-"instance of" alternative.
 */
public final void instanceofExpr() throws RecognitionException, TokenStreamException {
treatExpr();
{
switch ( LA(1)) {
case LITERAL_instance:
{
match(LITERAL_instance);
match(LITERAL_of);
sequenceType();
break;
}
case EOF:
case EQ:
case LITERAL_default:
case LITERAL_collation:
case LITERAL_order:
case SEMICOLON:
case LITERAL_empty:
case COMMA:
case RPAREN:
case RCURLY:
case LITERAL_for:
case LITERAL_let:
case LITERAL_return:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case GT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case NEQ:
case GTEQ:
case LTEQ:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case UNION:
case LITERAL_intersect:
case LITERAL_except:
case RPPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses a castableExpr optionally followed by "treat" "as" and a
 * sequence type. The long case list is the follow set for the
 * no-"treat as" alternative.
 */
public final void treatExpr() throws RecognitionException, TokenStreamException {
castableExpr();
{
switch ( LA(1)) {
case LITERAL_treat:
{
match(LITERAL_treat);
match(LITERAL_as);
sequenceType();
break;
}
case EOF:
case EQ:
case LITERAL_default:
case LITERAL_collation:
case LITERAL_order:
case SEMICOLON:
case LITERAL_empty:
case COMMA:
case RPAREN:
case RCURLY:
case LITERAL_for:
case LITERAL_let:
case LITERAL_return:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case GT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case NEQ:
case GTEQ:
case LTEQ:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case UNION:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case RPPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses a castExpr optionally followed by "castable" "as" and a single
 * type. The long case list is the follow set for the no-"castable as"
 * alternative.
 */
public final void castableExpr() throws RecognitionException, TokenStreamException {
castExpr();
{
switch ( LA(1)) {
case LITERAL_castable:
{
match(LITERAL_castable);
match(LITERAL_as);
singleType();
break;
}
case EOF:
case EQ:
case LITERAL_default:
case LITERAL_collation:
case LITERAL_order:
case SEMICOLON:
case LITERAL_empty:
case COMMA:
case RPAREN:
case RCURLY:
case LITERAL_for:
case LITERAL_let:
case LITERAL_return:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case GT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case NEQ:
case GTEQ:
case LTEQ:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case UNION:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_treat:
case RPPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses a unaryExpr optionally followed by "cast" "as" and a single
 * type. The long case list is the follow set for the no-"cast as"
 * alternative.
 */
public final void castExpr() throws RecognitionException, TokenStreamException {
unaryExpr();
{
switch ( LA(1)) {
case LITERAL_cast:
{
match(LITERAL_cast);
match(LITERAL_as);
singleType();
break;
}
case EOF:
case EQ:
case LITERAL_default:
case LITERAL_collation:
case LITERAL_order:
case SEMICOLON:
case LITERAL_empty:
case COMMA:
case RPAREN:
case RCURLY:
case LITERAL_for:
case LITERAL_let:
case LITERAL_return:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case GT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case NEQ:
case GTEQ:
case LTEQ:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case UNION:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_treat:
case LITERAL_castable:
case RPPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses a single type: an atomic type optionally followed by "?".
 * The long case list is the follow set for the no-"?" alternative.
 */
public final void singleType() throws RecognitionException, TokenStreamException {
atomicType();
{
switch ( LA(1)) {
case QUESTION:
{
match(QUESTION);
break;
}
case EOF:
case EQ:
case LITERAL_default:
case LITERAL_collation:
case LITERAL_order:
case SEMICOLON:
case LITERAL_empty:
case COMMA:
case RPAREN:
case RCURLY:
case LITERAL_for:
case LITERAL_let:
case LITERAL_return:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case GT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case NEQ:
case GTEQ:
case LTEQ:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case UNION:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_treat:
case LITERAL_castable:
case RPPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/** Parses a unary expression: any number of leading "-"/"+" signs, then a value expression. */
public final void unaryExpr() throws RecognitionException, TokenStreamException {
{
_loop237:
do {
switch ( LA(1)) {
case MINUS:
{
match(MINUS);
break;
}
case PLUS:
{
match(PLUS);
break;
}
default:
{
break _loop237;
}
}
} while (true);
}
valueExpr();
}
/**
 * Parses a value expression, dispatching on the lookahead token:
 * "validate" starts a validate expression, a PRAGMA token starts an
 * extension expression, and any token in _tokenSet_5 starts a path
 * expression.
 */
public final void valueExpr() throws RecognitionException, TokenStreamException {
if ((LA(1)==LITERAL_validate)) {
validateExpr();
}
else if ((_tokenSet_5.member(LA(1)))) {
pathExpr();
}
else if ((LA(1)==PRAGMA)) {
extensionExpr();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Parses a validate expression: "validate", an optional validation mode
 * ("lax" | "strict"), then "{" expr "}".
 */
public final void validateExpr() throws RecognitionException, TokenStreamException {
match(LITERAL_validate);
{
switch ( LA(1)) {
case LITERAL_lax:
case LITERAL_strict:
{
validationMode();
break;
}
// "{" directly after "validate" means the mode was omitted.
case LCURLY:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LCURLY);
expr();
match(RCURLY);
}
/**
 * Parses a path expression: either a relative path (the long case list
 * enumerates every token that can start one — keywords double as names
 * in this grammar), "//" followed by a relative path, or — resolved by
 * a syntactic predicate in the default branch — "/" followed by a
 * relative path versus a lone "/" (the root path).
 */
public final void pathExpr() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
relativePathExpr();
break;
}
case DSLASH:
{
match(DSLASH);
relativePathExpr();
break;
}
default:
// Speculatively try "/" followed by a relative path; if that fails,
// the "/" alone is accepted as the whole path expression.
boolean synPredMatched247 = false;
if (((LA(1)==SLASH))) {
int _m247 = mark();
synPredMatched247 = true;
inputState.guessing++;
try {
{
match(SLASH);
relativePathExpr();
}
}
catch (RecognitionException pe) {
synPredMatched247 = false;
}
rewind(_m247);
inputState.guessing--;
}
if ( synPredMatched247 ) {
match(SLASH);
relativePathExpr();
}
else if ((LA(1)==SLASH)) {
match(SLASH);
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses an extension expression: one or more PRAGMA tokens followed by
 * "{" and an optional expression, then "}". The long case list
 * enumerates every token that can start the enclosed expression;
 * "}" selects the empty alternative.
 */
public final void extensionExpr() throws RecognitionException, TokenStreamException {
{
int _cnt243=0;
_loop243:
do {
if ((LA(1)==PRAGMA)) {
match(PRAGMA);
}
else {
// At least one pragma is required before exiting the loop.
if ( _cnt243>=1 ) { break _loop243; } else {throw new NoViableAltException(LT(1), getFilename());}
}
_cnt243++;
} while (true);
}
match(LCURLY);
{
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
expr();
break;
}
case RCURLY:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RCURLY);
}
/** Parses a validation mode: either the "lax" or the "strict" keyword. */
public final void validationMode() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LITERAL_lax:
{
match(LITERAL_lax);
break;
}
case LITERAL_strict:
{
match(LITERAL_strict);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/** Parses a relative path: one or more step expressions separated by "/" or "//". */
public final void relativePathExpr() throws RecognitionException, TokenStreamException {
stepExpr();
{
_loop251:
do {
if ((LA(1)==SLASH||LA(1)==DSLASH)) {
{
switch ( LA(1)) {
case SLASH:
{
match(SLASH);
break;
}
case DSLASH:
{
match(DSLASH);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
stepExpr();
}
else {
break _loop251;
}
} while (true);
}
}
public final void stepExpr() throws RecognitionException, TokenStreamException {
boolean synPredMatched255 = false;
if (((_tokenSet_6.member(LA(1))))) {
int _m255 = mark();
synPredMatched255 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case LITERAL_text:
{
match(LITERAL_text);
break;
}
case LITERAL_node:
{
match(LITERAL_node);
break;
}
case LITERAL_element:
{
match(LITERAL_element);
break;
}
case LITERAL_attribute:
{
match(LITERAL_attribute);
break;
}
case LITERAL_comment:
{
match(LITERAL_comment);
break;
}
case 104:
{
match(104);
break;
}
case 105:
{
match(105);
break;
}
case 106:
{
match(106);
break;
}
case 107:
{
match(107);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched255 = false;
}
rewind(_m255);
inputState.guessing--;
}
if ( synPredMatched255 ) {
axisStep();
}
else {
boolean synPredMatched258 = false;
if (((_tokenSet_7.member(LA(1))))) {
int _m258 = mark();
synPredMatched258 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case LITERAL_element:
{
match(LITERAL_element);
break;
}
case LITERAL_attribute:
{
match(LITERAL_attribute);
break;
}
case LITERAL_text:
{
match(LITERAL_text);
break;
}
case LITERAL_document:
{
match(LITERAL_document);
break;
}
case 104:
{
match(104);
break;
}
case LITERAL_comment:
{
match(LITERAL_comment);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched258 = false;
}
rewind(_m258);
inputState.guessing--;
}
if ( synPredMatched258 ) {
filterExpr();
}
else {
boolean synPredMatched261 = false;
if (((_tokenSet_7.member(LA(1))))) {
int _m261 = mark();
synPredMatched261 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case LITERAL_element:
{
match(LITERAL_element);
break;
}
case LITERAL_attribute:
{
match(LITERAL_attribute);
break;
}
case 104:
{
match(104);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
qName();
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched261 = false;
}
rewind(_m261);
inputState.guessing--;
}
if ( synPredMatched261 ) {
filterExpr();
}
else {
boolean synPredMatched264 = false;
if (((_tokenSet_7.member(LA(1))))) {
int _m264 = mark();
synPredMatched264 = true;
inputState.guessing++;
try {
{
switch ( LA(1)) {
case DOLLAR:
{
match(DOLLAR);
break;
}
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
{
qName();
match(LPAREN);
}
break;
}
case SELF:
{
match(SELF);
break;
}
case LPAREN:
{
match(LPAREN);
break;
}
case STRING_LITERAL:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
{
literal();
break;
}
case XML_COMMENT:
{
match(XML_COMMENT);
break;
}
case LT:
{
match(LT);
break;
}
case XML_PI:
{
match(XML_PI);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
catch (RecognitionException pe) {
synPredMatched264 = false;
}
rewind(_m264);
inputState.guessing--;
}
if ( synPredMatched264 ) {
filterExpr();
}
else if ((_tokenSet_6.member(LA(1)))) {
axisStep();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}}
}
/**
 * Parses an XQuery axis step: one forward or reverse step followed by an
 * optional predicate list. (ANTLR-generated recursive-descent rule.)
 */
public final void axisStep() throws RecognitionException, TokenStreamException {
{
forwardOrReverseStep();
}
predicateList();
}
/**
 * Parses a filter expression: a primary expression followed by an optional
 * predicate list. (ANTLR-generated recursive-descent rule.)
 */
public final void filterExpr() throws RecognitionException, TokenStreamException {
primaryExpr();
predicateList();
}
/**
 * Parses a literal: either a string literal or a numeric literal
 * (double/decimal/integer), dispatched on one token of lookahead.
 */
public final void literal() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case STRING_LITERAL:
{
match(STRING_LITERAL);
break;
}
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
{
numericLiteral();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a single step, trying in order: an explicit forward axis
 * ("axis::nodeTest"), an explicit reverse axis, and finally an abbreviated
 * step. Each explicit-axis alternative is selected via a syntactic
 * predicate: tokens are matched in guessing mode and the input is rewound
 * before the real parse.
 */
public final void forwardOrReverseStep() throws RecognitionException, TokenStreamException {
// Predicate 269: does "forwardAxisSpecifier COLON" match ahead?
boolean synPredMatched269 = false;
if (((_tokenSet_8.member(LA(1))))) {
int _m269 = mark();
synPredMatched269 = true;
inputState.guessing++;
try {
{
forwardAxisSpecifier();
match(COLON);
}
}
catch (RecognitionException pe) {
synPredMatched269 = false;
}
rewind(_m269);   // undo the speculative consumption regardless of outcome
inputState.guessing--;
}
if ( synPredMatched269 ) {
forwardAxis();
nodeTest();
}
else {
// Predicate 271: does "reverseAxisSpecifier COLON" match ahead?
boolean synPredMatched271 = false;
if ((((LA(1) >= LITERAL_parent && LA(1) <= 124)))) {
int _m271 = mark();
synPredMatched271 = true;
inputState.guessing++;
try {
{
reverseAxisSpecifier();
match(COLON);
}
}
catch (RecognitionException pe) {
synPredMatched271 = false;
}
rewind(_m271);
inputState.guessing--;
}
if ( synPredMatched271 ) {
reverseAxis();
nodeTest();
}
else if ((_tokenSet_6.member(LA(1)))) {
abbrevStep();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses zero or more predicates; each predicate is introduced by an
 * LPPAREN ("[") token. The loop exits at the first non-LPPAREN token.
 */
public final void predicateList() throws RecognitionException, TokenStreamException {
{
_loop307:
do {
if ((LA(1)==LPPAREN)) {
predicate();
}
else {
break _loop307;
}
} while (true);
}
}
/**
 * Parses a forward-axis name (child, self, attribute, descendant,
 * following, and two unnamed token types 117/119 — presumably the
 * "-or-self"/"-sibling" variants; token names were not generated).
 */
public final void forwardAxisSpecifier() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LITERAL_child:
{
match(LITERAL_child);
break;
}
case LITERAL_self:
{
match(LITERAL_self);
break;
}
case LITERAL_attribute:
{
match(LITERAL_attribute);
break;
}
case LITERAL_descendant:
{
match(LITERAL_descendant);
break;
}
case 117:
{
match(117);
break;
}
case LITERAL_following:
{
match(LITERAL_following);
break;
}
case 119:
{
match(119);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a forward axis with its "::" separator (two COLON tokens).
 */
public final void forwardAxis() throws RecognitionException, TokenStreamException {
forwardAxisSpecifier();
match(COLON);
match(COLON);
}
/**
 * Parses a node test: either a kind test or a name test. Each kind-test
 * form ("keyword (") is probed with its own syntactic predicate — tokens
 * are matched in guessing mode and rewound — and on success kindTest()
 * performs the real parse. Numeric token types 104-107 are keyword tokens
 * whose names were not generated (each pairs with LPAREN like the named
 * kind tests). If no kind test matches, falls back to nameTest().
 */
public final void nodeTest() throws RecognitionException, TokenStreamException {
// Probe "105 (" — one of the unnamed kind-test keywords.
boolean synPredMatched280 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m280 = mark();
synPredMatched280 = true;
inputState.guessing++;
try {
{
match(105);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched280 = false;
}
rewind(_m280);
inputState.guessing--;
}
if ( synPredMatched280 ) {
kindTest();
}
else {
// Probe "element (".
boolean synPredMatched282 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m282 = mark();
synPredMatched282 = true;
inputState.guessing++;
try {
{
match(LITERAL_element);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched282 = false;
}
rewind(_m282);
inputState.guessing--;
}
if ( synPredMatched282 ) {
kindTest();
}
else {
// Probe "attribute (".
boolean synPredMatched284 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m284 = mark();
synPredMatched284 = true;
inputState.guessing++;
try {
{
match(LITERAL_attribute);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched284 = false;
}
rewind(_m284);
inputState.guessing--;
}
if ( synPredMatched284 ) {
kindTest();
}
else {
// Probe "104 (".
boolean synPredMatched286 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m286 = mark();
synPredMatched286 = true;
inputState.guessing++;
try {
{
match(104);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched286 = false;
}
rewind(_m286);
inputState.guessing--;
}
if ( synPredMatched286 ) {
kindTest();
}
else {
// Probe "comment (".
boolean synPredMatched288 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m288 = mark();
synPredMatched288 = true;
inputState.guessing++;
try {
{
match(LITERAL_comment);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched288 = false;
}
rewind(_m288);
inputState.guessing--;
}
if ( synPredMatched288 ) {
kindTest();
}
else {
// Probe "text (".
boolean synPredMatched290 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m290 = mark();
synPredMatched290 = true;
inputState.guessing++;
try {
{
match(LITERAL_text);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched290 = false;
}
rewind(_m290);
inputState.guessing--;
}
if ( synPredMatched290 ) {
kindTest();
}
else {
// Probe "node (".
boolean synPredMatched292 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m292 = mark();
synPredMatched292 = true;
inputState.guessing++;
try {
{
match(LITERAL_node);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched292 = false;
}
rewind(_m292);
inputState.guessing--;
}
if ( synPredMatched292 ) {
kindTest();
}
else {
// Probe "106 (".
boolean synPredMatched294 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m294 = mark();
synPredMatched294 = true;
inputState.guessing++;
try {
{
match(106);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched294 = false;
}
rewind(_m294);
inputState.guessing--;
}
if ( synPredMatched294 ) {
kindTest();
}
else {
// Probe "107 (".
boolean synPredMatched296 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m296 = mark();
synPredMatched296 = true;
inputState.guessing++;
try {
{
match(107);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched296 = false;
}
rewind(_m296);
inputState.guessing--;
}
if ( synPredMatched296 ) {
kindTest();
}
// No kind test matched: a plain name test (QName or wildcard).
else if ((_tokenSet_10.member(LA(1)))) {
nameTest();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}}}}}}}
}
/**
 * Parses a reverse-axis name (parent, ancestor, preceding, plus unnamed
 * token types 122/124 — presumably the "-or-self"/"-sibling" variants;
 * token names were not generated).
 */
public final void reverseAxisSpecifier() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LITERAL_parent:
{
match(LITERAL_parent);
break;
}
case LITERAL_ancestor:
{
match(LITERAL_ancestor);
break;
}
case 122:
{
match(122);
break;
}
case LITERAL_preceding:
{
match(LITERAL_preceding);
break;
}
case 124:
{
match(124);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a reverse axis with its "::" separator (two COLON tokens).
 */
public final void reverseAxis() throws RecognitionException, TokenStreamException {
reverseAxisSpecifier();
match(COLON);
match(COLON);
}
/**
 * Parses an abbreviated step: either an optional "@" (AT) followed by a
 * node test, or ".." (PARENT). The very large case lists enumerate every
 * token that can begin a node test — XQuery keywords are not reserved, so
 * nearly every keyword token is a legal name start. The inner switch
 * consumes AT if present and otherwise falls through (empty alternative)
 * to the node test.
 */
public final void abbrevStep() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case AT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
{
// Optional "@" prefix (abbreviated attribute axis); the non-AT cases
// form an empty alternative that leaves the token for nodeTest().
switch ( LA(1)) {
case AT:
{
match(AT);
break;
}
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
nodeTest();
break;
}
case PARENT:
{
// ".." — abbreviated parent step.
match(PARENT);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Dispatches to the specific kind-test rule based on the leading keyword.
 * Unnamed token types map to: 105 -> documentTest, 107 -> schemaElementTest,
 * 106 -> schemaAttributeTest, 104 -> piTest (as shown by the calls below).
 */
public final void kindTest() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case 105:
{
documentTest();
break;
}
case LITERAL_element:
{
elementTest();
break;
}
case LITERAL_attribute:
{
attributeTest();
break;
}
case 107:
{
schemaElementTest();
break;
}
case 106:
{
schemaAttributeTest();
break;
}
case 104:
{
piTest();
break;
}
case LITERAL_comment:
{
commentTest();
break;
}
case LITERAL_text:
{
textTest();
break;
}
case LITERAL_node:
{
anyKindTest();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a name test: a wildcard form or a plain QName. A syntactic
 * predicate probes for "prefix:*" or a bare "*" (matched in guessing mode
 * and rewound); on success wildcard() does the real parse, otherwise a
 * QName is consumed. The captured name/prefix values are not used further
 * in this rule.
 */
public final void nameTest() throws RecognitionException, TokenStreamException {
String name=null;
String prefix= null;
boolean synPredMatched300 = false;
if (((_tokenSet_10.member(LA(1))))) {
int _m300 = mark();
synPredMatched300 = true;
inputState.guessing++;
try {
{
switch ( LA(1)) {
// Any keyword/NCNAME token can begin "prefix:*" (keywords are not
// reserved as names in XQuery).
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
{
prefix=ncnameOrKeyword();
match(COLON);
match(STAR);
}
break;
}
case STAR:
{
match(STAR);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
catch (RecognitionException pe) {
synPredMatched300 = false;
}
rewind(_m300);   // undo the speculative consumption
inputState.guessing--;
}
if ( synPredMatched300 ) {
wildcard();
}
else if ((_tokenSet_1.member(LA(1)))) {
name=qName();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Parses a wildcard name test in one of three forms: "*:NCName" (selected
 * by a syntactic predicate probing "* :"), "NCName:*", or a bare "*".
 * The captured name is not used further in this rule.
 */
public final void wildcard() throws RecognitionException, TokenStreamException {
String name=null;
boolean synPredMatched303 = false;
if (((LA(1)==STAR))) {
int _m303 = mark();
synPredMatched303 = true;
inputState.guessing++;
try {
{
match(STAR);
match(COLON);
}
}
catch (RecognitionException pe) {
synPredMatched303 = false;
}
rewind(_m303);
inputState.guessing--;
}
if ( synPredMatched303 ) {
// "*:NCName"
match(STAR);
match(COLON);
name=ncnameOrKeyword();
}
else if ((_tokenSet_1.member(LA(1)))) {
// "NCName:*"
name=ncnameOrKeyword();
match(COLON);
match(STAR);
}
else if ((LA(1)==STAR)) {
// bare "*"
match(STAR);
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Parses a primary expression: context item ("."), parenthesized
 * expression, variable reference ("$QName", recorded on the context
 * object), literal, ordered/unordered expression, computed constructor
 * (selected via syntactic predicates probing for "keyword {" or
 * "keyword QName {"), direct constructor, or function call.
 */
public final void primaryExpr() throws RecognitionException, TokenStreamException {
String name=null;
switch ( LA(1)) {
case SELF:
{
contextItemExpr();
break;
}
case LPAREN:
{
parenthesizedExpr();
break;
}
case DOLLAR:
{
match(DOLLAR);
name=qName();
if ( inputState.guessing==0 ) {
// Semantic action runs only during the real parse, not while guessing.
context.setReferencedVariable(name);
}
break;
}
case STRING_LITERAL:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
{
literal();
break;
}
default:
if ((LA(1)==LITERAL_ordered)) {
orderedExpr();
}
else if ((LA(1)==LITERAL_unordered)) {
unorderedExpr();
}
else {
// Predicate 312: "element|attribute|text|document|104|comment {" —
// a computed constructor without an explicit name.
boolean synPredMatched312 = false;
if (((_tokenSet_11.member(LA(1))))) {
int _m312 = mark();
synPredMatched312 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case LITERAL_element:
{
match(LITERAL_element);
break;
}
case LITERAL_attribute:
{
match(LITERAL_attribute);
break;
}
case LITERAL_text:
{
match(LITERAL_text);
break;
}
case LITERAL_document:
{
match(LITERAL_document);
break;
}
case 104:
{
match(104);
break;
}
case LITERAL_comment:
{
match(LITERAL_comment);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched312 = false;
}
rewind(_m312);
inputState.guessing--;
}
if ( synPredMatched312 ) {
computedConstructor();
}
else {
// Predicate 315: "element|attribute|104 QName {" — a computed
// constructor with an explicit name.
boolean synPredMatched315 = false;
if (((_tokenSet_11.member(LA(1))))) {
int _m315 = mark();
synPredMatched315 = true;
inputState.guessing++;
try {
{
{
switch ( LA(1)) {
case LITERAL_element:
{
match(LITERAL_element);
break;
}
case LITERAL_attribute:
{
match(LITERAL_attribute);
break;
}
case 104:
{
match(104);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
qName();
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched315 = false;
}
rewind(_m315);
inputState.guessing--;
}
if ( synPredMatched315 ) {
computedConstructor();
}
else if ((_tokenSet_12.member(LA(1)))) {
constructor();
}
else if ((_tokenSet_1.member(LA(1)))) {
functionCall();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}}
}
/**
 * Parses one predicate: an expression bracketed by LPPAREN/RPPAREN tokens.
 */
public final void predicate() throws RecognitionException, TokenStreamException {
match(LPPAREN);
expr();
match(RPPAREN);
}
/**
 * Parses "ordered { expr }".
 */
public final void orderedExpr() throws RecognitionException, TokenStreamException {
match(LITERAL_ordered);
match(LCURLY);
expr();
match(RCURLY);
}
/**
 * Parses "unordered { expr }".
 */
public final void unorderedExpr() throws RecognitionException, TokenStreamException {
match(LITERAL_unordered);
match(LCURLY);
expr();
match(RCURLY);
}
/**
 * Dispatches to the specific computed-constructor rule on the leading
 * keyword (document/element/attribute/text/comment; unnamed token 104
 * maps to the processing-instruction constructor, per the call below).
 */
public final void computedConstructor() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LITERAL_document:
{
compDocConstructor();
break;
}
case LITERAL_element:
{
compElemConstructor();
break;
}
case LITERAL_attribute:
{
compAttrConstructor();
break;
}
case LITERAL_text:
{
compTextConstructor();
break;
}
case LITERAL_comment:
{
compCommentConstructor();
break;
}
case 104:
{
compPIConstructor();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a constructor: a direct constructor (starting with "<", an XML
 * comment, or an XML PI token) or a computed constructor (starting with
 * one of the constructor keywords).
 */
public final void constructor() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LT:
case XML_COMMENT:
case XML_PI:
{
directConstructor();
break;
}
case LITERAL_element:
case LITERAL_text:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case LITERAL_document:
{
computedConstructor();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a function call: "QName ( [functionParameters] )". The huge case
 * list enumerates every token that can begin the first argument; an
 * immediate RPAREN means an empty argument list. After the real parse
 * (not while guessing) the invoked function name is recorded on the
 * context object.
 */
public final void functionCall() throws RecognitionException, TokenStreamException {
String fnName= null;
fnName=qName();
match(LPAREN);
{
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
functionParameters();
break;
}
case RPAREN:
{
// Empty argument list: "name()".
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RPAREN);
if ( inputState.guessing==0 ) {
context.setInvokedFunction(fnName);
}
}
/**
 * Parses the context-item expression "." (the SELF token).
 */
public final void contextItemExpr() throws RecognitionException, TokenStreamException {
match(SELF);
}
/**
 * Parses "( [expr] )". The case list enumerates every token that can begin
 * the inner expression; an immediate RPAREN means the empty sequence "()".
 */
public final void parenthesizedExpr() throws RecognitionException, TokenStreamException {
match(LPAREN);
{
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
expr();
break;
}
case RPAREN:
{
// Empty parentheses: "()".
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RPAREN);
}
/**
 * Parses a numeric literal: double, decimal, or integer.
 */
public final void numericLiteral() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case DOUBLE_LITERAL:
{
match(DOUBLE_LITERAL);
break;
}
case DECIMAL_LITERAL:
{
match(DECIMAL_LITERAL);
break;
}
case INTEGER_LITERAL:
{
match(INTEGER_LITERAL);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses one or more function arguments: exprSingle, then zero or more
 * ", exprSingle" repetitions.
 */
public final void functionParameters() throws RecognitionException, TokenStreamException {
exprSingle();
{
_loop327:
do {
if ((LA(1)==COMMA)) {
match(COMMA);
exprSingle();
}
else {
break _loop327;
}
} while (true);
}
}
/**
 * Parses a direct constructor: element ("<"), XML comment, or XML
 * processing instruction, dispatched on one token of lookahead.
 */
public final void directConstructor() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LT:
{
dirElemConstructor();
break;
}
case XML_COMMENT:
{
dirCommentConstructor();
break;
}
case XML_PI:
{
dirPIConstructor();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a direct element constructor. A syntactic predicate speculatively
 * matches "< QName" followed by any token in _tokenSet_13 (then rewinds);
 * on success the element is parsed with an attribute list, otherwise
 * without one.
 */
public final void dirElemConstructor() throws RecognitionException, TokenStreamException {
boolean synPredMatched333 = false;
if (((LA(1)==LT))) {
int _m333 = mark();
synPredMatched333 = true;
inputState.guessing++;
try {
{
match(LT);
qName();
{
match(_tokenSet_13);
}
}
}
catch (RecognitionException pe) {
synPredMatched333 = false;
}
rewind(_m333);
inputState.guessing--;
}
if ( synPredMatched333 ) {
elementWithAttributes();
}
else if ((LA(1)==LT)) {
elementWithoutAttributes();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Parses a direct XML comment: the lexer delivers it as an XML_COMMENT
 * token followed by an XML_COMMENT_END token.
 */
public final void dirCommentConstructor() throws RecognitionException, TokenStreamException {
match(XML_COMMENT);
match(XML_COMMENT_END);
}
/**
 * Parses a direct XML processing instruction: an XML_PI token followed by
 * an XML_PI_END token.
 */
public final void dirPIConstructor() throws RecognitionException, TokenStreamException {
match(XML_PI);
match(XML_PI_END);
}
/**
 * Parses "< QName attributes ..." ending either as an empty-element tag
 * ("/>") or as an open tag with content and a matching end tag. Semantic
 * actions maintain elementStack (open-element names) and the lexer's
 * inElementContent flag. NOTE(review): the empty {} blocks are where a
 * missing/mismatched end tag would presumably be reported — as generated,
 * no error is raised for either condition.
 */
public final void elementWithAttributes() throws RecognitionException, TokenStreamException {
String name= null;
match(LT);
name=qName();
attributeList();
{
switch ( LA(1)) {
case SLASH:
{
{
// Empty element: "/>". Re-enter element-content mode only if we are
// still inside an enclosing element.
match(SLASH);
match(GT);
if ( inputState.guessing==0 ) {
if (!elementStack.isEmpty())
lexer.inElementContent= true;
}
}
break;
}
case GT:
{
{
// Open tag: push the name, parse mixed content, then the end tag.
match(GT);
if ( inputState.guessing==0 ) {
elementStack.push(name);
lexer.inElementContent= true;
}
mixedElementContent();
match(END_TAG_START);
name=qName();
match(GT);
if ( inputState.guessing==0 ) {
if (elementStack.isEmpty())
{}
String prev= (String) elementStack.pop();
if (!prev.equals(name))
{}
if (!elementStack.isEmpty()) {
lexer.inElementContent= true;
}
}
}
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses "< QName" with no attributes, ending either as an empty-element
 * tag ("/>") or as an open tag with content and a matching end tag.
 * Mirrors elementWithAttributes() minus the attributeList() call; the same
 * elementStack / lexer.inElementContent bookkeeping applies, and the same
 * empty {} blocks silently ignore end-tag mismatches.
 */
public final void elementWithoutAttributes() throws RecognitionException, TokenStreamException {
String name= null;
match(LT);
name=qName();
{
switch ( LA(1)) {
case SLASH:
{
{
// Empty element: "/>".
match(SLASH);
match(GT);
if ( inputState.guessing==0 ) {
if (!elementStack.isEmpty())
lexer.inElementContent= true;
}
}
break;
}
case GT:
{
{
// Open tag: push the name, parse mixed content, then the end tag.
match(GT);
if ( inputState.guessing==0 ) {
elementStack.push(name);
lexer.inElementContent= true;
}
mixedElementContent();
match(END_TAG_START);
name=qName();
match(GT);
if ( inputState.guessing==0 ) {
if (elementStack.isEmpty())
{}
String prev= (String) elementStack.pop();
if (!prev.equals(name))
{}
if (!elementStack.isEmpty()) {
lexer.inElementContent= true;
}
}
}
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
}
/**
 * Parses zero or more items of direct-element content; loops while the
 * lookahead token can start dirElemContent (per _tokenSet_14).
 */
public final void mixedElementContent() throws RecognitionException, TokenStreamException {
{
_loop344:
do {
if ((_tokenSet_14.member(LA(1)))) {
dirElemContent();
}
else {
break _loop344;
}
} while (true);
}
}
/**
 * Parses one or more attribute definitions (at least one is required:
 * exiting with zero iterations throws NoViableAltException).
 */
public final void attributeList() throws RecognitionException, TokenStreamException {
{
int _cnt348=0;
_loop348:
do {
if ((_tokenSet_1.member(LA(1)))) {
attributeDef();
}
else {
// Fewer than one attribute parsed is a syntax error.
if ( _cnt348>=1 ) { break _loop348; } else {throw new NoViableAltException(LT(1), getFilename());}
}
_cnt348++;
} while (true);
}
}
/**
 * Parses one item of direct-element content: a nested direct constructor,
 * raw element text (ELEMENT_CONTENT), an enclosed expression ("{...}"),
 * or a CDATA section.
 */
public final void dirElemContent() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case LT:
case XML_COMMENT:
case XML_PI:
{
directConstructor();
break;
}
case ELEMENT_CONTENT:
{
match(ELEMENT_CONTENT);
break;
}
case LCURLY:
{
enclosedExpr();
break;
}
case XML_CDATA:
{
cdataSection();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses a CDATA section: an XML_CDATA token followed by XML_CDATA_END.
 */
public final void cdataSection() throws RecognitionException, TokenStreamException {
match(XML_CDATA);
match(XML_CDATA_END);
}
/**
 * Parses one attribute definition: QName "=" attributeValue. String-literal
 * lexing is disabled first so the value delimiters are handled by the
 * attribute-content lexer modes instead.
 */
public final void attributeDef() throws RecognitionException, TokenStreamException {
String name=null;
lexer.parseStringLiterals= false;
name=qName();
match(EQ);
attributeValue();
}
/**
 * Parses a quoted attribute value, double- or single-quoted. On the
 * opening quote the lexer is switched into attribute-content mode with the
 * matching delimiter char; content tokens are consumed in a loop; on the
 * closing quote string-literal lexing is re-enabled and attribute mode is
 * cleared. Actions run only outside guessing mode.
 */
public final void attributeValue() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case QUOT:
{
match(QUOT);
if ( inputState.guessing==0 ) {
lexer.inAttributeContent= true;
lexer.attrDelimChar = '"';
}
{
_loop352:
do {
if ((LA(1)==LCURLY||LA(1)==RCURLY||LA(1)==QUOT_ATTRIBUTE_CONTENT)) {
quotAttrValueContent();
}
else {
break _loop352;
}
} while (true);
}
match(QUOT);
if ( inputState.guessing==0 ) {
lexer.parseStringLiterals= true;
lexer.inAttributeContent= false;
}
break;
}
case APOS:
{
match(APOS);
if ( inputState.guessing==0 ) {
lexer.inAttributeContent= true;
lexer.attrDelimChar = '\'';
}
{
_loop354:
do {
if ((LA(1)==LCURLY||LA(1)==RCURLY||LA(1)==APOS_ATTRIBUTE_CONTENT)) {
aposAttrValueContent();
}
else {
break _loop354;
}
} while (true);
}
match(APOS);
if ( inputState.guessing==0 ) {
lexer.parseStringLiterals= true;
lexer.inAttributeContent= false;
}
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses one content item inside a double-quoted attribute value: raw
 * content (QUOT_ATTRIBUTE_CONTENT) or common content starting with "{"/"}".
 */
public final void quotAttrValueContent() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case QUOT_ATTRIBUTE_CONTENT:
{
match(QUOT_ATTRIBUTE_CONTENT);
break;
}
case LCURLY:
case RCURLY:
{
attrCommonContent();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses one content item inside a single-quoted attribute value: raw
 * content (APOS_ATTRIBUTE_CONTENT) or common content starting with "{"/"}".
 */
public final void aposAttrValueContent() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case APOS_ATTRIBUTE_CONTENT:
{
match(APOS_ATTRIBUTE_CONTENT);
break;
}
case LCURLY:
case RCURLY:
{
attrCommonContent();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Parses attribute common content: the escape "{{" (detected by a
 * syntactic predicate probing two LCURLY tokens, then rewinding), the
 * escape "}}", or an enclosed expression "{ expr }". After "{{" the lexer
 * is put back into attribute-content mode with string literals disabled.
 */
public final void attrCommonContent() throws RecognitionException, TokenStreamException {
boolean synPredMatched359 = false;
if (((LA(1)==LCURLY))) {
int _m359 = mark();
synPredMatched359 = true;
inputState.guessing++;
try {
{
match(LCURLY);
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched359 = false;
}
rewind(_m359);
inputState.guessing--;
}
if ( synPredMatched359 ) {
// Escaped left brace: "{{".
match(LCURLY);
match(LCURLY);
if ( inputState.guessing==0 ) {
lexer.inAttributeContent= true;
lexer.parseStringLiterals = false;
}
}
else if ((LA(1)==RCURLY)) {
// Escaped right brace: "}}".
match(RCURLY);
match(RCURLY);
}
else if ((LA(1)==LCURLY)) {
attributeEnclosedExpr();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Matches an enclosed expression "{ expr }" inside an attribute value.
 * Temporarily switches the lexer out of attribute-content mode (and back
 * to string-literal parsing) while the expression is scanned, then
 * restores attribute-content mode after the closing brace.
 * Generated rule — do not edit by hand.
 */
public final void attributeEnclosedExpr() throws RecognitionException, TokenStreamException {
match(LCURLY);
if ( inputState.guessing==0 ) {
// inside "{...}" the lexer tokenizes a normal expression
lexer.inAttributeContent= false;
lexer.parseStringLiterals = true;
}
expr();
match(RCURLY);
if ( inputState.guessing==0 ) {
// back to raw attribute-content scanning after "}"
lexer.inAttributeContent= true;
lexer.parseStringLiterals = false;
}
}
/**
 * Matches a computed document constructor: {@code document { expr }}.
 * Generated rule — do not edit by hand.
 */
public final void compDocConstructor() throws RecognitionException, TokenStreamException {
match(LITERAL_document);
match(LCURLY);
expr();
match(RCURLY);
}
/**
 * Matches a computed element constructor, in one of two forms:
 * {@code element { nameExpr } { contentExpr? }} (computed name) or
 * {@code element qname { contentExpr? }} (literal QName).
 * A syntactic predicate on "element {" selects the computed-name form.
 * Generated rule — do not edit by hand.
 */
public final void compElemConstructor() throws RecognitionException, TokenStreamException {
String name=null;
boolean synPredMatched369 = false;
if (((LA(1)==LITERAL_element))) {
int _m369 = mark();
synPredMatched369 = true;
inputState.guessing++;
// speculatively check for "element {" (computed-name form)
try {
{
match(LITERAL_element);
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched369 = false;
}
rewind(_m369);
inputState.guessing--;
}
if ( synPredMatched369 ) {
// element { nameExpr } { contentExpr? }
match(LITERAL_element);
match(LCURLY);
expr();
match(RCURLY);
match(LCURLY);
{
// generated lookahead set: any token that can begin an expr;
// RCURLY means the content block is empty
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
contentExpr();
break;
}
case RCURLY:
{
// empty content: element { nameExpr } { }
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RCURLY);
}
else if ((LA(1)==LITERAL_element)) {
// element qname { contentExpr? }
match(LITERAL_element);
name=qName();
match(LCURLY);
{
// generated lookahead set: any token that can begin an expr;
// RCURLY means the content block is empty
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
contentExpr();
break;
}
case RCURLY:
{
// empty content: element qname { }
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RCURLY);
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Matches a computed attribute constructor, in one of two forms:
 * {@code attribute { nameExpr } { valueExpr? }} (computed name) or
 * {@code attribute qname { valueExpr? }} (literal QName).
 * A syntactic predicate on "attribute {" selects the computed-name form.
 * Generated rule — do not edit by hand.
 */
public final void compAttrConstructor() throws RecognitionException, TokenStreamException {
String name=null;
boolean synPredMatched375 = false;
if (((LA(1)==LITERAL_attribute))) {
int _m375 = mark();
synPredMatched375 = true;
inputState.guessing++;
// speculatively check for "attribute {" (computed-name form)
try {
{
match(LITERAL_attribute);
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched375 = false;
}
rewind(_m375);
inputState.guessing--;
}
if ( synPredMatched375 ) {
// attribute { nameExpr } { valueExpr? }
match(LITERAL_attribute);
match(LCURLY);
expr();
match(RCURLY);
match(LCURLY);
{
// generated lookahead set: any token that can begin an expr;
// RCURLY means the value block is empty
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
expr();
break;
}
case RCURLY:
{
// empty value: attribute { nameExpr } { }
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RCURLY);
}
else if ((LA(1)==LITERAL_attribute)) {
// attribute qname { valueExpr? }
match(LITERAL_attribute);
name=qName();
match(LCURLY);
{
// generated lookahead set: any token that can begin an expr;
// RCURLY means the value block is empty
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
expr();
break;
}
case RCURLY:
{
// empty value: attribute qname { }
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RCURLY);
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Matches a computed text-node constructor: {@code text { expr }}.
 * Generated rule — do not edit by hand.
 */
public final void compTextConstructor() throws RecognitionException, TokenStreamException {
match(LITERAL_text);
match(LCURLY);
expr();
match(RCURLY);
}
/**
 * Matches a computed comment constructor: {@code comment { expr }}.
 * Generated rule — do not edit by hand.
 */
public final void compCommentConstructor() throws RecognitionException, TokenStreamException {
match(LITERAL_comment);
match(LCURLY);
expr();
match(RCURLY);
}
/**
 * Matches a computed processing-instruction constructor, in one of two
 * forms (token 104 is presumably "processing-instruction" — confirm
 * against the generated token vocabulary):
 * {@code processing-instruction { nameExpr } { contentExpr? }} or
 * {@code processing-instruction ncname { contentExpr? }}.
 * A syntactic predicate on "104 LCURLY" selects the computed-name form.
 * Generated rule — do not edit by hand.
 */
public final void compPIConstructor() throws RecognitionException, TokenStreamException {
String name=null;
boolean synPredMatched382 = false;
if (((LA(1)==104))) {
int _m382 = mark();
synPredMatched382 = true;
inputState.guessing++;
// speculatively check for "<pi-keyword> {" (computed-name form)
try {
{
match(104);
match(LCURLY);
}
}
catch (RecognitionException pe) {
synPredMatched382 = false;
}
rewind(_m382);
inputState.guessing--;
}
if ( synPredMatched382 ) {
// computed-name form: { nameExpr } { contentExpr? }
match(104);
match(LCURLY);
expr();
match(RCURLY);
match(LCURLY);
{
// generated lookahead set: any token that can begin an expr;
// RCURLY means the content block is empty
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
expr();
break;
}
case RCURLY:
{
// empty content block
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RCURLY);
}
else if ((LA(1)==104)) {
// literal-name form: ncname { contentExpr? }
match(104);
name=ncnameOrKeyword();
match(LCURLY);
{
// generated lookahead set: any token that can begin an expr;
// RCURLY means the content block is empty
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case STRING_LITERAL:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case DOLLAR:
case LITERAL_external:
case LPAREN:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LT:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case PLUS:
case MINUS:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case PRAGMA:
case SLASH:
case DSLASH:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case SELF:
case XML_COMMENT:
case XML_PI:
case AT:
case PARENT:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case DOUBLE_LITERAL:
case DECIMAL_LITERAL:
case INTEGER_LITERAL:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
expr();
break;
}
case RCURLY:
{
// empty content block
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RCURLY);
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
/**
 * Matches the content of a computed constructor; delegates to expr().
 * Generated rule — do not edit by hand.
 */
public final void contentExpr() throws RecognitionException, TokenStreamException {
expr();
}
/**
 * Matches an atomic type name (a QName). The parsed name is currently
 * discarded by this generated rule.
 * Generated rule — do not edit by hand.
 */
public final void atomicType() throws RecognitionException, TokenStreamException {
String name = null;
name=qName();
}
/**
 * Matches an ItemType: "item()", one of the kind tests
 * (document/element/attribute/PI/comment/text/node/schema tests —
 * tokens 104-107 are presumably the PI/document/schema-* keywords;
 * confirm against the generated token vocabulary), or an atomic type
 * (a plain QName). Each kind-test alternative is selected by a
 * syntactic predicate that probes for "<keyword> (".
 * Generated rule — do not edit by hand.
 */
public final void itemType() throws RecognitionException, TokenStreamException {
// predicate: "item(" => item()
boolean synPredMatched395 = false;
if (((LA(1)==LITERAL_item))) {
int _m395 = mark();
synPredMatched395 = true;
inputState.guessing++;
try {
{
match(LITERAL_item);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched395 = false;
}
rewind(_m395);
inputState.guessing--;
}
if ( synPredMatched395 ) {
match(LITERAL_item);
match(LPAREN);
match(RPAREN);
}
else {
// predicate: token 104 followed by "(" => kind test
boolean synPredMatched397 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m397 = mark();
synPredMatched397 = true;
inputState.guessing++;
try {
{
match(105);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched397 = false;
}
rewind(_m397);
inputState.guessing--;
}
if ( synPredMatched397 ) {
kindTest();
}
else {
// predicate: "element(" => kind test
boolean synPredMatched399 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m399 = mark();
synPredMatched399 = true;
inputState.guessing++;
try {
{
match(LITERAL_element);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched399 = false;
}
rewind(_m399);
inputState.guessing--;
}
if ( synPredMatched399 ) {
kindTest();
}
else {
// predicate: "attribute(" => kind test
boolean synPredMatched401 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m401 = mark();
synPredMatched401 = true;
inputState.guessing++;
try {
{
match(LITERAL_attribute);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched401 = false;
}
rewind(_m401);
inputState.guessing--;
}
if ( synPredMatched401 ) {
kindTest();
}
else {
// predicate: token 104 followed by "(" => kind test
boolean synPredMatched403 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m403 = mark();
synPredMatched403 = true;
inputState.guessing++;
try {
{
match(104);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched403 = false;
}
rewind(_m403);
inputState.guessing--;
}
if ( synPredMatched403 ) {
kindTest();
}
else {
// predicate: "comment(" => kind test
boolean synPredMatched405 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m405 = mark();
synPredMatched405 = true;
inputState.guessing++;
try {
{
match(LITERAL_comment);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched405 = false;
}
rewind(_m405);
inputState.guessing--;
}
if ( synPredMatched405 ) {
kindTest();
}
else {
// predicate: "text(" => kind test
boolean synPredMatched407 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m407 = mark();
synPredMatched407 = true;
inputState.guessing++;
try {
{
match(LITERAL_text);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched407 = false;
}
rewind(_m407);
inputState.guessing--;
}
if ( synPredMatched407 ) {
kindTest();
}
else {
// predicate: "node(" => kind test
boolean synPredMatched409 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m409 = mark();
synPredMatched409 = true;
inputState.guessing++;
try {
{
match(LITERAL_node);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched409 = false;
}
rewind(_m409);
inputState.guessing--;
}
if ( synPredMatched409 ) {
kindTest();
}
else {
// predicate: token 106 followed by "(" => kind test
boolean synPredMatched411 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m411 = mark();
synPredMatched411 = true;
inputState.guessing++;
try {
{
match(106);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched411 = false;
}
rewind(_m411);
inputState.guessing--;
}
if ( synPredMatched411 ) {
kindTest();
}
else {
// predicate: token 107 followed by "(" => kind test
boolean synPredMatched413 = false;
if (((_tokenSet_9.member(LA(1))))) {
int _m413 = mark();
synPredMatched413 = true;
inputState.guessing++;
try {
{
match(107);
match(LPAREN);
}
}
catch (RecognitionException pe) {
synPredMatched413 = false;
}
rewind(_m413);
inputState.guessing--;
}
if ( synPredMatched413 ) {
kindTest();
}
else if ((_tokenSet_1.member(LA(1)))) {
// fallback: a plain QName denotes an atomic type
atomicType();
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}}}}}}}}}
}
/**
 * Matches a sequence-type occurrence indicator: "?", "*", or "+".
 * Generated rule — do not edit by hand.
 */
public final void occurrenceIndicator() throws RecognitionException, TokenStreamException {
switch ( LA(1)) {
case QUESTION:
{
match(QUESTION);
break;
}
case STAR:
{
match(STAR);
break;
}
case PLUS:
{
match(PLUS);
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Matches a document-node kind test (token 105 is presumably the
 * "document-node" keyword — confirm against the token vocabulary):
 * "( elementTest? | schemaElementTest? )" — the inner test is optional.
 * Generated rule — do not edit by hand.
 */
public final void documentTest() throws RecognitionException, TokenStreamException {
match(105);
match(LPAREN);
{
switch ( LA(1)) {
case LITERAL_element:
{
elementTest();
break;
}
case 107:
{
// token 107: schema-element test inside the document test
schemaElementTest();
break;
}
case RPAREN:
{
// empty: document test with no inner test
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RPAREN);
}
/**
 * Matches an element kind test:
 * {@code element ( (nameOrWildcard (, typeQName "?"?)? )? )}.
 * Both the name and the type annotation are optional; the parsed type
 * QName is currently discarded.
 * Generated rule — do not edit by hand.
 */
public final void elementTest() throws RecognitionException, TokenStreamException {
String tmpStr = null;
match(LITERAL_element);
match(LPAREN);
{
// generated lookahead set: any token that can begin a QName, plus STAR
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
elementNameOrWildcard();
{
switch ( LA(1)) {
case COMMA:
{
// optional ", typeQName" with optional trailing "?"
match(COMMA);
tmpStr=qName();
{
switch ( LA(1)) {
case QUESTION:
{
match(QUESTION);
break;
}
case RPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
break;
}
case RPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
break;
}
case RPAREN:
{
// empty: element()
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RPAREN);
}
/**
 * Matches an attribute kind test:
 * {@code attribute ( (nameOrWildcard (, typeQName)? )? )}.
 * Both the name and the type annotation are optional; the parsed type
 * QName is currently discarded.
 * Generated rule — do not edit by hand.
 */
public final void attributeTest() throws RecognitionException, TokenStreamException {
String tmpStr = null;
match(LITERAL_attribute);
match(LPAREN);
{
// generated lookahead set: any token that can begin a QName, plus STAR
switch ( LA(1)) {
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case STAR:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
attributeNameOrWildcard();
{
switch ( LA(1)) {
case COMMA:
{
// optional ", typeQName"
match(COMMA);
tmpStr=qName();
break;
}
case RPAREN:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
break;
}
case RPAREN:
{
// empty: attribute()
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RPAREN);
}
/**
 * Matches a schema-element kind test (token 107 is presumably the
 * "schema-element" keyword — confirm against the token vocabulary):
 * "( qName )". The parsed QName is currently discarded.
 * Generated rule — do not edit by hand.
 */
public final void schemaElementTest() throws RecognitionException, TokenStreamException {
String tmpStr = null;
match(107);
match(LPAREN);
tmpStr=qName();
match(RPAREN);
}
/**
 * Matches a schema-attribute kind test (token 106 is presumably the
 * "schema-attribute" keyword — confirm against the token vocabulary):
 * "( qName )". The parsed QName is currently discarded.
 * Generated rule — do not edit by hand.
 */
public final void schemaAttributeTest() throws RecognitionException, TokenStreamException {
String tmpStr = null;
match(106);
match(LPAREN);
tmpStr=qName();
match(RPAREN);
}
/**
 * Matches a processing-instruction kind test (token 104 is presumably
 * the "processing-instruction" keyword — confirm against the token
 * vocabulary): "( NCName? | StringLiteral? )" — the target is optional.
 * Generated rule — do not edit by hand.
 */
public final void piTest() throws RecognitionException, TokenStreamException {
match(104);
match(LPAREN);
{
switch ( LA(1)) {
case NCNAME:
{
match(NCNAME);
break;
}
case STRING_LITERAL:
{
match(STRING_LITERAL);
break;
}
case RPAREN:
{
// empty: no PI target specified
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
match(RPAREN);
}
/**
 * Matches a comment kind test: {@code comment()}.
 * Generated rule — do not edit by hand.
 */
public final void commentTest() throws RecognitionException, TokenStreamException {
match(LITERAL_comment);
match(LPAREN);
match(RPAREN);
}
/**
 * Matches a text kind test: {@code text()}.
 * Generated rule — do not edit by hand.
 */
public final void textTest() throws RecognitionException, TokenStreamException {
match(LITERAL_text);
match(LPAREN);
match(RPAREN);
}
/**
 * Matches the any-kind test: {@code node()}.
 * Generated rule — do not edit by hand.
 */
public final void anyKindTest() throws RecognitionException, TokenStreamException {
match(LITERAL_node);
match(LPAREN);
match(RPAREN);
}
/**
 * Matches the name part of an attribute test: either the wildcard "*"
 * or a QName. The parsed name is currently discarded.
 * Generated rule — do not edit by hand.
 */
public final void attributeNameOrWildcard() throws RecognitionException, TokenStreamException {
String name=null;
switch ( LA(1)) {
case STAR:
{
// wildcard: any attribute name
match(STAR);
break;
}
// generated lookahead set: any token that can begin a QName
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
name=qName();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Matches the name part of an element test: either the wildcard "*"
 * or a QName. The parsed name is currently discarded.
 * Generated rule — do not edit by hand.
 */
public final void elementNameOrWildcard() throws RecognitionException, TokenStreamException {
String name=null;
switch ( LA(1)) {
case STAR:
{
// wildcard: any element name
match(STAR);
break;
}
// generated lookahead set: any token that can begin a QName
case LITERAL_xquery:
case LITERAL_version:
case LITERAL_module:
case LITERAL_namespace:
case LITERAL_encoding:
case LITERAL_declare:
case 12:
case LITERAL_default:
case LITERAL_collation:
case 15:
case LITERAL_construction:
case LITERAL_ordering:
case LITERAL_order:
case 19:
case LITERAL_import:
case LITERAL_schema:
case LITERAL_element:
case LITERAL_function:
case LITERAL_variable:
case LITERAL_option:
case LITERAL_preserve:
case LITERAL_strip:
case LITERAL_ordered:
case LITERAL_unordered:
case LITERAL_empty:
case LITERAL_greatest:
case LITERAL_least:
case 34:
case LITERAL_inherit:
case 37:
case LITERAL_at:
case LITERAL_external:
case LITERAL_as:
case LITERAL_for:
case LITERAL_let:
case LITERAL_some:
case LITERAL_every:
case LITERAL_typeswitch:
case LITERAL_if:
case LITERAL_try:
case LITERAL_catch:
case LITERAL_return:
case LITERAL_in:
case LITERAL_where:
case LITERAL_stable:
case LITERAL_by:
case LITERAL_ascending:
case LITERAL_descending:
case LITERAL_satisfies:
case LITERAL_case:
case LITERAL_then:
case LITERAL_else:
case LITERAL_or:
case LITERAL_and:
case LITERAL_eq:
case LITERAL_ne:
case LITERAL_lt:
case LITERAL_le:
case LITERAL_gt:
case LITERAL_ge:
case LITERAL_is:
case LITERAL_to:
case LITERAL_div:
case LITERAL_idiv:
case LITERAL_mod:
case LITERAL_union:
case LITERAL_intersect:
case LITERAL_except:
case LITERAL_instance:
case LITERAL_of:
case LITERAL_treat:
case LITERAL_castable:
case LITERAL_cast:
case LITERAL_validate:
case LITERAL_text:
case LITERAL_node:
case LITERAL_attribute:
case LITERAL_comment:
case 104:
case 105:
case 106:
case 107:
case LITERAL_document:
case LITERAL_child:
case LITERAL_self:
case LITERAL_descendant:
case 117:
case LITERAL_following:
case 119:
case LITERAL_parent:
case LITERAL_ancestor:
case 122:
case LITERAL_preceding:
case 124:
case 142:
case LITERAL_item:
case NCNAME:
case LITERAL_lax:
case LITERAL_strict:
{
name=qName();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
/**
 * Matches a single reserved XQuery keyword token and returns its textual form.
 *
 * <p>Consumes exactly one token. While the parser is in guessing (syntactic
 * predicate) mode, the token is still consumed but {@code null} is returned,
 * mirroring the generated ANTLR action guards.
 *
 * @return the keyword text, or {@code null} when guessing
 * @throws RecognitionException if the lookahead token is not a reserved keyword
 * @throws TokenStreamException on lexer stream errors
 */
public final String reservedKeywords() throws RecognitionException, TokenStreamException {
    String name = null;
    final int tokenType = LA(1);
    final String keyword = reservedKeywordText(tokenType);
    if (keyword == null) {
        // Lookahead token is not one of the reserved keywords.
        throw new NoViableAltException(LT(1), getFilename());
    }
    // tokenType == LA(1) by construction, so match() always succeeds here and
    // consumes the token exactly as the original per-case match() calls did.
    match(tokenType);
    if (inputState.guessing == 0) {
        name = keyword;
    }
    return name;
}

/**
 * Maps a token type to the reserved-keyword text it denotes.
 *
 * @return the keyword string, or {@code null} if the token type is not a
 *     reserved keyword (numeric cases are token types with no LITERAL_ alias)
 */
private static String reservedKeywordText(int tokenType) {
    switch (tokenType) {
        case LITERAL_ancestor: return "ancestor";
        case 122: return "ancestor-or-self";
        case LITERAL_and: return "and";
        case LITERAL_as: return "as";
        case LITERAL_ascending: return "ascending";
        case LITERAL_at: return "at";
        case LITERAL_attribute: return "attribute";
        case 15: return "base-uri";
        case 12: return "boundary-space";
        case LITERAL_by: return "by";
        case LITERAL_case: return "case";
        case LITERAL_cast: return "cast";
        case LITERAL_castable: return "castable";
        case LITERAL_catch: return "catch";
        case LITERAL_child: return "child";
        case LITERAL_collation: return "collation";
        case LITERAL_comment: return "comment";
        case LITERAL_construction: return "construction";
        case 19: return "copy-namespaces";
        case LITERAL_declare: return "declare";
        case LITERAL_default: return "default";
        case LITERAL_descendant: return "descendant";
        case 117: return "descendant-or-self";
        case LITERAL_descending: return "descending";
        case LITERAL_div: return "div";
        case LITERAL_document: return "document";
        case 105: return "document-node";
        case LITERAL_element: return "element";
        case LITERAL_else: return "else";
        case LITERAL_empty: return "empty";
        case 142: return "empty-sequence";
        case LITERAL_encoding: return "encoding";
        case LITERAL_eq: return "eq";
        case LITERAL_every: return "every";
        case LITERAL_except: return "except";
        case LITERAL_external: return "external";
        case LITERAL_following: return "following";
        case 119: return "following-sibling";
        case LITERAL_for: return "for";
        case LITERAL_function: return "function";
        case LITERAL_ge: return "ge";
        case LITERAL_greatest: return "greatest";
        case LITERAL_gt: return "gt";
        case LITERAL_idiv: return "idiv";
        case LITERAL_if: return "if";
        case LITERAL_import: return "import";
        case LITERAL_in: return "in";
        case LITERAL_inherit: return "inherit";
        case LITERAL_instance: return "instance";
        case LITERAL_intersect: return "intersect";
        case LITERAL_is: return "is";
        case LITERAL_item: return "item";
        case LITERAL_lax: return "lax";
        case LITERAL_le: return "le";
        case LITERAL_least: return "least";
        case LITERAL_let: return "let";
        case LITERAL_lt: return "lt";
        case LITERAL_mod: return "mod";
        case LITERAL_module: return "module";
        case LITERAL_namespace: return "namespace";
        case LITERAL_ne: return "ne";
        case LITERAL_node: return "node";
        case 37: return "no-inherit";
        case 34: return "no-preserve";
        case LITERAL_of: return "of";
        case LITERAL_option: return "option";
        case LITERAL_or: return "or";
        case LITERAL_order: return "order";
        case LITERAL_ordered: return "ordered";
        case LITERAL_ordering: return "ordering";
        case LITERAL_parent: return "parent";
        case LITERAL_preceding: return "preceding";
        case 124: return "preceding-sibling";
        case LITERAL_preserve: return "preserve";
        case 104: return "processing-instruction";
        case LITERAL_return: return "return";
        case LITERAL_satisfies: return "satisfies";
        case LITERAL_schema: return "schema";
        case 106: return "schema-attribute";
        case 107: return "schema-element";
        case LITERAL_self: return "self";
        case LITERAL_some: return "some";
        case LITERAL_stable: return "stable";
        case LITERAL_strict: return "strict";
        case LITERAL_strip: return "strip";
        case LITERAL_text: return "text";
        case LITERAL_then: return "then";
        case LITERAL_to: return "to";
        case LITERAL_treat: return "treat";
        case LITERAL_try: return "try";
        case LITERAL_typeswitch: return "typeswitch";
        case LITERAL_xquery: return "xquery";
        case LITERAL_union: return "union";
        case LITERAL_unordered: return "unordered";
        case LITERAL_validate: return "validate";
        case LITERAL_variable: return "variable";
        case LITERAL_version: return "version";
        case LITERAL_where: return "where";
        default: return null;
    }
}
// Human-readable name for every token type, indexed by the token's numeric id.
// Generated by ANTLR; used by the runtime for diagnostics (e.g. the messages
// produced by NoViableAltException / MismatchedTokenException). Quoted entries
// are keyword literals; bare identifiers are lexer token names.
public static final String[] _tokenNames = {
"<0>",
"EOF",
"<2>",
"NULL_TREE_LOOKAHEAD",
"\"xquery\"",
"\"version\"",
"\"module\"",
"\"namespace\"",
"STRING_LITERAL",
"\"encoding\"",
"EQ",
"\"declare\"",
"\"boundary-space\"",
"\"default\"",
"\"collation\"",
"\"base-uri\"",
"\"construction\"",
"\"ordering\"",
"\"order\"",
"\"copy-namespaces\"",
"\"import\"",
"\"schema\"",
"\"element\"",
"\"function\"",
"\"variable\"",
"\"option\"",
"SEMICOLON",
"\"preserve\"",
"\"strip\"",
"\"ordered\"",
"\"unordered\"",
"\"empty\"",
"\"greatest\"",
"\"least\"",
"\"no-preserve\"",
"COMMA",
"\"inherit\"",
"\"no-inherit\"",
"\"at\"",
"DOLLAR",
"COLON",
"\"external\"",
"'('",
"')'",
"\"as\"",
"LCURLY",
"RCURLY",
"\"for\"",
"\"let\"",
"\"some\"",
"\"every\"",
"\"typeswitch\"",
"\"if\"",
"\"try\"",
"\"catch\"",
"\"return\"",
"\"in\"",
"\"where\"",
"\"stable\"",
"\"by\"",
"\"ascending\"",
"\"descending\"",
"\"satisfies\"",
"\"case\"",
"\"then\"",
"\"else\"",
"\"or\"",
"\"and\"",
"LT",
"GT",
"\"eq\"",
"\"ne\"",
"\"lt\"",
"\"le\"",
"\"gt\"",
"\"ge\"",
"NEQ",
"GTEQ",
"LTEQ",
"\"is\"",
"\"to\"",
"PLUS",
"MINUS",
"STAR",
"\"div\"",
"\"idiv\"",
"\"mod\"",
"\"union\"",
"UNION",
"\"intersect\"",
"\"except\"",
"\"instance\"",
"\"of\"",
"\"treat\"",
"\"castable\"",
"\"cast\"",
"\"validate\"",
"PRAGMA",
"SLASH",
"DSLASH",
"\"text\"",
"\"node\"",
"\"attribute\"",
"\"comment\"",
"\"processing-instruction\"",
"\"document-node\"",
"\"schema-attribute\"",
"\"schema-element\"",
"\"document\"",
"SELF",
"XML_COMMENT",
"XML_PI",
"AT",
"PARENT",
"\"child\"",
"\"self\"",
"\"descendant\"",
"\"descendant-or-self\"",
"\"following\"",
"\"following-sibling\"",
"\"parent\"",
"\"ancestor\"",
"\"ancestor-or-self\"",
"\"preceding\"",
"\"preceding-sibling\"",
"LPPAREN",
"RPPAREN",
"DOUBLE_LITERAL",
"DECIMAL_LITERAL",
"INTEGER_LITERAL",
"END_TAG_START",
"ELEMENT_CONTENT",
"QUOT",
"APOS",
"QUOT_ATTRIBUTE_CONTENT",
"APOS_ATTRIBUTE_CONTENT",
"XQDOC_COMMENT",
"XML_COMMENT_END",
"XML_PI_END",
"XML_CDATA",
"XML_CDATA_END",
"QUESTION",
"\"empty-sequence\"",
"\"item\"",
"NCNAME",
"\"lax\"",
"\"strict\"",
"ANDEQ",
"OREQ",
"XML_PI_START",
"LETTER",
"DIGITS",
"HEX_DIGITS",
"NMSTART",
"NMCHAR",
"WS",
"EXPR_COMMENT",
"PRAGMA_CONTENT",
"PRAGMA_QNAME",
"PREDEFINED_ENTITY_REF",
"CHAR_REF",
"NEXT_TOKEN",
"CHAR",
"BASECHAR",
"IDEOGRAPHIC",
"COMBINING_CHAR",
"DIGIT",
"EXTENDER"
};
// Generated lookahead bitsets: each mk_tokenSet_N() supplies the raw 64-bit
// words backing the corresponding _tokenSet_N used in prediction decisions.

private static final long[] mk_tokenSet_0() {
    return new long[] { -115483147764752L, -6917529027657887777L, 508163L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());

private static final long[] mk_tokenSet_1() {
    return new long[] { -120430950090000L, 2304752233531674575L, 507904L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());

private static final long[] mk_tokenSet_2() {
    return new long[] { -115483147764752L, -6917529027657887777L, 507907L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());

private static final long[] mk_tokenSet_3() {
    return new long[] { -611949377660623870L, 4611686018695823358L, 0L, 0L };
}
public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());

private static final long[] mk_tokenSet_4() {
    return new long[] { 1024L, 28720L, 0L, 0L };
}
public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());

private static final long[] mk_tokenSet_5() {
    return new long[] { -115483147764752L, -6917529036248215585L, 507907L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_5 = new BitSet(mk_tokenSet_5());

private static final long[] mk_tokenSet_6() {
    return new long[] { -120430950090000L, 2305596658462330831L, 507904L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_6 = new BitSet(mk_tokenSet_6());

private static final long[] mk_tokenSet_7() {
    return new long[] { -115483147764752L, -6918373512718479393L, 507907L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_7 = new BitSet(mk_tokenSet_7());

private static final long[] mk_tokenSet_8() {
    return new long[] { 0L, 70931969008992256L, 0L, 0L };
}
public static final BitSet _tokenSet_8 = new BitSet(mk_tokenSet_8());

private static final long[] mk_tokenSet_9() {
    return new long[] { 4194304L, 17523466567680L, 0L, 0L };
}
public static final BitSet _tokenSet_9 = new BitSet(mk_tokenSet_9());

private static final long[] mk_tokenSet_10() {
    return new long[] { -120430950090000L, 2304752233532198863L, 507904L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_10 = new BitSet(mk_tokenSet_10());

private static final long[] mk_tokenSet_11() {
    return new long[] { 4194304L, 19585050869760L, 0L, 0L };
}
public static final BitSet _tokenSet_11 = new BitSet(mk_tokenSet_11());

private static final long[] mk_tokenSet_12() {
    return new long[] { 4194304L, 230691283402768L, 0L, 0L };
}
public static final BitSet _tokenSet_12 = new BitSet(mk_tokenSet_12());

private static final long[] mk_tokenSet_13() {
    // Words 0-2 carry data; the remaining five words of the original
    // new long[8] were left at their default of zero.
    return new long[] { -16L, -17179869217L, 1099511627775L, 0L, 0L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_13 = new BitSet(mk_tokenSet_13());

private static final long[] mk_tokenSet_14() {
    return new long[] { 35184372088832L, 211106232533008L, 2056L, 0L, 0L, 0L };
}
public static final BitSet _tokenSet_14 = new BitSet(mk_tokenSet_14());
}
|
package com.taoyuanx.ca.shellui.common;
/**
 * CA (certificate authority) related constants.
 */
public class CAConstant {

    /**
     * Supported asymmetric key algorithms with their persistent integer codes.
     *
     * <p>Fields are {@code final} because enum constants are shared singletons:
     * the original mutable public fields invited accidental global state changes.
     * They stay {@code public} so existing direct field reads keep compiling.
     */
    public enum KeyType {
        RSA("RSA", 1), SM2("SM2", 2), ECDSA("ECDSA", 3), DSA("DSA", 4);

        /** Display name of the algorithm. */
        public final String name;
        /** Stable numeric code used when persisting/transmitting the algorithm. */
        public final Integer value;

        KeyType(String name, Integer value) {
            this.name = name;
            this.value = value;
        }

        /**
         * Looks up a {@code KeyType} by its numeric code.
         *
         * @param value the code to resolve (may be {@code null})
         * @return the matching constant, or {@code null} if no constant has
         *     that code (including when {@code value} is {@code null})
         */
        public static KeyType forValue(Integer value) {
            for (KeyType v : KeyType.values()) {
                if (v.value.equals(value)) {
                    return v;
                }
            }
            return null;
        }
    }
}
|
package com.coolweather.android;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.coolweather.android.gson.Forecast;
import com.coolweather.android.gson.Weather;
import com.coolweather.android.service.AutoUpdateService;
import com.coolweather.android.util.HttpUtil;
import com.coolweather.android.util.Utility;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
/**
 * Shows the weather for one city: current conditions, a multi-day forecast,
 * air quality and lifestyle suggestions, with a Bing daily picture as the
 * background image.
 *
 * <p>Weather JSON is cached in the default SharedPreferences under the key
 * "weather" and the background image URL under "bing_pic"; cached data is
 * rendered immediately, otherwise the data is fetched from the network.
 * Pull-to-refresh re-queries the currently displayed city.
 */
public class WeatherActivity extends AppCompatActivity {

    private ScrollView weatherLayout;
    private TextView titleCity;
    private TextView titleUpdateTime;
    private TextView degreeText;
    private TextView weatherInfoText;
    private LinearLayout forecastLayout;
    private TextView aqiText;
    private TextView pm25Text;
    private TextView comfortText;
    private TextView carWashText;
    private TextView sportText;
    private ImageView bingPicImg;
    // NOTE(review): public, presumably accessed from another component
    // (e.g. a fragment toggling refresh / opening the drawer) — confirm
    // callers before narrowing access.
    public SwipeRefreshLayout swipeRefresh;
    // Weather id of the city currently shown; used by pull-to-refresh.
    private String mWeatherId;
    public DrawerLayout drawerLayout;
    private Button navButton;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Draw the layout behind a transparent status bar on Lollipop+.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            View decorView = getWindow().getDecorView();
            decorView.setSystemUiVisibility(
                    View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                    | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
            getWindow().setStatusBarColor(Color.TRANSPARENT);
        }
        setContentView(R.layout.activity_weather);
        // Bind views.
        weatherLayout = (ScrollView) findViewById(R.id.weather_layout);
        titleCity = (TextView) findViewById(R.id.title_city);
        titleUpdateTime = (TextView) findViewById(R.id.title_update_time);
        degreeText = (TextView) findViewById(R.id.degree_text);
        weatherInfoText = (TextView) findViewById(R.id.weather_info_text);
        forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);
        aqiText = (TextView) findViewById(R.id.aqi_text);
        pm25Text = (TextView) findViewById(R.id.pm25_text);
        comfortText = (TextView) findViewById(R.id.comfort_text);
        carWashText = (TextView) findViewById(R.id.car_wash_text);
        sportText = (TextView) findViewById(R.id.sport_text);
        bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);
        drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        navButton = (Button) findViewById(R.id.nav_button);
        swipeRefresh = (SwipeRefreshLayout) findViewById(R.id.swip_refresh);
        swipeRefresh.setColorSchemeResources(R.color.colorPrimary);
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        String weatherString = prefs.getString("weather", null);
        String bingPic = prefs.getString("bing_pic", null);
        // The navigation button opens the city-chooser drawer.
        navButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                drawerLayout.openDrawer(GravityCompat.START);
            }
        });
        // Use the cached background image URL if present, else fetch a new one.
        if (bingPic != null) {
            Glide.with(this).load(bingPic).into(bingPicImg);
        } else {
            loadBingPic();
        }
        if (weatherString != null) {
            // Cache hit: parse and render the stored weather JSON directly.
            Weather weather = Utility.handleWeatherResponse(weatherString);
            mWeatherId = weather.basic.weatherId;
            showWeatherInfo(weather);
        } else {
            // Cache miss: hide the layout and query the server.
            mWeatherId = getIntent().getStringExtra("weather_id");
            weatherLayout.setVisibility(View.INVISIBLE);
            requestWeather(mWeatherId);
        }
        swipeRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                requestWeather(mWeatherId);
            }
        });
    }

    /**
     * Queries the weather server for the given weather id, renders the result
     * and caches the raw JSON on success. Also refreshes the background image.
     *
     * @param weatherId server-side id of the city to query
     */
    public void requestWeather(final String weatherId) {
        // NOTE(review): API key is hard-coded; consider moving it to build config.
        String weatherUrl = "http://guolin.tech/api/weather?cityid="
                + weatherId + "&key=047a2c7fc2674de3a926c34b5c185e4d";
        HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
                // Callbacks arrive on a worker thread; UI updates must be posted.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(WeatherActivity.this, "获取天气信息失败",
                                Toast.LENGTH_SHORT).show();
                        swipeRefresh.setRefreshing(false);
                    }
                });
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String responseText = response.body().string();
                final Weather weather = Utility.handleWeatherResponse(responseText);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (weather != null && "ok".equals(weather.status)) {
                            // Cache raw JSON so the next launch can skip the network.
                            SharedPreferences.Editor editor = PreferenceManager.
                                    getDefaultSharedPreferences(WeatherActivity.this).
                                    edit();
                            editor.putString("weather", responseText);
                            editor.apply();
                            mWeatherId = weather.basic.weatherId;
                            showWeatherInfo(weather);
                        } else {
                            Toast.makeText(WeatherActivity.this, "获取天气信息失败",
                                    Toast.LENGTH_SHORT).show();
                        }
                        swipeRefresh.setRefreshing(false);
                    }
                });
            }
        });
        loadBingPic();
    }

    /**
     * Renders the fields of a parsed {@link Weather} object into the UI and
     * starts the background auto-update service.
     */
    private void showWeatherInfo(Weather weather) {
        String cityName = weather.basic.cityName;
        // Update time arrives as "date time"; show only the time part.
        String updateTime = weather.basic.update.updateTime.split(" ")[1];
        String degree = weather.now.temperature + "℃";
        String weatherInfo = weather.now.more.info;
        titleCity.setText(cityName);
        titleUpdateTime.setText(updateTime);
        degreeText.setText(degree);
        weatherInfoText.setText(weatherInfo);
        // Rebuild the forecast list: one inflated item view per forecast day.
        forecastLayout.removeAllViews();
        for (Forecast forecast : weather.forecastList) {
            View view = LayoutInflater.from(this)
                    .inflate(R.layout.forecast_item, forecastLayout, false);
            TextView dateText = (TextView) view.findViewById(R.id.date_text);
            TextView infoText = (TextView) view.findViewById(R.id.info_text);
            TextView maxText = (TextView) view.findViewById(R.id.max_text);
            TextView minText = (TextView) view.findViewById(R.id.min_text);
            dateText.setText(forecast.date);
            infoText.setText(forecast.more.info);
            maxText.setText(forecast.temperature.max);
            minText.setText(forecast.temperature.min);
            forecastLayout.addView(view);
        }
        // AQI data is optional in the server response.
        if (weather.aqi != null) {
            aqiText.setText(weather.aqi.city.aqi);
            pm25Text.setText(weather.aqi.city.pm25);
        }
        String comfort = "舒适度:" + weather.suggestion.comfort.info;
        String carWash = "洗车指数:" + weather.suggestion.carWash.info;
        String sport = "运动建议:" + weather.suggestion.sport.info;
        comfortText.setText(comfort);
        carWashText.setText(carWash);
        sportText.setText(sport);
        weatherLayout.setVisibility(View.VISIBLE);
        Intent intent = new Intent(this, AutoUpdateService.class);
        startService(intent);
    }

    /**
     * Fetches the Bing daily picture URL, caches it, then loads it as the
     * background image. Failures are logged and otherwise ignored (best-effort).
     */
    private void loadBingPic() {
        String requestBingPic = "http://guolin.tech/api/bing_pic";
        HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                // Best-effort decoration: keep the previous background on failure.
                e.printStackTrace();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String bingPic = response.body().string();
                SharedPreferences.Editor editor = PreferenceManager.
                        getDefaultSharedPreferences(WeatherActivity.this).edit();
                editor.putString("bing_pic", bingPic);
                editor.apply();
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);
                    }
                });
            }
        });
    }
}
|
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.utils.v202005;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.google.api.ads.admanager.jaxws.v202005.AdUnitTargeting;
import com.google.api.ads.admanager.jaxws.v202005.BooleanValue;
import com.google.api.ads.admanager.jaxws.v202005.ColumnType;
import com.google.api.ads.admanager.jaxws.v202005.Date;
import com.google.api.ads.admanager.jaxws.v202005.DateTime;
import com.google.api.ads.admanager.jaxws.v202005.DateTimeValue;
import com.google.api.ads.admanager.jaxws.v202005.DateValue;
import com.google.api.ads.admanager.jaxws.v202005.InventoryTargeting;
import com.google.api.ads.admanager.jaxws.v202005.NumberValue;
import com.google.api.ads.admanager.jaxws.v202005.ResultSet;
import com.google.api.ads.admanager.jaxws.v202005.Row;
import com.google.api.ads.admanager.jaxws.v202005.SetValue;
import com.google.api.ads.admanager.jaxws.v202005.Targeting;
import com.google.api.ads.admanager.jaxws.v202005.TargetingValue;
import com.google.api.ads.admanager.jaxws.v202005.TextValue;
import com.google.api.ads.admanager.jaxws.v202005.Value;
import com.google.common.collect.Lists;
import java.util.LinkedHashSet;
import java.util.Set;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Test for {@link Pql}. */
@RunWith(JUnit4.class)
public class PqlTest {
// Time zone applied to the DateTime fixture (UTC+08:00).
private static final String TIME_ZONE_ID1 = "Asia/Shanghai";
// Result-set column headers.
private ColumnType column1;
private ColumnType column2;
private ColumnType column3;
// Text fixtures; textValue4 embeds a comma to exercise CSV quoting.
private TextValue textValue1;
private TextValue textValue2;
private TextValue textValue3;
private TextValue textValue4;
// Boolean fixtures (false, true, false — see setUp).
private BooleanValue booleanValue1;
private BooleanValue booleanValue2;
private BooleanValue booleanValue3;
// Numeric fixtures: whole number, decimal, negative.
private NumberValue numberValue1;
private NumberValue numberValue2;
private NumberValue numberValue3;
// Temporal Value fixtures wrapping date1 / dateTime1 below.
private DateValue dateValue1;
private DateTimeValue dateTimeValue1;
// Targeting fixture with a single targeted ad unit (id "100").
private Targeting targeting1;
private TargetingValue targetingValue1;
// SetValue fixtures: homogeneous sets plus one deliberately mixed (invalid) set.
private SetValue numberSetValue;
private SetValue textSetValue;
private SetValue dateSetValue;
private SetValue dateTimeSetValue;
private SetValue mixedSetValue;
private SetValue commaTextSetValue;
// Raw temporal objects shared by the Value fixtures: 2012-12-02 12:45:00.
private DateTime dateTime1;
private Date date1;
// Declarative expected-exception support for the negative tests.
@Rule public ExpectedException thrown = ExpectedException.none();
public PqlTest() {}
@Before
public void setUp() throws Exception {
// Column headers.
column1 = new ColumnType();
column1.setLabelName("column1");
column2 = new ColumnType();
column2.setLabelName("column2");
column3 = new ColumnType();
column3.setLabelName("column3");
// Text scalars; textValue4 contains an embedded comma for CSV-quoting tests.
textValue1 = new TextValue();
textValue1.setValue("value1");
textValue2 = new TextValue();
textValue2.setValue("value2");
textValue3 = new TextValue();
textValue3.setValue("value3");
textValue4 = new TextValue();
textValue4.setValue("comma,separated");
// Boolean scalars.
booleanValue1 = new BooleanValue();
booleanValue1.setValue(false);
booleanValue2 = new BooleanValue();
booleanValue2.setValue(true);
booleanValue3 = new BooleanValue();
booleanValue3.setValue(false);
// Number scalars: whole, decimal, negative (values are strings in the API).
numberValue1 = new NumberValue();
numberValue1.setValue("1");
numberValue2 = new NumberValue();
numberValue2.setValue("1.02");
numberValue3 = new NumberValue();
numberValue3.setValue("-1");
// Temporal fixtures: 2012-12-02 12:45:00 in Asia/Shanghai.
dateTime1 = new DateTime();
date1 = new Date();
date1.setYear(2012);
date1.setMonth(12);
date1.setDay(2);
dateTime1.setDate(date1);
dateTime1.setHour(12);
dateTime1.setMinute(45);
dateTime1.setSecond(0);
dateTime1.setTimeZoneId(TIME_ZONE_ID1);
dateTimeValue1 = new DateTimeValue();
dateTimeValue1.setValue(dateTime1);
dateValue1 = new DateValue();
dateValue1.setValue(date1);
// Targeting fixture: inventory targeting of a single ad unit.
AdUnitTargeting adUnitTargeting = new AdUnitTargeting();
adUnitTargeting.setAdUnitId("100");
InventoryTargeting inventoryTargeting = new InventoryTargeting();
inventoryTargeting.getTargetedAdUnits().add(adUnitTargeting);
targeting1 = new Targeting();
targeting1.setInventoryTargeting(inventoryTargeting);
targetingValue1 = new TargetingValue();
targetingValue1.setValue(targeting1);
// Homogeneous sets of each scalar kind.
numberSetValue = new SetValue();
numberSetValue.getValues().add(numberValue1);
numberSetValue.getValues().add(numberValue3);
textSetValue = new SetValue();
textSetValue.getValues().add(textValue1);
textSetValue.getValues().add(textValue2);
dateSetValue = new SetValue();
dateSetValue.getValues().add(dateValue1);
dateTimeSetValue = new SetValue();
dateTimeSetValue.getValues().add(dateTimeValue1);
// Deliberately heterogeneous set — the conversion methods must reject it.
mixedSetValue = new SetValue();
mixedSetValue.getValues().add(textValue1);
mixedSetValue.getValues().add(dateTimeValue1);
// Text set containing an embedded comma for CSV-quoting tests.
commaTextSetValue = new SetValue();
commaTextSetValue.getValues().add(textValue1);
commaTextSetValue.getValues().add(textValue4);
}
@Test
public void testToString() {
// Scalars render as their literal text.
assertEquals("value1", Pql.toString(textValue1));
assertEquals("false", Pql.toString(booleanValue1));
assertEquals("1", Pql.toString(numberValue1));
// Temporal values include the fixture's +08:00 offset (Asia/Shanghai).
assertEquals("2012-12-02T12:45:00+08:00", Pql.toString(dateTimeValue1));
assertEquals("2012-12-02", Pql.toString(dateValue1));
// Sets join members with commas; members containing commas are quoted.
assertEquals("2012-12-02", Pql.toString(dateSetValue));
assertEquals("2012-12-02T12:45:00+08:00", Pql.toString(dateTimeSetValue));
assertEquals("value1,\"comma,separated\"", Pql.toString(commaTextSetValue));
}
@Test
public void testToString_null() {
// Values whose payload was never set render as the empty string.
assertEquals("", Pql.toString(new TextValue()));
assertEquals("", Pql.toString(new BooleanValue()));
assertEquals("", Pql.toString(new NumberValue()));
assertEquals("", Pql.toString(new DateTimeValue()));
assertEquals("", Pql.toString(new DateValue()));
assertEquals("", Pql.toString(new SetValue()));
}
@Test
public void testToString_invalidValue() {
// A Value subclass unknown to Pql must be rejected.
thrown.expect(IllegalArgumentException.class);
Pql.toString(new MyValue());
}
@Test
public void testGetApiValue() {
// Scalars unwrap to natural Java types: String, Boolean, Long/Double.
assertEquals("value1", Pql.getApiValue(textValue1));
assertEquals(false, Pql.getApiValue(booleanValue1));
assertEquals(1L, Pql.getApiValue(numberValue1));
assertEquals(1.02, Pql.getApiValue(numberValue2));
// Temporal values keep their API objects (no string conversion).
assertEquals(dateTime1, Pql.getApiValue(dateTimeValue1));
assertEquals(date1, Pql.getApiValue(dateValue1));
// An unset value unwraps to null.
assertNull(Pql.getApiValue(new TextValue()));
}
@Test
@SuppressWarnings("unchecked")
public void testGetApiValue_numberSet() {
    // A number set unwraps to a Set of boxed Long values.
    Set<Object> result = (Set<Object>) Pql.getApiValue(numberSetValue);
    assertTrue(result.contains(1L));
    assertTrue(result.contains(-1L));
    assertEquals(2, result.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetApiValue_textSet() {
    // A text set unwraps to a Set of the underlying strings.
    Set<Object> result = (Set<Object>) Pql.getApiValue(textSetValue);
    assertTrue(result.contains("value1"));
    assertTrue(result.contains("value2"));
    assertEquals(2, result.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetApiValue_commaTextSet() {
    // Embedded commas survive unwrapping untouched (quoting is a CSV concern).
    Set<Object> result = (Set<Object>) Pql.getApiValue(commaTextSetValue);
    assertTrue(result.contains("value1"));
    assertTrue(result.contains("comma,separated"));
    assertEquals(2, result.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetApiValue_dateSet() {
    // A date set unwraps to a Set of API Date objects.
    Set<Object> result = (Set<Object>) Pql.getApiValue(dateSetValue);
    assertTrue(result.contains(date1));
    assertEquals(1, result.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetApiValue_dateTimeSet() {
    // A date-time set unwraps to a Set of API DateTime objects.
    Set<Object> result = (Set<Object>) Pql.getApiValue(dateTimeSetValue);
    assertTrue(result.contains(dateTime1));
    assertEquals(1, result.size());
}
@Test
public void testGetApiValue_mixedSet() {
// Sets must be homogeneous; mixing text and date-time members is rejected.
thrown.expect(IllegalArgumentException.class);
Pql.getApiValue(mixedSetValue);
}
@Test
public void testGetNativeValue() {
// Text/boolean/number behave exactly like getApiValue...
assertEquals("value1", Pql.getNativeValue(textValue1));
assertEquals(false, Pql.getNativeValue(booleanValue1));
assertEquals(1L, Pql.getNativeValue(numberValue1));
assertEquals(1.02, Pql.getNativeValue(numberValue2));
// ...but temporal values are converted: DateTime via DateTimes, Date to its
// ISO date string.
assertEquals(
DateTimes.toDateTime(dateTimeValue1.getValue()), Pql.getNativeValue(dateTimeValue1));
assertEquals("2012-12-02", Pql.getNativeValue(dateValue1));
}
@Test
@SuppressWarnings("unchecked")
public void testGetNativeValue_numberSet() {
    // Number sets convert to a Set of boxed Long values.
    Set<Object> converted = (Set<Object>) Pql.getNativeValue(numberSetValue);
    assertTrue(converted.contains(1L));
    assertTrue(converted.contains(-1L));
    assertEquals(2, converted.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetNativeValue_textSet() {
Set<Object> nativeValue = (Set<Object>) Pql.getNativeValue(textSetValue);
assertTrue(nativeValue.contains("value1"));
assertTrue(nativeValue.contains("value2"));
assertEquals(2, nativeValue.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetNativeValue_commaTextSet() {
Set<Object> nativeValue = (Set<Object>) Pql.getNativeValue(commaTextSetValue);
assertTrue(nativeValue.contains("value1"));
assertTrue(nativeValue.contains("comma,separated"));
assertEquals(2, nativeValue.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetNativeValue_dateSet() {
Set<Object> nativeValue = (Set<Object>) Pql.getNativeValue(dateSetValue);
assertTrue(nativeValue.contains("2012-12-02"));
assertEquals(1, nativeValue.size());
}
@Test
@SuppressWarnings("unchecked")
public void testGetNativeValue_dateTimeSet() {
Set<Object> nativeValue = (Set<Object>) Pql.getNativeValue(dateTimeSetValue);
assertTrue(nativeValue.contains(DateTimes.toDateTime(dateTimeValue1.getValue())));
assertEquals(1, nativeValue.size());
}
@Test
public void testGetNativeValue_mixedSet() {
thrown.expect(IllegalArgumentException.class);
Pql.getNativeValue(mixedSetValue);
}
@Test
@SuppressWarnings("unchecked")
public void testGetCsvValue_numberSet() {
assertEquals("1,-1", Pql.getCsvValue(numberSetValue));
}
@Test
@SuppressWarnings("unchecked")
public void testGetCsvValue_textSet() {
assertEquals("value1,value2", Pql.getCsvValue(textSetValue));
}
@Test
@SuppressWarnings("unchecked")
public void testGetCsvValue_dateSet() {
assertEquals("2012-12-02", Pql.getCsvValue(dateSetValue));
}
@Test
@SuppressWarnings("unchecked")
public void testGetCsvValue_dateTimeSet() {
assertEquals(
DateTimes.toStringWithTimeZone(dateTimeValue1.getValue()),
Pql.getCsvValue(dateTimeSetValue));
}
@Test
public void testGetCsvValue_mixedSet() {
thrown.expect(IllegalArgumentException.class);
Pql.getCsvValue(mixedSetValue);
}
@Test
public void testGetCsvValue_commaTextSet() {
assertEquals("value1,\"comma,separated\"", Pql.getCsvValue(commaTextSetValue));
}
@Test
public void testGetCsvValue_targetingValue() {
thrown.expect(IllegalArgumentException.class);
Pql.getCsvValue(targetingValue1);
}
  // --- Pql.createValue: wrapping plain Java objects into PQL Value types. ---

  @Test
  public void testCreateValue() {
    assertEquals("value1", ((TextValue) Pql.createValue("value1")).getValue());
    assertEquals(false, ((BooleanValue) Pql.createValue(false)).isValue());
    // Numbers are stored as their decimal string representation.
    assertEquals("1", ((NumberValue) Pql.createValue(1)).getValue());
    assertEquals("1", ((NumberValue) Pql.createValue(1L)).getValue());
    assertEquals("1.02", ((NumberValue) Pql.createValue(1.02)).getValue());
    assertEquals(
        "2012-12-02T12:45:00+08:00",
        DateTimes.toStringWithTimeZone(((DateTimeValue) Pql.createValue(dateTime1)).getValue()));
    assertEquals(
        "2012-12-02",
        DateTimes.toString(((DateValue) Pql.createValue(dateTime1.getDate())).getValue()));
  }

  @Test
  public void testCreateValue_numberSet() {
    Set<Long> numberSet = new LinkedHashSet<Long>();
    numberSet.add(1L);
    Value value1 = ((SetValue) Pql.createValue(numberSet)).getValues().get(0);
    assertEquals("1", ((NumberValue) value1).getValue());
  }

  @Test
  public void testCreateValue_textSet() {
    Set<String> textSet = new LinkedHashSet<String>();
    textSet.add("value1");
    Value value1 = ((SetValue) Pql.createValue(textSet)).getValues().get(0);
    assertEquals("value1", ((TextValue) value1).getValue());
  }

  @Test
  public void testCreateValue_commaTextSet() {
    // LinkedHashSet keeps insertion order, so indexes 0/1 are deterministic.
    Set<String> textSet = new LinkedHashSet<String>();
    textSet.add("value1");
    textSet.add("comma \",\" separated");
    Value value1 = ((SetValue) Pql.createValue(textSet)).getValues().get(0);
    Value value2 = ((SetValue) Pql.createValue(textSet)).getValues().get(1);
    assertEquals("value1", ((TextValue) value1).getValue());
    assertEquals("comma \",\" separated", ((TextValue) value2).getValue());
  }

  @Test
  public void testCreateValue_dateSet() {
    Set<Date> numberSet = new LinkedHashSet<Date>();
    numberSet.add(date1);
    Value value1 = ((SetValue) Pql.createValue(numberSet)).getValues().get(0);
    assertEquals("2012-12-02", DateTimes.toString(((DateValue) value1).getValue()));
  }

  @Test
  public void testCreateValue_dateTimeSet() {
    Set<DateTime> dateTimeSet = new LinkedHashSet<DateTime>();
    dateTimeSet.add(dateTime1);
    Value value1 = ((SetValue) Pql.createValue(dateTimeSet)).getValues().get(0);
    assertEquals(
        "2012-12-02T12:45:00+08:00",
        DateTimes.toStringWithTimeZone(((DateTimeValue) value1).getValue()));
  }

  @Test
  public void testCreateValue_invalidType() {
    // Objects with no Value mapping are rejected.
    thrown.expect(IllegalArgumentException.class);
    Pql.createValue(new MyObject());
  }

  @Test
  public void testCreateValue_mixedSet() {
    // Sets must be homogeneous.
    Set<Object> mixedTypeSet = new LinkedHashSet<Object>();
    mixedTypeSet.add(dateTime1);
    mixedTypeSet.add("value1");
    thrown.expect(IllegalArgumentException.class);
    Pql.createValue(mixedTypeSet);
  }

  @Test
  public void testCreateValue_null() {
    // null maps to a TextValue holding null, not to a Java null.
    assertEquals(null, ((TextValue) Pql.createValue(null)).getValue());
  }

  @Test
  public void testCreateValue_nestedSets() {
    // Sets of sets are not supported.
    Set<Object> set = new LinkedHashSet<Object>();
    Set<Object> innerSet = new LinkedHashSet<Object>();
    set.add(innerSet);
    thrown.expect(IllegalArgumentException.class);
    Pql.createValue(set);
  }
  // --- Result-set helpers: labels, stringified rows, and combining pages. ---

  @Test
  public void testGetColumnLabels() {
    ResultSet resultSet = new ResultSet();
    resultSet.getColumnTypes().addAll(Lists.newArrayList(column1, column2, column3));
    assertEquals(
        Lists.newArrayList("column1", "column2", "column3"), Pql.getColumnLabels(resultSet));
  }

  @Test
  public void testGetRowStringValues() {
    // Each cell is rendered with its CSV/string form (sets become "a,b").
    Row row = new Row();
    row.getValues()
        .addAll(Lists.newArrayList(textValue1, booleanValue1, numberValue2, numberSetValue));
    assertEquals(
        Lists.newArrayList("value1", "false", "1.02", "1,-1"), Pql.getRowStringValues(row));
  }

  @Test
  public void testCombineResultSet() {
    // Rows of the second result set are appended after those of the first,
    // preserving order; column types are taken from the combined set.
    Row row1 = new Row();
    row1.getValues().addAll(Lists.newArrayList(textValue1, booleanValue1, numberValue1));
    Row row2 = new Row();
    row2.getValues().addAll(Lists.newArrayList(textValue2, booleanValue2, numberValue2));
    Row row3 = new Row();
    row3.getValues().addAll(Lists.newArrayList(textValue3, booleanValue3, numberValue3));
    ResultSet resultSet1 = new ResultSet();
    resultSet1.getColumnTypes().addAll(Lists.newArrayList(column1, column2, column3));
    resultSet1.getRows().addAll(Lists.newArrayList(row1, row2));
    ResultSet resultSet2 = new ResultSet();
    resultSet2.getColumnTypes().addAll(Lists.newArrayList(column1, column2, column3));
    resultSet2.getRows().addAll(Lists.newArrayList(row3));
    ResultSet combinedResultSet = Pql.combineResultSets(resultSet1, resultSet2);
    assertEquals(3, combinedResultSet.getRows().size());
    assertEquals(Lists.newArrayList(column1, column2, column3), combinedResultSet.getColumnTypes());
    assertEquals(
        Lists.newArrayList(textValue1, booleanValue1, numberValue1),
        combinedResultSet.getRows().get(0).getValues());
    assertEquals(
        Lists.newArrayList(textValue2, booleanValue2, numberValue2),
        combinedResultSet.getRows().get(1).getValues());
    assertEquals(
        Lists.newArrayList(textValue3, booleanValue3, numberValue3),
        combinedResultSet.getRows().get(2).getValues());
  }

  @Test
  public void testCombineResultSet_badColumns() {
    // Result sets with differing column counts cannot be combined.
    Row row1 = new Row();
    row1.getValues().addAll(Lists.newArrayList(textValue1, booleanValue1, numberValue1));
    Row row2 = new Row();
    row2.getValues().addAll(Lists.newArrayList(textValue2, booleanValue2, numberValue2));
    Row row3 = new Row();
    row3.getValues().addAll(Lists.newArrayList(textValue3, booleanValue3));
    ResultSet resultSet1 = new ResultSet();
    resultSet1.getColumnTypes().addAll(Lists.newArrayList(column1, column2, column3));
    resultSet1.getRows().addAll(Lists.newArrayList(row1, row2));
    ResultSet resultSet2 = new ResultSet();
    resultSet2.getColumnTypes().addAll(Lists.newArrayList(column1, column2));
    resultSet2.getRows().addAll(Lists.newArrayList(row3));
    thrown.expect(IllegalArgumentException.class);
    Pql.combineResultSets(resultSet1, resultSet2);
  }

  // Value subclass unknown to Pql.
  // NOTE(review): unused in this portion of the file; presumably exercised by
  // earlier tests — confirm before removing.
  private static class MyValue extends Value {}

  // Plain object with no Value mapping, used by testCreateValue_invalidType.
  private static class MyObject extends Object {}
}
|
/**
* Copyright 2009-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.type;
import java.sql.*;
/**
* @author Clinton Begin
*/
/**
 * TypeHandler for SQL {@code ARRAY} columns: binds {@link java.sql.Array}
 * parameters and reads results back as the array's Java representation.
 *
 * @author Clinton Begin
 */
public class ArrayTypeHandler extends BaseTypeHandler<Object> {

  public ArrayTypeHandler() {
    super();
  }

  @Override
  public void setNonNullParameter(PreparedStatement ps, int i, Object parameter, JdbcType jdbcType) throws SQLException {
    // The parameter must already be a java.sql.Array; anything else fails
    // fast with a ClassCastException.
    ps.setArray(i, (Array) parameter);
  }

  @Override
  public Object getNullableResult(ResultSet rs, String columnName) throws SQLException {
    return extractArray(rs.getArray(columnName));
  }

  @Override
  public Object getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
    return extractArray(rs.getArray(columnIndex));
  }

  @Override
  public Object getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
    return extractArray(cs.getArray(columnIndex));
  }

  /**
   * Unwraps a JDBC array and releases its driver-side resources.
   *
   * @param array the array read from the result set or statement, may be null
   * @return the contents via {@link Array#getArray()}, or null if the column was null
   * @throws SQLException if the driver fails to materialize or free the array
   */
  protected Object extractArray(Array array) throws SQLException {
    if (array == null) {
      return null;
    }
    Object result = array.getArray();
    // Release driver-held resources eagerly instead of waiting for GC;
    // java.sql.Array.free() is the documented cleanup call.
    array.free();
    return result;
  }
}
|
/**
* SkillAPI
* com.sucy.skill.api.util.Combat
*
* The MIT License (MIT)
*
* Copyright (c) 2014 Steven Sucy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software") to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.sucy.skill.api.util;
import org.bukkit.entity.Player;

import java.util.HashMap;
import java.util.Map;
/**
* Helper method for checking whether or not an entity is in combat
*/
/**
 * Helper methods for checking whether or not an entity is in combat,
 * based on the time of the last combat action recorded for the player.
 */
public class Combat
{
    // Last combat timestamp (epoch millis) keyed by player name.
    // NOTE(review): keyed by name rather than UUID, so a renamed player loses
    // their timer — confirm this is acceptable before changing the key type.
    private static final Map<String, Long> timers = new HashMap<String, Long>();

    /**
     * Updates the combat status of the player, marking them as having
     * just entered (or refreshed) combat.
     *
     * @param player player to mark as starting combat
     */
    public static void applyCombat(Player player)
    {
        timers.put(player.getName(), System.currentTimeMillis());
    }

    /**
     * Clears the combat timer data for the given player
     *
     * @param player player to clear for
     */
    public static void clearData(Player player)
    {
        timers.remove(player.getName());
    }

    /**
     * Checks whether or not the player is in combat
     *
     * @param player  player to check for
     * @param seconds seconds before being counted as in combat
     *
     * @return true if in combat, false otherwise
     */
    public static boolean isInCombat(Player player, double seconds)
    {
        // Single get() instead of containsKey()+get(): avoids a second lookup
        // and an NPE if the entry is removed between the two calls.
        Long lastCombat = timers.get(player.getName());
        return lastCombat != null
            && System.currentTimeMillis() - lastCombat < seconds * 1000;
    }

    /**
     * Checks whether or not the player is out of combat
     *
     * @param player  player to check for
     * @param seconds seconds before being counted as out of combat
     *
     * @return true if out of combat, false otherwise
     */
    public static boolean isOutOfCombat(Player player, double seconds)
    {
        return !isInCombat(player, seconds);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.lib.util;
import java.util.Locale;
import java.util.MissingResourceException;
import org.apache.openjpa.lib.util.Localizer.Message;
import org.apache.openjpa.lib.util.testlocalizer.LocalizerTestHelper;
import org.junit.Test;
import org.junit.Before;
import static org.junit.Assert.*;
/**
* Tests the Localizer.
*
* @author Abe White
*/
public class TestLocalizer {

    // Localizer for the test helper's package, rebuilt before each test.
    private Localizer _locals = null;

    @Before
    public void setUp() {
        _locals = Localizer.forPackage(LocalizerTestHelper.class);
    }

    /**
     * Test getting a string for a class.
     * The expected value depends on the default locale so the test also
     * passes when run on a German-locale JVM.
     */
    @Test
    public void testForClass() {
        assertEqualsMSg(Locale.getDefault().equals(Locale.GERMANY)
            ? "value1_de" : "value1", _locals.get("test.local1"));
    }

    /**
     * Test getting a string for a non-default locale.
     */
    @Test
    public void testForLocale() {
        Localizer locl = Localizer.forPackage(LocalizerTestHelper.class,
            Locale.GERMANY);
        assertEqualsMSg("value1_de", locl.get("test.local1"));
    }

    /**
     * Tests that if a locale is missing the system falls back to the default.
     */
    @Test
    public void testFallbackLocale() {
        Localizer locl = Localizer.forPackage(LocalizerTestHelper.class,
            Locale.FRANCE);
        assertEqualsMSg(Locale.getDefault().equals(Locale.GERMANY)
            ? "value1_de" : "value1", locl.get("test.local1"));
    }

    /**
     * Tests that a null class accesses the localizer.properties file
     * for the top-level(no package).
     */
    @Test
    public void testTopLevel() {
        Localizer system = Localizer.forPackage(null);
        assertEqualsMSg("systemvalue1", system.get("test.systemlocal"));
    }

    /**
     * Test that the message formatting works correctly.
     */
    @Test
    public void testMessageFormat() {
        String suffix = Locale.getDefault().equals(Locale.GERMANY) ? "_de" : "";
        assertEqualsMSg("value2" + suffix + " x sep y", _locals.get("test.local2",
            new String[]{ "x", "y" }));
        // test that it treats single objects as single-element arrays
        assertEqualsMSg("value2" + suffix + " x sep {1}",
            _locals.get("test.local2", "x"));
    }

    /**
     * Tests behavior for a missing resource bundle: non-fatal gets fall back
     * to a "localized message key: ..." placeholder, while
     * {@link Localizer#getFatal} throws a {@link MissingResourceException}.
     */
    @Test
    public void testMissingBundle() {
        Localizer missing = Localizer.forPackage(String.class);
        assertEqualsMSg("localized message key: foo.bar", missing.get("foo.bar"));
        assertEqualsMSg("localized message key: foo.bar; substitutions: [baz, 1]", missing.get("foo.bar", "baz", 1));
        try {
            missing.getFatal("foo.bar");
            fail("No exception for fatal get on missing bundle.");
        } catch (MissingResourceException mre) {
            // expected: fatal gets must throw on a missing bundle
        }
    }

    /**
     * Tests behavior for a missing key: non-fatal gets fall back to a
     * placeholder message, while {@link Localizer#getFatal} throws a
     * {@link MissingResourceException}.
     */
    @Test
    public void testMissingKey() {
        assertEqualsMSg("localized message key: foo.bar", _locals.get("foo.bar"));
        assertEqualsMSg("localized message key: foo.bar; substitutions: [baz, 1]", _locals.get("foo.bar", "baz", 1));
        try {
            _locals.getFatal("foo.bar");
            fail("No exception for fatal get on missing key.");
        } catch (MissingResourceException mre) {
            // expected: fatal gets must throw on a missing key
        }
    }

    // Asserts the rendered text of a lazily-formatted Message.
    public static void assertEqualsMSg(String s, Message m) {
        assertEquals(s, m.getMessage());
    }
}
|
/*
* Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.security.auth.callback;
/**
* <p> Underlying security services instantiate and pass a
* {@code ChoiceCallback} to the {@code handle}
* method of a {@code CallbackHandler} to display a list of choices
* and to retrieve the selected choice(s).
*
* @since 1.4
* @see javax.security.auth.callback.CallbackHandler
*/
/**
 * <p> Underlying security services instantiate and pass a
 * {@code ChoiceCallback} to the {@code handle}
 * method of a {@code CallbackHandler} to display a list of choices
 * and to retrieve the selected choice(s).
 *
 * @since 1.4
 * @see javax.security.auth.callback.CallbackHandler
 */
public class ChoiceCallback implements Callback, java.io.Serializable {

    @java.io.Serial
    private static final long serialVersionUID = -3975664071579892167L;

    /**
     * @serial
     * @since 1.4
     */
    private final String prompt;

    /**
     * @serial the list of choices
     * @since 1.4
     */
    private final String[] choices;

    /**
     * @serial the choice to be used as the default choice
     * @since 1.4
     */
    private final int defaultChoice;

    /**
     * @serial whether multiple selections are allowed from the list of
     *          choices
     * @since 1.4
     */
    private final boolean multipleSelectionsAllowed;

    /**
     * @serial the selected choices, represented as indexes into the
     *          {@code choices} list.
     * @since 1.4
     */
    private int[] selections;

    /**
     * Construct a {@code ChoiceCallback} with a prompt,
     * a list of choices, a default choice, and a boolean specifying
     * whether or not multiple selections from the list of choices are allowed.
     *
     * @param prompt the prompt used to describe the list of choices.
     *
     * @param choices the list of choices. The array is cloned to protect
     *                  against subsequent modification.
     *
     * @param defaultChoice the choice to be used as the default choice
     *                  when the list of choices are displayed. This value
     *                  is represented as an index into the
     *                  {@code choices} array.
     *
     * @param multipleSelectionsAllowed boolean specifying whether or
     *                  not multiple selections can be made from the
     *                  list of choices.
     *
     * @exception IllegalArgumentException if {@code prompt} is null,
     *                  if {@code prompt} has a length of 0,
     *                  if {@code choices} is null,
     *                  if {@code choices} has a length of 0,
     *                  if any element from {@code choices} is null,
     *                  if any element from {@code choices}
     *                  has a length of 0 or if {@code defaultChoice}
     *                  does not fall within the array boundaries of
     *                  {@code choices}.
     */
    public ChoiceCallback(String prompt, String[] choices,
                int defaultChoice, boolean multipleSelectionsAllowed) {
        // Validate the scalar arguments before inspecting array contents.
        boolean invalid = prompt == null || prompt.isEmpty()
                || choices == null || choices.length == 0
                || defaultChoice < 0 || defaultChoice >= choices.length;
        if (invalid) {
            throw new IllegalArgumentException();
        }
        // Every choice must be a non-null, non-empty string.
        for (String choice : choices) {
            if (choice == null || choice.isEmpty()) {
                throw new IllegalArgumentException();
            }
        }

        this.prompt = prompt;
        this.choices = choices.clone();     // defensive copy
        this.defaultChoice = defaultChoice;
        this.multipleSelectionsAllowed = multipleSelectionsAllowed;
    }

    /**
     * Get the prompt.
     *
     * @return the prompt.
     */
    public String getPrompt() {
        return prompt;
    }

    /**
     * Get the list of choices.
     *
     * @return a copy of the list of choices.
     */
    public String[] getChoices() {
        return choices.clone();
    }

    /**
     * Get the defaultChoice.
     *
     * @return the defaultChoice, represented as an index into
     *          the {@code choices} list.
     */
    public int getDefaultChoice() {
        return defaultChoice;
    }

    /**
     * Get the boolean determining whether multiple selections from
     * the {@code choices} list are allowed.
     *
     * @return whether multiple selections are allowed.
     */
    public boolean allowMultipleSelections() {
        return multipleSelectionsAllowed;
    }

    /**
     * Set the selected choice.
     *
     * @param selection the selection represented as an index into the
     *          {@code choices} list.
     *
     * @see #getSelectedIndexes
     */
    public void setSelectedIndex(int selection) {
        this.selections = new int[] { selection };
    }

    /**
     * Set the selected choices.
     *
     * @param selections the selections represented as indexes into the
     *          {@code choices} list. The array is cloned to protect
     *          against subsequent modification.
     *
     * @exception UnsupportedOperationException if multiple selections are
     *          not allowed, as determined by
     *          {@code allowMultipleSelections}.
     *
     * @see #getSelectedIndexes
     */
    public void setSelectedIndexes(int[] selections) {
        if (!multipleSelectionsAllowed) {
            throw new UnsupportedOperationException();
        }
        this.selections = (selections == null) ? null : selections.clone();
    }

    /**
     * Get the selected choices.
     *
     * @return a copy of the selected choices, represented as indexes into the
     *          {@code choices} list.
     *
     * @see #setSelectedIndexes
     */
    public int[] getSelectedIndexes() {
        return (selections == null) ? null : selections.clone();
    }
}
|
package com.intellij.util.diff;
import com.intellij.openapi.util.registry.Registry;
/**
* @author irengrig
*/
/**
 * Thrown when a diff cannot be computed because the compared content
 * exceeds the configured size/change thresholds.
 *
 * @author irengrig
 */
public class FilesTooBigForDiffException extends Exception {
  // Threshold read once from the IDE registry at class-load time;
  // NOTE(review): throws if the Registry key is absent — presumed always
  // registered by the platform, confirm before reusing outside the IDE.
  public static final int DELTA_THRESHOLD_SIZE = Registry.intValue("diff.delta.threshold.size");
  // Do not try to compare two lines by-word after this much fails.
  public static final int MAX_BAD_LINES = 3;

  public FilesTooBigForDiffException() {
    super("Can not calculate diff. File is too big and there are too many changes.");
  }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.testing.MaterializedResult;
import com.google.common.collect.ImmutableList;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.List;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.operator.OperatorAssertion.assertOperatorEquals;
import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.testing.MaterializedResult.resultBuilder;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static java.lang.Double.NEGATIVE_INFINITY;
import static java.lang.Double.NaN;
import static java.lang.Double.POSITIVE_INFINITY;
import static java.util.concurrent.Executors.newCachedThreadPool;
@Test(singleThreaded = true)
public class TestUnnestOperator
{
    // Executor and driver context are rebuilt per test method
    // (class is @Test(singleThreaded = true)).
    private ExecutorService executor;
    private DriverContext driverContext;

    @BeforeMethod
    public void setUp()
    {
        executor = newCachedThreadPool(daemonThreadsNamed("test"));
        driverContext = new TaskContext(new TaskId("query", "stage", "task"), executor, TEST_SESSION)
                .addPipelineContext(true, true)
                .addDriverContext();
    }

    @AfterMethod
    public void tearDown()
    {
        // Stop worker threads so tests do not leak executors.
        executor.shutdownNow();
    }

    /**
     * Unnests array and map columns alongside a replicated bigint column.
     * Rows with null or empty collections produce no output rows for those
     * collections; shorter collections are padded with nulls.
     */
    @Test
    public void testUnnest()
            throws Exception
    {
        MetadataManager metadata = new MetadataManager();
        Type arrayType = metadata.getType(parseTypeSignature("array<bigint>"));
        Type mapType = metadata.getType(parseTypeSignature("map<bigint,bigint>"));

        // Collection cells are given as JSON literals, parsed by the page builder.
        List<Page> input = rowPagesBuilder(BIGINT, arrayType, mapType)
                .row(1, "[2, 3]", "{\"4\": 5}")
                .row(2, "[99]", null)
                .row(3, null, null)
                .pageBreak()
                .row(6, "[7, 8]", "{\"9\": 10, \"11\": 12}")
                .build();

        // Replicate channel 0, unnest channels 1 (array) and 2 (map).
        OperatorFactory operatorFactory = new UnnestOperator.UnnestOperatorFactory(0, ImmutableList.of(0), ImmutableList.<Type>of(BIGINT), ImmutableList.of(1, 2), ImmutableList.of(arrayType, mapType));
        Operator operator = operatorFactory.createOperator(driverContext);

        MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT, BIGINT, BIGINT)
                .row(1, 2, 4, 5)
                .row(1, 3, null, null)
                .row(2, 99, null, null)
                .row(6, 7, 9, 10)
                .row(6, 8, 11, 12)
                .build();

        assertOperatorEquals(operator, input, expected);
    }

    /**
     * Verifies that non-finite doubles (-Infinity, Infinity, NaN) survive
     * unnesting of array and map elements.
     */
    @Test
    public void testUnnestNonNumericDoubles()
            throws Exception
    {
        MetadataManager metadata = new MetadataManager();
        Type arrayType = metadata.getType(parseTypeSignature("array<double>"));
        Type mapType = metadata.getType(parseTypeSignature("map<bigint,double>"));

        List<Page> input = rowPagesBuilder(BIGINT, arrayType, mapType)
                .row(1, "[\"-Infinity\", \"Infinity\", \"NaN\"]", "{\"1\": \"-Infinity\", \"2\": \"Infinity\", \"3\": \"NaN\"}")
                .build();

        OperatorFactory operatorFactory = new UnnestOperator.UnnestOperatorFactory(0, ImmutableList.of(0), ImmutableList.<Type>of(BIGINT), ImmutableList.of(1, 2), ImmutableList.of(arrayType, mapType));
        Operator operator = operatorFactory.createOperator(driverContext);

        MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, DOUBLE, BIGINT, DOUBLE)
                .row(1, NEGATIVE_INFINITY, 1, NEGATIVE_INFINITY)
                .row(1, POSITIVE_INFINITY, 2, POSITIVE_INFINITY)
                .row(1, NaN, 3, NaN)
                .build();

        assertOperatorEquals(operator, input, expected);
    }
}
|
// Copyright (c) 2016 Petter Wintzell
package com.albroco.barebonesdigest;
import org.junit.Test;
import java.util.Collections;
import java.util.EnumSet;
import static com.albroco.barebonesdigest.DigestChallenge.QualityOfProtection.AUTH;
import static com.albroco.barebonesdigest.DigestChallenge.QualityOfProtection.AUTH_INT;
import static com.albroco.barebonesdigest.DigestChallenge.QualityOfProtection
.UNSPECIFIED_RFC2069_COMPATIBLE;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertNull;
import static junit.framework.Assert.assertTrue;
public class DigestChallengeTest {
  // --- DigestChallenge.isDigestChallenge: prefix detection is case-insensitive
  // and requires a space after the "Digest" scheme token. ---

  @Test
  public void testIsDigestChallengeEmptyString() {
    assertFalse(DigestChallenge.isDigestChallenge(""));
  }

  @Test
  public void testIsDigestChallengeCorrectPrefixNoSpace() {
    // "Digest" alone (no trailing space) is not a digest challenge.
    assertFalse(DigestChallenge.isDigestChallenge("Digest"));
  }

  @Test
  public void testIsDigestChallengeCorrectPrefix() {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    assertTrue(DigestChallenge.isDigestChallenge(CHALLENGE));
  }

  @Test
  public void testIsDigestChallengeLowerCase() {
    String CHALLENGE = "digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    assertTrue(DigestChallenge.isDigestChallenge(CHALLENGE));
  }

  @Test
  public void testIsDigestChallengeUpperCase() {
    String CHALLENGE = "DIGEST " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    assertTrue(DigestChallenge.isDigestChallenge(CHALLENGE));
  }
  // --- Parsing of the basic challenge directives: realm, nonce, opaque.
  // Getters come in quoted (raw) and unquoted flavors; absent directives
  // yield null from both. ---

  @Test
  public void testMinimalChallenge() throws Exception {
    // realm and nonce alone form a parseable challenge.
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
  }

  @Test
  public void testRealm() throws Exception {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals("\"testrealm@host.com\"", header.getQuotedRealm());
    assertEquals("testrealm@host.com", header.getRealm());
  }

  @Test
  public void testNonce() throws Exception {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals("\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"", header.getQuotedNonce());
    assertEquals("dcd98b7102dd2f0e8b11d0f600bfb0c093", header.getNonce());
  }

  @Test
  public void testOpaque() throws Exception {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "qop=\"auth,auth-int\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
        "opaque=\"5ccc069c403ebaf9f0171e9517f40e41\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals("\"5ccc069c403ebaf9f0171e9517f40e41\"", header.getQuotedOpaque());
    assertEquals("5ccc069c403ebaf9f0171e9517f40e41", header.getOpaque());
  }

  @Test
  public void testGetOpaqueOpaqueMissing() throws Exception {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "qop=\"auth,auth-int\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNull(header.getOpaque());
  }

  @Test
  public void testGetQuotedOpaqueOpaqueMissing() throws Exception {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "qop=\"auth,auth-int\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNull(header.getQuotedOpaque());
  }
  // --- Parsing of the algorithm directive. Parsing is lenient: unknown and
  // (invalidly) quoted algorithms are accepted, but MD5-sess without a qop
  // is rejected. ---

  @Test
  public void testMd5Algorithm() throws Exception {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
        "algorithm=MD5";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals("MD5", header.getAlgorithm());
  }

  @Test
  public void testMd5SessAlgorithm() throws Exception {
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
        "qop=auth, " +
        "algorithm=MD5-sess";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals("MD5-sess", header.getAlgorithm());
  }

  @Test(expected = ChallengeParseException.class)
  public void testMd5SessAlgorithmMissingQop() throws Exception {
    // MD5-sess requires a qop directive; its absence is a parse error.
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
        "algorithm=MD5-sess";
    DigestChallenge.parse(CHALLENGE);
  }

  @Test
  public void testQuotedAlgorithm() throws Exception {
    // This is not a valid challenge but parsing is intentionally lenient
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
        "algorithm=\"MD5\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals("MD5", header.getAlgorithm());
  }

  @Test
  public void testMissingAlgorithm() throws Exception {
    // Absent algorithm directive yields null rather than a default.
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals(null, header.getAlgorithm());
  }

  @Test
  public void testUnknownAlgorithm() throws Exception {
    // Unrecognized algorithms are passed through verbatim, not rejected.
    String CHALLENGE = "Digest " +
        "realm=\"testrealm@host.com\", " +
        "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
        "algorithm=XYZ";
    DigestChallenge header = DigestChallenge.parse(CHALLENGE);
    assertNotNull(header);
    assertEquals("XYZ", header.getAlgorithm());
  }
@Test
public void testDomain() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"domain=\"http://domain.testrealm.com\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals("\"http://domain.testrealm.com\"", header.getQuotedDomain());
assertEquals("http://domain.testrealm.com", header.getDomain());
}
@Test
public void testGetDomainDomainMissing() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNull(header.getDomain());
}
@Test
public void testGetQuotedDomainDomainMissing() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNull(header.getQuotedDomain());
}
@Test
public void testStaleSetToTrue() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"stale=true";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertTrue(header.isStale());
}
@Test
public void testStaleSetToFalse() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"stale=false";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertFalse(header.isStale());
}
@Test
public void testStaleCaseInsensitive() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"stale=TRUE";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertTrue(header.isStale());
}
@Test
public void testStaleDirectiveMissing() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertFalse(header.isStale());
}
@Test
public void testQuotedStale() throws Exception {
// This is not a valid challenge but parsing is intentionally lenient
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"stale=\"true\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertTrue(header.isStale());
}
@Test
public void testStaleUnrecognizedValue() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"stale=no";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertFalse(header.isStale());
}
@Test
public void testQopSetToAuth() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"qop=\"auth\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals(EnumSet.of(AUTH), header.getSupportedQopTypes());
}
@Test
public void testQopSetToAuthInt() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"qop=\"auth-int\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals(EnumSet.of(AUTH_INT), header.getSupportedQopTypes());
}
@Test
public void testQopSetToAuthAndAuthInt() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"qop=\"auth,auth-int\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals(EnumSet.of(AUTH, AUTH_INT), header.getSupportedQopTypes());
}
@Test
public void testQopMissing() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals(EnumSet.of(UNSPECIFIED_RFC2069_COMPATIBLE), header.getSupportedQopTypes());
}
@Test
public void testQopNotQuoted() throws Exception {
// This is not a valid challenge, but some server implementations fail to quote the qop so
// parsing is intentionally lenient
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"qop=auth";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals(EnumSet.of(AUTH), header.getSupportedQopTypes());
}
@Test
public void testQopUnknownQop() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"qop=\"future_extension\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals(Collections.emptySet(), header.getSupportedQopTypes());
}
@Test
public void testQopMalformedDirective() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"qop=\",, , auth\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
assertEquals(EnumSet.of(AUTH), header.getSupportedQopTypes());
}
@Test
public void testUnrecognizedDirectiveWithTokenValue() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"unrecognized=token";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
}
@Test
public void testUnrecognizedDirectiveWithQuotedValue() throws Exception {
String CHALLENGE = "Digest " +
"realm=\"testrealm@host.com\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " +
"unrecognized=\"quoted\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
}
@Test(expected = ChallengeParseException.class)
public void testMalformedChallengeMissingRealm() throws Exception {
// The example below is from Section 3.5 of RC 2617,
// https://tools.ietf.org/html/rfc2617#section-3.5
String CHALLENGE = "Digest " + "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
assertNull(DigestChallenge.parse(CHALLENGE));
}
@Test(expected = ChallengeParseException.class)
public void testMalformedChallengeMissingNonce() throws Exception {
// The example below is from Section 3.5 of RC 2617,
// https://tools.ietf.org/html/rfc2617#section-3.5
String CHALLENGE = "Digest " + "realm=\"testrealm@host.com\"";
assertNull(DigestChallenge.parse(CHALLENGE));
}
@Test(expected = ChallengeParseException.class)
public void testMalformedChallengeWrongType() throws Exception {
// The example below is from Section 3.5 of RC 2617,
// https://tools.ietf.org/html/rfc2617#section-3.5
String CHALLENGE = "Basic realm=\"WallyWorld\"";
assertNull(DigestChallenge.parse(CHALLENGE));
}
@Test(expected = ChallengeParseException.class)
public void testMalformedChallengeWrongSyntax() throws Exception {
// The example below is from Section 3.5 of RC 2617,
// https://tools.ietf.org/html/rfc2617#section-3.5
String CHALLENGE = "digest nonce,realm=\"WallyWorld\"";
assertNull(DigestChallenge.parse(CHALLENGE));
}
@Test
public void testThatDigestLiteralIsCaseInsensitive() throws Exception {
String CHALLENGE = "DIGEST " +
"realm=\"\", " +
"qop=\"auth\", " +
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"";
DigestChallenge header = DigestChallenge.parse(CHALLENGE);
assertNotNull(header);
}
}
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|cxf
operator|.
name|common
operator|.
name|logging
package|;
end_package
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|ArrayList
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|List
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|regex
operator|.
name|Matcher
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|regex
operator|.
name|Pattern
import|;
end_import
begin_class
specifier|public
class|class
name|RegexLoggingFilter
block|{
specifier|public
specifier|static
specifier|final
name|String
name|DEFAULT_REPLACEMENT
init|=
literal|"*****"
decl_stmt|;
specifier|private
specifier|static
class|class
name|ReplaceRegEx
block|{
specifier|private
name|Pattern
name|pattern
decl_stmt|;
specifier|private
name|int
name|group
init|=
literal|1
decl_stmt|;
specifier|private
name|String
name|replacement
decl_stmt|;
name|ReplaceRegEx
parameter_list|(
name|String
name|pattern
parameter_list|,
name|int
name|group
parameter_list|,
name|String
name|replacement
parameter_list|)
block|{
name|this
operator|.
name|pattern
operator|=
name|Pattern
operator|.
name|compile
argument_list|(
name|pattern
argument_list|)
expr_stmt|;
name|this
operator|.
name|group
operator|=
name|group
expr_stmt|;
name|this
operator|.
name|replacement
operator|=
name|replacement
expr_stmt|;
block|}
specifier|public
name|CharSequence
name|filter
parameter_list|(
name|CharSequence
name|command
parameter_list|)
block|{
name|Matcher
name|m
init|=
name|pattern
operator|.
name|matcher
argument_list|(
name|command
argument_list|)
decl_stmt|;
name|int
name|offset
init|=
literal|0
decl_stmt|;
while|while
condition|(
name|m
operator|.
name|find
argument_list|()
condition|)
block|{
name|int
name|origLen
init|=
name|command
operator|.
name|length
argument_list|()
decl_stmt|;
name|command
operator|=
operator|new
name|StringBuilder
argument_list|(
name|command
argument_list|)
operator|.
name|replace
argument_list|(
name|m
operator|.
name|start
argument_list|(
name|group
argument_list|)
operator|+
name|offset
argument_list|,
name|m
operator|.
name|end
argument_list|(
name|group
argument_list|)
operator|+
name|offset
argument_list|,
name|replacement
argument_list|)
operator|.
name|toString
argument_list|()
expr_stmt|;
name|offset
operator|+=
name|command
operator|.
name|length
argument_list|()
operator|-
name|origLen
expr_stmt|;
block|}
return|return
name|command
return|;
block|}
block|}
specifier|private
name|String
name|regPattern
decl_stmt|;
specifier|private
name|int
name|regGroup
init|=
literal|1
decl_stmt|;
specifier|private
name|String
name|regReplacement
init|=
name|DEFAULT_REPLACEMENT
decl_stmt|;
specifier|private
name|List
argument_list|<
name|ReplaceRegEx
argument_list|>
name|regexs
init|=
operator|new
name|ArrayList
argument_list|<>
argument_list|()
decl_stmt|;
specifier|public
name|CharSequence
name|filter
parameter_list|(
name|CharSequence
name|command
parameter_list|)
block|{
if|if
condition|(
name|regPattern
operator|!=
literal|null
condition|)
block|{
name|command
operator|=
operator|new
name|ReplaceRegEx
argument_list|(
name|regPattern
argument_list|,
name|regGroup
argument_list|,
name|regReplacement
argument_list|)
operator|.
name|filter
argument_list|(
name|command
argument_list|)
expr_stmt|;
block|}
for|for
control|(
name|ReplaceRegEx
name|regex
range|:
name|regexs
control|)
block|{
name|command
operator|=
name|regex
operator|.
name|filter
argument_list|(
name|command
argument_list|)
expr_stmt|;
block|}
return|return
name|command
return|;
block|}
specifier|public
name|void
name|addRegEx
parameter_list|(
name|String
name|pattern
parameter_list|)
block|{
name|addRegEx
argument_list|(
name|pattern
argument_list|,
literal|1
argument_list|)
expr_stmt|;
block|}
specifier|public
name|void
name|addRegEx
parameter_list|(
name|String
name|pattern
parameter_list|,
name|int
name|group
parameter_list|)
block|{
name|addRegEx
argument_list|(
name|pattern
argument_list|,
name|group
argument_list|,
name|DEFAULT_REPLACEMENT
argument_list|)
expr_stmt|;
block|}
specifier|public
name|void
name|addRegEx
parameter_list|(
name|String
name|pattern
parameter_list|,
name|int
name|group
parameter_list|,
name|String
name|replacement
parameter_list|)
block|{
name|regexs
operator|.
name|add
argument_list|(
operator|new
name|ReplaceRegEx
argument_list|(
name|pattern
argument_list|,
name|group
argument_list|,
name|replacement
argument_list|)
argument_list|)
expr_stmt|;
block|}
specifier|public
name|void
name|addCommandOption
parameter_list|(
name|String
name|option
parameter_list|,
name|String
modifier|...
name|commands
parameter_list|)
block|{
name|StringBuilder
name|pattern
init|=
operator|new
name|StringBuilder
argument_list|(
literal|"("
argument_list|)
decl_stmt|;
for|for
control|(
name|String
name|command
range|:
name|commands
control|)
block|{
if|if
condition|(
name|pattern
operator|.
name|length
argument_list|()
operator|>
literal|1
condition|)
block|{
name|pattern
operator|.
name|append
argument_list|(
literal|'|'
argument_list|)
expr_stmt|;
block|}
name|pattern
operator|.
name|append
argument_list|(
name|Pattern
operator|.
name|quote
argument_list|(
name|command
argument_list|)
argument_list|)
expr_stmt|;
block|}
name|pattern
operator|.
name|append
argument_list|(
literal|") +.*?"
argument_list|)
operator|.
name|append
argument_list|(
name|Pattern
operator|.
name|quote
argument_list|(
name|option
argument_list|)
argument_list|)
operator|.
name|append
argument_list|(
literal|" +([^ ]+)"
argument_list|)
expr_stmt|;
name|regexs
operator|.
name|add
argument_list|(
operator|new
name|ReplaceRegEx
argument_list|(
name|pattern
operator|.
name|toString
argument_list|()
argument_list|,
literal|2
argument_list|,
name|DEFAULT_REPLACEMENT
argument_list|)
argument_list|)
expr_stmt|;
block|}
specifier|public
name|String
name|getPattern
parameter_list|()
block|{
return|return
name|regPattern
return|;
block|}
specifier|public
name|void
name|setPattern
parameter_list|(
name|String
name|pattern
parameter_list|)
block|{
name|this
operator|.
name|regPattern
operator|=
name|pattern
expr_stmt|;
block|}
specifier|public
name|String
name|getReplacement
parameter_list|()
block|{
return|return
name|regReplacement
return|;
block|}
specifier|public
name|void
name|setReplacement
parameter_list|(
name|String
name|replacement
parameter_list|)
block|{
name|this
operator|.
name|regReplacement
operator|=
name|replacement
expr_stmt|;
block|}
specifier|public
name|int
name|getGroup
parameter_list|()
block|{
return|return
name|regGroup
return|;
block|}
specifier|public
name|void
name|setGroup
parameter_list|(
name|int
name|group
parameter_list|)
block|{
name|this
operator|.
name|regGroup
operator|=
name|group
expr_stmt|;
block|}
block|}
end_class
end_unit
|
package edu.stanford.nlp.classify;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.*;
import edu.stanford.nlp.ling.BasicDatum;
import edu.stanford.nlp.ling.Datum;
import edu.stanford.nlp.ling.RVFDatum;
import edu.stanford.nlp.stats.ClassicCounter;
import edu.stanford.nlp.stats.Counter;
import edu.stanford.nlp.util.Generics;
import edu.stanford.nlp.util.HashIndex;
import edu.stanford.nlp.util.Index;
import edu.stanford.nlp.util.Pair;
/**
* The purpose of this interface is to unify {@link Dataset} and {@link RVFDataset}.
* <p>
* Note: Despite these being value classes, at present there are no equals() and hashCode() methods
* defined so you just get the default ones from Object, so different objects aren't equal.
* </p>
*
* @author Kristina Toutanova (kristina@cs.stanford.edu)
* @author Anna Rafferty (various refactoring with subclasses)
* @author Sarah Spikes (sdspikes@cs.stanford.edu) (Templatization)
* @author Ramesh Nallapati (nmramesh@cs.stanford.edu)
* (added an abstract method getDatum, July 17th, 2008)
*
* @param <L> The type of the labels in the Dataset
* @param <F> The type of the features in the Dataset
*/
public abstract class GeneralDataset<L, F> implements Serializable, Iterable<RVFDatum<L, F>> {
private static final long serialVersionUID = 19157757130054829L;
public Index<L> labelIndex;
public Index<F> featureIndex;
protected int[] labels;
protected int[][] data;
protected int size;
public GeneralDataset() { }
public Index<L> labelIndex() { return labelIndex; }
public Index<F> featureIndex() { return featureIndex; }
public int numFeatures() { return featureIndex.size(); }
public int numClasses() { return labelIndex.size(); }
  /**
   * Returns the labels array trimmed to exactly {@code size} entries.
   * Note the side effect: the internal backing array is replaced with the
   * trimmed copy, so later callers share the returned array.
   */
  public int[] getLabelsArray() {
    labels = trimToSize(labels);
    return labels;
  }
  /**
   * Returns the per-datum feature-index rows trimmed to exactly {@code size}
   * rows. Note the side effect: the internal backing array is replaced with
   * the trimmed copy, so later callers share the returned array.
   */
  public int[][] getDataArray() {
    data = trimToSize(data);
    return data;
  }
public abstract double[][] getValuesArray();
/**
* Resets the Dataset so that it is empty and ready to collect data.
*/
public void clear() {
clear(10);
}
/**
* Resets the Dataset so that it is empty and ready to collect data.
* @param numDatums initial capacity of dataset
*/
public void clear(int numDatums) {
initialize(numDatums);
}
/**
* This method takes care of resetting values of the dataset
* such that it is empty with an initial capacity of numDatums.
* Should be accessed only by appropriate methods within the class,
* such as clear(), which take care of other parts of the emptying of data.
*
* @param numDatums initial capacity of dataset
*/
protected abstract void initialize(int numDatums);
public abstract RVFDatum<L, F> getRVFDatum(int index);
public abstract Datum<L,F> getDatum(int index);
public abstract void add(Datum<L, F> d);
/**
* Get the total count (over all data instances) of each feature
*
* @return an array containing the counts (indexed by index)
*/
public float[] getFeatureCounts() {
float[] counts = new float[featureIndex.size()];
for (int i = 0, m = size; i < m; i++) {
for (int j = 0, n = data[i].length; j < n; j++) {
counts[data[i][j]] += 1.0;
}
}
return counts;
}
  /**
   * Applies a feature count threshold to the Dataset. All features that
   * occur fewer than <i>k</i> times are expunged and the surviving features
   * are re-indexed compactly.
   *
   * NOTE(review): near-duplicate of {@link #applyFeatureMaxCountThreshold(int)}
   * (only the comparison differs); consider extracting the shared remap logic.
   */
  public void applyFeatureCountThreshold(int k) {
    float[] counts = getFeatureCounts();
    // Build a compact index of surviving features;
    // featMap[old id] = new id, or -1 if the feature is dropped.
    Index<F> newFeatureIndex = new HashIndex<F>();
    int[] featMap = new int[featureIndex.size()];
    for (int i = 0; i < featMap.length; i++) {
      F feat = featureIndex.get(i);
      if (counts[i] >= k) {
        int newIndex = newFeatureIndex.size();
        newFeatureIndex.add(feat);
        featMap[i] = newIndex;
      } else {
        featMap[i] = -1;
      }
      // featureIndex.remove(feat);
    }
    featureIndex = newFeatureIndex;
    // counts = null; // This is unnecessary; JVM can clean it up
    // Rewrite every datum's feature ids through featMap, dropping expunged ones.
    for (int i = 0; i < size; i++) {
      List<Integer> featList = new ArrayList<Integer>(data[i].length);
      for (int j = 0; j < data[i].length; j++) {
        if (featMap[data[i][j]] >= 0) {
          featList.add(featMap[data[i][j]]);
        }
      }
      data[i] = new int[featList.size()];
      for (int j = 0; j < data[i].length; j++) {
        data[i][j] = featList.get(j);
      }
    }
  }
  /**
   * Applies a max feature count threshold to the Dataset. All features that
   * occur more than <i>k</i> times are expunged and the surviving features
   * are re-indexed compactly.
   *
   * NOTE(review): near-duplicate of {@link #applyFeatureCountThreshold(int)}
   * (only the comparison differs); consider extracting the shared remap logic.
   */
  public void applyFeatureMaxCountThreshold(int k) {
    float[] counts = getFeatureCounts();
    // Build a compact index of surviving features;
    // featMap[old id] = new id, or -1 if the feature is dropped.
    HashIndex<F> newFeatureIndex = new HashIndex<F>();
    int[] featMap = new int[featureIndex.size()];
    for (int i = 0; i < featMap.length; i++) {
      F feat = featureIndex.get(i);
      if (counts[i] <= k) {
        int newIndex = newFeatureIndex.size();
        newFeatureIndex.add(feat);
        featMap[i] = newIndex;
      } else {
        featMap[i] = -1;
      }
      // featureIndex.remove(feat);
    }
    featureIndex = newFeatureIndex;
    // counts = null; // This is unnecessary; JVM can clean it up
    // Rewrite every datum's feature ids through featMap, dropping expunged ones.
    for (int i = 0; i < size; i++) {
      List<Integer> featList = new ArrayList<Integer>(data[i].length);
      for (int j = 0; j < data[i].length; j++) {
        if (featMap[data[i][j]] >= 0) {
          featList.add(featMap[data[i][j]]);
        }
      }
      data[i] = new int[featList.size()];
      for (int j = 0; j < data[i].length; j++) {
        data[i][j] = featList.get(j);
      }
    }
  }
/**
* returns the number of feature tokens in the Dataset.
*/
public int numFeatureTokens() {
int x = 0;
for (int i = 0, m = size; i < m; i++) {
x += data[i].length;
}
return x;
}
/**
* returns the number of distinct feature types in the Dataset.
*/
public int numFeatureTypes() {
return featureIndex.size();
}
/**
* Adds all Datums in the given collection of data to this dataset
* @param data collection of datums you would like to add to the dataset
*/
public void addAll(Iterable<? extends Datum<L,F>> data) {
for (Datum<L, F> d : data) {
add(d);
}
}
/** Divide out a (devtest) split of the dataset versus the rest of it (as a training set).
*
* @param start Begin devtest with this index (inclusive)
* @param end End devtest before this index (exclusive)
* @return A Pair of data sets, the first being the remainder of size this.size() - (end-start)
* and the second being of size (end-start)
*/
public abstract Pair<GeneralDataset<L, F>, GeneralDataset<L, F>> split (int start, int end);
/** Divide out a (devtest) split from the start of the dataset and the rest of it (as a training set).
*
* @param fractionSplit The first fractionSplit of datums (rounded down) will be the second split
* @return A Pair of data sets, the first being the remainder of size ceiling(this.size() * (1-p)) drawn
* from the end of the dataset and the second of size floor(this.size() * p) drawn from the
* start of the dataset.
*/
public abstract Pair<GeneralDataset<L, F>, GeneralDataset<L, F>> split (double fractionSplit);
  /** Divide out one fold of the dataset (as devtest) versus the rest of it (as a training set).
   *
   * @param fold The number of this fold (must be between 0 and (numFolds - 1))
   * @param numFolds The number of folds to divide the data into (must be at least 2 and
   *     at most the size of the data set)
   * @return A Pair of data sets, the first being roughly (numFolds-1)/numFolds of the data items
   *     (for use as training data), and the second being 1/numFolds of the data, taken from the
   *     fold<sup>th</sup> part of the data (for use as devTest data)
   */
  public Pair<GeneralDataset<L, F>, GeneralDataset<L, F>> splitOutFold(int fold, int numFolds) {
    if (numFolds < 2 || numFolds > size() || fold < 0 || fold >= numFolds) {
      throw new IllegalArgumentException("Illegal request for fold " + fold + " of " + numFolds +
          " on data set of size " + size());
    }
    int normalFoldSize = size()/numFolds;
    int start = normalFoldSize * fold;
    int end = start + normalFoldSize;
    // the last fold absorbs the remainder when size() % numFolds != 0
    if (fold == (numFolds - 1)) {
      end = size();
    }
    return split(start, end);
  }
/**
* Returns the number of examples ({@link Datum}s) in the Dataset.
*/
public int size() { return size; }
protected void trimData() {
data = trimToSize(data);
}
protected void trimLabels() {
labels = trimToSize(labels);
}
protected int[] trimToSize(int[] i) {
int[] newI = new int[size];
System.arraycopy(i, 0, newI, 0, size);
return newI;
}
protected int[][] trimToSize(int[][] i) {
int[][] newI = new int[size][];
System.arraycopy(i, 0, newI, 0, size);
return newI;
}
protected double[][] trimToSize(double[][] i) {
double[][] newI = new double[size][];
System.arraycopy(i, 0, newI, 0, size);
return newI;
}
  /**
   * Shuffles the data and labels arrays in place using the given seed.
   * Note: this cannot change the values array or the datum weights,
   * so RVFDataset and WeightedDataset must redefine it.
   *
   * NOTE(review): the swap partner is drawn with {@code rand.nextInt(j)},
   * i.e. strictly below {@code j}. That is Sattolo's algorithm (it produces
   * only cyclic permutations), NOT the Fisher-Yates shuffle used by
   * {@code java.util.Collections.shuffle}, which draws from
   * {@code nextInt(j + 1)}. The permutation is therefore slightly biased and
   * is NOT seed-compatible with Collections.shuffle; confirm before relying
   * on either property. Left unchanged here because changing it would alter
   * the ordering produced by existing seeds.
   *
   * @param randomSeed A seed for the Random object (allows you to reproduce the same ordering)
   */
  // todo: Probably should be renamed 'shuffle' to be consistent with Java Collections API
  public void randomize(long randomSeed) {
    Random rand = new Random(randomSeed);
    for (int j = size - 1; j > 0; j--) {
      // swap entry j with a strictly lower-indexed entry (see NOTE above)
      int randIndex = rand.nextInt(j);
      int[] tmp = data[randIndex];
      data[randIndex] = data[j];
      data[j] = tmp;
      int tmpl = labels[randIndex];
      labels[randIndex] = labels[j];
      labels[j] = tmpl;
    }
  }
  /**
   * Shuffles the data and labels arrays and the parallel {@code sideInformation}
   * list in place, applying the same permutation to all three.
   * Note: this cannot change the values array or the datum weights,
   * so RVFDataset and WeightedDataset must redefine it.
   *
   * NOTE(review): like {@link #randomize(long)}, this draws the swap partner
   * with {@code rand.nextInt(j)} (Sattolo-style, cyclic permutations only),
   * which is slightly biased and not seed-compatible with
   * {@code Collections.shuffle}; confirm before relying on either property.
   *
   * @param randomSeed A seed for the Random object (allows you to reproduce the same ordering)
   * @param sideInformation per-datum side information; must have exactly {@code size} elements
   * @throws IllegalArgumentException if sideInformation's size differs from this dataset's
   */
  public <E> void shuffleWithSideInformation(long randomSeed, List<E> sideInformation) {
    if (size != sideInformation.size()) {
      throw new IllegalArgumentException("shuffleWithSideInformation: sideInformation not of same size as Dataset");
    }
    Random rand = new Random(randomSeed);
    for (int j = size - 1; j > 0; j--) {
      // swap entry j with a strictly lower-indexed entry (see NOTE above)
      int randIndex = rand.nextInt(j);
      int[] tmp = data[randIndex];
      data[randIndex] = data[j];
      data[j] = tmp;
      int tmpl = labels[randIndex];
      labels[randIndex] = labels[j];
      labels[j] = tmpl;
      E tmpE = sideInformation.get(randIndex);
      sideInformation.set(randIndex, sideInformation.get(j));
      sideInformation.set(j, tmpE);
    }
  }
public GeneralDataset<L,F> sampleDataset(long randomSeed, double sampleFrac, boolean sampleWithReplacement) {
int sampleSize = (int)(this.size()*sampleFrac);
Random rand = new Random(randomSeed);
GeneralDataset<L,F> subset;
if (this instanceof RVFDataset) {
subset = new RVFDataset<L,F>();
} else if (this instanceof Dataset) {
subset = new Dataset<L,F>();
}
else {
throw new RuntimeException("Can't handle this type of GeneralDataset.");
}
if (sampleWithReplacement) {
for(int i = 0; i < sampleSize; i++){
int datumNum = rand.nextInt(this.size());
subset.add(this.getDatum(datumNum));
}
} else {
Set<Integer> indicedSampled = Generics.newHashSet();
while (subset.size() < sampleSize) {
int datumNum = rand.nextInt(this.size());
if (!indicedSampled.contains(datumNum)) {
subset.add(this.getDatum(datumNum));
indicedSampled.add(datumNum);
}
}
}
return subset;
}
/**
* Print some statistics summarizing the dataset
*
*/
public abstract void summaryStatistics();
/**
* Returns an iterator over the class labels of the Dataset
*
* @return An iterator over the class labels of the Dataset
*/
public Iterator<L> labelIterator() {
return labelIndex.iterator();
}
/**
*
* @param dataset
* @return a new GeneralDataset whose features and ids map exactly to those of this GeneralDataset.
* Useful when two Datasets are created independently and one wants to train a model on one dataset and test on the other. -Ramesh.
*/
public GeneralDataset<L,F> mapDataset(GeneralDataset<L,F> dataset){
GeneralDataset<L,F> newDataset;
if(dataset instanceof RVFDataset)
newDataset = new RVFDataset<L,F>(this.featureIndex,this.labelIndex);
else newDataset = new Dataset<L,F>(this.featureIndex,this.labelIndex);
this.featureIndex.lock();
this.labelIndex.lock();
//System.out.println("inside mapDataset: dataset size:"+dataset.size());
for(int i = 0; i < dataset.size(); i++)
//System.out.println("inside mapDataset: adding datum number"+i);
newDataset.add(dataset.getDatum(i));
//System.out.println("old Dataset stats: numData:"+dataset.size()+" numfeatures:"+dataset.featureIndex().size()+" numlabels:"+dataset.labelIndex.size());
//System.out.println("new Dataset stats: numData:"+newDataset.size()+" numfeatures:"+newDataset.featureIndex().size()+" numlabels:"+newDataset.labelIndex.size());
//System.out.println("this dataset stats: numData:"+size()+" numfeatures:"+featureIndex().size()+" numlabels:"+labelIndex.size());
this.featureIndex.unlock();
this.labelIndex.unlock();
return newDataset;
}
public static <L,L2,F> Datum<L2,F> mapDatum(Datum<L,F> d, Map<L,L2> labelMapping, L2 defaultLabel) {
// TODO: How to copy datum?
L2 newLabel = labelMapping.get(d.label());
if (newLabel == null) {
newLabel = defaultLabel;
}
if (d instanceof RVFDatum) {
return new RVFDatum<L2,F>( ((RVFDatum<L,F>) d).asFeaturesCounter(), newLabel );
} else {
return new BasicDatum<L2,F>( d.asFeatures(), newLabel );
}
}
/**
*
* @param dataset
* @return a new GeneralDataset whose features and ids map exactly to those of this GeneralDataset. But labels are converted to be another set of labels
*/
public <L2> GeneralDataset<L2,F> mapDataset(GeneralDataset<L,F> dataset, Index<L2> newLabelIndex, Map<L,L2> labelMapping, L2 defaultLabel)
{
GeneralDataset<L2,F> newDataset;
if(dataset instanceof RVFDataset)
newDataset = new RVFDataset<L2,F>(this.featureIndex, newLabelIndex);
else newDataset = new Dataset<L2,F>(this.featureIndex, newLabelIndex);
this.featureIndex.lock();
this.labelIndex.lock();
//System.out.println("inside mapDataset: dataset size:"+dataset.size());
for(int i = 0; i < dataset.size(); i++) {
//System.out.println("inside mapDataset: adding datum number"+i);
Datum<L,F> d = dataset.getDatum(i);
Datum<L2,F> d2 = mapDatum(d, labelMapping, defaultLabel);
newDataset.add(d2);
}
//System.out.println("old Dataset stats: numData:"+dataset.size()+" numfeatures:"+dataset.featureIndex().size()+" numlabels:"+dataset.labelIndex.size());
//System.out.println("new Dataset stats: numData:"+newDataset.size()+" numfeatures:"+newDataset.featureIndex().size()+" numlabels:"+newDataset.labelIndex.size());
//System.out.println("this dataset stats: numData:"+size()+" numfeatures:"+featureIndex().size()+" numlabels:"+labelIndex.size());
this.featureIndex.unlock();
this.labelIndex.unlock();
return newDataset;
}
/**
* Dumps the Dataset as a training/test file for SVMLight. <br>
* class [fno:val]+
* The features must occur in consecutive order.
*/
public void printSVMLightFormat() {
printSVMLightFormat(new PrintWriter(System.out));
}
/**
* Maps our labels to labels that are compatible with svm_light
* @return array of strings
*/
public String[] makeSvmLabelMap() {
String[] labelMap = new String[numClasses()];
if (numClasses() > 2) {
for (int i = 0; i < labelMap.length; i++) {
labelMap[i] = String.valueOf((i + 1));
}
} else {
labelMap = new String[]{"+1", "-1"};
}
return labelMap;
}
  // todo: Fix javadoc, have unit tested
  /**
   * Print SVM Light Format to the given writer: one line per datum,
   * {@code label feature:value ...} with features in ascending index order.
   *
   * Labels are printed via {@link #makeSvmLabelMap()} rather than as raw
   * label ids, because svm_light has conventions our labels do not follow:
   * in a multiclass setting labels must start at 1, and binary labels must
   * be +1/-1. (Printing labels[i] directly was tried and reverted for this
   * reason — see history comments from 12/17/2009 and 08/31/2010.)
   */
  public void printSVMLightFormat(PrintWriter pw) {
    // assumes each data item has a few features on, and sorts the feature
    // keys while collecting the values in a counter
    String[] labelMap = makeSvmLabelMap();
    for (int i = 0; i < size; i++) {
      RVFDatum<L, F> d = getRVFDatum(i);
      Counter<F> c = d.asFeaturesCounter();
      // re-key the counter by integer feature index so the keys can be sorted
      ClassicCounter<Integer> printC = new ClassicCounter<Integer>();
      for (F f : c.keySet()) {
        printC.setCount(featureIndex.indexOf(f), c.getCount(f));
      }
      Integer[] features = printC.keySet().toArray(new Integer[printC.keySet().size()]);
      Arrays.sort(features);
      StringBuilder sb = new StringBuilder();
      sb.append(labelMap[labels[i]]).append(' ');
      // feature ids are printed 1-based, as svm_light expects
      for (int f: features) {
        sb.append((f + 1)).append(':').append(printC.getCount(f)).append(' ');
      }
      pw.println(sb.toString());
    }
  }
/** Returns a read-only iterator over the datums in this dataset, in index order. */
public Iterator<RVFDatum<L, F>> iterator() {
    return new Iterator<RVFDatum<L, F>>() {
        // Position of the next datum to hand out.
        private int cursor;

        @Override
        public boolean hasNext() {
            return cursor < size();
        }

        @Override
        public RVFDatum<L, F> next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            return getRVFDatum(cursor++);
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };
}
/**
 * Counts how many datums carry each label.
 *
 * @return a counter mapping each label to the number of datums with that label
 */
public ClassicCounter<L> numDatumsPerLabel() {
    ClassicCounter<L> countsPerLabel = new ClassicCounter<L>();
    for (int labelId : labels) {
        countsPerLabel.incrementCount(labelIndex.get(labelId));
    }
    return countsPerLabel;
}
/**
 * Prints the sparse feature matrix using
 * {@link #printSparseFeatureMatrix(PrintWriter)} to {@link System#out
 * System.out}.
 */
public abstract void printSparseFeatureMatrix();

/**
 * prints a sparse feature matrix representation of the Dataset. Prints the actual
 * {@link Object#toString()} representations of features.
 *
 * @param pw the writer that receives the matrix representation
 */
public abstract void printSparseFeatureMatrix(PrintWriter pw);
}
|
/*
* Copyright (c) 2020 Cognite AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cognite.client;
import com.cognite.client.config.ResourceType;
import com.cognite.client.config.UpsertMode;
import com.cognite.client.dto.*;
import com.cognite.client.servicesV1.ConnectorServiceV1;
import com.cognite.client.servicesV1.ResponseItems;
import com.cognite.client.servicesV1.executor.FileBinaryRequestExecutor;
import com.cognite.client.servicesV1.parser.FileParser;
import com.cognite.client.servicesV1.parser.ItemParser;
import com.cognite.client.util.Partition;
import com.google.auto.value.AutoValue;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import okhttp3.internal.http2.StreamResetException;
import org.apache.commons.lang3.RandomStringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.Instant;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;
/**
 * This class represents the Cognite files api endpoint.
 *
 * It provides methods for reading and writing {@link FileMetadata} and for
 * uploading and downloading file binaries.
 */
@AutoValue
public abstract class Files extends ApiBase {
// Batch size for file header write requests. Presumably the api limit -- TODO confirm.
private static final int MAX_WRITE_REQUEST_BATCH_SIZE = 100;
// Number of file binaries to download per batch (used when partitioning in download()).
private static final int MAX_DOWNLOAD_BINARY_BATCH_SIZE = 10;
// Number of file binaries to upload per batch (used when partitioning in upload()).
private static final int MAX_UPLOAD_BINARY_BATCH_SIZE = 10;
// Retry upload batches with one item at a time when the following list of exceptions are observed
private static final ImmutableList<Class<? extends Exception>> RETRYABLE_EXCEPTIONS_BINARY_UPLOAD =
        ImmutableList.of(
                StreamResetException.class,
                IOException.class, // IOException worth set of retries
                com.google.cloud.storage.StorageException.class // Timeout + stream reset when using GCS as temp storage
        );
// Creates the AutoValue builder for this class.
private static Builder builder() {
    return new AutoValue_Files.Builder();
}

// Shared logger for the files api endpoint.
protected static final Logger LOG = LoggerFactory.getLogger(Files.class);
/**
 * Constructs a new {@link Files} object using the provided client configuration.
 *
 * This method is intended for internal use--SDK clients should always use {@link CogniteClient}
 * as the entry point to this class.
 *
 * @param client The {@link CogniteClient} to use for configuration settings.
 * @return the files api object.
 */
public static Files of(CogniteClient client) {
    return Files.builder()
            .setClient(client)
            .build();
}
/**
 * Returns all {@link FileMetadata} objects.
 *
 * @see #list(Request)
 */
public Iterator<List<FileMetadata>> list() throws Exception {
    // An empty request matches all files.
    return this.list(Request.create());
}
/**
 * Returns all {@link FileMetadata} objects that matches the filters set in the {@link Request}.
 *
 * The results are paged through / iterated over via an {@link Iterator}--the entire results set is not buffered in
 * memory, but streamed in "pages" from the Cognite api. If you need to buffer the entire results set, then you
 * have to stream these results into your own data structure.
 *
 * The files are retrieved using multiple, parallel request streams towards the Cognite api. The number of
 * parallel streams are set in the {@link com.cognite.client.config.ClientConfig}.
 *
 * @param requestParameters the filters to use for retrieving the files.
 * @return an {@link Iterator} to page through the results set.
 * @throws Exception
 */
public Iterator<List<FileMetadata>> list(Request requestParameters) throws Exception {
    List<String> partitions = buildPartitionsList(getClient().getClientConfig().getNoListPartitions());
    // toArray(new String[0]) is the idiomatic form; the JVM sizes the array itself.
    return this.list(requestParameters, partitions.toArray(new String[0]));
}
/**
 * Returns all {@link FileMetadata} objects that matches the filters set in the {@link Request} for the
 * specified partitions. This method is intended for advanced use cases where you need direct control over
 * the individual partitions. For example, when using the SDK in a distributed computing environment.
 *
 * The results are paged through / iterated over via an {@link Iterator}--the entire results set is not buffered in
 * memory, but streamed in "pages" from the Cognite api. If you need to buffer the entire results set, then you
 * have to stream these results into your own data structure.
 *
 * @param requestParameters the filters to use for retrieving the files.
 * @param partitions the partitions to include.
 * @return an {@link Iterator} to page through the results set.
 * @throws Exception
 */
public Iterator<List<FileMetadata>> list(Request requestParameters, String... partitions) throws Exception {
    return AdapterIterator.of(listJson(ResourceType.FILE_HEADER, requestParameters, partitions), this::parseFileMetadata);
}
/**
 * Retrieve files by id.
 *
 * @param items The item(s) {@code externalId / id} to retrieve.
 * @return The retrieved file headers.
 * @throws Exception
 */
public List<FileMetadata> retrieve(List<Item> items) throws Exception {
    // Fetch the raw results and parse each entry into a typed file header.
    return retrieveJson(ResourceType.FILE_HEADER, items).stream()
            .map(this::parseFileMetadata)
            .collect(Collectors.toList());
}
/**
 * Performs an item aggregation request to Cognite Data Fusion.
 *
 * The default aggregation is a total item count based on the (optional) filters in the request.
 * Multiple aggregation types are supported. Please refer to the Cognite API specification for more information
 * on the possible settings.
 *
 * @param requestParameters The filtering and aggregates specification
 * @return The aggregation results.
 * @throws Exception
 * @see <a href="https://docs.cognite.com/api/v1/">Cognite API v1 specification</a>
 */
public Aggregate aggregate(Request requestParameters) throws Exception {
    // Delegate to the shared aggregate implementation with the file header resource type.
    return aggregate(ResourceType.FILE_HEADER, requestParameters);
}
/**
 * Creates or updates a set of {@link FileMetadata} objects.
 *
 * If it is a new {@link FileMetadata} object (based on {@code id / externalId}), then it will be created.
 *
 * If an {@link FileMetadata} object already exists in Cognite Data Fusion, it will be updated. The update behavior
 * is specified via the update mode in the {@link com.cognite.client.config.ClientConfig} settings.
 *
 * @param fileMetadataList The file headers / metadata to upsert.
 * @return The upserted file headers.
 * @throws Exception
 */
public List<FileMetadata> upsert(List<FileMetadata> fileMetadataList) throws Exception {
    String loggingPrefix = "upsert() - " + RandomStringUtils.randomAlphanumeric(5) + " - ";
    final int maxUpsertLoopIterations = 3;
    Instant startInstant = Instant.now();
    if (fileMetadataList.isEmpty()) {
        LOG.warn(loggingPrefix + "No items specified in the request. Will skip the read request.");
        return Collections.emptyList();
    }
    ConnectorServiceV1.ItemWriter updateWriter = getClient().getConnectorService().updateFileHeaders();
    ConnectorServiceV1.ItemWriter createWriter = getClient().getConnectorService().writeFileHeaders();
    // naive de-duplication based on ids
    Map<Long, FileMetadata> internalIdUpdateMap = new HashMap<>(1000);
    Map<String, FileMetadata> externalIdUpdateMap = new HashMap<>(1000);
    // "Overflow" assetIds (beyond the first 1k per file) set aside for follow-up updates, keyed by id.
    Map<Long, FileMetadata> internalIdAssetsMap = new HashMap<>(50);
    Map<String, FileMetadata> externalIdAssetsMap = new HashMap<>(50);
    // Partition the input by id type; an item without any id cannot be upserted.
    for (FileMetadata value : fileMetadataList) {
        if (value.hasExternalId()) {
            externalIdUpdateMap.put(value.getExternalId(), value);
        } else if (value.hasId()) {
            internalIdUpdateMap.put(value.getId(), value);
        } else {
            throw new Exception("File metadata item does not contain id nor externalId: " + value.toString());
        }
    }
    // Check for files with >1k assets. Set the extra assets aside so we can add them in separate updates.
    for (Long key : internalIdUpdateMap.keySet()) {
        FileMetadata fileMetadata = internalIdUpdateMap.get(key);
        if (fileMetadata.getAssetIdsCount() > 1000) {
            // Keep only the first 1k assetIds on the main upsert payload.
            internalIdUpdateMap.put(key, fileMetadata.toBuilder()
                    .clearAssetIds()
                    .addAllAssetIds(fileMetadata.getAssetIdsList().subList(0,1000))
                    .build());
            internalIdAssetsMap.put(key, FileMetadata.newBuilder()
                    .setId(fileMetadata.getId())
                    .addAllAssetIds(fileMetadata.getAssetIdsList().subList(1000, fileMetadata.getAssetIdsList().size()))
                    .build());
        }
    }
    // Same split, for the externalId-keyed items.
    for (String key : externalIdUpdateMap.keySet()) {
        FileMetadata fileMetadata = externalIdUpdateMap.get(key);
        if (fileMetadata.getAssetIdsCount() > 1000) {
            externalIdUpdateMap.put(key, fileMetadata.toBuilder()
                    .clearAssetIds()
                    .addAllAssetIds(fileMetadata.getAssetIdsList().subList(0,1000))
                    .build());
            externalIdAssetsMap.put(key, FileMetadata.newBuilder()
                    .setExternalId(fileMetadata.getExternalId())
                    .addAllAssetIds(fileMetadata.getAssetIdsList().subList(1000, fileMetadata.getAssetIdsList().size()))
                    .build());
        }
    }
    // Combine the input into list
    List<FileMetadata> elementListUpdate = new ArrayList<>();
    List<FileMetadata> elementListCreate = new ArrayList<>();
    List<String> elementListCompleted = new ArrayList<>();
    elementListUpdate.addAll(externalIdUpdateMap.values());
    elementListUpdate.addAll(internalIdUpdateMap.values());
    /*
    The upsert loop. If there are items left to insert or update:
    1. Update elements
    2. If conflicts move missing items into the insert maps
    3. Insert elements
    4. If conflict, remove duplicates into the update maps
    */
    ThreadLocalRandom random = ThreadLocalRandom.current();
    String exceptionMessage = "";
    // The increment also sleeps between iterations: a capped, exponential backoff with jitter.
    for (int i = 0; i < maxUpsertLoopIterations && (elementListCreate.size() + elementListUpdate.size()) > 0;
            i++, Thread.sleep(Math.min(500L, (10L * (long) Math.exp(i)) + random.nextLong(5)))) {
        LOG.debug(loggingPrefix + "Start upsert loop {} with {} items to update, {} items to create and "
                + "{} completed items at duration {}",
                i,
                elementListUpdate.size(),
                elementListCreate.size(),
                elementListCompleted.size(),
                Duration.between(startInstant, Instant.now()).toString());
        /*
        Update items
        */
        if (elementListUpdate.isEmpty()) {
            LOG.debug(loggingPrefix + "Update items list is empty. Skipping update.");
        } else {
            Map<ResponseItems<String>, List<FileMetadata>> updateResponseMap =
                    splitAndUpdateFileMetadata(elementListUpdate, updateWriter);
            LOG.debug(loggingPrefix + "Completed update items requests for {} items across {} batches at duration {}",
                    elementListUpdate.size(),
                    updateResponseMap.size(),
                    Duration.between(startInstant, Instant.now()).toString());
            elementListUpdate.clear(); // Must prepare the list for possible new entries.
            for (ResponseItems<String> response : updateResponseMap.keySet()) {
                if (response.isSuccessful()) {
                    elementListCompleted.addAll(response.getResultsItems());
                    LOG.debug(loggingPrefix + "Update items request success. Adding {} update result items to result collection.",
                            response.getResultsItems().size());
                } else {
                    exceptionMessage = response.getResponseBodyAsString();
                    LOG.debug(loggingPrefix + "Update items request failed: {}", response.getResponseBodyAsString());
                    if (i == maxUpsertLoopIterations - 1) {
                        // Add the error message to std logging
                        LOG.error(loggingPrefix + "Update items request failed. {}", response.getResponseBodyAsString());
                    }
                    LOG.debug(loggingPrefix + "Converting missing items to create and retrying the request");
                    List<Item> missing = ItemParser.parseItems(response.getMissingItems());
                    LOG.debug(loggingPrefix + "Number of missing entries reported by CDF: {}", missing.size());
                    // Move missing items from update to the create request
                    // NOTE(review): itemsMap.get(...) can return null if CDF reports an item that was not
                    // part of this batch -- confirm upstream guarantees before relying on it.
                    Map<String, FileMetadata> itemsMap = mapToId(updateResponseMap.get(response));
                    for (Item value : missing) {
                        if (value.getIdTypeCase() == Item.IdTypeCase.EXTERNAL_ID) {
                            elementListCreate.add(itemsMap.get(value.getExternalId()));
                            itemsMap.remove(value.getExternalId());
                        } else if (value.getIdTypeCase() == Item.IdTypeCase.ID) {
                            elementListCreate.add(itemsMap.get(String.valueOf(value.getId())));
                            itemsMap.remove(String.valueOf(value.getId()));
                        }
                    }
                    elementListUpdate.addAll(itemsMap.values()); // Add remaining items to be re-updated
                }
            }
        }
        /*
        Insert / create items
        */
        if (elementListCreate.isEmpty()) {
            LOG.debug(loggingPrefix + "Create items list is empty. Skipping create.");
        } else {
            Map<ResponseItems<String>, FileMetadata> createResponseMap =
                    splitAndCreateFileMetadata(elementListCreate, createWriter);
            LOG.debug(loggingPrefix + "Completed create items requests for {} items across {} batches at duration {}",
                    elementListCreate.size(),
                    createResponseMap.size(),
                    Duration.between(startInstant, Instant.now()).toString());
            elementListCreate.clear(); // Must prepare the list for possible new entries.
            for (ResponseItems<String> response : createResponseMap.keySet()) {
                if (response.isSuccessful()) {
                    elementListCompleted.addAll(response.getResultsItems());
                    LOG.debug(loggingPrefix + "Create items request success. Adding {} create result items to result collection.",
                            response.getResultsItems().size());
                } else {
                    exceptionMessage = response.getResponseBodyAsString();
                    LOG.debug(loggingPrefix + "Create items request failed: {}", response.getResponseBodyAsString());
                    if (i == maxUpsertLoopIterations - 1) {
                        // Add the error message to std logging
                        LOG.error(loggingPrefix + "Create items request failed. {}", response.getResponseBodyAsString());
                    }
                    LOG.debug(loggingPrefix + "Converting duplicates to update and retrying the request");
                    List<Item> duplicates = ItemParser.parseItems(response.getDuplicateItems());
                    LOG.debug(loggingPrefix + "Number of duplicate entries reported by CDF: {}", duplicates.size());
                    // Move duplicates from insert to the update request
                    Map<String, FileMetadata> itemsMap = mapToId(ImmutableList.of(createResponseMap.get(response)));
                    for (Item value : duplicates) {
                        if (value.getIdTypeCase() == Item.IdTypeCase.EXTERNAL_ID) {
                            elementListUpdate.add(itemsMap.get(value.getExternalId()));
                            itemsMap.remove(value.getExternalId());
                        } else if (value.getIdTypeCase() == Item.IdTypeCase.ID) {
                            elementListUpdate.add(itemsMap.get(String.valueOf(value.getId())));
                            itemsMap.remove(String.valueOf(value.getId()));
                        }
                    }
                    elementListCreate.addAll(itemsMap.values()); // Add remaining items to be re-inserted
                }
            }
        }
    }
    /*
    Write extra assets id links as separate updates. The api only supports 1k assetId links per file object
    per api request. If a file contains a large number of assetIds, we need to split them up into an initial
    file create/update (all the code above) and subsequent update requests which add the remaining
    assetIds (code below).
    */
    Map<Long, FileMetadata> internalIdTempMap = new HashMap<>();
    Map<String, FileMetadata> externalIdTempMap = new HashMap<>();
    List<FileMetadata> elementListAssetUpdate = new ArrayList<>();
    // Each pass sends up to 1k assetIds per file; any remainder is carried into the next pass.
    while (internalIdAssetsMap.size() > 0 || externalIdAssetsMap.size() > 0) {
        LOG.info(loggingPrefix + "Some files have very high assetId cardinality (+1k). Adding assetId to "
                + (internalIdAssetsMap.size() + externalIdAssetsMap.size())
                + " file(s).");
        internalIdUpdateMap.clear();
        externalIdUpdateMap.clear();
        internalIdTempMap.clear();
        externalIdTempMap.clear();
        // Check for files with >1k remaining assets
        for (Long key : internalIdAssetsMap.keySet()) {
            FileMetadata fileMetadata = internalIdAssetsMap.get(key);
            if (fileMetadata.getAssetIdsCount() > 1000) {
                internalIdUpdateMap.put(key, fileMetadata.toBuilder()
                        .clearAssetIds()
                        .addAllAssetIds(fileMetadata.getAssetIdsList().subList(0,1000))
                        .build());
                internalIdTempMap.put(key, FileMetadata.newBuilder()
                        .setId(fileMetadata.getId())
                        .addAllAssetIds(fileMetadata.getAssetIdsList().subList(1000, fileMetadata.getAssetIdsList().size()))
                        .build());
            } else {
                // The entire assetId list can be pushed in a single update
                internalIdUpdateMap.put(key, fileMetadata);
            }
        }
        internalIdAssetsMap.clear();
        internalIdAssetsMap.putAll(internalIdTempMap);
        for (String key : externalIdAssetsMap.keySet()) {
            FileMetadata fileMetadata = externalIdAssetsMap.get(key);
            if (fileMetadata.getAssetIdsCount() > 1000) {
                externalIdUpdateMap.put(key, fileMetadata.toBuilder()
                        .clearAssetIds()
                        .addAllAssetIds(fileMetadata.getAssetIdsList().subList(0,1000))
                        .build());
                externalIdTempMap.put(key, FileMetadata.newBuilder()
                        .setExternalId(fileMetadata.getExternalId())
                        .addAllAssetIds(fileMetadata.getAssetIdsList().subList(1000, fileMetadata.getAssetIdsList().size()))
                        .build());
            } else {
                // The entire assetId list can be pushed in a single update
                externalIdUpdateMap.put(key, fileMetadata);
            }
        }
        externalIdAssetsMap.clear();
        externalIdAssetsMap.putAll(externalIdTempMap);
        // prepare the update and send request
        LOG.info(loggingPrefix + "Building update request to add assetIds for {} files.",
                internalIdUpdateMap.size() + externalIdUpdateMap.size());
        elementListAssetUpdate.clear();
        elementListAssetUpdate.addAll(externalIdUpdateMap.values());
        elementListAssetUpdate.addAll(internalIdUpdateMap.values());
        // should not happen, but need to check
        if (elementListAssetUpdate.isEmpty()) {
            String message = loggingPrefix + "Internal error. Not able to send assetId update. The payload is empty.";
            LOG.error(message);
            throw new Exception(message);
        }
        Map<ResponseItems<String>, List<FileMetadata>> responseItemsAssets =
                splitAndAddAssets(elementListAssetUpdate, updateWriter);
        for (ResponseItems<String> responseItems : responseItemsAssets.keySet()) {
            if (!responseItems.isSuccessful()) {
                String message = loggingPrefix
                        + "Failed to add assetIds. "
                        + responseItems.getResponseBodyAsString();
                LOG.error(message);
                throw new Exception(message);
            }
        }
    }
    // Check if all elements completed the upsert requests
    if (elementListCreate.isEmpty() && elementListUpdate.isEmpty()) {
        LOG.info(loggingPrefix + "Successfully upserted {} items within a duration of {}.",
                elementListCompleted.size(),
                Duration.between(startInstant, Instant.now()).toString());
    } else {
        LOG.error(loggingPrefix + "Failed to upsert items. {} items remaining. {} items completed upsert."
                + System.lineSeparator() + "{}",
                elementListCreate.size() + elementListUpdate.size(),
                elementListCompleted.size(),
                exceptionMessage);
        // NOTE(review): exceptionMessage is concatenated into the format string; a '%' in the
        // response body would break String.format -- consider passing it as an argument instead.
        throw new Exception(String.format(loggingPrefix + "Failed to upsert items. %d items remaining. "
                + " %d items completed upsert. %n " + exceptionMessage,
                elementListCreate.size() + elementListUpdate.size(),
                elementListCompleted.size()));
    }
    return elementListCompleted.stream()
            .map(this::parseFileMetadata)
            .collect(Collectors.toList());
}
/**
 * Uploads a set of file headers and binaries to Cognite Data Fusion.
 *
 * The file binary can either be placed in-memory in the file container (as a byte string)
 * or referenced (by URI) to a blob store.
 *
 * @param files The files to upload.
 * @return The file metadata / headers for the uploaded files.
 * @throws Exception
 */
public List<FileMetadata> upload(List<FileContainer> files) throws Exception {
    // Default: keep any URI-referenced temp binaries after upload.
    return this.upload(files, false);
}
/**
 * Uploads a set of file headers and binaries to Cognite Data Fusion.
 *
 * The file binary can either be placed in-memory in the file container (as a byte string)
 * or referenced (by URI) to a blob store.
 *
 * In case you reference the file by URI, you can choose to automatically remove the file binary
 * from the (URI referenced) blob store after a successful upload to Cognite Data Fusion. This can
 * be useful in situations where you perform large scale data transfers utilizing a temp backing
 * store.
 *
 * @param files The files to upload.
 * @param deleteTempFile Set to true to remove the URI binary after upload. Set to false to keep the URI binary.
 * @return The file metadata / headers for the uploaded files.
 * @throws Exception
 */
public List<FileMetadata> upload(@NotNull List<FileContainer> files, boolean deleteTempFile) throws Exception {
    String loggingPrefix = "upload() - " + RandomStringUtils.randomAlphanumeric(3) + " - ";
    Instant startInstant = Instant.now();
    if (files.isEmpty()) {
        LOG.warn(loggingPrefix + "No items specified in the request. Will skip the upload request.");
        return Collections.emptyList();
    }
    // naive de-duplication based on ids
    Map<Long, FileContainer> internalIdMap = new HashMap<>();
    Map<String, FileContainer> externalIdMap = new HashMap<>();
    for (FileContainer item : files) {
        if (item.getFileMetadata().hasExternalId()) {
            externalIdMap.put(item.getFileMetadata().getExternalId(), item);
        } else if (item.getFileMetadata().hasId()) {
            internalIdMap.put(item.getFileMetadata().getId(), item);
        } else {
            String message = loggingPrefix + "File item does not contain id nor externalId: " + item.toString();
            LOG.error(message);
            throw new Exception(message);
        }
    }
    LOG.info(loggingPrefix + "Received {} files to upload.", internalIdMap.size() + externalIdMap.size());
    // Combine into list and split into upload batches
    List<FileContainer> fileContainerList = new ArrayList<>();
    fileContainerList.addAll(externalIdMap.values());
    fileContainerList.addAll(internalIdMap.values());
    List<List<FileContainer>> fileContainerBatches = Partition.ofSize(fileContainerList, MAX_UPLOAD_BINARY_BATCH_SIZE);
    // Response list
    List<FileMetadata> responseFileMetadata = new ArrayList<>();
    int batchCounter = 0;
    for (List<FileContainer> uploadBatch : fileContainerBatches) {
        batchCounter++;
        String batchLoggingPrefix = loggingPrefix + batchCounter + " - ";
        try {
            responseFileMetadata.addAll(uploadFileBinaries(uploadBatch, deleteTempFile));
        } catch (CompletionException e) {
            // Must unwrap the completion exception
            Throwable cause = e.getCause();
            if (RETRYABLE_EXCEPTIONS_BINARY_UPLOAD.stream()
                    .anyMatch(retryable -> retryable.isInstance(cause))) {
                // The API is most likely saturated. Retry the uploads one file at a time.
                // NOTE(review): these single-file retries are not themselves guarded by a
                // try/catch, so a second failure propagates -- confirm that is intended.
                LOG.warn(batchLoggingPrefix + "Error when uploading the batch of file binaries. Will retry each file individually.");
                for (FileContainer file : uploadBatch) {
                    responseFileMetadata.addAll(uploadFileBinaries(List.of(file), deleteTempFile));
                }
            } else {
                throw e;
            }
        }
    }
    LOG.info(loggingPrefix + "Completed upload of {} files within a duration of {}.",
            files.size(),
            Duration.between(startInstant, Instant.now()).toString());
    return responseFileMetadata;
}
/**
 * Downloads file binaries.
 *
 * Downloads a set of file binaries based on {@code externalId / id} in the {@link Item} list. The file
 * binaries can be downloaded as files or byte streams. In case the file is very large (> 200MB) it has to
 * be streamed directly to the file system (i.e. downloaded as a file).
 *
 * Both the file header / metadata and the file binary will be returned. The complete information is encapsulated
 * in the {@link FileContainer} returned from this method. The {@link FileContainer} will host the file
 * binary stream if you set {@code preferByteStream} to {@code true} and the file size is < 200 MB. If
 * {@code preferByteStream} is set to {@code false} or the file size is > 200MB the file binary will be
 * stored on disk and the {@link FileContainer} will return the {@link URI} reference to the
 * binary.
 *
 * Supported destination file stores for the file binary:
 * - Local (network) disk. Specify the temp path as {@code file://<host>/<my-path>/}.
 *   Examples: {@code file://localhost/home/files/, file:///home/files/, file:///c:/temp/}
 * - Google Cloud Storage. Specify the temp path as {@code gs://<my-storage-bucket>/<my-path>/}.
 *
 * @param files The list of files to download.
 * @param downloadPath The URI to the download storage
 * @param preferByteStream Set to true to return byte streams when possible, set to false to always store
 *                         binary as file.
 * @return File containers with file headers and references/byte streams of the binary.
 */
public List<FileContainer> download(List<Item> files, Path downloadPath, boolean preferByteStream) throws Exception {
    String loggingPrefix = "download() - " + RandomStringUtils.randomAlphanumeric(5) + " - ";
    Preconditions.checkArgument(java.nio.file.Files.isDirectory(downloadPath),
            loggingPrefix + "The download path must be a valid directory.");
    Instant startInstant = Instant.now();
    if (files.isEmpty()) {
        LOG.warn(loggingPrefix + "No items specified in the request. Will skip the download request.");
        return Collections.emptyList();
    }
    LOG.info(loggingPrefix + "Received {} items to download.",
            files.size());
    List<List<Item>> batches = Partition.ofSize(files, MAX_DOWNLOAD_BINARY_BATCH_SIZE);
    List<FileContainer> results = new ArrayList<>();
    for (List<Item> batch : batches) {
        // Get the file binaries
        List<FileBinary> fileBinaries = downloadFileBinaries(batch, downloadPath.toUri(), !preferByteStream);
        // Get the file metadata
        List<FileMetadata> fileMetadataList = retrieve(batch);
        // Merge the binary and metadata
        List<FileContainer> tempNameContainers = buildFileContainers(fileBinaries, fileMetadataList);
        // Rename the file from random temp name to file name
        List<FileContainer> resultContainers = new ArrayList<>();
        for (FileContainer container : tempNameContainers) {
            if (container.getFileBinary().getBinaryTypeCase() == FileBinary.BinaryTypeCase.BINARY_URI
                    && container.hasFileMetadata()) {
                // Get the target file name. Replace illegal characters with dashes
                // NOTE(review): the regex is a character class, so the '|' characters are literal
                // members of it -- '|' in file names is replaced too. Confirm this is intended.
                String fileNameBase = container.getFileMetadata().getName()
                        .trim()
                        .replaceAll("[\\/|\\\\|&|\\$]", "-");
                String fileSuffix = "";
                if (fileNameBase.lastIndexOf(".") != -1) {
                    // The file name has a suffix. Let's break it out.
                    int splitIndex = fileNameBase.lastIndexOf(".");
                    fileSuffix = fileNameBase.substring(splitIndex);
                    fileNameBase = fileNameBase.substring(0, splitIndex);
                }
                Path tempFilePath = Paths.get(new URI(container.getFileBinary().getBinaryUri()));
                String destinationFileName = fileNameBase;
                int counter = 1;
                while (java.nio.file.Files.exists(downloadPath.resolve(destinationFileName + fileSuffix))) {
                    // The destination file name already exists, so we add an increasing counter to the
                    // file name base.
                    destinationFileName = fileNameBase + "_" + counter;
                    counter++;
                }
                // Rename the file
                Path destinationPath = downloadPath.resolve(destinationFileName + fileSuffix);
                java.nio.file.Files.move(tempFilePath, destinationPath);
                // Build a new file container with the new file name
                FileContainer updated = container.toBuilder()
                        .setFileBinary(container.getFileBinary().toBuilder()
                                .setBinaryUri(destinationPath.toUri().toString()))
                        .build();
                // Swap the old container with the new one
                resultContainers.add(updated);
            } else {
                resultContainers.add(container);
            }
        }
        results.addAll(resultContainers);
    }
    LOG.info(loggingPrefix + "Successfully downloaded {} files within a duration of {}.",
            results.size(),
            Duration.between(startInstant, Instant.now()).toString());
    return results;
}
/**
 * Downloads file binaries to a local / network path.
 *
 * Downloads a set of file binaries based on {@code externalId / id} in the {@link Item} list.
 *
 * Both the file header / metadata and the file binary will be returned. The complete information is encapsulated
 * in the {@link FileContainer} returned from this method. The {@link FileContainer} will host the
 * {@link URI} reference to the binary.
 *
 * Supported destination file stores for the file binary:
 * - Local (network) disk. Specify the temp path as {@code file://<host>/<my-path>/}.
 *   Examples: {@code file://localhost/home/files/, file:///home/files/, file:///c:/temp/}
 * - Google Cloud Storage. Specify the temp path as {@code gs://<my-storage-bucket>/<my-path>/}.
 *
 * @param files The list of files to download.
 * @param downloadPath The URI to the download storage
 * @return File containers with file headers and references/byte streams of the binary.
 */
public List<FileContainer> downloadToPath(List<Item> files, Path downloadPath) throws Exception {
    // preferByteStream = false: always write the binaries to the download path.
    return download(files, downloadPath, false);
}
/*
 Gathers file binaries and metadata into file containers via externalId / id.

 A container always carries the binary; the matching metadata is attached only when a
 metadata entry with the same externalId / id exists in the input collection.
 */
private List<FileContainer> buildFileContainers(Collection<FileBinary> inputBinaries,
                                                Collection<FileMetadata> inputMetadata) {
    List<FileContainer> containers = new ArrayList<>();
    for (FileBinary binary : inputBinaries) {
        FileContainer.Builder containerBuilder = FileContainer.newBuilder()
                .setFileBinary(binary);
        // Perform the metadata lookup once per binary; the original evaluated
        // getByExternalId / getById twice (once for isPresent, once for get).
        Optional<FileMetadata> metadata = Optional.empty();
        if (binary.getIdTypeCase() == FileBinary.IdTypeCase.EXTERNAL_ID) {
            metadata = getByExternalId(inputMetadata, binary.getExternalId());
        } else if (binary.getIdTypeCase() == FileBinary.IdTypeCase.ID) {
            metadata = getById(inputMetadata, binary.getId());
        }
        metadata.ifPresent(containerBuilder::setFileMetadata);
        containers.add(containerBuilder.build());
    }
    return containers;
}
/**
* Downloads file binaries.
*
* This method is intended for advanced use cases, for example when using this SDK as a part of
* a distributed system.
*
* Downloads a set of file binaries based on {@code externalId / id} in the {@link Item} list. The file
* binaries can be downloaded as files or byte streams. In case the file is very large (> 200MB) it has to
* be streamed directly to the file system (to the temp storage area).
*
* Supported temp storage for the file binary:
* - Local (network) disk. Specify the temp path as {@code file://<host>/<my-path>/}.
* Examples: {@code file://localhost/home/files/, file:///home/files/, file:///c:/temp/}
* - Google Cloud Storage. Specify the temp path as {@code gs://<my-storage-bucket>/<my-path>/}.
*
* @param fileItems The list of files to download.
* @param tempStoragePath The URI to the download storage. Set to null to only perform in-memory download.
* @param forceTempStorage Set to true to always download the binary to temp storage
* @return The file binary.
* @throws Exception
*/
public List<FileBinary> downloadFileBinaries(List<Item> fileItems,
@Nullable URI tempStoragePath,
boolean forceTempStorage) throws Exception {
final int MAX_RETRIES = 3;
String loggingPrefix = "downloadFileBinaries() - " + RandomStringUtils.randomAlphanumeric(5) + " - ";
Preconditions.checkArgument(!(null == tempStoragePath && forceTempStorage),
"Illegal parameter combination. You must specify a URI in order to force temp storage.");
Preconditions.checkArgument(itemsHaveId(fileItems),
loggingPrefix + "All file items must include a valid externalId or id.");
Instant startInstant = Instant.now();
// do not send empty requests.
if (fileItems.isEmpty()) {
LOG.warn(loggingPrefix + "Tried to send empty delete request. Will skip this request.");
return Collections.emptyList();
}
LOG.info(loggingPrefix + "Received request to download {} file binaries.",
fileItems.size());
// Download and completed lists
List<Item> elementListDownload = deDuplicate(fileItems);
List<FileBinary> elementListCompleted = new ArrayList<>();
/*
Responses from readFileBinaryById will be a single item in case of an error. Check that item for success,
missing items and duplicates.
*/
// if the request result is false, we have duplicates and/or missing items.
ThreadLocalRandom random = ThreadLocalRandom.current();
String exceptionMessage = "";
for (int i = 0; i < MAX_RETRIES && elementListDownload.size() > 0;
i++, Thread.sleep(Math.min(500L, (10L * (long) Math.exp(i)) + random.nextLong(5)))) {
LOG.debug(loggingPrefix + "Start download loop {} with {} items to download and "
+ "{} completed items at duration {}",
i,
elementListDownload.size(),
elementListCompleted.size(),
Duration.between(startInstant, Instant.now()).toString());
/*
Download files
*/
Map<List<ResponseItems<FileBinary>>, List<Item>> downloadResponseMap =
splitAndDownloadFileBinaries(elementListDownload, tempStoragePath, forceTempStorage);
LOG.debug(loggingPrefix + "Completed download files requests for {} files across {} batches at duration {}",
elementListDownload.size(),
downloadResponseMap.size(),
Duration.between(startInstant, Instant.now()).toString());
elementListDownload.clear();
for (List<ResponseItems<FileBinary>> responseBatch : downloadResponseMap.keySet()) {
if (responseBatch.size() > 0 && responseBatch.get(0).isSuccessful()) {
// All files downloaded successfully
for (ResponseItems<FileBinary> response : responseBatch) {
if (response.isSuccessful()) {
elementListCompleted.addAll(response.getResultsItems());
} else {
// Should not be possible...
LOG.warn(loggingPrefix + "Download not successful: {}", response.getResponseBodyAsString());
}
}
} else if (responseBatch.size() > 0 && !responseBatch.get(0).isSuccessful()) {
// Batch failed. Most likely because of missing or duplicated items
exceptionMessage = responseBatch.get(0).getResponseBodyAsString();
LOG.warn(loggingPrefix + "Download items request failed. Will try to correct errors and retry: {}",
responseBatch.get(0).getResponseBodyAsString());
if (i == MAX_RETRIES - 1) {
// Add the error message to std logging
LOG.error(loggingPrefix + "Download items request failed. {}", responseBatch.get(0).getResponseBodyAsString());
}
LOG.debug(loggingPrefix + "Removing duplicates and missing items and retrying the request");
List<Item> duplicates = ItemParser.parseItems(responseBatch.get(0).getDuplicateItems());
List<Item> missing = new ArrayList(); // Must define this as an explicit List for it to be mutable
missing.addAll(ItemParser.parseItems(responseBatch.get(0).getMissingItems()));
LOG.debug(loggingPrefix + "No of duplicates reported: {}", duplicates.size());
LOG.debug(loggingPrefix + "No of missing items reported: {}", missing.size());
// Check for the special case of missing file binaries
if (responseBatch.size() > 0 && !responseBatch.get(0).isSuccessful()
&& responseBatch.get(0).getResponseBinary().getResponse().code() == 400
&& responseBatch.get(0).getErrorMessage().size() > 0
&& responseBatch.get(0).getErrorMessage().get(0).startsWith("Files not uploaded,")) {
// There is a file binary that hasn't been uploaded, but the file header exists.
// Add the items to the "missing" list so they get removed from the download list.
LOG.debug(loggingPrefix + "Missing file binaries reported: {}", responseBatch.get(0).getErrorMessage().get(0));
if (responseBatch.get(0).getErrorMessage().get(0).startsWith("Files not uploaded, ids:")) {
String[] missingIds = responseBatch.get(0).getErrorMessage().get(0).substring(24).split(",");
for (String stringId : missingIds) {
missing.add(Item.newBuilder()
.setId(Long.parseLong(stringId.trim()))
.build());
}
} else if (responseBatch.get(0).getErrorMessage().get(0).startsWith("Files not uploaded, externalIds:")) {
String[] missingExternalIds = responseBatch.get(0).getErrorMessage().get(0).substring(32).split(",");
for (String externalId : missingExternalIds) {
missing.add(Item.newBuilder()
.setExternalId(externalId.trim())
.build());
}
}
}
// Remove missing items from the download request
Map<String, Item> itemsMap = mapItemToId(downloadResponseMap.get(responseBatch));
for (Item value : missing) {
if (value.getIdTypeCase() == Item.IdTypeCase.EXTERNAL_ID) {
itemsMap.remove(value.getExternalId());
} else if (value.getIdTypeCase() == Item.IdTypeCase.ID) {
itemsMap.remove(String.valueOf(value.getId()));
}
}
// Remove duplicate items from the download request
for (Item value : duplicates) {
if (value.getIdTypeCase() == Item.IdTypeCase.EXTERNAL_ID) {
itemsMap.remove(value.getExternalId());
} else if (value.getIdTypeCase() == Item.IdTypeCase.ID) {
itemsMap.remove(String.valueOf(value.getId()));
}
}
elementListDownload.addAll(itemsMap.values());
}
}
}
// Check if all elements completed the download requests
if (elementListDownload.isEmpty()) {
LOG.info(loggingPrefix + "Successfully downloaded {} files within a duration of {}.",
elementListCompleted.size(),
Duration.between(startInstant, Instant.now()).toString());
} else {
LOG.error(loggingPrefix + "Failed to download files. {} files remaining. {} files completed delete."
+ System.lineSeparator() + "{}",
elementListDownload.size(),
elementListCompleted.size(),
exceptionMessage);
throw new Exception(String.format(loggingPrefix + "Failed to download files. %d files remaining. "
+ " %d files completed download. %n " + exceptionMessage,
elementListDownload.size(),
elementListCompleted.size()));
}
return elementListCompleted;
}
/**
* Deletes a set of files.
*
* The files to delete are identified via their {@code externalId / id} by submitting a list of
* {@link Item}.
*
* @param files a list of {@link Item} representing the events (externalId / id) to be deleted
* @return The deleted events via {@link Item}
* @throws Exception
*/
public List<Item> delete(List<Item> files) throws Exception {
ConnectorServiceV1 connector = getClient().getConnectorService();
ConnectorServiceV1.ItemWriter deleteItemWriter = connector.deleteFiles();
DeleteItems deleteItems = DeleteItems.of(deleteItemWriter, getClient().buildAuthConfig());
return deleteItems.deleteItems(files);
}
/**
* Uploads a set of file binaries.
*
* @param files The files to upload.
* @param deleteTempFile Set to true to remove the URI binary after upload. Set to false to keep the URI binary.
* @return The response json strings (file headers).
* @throws Exception
*/
private List<FileMetadata> uploadFileBinaries(@NotNull List<FileContainer> files, boolean deleteTempFile) throws Exception {
String loggingPrefix = "uploadFileBinaries() - " + RandomStringUtils.randomAlphanumeric(3) + " - ";
Instant startInstant = Instant.now();
ConnectorServiceV1.FileWriter fileWriter = getClient().getConnectorService().writeFileProto()
.enableDeleteTempFile(deleteTempFile);
List<String> responseItems = new ArrayList<>();
// Results set container
List<CompletableFuture<ResponseItems<String>>> resultFutures = new ArrayList<>();
// Write files async
for (FileContainer file : files) {
CompletableFuture<ResponseItems<String>> future = fileWriter.writeFileAsync(
addAuthInfo(Request.create()
.withProtoRequestBody(file))
);
resultFutures.add(future);
}
LOG.debug(loggingPrefix + "Dispatched a batch of {} files for upload. Duration: {}",
files.size(),
Duration.between(startInstant, Instant.now()).toString());
// Sync all downloads to a single future. It will complete when all the upstream futures have completed.
CompletableFuture<Void> allFutures = CompletableFuture.allOf(resultFutures.toArray(
new CompletableFuture[resultFutures.size()]));
// Wait until the uber future completes.
allFutures.join();
// Collect the response items
for (CompletableFuture<ResponseItems<String>> responseItemsFuture : resultFutures) {
if (!responseItemsFuture.join().isSuccessful()) {
// something went wrong with the request
String message = loggingPrefix + "Failed to upload file to Cognite Data Fusion: "
+ responseItemsFuture.join().getResponseBodyAsString();
LOG.error(message);
throw new Exception(message);
}
responseItemsFuture.join().getResultsItems().forEach(responseItems::add);
}
LOG.debug(loggingPrefix + "Completed upload of a batch of {} files within a duration of {}.",
files.size(),
Duration.between(startInstant, Instant.now()).toString());
return responseItems.stream()
.map(this::parseFileMetadata)
.collect(Collectors.toList());
}
/**
* Download a set of file binaries. Large batches are split into multiple download requests.
*
* @param fileItems The list of files to download.
* @param tempStoragePath The URI to the download storage. Set to null to only perform in-memory download.
* @param forceTempStorage Set to true to always download the binary to temp storage
* @return The file binary response map.
* @throws Exception
*/
private Map<List<ResponseItems<FileBinary>>, List<Item>> splitAndDownloadFileBinaries(List<Item> fileItems,
@Nullable URI tempStoragePath,
boolean forceTempStorage) throws Exception {
String loggingPrefix = "splitAndDownloadFileBinaries() - ";
Map<List<ResponseItems<FileBinary>>, List<Item>> responseMap = new HashMap<>();
List<List<Item>> itemBatches = Partition.ofSize(fileItems, MAX_DOWNLOAD_BINARY_BATCH_SIZE);
// Set up the download service
ConnectorServiceV1.FileBinaryReader reader = getClient().getConnectorService().readFileBinariesByIds()
.enableForceTempStorage(forceTempStorage);
if (null != tempStoragePath) {
reader = reader.withTempStoragePath(tempStoragePath);
}
// Process all batches.
for (List<Item> batch : itemBatches) {
Request request = addAuthInfo(Request.create()
.withItems(toRequestItems(deDuplicate(batch))));
try {
responseMap.put(reader.readFileBinaries(request), batch);
} catch (CompletionException e) {
if (e.getCause() instanceof FileBinaryRequestExecutor.ClientRequestException) {
// This exception indicates a malformed download URL--typically an expired URL. This can be caused
// by the parallel downloads interfering with each other. Retry with the file items downloaded individually
LOG.warn(loggingPrefix + "Error when downloading a batch of file binaries. Will retry each file individually.");
for (Item item : batch) {
Request singleItemRequest = addAuthInfo(Request.create()
.withItems(toRequestItems(List.of(item))));
responseMap.put(reader.readFileBinaries(singleItemRequest), List.of(item));
}
} else {
throw e;
}
}
}
return responseMap;
}
/**
* Update file metadata items.
*
* Submits a (large) batch of items by splitting it up into multiple, parallel create / insert requests.
* The response from each request is returned along with the items used as input.
*
* @param fileMetadataList the objects to create/insert.
* @param updateWriter the ItemWriter to use for sending update requests
* @return a {@link Map} with the responses and request inputs.
* @throws Exception
*/
private Map<ResponseItems<String>, List<FileMetadata>> splitAndUpdateFileMetadata(List<FileMetadata> fileMetadataList,
ConnectorServiceV1.ItemWriter updateWriter) throws Exception {
Map<CompletableFuture<ResponseItems<String>>, List<FileMetadata>> responseMap = new HashMap<>();
List<List<FileMetadata>> batches = Partition.ofSize(fileMetadataList, MAX_WRITE_REQUEST_BATCH_SIZE);
// Submit all batches
for (List<FileMetadata> fileBatch : batches) {
responseMap.put(updateFileMetadata(fileBatch, updateWriter), fileBatch);
}
// Wait for all requests futures to complete
List<CompletableFuture<ResponseItems<String>>> futureList = new ArrayList<>();
responseMap.keySet().forEach(future -> futureList.add(future));
CompletableFuture<Void> allFutures =
CompletableFuture.allOf(futureList.toArray(new CompletableFuture[futureList.size()]));
allFutures.join(); // Wait for all futures to complete
// Collect the responses from the futures
Map<ResponseItems<String>, List<FileMetadata>> resultsMap = new HashMap<>(responseMap.size());
for (Map.Entry<CompletableFuture<ResponseItems<String>>, List<FileMetadata>> entry : responseMap.entrySet()) {
resultsMap.put(entry.getKey().join(), entry.getValue());
}
return resultsMap;
}
/**
* Adds asset ids to existing file metadata objects.
*
* Submits a (large) batch of items by splitting it up into multiple, parallel create / insert requests.
* The response from each request is returned along with the items used as input.
*
* @param fileMetadataList the objects to create/insert.
* @param updateWriter the ItemWriter to use for sending update requests
* @return a {@link Map} with the responses and request inputs.
* @throws Exception
*/
private Map<ResponseItems<String>, List<FileMetadata>> splitAndAddAssets(List<FileMetadata> fileMetadataList,
ConnectorServiceV1.ItemWriter updateWriter) throws Exception {
Map<CompletableFuture<ResponseItems<String>>, List<FileMetadata>> responseMap = new HashMap<>();
List<List<FileMetadata>> batches = Partition.ofSize(fileMetadataList, MAX_WRITE_REQUEST_BATCH_SIZE);
// Submit all batches
for (List<FileMetadata> fileBatch : batches) {
responseMap.put(addFileAssets(fileBatch, updateWriter), fileBatch);
}
// Wait for all requests futures to complete
List<CompletableFuture<ResponseItems<String>>> futureList = new ArrayList<>();
responseMap.keySet().forEach(future -> futureList.add(future));
CompletableFuture<Void> allFutures =
CompletableFuture.allOf(futureList.toArray(new CompletableFuture[futureList.size()]));
allFutures.join(); // Wait for all futures to complete
// Collect the responses from the futures
Map<ResponseItems<String>, List<FileMetadata>> resultsMap = new HashMap<>(responseMap.size());
for (Map.Entry<CompletableFuture<ResponseItems<String>>, List<FileMetadata>> entry : responseMap.entrySet()) {
resultsMap.put(entry.getKey().join(), entry.getValue());
}
return resultsMap;
}
/**
* Create /insert items.
*
* Submits a (large) batch of items by splitting it up into multiple, parallel create / insert requests.
* The response from each request is returned along with the items used as input.
*
* @param fileMetadataList the objects to create/insert.
* @param createWriter the ItemWriter to use for sending create requests.
* @return a {@link Map} with the responses and request inputs.
* @throws Exception
*/
private Map<ResponseItems<String>, FileMetadata> splitAndCreateFileMetadata(List<FileMetadata> fileMetadataList,
ConnectorServiceV1.ItemWriter createWriter) throws Exception {
Map<CompletableFuture<ResponseItems<String>>, FileMetadata> responseMap = new HashMap<>();
// Submit all batches
for (FileMetadata file : fileMetadataList) {
responseMap.put(createFileMetadata(file, createWriter), file);
}
// Wait for all requests futures to complete
List<CompletableFuture<ResponseItems<String>>> futureList = new ArrayList<>();
responseMap.keySet().forEach(future -> futureList.add(future));
CompletableFuture<Void> allFutures =
CompletableFuture.allOf(futureList.toArray(new CompletableFuture[futureList.size()]));
allFutures.join(); // Wait for all futures to complete
// Collect the responses from the futures
Map<ResponseItems<String>, FileMetadata> resultsMap = new HashMap<>(responseMap.size());
for (Map.Entry<CompletableFuture<ResponseItems<String>>, FileMetadata> entry : responseMap.entrySet()) {
resultsMap.put(entry.getKey().join(), entry.getValue());
}
return resultsMap;
}
/**
* Post a collection of {@link FileMetadata} create request on a separate thread. The response is wrapped in a
* {@link CompletableFuture} that is returned immediately to the caller.
*
* This method will send the entire input in a single request. It does not
* split the input into multiple batches. If you have a large batch of {@link FileMetadata} that
* you would like to split across multiple requests, use the {@code splitAndCreateFileMetadata} method.
*
* @param fileMetadata
* @param fileWriter
* @return
* @throws Exception
*/
private CompletableFuture<ResponseItems<String>> createFileMetadata(FileMetadata fileMetadata,
ConnectorServiceV1.ItemWriter fileWriter) throws Exception {
String loggingPrefix = "createFileMetadata() - ";
LOG.debug(loggingPrefix + "Received file metadata item / header to create.");
// build request object
Request postSeqBody = addAuthInfo(Request.create()
.withRequestParameters(toRequestInsertItem(fileMetadata)));
// post write request
return fileWriter.writeItemsAsync(postSeqBody);
}
/**
* Post a collection of {@link FileMetadata} update request on a separate thread. The response is wrapped in a
* {@link CompletableFuture} that is returned immediately to the caller.
*
* This method will send the entire input in a single request. It does not
* split the input into multiple batches. If you have a large batch of {@link FileMetadata} that
* you would like to split across multiple requests, use the {@code splitAndUpdateFileMetadata} method.
*
* @param filesBatch
* @param fileWriter
* @return
* @throws Exception
*/
private CompletableFuture<ResponseItems<String>> updateFileMetadata(Collection<FileMetadata> filesBatch,
ConnectorServiceV1.ItemWriter fileWriter) throws Exception {
String loggingPrefix = "updateFileMetadata() - ";
LOG.debug(loggingPrefix + "Received {} file metadata items / headers to update.",
filesBatch.size());
ImmutableList.Builder<Map<String, Object>> insertItemsBuilder = ImmutableList.builder();
for (FileMetadata fileMetadata : filesBatch) {
if (getClient().getClientConfig().getUpsertMode() == UpsertMode.REPLACE) {
insertItemsBuilder.add(toRequestReplaceItem(fileMetadata));
} else {
insertItemsBuilder.add(toRequestUpdateItem(fileMetadata));
}
}
// build request object
Request postSeqBody = addAuthInfo(Request.create()
.withItems(insertItemsBuilder.build()));
// post write request
return fileWriter.writeItemsAsync(postSeqBody);
}
/**
* Patches (adds) a set of assets to a file object. This operation is used when we need to
* handle files with more than 1k assets.
*
* This method will send the entire input in a single request. It does not
* split the input into multiple batches. If you have a large batch of {@link FileMetadata} that
* you would like to split across multiple requests, use the {@code splitAndUpdateFileMetadata} method.
*
* @param filesBatch
* @param fileWriter
* @return
* @throws Exception
*/
private CompletableFuture<ResponseItems<String>> addFileAssets(Collection<FileMetadata> filesBatch,
ConnectorServiceV1.ItemWriter fileWriter) throws Exception {
String loggingPrefix = "patchFileAssets() - ";
LOG.debug(loggingPrefix + "Received {} file metadata items / headers to update.",
filesBatch.size());
ImmutableList.Builder<Map<String, Object>> insertItemsBuilder = ImmutableList.builder();
for (FileMetadata fileMetadata : filesBatch) {
insertItemsBuilder.add(toRequestAddAssetsItem(fileMetadata));
}
// build request object
Request postSeqBody = addAuthInfo(Request.create()
.withItems(insertItemsBuilder.build()));
// post write request
return fileWriter.writeItemsAsync(postSeqBody);
}
/**
* Maps the file metadata items to their id by looking up externalId and id.
*
* @param fileMetadataList
* @return
*/
private Map<String, FileMetadata> mapToId(List<FileMetadata> fileMetadataList) {
Map<String, FileMetadata> idMap = new HashMap<>();
for (FileMetadata fileMetadata : fileMetadataList) {
if (fileMetadata.hasExternalId()) {
idMap.put(fileMetadata.getExternalId(), fileMetadata);
} else if (fileMetadata.hasId()) {
idMap.put(String.valueOf(fileMetadata.getId()), fileMetadata);
} else {
idMap.put("", fileMetadata);
}
}
return idMap;
}
/*
Wrapping the parser because we need to handle the exception--an ugly workaround since lambdas don't
deal very well with exceptions.
*/
private FileMetadata parseFileMetadata(String json) {
try {
return FileParser.parseFileMetadata(json);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/*
Wrapping the parser because we need to handle the exception--an ugly workaround since lambdas don't
deal very well with exceptions.
*/
private Map<String, Object> toRequestInsertItem(FileMetadata item) {
try {
return FileParser.toRequestInsertItem(item);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/*
Wrapping the parser because we need to handle the exception--an ugly workaround since lambdas don't
deal very well with exceptions.
*/
private Map<String, Object> toRequestUpdateItem(FileMetadata item) {
try {
return FileParser.toRequestUpdateItem(item);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/*
Wrapping the parser because we need to handle the exception--an ugly workaround since lambdas don't
deal very well with exceptions.
*/
private Map<String, Object> toRequestReplaceItem(FileMetadata item) {
try {
return FileParser.toRequestReplaceItem(item);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/*
Wrapping the parser because we need to handle the exception--an ugly workaround since lambdas don't
deal very well with exceptions.
*/
private Map<String, Object> toRequestAddAssetsItem(FileMetadata item) {
try {
return FileParser.toRequestAddAssetIdsItem(item);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/*
Returns the id of file metadata. It will first check for an externalId, second it will check for id.
If no id is found, it returns an empty Optional.
*/
private Optional<String> getFileId(FileMetadata item) {
if (item.hasExternalId()) {
return Optional.of(item.getExternalId());
} else if (item.hasId()) {
return Optional.of(String.valueOf(item.getId()));
} else {
return Optional.<String>empty();
}
}
/*
Returns the id of file metadata. It will first check for an externalId, second it will check for id.
If no id is found, it returns an empty Optional.
*/
private Optional<String> getFileId(FileBinary item) {
if (item.getIdTypeCase() == FileBinary.IdTypeCase.EXTERNAL_ID) {
return Optional.of(item.getExternalId());
} else if (item.getIdTypeCase() == FileBinary.IdTypeCase.ID) {
return Optional.of(String.valueOf(item.getId()));
} else {
return Optional.<String>empty();
}
}
/*
Returns the file metadata that matches a given externalId
*/
private Optional<FileMetadata> getByExternalId(Collection<FileMetadata> itemsToSearch, String externalId) {
Optional<FileMetadata> returnObject = Optional.empty();
for (FileMetadata item : itemsToSearch) {
if (item.getExternalId().equals(externalId)) {
return Optional.of(item);
}
}
return returnObject;
}
/*
Returns the file metadata that matches a given id
*/
private Optional<FileMetadata> getById(Collection<FileMetadata> itemsToSearch, long id) {
Optional<FileMetadata> returnObject = Optional.empty();
for (FileMetadata item : itemsToSearch) {
if (item.getId() == id) {
return Optional.of(item);
}
}
return returnObject;
}
    /**
     * AutoValue builder for this API class. Inherits the common configuration setters
     * from {@link ApiBase.Builder}.
     */
    @AutoValue.Builder
    abstract static class Builder extends ApiBase.Builder<Builder> {
        abstract Files build();
    }
}
|
package com.stockticker;
import com.facebook.react.ReactActivity;
import android.content.Intent;
import android.content.res.Configuration;
public class MainActivity extends ReactActivity {

    /**
     * Returns the name of the main component registered from JavaScript.
     * This is used to schedule rendering of the component.
     */
    @Override
    protected String getMainComponentName() {
        return "stockticker";
    }

    /**
     * Re-broadcasts configuration changes (e.g. orientation) as an
     * "onConfigurationChanged" intent so interested receivers can react to them.
     */
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        Intent configChangeIntent = new Intent("onConfigurationChanged");
        configChangeIntent.putExtra("newConfig", newConfig);
        sendBroadcast(configChangeIntent);
    }
}
|
package ch.usi.inf.omicron.taskManager;
import android.app.job.JobInfo;
import android.app.job.JobScheduler;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.Query;
import com.google.firebase.database.ValueEventListener;
import com.google.gson.Gson;
import java.util.Calendar;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import ch.usi.inf.omicron.Log;
import ch.usi.inf.omicron.Utils;
import static android.content.Context.MODE_PRIVATE;
import static ch.usi.inf.omicron.UMob.iid;
import static ch.usi.inf.omicron.Utils.resetSharedForNewTask;
public class TaskManager {
private static final String TASK = "umob.Main.task";
// tasks retrieved from the database (Date -> Hash of Tasks)
private HashMap<String, LinkedHashMap<String, UmobTask>> DateTasks = new HashMap<>();
// reference to the json branch that will hold the user's tasks
private DatabaseReference idRef;
// active task to be done by the user
private UmobTask activeTask;
private String activeTaskId;
private String activeTaskDate;
private SharedPreferences sharedPref;
public TaskManager(final Context ctx) {
init(ctx);
//Start the task scheduler that checks every hour that the tasks are still in the window
schedule(ctx);
}
private static long utcMillisNextHour() {
Calendar now = Calendar.getInstance();
//Set current time at the start of the next hour
Calendar startNextHour = Calendar.getInstance();
startNextHour.add(Calendar.HOUR, 1);
startNextHour.set(Calendar.MINUTE, 0);
return (Math.abs(startNextHour.getTimeInMillis() - now.getTimeInMillis()));
}
private void init(Context ctx) {
sharedPref = ctx.getSharedPreferences("shared_preferences",
MODE_PRIVATE);
boolean permissionsGranted = sharedPref.getBoolean("permissionsGranted", false);
Log.i(TASK, "Retrieving tasks from database at id: " + iid);
idRef = FirebaseDatabase.getInstance().getReference(String.format("%s/tasks", iid));
//attach the Firebase listener
startTaskListener(ctx);
}
public void startTaskListener(final Context ctx) {
Query query = idRef;
query.addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
//check for the new tasks
updateTaskList(dataSnapshot, ctx);
}
@Override
public void onCancelled(DatabaseError databaseError) {
Log.w(TASK, "Failed to read task.");
}
});
}
void schedule(Context ctx) {
JobScheduler mJobScheduler = (JobScheduler)
ctx.getSystemService(Context.JOB_SCHEDULER_SERVICE);
JobInfo.Builder builder = new JobInfo.Builder(1,
new ComponentName(ctx.getPackageName(),
TaskScheduler.class.getName()));
//10 minutes interval to consume less energy
int delay = 10 * 60 * 1000;
builder.setMinimumLatency(utcMillisNextHour()); // wait at least
builder.setOverrideDeadline(utcMillisNextHour() + delay); // maximum delay
assert mJobScheduler != null;
mJobScheduler.schedule(builder.build());
Log.v(TASK, "New in window check in ms:" + utcMillisNextHour());
}
private void updateTaskList(DataSnapshot dataSnapshot, Context ctx) {
SharedPreferences sharedPref = ctx.getSharedPreferences("shared_preferences",
MODE_PRIVATE);
boolean taskStarted = sharedPref.getBoolean("pre_task_questionnaire_opened", false);
boolean update_task = true;
//don't update the tasks if we are doing a task
if (taskStarted && this.isTaskAvailable()) {
//and the task must not be expired
if (this.getActiveTask().isNotDoneOrExpired()) {
update_task = false;
}
}
String prv_task_id = getActiveTaskId();
UmobTask prv_task = getActiveTask();
Log.i(TASK, "Tasks on database got updated.");
resetTaskModel();
//build Date-Task model
//for every date
buildTaskModel(dataSnapshot);
findSuitableTask(ctx, update_task, prv_task_id, prv_task);
}
private void buildTaskModel(DataSnapshot dataSnapshot) {
for (DataSnapshot date : dataSnapshot.getChildren()) {
//set for tasks
LinkedHashMap<String, UmobTask> tasks = new LinkedHashMap<>();
//for every task
for (DataSnapshot ds : date.getChildren()) {
try {
UmobTask task = ds.getValue(UmobTask.class);
if (task != null) {
String taskId = ds.getKey();
tasks.put(taskId, task);
Log.i(TASK, "Retrieved task: " + taskId + " done " + task.done + " title " + task.getTitle()
+ "window:" + task.windowStart + "-" + task.windowEnd);
}
} catch (Exception ignored) {
Log.e(TASK, "Malformatted task");
}
}
DateTasks.put(date.getKey(), tasks);
}
}
void findSuitableTask(Context ctx, boolean update_task, String prv_task_id, UmobTask prv_task) {
//temporary values before updating
UmobTask temp_task = null;
String temp_taskid = null;
String temp_date = null;
boolean task_found = false;
String isoDate = String.format("%tF", Calendar.getInstance());
try {
for (Map.Entry<String, UmobTask> taskEntry : DateTasks.get(isoDate).entrySet()) {
UmobTask task = taskEntry.getValue();
String taskId = taskEntry.getKey();
//find task not done and inside the time window
if (task.isNotDoneOrExpired() && !task_found && update_task) {
Log.i(TASK, "Selected task: " + taskId + " done " + task.done + " title " + task.getTitle()
+ "window:" + task.windowStart + "-" + task.windowEnd);
temp_task = task;
temp_taskid = taskId;
temp_date = isoDate;
task_found = true;
}
}
} catch (Exception ignore) {
Log.e(TASK, "No task for key " + isoDate);
}
//update the active task
updateActiveTask(temp_task, temp_taskid, temp_date);
//update main activity
broadcastTask(ctx, update_task, prv_task_id, prv_task);
}
private void broadcastTask(Context ctx, boolean update_task, String prv_task_id, UmobTask prv_task) {
if (update_task) {
//No task can be done and I was doing one but not completed before the update
if (getActiveTask() == null && prv_task_id != null && !prv_task.done) {
Intent intent = new Intent("TASK_EXPIRED");
//task was already rescheduled no more task
if (prv_task.rescheduled) {
resetSharedForNewTask(ctx);
Utils.updateNotification("", ctx);
ctx.sendBroadcast(intent);
} else {
//reschedule task if we can
Utils.updateNotification("", ctx);
ctx.sendBroadcast(intent);
reschedule(prv_task_id, prv_task);
}
}
//No more task and completed the previous one
else if (getActiveTask() == null && prv_task_id != null && prv_task.done) {
//can we show a previous undone task?
revive(prv_task_id, prv_task);
}
//new task arrived, it's different than what I see
else if (getActiveTask() != null && (prv_task_id == null || !prv_task_id.equals(getActiveTaskId()))) {
Utils.resetSharedForNewTask(ctx);
Intent intent = new Intent("REFRESH_TASK_HINT");
ctx.sendBroadcast(intent);
Utils.updateNotification(this.getActiveTask().getTitle(), ctx);
}
}
}
/*
* Reschedule a previous undone task
*/
private void revive(String prv_task_id, UmobTask prv_task) {
String isoDate = String.format("%tF", Calendar.getInstance());
try {
for (Map.Entry<String, UmobTask> taskEntry : DateTasks.get(isoDate).entrySet()) {
if (!taskEntry.getKey().equals(prv_task_id)) {
UmobTask task = taskEntry.getValue();
String task_id = taskEntry.getKey();
//there's an earlier task not done/rescheduled
if (task.windowEnd <= prv_task.windowEnd && !task.done && !task.rescheduled) {
task.windowStart = prv_task.windowStart;
task.windowEnd = prv_task.windowEnd;
task.rescheduled = true;
//reissue on Firebase
idRef.child(isoDate).child(task_id).setValue(task);
//we found it we can stop
return;
}
}
}
} catch (Exception ignore) {
Log.e(TASK, "Revive: No task for key " + isoDate);
}
}
/*
* we issue task again as soon as it expires if it doesn't overlap with another task.
* if it does overlap the user see it if it completes the next task.
*/
/**
 * Reissues an expired task in the immediately following window of the same
 * length, provided the shifted window still fits in the current day (hours
 * are clamped to 24) and does not overlap any other task scheduled today.
 * If accepted, the task is marked rescheduled and re-published to Firebase.
 *
 * @param taskToRescheduleID Firebase key of the task being rescheduled
 * @param taskToReschedule   the task whose window is shifted forward
 */
private void reschedule(String taskToRescheduleID, UmobTask taskToReschedule) {
    int interval = taskToReschedule.windowEnd - taskToReschedule.windowStart;
    // New window: shifted forward by its own length.
    int newStart = taskToReschedule.windowStart + interval;
    // Can't reschedule a task to a new day.
    if (newStart > 24) {
        return;
    }
    int newEnd = taskToReschedule.windowEnd + interval;
    if (newEnd > 24) {
        newEnd = 24; // clamp to end of day
    }
    boolean overlaps = false;
    // Today's date formatted as ISO (yyyy-MM-dd); used as the map/Firebase key.
    String isoDate = String.format("%tF", Calendar.getInstance());
    try {
        for (Map.Entry<String, UmobTask> taskEntry : DateTasks.get(isoDate).entrySet()) {
            if (!taskEntry.getKey().equals(taskToRescheduleID)) {
                UmobTask task = taskEntry.getValue();
                // NOTE(review): boundary handling is asymmetric (<= vs >): a new
                // window starting exactly when another ends counts as an overlap,
                // but one ending exactly when another starts does not. Confirm
                // this is intentional before changing.
                if (newStart <= task.windowEnd && newEnd > task.windowStart) {
                    overlaps = true;
                    // One overlap is enough; no need to scan the remaining tasks.
                    break;
                }
            }
        }
    } catch (Exception ignore) {
        // DateTasks.get(isoDate) is null when no tasks exist for today (NPE).
        Log.e(TASK, "Reschedule: No task for key " + isoDate);
    }
    if (!overlaps) {
        taskToReschedule.windowStart = newStart;
        taskToReschedule.windowEnd = newEnd;
        taskToReschedule.rescheduled = true;
        //reissue on Firebase
        idRef.child(isoDate).child(taskToRescheduleID).setValue(taskToReschedule);
    }
}
/**
 * Discards all locally cached tasks. Does not touch the Firebase copy.
 */
private void resetTaskModel() {
    DateTasks.clear();
}
/**
 * Replaces the currently tracked task and immediately persists the new state
 * so it survives if this object is killed.
 *
 * @param task the task to track
 * @param id   its Firebase key
 * @param date the ISO date key under which the task lives
 */
public void updateActiveTask(UmobTask task, String id, String date) {
    this.activeTask = task;
    this.activeTaskId = id;
    this.activeTaskDate = date;
    // Persist right away in case this object is killed by the OS.
    exportTM();
}
/**
 * Serializes the active-task state (task JSON, id, date) into shared
 * preferences so it can be restored after a process restart.
 */
private void exportTM() {
    SharedPreferences.Editor editor = sharedPref.edit();
    editor.putString("activeTask", new Gson().toJson(activeTask));
    editor.putString("activeTaskId", activeTaskId);
    editor.putString("activeTaskDate", activeTaskDate);
    editor.apply();
}
/**
 * @return {@code true} when a task is being tracked and it has not yet been
 *         completed
 */
public boolean isTaskAvailable() {
    if (activeTask == null) {
        return false;
    }
    return !activeTask.isDone();
}
/** @return the task currently being tracked, or {@code null} if none. */
public UmobTask getActiveTask() {
    return this.activeTask;
}
/** @return the Firebase key of the currently tracked task. */
public String getActiveTaskId() {
    return this.activeTaskId;
}
/**
 * Stamps the active task with the current wall-clock start time and persists
 * the state in case this object is killed.
 */
public void startTask() {
    activeTask.startTimestamp = System.currentTimeMillis();
    exportTM(); // save in case this object is killed
}
/**
 * Marks the active task finished, pushes the final state to Firebase, and
 * blanks the persisted active-task snapshot so a restart does not resurrect
 * the task.
 */
public void finishTask() {
    activeTask.finish();
    idRef.child(activeTaskDate).child(activeTaskId).setValue(activeTask);
    // Reset persistent state (empty strings, matching the original contract).
    SharedPreferences.Editor editor = sharedPref.edit();
    editor.putString("activeTask", "");
    editor.putString("activeTaskId", "");
    editor.putString("activeTaskDate", "");
    editor.apply();
}
}
|
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.client.representation;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class ServerConfiguration {

    // --- Core OIDC discovery endpoints -------------------------------------
    @JsonProperty("issuer")
    private String issuer;
    @JsonProperty("authorization_endpoint")
    private String authorizationEndpoint;
    @JsonProperty("token_endpoint")
    private String tokenEndpoint;
    @JsonProperty("token_introspection_endpoint")
    private String tokenIntrospectionEndpoint;
    @JsonProperty("userinfo_endpoint")
    private String userinfoEndpoint;
    @JsonProperty("end_session_endpoint")
    private String logoutEndpoint;
    @JsonProperty("jwks_uri")
    private String jwksUri;
    @JsonProperty("check_session_iframe")
    private String checkSessionIframe;

    // --- Capability lists advertised by the server -------------------------
    @JsonProperty("grant_types_supported")
    private List<String> grantTypesSupported;
    @JsonProperty("response_types_supported")
    private List<String> responseTypesSupported;
    @JsonProperty("subject_types_supported")
    private List<String> subjectTypesSupported;
    @JsonProperty("id_token_signing_alg_values_supported")
    private List<String> idTokenSigningAlgValuesSupported;
    @JsonProperty("userinfo_signing_alg_values_supported")
    private List<String> userInfoSigningAlgValuesSupported;
    @JsonProperty("request_object_signing_alg_values_supported")
    private List<String> requestObjectSigningAlgValuesSupported;
    @JsonProperty("response_modes_supported")
    private List<String> responseModesSupported;
    @JsonProperty("registration_endpoint")
    private String registrationEndpoint;
    @JsonProperty("token_endpoint_auth_methods_supported")
    private List<String> tokenEndpointAuthMethodsSupported;
    @JsonProperty("token_endpoint_auth_signing_alg_values_supported")
    private List<String> tokenEndpointAuthSigningAlgValuesSupported;
    @JsonProperty("claims_supported")
    private List<String> claimsSupported;
    @JsonProperty("claim_types_supported")
    private List<String> claimTypesSupported;
    @JsonProperty("claims_parameter_supported")
    private Boolean claimsParameterSupported;
    @JsonProperty("scopes_supported")
    private List<String> scopesSupported;
    @JsonProperty("request_parameter_supported")
    private Boolean requestParameterSupported;
    @JsonProperty("request_uri_parameter_supported")
    private Boolean requestUriParameterSupported;

    // --- UMA / authorization-services specific endpoints -------------------
    @JsonProperty("resource_registration_endpoint")
    private String resourceRegistrationEndpoint;
    @JsonProperty("permission_endpoint")
    private String permissionEndpoint;

    // Read-only accessors; the object is populated by Jackson from the
    // server's discovery document and never mutated afterwards.

    public String getIssuer() {
        return this.issuer;
    }

    public String getAuthorizationEndpoint() {
        return this.authorizationEndpoint;
    }

    public String getTokenEndpoint() {
        return this.tokenEndpoint;
    }

    public String getTokenIntrospectionEndpoint() {
        return this.tokenIntrospectionEndpoint;
    }

    public String getUserinfoEndpoint() {
        return this.userinfoEndpoint;
    }

    public String getLogoutEndpoint() {
        return this.logoutEndpoint;
    }

    public String getJwksUri() {
        return this.jwksUri;
    }

    public String getCheckSessionIframe() {
        return this.checkSessionIframe;
    }

    public List<String> getGrantTypesSupported() {
        return this.grantTypesSupported;
    }

    public List<String> getResponseTypesSupported() {
        return this.responseTypesSupported;
    }

    public List<String> getSubjectTypesSupported() {
        return this.subjectTypesSupported;
    }

    public List<String> getIdTokenSigningAlgValuesSupported() {
        return this.idTokenSigningAlgValuesSupported;
    }

    public List<String> getUserInfoSigningAlgValuesSupported() {
        return this.userInfoSigningAlgValuesSupported;
    }

    public List<String> getRequestObjectSigningAlgValuesSupported() {
        return this.requestObjectSigningAlgValuesSupported;
    }

    public List<String> getResponseModesSupported() {
        return this.responseModesSupported;
    }

    public String getRegistrationEndpoint() {
        return this.registrationEndpoint;
    }

    public List<String> getTokenEndpointAuthMethodsSupported() {
        return this.tokenEndpointAuthMethodsSupported;
    }

    public List<String> getTokenEndpointAuthSigningAlgValuesSupported() {
        return this.tokenEndpointAuthSigningAlgValuesSupported;
    }

    public List<String> getClaimsSupported() {
        return this.claimsSupported;
    }

    public List<String> getClaimTypesSupported() {
        return this.claimTypesSupported;
    }

    public Boolean getClaimsParameterSupported() {
        return this.claimsParameterSupported;
    }

    public List<String> getScopesSupported() {
        return this.scopesSupported;
    }

    public Boolean getRequestParameterSupported() {
        return this.requestParameterSupported;
    }

    public Boolean getRequestUriParameterSupported() {
        return this.requestUriParameterSupported;
    }

    public String getResourceRegistrationEndpoint() {
        return this.resourceRegistrationEndpoint;
    }

    public String getPermissionEndpoint() {
        return this.permissionEndpoint;
    }
}
|
package com.anchore.jenkins.plugins.anchore.model.vdb;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
"children",
"id",
"key",
"value"
})
public class Ref {

    @JsonProperty("children")
    private List<java.lang.Object> children = null;
    @JsonProperty("id")
    private Integer id;
    @JsonProperty("key")
    private String key;
    @JsonProperty("value")
    private String value;
    // Catch-all for JSON properties not modeled above.
    @JsonIgnore
    private Map<String, java.lang.Object> additionalProperties = new HashMap<>();

    @JsonProperty("children")
    public List<java.lang.Object> getChildren() {
        return this.children;
    }

    @JsonProperty("children")
    public void setChildren(List<java.lang.Object> children) {
        this.children = children;
    }

    @JsonProperty("id")
    public Integer getId() {
        return this.id;
    }

    @JsonProperty("id")
    public void setId(Integer id) {
        this.id = id;
    }

    @JsonProperty("key")
    public String getKey() {
        return this.key;
    }

    @JsonProperty("key")
    public void setKey(String key) {
        this.key = key;
    }

    @JsonProperty("value")
    public String getValue() {
        return this.value;
    }

    @JsonProperty("value")
    public void setValue(String value) {
        this.value = value;
    }

    /** Returns the bag of unmodeled JSON properties. */
    @JsonAnyGetter
    public Map<String, java.lang.Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    /** Stores a JSON property that has no dedicated field. */
    @JsonAnySetter
    public void setAdditionalProperty(String name, java.lang.Object value) {
        this.additionalProperties.put(name, value);
    }
}
|
package alexander.ivanov.ms;
/**
 * Contract for a simple message bus: clients register themselves, and
 * messages are created for a named client and then dispatched.
 */
public interface MessageSystem {
    /** Performs any setup required before the system can route messages. */
    void init();
    /** Dispatches {@code message} — presumably to its addressee; confirm with implementations. */
    void sendMessage(Message message);
    /** Registers {@code client} with the system so it can participate in messaging. */
    void addClient(MessageClient client);
    /** Builds a message carrying {@code data} addressed to the client named {@code clientName}. */
    Message createMessageFor(String clientName, String data);
}
|
/*
Copyright (c) 2012-2013, BogDan Vatra <bogdan@kde.org>
Contact: http://www.qt-project.org/legal
Commercial License Usage
Licensees holding valid commercial Qt licenses may use this file in
accordance with the commercial license agreement provided with the
Software or, alternatively, in accordance with the terms contained in
a written agreement between you and Digia. For licensing terms and
conditions see http://qt.digia.com/licensing. For further information
use the contact form at http://qt.digia.com/contact-us.
BSD License Usage
Alternatively, this file may be used under the BSD license as follows:
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.qtproject.qt5.android.bindings;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import android.app.Application;
/**
 * Application subclass that forwards selected framework callbacks to a Qt
 * activity delegate object, discovered and wired up via reflection.
 */
public class QtApplication extends Application
{
    public final static String QtTAG = "Qt";

    /** The delegate instance all invocations are routed to (may be null). */
    public static Object m_delegateObject = null;
    /** Delegate methods found on the delegate object, keyed by method name. */
    public static HashMap<String, ArrayList<Method>> m_delegateMethods= new HashMap<String, ArrayList<Method>>();

    // The fields below are populated reflectively by setQtActivityDelegate:
    // each is matched by name against a discovered delegate method.
    public static Method dispatchKeyEvent = null;
    public static Method dispatchPopulateAccessibilityEvent = null;
    public static Method dispatchTouchEvent = null;
    public static Method dispatchTrackballEvent = null;
    public static Method onKeyDown = null;
    public static Method onKeyMultiple = null;
    public static Method onKeyUp = null;
    public static Method onTouchEvent = null;
    public static Method onTrackballEvent = null;
    public static Method onActivityResult = null;
    public static Method onCreate = null;
    public static Method onKeyLongPress = null;
    public static Method dispatchKeyShortcutEvent = null;
    public static Method onKeyShortcut = null;
    public static Method dispatchGenericMotionEvent = null;
    public static Method onGenericMotionEvent = null;

    /**
     * Installs {@code listener} as the delegate: collects its Qt-declared
     * public methods, keeps those that QtActivity also declares, and assigns
     * matching {@code Method} handles to the same-named static fields above.
     *
     * @param listener the delegate object to route callbacks to
     */
    public static void setQtActivityDelegate(Object listener)
    {
        QtApplication.m_delegateObject = listener;

        // Candidate delegate methods: public methods declared in Qt's packages.
        ArrayList<Method> delegateMethods = new ArrayList<Method>();
        for (Method m : listener.getClass().getMethods()) {
            if (m.getDeclaringClass().getName().startsWith("org.qtproject.qt5.android"))
                delegateMethods.add(m);
        }

        // Fields declared directly on QtApplication (the Method handles above).
        ArrayList<Field> applicationFields = new ArrayList<Field>();
        for (Field f : QtApplication.class.getFields()) {
            if (f.getDeclaringClass().getName().equals(QtApplication.class.getName()))
                applicationFields.add(f);
        }

        for (Method delegateMethod : delegateMethods) {
            try {
                // Keep only methods QtActivity itself declares; throws
                // NoSuchMethodException otherwise, which we deliberately skip.
                QtActivity.class.getDeclaredMethod(delegateMethod.getName(), delegateMethod.getParameterTypes());
                if (QtApplication.m_delegateMethods.containsKey(delegateMethod.getName())) {
                    QtApplication.m_delegateMethods.get(delegateMethod.getName()).add(delegateMethod);
                } else {
                    ArrayList<Method> delegateSet = new ArrayList<Method>();
                    delegateSet.add(delegateMethod);
                    QtApplication.m_delegateMethods.put(delegateMethod.getName(), delegateSet);
                }
                // Expose the handle through the same-named static field, if any.
                for (Field applicationField : applicationFields) {
                    if (applicationField.getName().equals(delegateMethod.getName())) {
                        try {
                            applicationField.set(null, delegateMethod);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
            } catch (Exception ignored) {
                // QtActivity does not declare this delegate method; skip it.
            }
        }
    }

    @Override
    public void onTerminate() {
        if (m_delegateObject != null && m_delegateMethods.containsKey("onTerminate"))
            invokeDelegateMethod(m_delegateMethods.get("onTerminate").get(0));
        super.onTerminate();
    }

    /** Outcome of a delegate invocation: whether it ran, and its return value. */
    public static class InvokeResult
    {
        public boolean invoked = false;
        public Object methodReturns = null;
    }

    // Cached stack depth of the QtActivity frame; -1 until first resolved.
    private static int stackDeep=-1;

    /**
     * Invokes the delegate method whose name matches the calling method
     * (determined from the stack trace) with the given arguments.
     *
     * @param args arguments forwarded to the delegate method
     * @return the invocation result; {@code invoked} is false if no delegate
     *         object is set or no matching method was found
     */
    public static InvokeResult invokeDelegate(Object... args)
    {
        InvokeResult result = new InvokeResult();
        if (m_delegateObject == null)
            return result;
        StackTraceElement[] elements = Thread.currentThread().getStackTrace();
        if (-1 == stackDeep) {
            String activityClassName = QtActivity.class.getCanonicalName();
            for (int it=0;it<elements.length;it++)
                if (elements[it].getClassName().equals(activityClassName)) {
                    stackDeep = it;
                    break;
                }
        }
        // BUG FIX: guard BEFORE indexing. Previously elements[stackDeep] was
        // read before the -1 check, throwing ArrayIndexOutOfBoundsException
        // whenever the activity class was not found on the call stack.
        if (-1 == stackDeep)
            return result;
        final String methodName = elements[stackDeep].getMethodName();
        if (!m_delegateMethods.containsKey(methodName))
            return result;
        // Dispatch on arity: pick the overload with a matching parameter count.
        for (Method m : m_delegateMethods.get(methodName)) {
            if (m.getParameterTypes().length == args.length) {
                result.methodReturns = invokeDelegateMethod(m, args);
                result.invoked = true;
                return result;
            }
        }
        return result;
    }

    /**
     * Invokes {@code m} on the delegate object, returning its result or
     * {@code null} if the invocation failed (the exception is printed).
     */
    public static Object invokeDelegateMethod(Method m, Object... args)
    {
        try {
            return m.invoke(m_delegateObject, args);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
package states.Player;
import AncapLibrary.Player.AncapPlayerMap;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
public class AncapStatesPlayerMap extends AncapPlayerMap {

    // Eagerly created singleton.
    private static final AncapStatesPlayerMap INSTANCE = new AncapStatesPlayerMap();

    /** @return the shared singleton instance. */
    public static AncapStatesPlayerMap getInstance() {
        return INSTANCE;
    }

    /**
     * Wraps every currently online Bukkit player in an
     * {@link AncapStatesPlayer} keyed by player name.
     */
    @Override
    public AncapStatesPlayer[] getOnlinePlayers() {
        Player[] online = Bukkit.getOnlinePlayers().toArray(new Player[0]);
        AncapStatesPlayer[] result = new AncapStatesPlayer[online.length];
        int index = 0;
        for (Player bukkitPlayer : online) {
            result[index++] = new AncapStatesPlayer(bukkitPlayer.getName());
        }
        return result;
    }
}
|
/**
* Copyright (c) 2013-2020 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.spring.data.connection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.redisson.Redisson;
import org.redisson.api.RedissonClient;
import org.redisson.client.RedisClient;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.config.Config;
import org.redisson.connection.SentinelConnectionManager;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.data.redis.ExceptionTranslationStrategy;
import org.springframework.data.redis.PassThroughExceptionTranslationStrategy;
import org.springframework.data.redis.connection.RedisClusterConnection;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.RedisSentinelConnection;
/**
* Redisson based connection factory
*
* @author Nikita Koksharov
*
*/
public class RedissonConnectionFactory implements RedisConnectionFactory, InitializingBean, DisposableBean {

    private final static Log log = LogFactory.getLog(RedissonConnectionFactory.class);

    public static final ExceptionTranslationStrategy EXCEPTION_TRANSLATION =
            new PassThroughExceptionTranslationStrategy(new RedissonExceptionConverter());

    // Non-null only when this factory owns the Redisson instance lifecycle
    // (i.e. it was constructed from a Config and creates the client itself).
    private Config config;
    private RedissonClient redisson;

    /**
     * Creates factory with default Redisson configuration
     */
    public RedissonConnectionFactory() {
        this(Redisson.create());
    }

    /**
     * Creates factory with defined Redisson instance
     *
     * @param redisson - Redisson instance
     */
    public RedissonConnectionFactory(RedissonClient redisson) {
        this.redisson = redisson;
    }

    /**
     * Creates factory with defined Redisson config
     *
     * @param config - Redisson config
     */
    public RedissonConnectionFactory(Config config) {
        super();
        this.config = config;
    }

    @Override
    public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
        return EXCEPTION_TRANSLATION.translate(ex);
    }

    @Override
    public void destroy() throws Exception {
        // FIX: shut down the Redisson instance this factory created itself in
        // afterPropertiesSet(); previously it was leaked on context shutdown.
        // Instances supplied by the caller remain the caller's responsibility.
        if (config != null && redisson != null) {
            redisson.shutdown();
        }
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // When constructed from a Config, create (and own) the client here.
        if (config != null) {
            redisson = Redisson.create(config);
        }
    }

    @Override
    public RedisConnection getConnection() {
        return new RedissonConnection(redisson);
    }

    @Override
    public RedisClusterConnection getClusterConnection() {
        if (!redisson.getConfig().isClusterConfig()) {
            throw new InvalidDataAccessResourceUsageException("Redisson is not in Cluster mode");
        }
        return new RedissonClusterConnection(redisson);
    }

    @Override
    public boolean getConvertPipelineAndTxResults() {
        return true;
    }

    @Override
    public RedisSentinelConnection getSentinelConnection() {
        if (!redisson.getConfig().isSentinelConfig()) {
            throw new InvalidDataAccessResourceUsageException("Redisson is not in Sentinel mode");
        }
        SentinelConnectionManager manager = ((SentinelConnectionManager)((Redisson)redisson).getConnectionManager());
        // Probe the sentinels in order and return the first one answering PING.
        for (RedisClient client : manager.getSentinels()) {
            org.redisson.client.RedisConnection connection = client.connect();
            try {
                String res = connection.sync(RedisCommands.PING);
                if ("pong".equalsIgnoreCase(res)) {
                    return new RedissonSentinelConnection(connection);
                }
            } catch (Exception e) {
                log.warn("Can't connect to " + client, e);
                connection.closeAsync();
            }
        }
        throw new InvalidDataAccessResourceUsageException("Sentinels are not found");
    }
}
|
/*
* Copyright 2015 Ooluk Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.ooluk.ddm.dataimport.data;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import com.ooluk.ddm.dataimport.CaseMode;
/**
* This class represents an intermediate attribute created by a DataObjectReader from a data object repository.
*
* <p>
* This class does not maintain a reference to the containing data object and it is therefore vital that you use objects
* of this class from within a data object context only. Otherwise the semantics used in equals() (attributes are equal
* if their names are equal) could cause issues with collections. Also note that the equals() methods uses getClass()
* instead of instanceof.
* </p>
*
* @author Siddhesh Prabhu
* @since 1.0
*/
@XmlRootElement(name = "attribute")
@XmlType(propOrder = { "name", "logicalName", "seqNo", "dataType", "commonType", "key", "parentAttribute", "required",
"defaultValue", "source", "localSources", "summary", "description", "tags", "extendedProperties", "codes" })
public class ScannedAttribute {

    // Identity and ordering
    private String name = "";
    private String logicalName = "";
    private Integer seqNo = 0;
    // Type information
    private String dataType = "";
    private String commonType = "";
    // Structure: key/parent/nullability/default
    private boolean key = false;
    private String parentAttribute = "";
    private boolean required = false;
    private String defaultValue = "";
    // Provenance and documentation
    private String source = "";
    private String summary = "";
    private String description = "";
    private List<String> tags;
    private List<ScannedAttributeSource> localSources;
    private Map<String, String> extnProps;
    private List<ScannedAttributeCode> attributeCodes;
    // Case conversion applied to most string fields on assignment.
    private final CaseMode mode;

    /**
     * Constructs a ScannedAttribute whose string fields are stored exactly as
     * supplied (mixed case).
     */
    public ScannedAttribute() {
        this(CaseMode.MIXED);
    }

    /**
     * Constructs a ScannedAttribute with the specified case mode. The case
     * mode converts string fields (except summary and description) to the
     * specified case on assignment; mixed case leaves them untouched.
     *
     * @param mode
     *            the case mode
     */
    public ScannedAttribute(CaseMode mode) {
        this.mode = mode;
        this.tags = new ArrayList<>();
        this.localSources = new ArrayList<>();
        this.attributeCodes = new ArrayList<>();
        this.extnProps = new HashMap<>();
    }

    @XmlElement(name="name")
    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = mode.convert(name);
    }

    @XmlElement(name="position")
    public Integer getSeqNo() {
        return this.seqNo;
    }

    public void setSeqNo(Integer seqNo) {
        this.seqNo = seqNo;
    }

    @XmlElement(name="logical-name")
    public String getLogicalName() {
        return this.logicalName;
    }

    public void setLogicalName(String logicalName) {
        this.logicalName = mode.convert(logicalName);
    }

    @XmlElement(name="data-type")
    public String getDataType() {
        return this.dataType;
    }

    public void setDataType(String dataType) {
        this.dataType = mode.convert(dataType);
    }

    @XmlElement(name="common-type")
    public String getCommonType() {
        return this.commonType;
    }

    public void setCommonType(String commonType) {
        this.commonType = mode.convert(commonType);
    }

    @XmlElement(name="key")
    public boolean isKey() {
        return this.key;
    }

    public void setKey(boolean key) {
        this.key = key;
    }

    @XmlElement(name="parent-attribute")
    public String getParentAttribute() {
        return this.parentAttribute;
    }

    public void setParentAttribute(String parentAttribute) {
        this.parentAttribute = mode.convert(parentAttribute);
    }

    @XmlElement(name="required")
    public boolean isRequired() {
        return this.required;
    }

    public void setRequired(boolean required) {
        this.required = required;
    }

    @XmlElement(name="default-value")
    public String getDefaultValue() {
        return this.defaultValue;
    }

    public void setDefaultValue(String defaultValue) {
        this.defaultValue = mode.convert(defaultValue);
    }

    @XmlElement(name="external-sources")
    public String getSource() {
        return this.source;
    }

    public void setSource(String source) {
        this.source = mode.convert(source);
    }

    @XmlElementWrapper(name="local-sources")
    @XmlElement(name="local-source")
    public List<ScannedAttributeSource> getLocalSources() {
        return this.localSources;
    }

    public void setLocalSources(List<ScannedAttributeSource> localSources) {
        this.localSources = localSources;
    }

    // Summary and description are free text: no case conversion.

    @XmlElement(name="summary")
    public String getSummary() {
        return this.summary;
    }

    public void setSummary(String summary) {
        this.summary = summary;
    }

    @XmlElement(name="description")
    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    @XmlElement(name="tags")
    public List<String> getTags() {
        return this.tags;
    }

    public void setTags(List<String> tags) {
        this.tags = tags;
    }

    @XmlJavaTypeAdapter(ExtendedPropertyListAdapter.class)
    public Map<String, String> getExtendedProperties() {
        return this.extnProps;
    }

    public void setExtendedProperties(Map<String, String> extnProps) {
        this.extnProps = extnProps;
    }

    @XmlElementWrapper(name="codes")
    @XmlElement(name="code")
    public List<ScannedAttributeCode> getCodes() {
        return this.attributeCodes;
    }

    public void setCodes(List<ScannedAttributeCode> codes) {
        this.attributeCodes = codes;
    }

    /** Appends a single code value to this attribute's code list. */
    public void addCode(ScannedAttributeCode code) {
        this.attributeCodes.add(code);
    }

    /** Hash is based on name only, mirroring equals(). */
    @Override
    public int hashCode() {
        // Objects.hash(name) == 31 + name.hashCode() — identical to the
        // previous manual prime-based computation.
        return Objects.hash(name);
    }

    /**
     * Attributes are equal iff their names are equal; exact class match is
     * required (getClass(), not instanceof) — see the class comment.
     */
    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (object == null || getClass() != object.getClass()) {
            return false;
        }
        ScannedAttribute other = (ScannedAttribute) object;
        return Objects.equals(this.name, other.name);
    }
}
|
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.process;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.Map;
/**
* @author Roman.Chernyatchik, oleg
*/
public class CommandLineArgumentsProvider {

    /**
     * @return Commands to execute (one command corresponds to one add argument)
     */
    public String[] getArguments() {
        return ArrayUtil.EMPTY_STRING_ARRAY;
    }

    /** @return whether the parent process environment should be inherited. */
    public boolean passParentEnvs() {
        return false;
    }

    /** @return extra environment variables for the process, or an empty map. */
    @Nullable
    public Map<String, String> getAdditionalEnvs() {
        return Collections.emptyMap();
    }

    /** @return the full command line as a single space-joined string. */
    public String getCommandLineString() {
        return toCommandLine(getArguments());
    }

    /**
     * Joins {@code commands} into one command line, converting the executable
     * (first element) to the platform-dependent path form. Note: mutates the
     * first element of the supplied array in place, as before.
     */
    public static String toCommandLine(String... commands) {
        if (commands.length == 0) {
            return "";
        }
        commands[0] = FileUtil.toSystemDependentName(commands[0]);
        return StringUtil.join(commands, " ");
    }
}
|
package com.miotech.kun.metadata.common.utils;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.miotech.kun.commons.utils.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * JSON (de)serialization helpers built on a single shared Jackson
 * {@link ObjectMapper}. All methods wrap checked Jackson exceptions via
 * {@code ExceptionUtils.wrapIfChecked} after logging.
 */
public class JSONUtils {

    /** Non-instantiable utility class. */
    private JSONUtils() {
    }

    private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class);

    // Shared, thread-safe mapper: JSR-310 time types registered, unknown
    // properties ignored on read, nulls omitted on write.
    private static final ObjectMapper objectMapper = new ObjectMapper()
            .registerModule(new JavaTimeModule())
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
            .setSerializationInclusion(JsonInclude.Include.NON_NULL);

    /** Serializes {@code obj} to JSON using an explicit (possibly generic) type. */
    public static <T> String toJsonString(T obj, TypeReference<T> typeRef) {
        try {
            return objectMapper.writerFor(typeRef).writeValueAsString(obj);
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when converting object: {} to JSON string: ", obj, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Serializes {@code obj} to JSON using its runtime type. */
    public static <T> String toJsonString(T obj) {
        try {
            return objectMapper.writeValueAsString(obj);
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when converting object: {} to JSON string: ", obj, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Deserializes a JSON string to an instance of {@code valueType}. */
    public static <T> T jsonToObject(String str, Class<T> valueType) {
        try {
            return objectMapper.readValue(str, valueType);
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when converting JSON: {} to object: ", str, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Deserializes a JSON string to a (possibly generic) target type. */
    public static <T> T jsonToObject(String str, TypeReference<T> typeRef) {
        try {
            return objectMapper.readValue(str, typeRef);
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when converting JSON: {} to object: ", str, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Converts {@code obj} to {@code valueType} by a serialize/deserialize round trip. */
    public static <T> T jsonToObject(Object obj, Class<T> valueType) {
        try {
            String json = toJsonString(obj);
            return jsonToObject(json, valueType);
        } catch (Exception e) {
            logger.error("Error occurs when converting object: {} to object: ", obj, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /**
     * Deserializes a JSON array into a {@code List<T>} of the given element type.
     *
     * <p>FIX: the previous implementation used {@code new TypeReference<List<T>>() {}},
     * which — because {@code T} is erased at this call site — ignored
     * {@code valueType} entirely and produced a {@code List} of
     * {@code LinkedHashMap}s for bean element types. A collection type built
     * explicitly from {@code valueType} is used instead.
     */
    public static <T> List<T> jsonArrayToList(String str, Class<T> valueType) {
        try {
            return objectMapper.readValue(
                    str,
                    objectMapper.getTypeFactory().constructCollectionType(List.class, valueType)
            );
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when converting JSON: {} to List: ", str, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Deserializes JSON read from {@code inputStream} to an instance of {@code valueType}. */
    public static <T> T jsonToObject(InputStream inputStream, Class<T> valueType) {
        try {
            return objectMapper.readValue(inputStream, valueType);
        } catch (IOException e) {
            logger.error("Error occurs when convert inputStream to Object: ", e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Deserializes a JSON object string into a typed {@code HashMap}. */
    public static <K, V> Map<K, V> jsonStringToMap(String jsonStr, Class<K> keyType, Class<V> valueType) {
        try {
            return objectMapper.readValue(
                    jsonStr,
                    objectMapper.getTypeFactory().constructMapType(HashMap.class, keyType, valueType)
            );
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when convert jsonStr: {} to Map: ", jsonStr, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Converts an object into a typed map via a serialize/deserialize round trip. */
    public static <K, V> Map<K, V> objToMap(Object obj, Class<K> keyType, Class<V> valueType) {
        try {
            String jsonStr = toJsonString(obj);
            return objectMapper.readValue(
                    jsonStr,
                    objectMapper.getTypeFactory().constructMapType(HashMap.class, keyType, valueType)
            );
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when convert object: {} to Map: ", obj, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Parses a JSON string into a Jackson tree. */
    public static JsonNode stringToJson(String jsonStr) {
        try {
            return objectMapper.readTree(jsonStr);
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when convert jsonStr: {} to JsonNode: ", jsonStr, e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /**
     * Converts a Jackson tree node to an instance of {@code valueType}.
     * (Name keeps its non-standard capitalization for caller compatibility.)
     */
    public static <T> T JsonNodeToObject(JsonNode jsonNode, Class<T> valueType) {
        try {
            return objectMapper.treeToValue(jsonNode, valueType);
        } catch (JsonProcessingException e) {
            logger.error("Error occurs when convert jsonNode to Object: ", e);
            throw ExceptionUtils.wrapIfChecked(e);
        }
    }

    /** Converts any object to a Jackson tree node. */
    public static JsonNode objectToJsonNode(Object obj) {
        return objectMapper.convertValue(obj, JsonNode.class);
    }
}
|
/*
* Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tencentcloudapi.tiia.v20190529.models;
import com.tencentcloudapi.common.AbstractModel;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import java.util.HashMap;
/**
 * Request object for the SearchImage API: identifies the gallery to search,
 * the query image (by URL or base64), and optional paging, score-threshold,
 * tag-filter and subject-region constraints.
 */
public class SearchImageRequest extends AbstractModel {

    /**
     * Gallery (image group) name.
     */
    @SerializedName("GroupId")
    @Expose
    private String GroupId;

    /**
     * Image URL. The base64-encoded size of the image must not exceed 2 MB
     * and its resolution must not exceed 1920*1080. Either ImageUrl or
     * ImageBase64 must be provided; if both are set, only ImageUrl is used.
     * Images stored on Tencent Cloud give better download speed/stability.
     * PNG/JPG/JPEG/BMP are supported; GIF is not.
     */
    @SerializedName("ImageUrl")
    @Expose
    private String ImageUrl;

    /**
     * Image base64 data; the encoded size must not exceed 2 MB and the
     * resolution must not exceed 1920*1080. PNG/JPG/JPEG/BMP are supported;
     * GIF is not.
     */
    @SerializedName("ImageBase64")
    @Expose
    private String ImageBase64;

    /**
     * Only results whose Score exceeds MatchThreshold are returned.
     * Defaults to 0.
     */
    @SerializedName("MatchThreshold")
    @Expose
    private Long MatchThreshold;

    /**
     * Start offset; defaults to 0.
     */
    @SerializedName("Offset")
    @Expose
    private Long Offset;

    /**
     * Number of results to return; defaults to 10, maximum 100.
     */
    @SerializedName("Limit")
    @Expose
    private Long Limit;

    /**
     * Condition filter over the Tags submitted when images were added.
     * Supports >, >=, <, <=, =, !=; conditions may be joined with AND/OR.
     */
    @SerializedName("Filter")
    @Expose
    private String Filter;

    /**
     * Main subject region of the image.
     */
    @SerializedName("ImageRect")
    @Expose
    private ImageRect ImageRect;

    /**
     * Get the gallery (image group) name.
     * @return GroupId the gallery name
     */
    public String getGroupId() {
        return this.GroupId;
    }

    /**
     * Set the gallery (image group) name.
     * @param GroupId the gallery name
     */
    public void setGroupId(String GroupId) {
        this.GroupId = GroupId;
    }

    /**
     * Get the image URL (see field documentation for size/format limits).
     * @return ImageUrl the image URL
     */
    public String getImageUrl() {
        return this.ImageUrl;
    }

    /**
     * Set the image URL (see field documentation for size/format limits).
     * @param ImageUrl the image URL
     */
    public void setImageUrl(String ImageUrl) {
        this.ImageUrl = ImageUrl;
    }

    /**
     * Get the image base64 data (see field documentation for limits).
     * @return ImageBase64 the base64-encoded image data
     */
    public String getImageBase64() {
        return this.ImageBase64;
    }

    /**
     * Set the image base64 data (see field documentation for limits).
     * @param ImageBase64 the base64-encoded image data
     */
    public void setImageBase64(String ImageBase64) {
        this.ImageBase64 = ImageBase64;
    }

    /**
     * Get the minimum Score a result must exceed to be returned. Defaults to 0.
     * @return MatchThreshold the score threshold
     */
    public Long getMatchThreshold() {
        return this.MatchThreshold;
    }

    /**
     * Set the minimum Score a result must exceed to be returned. Defaults to 0.
     * @param MatchThreshold the score threshold
     */
    public void setMatchThreshold(Long MatchThreshold) {
        this.MatchThreshold = MatchThreshold;
    }

    /**
     * Get the start offset; defaults to 0.
     * @return Offset the start offset
     */
    public Long getOffset() {
        return this.Offset;
    }

    /**
     * Set the start offset; defaults to 0.
     * @param Offset the start offset
     */
    public void setOffset(Long Offset) {
        this.Offset = Offset;
    }

    /**
     * Get the number of results to return; defaults to 10, maximum 100.
     * @return Limit the result count
     */
    public Long getLimit() {
        return this.Limit;
    }

    /**
     * Set the number of results to return; defaults to 10, maximum 100.
     * @param Limit the result count
     */
    public void setLimit(Long Limit) {
        this.Limit = Limit;
    }

    /**
     * Get the Tags condition filter (supports >, >=, <, <=, =, != with AND/OR).
     * @return Filter the filter expression
     */
    public String getFilter() {
        return this.Filter;
    }

    /**
     * Set the Tags condition filter (supports >, >=, <, <=, =, != with AND/OR).
     * @param Filter the filter expression
     */
    public void setFilter(String Filter) {
        this.Filter = Filter;
    }

    /**
     * Get the main subject region of the image.
     * @return ImageRect the subject region
     */
    public ImageRect getImageRect() {
        return this.ImageRect;
    }

    /**
     * Set the main subject region of the image.
     * @param ImageRect the subject region
     */
    public void setImageRect(ImageRect ImageRect) {
        this.ImageRect = ImageRect;
    }

    public SearchImageRequest() {
    }

    /**
     * Copy constructor.
     *
     * NOTE: Any ambiguous key set via .set("AnyKey", "value") will be a shallow copy,
     * and any explicit key, i.e Foo, set via .setFoo("value") will be a deep copy.
     */
    public SearchImageRequest(SearchImageRequest source) {
        // String and Long are immutable, so plain assignment is a safe copy;
        // the previous new String(...)/new Long(...) calls were redundant, and
        // the boxed-primitive constructors are deprecated since Java 9.
        this.GroupId = source.GroupId;
        this.ImageUrl = source.ImageUrl;
        this.ImageBase64 = source.ImageBase64;
        this.MatchThreshold = source.MatchThreshold;
        this.Offset = source.Offset;
        this.Limit = source.Limit;
        this.Filter = source.Filter;
        // ImageRect is mutable, so keep the defensive deep copy.
        if (source.ImageRect != null) {
            this.ImageRect = new ImageRect(source.ImageRect);
        }
    }

    /**
     * Internal implementation, normal users should not use it.
     */
    public void toMap(HashMap<String, String> map, String prefix) {
        this.setParamSimple(map, prefix + "GroupId", this.GroupId);
        this.setParamSimple(map, prefix + "ImageUrl", this.ImageUrl);
        this.setParamSimple(map, prefix + "ImageBase64", this.ImageBase64);
        this.setParamSimple(map, prefix + "MatchThreshold", this.MatchThreshold);
        this.setParamSimple(map, prefix + "Offset", this.Offset);
        this.setParamSimple(map, prefix + "Limit", this.Limit);
        this.setParamSimple(map, prefix + "Filter", this.Filter);
        this.setParamObj(map, prefix + "ImageRect.", this.ImageRect);
    }
}
|
/**
* ProductTemplateAction.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.axis.v201605;
/**
 * Represents the actions that can be performed on product templates.
 *
 * Axis-generated base class: it has no fields of its own, so equality is
 * purely type-based, with cycle guards retained from the generated pattern.
 */
public abstract class ProductTemplateAction implements java.io.Serializable {
    public ProductTemplateAction() {
    }

    /** Cycle guard used by equals() to terminate on cyclic object graphs. */
    private java.lang.Object __equalsCalc = null;

    @Override
    public synchronized boolean equals(java.lang.Object obj) {
        // Null/identity checks first; the generated original tested instanceof
        // (which already rejects null) before an unreachable null check, and
        // performed a cast into an unused local.
        if (obj == null) return false;
        if (this == obj) return true;
        if (!(obj instanceof ProductTemplateAction)) return false;
        if (__equalsCalc != null) {
            // Re-entrant comparison on a cycle: equal only to the object we
            // are already in the middle of comparing against.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true;
        __equalsCalc = null;
        return _equals;
    }

    /** Cycle guard used by hashCode() to terminate on cyclic object graphs. */
    private boolean __hashCodeCalc = false;

    @Override
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            // Re-entrant call on a cycle contributes nothing to the hash.
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata (final: assigned once, mutated only in the static block)
    private static final org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ProductTemplateAction.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201605", "ProductTemplateAction"));
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
|
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.streamnative.pulsar.handlers.rocketmq.inner.namesvr;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.rocketmq.common.constant.PermName.PERM_READ;
import static org.apache.rocketmq.common.constant.PermName.PERM_WRITE;
import static org.apache.rocketmq.common.protocol.RequestCode.GET_ROUTEINTO_BY_TOPIC;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.netty.channel.ChannelHandlerContext;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import lombok.extern.slf4j.Slf4j;
import org.apache.logging.log4j.util.Strings;
import org.apache.pulsar.broker.loadbalance.impl.ModularLoadManagerImpl;
import org.apache.pulsar.broker.loadbalance.impl.ModularLoadManagerWrapper;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.policies.data.loadbalancer.AdvertisedListener;
import org.apache.pulsar.policies.data.loadbalancer.LocalBrokerData;
import org.apache.rocketmq.common.help.FAQUrl;
import org.apache.rocketmq.common.protocol.RequestCode;
import org.apache.rocketmq.common.protocol.ResponseCode;
import org.apache.rocketmq.common.protocol.body.ClusterInfo;
import org.apache.rocketmq.common.protocol.header.namesrv.GetRouteInfoRequestHeader;
import org.apache.rocketmq.common.protocol.route.BrokerData;
import org.apache.rocketmq.common.protocol.route.QueueData;
import org.apache.rocketmq.remoting.common.RemotingHelper;
import org.apache.rocketmq.remoting.netty.NettyRequestProcessor;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;
import org.streamnative.pulsar.handlers.rocketmq.RocketMQProtocolHandler;
import org.streamnative.pulsar.handlers.rocketmq.RocketMQServiceConfiguration;
import org.streamnative.pulsar.handlers.rocketmq.inner.RocketMQBrokerController;
import org.streamnative.pulsar.handlers.rocketmq.utils.PulsarUtil;
import org.streamnative.pulsar.handlers.rocketmq.utils.RocketMQTopic;
import org.testng.collections.Sets;
/**
 * Nameserver processor: answers RocketMQ name-server style requests (topic
 * route info, broker cluster info) on top of Pulsar broker/topic metadata.
 */
@Slf4j
public class NameserverProcessor implements NettyRequestProcessor {

    /** Matches "host:" followed by a port, so the port can be rewritten. */
    public static final Pattern BROKER_ADDER_PAT = Pattern.compile("([^/:]+:)(\\d+)");

    /** Shared source of randomness for picking an arbitrary active broker. */
    private static final Random RANDOM = new Random();

    /**
     * Differentiate the source network type of client requests according to
     * the local port the request arrived on (port -> listener name).
     */
    private static final Map<String, String> PORT_LISTENER_NAME_MAP = Maps.newHashMap();

    private final RocketMQBrokerController brokerController;
    private final RocketMQServiceConfiguration config;
    private final MQTopicManager mqTopicManager;
    /** RoP service port advertised to RocketMQ clients. */
    private final int servicePort;

    public NameserverProcessor(RocketMQBrokerController brokerController) {
        this.brokerController = brokerController;
        this.config = brokerController.getServerConfig();
        this.mqTopicManager = brokerController.getTopicConfigManager();
        this.servicePort = RocketMQProtocolHandler.getListenerPort(config.getRocketmqListeners());

        // rocketmqListenerPortMap is a comma-separated list of "port:listenerName".
        String rocketmqListenerPortMap = config.getRocketmqListenerPortMap();
        String[] parts = rocketmqListenerPortMap.split(",");
        for (String part : parts) {
            String[] arr = part.split(":");
            if (arr.length < 2) {
                // Previously a malformed entry threw ArrayIndexOutOfBoundsException
                // during broker startup; skip it with a warning instead.
                log.warn("Ignoring malformed rocketmqListenerPortMap entry [{}]", part);
                continue;
            }
            PORT_LISTENER_NAME_MAP.put(arr[0].trim(), arr[1].trim());
        }
    }

    @Override
    public RemotingCommand processRequest(ChannelHandlerContext ctx, RemotingCommand request)
            throws Exception {
        if (ctx != null) {
            log.debug("receive request, {} {} {}",
                    request.getCode(),
                    RemotingHelper.parseChannelRemoteAddr(ctx.channel()),
                    request);
        }
        // Only route-info and cluster-info requests are implemented; the other
        // codes deliberately fall through to null (unsupported).
        switch (request.getCode()) {
            case RequestCode.PUT_KV_CONFIG:
            case RequestCode.GET_KV_CONFIG:
            case RequestCode.DELETE_KV_CONFIG:
            case RequestCode.QUERY_DATA_VERSION:
                // TODO return queryBrokerTopicConfig(ctx, request);
            case RequestCode.REGISTER_BROKER:
            case RequestCode.UNREGISTER_BROKER:
            case GET_ROUTEINTO_BY_TOPIC:
                // TODO return this.getRouteInfoByTopic(ctx, request);
                return handleTopicMetadata(ctx, request);
            case RequestCode.GET_BROKER_CLUSTER_INFO:
                return this.getBrokerClusterInfo(ctx, request);
            case RequestCode.WIPE_WRITE_PERM_OF_BROKER:
            case RequestCode.GET_ALL_TOPIC_LIST_FROM_NAMESERVER:
                // return getAllTopicListFromNameserver(ctx, request);
            case RequestCode.DELETE_TOPIC_IN_NAMESRV:
                //return deleteTopicInNamesrv(ctx, request);
            case RequestCode.GET_KVLIST_BY_NAMESPACE:
            case RequestCode.GET_TOPICS_BY_CLUSTER:
                //return this.getTopicsByCluster(ctx, request);
            case RequestCode.GET_SYSTEM_TOPIC_LIST_FROM_NS:
                //return this.getSystemTopicListFromNs(ctx, request);
            case RequestCode.GET_UNIT_TOPIC_LIST:
                //return this.getUnitTopicList(ctx, request);
            case RequestCode.GET_HAS_UNIT_SUB_TOPIC_LIST:
                //return this.getHasUnitSubTopicList(ctx, request);
            case RequestCode.GET_HAS_UNIT_SUB_UNUNIT_TOPIC_LIST:
                //return this.getHasUnitSubUnUnitTopicList(ctx, request);
            case RequestCode.UPDATE_NAMESRV_CONFIG:
            case RequestCode.GET_NAMESRV_CONFIG:
            default:
                break;
        }
        return null;
    }

    @Override
    public boolean rejectRequest() {
        return false;
    }

    /**
     * Builds a RocketMQ topic-route response for the topic named in the
     * request. A request whose "topic" equals the cluster name is answered
     * with a random active broker (client compatibility when creating topics).
     *
     * @param ctx     channel context of the incoming request (used to derive
     *                the listener name from the local port)
     * @param request the GET_ROUTEINTO_BY_TOPIC remoting command
     * @return a response with route data, or TOPIC_NOT_EXIST on failure
     * @throws Exception if the request header cannot be decoded
     */
    protected RemotingCommand handleTopicMetadata(ChannelHandlerContext ctx, RemotingCommand request)
            throws Exception {
        checkNotNull(request);

        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        final GetRouteInfoRequestHeader requestHeader =
                (GetRouteInfoRequestHeader) request.decodeCommandCustomHeader(GetRouteInfoRequestHeader.class);

        RopTopicRouteData topicRouteData = new RopTopicRouteData();
        List<BrokerData> brokerDatas = new ArrayList<>();
        List<QueueData> queueDatas = new ArrayList<>();
        Map<Integer, String> partitionRouteInfos;
        topicRouteData.setBrokerDatas(brokerDatas);
        topicRouteData.setQueueDatas(queueDatas);

        String clusterName = config.getClusterName();
        // If the topic name and clusterName are the same, any node in the cluster is returned to the client.
        // Here to create a theme operation for compatibility with the client
        if (clusterName.equals(requestHeader.getTopic())) {
            try {
                PulsarAdmin adminClient = brokerController.getBrokerService().pulsar().getAdminClient();
                List<String> brokers = adminClient.brokers().getActiveBrokers(clusterName);
                String randomBroker = brokers.get(RANDOM.nextInt(brokers.size()));
                String rmqBrokerAddress = parseBrokerAddress(randomBroker, servicePort);

                BrokerData brokerData = new BrokerData();
                HashMap<Long, String> brokerAddrs = Maps.newHashMap();
                brokerAddrs.put(0L, rmqBrokerAddress);
                brokerData.setBrokerAddrs(brokerAddrs);
                brokerDatas.add(brokerData);

                byte[] content = topicRouteData.encode();
                response.setBody(content);
                response.setCode(ResponseCode.SUCCESS);
                response.setRemark(null);
                return response;
            } catch (Exception e) {
                log.error("Cluster [{}] get route info failed", clusterName, e);
                response.setCode(ResponseCode.SYSTEM_ERROR);
                response.setRemark(null);
                return response;
            }
        }

        String listenerName = getListenerName(ctx);
        // Obtain the specified topic according to the incoming request.
        String requestTopic = requestHeader.getTopic();
        if (Strings.isNotBlank(requestTopic)) {
            RocketMQTopic mqTopic = RocketMQTopic.getRocketMQDefaultTopic(requestTopic);
            Map<Integer, InetSocketAddress> topicBrokerAddr =
                    mqTopicManager.getTopicBrokerAddr(mqTopic.getPulsarTopicName(), Strings.EMPTY);
            partitionRouteInfos = Maps.newHashMapWithExpectedSize(topicBrokerAddr.size());
            topicRouteData.setPartitionRouteInfos(partitionRouteInfos);
            try {
                if (!topicBrokerAddr.isEmpty()) {
                    // Tracks hosts already emitted so each broker contributes a
                    // single BrokerData/QueueData pair even when it owns
                    // several partitions.
                    Map<String, String> brokerNames = Maps.newHashMap();
                    topicBrokerAddr.forEach((partition, addr) -> {
                        String brokerName = addr.getHostName();
                        if (!brokerNames.containsKey(brokerName)) {
                            String advertisAddress = getBrokerAddressByListenerName(brokerName, listenerName);
                            HashMap<Long, String> brokerAddrs = new HashMap<>(1);
                            brokerAddrs.put(0L, advertisAddress);
                            brokerDatas.add(new BrokerData(clusterName, brokerName, brokerAddrs));

                            QueueData queueData = new QueueData();
                            queueData.setBrokerName(brokerName);
                            queueData.setReadQueueNums(topicBrokerAddr.size());
                            queueData.setWriteQueueNums(topicBrokerAddr.size());
                            queueData.setPerm(PERM_WRITE | PERM_READ);
                            queueDatas.add(queueData);

                            brokerNames.put(addr.getHostName(), advertisAddress);
                        }
                        partitionRouteInfos.put(partition, brokerName);
                    });

                    byte[] content = topicRouteData.encode();
                    response.setBody(content);
                    response.setCode(ResponseCode.SUCCESS);
                    response.setRemark(null);
                    return response;
                }
            } catch (Exception ex) {
                log.warn("fetch topic address of topic[{}] error.", requestTopic, ex);
            }
        }

        response.setCode(ResponseCode.TOPIC_NOT_EXIST);
        response.setRemark("No topic route info in name server for the topic: " + requestHeader.getTopic()
                + FAQUrl.suggestTodo(FAQUrl.APPLY_TOPIC_URL));
        return response;
    }

    /**
     * Rewrites the port of a Pulsar broker address (e.g. pulsar://host:6650)
     * to the given RocketMQ service port.
     *
     * @param brokerAddress the Pulsar broker address, may be null
     * @param port          the port to substitute into the address
     * @return the rewritten address, or "" when brokerAddress is null
     */
    public String parseBrokerAddress(String brokerAddress, int port) {
        // pulsar://localhost:6650
        if (null == brokerAddress) {
            log.error("The brokerAddress is null, please check.");
            return "";
        }
        Matcher matcher = BROKER_ADDER_PAT.matcher(brokerAddress);
        String result = brokerAddress;
        if (matcher.find()) {
            // Use the supplied port parameter; the previous code ignored it
            // and always substituted the servicePort field. All current
            // callers pass servicePort, so behavior is unchanged for them.
            result = matcher.group(1) + port;
        }
        return result;
    }

    /**
     * Get cluster info according to cluster name.
     */
    private RemotingCommand getBrokerClusterInfo(ChannelHandlerContext ctx, RemotingCommand request) {
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        String clusterName = config.getClusterName();
        try {
            PulsarAdmin adminClient = brokerController.getBrokerService().pulsar().getAdminClient();
            List<String> brokers = adminClient.brokers().getActiveBrokers(clusterName);

            HashMap<String, BrokerData> brokerAddrTable = Maps.newHashMap();
            Set<String> brokerNames = Sets.newHashSet();
            for (String broker : brokers) {
                String rmqBrokerAddress = parseBrokerAddress(broker, servicePort);
                String brokerName = PulsarUtil.getBrokerHost(broker);
                HashMap<Long, String> brokerAddrs = Maps.newHashMap();
                brokerAddrs.put(0L, rmqBrokerAddress);
                brokerAddrTable.put(brokerName, new BrokerData(clusterName, brokerName, brokerAddrs));
                brokerNames.add(brokerName);
            }

            HashMap<String, Set<String>> clusterAddrTable = Maps.newHashMap();
            clusterAddrTable.put(clusterName, brokerNames);

            ClusterInfo clusterInfoSerializeWrapper = new ClusterInfo();
            clusterInfoSerializeWrapper.setBrokerAddrTable(brokerAddrTable);
            clusterInfoSerializeWrapper.setClusterAddrTable(clusterAddrTable);
            response.setBody(clusterInfoSerializeWrapper.encode());
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
            return response;
        } catch (Exception e) {
            log.error("ClusterName [{}] getBrokerClusterInfo failed", clusterName, e);
        }
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(null);
        return response;
    }

    /**
     * Analyze the request local receiving port, and identify the client network type according to the
     * port [type mapping relationship is specified by the configuration item rocketmqListenerPortMap].
     */
    private String getListenerName(ChannelHandlerContext ctx) {
        String localAddress = ctx.channel().localAddress().toString();
        String localPort = localAddress.substring(localAddress.indexOf(":") + 1);
        return PORT_LISTENER_NAME_MAP.get(localPort);
    }

    /**
     * Resolves the address to advertise for a broker host under the given
     * listener name, falling back to host:servicePort whenever the load
     * manager has no data for the broker or listener.
     */
    private String getBrokerAddressByListenerName(String host, String listenerName) {
        ModularLoadManagerImpl modularLoadManager = getModularLoadManagerImpl();
        List<String> brokers = Lists.newArrayList(modularLoadManager.getAvailableBrokers());
        if (brokers.isEmpty()) {
            log.info("GetBrokerAddressByListenerName not found broker");
            return Joiner.on(":").join(host, servicePort);
        }
        // All brokers share the same web-service port; borrow it from the
        // first entry to build the lookup key for this host.
        String brokerAddress = brokers.get(0);
        String port = brokerAddress.substring(brokerAddress.indexOf(":") + 1).trim();
        brokerAddress = Joiner.on(":").join(host, port);
        LocalBrokerData localBrokerData = modularLoadManager.getBrokerLocalData(brokerAddress);
        if (localBrokerData == null) {
            log.info("GetBrokerAddressByListenerName not found localBrokerData, host: {}", host);
            return Joiner.on(":").join(host, servicePort);
        }
        AdvertisedListener advertisedListener = localBrokerData.getAdvertisedListeners().get(listenerName);
        if (advertisedListener == null) {
            log.info("GetBrokerAddressByListenerName not found advertisedListener, listenerName: {}", listenerName);
            return Joiner.on(":").join(host, servicePort);
        }
        // Literal replacement; replaceAll() compiled the scheme as a regex.
        return advertisedListener.getBrokerServiceUrl().toString().replace("pulsar://", "");
    }

    private ModularLoadManagerImpl getModularLoadManagerImpl() {
        return (ModularLoadManagerImpl) ((ModularLoadManagerWrapper) this.brokerController.getBrokerService()
                .getPulsar().getLoadManager().get()).getLoadManager();
    }
}
|
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2019_06_01;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.SubResource;
/**
 * Describes the delegation of a subnet to a service.
 */
@JsonFlatten
public class Delegation extends SubResource {
    /**
     * Name of the service the subnet is delegated to, e.g.
     * "Microsoft.Sql/servers".
     */
    @JsonProperty(value = "properties.serviceName")
    private String serviceName;

    /**
     * Actions the delegated service is permitted to perform.
     */
    @JsonProperty(value = "properties.actions")
    private List<String> actions;

    /**
     * Provisioning state of the resource (server-populated, write-only on
     * the wire so it is never sent back).
     */
    @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY)
    private String provisioningState;

    /**
     * Resource name, unique within the subnet; usable to address the resource.
     */
    @JsonProperty(value = "name")
    private String name;

    /**
     * Read-only string that changes whenever the resource is updated.
     */
    @JsonProperty(value = "etag")
    private String etag;

    /**
     * Returns the delegated service name (e.g. Microsoft.Sql/servers).
     *
     * @return the serviceName value
     */
    public String serviceName() {
        return serviceName;
    }

    /**
     * Sets the delegated service name (e.g. Microsoft.Sql/servers).
     *
     * @param serviceName the serviceName value to set
     * @return the Delegation object itself.
     */
    public Delegation withServiceName(String serviceName) {
        this.serviceName = serviceName;
        return this;
    }

    /**
     * Returns the actions permitted to the delegated service.
     *
     * @return the actions value
     */
    public List<String> actions() {
        return actions;
    }

    /**
     * Sets the actions permitted to the delegated service.
     *
     * @param actions the actions value to set
     * @return the Delegation object itself.
     */
    public Delegation withActions(List<String> actions) {
        this.actions = actions;
        return this;
    }

    /**
     * Returns the provisioning state of the resource.
     *
     * @return the provisioningState value
     */
    public String provisioningState() {
        return provisioningState;
    }

    /**
     * Returns the resource name, unique within the subnet.
     *
     * @return the name value
     */
    public String name() {
        return name;
    }

    /**
     * Sets the resource name, unique within the subnet.
     *
     * @param name the name value to set
     * @return the Delegation object itself.
     */
    public Delegation withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Returns the opaque version string that changes on every update.
     *
     * @return the etag value
     */
    public String etag() {
        return etag;
    }

    /**
     * Sets the opaque version string that changes on every update.
     *
     * @param etag the etag value to set
     * @return the Delegation object itself.
     */
    public Delegation withEtag(String etag) {
        this.etag = etag;
        return this;
    }
}
|
package com.qingyun.mvpretrofitrx.mvp.presenter;
import android.content.Context;
import com.qingyun.mvpretrofitrx.mvp.contract.BusinessDetailContact;
import com.qingyun.mvpretrofitrx.mvp.contract.CoinContact;
import com.qingyun.mvpretrofitrx.mvp.entity.BusinessDetail;
import com.qingyun.mvpretrofitrx.mvp.entity.Wallet;
import com.qingyun.mvpretrofitrx.mvp.model.BusinessDetailModel;
import com.qingyun.mvpretrofitrx.mvp.model.CoinModel;
import com.qingyun.mvpretrofitrx.mvp.progress.ObserverResponseListener;
import java.util.List;
/**
 * MVP presenter that loads the detail of a single business record via
 * {@code BusinessDetailModel} and pushes the result to the bound view.
 */
public class BusinessDetailPresenter extends BusinessDetailContact.Presenter {
    // Model performing the actual data request.
    private BusinessDetailModel model;
    // NOTE(review): holding a Context in a presenter can leak an Activity if
    // the presenter outlives it — confirm an application context is passed
    // in, or that the presenter is released with the Activity.
    private Context context;
    public BusinessDetailPresenter(Context context) {
        this.model = new BusinessDetailModel();
        this.context = context;
    }
    @Override
    public void getBusinessDetail(String address, String glod, String id) {
        // NOTE(review): getView() is called unguarded here for bindLifecycle();
        // if the view can already be detached at this point this NPEs — verify
        // against the base Presenter's attach/detach contract.
        model.getBusinessDetail(context,address,glod,id,getView().bindLifecycle(), new ObserverResponseListener<BusinessDetail>() {
            @Override
            public void onNext(BusinessDetail businessDetail) {
                // Guard: the view may have been detached before the response arrived.
                if(getView() != null){
                    getView().getBusinessDetailSuccess(businessDetail);
                }
            }
            @Override
            public void onError(String e) {
                // NOTE(review): errors are silently swallowed, so the view never
                // learns the request failed — confirm whether the view contract
                // provides a failure callback that should be invoked here.
            }
        });
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.qp.physical.sys;
import java.util.ArrayList;
import java.util.List;
import org.apache.iotdb.db.qp.logical.Operator;
import org.apache.iotdb.db.qp.logical.sys.PropertyOperator;
import org.apache.iotdb.db.qp.physical.PhysicalPlan;
import org.apache.iotdb.tsfile.read.common.Path;
/**
 * Physical plan describing a property manipulation (the concrete operation
 * is identified by the {@link PropertyOperator.PropertyType}).
 */
public class PropertyPlan extends PhysicalPlan {

  private final PropertyOperator.PropertyType propertyType;
  // Both paths are optional: either may be null, and getPaths() skips nulls.
  private final Path propertyPath;
  private final Path metadataPath;

  /**
   * Constructs a property plan.
   *
   * @param propertyType the kind of property manipulation to perform
   * @param propertyPath the property path, may be null
   * @param metadataPath the metadata path, may be null
   */
  public PropertyPlan(PropertyOperator.PropertyType propertyType, Path propertyPath,
      Path metadataPath) {
    super(false, Operator.OperatorType.PROPERTY);
    this.propertyType = propertyType;
    this.propertyPath = propertyPath;
    this.metadataPath = metadataPath;
  }

  public Path getPropertyPath() {
    return propertyPath;
  }

  public Path getMetadataPath() {
    return metadataPath;
  }

  public PropertyOperator.PropertyType getPropertyType() {
    return propertyType;
  }

  @Override
  public String toString() {
    // Plain concatenation tolerates a null propertyPath (prints "null");
    // the previous explicit propertyPath.toString() threw NPE even though
    // getPaths() treats both paths as optional.
    return "propertyPath: " + propertyPath + "\nmetadataPath: " + metadataPath
        + "\npropertyType: "
        + propertyType;
  }

  @Override
  public List<Path> getPaths() {
    List<Path> ret = new ArrayList<>();
    if (metadataPath != null) {
      ret.add(metadataPath);
    }
    if (propertyPath != null) {
      ret.add(propertyPath);
    }
    return ret;
  }
}
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2019 Lachlan Dowding
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package permafrost.tundra.math.gauss;
import com.wm.data.IData;
import com.wm.data.IDataCursor;
import com.wm.data.IDataUtil;
import java.text.NumberFormat;
/**
* Provides service statistics collection and normal distribution estimation.
*/
public class ServiceEstimator extends ConcurrentEstimator {
    /**
     * Represents a single sample: one measured service invocation, carrying
     * its duration (the inherited value) and a success flag.
     */
    public static class Sample extends ConcurrentEstimator.Sample {
        /**
         * Whether this sample was a successful service invocation.
         * Set once in the constructor and never mutated by this class.
         */
        protected boolean success;
        /**
         * Creates a new Sample.
         *
         * @param success Whether this sample was a successful service invocation.
         * @param value The duration of the service invocation.
         */
        public Sample(boolean success, double value) {
            super(value);
            this.success = success;
        }
        /**
         * Returns whether this sample was a successful service invocation.
         * @return whether this sample was a successful service invocation.
         */
        public boolean getSuccess() {
            return success;
        }
    }
    /**
     * Represents the results of this estimator: the inherited distribution
     * statistics plus success/failure invocation counts.
     */
    public class Results extends Estimator.Results {
        /**
         * The name of the service the collected statistics relate to.
         * NOTE(review): never assigned in this class — the subject passed to
         * the constructor is stored by the superclass instead; confirm this
         * field is still needed.
         */
        protected String service;
        /**
         * The count of successful and failed invocations.
         */
        protected long successes = 0, failures = 0;
        /**
         * Constructs a new Results object.
         *
         * @param subject The subject or description of what was estimated.
         * @param unit The unit of measurement related to the measured samples.
         * @param count The number of samples measured.
         * @param successes The number of successful invocations (those which did not throw an exception).
         * @param failures The number of failed invocations (those which did throw an exception).
         * @param mean The measured mean value.
         * @param sq The measured square value.
         * @param minimum The measured minimum value.
         * @param maximum The measured maximum value.
         * @param cumulative The measured cumulative value.
         */
        public Results(String subject, String unit, long count, long successes, long failures, double mean, double sq, double minimum, double maximum, double cumulative) {
            super(subject, unit, count, mean, sq, minimum, maximum, cumulative);
            this.successes = successes;
            this.failures = failures;
        }
        /**
         * Returns the count of successful invocations of the related service since sampling started.
         * @return the count of successful invocations of the related service since sampling started.
         */
        public long getSuccesses() {
            return successes;
        }
        /**
         * Returns the count of failed invocations of the related service since sampling started.
         * @return the count of failed invocations of the related service since sampling started.
         */
        public long getFailures() {
            return failures;
        }
        /**
         * Returns a string-based representation of the mean, standard deviation and number of samples for this estimator.
         * @return a string-based representation of this estimator.
         */
        @Override
        public String toString() {
            String output;
            String subject = getSubject();
            String unit = getUnit();
            double mean = getMean();
            double standardDeviation = getStandardDeviation();
            double minimum = getMinimum();
            double maximum = getMaximum();
            double cumulative = getCumulative();
            long count = getCount();
            long successes = getSuccesses();
            long failures = getFailures();
            // Two format variants: with a unit suffix after each measurement,
            // or without when no unit was configured.
            if (unit == null) {
                output = String.format("subject = %s, average = %.9f, standard deviation = %.9f, minimum = %.9f, maximum = %.9f, cumulative = %.9f, successes = %d, failures = %d, total = %d", subject, mean, standardDeviation, minimum, maximum, cumulative, successes, failures, count);
            } else {
                output = String.format("subject = %s, average = %.9f %s, standard deviation = %.9f %s, minimum = %.9f %s, maximum = %.9f %s, cumulative = %.9f %s, successes = %d, failures = %d, total = %d", subject, mean, unit, standardDeviation, unit, minimum, unit, maximum, unit, cumulative, unit, successes, failures, count);
            }
            return output;
        }
        /**
         * Returns an IData representation of this object: the superclass
         * document prefixed with a description and with the aggregate "count"
         * keys replaced by a successes/failures/total breakdown.
         * @return An IData representation of this object.
         */
        @Override
        public IData getIData() {
            NumberFormat integerFormat = NumberFormat.getIntegerInstance();
            long count = getCount();
            long successes = getSuccesses();
            long failures = getFailures();
            String description = toString();
            IData output = super.getIData();
            IDataCursor cursor = output.getCursor();
            try {
                // First cursor: position at the start so "description" leads
                // the document, then re-acquire a fresh cursor for appends.
                cursor.insertBefore("description", description);
                cursor.destroy();
                cursor = output.getCursor();
                cursor.insertAfter("count.successes", successes);
                cursor.insertAfter("count.successes.formatted", integerFormat.format(successes));
                cursor.insertAfter("count.failures", failures);
                cursor.insertAfter("count.failures.formatted", integerFormat.format(failures));
                cursor.insertAfter("count.total", count);
                cursor.insertAfter("count.total.formatted", integerFormat.format(count));
                // Remove the superclass's aggregate keys, superseded by the
                // breakdown inserted above.
                IDataUtil.remove(cursor, "count");
                IDataUtil.remove(cursor, "count.formatted");
            } finally {
                cursor.destroy();
            }
            return output;
        }
    }
/**
 * The count of successful and failed invocations accumulated since the
 * last {@link #reset()}.
 */
protected long successes = 0, failures = 0;
/**
 * Constructs a new estimator object.
 *
 * @param service The name of the service the collected statistics relate to.
 * @param unit    The unit of measurement related to the measured samples.
 */
public ServiceEstimator(String service, String unit) {
    super(service, unit);
}
/**
 * Resets the estimator back to a set of zero samples, including the
 * success and failure counters.
 */
public void reset() {
    // Write lock guards both the local counters and the superclass state so
    // no reader observes a half-reset estimator.
    lock.writeLock().lock();
    try {
        successes = 0;
        failures = 0;
        super.reset();
    } finally {
        lock.writeLock().unlock();
    }
}
/**
 * Adds a new sample to the set of samples used for estimating the standard deviation.
 * Success/failure classification of the sample is deferred until {@link #quiesce()}.
 *
 * @param sample The sample to be added.
 */
public void add(Sample sample) {
    super.add(sample);
}
/**
 * Ensure all calculations are up to date, must be called once finished
 * collecting samples. Drains queued samples into the aggregate statistics,
 * classifying service samples as successes or failures along the way.
 */
@Override
protected void quiesce() {
    int size = samples.size(); // snapshot size, as it can change due to other threads
    for (int i = 0; i < size; i++) {
        ConcurrentEstimator.Sample sample = samples.poll();
        if (sample == null) {
            break; // exit loop if there are no more samples
        } else {
            // Lock per sample (rather than around the whole loop),
            // presumably to keep write-lock hold times short while other
            // threads continue adding samples.
            lock.writeLock().lock();
            try {
                // Only service-specific samples carry a success flag;
                // plain samples contribute to the statistics alone.
                if (sample instanceof Sample) {
                    if (((Sample)sample).getSuccess()) {
                        successes++;
                    } else {
                        failures++;
                    }
                }
                super.add(sample.getValue());
            } finally {
                lock.writeLock().unlock();
            }
        }
    }
}
/**
 * Returns a snapshot of the statistics collected so far, including the
 * success and failure counts for the related service.
 *
 * @return a Results snapshot of the collected statistics.
 */
@Override
public Results getResults() {
    // Fold any queued samples into the aggregates before taking the snapshot.
    quiesce();
    lock.readLock().lock();
    try {
        return new Results(subject, unit, count, successes, failures, mean, sq, minimum, maximum, cumulative);
    } finally {
        lock.readLock().unlock();
    }
}
}
|
/**
* Copyright 2005-2016 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.itests.paxexam.support;
import java.util.concurrent.Callable;
/**
 * Polls a boolean condition until it holds or a time budget is exhausted,
 * re-evaluating every two seconds. Exceptions thrown by the condition are
 * tolerated while polling; only an exception from the final round is
 * propagated to the caller.
 */
public class WaitForConditionTask implements Callable<Boolean> {

    /** Maximum time, in milliseconds, to keep polling the condition. */
    private final Long timeOut;

    /** The condition evaluated on every polling round. */
    private final Callable<Boolean> condition;

    public WaitForConditionTask(Callable<Boolean> condition, Long timeOut) {
        this.timeOut = timeOut;
        this.condition = condition;
    }

    /**
     * Polls the condition until it returns {@code true} or the time budget runs out.
     *
     * @return {@code true} if the condition was satisfied within the budget,
     *         {@code false} otherwise
     * @throws Exception the exception thrown by the condition on the last
     *         polling round, if any, or {@link InterruptedException} if the
     *         sleep between rounds is interrupted
     */
    @Override
    public Boolean call() throws Exception {
        boolean satisfied = false;
        Exception lastFailure = null;
        long elapsed = 0L;
        while (!satisfied && elapsed < timeOut) {
            // Forget any earlier failure: only the last round's outcome matters.
            lastFailure = null;
            try {
                satisfied = condition.call();
            } catch (Exception e) {
                lastFailure = e;
            }
            if (!satisfied) {
                Thread.sleep(2000L);
            }
            elapsed += 2000L;
        }
        if (lastFailure != null) {
            throw lastFailure;
        }
        return satisfied;
    }
}
|
package com.sixt.service.testrpcclasses;
import com.google.protobuf.Message;
// Container for message types used by RPC client tests.
public class TestService2 {
    // Marker base type for test requests; abstract because the protobuf
    // Message interface declares many methods that concrete test doubles
    // must implement.
    // NOTE(review): this is a non-static inner class, so subclasses need an
    // enclosing TestService2 instance -- consider making it static; confirm
    // no test relies on the current form.
    public abstract class TestServiceRequest implements Message {
    }
}
|
// Auto-generated-style regression test: pins the observable state of a freshly
// constructed Player after setMoney(0).
public void test31() throws Throwable {
    // Constructor arguments: presumably id, two addresses, picture id, and one
    // more numeric field -- TODO confirm against Player's constructor.
    Player player0 = new Player(0, "0.0.0.0", "0.0.0.0", 4919, 0);
    player0.setMoney(0);
    // 4919 was the fourth constructor argument, so it round-trips as the picture id.
    assertEquals(4919, player0.getPictureId());
    assertFalse(player0.isDead());
    assertEquals(0.0F, player0.getMoney(), 0.01F);
    // 10.0F is not a constructor argument, so it is presumably Player's
    // default starting x coordinate -- verify against the Player class.
    assertEquals(10.0F, player0.getX(), 0.01F);
    assertTrue(player0.isConnected());
    assertEquals(0L, player0.getTimeOfDeath());
    assertEquals(0.0F, player0.getY(), 0.01F);
    assertEquals(0, player0.getStrength());
}
|
package org.hswebframework.web.service.form.simple.dict;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.hswebframework.ezorm.core.ValueConverter;
import org.hswebframework.web.dict.EnumDict;
import org.springframework.util.StringUtils;
import java.util.*;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Converts between user-facing dictionary option values and their stored
 * (database) representation. Supports single-select values, multi-select
 * values joined into a string, and multi-select values encoded as a numeric
 * bit mask.
 *
 * @param <T> the enum dictionary type whose options are being converted
 */
@Slf4j
public class EnumDictValueConverter<T extends EnumDict> implements ValueConverter {
    // Supplies the complete set of dictionary options; consulted on every lookup.
    protected Supplier<List<T>> allOptionSupplier;
    // Fallback for values that match no option: log a warning and map to null.
    protected Function<Object, T> orElseGet = v -> {
        log.warn("选项[{}]在字典中不存在.全部选项:[{}]", v, allOptionSupplier.get());
        return null;
    };
    // Joins several option values into one stored string (comma-separated by default).
    @Getter
    @Setter
    protected Function<Stream<String>, String> multiValueConvert = stream -> stream.collect(Collectors.joining(","));
    // Splits a stored string back into raw values.
    // NOTE(review): the character class "[, ; ;]" splits on comma, space and
    // semicolon (one semicolon may be a full-width variant) -- confirm the
    // intended separator set.
    @Setter
    @Getter
    protected Function<String, List<Object>> splitter = str -> Arrays.asList(str.split("[, ; ;]"));
    /**
     * Creates a converter with the default "warn and drop" fallback.
     *
     * @param allOptionSupplier supplier of all dictionary options
     */
    public EnumDictValueConverter(Supplier<List<T>> allOptionSupplier) {
        this.allOptionSupplier = allOptionSupplier;
    }
    /**
     * Creates a converter with a custom fallback for unmatched values.
     *
     * @param allOptionSupplier supplier of all dictionary options
     * @param orElseGet fallback invoked for values not found in the dictionary
     */
    public EnumDictValueConverter(Supplier<List<T>> allOptionSupplier, Function<Object, T> orElseGet) {
        this.allOptionSupplier = allOptionSupplier;
        this.orElseGet = orElseGet;
    }
    // Whether a single stored value may represent several selected options.
    @Setter
    @Getter
    private boolean multi = true;
    // Whether multi-selections are stored as a numeric bit mask rather than a joined string.
    @Setter
    @Getter
    private boolean dataToMask = true;
    /**
     * Looks up the dictionary option equal to the given raw value, delegating
     * to the fallback when no option matches.
     */
    protected T find(Object value) {
        return allOptionSupplier.get()
                .stream()
                .filter(e -> e.eq(value))
                .findFirst()
                .orElseGet(() -> orElseGet.apply(value));
    }
    /**
     * Converts a user-facing value into its stored (database) representation.
     */
    @Override
    @SuppressWarnings("all")
    public Object getData(Object value) {
        if (StringUtils.isEmpty(value)) {
            return value;
        }
        // Multi-select
        if (multi) {
            // Normalise the input into a list of candidate raw values.
            List<Object> values;
            if (value instanceof String) {
                values = splitter.apply((String) value);
            } else if (value instanceof Object[]) {
                values = Arrays.asList(((Object[]) value));
            } else if (value instanceof Collection) {
                values = new ArrayList<>(((Collection) value));
            } else {
                values = Collections.singletonList(value);
            }
            // Convert to a bit mask
            if (dataToMask) {
                // NOTE(review): a numeric input is treated as an already-built
                // mask and returned as-is -- confirm this is intended.
                if (value instanceof Number) {
                    return ((Number) value).longValue();
                }
                return EnumDict.toMask(values.stream()
                        .map(this::find)
                        .filter(Objects::nonNull)
                        .toArray(EnumDict[]::new));
            }
            // Unmatched values are silently dropped (find may yield null).
            return multiValueConvert
                    .apply(values.stream()
                            .map(this::find)
                            .filter(Objects::nonNull)
                            .map(EnumDict::getValue)
                            .map(String::valueOf));
        }
        // Single-select: store the matched option's value, or the input unchanged.
        return Optional.ofNullable(this.find(value))
                .map(EnumDict::getValue)
                .orElse(value);
    }
    /**
     * Converts a stored (database) representation back into option values.
     */
    @Override
    public Object getValue(Object data) {
        if (multi) {
            if (dataToMask) {
                Long mask = null;
                if (org.hswebframework.utils.StringUtils.isNumber(data)) {
                    mask = org.hswebframework.utils.StringUtils.toLong(data);
                }
                if (mask != null) {
                    // Expand the bit mask back into the matching option values.
                    return multiValueConvert
                            .apply(EnumDict.getByMask(allOptionSupplier, mask)
                                    .stream()
                                    .map(EnumDict::getValue)
                                    .map(String::valueOf));
                }
            }
            // Not a mask: split the stored string and return the option values it matches.
            List<Object> lst = splitter.apply(String.valueOf(data));
            return multiValueConvert
                    .apply(allOptionSupplier.get()
                            .stream()
                            .filter(e -> e.eq(lst))
                            .map(EnumDict::getValue)
                            .map(String::valueOf));
        }
        // Single-select values are stored verbatim.
        return data;
    }
}
|
/* Copyright (C) 2010 Alexandre Riazanov (Alexander Ryazanov)
*
* The copyright owner licenses this file to You under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package logic.is.power.alabai;
import java.util.*;
import logic.is.power.cushion.*;
import logic.is.power.logic_warehouse.*;
/**
* Performs equality resolution on specified clauses.
* TODO: can be optimised
*/
/**
 * Performs equality resolution on specified clauses: for every negative
 * equality literal whose two sides are unifiable, emits the clause made of
 * the remaining literals under the unifier.
 * TODO: can be optimised
 */
class EqualityResolution {
    /** @param conclusionAssembler where the conclusions of inferences
     *  will be assembled
     */
    public EqualityResolution(NewClauseAssembler conclusionAssembler) {
        _conclusionAssembler = conclusionAssembler;
        _variableBank = new Variable.Bank();
        _variableRenaming = new VariableRenaming();
        _literalAssembler = new FlattermAssembler();
        _unifier = new Substitution1();
    }
    /** Performs all possible equality resolution inferences
     * and sends the results to the associated conclusion assembler.
     */
    public final void performAllInferences(Clause cl) {
        _clause = cl;
        // Fresh variable namespace for this clause's literal copies.
        _variableBank.reset();
        _variableRenaming.reset(_variableBank);
        _literals = new LinkedList<Flatterm>();
        // Copy every literal into flatterm form, renaming variables into the
        // local bank so unification does not disturb the original clause.
        for (Literal lit : cl.literals())
        {
            _literalAssembler.reset();
            _literalAssembler.pushLiteral(lit.isPositive(),
                    lit.atom(),
                    _variableRenaming);
            _literalAssembler.wrapUp();
            _literals.addLast(_literalAssembler.assembledTerm());
        };
        // Equality resolution applies to negative equality literals s != t
        // whose sides unify.
        for (Flatterm currentLiteral : _literals)
            if (currentLiteral.isNegative() &&
                    currentLiteral.isEqualityLiteral())
            {
                if (Unification.unify(currentLiteral.atom().firstArg(),
                        currentLiteral.atom().secondArg(),
                        _unifier))
                {
                    createResolvent(currentLiteral);
                    // Undo the unifier's bindings before trying the next literal.
                    _unifier.uninstantiateAll();
                };
            };
        _literals = null; // release the literal copies for GC
    } // performAllInferences(Clause cl)
    // Private methods:
    /** Emits the conclusion clause: all literals except the resolved-upon
     *  negative equality, pushed under the current global substitution. */
    private final void createResolvent(Flatterm currentLiteral) {
        // A clause discarded by another component produces no conclusions.
        if (_clause.isDiscarded()) return;
        _conclusionAssembler.
                openClause(InferenceType.EqualityResolution);
        _conclusionAssembler.addParent(_clause);
        for (Flatterm lit : _literals)
            if (lit != currentLiteral)
                _conclusionAssembler.
                        pushLiteralWithGlobSubstAndRenaming(lit);
        _conclusionAssembler.endOfClause();
        ++(Statistics.current().deductionInferences.equalityResolution.accomplished);
        assert GlobalEventCounter.inc();
    } // createResolvent(Flatterm currentLiteral)
    // Data:
    private NewClauseAssembler _conclusionAssembler;
    private Clause _clause;
    private LinkedList<Flatterm> _literals;
    private Variable.Bank _variableBank;
    private VariableRenaming _variableRenaming;
    private FlattermAssembler _literalAssembler;
    private Substitution1 _unifier;
} // class EqualityResolution
|
/**
* Copyright 2010-2015 Axel Fontaine
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.contrastsecurity.cassandra.migration.utils;
import java.text.SimpleDateFormat;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
/** Utility methods for dealing with dates. */
/** Utility methods for dealing with dates. */
public class DateUtils {

    /**
     * Formatter for the standard ISO-like timestamp pattern.
     * DateTimeFormatter is immutable and thread-safe, so a single cached
     * instance avoids allocating a new formatter on every call (the old
     * SimpleDateFormat approach allocated one per invocation and is not
     * safe to share).
     */
    private static final DateTimeFormatter ISO_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /** Prevents instantiation. */
    private DateUtils() {
        // Do nothing
    }

    /**
     * Formats this date in the standard ISO format ({@code yyyy-MM-dd HH:mm:ss})
     * in the JVM's default time zone, matching the previous SimpleDateFormat
     * behavior.
     *
     * @param date The date to format.
     * @return The date in ISO format. An empty string if the date is null.
     */
    public static String formatDateAsIsoString(Date date) {
        if (date == null) {
            return "";
        }
        return ISO_FORMAT.format(date.toInstant().atZone(ZoneId.systemDefault()));
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandraBloomFilters.net;
import java.net.InetAddress;
/**
 * Hook for intercepting messages in tests: implementations decide whether a
 * given outgoing or incoming message is delivered or silently dropped.
 */
public interface IMessageSink
{
    /**
     * Allow or drop an outgoing message
     *
     * @param message the outgoing message under consideration
     * @param id      the message id
     * @param to      the destination address of the message
     * @return true if the message is allowed, false if it should be dropped
     */
    boolean allowOutgoingMessage(MessageOut message, int id, InetAddress to);
    /**
     * Allow or drop an incoming message
     *
     * @param message the incoming message under consideration
     * @param id      the message id
     * @return true if the message is allowed, false if it should be dropped
     */
    boolean allowIncomingMessage(MessageIn message, int id);
}
|
package de.adorsys.xs2a.adapter.mapper.psd2;
import de.adorsys.xs2a.adapter.model.TokenResponseTO;
import de.adorsys.xs2a.adapter.service.model.TokenResponse;
import org.mapstruct.Mapper;
/**
 * MapStruct mapper converting between the internal token-response model and
 * its transport object; the implementation is generated at build time.
 */
@Mapper
public interface Oauth2Mapper {
    /** Maps the internal token response to its transport representation. */
    TokenResponseTO map(TokenResponse token);
    /** Maps a transport-layer token response back to the internal model. */
    TokenResponse toTokenResponse(TokenResponseTO tokenResponseTO);
}
|
/*
* Created on 12.07.2006
*
*/
package de.peerthing.visualization.queryeditor.actions;
import java.util.List;
import org.eclipse.core.resources.IFile;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.viewers.ISelection;
import de.peerthing.visualization.VisualizationPlugin;
import de.peerthing.visualization.querymodel.Query;
import de.peerthing.visualization.querymodel.VisualizationData;
import de.peerthing.visualization.querymodel.interfaces.IQuery;
import de.peerthing.visualization.querymodel.interfaces.IQueryDataModel;
import de.peerthing.visualization.querymodel.interfaces.IVisualizationData;
/**
* Pastes copies of objects that were currently copied to the local clipboard
* into the current selection. Depending on the selection, only the fitting
* objects are inserted.
*
* @author Michael Gottschalk
*
*/
public class PasteAction extends AbstractTreeAction {
    /**
     * Enables the paste action only when the clipboard holds at least one
     * object that can legally be inserted at the current selection.
     */
    @Override
    public void selectionChanged(IAction action, ISelection selection) {
        super.selectionChanged(action, selection);
        // Enable for the insertion of IVisualizationData objects
        boolean enabled = false;
        if ((firstSelectedObject instanceof IVisualizationData || firstSelectedObject instanceof IQuery)) {
            List<Object> copiedObjects = VisualizationPlugin.getDefault()
                    .getCopiedObjects();
            for (Object obj : copiedObjects) {
                if (obj instanceof IVisualizationData) {
                    enabled = true;
                }
            }
        }
        // Enable for the insertion of query objects...
        if (!enabled
                && (firstSelectedObject instanceof IQuery || firstSelectedObject instanceof IFile)) {
            List<Object> copiedObjects = VisualizationPlugin.getDefault()
                    .getCopiedObjects();
            for (Object obj : copiedObjects) {
                if (obj instanceof IQuery) {
                    enabled = true;
                }
            }
        }
        action.setEnabled(enabled);
    }
    /**
     * Pastes copies of the clipboard objects into the current selection.
     * Visualization data is copied under the selected query (or the query
     * owning the selected visualization); queries are copied into the data
     * model of the selected query or file.
     */
    public void run(IAction action) {
        List<Object> copiedObjects = VisualizationPlugin.getDefault()
                .getCopiedObjects();
        for (Object cObj : copiedObjects) {
            if ((firstSelectedObject instanceof IQuery || firstSelectedObject instanceof IVisualizationData)
                    && cObj instanceof IVisualizationData) {
                // Resolve the query that will own the pasted visualization data.
                IQuery parent = null;
                if (firstSelectedObject instanceof IQuery) {
                    parent = (IQuery) firstSelectedObject;
                } else {
                    parent = ((IVisualizationData) firstSelectedObject)
                            .getQuery();
                }
                // Deep-copy the clipboard object so later edits do not alias it.
                IVisualizationData newVis = new VisualizationData(
                        (VisualizationData) cObj, parent);
                parent.addVisualizationData(newVis);
                getTree().refresh(parent);
                modelChanged(newVis);
            } else if (cObj instanceof IQuery
                    && (firstSelectedObject instanceof IQuery || firstSelectedObject instanceof IFile)) {
                IQueryDataModel parent = null;
                // Get the parent for the pasted object
                // from the current selection
                if (firstSelectedObject instanceof IQuery) {
                    parent = ((IQuery) firstSelectedObject).getQueryDataModel();
                } else {
                    parent = getFiletypeRegistration().getDataModel((IFile) firstSelectedObject);
                }
                // Files with no registered data model cannot receive queries.
                if (parent == null) {
                    continue;
                }
                IQuery newQuery = new Query((Query) cObj, parent);
                parent.getQueries().add(newQuery);
                getTree().refresh(parent.getFile());
                modelChanged(newQuery);
            }
        }
    }
}
|
/*
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jbatch.tck.artifacts.chunktypes;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
/**
 * Stateless checkpoint-data artifact for batch TCK tests; both
 * externalization hooks are intentionally left empty because there is no
 * state to persist.
 */
@javax.inject.Named("checkpointData")
public class CheckpointData implements Externalizable {
    // NOTE(review): conventional modifier order is "private static final".
    private final static long serialVersionUID = 1L;
    /** No state to restore; deliberately a no-op. */
    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        // TODO Auto-generated method stub
    }
    /** No state to persist; deliberately a no-op. */
    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        // TODO Auto-generated method stub
    }
}
|
/*
* Copyright 2015-2019 Austin Keener, Michael Ritter, Florian Spieß, and the JDA contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.dv8tion.jda.api.utils;
import java.util.Iterator;
/**
* Iterator holding a resource that must be free'd by the consumer.
* <br>Close is an idempotent function and can be performed multiple times without effects beyond first invocation.
*
* <p>This closes automatically when {@link #hasNext()} returns {@code false} but
* its recommended to only be used within a {@code try-with-resources} block for safety.
*
* <h3>Example</h3>
* This can handle any exceptions thrown while iterating and ensures the lock is released correctly.
* <pre>{@code
* try (ClosableIterator<T> it = cacheView.lockedIterator()) {
* while (it.hasNext()) {
* consume(it.next());
* }
* }
* }</pre>
*
* @param <T>
* The element type
*
* @since 4.0.0
*/
public interface ClosableIterator<T> extends Iterator<T>, AutoCloseable
{
    /**
     * Releases the resource held by this iterator.
     * <br>Idempotent: invoking it more than once has no effect beyond the
     * first call (see the class documentation).
     *
     * <p>Narrows {@link AutoCloseable#close()} to throw no checked
     * exception, so no try/catch is needed around the close itself.
     */
    @Override
    void close();
}
|
package org.gradle.profiler;
import com.google.common.collect.ImmutableList;
import org.apache.commons.io.FileUtils;
import org.gradle.profiler.buildops.BuildOperationInstrumentation;
import org.gradle.profiler.instrument.PidInstrumentation;
import org.gradle.profiler.result.BuildInvocationResult;
import org.gradle.profiler.result.Sample;
import org.gradle.tooling.GradleConnector;
import org.gradle.tooling.ProjectConnection;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Consumer;
import static org.gradle.profiler.BuildStep.BUILD;
import static org.gradle.profiler.BuildStep.CLEANUP;
import static org.gradle.profiler.Phase.MEASURE;
import static org.gradle.profiler.Phase.WARM_UP;
/**
 * Runs a Gradle scenario: performs warm-up builds, then measured builds with
 * the profiler attached, feeding each build's result to the supplied consumer.
 */
public class GradleScenarioInvoker extends ScenarioInvoker<GradleScenarioDefinition, GradleBuildInvocationResult> {
    private final DaemonControl daemonControl;
    private final PidInstrumentation pidInstrumentation;
    public GradleScenarioInvoker(DaemonControl daemonControl, PidInstrumentation pidInstrumentation) {
        this.daemonControl = daemonControl;
        this.pidInstrumentation = pidInstrumentation;
    }
    /**
     * Declares which samples are reported per invocation: always execution
     * time, plus time-to-first-task when configuration time is measured.
     */
    @Override
    public List<Sample<? super GradleBuildInvocationResult>> samplesFor(InvocationSettings settings) {
        if (settings.isMeasureConfigTime()) {
            return ImmutableList.of(BuildInvocationResult.EXECUTION_TIME, GradleBuildInvocationResult.TIME_TO_TASK_EXECUTION);
        } else {
            return ImmutableList.of(BuildInvocationResult.EXECUTION_TIME);
        }
    }
    /**
     * Executes the scenario end to end: sets up JVM/Gradle arguments, runs the
     * warm-up builds, then the measured builds under the profiler, and finally
     * tears down the daemon and Tooling API connection.
     *
     * @param scenario       the scenario definition (invoker, counts, actions)
     * @param settings       global invocation settings
     * @param resultConsumer receives the result of every build invocation
     * @throws IllegalStateException if profiling is requested without warm-ups
     */
    @Override
    public void doRun(GradleScenarioDefinition scenario, InvocationSettings settings, Consumer<GradleBuildInvocationResult> resultConsumer) throws IOException, InterruptedException {
        // Profilers attach to the daemon started by a warm-up build, so at
        // least one warm-up is mandatory when profiling.
        if (settings.isProfile() && scenario.getWarmUpCount() == 0) {
            throw new IllegalStateException("Using the --profile option requires at least one warm-up");
        }
        ScenarioSettings scenarioSettings = new ScenarioSettings(settings, scenario);
        FileUtils.forceMkdir(scenario.getOutputDir());
        // Arguments applied to EVERY build (warm-up and measured).
        JvmArgsCalculator allBuildsJvmArgsCalculator = settings.getProfiler().newJvmArgsCalculator(scenarioSettings);
        GradleArgsCalculator allBuildsGradleArgsCalculator = pidInstrumentation;
        allBuildsGradleArgsCalculator = allBuildsGradleArgsCalculator.plus(settings.getProfiler().newGradleArgsCalculator(scenarioSettings));
        BuildOperationInstrumentation buildOperationInstrumentation = new BuildOperationInstrumentation();
        if (settings.isMeasureConfigTime()) {
            allBuildsGradleArgsCalculator = allBuildsGradleArgsCalculator.plus(buildOperationInstrumentation);
        }
        BuildAction cleanupAction = scenario.getCleanupAction();
        GradleBuildConfiguration buildConfiguration = scenario.getBuildConfiguration();
        // Start from a clean slate: no pre-existing daemon for this distribution.
        daemonControl.stop(buildConfiguration);
        BuildMutator mutator = scenario.getBuildMutator().get();
        GradleConnector connector = GradleConnector.newConnector()
                .useInstallation(buildConfiguration.getGradleHome())
                .useGradleUserHomeDir(settings.getGradleUserHome().getAbsoluteFile());
        ProjectConnection projectConnection = connector.forProjectDirectory(settings.getProjectDir()).connect();
        try {
            buildConfiguration.printVersionInfo();
            // Assemble the JVM arguments shared by all builds.
            List<String> allBuildsJvmArgs = new ArrayList<>(buildConfiguration.getJvmArguments());
            allBuildsJvmArgs.addAll(scenario.getJvmArgs());
            for (Map.Entry<String, String> entry : scenario.getSystemProperties().entrySet()) {
                allBuildsJvmArgs.add("-D" + entry.getKey() + "=" + entry.getValue());
            }
            allBuildsJvmArgs.add("-Dorg.gradle.profiler.scenario=" + scenario.getName());
            allBuildsJvmArgsCalculator.calculateJvmArgs(allBuildsJvmArgs);
            logJvmArgs(allBuildsJvmArgs);
            // Assemble the Gradle command-line arguments shared by all builds.
            List<String> allBuildsGradleArgs = new ArrayList<>();
            allBuildsGradleArgs.add("--gradle-user-home");
            allBuildsGradleArgs.add(settings.getGradleUserHome().getAbsolutePath());
            for (Map.Entry<String, String> entry : scenario.getSystemProperties().entrySet()) {
                allBuildsGradleArgs.add("-D" + entry.getKey() + "=" + entry.getValue());
            }
            allBuildsGradleArgs.addAll(scenario.getGradleArgs());
            if (settings.isDryRun()) {
                allBuildsGradleArgs.add("--dry-run");
            }
            allBuildsGradleArgsCalculator.calculateGradleArgs(allBuildsGradleArgs);
            logGradleArgs(allBuildsGradleArgs);
            // Pick the mechanism used to launch builds based on the scenario's invoker.
            GradleInvoker buildInvoker;
            if (scenario.getInvoker() == GradleBuildInvoker.CliNoDaemon) {
                buildInvoker = new CliInvoker(buildConfiguration, buildConfiguration.getJavaHome(), settings.getProjectDir(), false);
            } else if (scenario.getInvoker() == GradleBuildInvoker.ToolingApi || scenario.getInvoker() == GradleBuildInvoker.ToolingApiColdDaemon) {
                buildInvoker = new ToolingApiInvoker(projectConnection);
            } else if (scenario.getInvoker() == GradleBuildInvoker.Cli || scenario.getInvoker() == GradleBuildInvoker.CliColdDaemon) {
                buildInvoker = new CliInvoker(buildConfiguration, buildConfiguration.getJavaHome(), settings.getProjectDir(), true);
            } else {
                throw new IllegalArgumentException();
            }
            // Cold-daemon scenarios stop the daemon before every build so each
            // measurement starts from a fresh JVM.
            BuildAction beforeBuildAction;
            if (scenario.getInvoker().isColdDaemon()) {
                beforeBuildAction = new CleanupThenStopDaemon(cleanupAction, daemonControl, buildConfiguration);
            } else {
                beforeBuildAction = cleanupAction;
            }
            BuildUnderTestInvoker invoker = new BuildUnderTestInvoker(allBuildsJvmArgs, allBuildsGradleArgs, buildInvoker, pidInstrumentation, buildOperationInstrumentation);
            mutator.beforeScenario();
            GradleBuildInvocationResult results = null;
            String pid = null;
            // Warm-up phase: also captures the daemon pid and checks it stays stable.
            for (int i = 1; i <= scenario.getWarmUpCount(); i++) {
                int counter = i;
                beforeBuild(WARM_UP, counter, invoker, beforeBuildAction, mutator);
                String displayName = WARM_UP.displayBuildNumber(counter);
                results = runMeasured(displayName, mutator, () -> invoker.runBuild(WARM_UP, counter, BUILD, scenario.getAction()), resultConsumer);
                if (pid == null) {
                    pid = results.getDaemonPid();
                } else {
                    checkPid(pid, results.getDaemonPid(), scenario.getInvoker());
                }
            }
            // NOTE(review): with zero warm-ups "pid" is still null here; the
            // profiler controller and the final checkPid both receive null --
            // verify this is handled by the non-profiling code paths.
            ProfilerController control = settings.getProfiler().newController(pid, scenarioSettings);
            // Measured builds may get additional profiler-specific arguments.
            List<String> instrumentedBuildJvmArgs = new ArrayList<>(allBuildsJvmArgs);
            settings.getProfiler().newInstrumentedBuildsJvmArgsCalculator(scenarioSettings).calculateJvmArgs(instrumentedBuildJvmArgs);
            List<String> instrumentedBuildGradleArgs = new ArrayList<>(allBuildsGradleArgs);
            settings.getProfiler().newInstrumentedBuildsGradleArgsCalculator(scenarioSettings).calculateGradleArgs(instrumentedBuildGradleArgs);
            Logging.detailed().println();
            Logging.detailed().println("* Using args for instrumented builds:");
            if (!instrumentedBuildJvmArgs.equals(allBuildsJvmArgs)) {
                logJvmArgs(instrumentedBuildJvmArgs);
            }
            if (!instrumentedBuildGradleArgs.equals(allBuildsGradleArgs)) {
                logGradleArgs(instrumentedBuildGradleArgs);
            }
            BuildUnderTestInvoker instrumentedBuildInvoker = invoker.withJvmArgs(instrumentedBuildJvmArgs).withGradleArgs(instrumentedBuildGradleArgs);
            control.startSession();
            // Measurement phase: recording is started/stopped per build when a
            // before-build action disturbs state, otherwise only around the
            // first and last builds.
            for (int i = 1; i <= scenario.getBuildCount(); i++) {
                final int counter = i;
                beforeBuild(MEASURE, counter, invoker, beforeBuildAction, mutator);
                String displayName = MEASURE.displayBuildNumber(counter);
                results = runMeasured(displayName, mutator, () -> {
                    if ((counter == 1 || beforeBuildAction.isDoesSomething())) {
                        try {
                            control.startRecording();
                        } catch (IOException | InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    }
                    GradleBuildInvocationResult result = instrumentedBuildInvoker.runBuild(MEASURE, counter, BUILD, scenario.getAction());
                    if ((counter == scenario.getBuildCount() || beforeBuildAction.isDoesSomething())) {
                        try {
                            control.stopRecording(result.getDaemonPid());
                        } catch (IOException | InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    }
                    return result;
                }, resultConsumer);
            }
            control.stopSession();
            Objects.requireNonNull(results);
            checkPid(pid, results.getDaemonPid(), scenario.getInvoker());
        } finally {
            // Always tear down, even when a build or the profiler failed.
            mutator.afterScenario();
            projectConnection.close();
            daemonControl.stop(buildConfiguration);
        }
    }
    /** Logs the Gradle command-line arguments at detailed verbosity. */
    private void logGradleArgs(List<String> allBuildsGradleArgs) {
        Logging.detailed().println("Gradle args:");
        for (String arg : allBuildsGradleArgs) {
            Logging.detailed().println("  " + arg);
        }
    }
    /** Logs the JVM arguments at detailed verbosity. */
    private void logJvmArgs(List<String> allBuildsJvmArgs) {
        Logging.detailed().println("JVM args:");
        for (String jvmArg : allBuildsJvmArgs) {
            Logging.detailed().println("  " + jvmArg);
        }
    }
    /** Runs the cleanup action before a build, if it actually does anything. */
    private void beforeBuild(Phase phase, int buildNumber, BuildUnderTestInvoker invoker, BuildAction cleanupAction, BuildMutator mutator) {
        if (cleanupAction.isDoesSomething()) {
            String displayName = phase.displayBuildNumber(buildNumber);
            runCleanup(displayName, mutator, () -> invoker.runBuild(phase, buildNumber, CLEANUP, cleanupAction));
        }
    }
    /**
     * Verifies the daemon pid matches the invoker's expectation: a single
     * reused daemon for warm invokers, a fresh daemon per build otherwise.
     */
    private static void checkPid(String expected, String actual, GradleBuildInvoker invoker) {
        if (invoker.isReuseDaemon()) {
            if (!expected.equals(actual)) {
                throw new RuntimeException("Multiple Gradle daemons were used.");
            }
        } else {
            if (expected.equals(actual)) {
                throw new RuntimeException("Gradle daemon was reused but should not be reused.");
            }
        }
    }
}
|
/*
* Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mturk.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.mturk.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* CreateHITWithHITTypeRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class CreateHITWithHITTypeRequestProtocolMarshaller implements Marshaller<Request<CreateHITWithHITTypeRequest>, CreateHITWithHITTypeRequest> {
    // Static description of the wire operation: AWS JSON protocol, POST to "/",
    // payload assembled from member fields rather than an explicit payload member.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("MTurkRequesterServiceV20170117.CreateHITWithHITType").serviceName("AmazonMTurk").build();
    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
    public CreateHITWithHITTypeRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }
    /**
     * Marshalls the given request into an HTTP request using the JSON protocol.
     *
     * @param createHITWithHITTypeRequest the request to marshall; must not be null
     * @return the marshalled HTTP request
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public Request<CreateHITWithHITTypeRequest> marshall(CreateHITWithHITTypeRequest createHITWithHITTypeRequest) {
        if (createHITWithHITTypeRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            final ProtocolRequestMarshaller<CreateHITWithHITTypeRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    createHITWithHITTypeRequest);
            protocolMarshaller.startMarshalling();
            // Delegate field-by-field marshalling to the generated member marshaller.
            CreateHITWithHITTypeRequestMarshaller.getInstance().marshall(createHITWithHITTypeRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Wrap with context while preserving the cause for diagnosis.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
package com.github.tinosteinort.flda.stringlist.writer;
import com.github.tinosteinort.flda.stringlist.StringListAttribute;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
 * Verifies that BooleanAttributeWriter renders {@code true}/{@code false}
 * as their string forms and clears the slot when given {@code null}.
 */
public class BooleanAttributeWriterTest {

    private final BooleanAttributeWriter writer = new BooleanAttributeWriter();

    /** Creates a single-slot record pre-populated with the given value. */
    private static List<String> singleSlotRecord(final String initialValue) {
        final List<String> record = new ArrayList<>(1);
        record.add(initialValue);
        return record;
    }

    /** Creates an attribute descriptor for a boolean stored at index 0. */
    private static StringListAttribute<Boolean> booleanAttributeAtIndexZero() {
        return new StringListAttribute<>(Boolean.class, 0);
    }

    @Test public void writeTrue() {
        final List<String> record = singleSlotRecord(null);
        writer.write(record, booleanAttributeAtIndexZero(), true);
        assertEquals("true", record.get(0));
    }

    @Test public void writeFalse() {
        final List<String> record = singleSlotRecord(null);
        writer.write(record, booleanAttributeAtIndexZero(), false);
        assertEquals("false", record.get(0));
    }

    @Test public void writeNullValue() {
        // Writing null must overwrite a previously populated slot.
        final List<String> record = singleSlotRecord("SomeValue");
        writer.write(record, booleanAttributeAtIndexZero(), null);
        assertNull(record.get(0));
    }
}
|
package com.zwx.gulimall.ware.service.impl;
import org.springframework.stereotype.Service;
import java.util.Map;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.zwx.common.utils.PageUtils;
import com.zwx.common.utils.Query;
import com.zwx.gulimall.ware.dao.UndoLogDao;
import com.zwx.gulimall.ware.entity.UndoLogEntity;
import com.zwx.gulimall.ware.service.UndoLogService;
@Service("undoLogService")
public class UndoLogServiceImpl extends ServiceImpl<UndoLogDao, UndoLogEntity> implements UndoLogService {

    /**
     * Runs a paged query over the undo-log table. Pagination values (page number,
     * page size, sort) are read from {@code params}; no filter conditions are applied,
     * so every row is eligible.
     */
    @Override
    public PageUtils queryPage(Map<String, Object> params) {
        final IPage<UndoLogEntity> resultPage = this.page(
                new Query<UndoLogEntity>().getPage(params),
                new QueryWrapper<UndoLogEntity>());
        return new PageUtils(resultPage);
    }
}
|
package org.motechproject.mds.query;
import org.motechproject.mds.util.LookupName;
/**
 * The <code>EqualProperty</code> class represents a property that will be used in JDO query
 * and it has to be equal to the given value.
 *
 * @param <T> type of the passed value
 */
public class EqualProperty<T> extends Property<T> {

    public EqualProperty(String name, T value, String type) {
        super(name, value, type);
    }

    public EqualProperty(String jdoVariableName, String name, T value, String type) {
        super(jdoVariableName, name, value, type);
    }

    /**
     * Builds the JDO filter fragment that compares this property with the
     * positional query parameter {@code param<idx>}.
     */
    @Override
    public CharSequence generateFilter(int idx) {
        if (!isForRelation()) {
            // plain field: simple equality against the parameter
            return String.format("%s == param%d", getName(), idx);
        }
        // relation field: membership check on the collection plus equality on
        // the related field of the JDO variable
        final String jdoVariable = getJdoVariableName();
        return String.format("%s.contains(%s) && %s.%s == param%d",
                LookupName.getFieldName(getName()), jdoVariable, jdoVariable,
                LookupName.getRelatedFieldName(getName()), idx);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.apache.geode.cache.configuration.CacheConfig;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
import org.apache.geode.management.internal.cli.functions.GatewaySenderFunctionArgs;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.model.ResultModel;
import org.apache.geode.test.junit.rules.GfshParserRule;
/**
 * Unit tests for {@code CreateGatewaySenderCommand} option parsing and validation.
 * The command is spied so no real cache, members, or functions are needed.
 */
public class CreateGatewaySenderCommandTest {

  @ClassRule
  public static GfshParserRule gfsh = new GfshParserRule();

  private CreateGatewaySenderCommand command;
  private InternalCache cache;
  private List<CliFunctionResult> functionResults;
  private CliFunctionResult cliFunctionResult;
  // captures the args forwarded to the creation function for inspection
  private final ArgumentCaptor<GatewaySenderFunctionArgs> argsArgumentCaptor =
      ArgumentCaptor.forClass(GatewaySenderFunctionArgs.class);

  @Before
  public void before() {
    command = spy(CreateGatewaySenderCommand.class);
    cache = mock(InternalCache.class);
    doReturn(cache).when(command).getCache();
    // function execution is stubbed; tests populate functionResults as needed
    functionResults = new ArrayList<>();
    doReturn(functionResults).when(command).executeAndGetFunctionResult(any(),
        any(), any(Set.class));
  }

  @Test
  public void missingId() {
    gfsh.executeAndAssertThat(command, "create gateway-sender --remote-distributed-system-id=1")
        .statusIsError().containsOutput("Invalid command");
  }

  @Test
  public void missingRemoteId() {
    gfsh.executeAndAssertThat(command, "create gateway-sender --id=ln").statusIsError()
        .containsOutput("Invalid command");
  }

  @Test
  public void missingOrderPolicy() {
    // more than one dispatcher thread requires an explicit order policy
    gfsh.executeAndAssertThat(command,
        "create gateway-sender --id=ln --remote-distributed-system-id=1 "
            + "--dispatcher-threads=2")
        .statusIsError()
        .containsOutput("Must specify --order-policy when --dispatcher-threads is larger than 1");
    // a single dispatcher thread is accepted, but with no members the command still fails
    doReturn(Collections.emptySet()).when(command).findMembers(any(), any());
    gfsh.executeAndAssertThat(command,
        "create gateway-sender --id=ln --remote-distributed-system-id=1 "
            + "--dispatcher-threads=1")
        .statusIsError().containsOutput("No Members Found");
  }

  @Test
  public void parallelAndThreadOrderPolicy() {
    gfsh.executeAndAssertThat(command,
        "create gateway-sender --id=ln --remote-distributed-system-id=1 "
            + "--parallel --order-policy=THREAD")
        .statusIsError()
        .containsOutput("Parallel Gateway Sender can not be created with THREAD OrderPolicy");
  }

  @Test
  public void orderPolicyAutoComplete() {
    // local string renamed so it does not shadow the spied command field
    String commandInput =
        "create gateway-sender --id=ln --remote-distributed-system-id=1 --order-policy";
    GfshParserRule.CommandCandidate candidate = gfsh.complete(commandInput);
    assertThat(candidate.getCandidates()).hasSize(3);
    assertThat(candidate.getFirstCandidate()).isEqualTo(commandInput + "=KEY");
  }

  @Test
  public void whenCommandOnMember() {
    doReturn(mock(Set.class)).when(command).getMembers(any(), any());
    cliFunctionResult = new CliFunctionResult("member",
        CliFunctionResult.StatusState.OK, "cliFunctionResult");
    functionResults.add(cliFunctionResult);
    gfsh.executeAndAssertThat(command,
        "create gateway-sender --member=xyz --id=1 --remote-distributed-system-id=1")
        .statusIsSuccess();
  }

  @Test
  public void testFunctionArgs() {
    doReturn(mock(Set.class)).when(command).getMembers(any(), any());
    cliFunctionResult = new CliFunctionResult("member",
        CliFunctionResult.StatusState.OK, "cliFunctionResult");
    functionResults.add(cliFunctionResult);
    gfsh.executeAndAssertThat(command,
        "create gateway-sender --member=xyz --id=1 --remote-distributed-system-id=1"
            + " --order-policy=thread --dispatcher-threads=2 "
            + "--gateway-event-filter=test1,test2 --gateway-transport-filter=test1,test2")
        .statusIsSuccess();
    // verify the parsed options were translated into the function arguments
    verify(command).executeAndGetFunctionResult(any(), argsArgumentCaptor.capture(),
        any(Set.class));
    assertThat(argsArgumentCaptor.getValue().getOrderPolicy()).isEqualTo(
        GatewaySender.OrderPolicy.THREAD.toString());
    assertThat(argsArgumentCaptor.getValue().getRemoteDistributedSystemId()).isEqualTo(1);
    assertThat(argsArgumentCaptor.getValue().getDispatcherThreads()).isEqualTo(2);
    assertThat(argsArgumentCaptor.getValue().getGatewayEventFilter()).containsExactly("test1",
        "test2");
    assertThat(argsArgumentCaptor.getValue().getGatewayTransportFilter()).containsExactly("test1",
        "test2");
  }

  @Test
  public void testReturnsConfigInResultModel() {
    doReturn(mock(Set.class)).when(command).getMembers(any(), any());
    cliFunctionResult = new CliFunctionResult("member", CliFunctionResult.StatusState.OK,
        "cliFunctionResult");
    functionResults.add(cliFunctionResult);
    ResultModel resultModel = gfsh.executeAndAssertThat(command,
        "create gateway-sender --group=xyz --id=1 --remote-distributed-system-id=1"
            + " --order-policy=thread --dispatcher-threads=2 "
            + "--gateway-event-filter=test1,test2 --gateway-transport-filter=test1,test2")
        .getResultModel();
    assertThat(resultModel.getConfigObject()).isNotNull();
    CacheConfig.GatewaySender sender = (CacheConfig.GatewaySender) resultModel.getConfigObject();
    assertThat(sender.getId()).isEqualTo("1");
    assertThat(sender.getRemoteDistributedSystemId()).isEqualTo("1");
    assertThat(sender.getOrderPolicy()).isEqualTo("THREAD");
  }

  @Test
  public void whenMembersAreDifferentVersions() {
    // Create a set of mixed version members
    Set<DistributedMember> members = new HashSet<>();
    InternalDistributedMember currentVersionMember = mock(InternalDistributedMember.class);
    doReturn(Version.CURRENT).when(currentVersionMember).getVersionObject();
    InternalDistributedMember oldVersionMember = mock(InternalDistributedMember.class);
    doReturn(Version.GEODE_1_4_0).when(oldVersionMember).getVersionObject();
    members.add(currentVersionMember);
    members.add(oldVersionMember);
    doReturn(members).when(command).getMembers(any(), any());
    // Verify executing the command fails
    gfsh.executeAndAssertThat(command,
        "create gateway-sender --id=1 --remote-distributed-system-id=1").statusIsError()
        .containsOutput(CliStrings.CREATE_GATEWAYSENDER__MSG__CAN_NOT_CREATE_DIFFERENT_VERSIONS);
  }
}
|
package io.rxmicro.examples.rest.controller.model.types.model.request.query_or_http_body;
import io.rxmicro.examples.rest.controller.model.types.model.Status;
import io.rxmicro.exchange.json.detail.ModelFromJsonConverter;
import java.util.Map;
/**
 * Generated by {@code RxMicro Annotation Processor}
 *
 * <p>Converts a JSON object (as a {@code Map}) into a {@code QueryOrHttpBodyRequest},
 * reading one parameter per model field. Generated code — regenerate rather than
 * editing by hand.
 */
public final class $$QueryOrHttpBodyRequestModelFromJsonConverter extends ModelFromJsonConverter<QueryOrHttpBodyRequest> {

    @Override
    public QueryOrHttpBodyRequest fromJsonObject(final Map<String, Object> params) {
        final QueryOrHttpBodyRequest model = new QueryOrHttpBodyRequest();
        readParamsToModel(params, model);
        return model;
    }

    // Populates every field of the model from the raw parameter map; each
    // to*() helper converts (and validates) one named value.
    public void readParamsToModel(final Map<String, Object> params,
                                  final QueryOrHttpBodyRequest model) {
        // scalar parameters
        model.booleanParameter = toBoolean(params.get("booleanParameter"), "booleanParameter");
        model.byteParameter = toByte(params.get("byteParameter"), "byteParameter");
        model.shortParameter = toShort(params.get("shortParameter"), "shortParameter");
        model.intParameter = toInteger(params.get("intParameter"), "intParameter");
        model.longParameter = toLong(params.get("longParameter"), "longParameter");
        model.bigIntParameter = toBigInteger(params.get("bigIntParameter"), "bigIntParameter");
        model.floatParameter = toFloat(params.get("floatParameter"), "floatParameter");
        model.doubleParameter = toDouble(params.get("doubleParameter"), "doubleParameter");
        model.decimalParameter = toBigDecimal(params.get("decimalParameter"), "decimalParameter");
        model.charParameter = toCharacter(params.get("charParameter"), "charParameter");
        model.stringParameter = toString(params.get("stringParameter"), "stringParameter");
        model.instantParameter = toInstant(params.get("instantParameter"), "instantParameter");
        model.enumParameter = toEnum(Status.class, params.get("enumParameter"), "enumParameter");
        // List-valued parameters
        model.booleanParameterList = toBooleanList(params.get("booleanParameterList"), "booleanParameterList");
        model.byteParameterList = toByteList(params.get("byteParameterList"), "byteParameterList");
        model.shortParameterList = toShortList(params.get("shortParameterList"), "shortParameterList");
        model.intParameterList = toIntegerList(params.get("intParameterList"), "intParameterList");
        model.longParameterList = toLongList(params.get("longParameterList"), "longParameterList");
        model.bigIntParameterList = toBigIntegerList(params.get("bigIntParameterList"), "bigIntParameterList");
        model.floatParameterList = toFloatList(params.get("floatParameterList"), "floatParameterList");
        model.doubleParameterList = toDoubleList(params.get("doubleParameterList"), "doubleParameterList");
        model.decimalParameterList = toBigDecimalList(params.get("decimalParameterList"), "decimalParameterList");
        model.charParameterList = toCharacterList(params.get("charParameterList"), "charParameterList");
        model.stringParameterList = toStringList(params.get("stringParameterList"), "stringParameterList");
        model.instantParameterList = toInstantList(params.get("instantParameterList"), "instantParameterList");
        model.enumParameterList = toEnumList(Status.class, params.get("enumParameterList"), "enumParameterList");
        // Set-valued parameters
        model.booleanParameterSet = toBooleanSet(params.get("booleanParameterSet"), "booleanParameterSet");
        model.byteParameterSet = toByteSet(params.get("byteParameterSet"), "byteParameterSet");
        model.shortParameterSet = toShortSet(params.get("shortParameterSet"), "shortParameterSet");
        model.intParameterSet = toIntegerSet(params.get("intParameterSet"), "intParameterSet");
        model.longParameterSet = toLongSet(params.get("longParameterSet"), "longParameterSet");
        model.bigIntParameterSet = toBigIntegerSet(params.get("bigIntParameterSet"), "bigIntParameterSet");
        model.floatParameterSet = toFloatSet(params.get("floatParameterSet"), "floatParameterSet");
        model.doubleParameterSet = toDoubleSet(params.get("doubleParameterSet"), "doubleParameterSet");
        model.decimalParameterSet = toBigDecimalSet(params.get("decimalParameterSet"), "decimalParameterSet");
        model.charParameterSet = toCharacterSet(params.get("charParameterSet"), "charParameterSet");
        model.stringParameterSet = toStringSet(params.get("stringParameterSet"), "stringParameterSet");
        model.instantParameterSet = toInstantSet(params.get("instantParameterSet"), "instantParameterSet");
        model.enumParameterSet = toEnumSet(Status.class, params.get("enumParameterSet"), "enumParameterSet");
    }
}
|
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.app;
import boofcv.abst.fiducial.FiducialDetector;
import boofcv.abst.fiducial.SquareImage_to_FiducialDetector;
import boofcv.factory.fiducial.ConfigFiducialImage;
import boofcv.factory.fiducial.FactoryFiducial;
import boofcv.factory.filter.binary.ConfigThreshold;
import boofcv.io.image.ConvertBufferedImage;
import boofcv.io.image.UtilImageIO;
import boofcv.struct.image.GrayF32;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.Objects;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Checks that documents produced by {@code CreateFiducialSquareImage} can be detected
 * again by the matching square-image fiducial detector.
 *
 * @author Peter Abeles
 */
class TestCreateFiducialSquareImage extends CommonFiducialPdfChecks {

	private final ConfigThreshold configThreshold = ConfigThreshold.fixed(125);

	// temporary pattern images written by before() and removed by cleanUpImages()
	private final static String[] names =
			new String[]{"temp0.jpg", "temp1.jpg"};

	/**
	 * Runs the generator's main() with the given command line. Stdout is suppressed
	 * because the application prints unconditionally.
	 */
	private void createDocument( String args ) throws IOException {
		// suppress stdout
		out.out = new PrintStream(new OutputStream(){@Override public void write( int b ){}});
		CreateFiducialSquareImage.main(args.split("\\s+"));
		out.used = false; // this will ignore the stdout usage which is unavoidable
		err.used = false;
	}

	/** Renders the generated PDF and converts it into a gray scale image. */
	private GrayF32 loadImageGray() throws IOException {
		BufferedImage image = loadPDF();
		GrayF32 gray = new GrayF32(image.getWidth(), image.getHeight());
		ConvertBufferedImage.convertFrom(image, gray);
		return gray;
	}

	/** Draws two distinct binary patterns and saves them as the temporary pattern images. */
	@BeforeEach
	void before() {
		BufferedImage output = new BufferedImage(200, 200, BufferedImage.TYPE_INT_RGB);
		Graphics2D g2 = output.createGraphics();
		try {
			g2.setColor(Color.WHITE);
			g2.fillRect(0, 100, 100, 30);
			g2.fillRect(70, 130, 30, 70);
			UtilImageIO.saveImage(output, names[0]);
			// the second pattern adds an oval on top of the first one
			g2.fillOval(100, 100, 50, 50);
			UtilImageIO.saveImage(output, names[1]);
		} finally {
			// release the native resources held by the graphics context
			g2.dispose();
		}
	}

	@AfterEach
	void cleanUpImages() {
		for (String s : names) {
			// best-effort cleanup; a leftover temp file is harmless for other tests
			new File(s).delete();
		}
	}

	/** Creates a detector configured with both pattern images. */
	private SquareImage_to_FiducialDetector<GrayF32> createDetector( ConfigFiducialImage config ) {
		SquareImage_to_FiducialDetector<GrayF32> detector = FactoryFiducial.squareImage(config, configThreshold, GrayF32.class);
		for (String s : names) {
			detector.addPatternImage(Objects.requireNonNull(UtilImageIO.loadImage(s, GrayF32.class)), 125, 30);
		}
		return detector;
	}

	/** A single marker in a PDF should be found and identified as pattern 0. */
	@Test
	void single_pdf() throws IOException {
		createDocument(String.format("--PaperSize letter --OutputFile %s -w 6 -i %s",
				document_name + ".pdf", names[0]));
		GrayF32 gray = loadImageGray();
		ConfigFiducialImage config = new ConfigFiducialImage();
		FiducialDetector<GrayF32> detector = createDetector(config);
		detector.detect(gray);
		assertEquals(1, detector.totalFound());
		assertEquals(0, detector.getId(0));
	}

	/** A grid-filled page should contain alternating copies of both patterns. */
	@Test
	void grid_pdf() throws IOException {
		createDocument(String.format("--PaperSize letter --GridFill --OutputFile %s -w 5 -s 2 -i %s -i %s",
				document_name + ".pdf", names[0], names[1]));
		GrayF32 gray = loadImageGray();
		ConfigFiducialImage config = new ConfigFiducialImage();
		FiducialDetector<GrayF32> detector = createDetector(config);
		detector.detect(gray);
		assertEquals(9, detector.totalFound());
		for (int i = 0; i < detector.totalFound(); i++) {
			assertEquals(i%2, detector.getId(i));
		}
	}

	/** PNG output writes one file per pattern; each should be detected individually. */
	@Test
	void multiple_png() throws IOException {
		createDocument(String.format("--OutputFile %s -w 200 -s 20 -i %s -i %s",
				document_name + ".png", names[0], names[1]));
		ConfigFiducialImage config = new ConfigFiducialImage();
		FiducialDetector<GrayF32> detector = createDetector(config);
		for (int i = 0; i < names.length; i++) {
			GrayF32 gray = loadPngAsGray(document_name + names[i] + ".png");
			detector.detect(gray);
			assertEquals(1, detector.totalFound());
			assertEquals(i, detector.getId(0));
		}
	}
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive.metastore;
import io.prestosql.plugin.hive.HiveType;
import io.prestosql.plugin.hive.PartitionStatistics;
import io.prestosql.plugin.hive.authentication.HiveIdentity;
import io.prestosql.spi.security.RoleGrant;
import io.prestosql.spi.statistics.ColumnStatisticType;
import io.prestosql.spi.type.Type;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
/**
 * A {@code HiveMetastore} stub in which every operation throws
 * {@link UnsupportedOperationException}. Intended to be subclassed by tests that
 * need a metastore instance: override only the methods the test expects to be
 * invoked, so any unexpected metastore access fails loudly instead of silently
 * returning a default.
 */
class UnimplementedHiveMetastore
        implements HiveMetastore
{
    @Override
    public Optional<Database> getDatabase(String databaseName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getAllDatabases()
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Optional<Table> getTable(HiveIdentity identity, String databaseName, String tableName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<ColumnStatisticType> getSupportedColumnStatistics(Type type)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public PartitionStatistics getTableStatistics(HiveIdentity identity, Table table)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Map<String, PartitionStatistics> getPartitionStatistics(HiveIdentity identity, Table table, List<Partition> partitions)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void updateTableStatistics(HiveIdentity identity, String databaseName, String tableName, Function<PartitionStatistics, PartitionStatistics> update)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void updatePartitionStatistics(HiveIdentity identity, Table table, String partitionName, Function<PartitionStatistics, PartitionStatistics> update)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getAllTables(String databaseName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getTablesWithParameter(String databaseName, String parameterKey, String parameterValue)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getAllViews(String databaseName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void createDatabase(HiveIdentity identity, Database database)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void dropDatabase(HiveIdentity identity, String databaseName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void renameDatabase(HiveIdentity identity, String databaseName, String newDatabaseName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setDatabaseOwner(HiveIdentity identity, String databaseName, HivePrincipal principal)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void createTable(HiveIdentity identity, Table table, PrincipalPrivileges principalPrivileges)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void dropTable(HiveIdentity identity, String databaseName, String tableName, boolean deleteData)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void replaceTable(HiveIdentity identity, String databaseName, String tableName, Table newTable, PrincipalPrivileges principalPrivileges)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void renameTable(HiveIdentity identity, String databaseName, String tableName, String newDatabaseName, String newTableName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void commentTable(HiveIdentity identity, String databaseName, String tableName, Optional<String> comment)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addColumn(HiveIdentity identity, String databaseName, String tableName, String columnName, HiveType columnType, String columnComment)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void renameColumn(HiveIdentity identity, String databaseName, String tableName, String oldColumnName, String newColumnName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void dropColumn(HiveIdentity identity, String databaseName, String tableName, String columnName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Optional<Partition> getPartition(HiveIdentity identity, Table table, List<String> partitionValues)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Optional<List<String>> getPartitionNames(HiveIdentity identity, String databaseName, String tableName)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Optional<List<String>> getPartitionNamesByParts(HiveIdentity identity, String databaseName, String tableName, List<String> parts)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Map<String, Optional<Partition>> getPartitionsByNames(HiveIdentity identity, Table table, List<String> partitionNames)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addPartitions(HiveIdentity identity, String databaseName, String tableName, List<PartitionWithStatistics> partitions)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void dropPartition(HiveIdentity identity, String databaseName, String tableName, List<String> parts, boolean deleteData)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void alterPartition(HiveIdentity identity, String databaseName, String tableName, PartitionWithStatistics partition)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String tableName, String tableOwner, Optional<HivePrincipal> prestoPrincipal)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void grantTablePrivileges(String databaseName, String tableName, String tableOwner, HivePrincipal grantee, Set<HivePrivilegeInfo> privileges)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void revokeTablePrivileges(String databaseName, String tableName, String tableOwner, HivePrincipal grantee, Set<HivePrivilegeInfo> privileges)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void createRole(String role, String grantor)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void dropRole(String role)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<String> listRoles()
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void grantRoles(Set<String> roles, Set<HivePrincipal> grantees, boolean adminOption, HivePrincipal grantor)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void revokeRoles(Set<String> roles, Set<HivePrincipal> grantees, boolean adminOption, HivePrincipal grantor)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<RoleGrant> listGrantedPrincipals(String role)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<RoleGrant> listRoleGrants(HivePrincipal principal)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean isImpersonationEnabled()
    {
        throw new UnsupportedOperationException();
    }
}
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.changes.VcsAnnotationLocalChangesListener;
import com.intellij.openapi.vcs.history.VcsHistoryCache;
import com.intellij.openapi.vcs.impl.ContentRevisionCache;
import com.intellij.openapi.vcs.impl.VcsDescriptor;
import com.intellij.openapi.vcs.update.UpdatedFiles;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Processor;
import com.intellij.util.messages.Topic;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* Manages the version control systems used by a specific project.
*/
public abstract class ProjectLevelVcsManager {
public static final Topic<VcsListener> VCS_CONFIGURATION_CHANGED = Topic.create("VCS configuration changed", VcsListener.class);
public static final Topic<VcsListener> VCS_CONFIGURATION_CHANGED_IN_PLUGIN = Topic.create("VCS configuration changed in VCS plugin", VcsListener.class);
public abstract void iterateVfUnderVcsRoot(VirtualFile file, Processor<? super VirtualFile> processor);
/**
 * Returns the {@code ProjectLevelVcsManager} instance for the specified project.
 *
 * @param project the project for which the instance is requested.
 * @return the manager instance.
 */
public static ProjectLevelVcsManager getInstance(Project project) {
  return project.getComponent(ProjectLevelVcsManager.class);
}
/**
 * Gets the instance of the component if the project wasn't disposed. If the project was
 * disposed, throws ProcessCanceledException. Should only be used for calling from background
 * threads (for example, committed changes refresh thread).
 *
 * @param project the project for which the component instance should be retrieved.
 * @return component instance
 * @throws ProcessCanceledException if the project has already been disposed.
 */
public static ProjectLevelVcsManager getInstanceChecked(final Project project) {
  // the disposed check and the lookup happen atomically inside a read action
  return ReadAction.compute(() -> {
    if (project.isDisposed()) throw new ProcessCanceledException();
    return getInstance(project);
  });
}
/**
* Returns the list of all registered version control systems.
*
* @return the list of registered version control systems.
*/
public abstract VcsDescriptor[] getAllVcss();
/**
* Returns the version control system with the specified name.
*
* @param name the name of the VCS to find.
* @return the VCS instance, or null if none is found.
*/
@Nullable
public abstract AbstractVcs findVcsByName(@NonNls String name);
@Nullable
public abstract VcsDescriptor getDescriptor(final String name);
/**
* Checks if all files in the specified array are managed by the specified VCS.
*
* @param abstractVcs the VCS to check.
* @param files the files to check.
* @return true if all files are managed by the VCS, false otherwise.
*/
public abstract boolean checkAllFilesAreUnder(AbstractVcs abstractVcs, VirtualFile[] files);
/**
* Returns the VCS managing the specified file.
*
* @param file the file to check.
* @return the VCS instance, or null if the file does not belong to any module or the module
* it belongs to is not under version control.
*/
@Nullable
public abstract AbstractVcs getVcsFor(@NotNull VirtualFile file);
/**
* Returns the VCS managing the specified file path.
*
* @param file the file to check.
* @return the VCS instance, or null if the file does not belong to any module or the module
* it belongs to is not under version control.
*/
@Nullable
public abstract AbstractVcs getVcsFor(FilePath file);
/**
* Return the parent directory of the specified file which is mapped to a VCS.
*
* @param file the file for which the root is requested.
* @return the root, or null if the specified file is not in a VCS-managed directory.
*/
@Nullable
public abstract VirtualFile getVcsRootFor(@Nullable VirtualFile file);
/**
* Return the parent directory of the specified file path which is mapped to a VCS.
*
* @param file the file for which the root is requested.
* @return the root, or null if the specified file is not in a VCS-managed directory.
*/
@Nullable
public abstract VirtualFile getVcsRootFor(FilePath file);
@Nullable
public abstract VcsRoot getVcsRootObjectFor(final VirtualFile file);
@Nullable
public abstract VcsRoot getVcsRootObjectFor(FilePath file);
/**
* Checks if the specified VCS is used by any of the modules in the project.
*
* @param vcs the VCS to check.
* @return true if the VCS is used by any of the modules, false otherwise
*/
public abstract boolean checkVcsIsActive(AbstractVcs vcs);
/**
* Checks if the VCS with the specified name is used by any of the modules in the project.
*
* @param vcsName the name of the VCS to check.
* @return true if the VCS is used by any of the modules, false otherwise
*/
public abstract boolean checkVcsIsActive(@NonNls String vcsName);
/**
* Returns the list of VCSes used by at least one module in the project.
*
* @return the list of VCSes used in the project.
*/
public abstract AbstractVcs[] getAllActiveVcss();
public abstract boolean hasActiveVcss();
public abstract boolean hasAnyMappings();
@Deprecated
public abstract void addMessageToConsoleWindow(String message, TextAttributes attributes);
public abstract void addMessageToConsoleWindow(@Nullable String message, @NotNull ConsoleViewContentType contentType);
@NotNull
public abstract VcsShowSettingOption getStandardOption(@NotNull VcsConfiguration.StandardOption option,
@NotNull AbstractVcs vcs);
@NotNull
public abstract VcsShowConfirmationOption getStandardConfirmation(@NotNull VcsConfiguration.StandardConfirmation option,
AbstractVcs vcs);
@NotNull
public abstract VcsShowSettingOption getOrCreateCustomOption(@NotNull String vcsActionName,
@NotNull AbstractVcs vcs);
@CalledInAwt
public abstract void showProjectOperationInfo(final UpdatedFiles updatedFiles, String displayActionName);
/**
* Adds a listener for receiving notifications about changes in VCS configuration for the project.
*
* @param listener the listener instance.
* @deprecated use {@link #VCS_CONFIGURATION_CHANGED} instead
* @since 6.0
*/
@Deprecated
public abstract void addVcsListener(VcsListener listener);
/**
* Removes a listener for receiving notifications about changes in VCS configuration for the project.
*
* @param listener the listener instance.
* @deprecated use {@link #VCS_CONFIGURATION_CHANGED} instead
* @since 6.0
*/
@Deprecated
public abstract void removeVcsListener(VcsListener listener);
/**
* Marks the beginning of a background VCS operation (commit or update).
*
* @since 6.0
*/
public abstract void startBackgroundVcsOperation();
/**
* Marks the end of a background VCS operation (commit or update).
*
* @since 6.0
*/
public abstract void stopBackgroundVcsOperation();
/**
* Checks if a background VCS operation (commit or update) is currently in progress.
*
* @return true if a background operation is in progress, false otherwise.
* @since 6.0
*/
public abstract boolean isBackgroundVcsOperationRunning();
/**
 * Returns the roots mapped to the given VCS without additional filtering
 * applied — TODO confirm which filtering the {@link #getRootsUnderVcs} variant performs.
 */
public abstract List<VirtualFile> getRootsUnderVcsWithoutFiltering(final AbstractVcs vcs);
/** Returns the roots managed by the given VCS. */
public abstract VirtualFile[] getRootsUnderVcs(@NotNull AbstractVcs vcs);
/**
 * Also includes into list all modules under roots
 */
public abstract List<VirtualFile> getDetailedVcsMappings(final AbstractVcs vcs);
/** @return all roots that are under any version control */
public abstract VirtualFile[] getAllVersionedRoots();
/** @return every configured (root, vcs) pair in the project */
@NotNull
public abstract VcsRoot[] getAllVcsRoots();
/**
* @deprecated Use just {@link #setDirectoryMappings(List)}.
*/
@Deprecated
public void updateActiveVcss() {} // deprecated no-op retained for compatibility; see the Javadoc above
/** @return all configured directory-to-VCS mappings */
public abstract List<VcsDirectoryMapping> getDirectoryMappings();
/** @return the directory mappings that point at the given VCS */
public abstract List<VcsDirectoryMapping> getDirectoryMappings(AbstractVcs vcs);
/** @return the mapping covering the given path, or null when none applies */
@Nullable
public abstract VcsDirectoryMapping getDirectoryMappingFor(FilePath path);
/**
 * This method can be used only when initially loading the project configuration!
 */
public abstract void setDirectoryMapping(final String path, final String activeVcsName);
/** Replaces the complete set of directory mappings. */
public abstract void setDirectoryMappings(final List<VcsDirectoryMapping> items);
/** Iterates the files under the given VCS root, feeding each path to the processor. */
public abstract void iterateVcsRoot(final VirtualFile root, final Processor<? super FilePath> iterator);
/**
 * Same as {@link #iterateVcsRoot(VirtualFile, Processor)} but allows skipping
 * whole directories via the supplied filter.
 */
public abstract void iterateVcsRoot(final VirtualFile root, final Processor<? super FilePath> iterator,
@Nullable VirtualFileFilter directoryFilter);
/**
 * @return the VCS responsible for versioning the given file, or null if none —
 *         TODO confirm how this differs from the directory-mapping lookup
 */
@Nullable
public abstract AbstractVcs findVersioningVcs(VirtualFile file);
/** @return the shared listener notified about checkout completion */
public abstract CheckoutProvider.Listener getCompositeCheckoutListener();
/** @return the project-wide cache of VCS file history */
public abstract VcsHistoryCache getVcsHistoryCache();
/** @return the project-wide cache of file content revisions */
public abstract ContentRevisionCache getContentRevisionCache();
/** @return true if the file belongs to project content under VCS — TODO confirm exact scope */
public abstract boolean isFileInContent(final VirtualFile vf);
/** @return true if the file is ignored by the VCS configuration */
public abstract boolean isIgnored(@NotNull VirtualFile vf);
/** @return the listener aggregating local changes for annotation updates */
@NotNull
public abstract VcsAnnotationLocalChangesListener getAnnotationLocalChangesListener();
}
|
package org.actioncontroller.meta;
/**
 * Marker interface for filter handlers.
 * No methods are declared; implementations are presumably discovered and
 * dispatched by the framework — TODO confirm against actioncontroller usage.
 */
public interface FilterHandler {
}
|
package com.ketnoiso.upload;
import java.io.File;
import java.io.IOException;
/**
* Created by IT on 3/18/2015.
*/
public class App {
public static void main(String[] args) throws IOException {
File f = File.createTempFile("bac",".tmp");
System.out.println(f.getAbsolutePath());
}
}
|
package com.zoho.crm.api.pipeline;
import com.zoho.crm.api.util.Choice;
import com.zoho.crm.api.util.Model;
import java.util.HashMap;
import java.util.Map;
/**
 * Models a successful response for a pipeline API action.
 * <p>
 * Each setter records its key in a modification map so that callers can
 * distinguish explicitly-set properties from untouched ones.
 */
public class SuccessResponse implements Model, TransferActionResponse, ActionResponse
{
    private Choice<String> status;
    private Choice<String> code;
    private Choice<String> message;
    private Map<String, Object> details;
    private HashMap<String, Integer> keyModified = new HashMap<>();

    /** @return the status value, or null if it was never set */
    public Choice<String> getStatus()
    {
        return this.status;
    }

    /**
     * Sets the status and records the key as modified.
     *
     * @param status An instance of Choice&lt;String&gt;
     */
    public void setStatus(Choice<String> status)
    {
        this.status = status;
        markModified("status");
    }

    /** @return the code value, or null if it was never set */
    public Choice<String> getCode()
    {
        return this.code;
    }

    /**
     * Sets the code and records the key as modified.
     *
     * @param code An instance of Choice&lt;String&gt;
     */
    public void setCode(Choice<String> code)
    {
        this.code = code;
        markModified("code");
    }

    /** @return the message value, or null if it was never set */
    public Choice<String> getMessage()
    {
        return this.message;
    }

    /**
     * Sets the message and records the key as modified.
     *
     * @param message An instance of Choice&lt;String&gt;
     */
    public void setMessage(Choice<String> message)
    {
        this.message = message;
        markModified("message");
    }

    /** @return the details map, or null if it was never set */
    public Map<String, Object> getDetails()
    {
        return this.details;
    }

    /**
     * Sets the details map and records the key as modified.
     *
     * @param details An instance of Map&lt;String,Object&gt;
     */
    public void setDetails(Map<String, Object> details)
    {
        this.details = details;
        markModified("details");
    }

    /**
     * Looks up the modification marker for the given key.
     *
     * @param key A String representing the key
     * @return the recorded marker, or null when the key was never modified
     */
    public Integer isKeyModified(String key)
    {
        // HashMap.get already yields null for absent keys, so no containsKey guard is needed.
        return this.keyModified.get(key);
    }

    /**
     * Records an explicit modification marker for the given key.
     *
     * @param key A String representing the key
     * @param modification An Integer representing the modification
     */
    public void setKeyModified(String key, Integer modification)
    {
        this.keyModified.put(key, modification);
    }

    /** Marks the given key as modified by the user. */
    private void markModified(String key)
    {
        this.keyModified.put(key, 1);
    }
}
|
package com.adv_3_4;
import com.facebook.react.ReactActivity;
public class MainActivity extends ReactActivity {
/**
 * Returns the name of the main component registered from JavaScript. This is used to schedule
 * rendering of the component.
 *
 * @return the JS component name ("adv_3_4"); must match the name passed to
 *         AppRegistry.registerComponent on the JavaScript side
 */
@Override
protected String getMainComponentName() {
return "adv_3_4";
}
}
|
package com.oathsign.example.demain;
import org.springframework.data.repository.Repository;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Spring Data repository for {@code Customer} entities keyed by String id.
 */
@Component
public interface CustomerRepository extends Repository<Customer, String> {
/**
 * Finds all customers with the given name (query derived from the method
 * name by Spring Data).
 */
List<Customer> findByName(String name);
/** Persists the given customer. */
void save(Customer customer);
}
|
package com.sphenon.basics.data;
/****************************************************************************
Copyright 2001-2018 Sphenon GmbH
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
*****************************************************************************/
import com.sphenon.basics.context.*;
import com.sphenon.basics.retriever.*;
import com.sphenon.basics.many.tplinst.*;
/**
 * A data sink whose accepted values are constrained to a set of choices.
 *
 * @param <T> the type of data handled by this sink
 */
public interface DataSink_WithChoiceSet<T> {
/**
 * Returns a retriever that yields the set of valid choices for this sink.
 *
 * @param context the current call context
 * @return the Retriever producing the choice set
 */
public Retriever getChoiceSetRetriever(CallContext context);
}
|
package cn.codepub.algorithms.strings;
/**
 * Combination: choosing n elements (n &lt;= m) out of m distinct elements,
 * ignoring order. Demonstrates two enumeration strategies: recursive
 * take-or-skip selection, and bitmap (power-set) enumeration.
 *
 * Created with IntelliJ IDEA. 2015/10/30 21:43
 *
 * @author Wang Xu
 * @version V1.0.0
 * @since V1.0.0
 */
public class Combination {
    // Shared buffer holding the characters selected so far on the current
    // recursion path; restored to its pre-call state when combination() returns.
    static StringBuilder sb = new StringBuilder();

    public static void main(String[] args) {
        char[] strs = new char[]{'a', 'b', 'c', 'd', 'e'};
        // Print all k-combinations for every k from 1 to strs.length.
        for (int i = 1; i <= strs.length; i++) {
            combination(strs, 0, i);
        }
        System.out.println("测试第二种方法");
        combination();
    }

    /**
     * Method 1: recursive take-or-skip selection. Prints every combination of
     * {@code number} characters chosen from {@code strs[start..]}, one per line.
     *
     * @param strs   the characters to choose from
     * @param start  index of the first candidate character
     * @param number how many more characters still need to be chosen
     */
    public static void combination(char[] strs, int start, int number) {
        if (number <= 0) {
            // A complete combination has been collected; print it.
            System.out.println(sb.toString());
            return;
        }
        if (start >= strs.length) {
            // Not enough characters remain to complete the combination.
            // (The original printed the partial buffer here, emitting
            // incomplete combinations whenever number > strs.length.)
            return;
        }
        // Case 1: take the current character, then choose number-1 from the rest.
        sb.append(strs[start]);
        combination(strs, start + 1, number - 1);
        sb.deleteCharAt(sb.length() - 1);
        // Case 2: skip the current character - only worthwhile if enough
        // characters remain after start+1 to still collect `number` of them.
        if (strs.length - start > number) {
            combination(strs, start + 1, number);
        }
    }

    /**
     * Method 2: bitmap enumeration. For n elements there are 2^n subsets; each
     * integer in [0, 2^n) is treated as a bitmap where a set bit j means
     * element j is included. Prints every subset (including the empty one) in
     * increasing bitmap order: (empty), a, b, ab, c, ac, bc, abc.
     */
    public static void combination() {
        String[] str = {"a", "b", "c"};
        int n = str.length; // number of elements
        int nbit = 1 << n;  // total number of subsets: 2^n
        for (int i = 0; i < nbit; i++) {
            for (int j = 0; j < n; j++) {
                // Include element j iff bit j of the bitmap i is set.
                int tmp = 1 << j;
                if ((tmp & i) != 0) {
                    System.out.print(str[j]);
                }
            }
            System.out.println();
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.registry.client.types;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * Names of the well-known endpoint address types.
 * These are plain strings rather than a Java enum so that records
 * serialize identically across platforms as JSON.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface AddressTypes {

  /**
   * Hostname/FQDN and port pair: {@value}.
   * The host/domain name and the port are stored as two separate strings in
   * the address list, e.g. {@code ["namenode.example.org", "50070"]}.
   */
  String ADDRESS_HOSTNAME_AND_PORT = "host/port";

  /** Field name of the host part of a host/port address: {@value}. */
  String ADDRESS_HOSTNAME_FIELD = "host";

  /** Field name of the port part of a host/port address: {@value}. */
  String ADDRESS_PORT_FIELD = "port";

  /**
   * Path style, e.g. {@code /a/b/c}: {@value}.
   * The entire path is encoded in a single entry:
   * {@code ["/users/example/dataset"]}.
   */
  String ADDRESS_PATH = "path";

  /**
   * URI entries: {@value}, e.g. {@code ["http://example.org"]}.
   */
  String ADDRESS_URI = "uri";

  /**
   * ZooKeeper address triples: {@value}.
   * <p>
   * Each entry is a 3-element tuple of hostname, port and (optionally,
   * depending on the application) a path. A single element would be
   * <pre>
   *   ["zk1","2181","/registry"]
   * </pre>
   * An endpoint with multiple ensemble members lists one tuple per member:
   * <pre>
   *   [
   *     ["zk1","2181","/registry"]
   *     ["zk2","1600","/registry"]
   *   ]
   * </pre>
   * The third element of each entry, the path, MUST be identical in every
   * entry: a client reading the endpoint is free to pick any member of the
   * set, so they must all agree.
   */
  String ADDRESS_ZOOKEEPER = "zktriple";

  /** Any other address type: {@value}. */
  String ADDRESS_OTHER = "";
}
|
package com.clx4399.gulimall.order.listener;
import com.alipay.api.AlipayApiException;
import com.alipay.api.internal.util.AlipaySignature;
import com.clx4399.gulimall.order.config.AlipayTemplate;
import com.clx4399.gulimall.order.service.OmsOrderService;
import com.clx4399.gulimall.order.vo.PayAsyncVo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
 * Order payment listener: receives Alipay's asynchronous payment
 * notification, verifies its signature and hands the payload to the order
 * service for processing.
 *
 * @author WhtCl
 * @program: gulimall
 * @description: order payment notification listener
 * @date 2021-09-02 20:44:34
 */
@RestController
public class OrderPayedListener {

    @Autowired
    OmsOrderService orderService;

    @Autowired
    AlipayTemplate alipayTemplate;

    /**
     * Alipay asynchronous notification callback.
     *
     * @param vo      the parsed notification payload
     * @param request the raw request, used to rebuild the parameter map for
     *                signature verification
     * @return the order service's result on success, "error" when the
     *         signature check fails
     * @throws AlipayApiException if the SDK signature verification fails internally
     */
    @PostMapping("/payed/notify")
    public String orderPayedCheck(PayAsyncVo vo, HttpServletRequest request) throws AlipayApiException {
        // Flatten the servlet parameter map (String -> String[]) into the
        // String -> String form the Alipay verifier expects, joining multiple
        // values with commas exactly as the original manual loop did.
        Map<String, String> params = new HashMap<>();
        for (Map.Entry<String, String[]> entry : request.getParameterMap().entrySet()) {
            params.put(entry.getKey(), String.join(",", entry.getValue()));
        }
        // Call the SDK to verify the notification really came from Alipay.
        boolean signVerified = AlipaySignature.rsaCheckV1(params, alipayTemplate.getAlipay_public_key(), alipayTemplate.getCharset(), alipayTemplate.getSign_type());
        if (signVerified) {
            // Signature verified successfully.
            System.out.println("签名验证成功...");
            String result = orderService.handlePayResult(vo);
            return result;
        } else {
            System.out.println("签名验证失败...");
            return "error";
        }
    }
}
|
package wh1spr.bot.commands.points.util;
import wh1spr.bot.command.Command;
/**
 * Base class for bot commands that operate on point types.
 * Guarantees a non-null PointTypeManager for all subclasses and disables
 * the maelstrom-only restriction by default.
 */
public abstract class PointsCommand extends Command {

    private final PointTypeManager tm;

    /**
     * @param name    primary command name
     * @param tm      the point type manager shared by point commands; never null
     * @param aliases alternative command names
     * @throws IllegalArgumentException if {@code tm} is null
     */
    public PointsCommand(String name, PointTypeManager tm, String... aliases) {
        super(name, aliases);
        if (tm == null) {
            throw new IllegalArgumentException("PointTypeManager tm cannot be null.");
        }
        this.tm = tm;
        this.setMaelstromOnly(false);
    }

    /** @return the point type manager supplied at construction */
    protected PointTypeManager getTypeManager() {
        return this.tm;
    }
}
|
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.lang.function;
/**
 * A function taking ten arguments and producing a result.
 *
 * @param <R> the result type
 * @param <P0> the type of argument 0; <P1>..<P9> likewise for arguments 1-9
 */
@FunctionalInterface
public interface IFunction10<R, P0, P1, P2, P3, P4, P5, P6, P7, P8, P9> {
/** Applies this function to the given ten arguments and returns the result. */
R invoke(P0 arg0, P1 arg1, P2 arg2, P3 arg3, P4 arg4, P5 arg5, P6 arg6, P7 arg7, P8 arg8, P9 arg9);
}
|
package com.prowidesoftware.swift.model.mx;
import com.prowidesoftware.swift.model.mx.dic.*;
import com.prowidesoftware.swift.model.mx.AbstractMX;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import com.prowidesoftware.swift.model.MxSwiftMessage;
import com.prowidesoftware.swift.model.mx.AbstractMX;
import com.prowidesoftware.swift.model.mx.MxRead;
import com.prowidesoftware.swift.model.mx.MxReadImpl;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
/**
 * Class for seev.042.002.02 ISO 20022 message.
 * <p>
 * Wraps the CorporateActionInstructionStatementReport document (business
 * process "seev", functionality 042, variant 002, version 02) and follows the
 * generated-code conventions shared by the other MX message classes.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Document", propOrder = {
    "corpActnInstrStmtRpt"
})
@XmlRootElement(name = "Document", namespace = "urn:swift:xsd:seev.042.002.02")
public class MxSeev04200202
    extends AbstractMX
{

    // The single top-level element of the Document.
    @XmlElement(name = "CorpActnInstrStmtRpt", required = true)
    protected CorporateActionInstructionStatementReportV02Subset corpActnInstrStmtRpt;

    // ISO 20022 message-type identification constants.
    public final static transient String BUSINESS_PROCESS = "seev";
    public final static transient int FUNCTIONALITY = 42;
    public final static transient int VARIANT = 2;
    public final static transient int VERSION = 2;

    // All JAXB-bound classes reachable from this message type; handed to the
    // parser so the complete object graph can be unmarshalled.
    @SuppressWarnings("rawtypes")
    public final static transient Class[] _classes = new Class[] {AccountIdentification22 .class, BalanceFormat2Choice.class, CorporateActionBalanceDetails8 .class, CorporateActionDeadline1Code.class, CorporateActionEventAndBalance4 .class, CorporateActionEventType4Choice.class, CorporateActionEventType6Code.class, CorporateActionInstructionStatementReportV02Subset.class, CorporateActionMandatoryVoluntary1Code.class, CorporateActionMandatoryVoluntary2Choice.class, CorporateActionOption2Code.class, CorporateActionOption9Choice.class, CorporateActionStatementReportingType1Code.class, CorporateActionStatementType1Code.class, DateAndDateTimeChoice.class, DateOrDateTimePeriodChoice.class, DatePeriodDetails.class, DateTimePeriodDetails.class, DeadlineCode2Choice.class, DefaultProcessingOrStandingInstruction1Choice.class, DeliveryReceiptType2Code.class, EventFrequency4Code.class, EventInformation2 .class, FinancialInstrumentQuantity15Choice.class, Frequency6Choice.class, GenericIdentification24 .class, GenericIdentification25 .class, GenericIdentification26 .class, IdentificationSource3Choice.class, InstructedBalanceDetails2 .class, InstructedCorporateActionOption2 .class, MxSeev04200202 .class, NotificationIdentification2 .class, OriginalAndCurrentQuantities3 .class, OtherIdentification1 .class, Pagination.class, PartyIdentification51Choice.class, PendingBalance2 .class, ProprietaryQuantity4 .class, ProprietaryQuantity5 .class, Quantity7Choice.class, Quantity8Choice.class, Quantity9Choice.class, SafekeepingPlace1Code.class, SafekeepingPlace2Code.class, SafekeepingPlaceFormat5Choice.class, SafekeepingPlaceTypeAndAnyBICIdentifier1 .class, SafekeepingPlaceTypeAndText5 .class, SecurityIdentification14 .class, SettlementTypeAndIdentification5 .class, ShortLong1Code.class, SignedQuantityFormat3 .class, SignedQuantityFormat4 .class, Statement30 .class, StatementUpdateType1Code.class, SupplementaryData1 .class, SupplementaryDataEnvelope1 .class, UnderlyingSecurity3 .class, 
        UpdateType3Choice.class };

    public final static transient String NAMESPACE = "urn:swift:xsd:seev.042.002.02";

    public MxSeev04200202() {
        super();
    }

    /**
     * Creates the MX object parsing the parameter String with the XML content
     */
    public MxSeev04200202(final String xml) {
        this();
        MxSeev04200202 tmp = parse(xml);
        corpActnInstrStmtRpt = tmp.getCorpActnInstrStmtRpt();
    }

    /**
     * Creates the MX object parsing the raw content from the parameter MxSwiftMessage
     */
    public MxSeev04200202(final MxSwiftMessage mxSwiftMessage) {
        this(mxSwiftMessage.message());
    }

    /**
     * Gets the value of the corpActnInstrStmtRpt property.
     *
     * @return
     *     possible object is
     *     {@link CorporateActionInstructionStatementReportV02Subset }
     *
     */
    public CorporateActionInstructionStatementReportV02Subset getCorpActnInstrStmtRpt() {
        return corpActnInstrStmtRpt;
    }

    /**
     * Sets the value of the corpActnInstrStmtRpt property.
     *
     * @param value
     *     allowed object is
     *     {@link CorporateActionInstructionStatementReportV02Subset }
     * @return this instance, enabling fluent call chaining
     *
     */
    public MxSeev04200202 setCorpActnInstrStmtRpt(CorporateActionInstructionStatementReportV02Subset value) {
        this.corpActnInstrStmtRpt = value;
        return this;
    }

    // Reflection-based implementations keep these consistent with the fields.
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
    }

    @Override
    public boolean equals(Object that) {
        return EqualsBuilder.reflectionEquals(this, that);
    }

    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }

    @Override
    public String getBusinessProcess() {
        return BUSINESS_PROCESS;
    }

    @Override
    public int getFunctionality() {
        return FUNCTIONALITY;
    }

    @Override
    public int getVariant() {
        return VARIANT;
    }

    @Override
    public int getVersion() {
        return VERSION;
    }

    /**
     * Creates the MX object parsing the raw content from the parameter XML
     */
    public static MxSeev04200202 parse(String xml) {
        return ((MxSeev04200202) MxReadImpl.parse(MxSeev04200202 .class, xml, _classes));
    }

    /**
     * Creates the MX object parsing the raw content from the parameter XML with injected read implementation
     * @since 9.0.1
     *
     * @param parserImpl an MX unmarshall implementation
     */
    public static MxSeev04200202 parse(String xml, MxRead parserImpl) {
        return ((MxSeev04200202) parserImpl.read(MxSeev04200202 .class, xml, _classes));
    }

    @Override
    public String getNamespace() {
        return NAMESPACE;
    }

    @Override
    @SuppressWarnings("rawtypes")
    public Class[] getClasses() {
        return _classes;
    }

    /**
     * Creates an MxSeev04200202 messages from its JSON representation.
     * <p>
     * For generic conversion of JSON into the corresponding MX instance
     * see {@link AbstractMX#fromJson(String)}
     *
     * @since 7.10.2
     *
     * @param json a JSON representation of an MxSeev04200202 message
     * @return
     *     a new instance of MxSeev04200202
     */
    public final static MxSeev04200202 fromJson(String json) {
        return AbstractMX.fromJson(json, MxSeev04200202 .class);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.avro;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;
import org.apache.avro.JsonProperties;
import org.apache.avro.Schema;
import org.apache.iceberg.MetadataColumns;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
/**
* Renames and aliases fields in an Avro schema based on the current table schema.
* <p>
* This class creates a read schema based on an Avro file's schema that will correctly translate
* from the file's field names to the current table schema.
* <p>
* This will also rename records in the file's Avro schema to support custom read classes.
*/
class BuildAvroProjection extends AvroCustomOrderSchemaVisitor<Schema, Schema.Field> {
  // Map of full record name -> replacement record name, used to rename
  // records (e.g. to custom read classes) while projecting.
  private final Map<String, String> renames;

  // The expected (table-side) type corresponding to the schema node being
  // visited; updated before descending into children and restored afterwards.
  private Type current;

  BuildAvroProjection(org.apache.iceberg.Schema expectedSchema, Map<String, String> renames) {
    this.renames = renames;
    this.current = expectedSchema.asStruct();
  }

  BuildAvroProjection(Type expectedType, Map<String, String> renames) {
    this.renames = renames;
    this.current = expectedType;
  }

  /**
   * Projects a record schema onto the expected struct type: detects renames,
   * reorders fields to the expected order, and fills in any missing optional
   * fields with null-defaulted placeholder fields.
   */
  @Override
  @SuppressWarnings("checkstyle:CyclomaticComplexity")
  public Schema record(Schema record, List<String> names, Iterable<Schema.Field> schemaIterable) {
    Preconditions.checkArgument(
        current.isNestedType() && current.asNestedType().isStructType(),
        "Cannot project non-struct: %s", current);
    Types.StructType struct = current.asNestedType().asStructType();
    boolean hasChange = false;
    List<Schema.Field> fields = record.getFields();
    List<Schema.Field> fieldResults = Lists.newArrayList(schemaIterable);
    // Index the projected fields by name; track whether any field changed.
    Map<String, Schema.Field> updateMap = Maps.newHashMap();
    for (int i = 0; i < fields.size(); i += 1) {
      Schema.Field field = fields.get(i);
      Schema.Field updatedField = fieldResults.get(i);
      if (updatedField != null) {
        updateMap.put(updatedField.name(), updatedField);
        if (!updatedField.schema().equals(field.schema()) ||
            !updatedField.name().equals(field.name())) {
          hasChange = true;
        }
      } else {
        hasChange = true; // column was not projected
      }
    }
    // construct the schema using the expected order
    List<Schema.Field> updatedFields = Lists.newArrayListWithExpectedSize(struct.fields().size());
    List<Types.NestedField> expectedFields = struct.fields();
    for (int i = 0; i < expectedFields.size(); i += 1) {
      Types.NestedField field = expectedFields.get(i);
      // detect reordering
      if (i < fields.size() && !field.name().equals(fields.get(i).name())) {
        hasChange = true;
      }
      Schema.Field avroField = updateMap.get(AvroSchemaUtil.makeCompatibleName(field.name()));
      if (avroField != null) {
        updatedFields.add(avroField);
      } else {
        // The expected field is absent from the file schema; it must be
        // optional (or a metadata column) to be safely null-filled.
        Preconditions.checkArgument(
            field.isOptional() || MetadataColumns.metadataFieldIds().contains(field.fieldId()),
            "Missing required field: %s", field.name());
        // Create a field that will be defaulted to null. We assign a unique suffix to the field
        // to make sure that even if records in the file have the field it is not projected.
        Schema.Field newField = new Schema.Field(
            field.name() + "_r" + field.fieldId(),
            AvroSchemaUtil.toOption(AvroSchemaUtil.convert(field.type())), null, JsonProperties.NULL_VALUE);
        newField.addProp(AvroSchemaUtil.FIELD_ID_PROP, field.fieldId());
        updatedFields.add(newField);
        hasChange = true;
      }
    }
    if (hasChange || renames.containsKey(record.getFullName())) {
      return AvroSchemaUtil.copyRecord(record, updatedFields, renames.get(record.getFullName()));
    }
    return record;
  }

  /**
   * Projects a single field: returns null when the field was not selected by
   * the expected schema, otherwise a copy of the field, aliased to the
   * expected (possibly renamed) name.
   */
  @Override
  public Schema.Field field(Schema.Field field, Supplier<Schema> fieldResult) {
    Types.StructType struct = current.asNestedType().asStructType();
    int fieldId = AvroSchemaUtil.getFieldId(field);
    Types.NestedField expectedField = struct.field(fieldId);
    // if the field isn't present, it was not selected
    if (expectedField == null) {
      return null;
    }
    String expectedName = expectedField.name();
    // Descend with the child's expected type; restored in the finally block.
    this.current = expectedField.type();
    try {
      Schema schema = fieldResult.get();
      if (!Objects.equals(schema, field.schema()) || !expectedName.equals(field.name())) {
        // add an alias for the field
        return AvroSchemaUtil.copyField(field, schema, AvroSchemaUtil.makeCompatibleName(expectedName));
      } else {
        // always copy because fields can't be reused
        return AvroSchemaUtil.copyField(field, field.schema(), field.name());
      }
    } finally {
      this.current = struct;
    }
  }

  /** Projects an optional (null, T) union; non-option unions are rejected. */
  @Override
  public Schema union(Schema union, Iterable<Schema> options) {
    Preconditions.checkState(AvroSchemaUtil.isOptionSchema(union),
        "Invalid schema: non-option unions are not supported: %s", union);
    Schema nonNullOriginal = AvroSchemaUtil.fromOption(union);
    Schema nonNullResult = AvroSchemaUtil.fromOptions(Lists.newArrayList(options));
    if (!Objects.equals(nonNullOriginal, nonNullResult)) {
      return AvroSchemaUtil.toOption(nonNullResult);
    }
    return union;
  }

  /**
   * Projects an array schema. Arrays of key/value records that represent maps
   * (tagged with LogicalMap, or key/value-shaped when a map type is expected)
   * are rebuilt as projection maps; plain lists just project their element.
   */
  @Override
  public Schema array(Schema array, Supplier<Schema> element) {
    if (array.getLogicalType() instanceof LogicalMap ||
        (current.isMapType() && AvroSchemaUtil.isKeyValueSchema(array.getElementType()))) {
      Preconditions.checkArgument(current.isMapType(), "Incompatible projected type: %s", current);
      Types.MapType asMapType = current.asNestedType().asMapType();
      this.current = Types.StructType.of(asMapType.fields()); // create a struct to correspond to element
      try {
        Schema keyValueSchema = array.getElementType();
        Schema.Field keyField = keyValueSchema.getFields().get(0);
        Schema.Field valueField = keyValueSchema.getFields().get(1);
        Schema.Field valueProjection = element.get().getField("value");
        // element was changed, create a new array
        if (!Objects.equals(valueProjection.schema(), valueField.schema())) {
          return AvroSchemaUtil.createProjectionMap(keyValueSchema.getFullName(),
              AvroSchemaUtil.getFieldId(keyField), keyField.name(), keyField.schema(),
              AvroSchemaUtil.getFieldId(valueField), valueField.name(), valueProjection.schema());
        } else if (!(array.getLogicalType() instanceof LogicalMap)) {
          return AvroSchemaUtil.createProjectionMap(keyValueSchema.getFullName(),
              AvroSchemaUtil.getFieldId(keyField), keyField.name(), keyField.schema(),
              AvroSchemaUtil.getFieldId(valueField), valueField.name(), valueField.schema());
        }
        return array;
      } finally {
        this.current = asMapType;
      }
    } else {
      Preconditions.checkArgument(current.isListType(),
          "Incompatible projected type: %s", current);
      Types.ListType list = current.asNestedType().asListType();
      this.current = list.elementType();
      try {
        Schema elementSchema = element.get();
        // element was changed, create a new array
        if (!Objects.equals(elementSchema, array.getElementType())) {
          return AvroSchemaUtil.replaceElement(array, elementSchema);
        }
        return array;
      } finally {
        this.current = list;
      }
    }
  }

  /** Projects a string-keyed map by projecting its value schema. */
  @Override
  public Schema map(Schema map, Supplier<Schema> value) {
    Preconditions.checkArgument(current.isNestedType() && current.asNestedType().isMapType(),
        "Incompatible projected type: %s", current);
    Types.MapType asMapType = current.asNestedType().asMapType();
    Preconditions.checkArgument(asMapType.keyType() == Types.StringType.get(),
        "Incompatible projected type: key type %s is not string", asMapType.keyType());
    this.current = asMapType.valueType();
    try {
      Schema valueSchema = value.get();
      // element was changed, create a new map
      if (!Objects.equals(valueSchema, map.getValueType())) {
        return AvroSchemaUtil.replaceValue(map, valueSchema);
      }
      return map;
    } finally {
      this.current = asMapType;
    }
  }

  /**
   * Handles primitive schemas, applying the allowed type promotions
   * (int -> long, float -> double) when the expected type is wider.
   */
  @Override
  public Schema primitive(Schema primitive) {
    // check for type promotion
    switch (primitive.getType()) {
      case INT:
        if (current.typeId() == Type.TypeID.LONG) {
          return Schema.create(Schema.Type.LONG);
        }
        return primitive;
      case FLOAT:
        if (current.typeId() == Type.TypeID.DOUBLE) {
          return Schema.create(Schema.Type.DOUBLE);
        }
        return primitive;
      default:
        return primitive;
    }
  }
}
|
package example;
import com.github.dr.rwserver.data.Player;
import com.github.dr.rwserver.data.global.Data;
import com.github.dr.rwserver.func.StrCons;
import com.github.dr.rwserver.game.EventType;
import com.github.dr.rwserver.plugin.Plugin;
import com.github.dr.rwserver.plugin.event.AbstractEvent;
import com.github.dr.rwserver.util.Time;
import com.github.dr.rwserver.util.game.CommandHandler;
import com.github.dr.rwserver.util.game.Events;
import com.github.dr.rwserver.util.log.Log;
import java.util.Arrays;
/**
* @author Dr
*/
public class Main extends Plugin {
/**
* 这里主要做初始化
*/
@Override
public void init(){
//监听玩家进入
Events.on(EventType.PlayerJoinEvent.class, event -> {
event.getPlayer().sendSystemMessage("Plugin测试 这是进入的时间 "+ Time.getUtcMilliFormat(1));
});
//过滤消息
Data.core.admin.addChatFilter((player, text) -> text.replace("heck", "h*ck"));
//动作过滤正在进行中
/**
* java.io.EOFException
* at java.base/java.util.zip.GZIPInputStream.readUByte(GZIPInputStream.java:268)
* at java.base/java.util.zip.GZIPInputStream.readUShort(GZIPInputStream.java:258)
* at java.base/java.util.zip.GZIPInputStream.readHeader(GZIPInputStream.java:164)
* at java.base/java.util.zip.GZIPInputStream.<init>(GZIPInputStream.java:79)
* at java.base/java.util.zip.GZIPInputStream.<init>(GZIPInputStream.java:91)
* at com.github.dr.rwserver.util.zip.gzip.GzipDecoder.getGzipInputStream(GzipDecoder.java:23)
* at com.github.dr.rwserver.data.plugin.AbstractPluginData.read(AbstractPluginData.java:84)
* at com.github.dr.rwserver.data.plugin.AutoSavePluginData.read(AutoSavePluginData.java:6)
* at com.github.dr.rwserver.plugin.PluginsLoad$PluginLoadData.<init>(PluginsLoad.java:107)
* at com.github.dr.rwserver.plugin.PluginsLoad.loadJar(PluginsLoad.java:52)
* at com.github.dr.rwserver.plugin.PluginsLoad.resultPluginData(PluginsLoad.java:29)
* at com.github.dr.rwserver.data.plugin.PluginManage.init(PluginManage.java:27)
* at com.github.dr.rwserver.Main.main(Main.java:79)
* 这个是正常的 正在想办法解决
*/
//读取数据
long lastStartTime = this.pluginData.getData("lastStartTime",Time.concurrentMillis());
String lastStartTimeString = this.pluginData.getData("lastStartTimeString",Time.getUtcMilliFormat(1));
Log.info("lastStartTime",lastStartTime);
Log.info("lastStartTimeString",lastStartTimeString);
this.pluginData.setData("lastStartTime",Time.concurrentMillis());
this.pluginData.setData("lastStartTimeString",Time.getUtcMilliFormat(1));
}
@Override
public AbstractEvent registerEvents(){
return new Event();
}
//注册服务器命令
@Override
public void registerServerCommands(CommandHandler handler){
handler.<StrCons>register("hi", "#这是Server命令简介", (arg, log) -> {
log.get("hi");
});
handler.<StrCons>register("arg","<这是必填> [这是选填]", "#这是Server命令简介", (arg, log) -> {
log.get(Arrays.toString(arg));
});
handler.<StrCons>register("args","<这是必填...>", "#这是Server命令简介", (arg, log) -> {
log.get(arg[0]);
});
}
//注册客户端命令
@Override
public void registerClientCommands(CommandHandler handler){
//向自己回复消息
handler.<Player>register("reply", "<text...>", "#只取第一个回复.", (args, player) -> {
player.sendSystemMessage("你发的是: " + args[0]);
});
//向玩家发送
handler.<Player>register("whisper", "<player> <text...>", "#向另一个玩家发消息.", (args, player) -> {
//查找玩家
Player other = Data.playerGroup.find(p -> p.name.equalsIgnoreCase(args[0]));
if(other == null){
player.sendSystemMessage("找不到这个玩家!");
return;
}
//向玩家发消息
other.sendSystemMessage("玩家: " + player.name + " 向你发送: " + args[1]);
});
}
    /** Called when the plugin is unloaded; delegates cleanup to the base class. */
    @Override
    public void onDisable() {
        super.onDisable();
        // Custom teardown would go here; this example plugin has nothing extra to release.
    }
}
|
package de.mgpit.oracle.reports.plugin.commons;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Properties;
import com.sun.java.util.collections.Arrays;
import com.sun.java.util.collections.Comparator;
import com.sun.java.util.collections.Iterator;
import de.mgpit.oracle.reports.plugin.commons.driver.MQ.Configuration;
import junit.framework.TestCase;
/**
 * Unit tests for {@code Configuration}: URI round-tripping, equality semantics,
 * and query-parameter precedence for the queue manager.
 */
public class ConfigurationTest extends TestCase {

    /** URI literal shared by the positive round-trip tests. */
    private static final String WMQ_URI_LITERAL = "wmq://localhost:1414/dest/queue/QMGR.IN.QUEUE@QMGR?channelName=CHANNEL_1";

    protected void setUp() throws Exception {
        super.setUp();
    }

    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Parses the given literal into a {@link URI}, failing the test on syntax errors.
     */
    private URI parseOrFail( final String uriLiteral ) {
        URI parsed = null;
        boolean syntaxOK = true;
        try {
            parsed = new URI( uriLiteral );
        } catch ( URISyntaxException syntax ) {
            syntaxOK = false;
        }
        assertTrue( syntaxOK );
        return parsed;
    }

    /** A well-formed wmq URI must yield a non-null Configuration. */
    public void testFromURI() {
        URI wmqUri = parseOrFail( WMQ_URI_LITERAL );
        Configuration configurationToTest = Configuration.fromURI( wmqUri );
        assertNotNull( configurationToTest );
    }

    /** fromURI followed by toURI must reproduce the original URI. */
    public void testToURI() {
        URI wmqUri = parseOrFail( WMQ_URI_LITERAL );
        Configuration configurationToTest = Configuration.fromURI( wmqUri );
        assertEquals( wmqUri, configurationToTest.toURI() );
    }

    /** equals() must be commutative, reflexive, and transitive across construction styles. */
    public void testEqualsObject() {
        URI testUri = null;
        boolean syntaxOK = true;
        try {
            // Multi-part constructor instead of a single literal.
            testUri = new URI( "wmq", null, "localhost", 1414, "/dest/queue/QMGR.IN.QUEUE@QMGR", "channelName=CHANNEL_1", null );
        } catch ( URISyntaxException syntax ) {
            syntaxOK = false;
        }
        assertTrue( syntaxOK );
        Configuration c1 = Configuration.fromURI( testUri );
        Configuration c2 = Configuration.fromURI( testUri );
        Configuration c3 = new Configuration( "localhost", 1414, "QMGR", "CHANNEL_1", "QMGR.IN.QUEUE" );
        assertEquals( c1, c2 );
        assertEquals( c2, c1 ); // commutative
        assertEquals( c1, c1 ); // reflexive
        assertEquals( c2, c3 );
        assertEquals( c1, c3 ); // transitive
    }

    /** The connectQueueManager query parameter must override the @QMGR path suffix. */
    public void testQueryParameterOverridesWmqQmgr() {
        URI creationUri = parseOrFail(
                "wmq://localhost:1414/dest/queue/QMGR.IN.QUEUE@NONSENSE?connectQueueManager=QMGR&channelName=CHANNEL_1" );
        Configuration testConfiguration = Configuration.fromURI( creationUri );
        URI configurationsUri = testConfiguration.toURI();
        assertEquals( WMQ_URI_LITERAL, configurationsUri.toString() );
    }

    /** Mirrors the example code from the class Javadoc; prints equality results. */
    public void testJavadocExampleCode() throws URISyntaxException {
        URI uri = new URI( "wmq://localhost/dest/queue/QUEUE.IN@QMGR?channelName=CHANNEL_1" );
        Configuration c1 = Configuration.fromURI( uri );
        URI another = new URI( "wmq", null, "localhost", 1414, "/dest/queue/QUEUE.IN@QMGR", "channelName=CHANNEL_1", null );
        Configuration c2 = Configuration.fromURI( another );
        URI third = new URI( "wmq", null, "localhost", 1414, "/dest/queue/QUEUE.IN@FOO",
                "connectQueueManager=QMGR&channelName=CHANNEL_1", null );
        Configuration c3 = Configuration.fromURI( third );
        System.out.println( "C1 and C2 are " + (c1.equals( c2 ) ? "equal" : "different") );
        System.out.println( "C2 and C3 are " + (c2.equals( c3 ) ? "equal" : "different") );
        System.out.println( "C1 and C3 are " + (c1.equals( c3 ) ? "equal" : "different") );
    }

    /** Diagnostic helper: dumps all system properties, alphabetically, to standard out. */
    public void testDumpSystemProperties2StandardOut() {
        Properties systemProperties = System.getProperties();
        Object[] sortedKeys = systemProperties.keySet().toArray();
        // Raw Comparator: the file targets the pre-generics collections backport.
        Arrays.sort( sortedKeys, new Comparator() {
            public int compare( Object o1, Object o2 ) {
                String s1 = (String) o1; String s2 = (String) o2;
                return s1.compareTo( s2 );
            }
        });
        for ( int i = 0; i < sortedKeys.length; i++ ) {
            String key = (String) sortedKeys[i];
            String value = systemProperties.getProperty( key );
            System.out.println( U.rpad(i,3)+": " + U.lpad( key, 30 ) + " is " + value );
        }
    }
}
|
// EVMC: Ethereum Client-VM Connector API.
// Copyright 2019-2020 The EVMC Authors.
// Licensed under the Apache License, Version 2.0.
package org.ethereum.evmc;
import java.nio.ByteBuffer;
/**
 * Minimal stub {@link HostContext} for tests: every query returns a fixed
 * zero-filled 64-byte direct buffer (or a trivial scalar), and state-changing
 * calls are no-ops.
 */
class TestHostContext implements HostContext {

  /** Builds the canonical 64-byte zero answer used by all buffer-returning callbacks. */
  private static ByteBuffer zeroWord() {
    // Note: put(...) leaves the buffer position at 64, matching the original behaviour.
    return ByteBuffer.allocateDirect(64).put(new byte[64]);
  }

  @Override
  public boolean accountExists(byte[] address) {
    return true;
  }

  @Override
  public ByteBuffer getStorage(byte[] address, byte[] key) {
    return zeroWord();
  }

  @Override
  public int setStorage(byte[] address, byte[] key, byte[] value) {
    return 0;
  }

  @Override
  public ByteBuffer getBalance(byte[] address) {
    return zeroWord();
  }

  @Override
  public int getCodeSize(byte[] address) {
    return address.length;
  }

  @Override
  public ByteBuffer getCodeHash(byte[] address) {
    return zeroWord();
  }

  @Override
  public ByteBuffer getCode(byte[] address) {
    return zeroWord();
  }

  @Override
  public void selfdestruct(byte[] address, byte[] beneficiary) {}

  @Override
  public ByteBuffer call(ByteBuffer msg) {
    return zeroWord();
  }

  @Override
  public ByteBuffer getTxContext() {
    return zeroWord();
  }

  @Override
  public ByteBuffer getBlockHash(long number) {
    return zeroWord();
  }

  @Override
  public void emitLog(byte[] address, byte[] data, int dataSize, byte[][] topics, int topicCount) {}
}
|
package net.icelane.amplifire.analyzer.render.jgraph;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GraphicsConfiguration;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Stroke;
import java.awt.Transparency;
import java.awt.font.LineMetrics;
import java.awt.geom.Rectangle2D;
import java.awt.image.ConvolveOp;
import java.awt.image.Kernel;
import java.awt.image.VolatileImage;
import net.icelane.amplifire.analyzer.AudioGraph;
import net.icelane.amplifire.analyzer.render.GraphRender;
/**
* amplifier - Audio-Player Project
*
* @author Oliver Daus
* @version 2.4
*
* A simple panel to display Graphs
*/
public class JGraph extends GraphRender {
	/**
	 * Serial version UID for the Swing component hierarchy.
	 */
	private static final long serialVersionUID = -8370043378690135186L;
	// Double-buffering: render() draws into backBuffer, switchBuffers() promotes it
	// to screenBuffer, and paintComponent() blits screenBuffer to the screen.
	private VolatileImage backBuffer;
	private VolatileImage screenBuffer;
	// Graphics device the component was last seen on; used to detect monitor changes.
	private GraphicsDevice currentDevice = getDefaultGraphicsDevice();
	// Stroke for the graph line itself and a wide stroke used for the glow effect.
	private Stroke graphStroke = new BasicStroke(1f);
	private Stroke effetcStroke1 = new BasicStroke(15f);
	// private Stroke effetcStroke2 = new BasicStroke(10f);
	// private Stroke effetcStroke3 = new BasicStroke(6f);
	// private float heightLevel = 0.4f;
	// private int getZoomlLevel() = 1;
	//
	public JGraph() {
		super();
		// start() is inherited from GraphRender; presumably launches the render loop thread — confirm in base class.
		start();
	}
	//TODO: implement FPS lock
	public void paintComponent(Graphics graphics) {
		super.paintComponent(graphics);
		Graphics2D g = (Graphics2D) graphics;
		// Skip painting while the renderer is inactive or the component is not displayable.
		if (!isActive()) return;
		if(!this.isShowing()) return;
		try {
			// show the back backBuffer on the screen ...
			if (screenBuffer != null) g.drawImage(screenBuffer, 0, 0, null);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
	/**
	 * Obtain the default system graphics device.
	 *
	 * @return the default screen device of the local graphics environment
	 */
	public GraphicsDevice getDefaultGraphicsDevice(){
		return GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice();
	}
	/**
	 * Obtain the graphics device this component is currently displayed on,
	 * falling back to the default device when none of the screens contains it.
	 * @return Current graphics configuration
	 */
	public GraphicsDevice getCurrentGraphicsDevice(){
		for (GraphicsDevice graphicsDevice : GraphicsEnvironment
				.getLocalGraphicsEnvironment().getScreenDevices()){
			if (isOnGraphicsDevice(graphicsDevice)) return graphicsDevice;
		}
		return getDefaultGraphicsDevice();
	}
	/**
	 * Checks whether the component has moved to a different graphics device
	 * since the last call, updating the cached device as a side effect.
	 *
	 * @return true when the device changed, false otherwise
	 */
	public boolean hasGraphicsDeviceChanged(){
		GraphicsDevice device = getCurrentGraphicsDevice();
		if (currentDevice.equals(device)) return false;
		currentDevice = device;
		System.out.println("Graphics device changed: " + device.getIDstring());
		return true;
	}
	/**
	 * Tests whether this component's on-screen location lies within the
	 * bounds of the given graphics device.
	 *
	 * @param gd the device to test against
	 * @return true when the component's top-left corner is inside the device bounds
	 */
	public boolean isOnGraphicsDevice(GraphicsDevice gd){
		if(!this.isShowing()) return false;
		Rectangle r = gd.getDefaultConfiguration().getBounds();
		Point p = this.getLocationOnScreen();
		return r.contains(p);
	}
	/**
	 * Creates a new hardware-accelerated volatile image compatible with the
	 * current graphics configuration and clears it to fully transparent.
	 */
	private VolatileImage createBackBuffer(int width, int height, int transparency){
		// obtain the current system graphical settings
		GraphicsConfiguration gc = getGraphicsConfiguration();
		VolatileImage img = gc.createCompatibleVolatileImage(width, height, transparency);
		img.setAccelerationPriority(1f);
		return clearImage(img);
	}
	/**
	 * Erases the given image to full transparency using the DstOut composite.
	 */
	private VolatileImage clearImage(VolatileImage img){
		Graphics2D g = img.createGraphics();
		g.setComposite(AlphaComposite.DstOut);
		g.fillRect(0, 0, img.getWidth(), img.getHeight());
		g.dispose();
		return img;
	}
	/** Swaps the back buffer and the screen buffer (classic double-buffer flip). */
	private void switchBuffers() {
		VolatileImage buffer = screenBuffer;
		screenBuffer = backBuffer;
		backBuffer = buffer;
	}
	/**
	 * One render pass: (re)creates the back buffer when it is missing, stale,
	 * incompatible, or wrongly sized, then draws the current frame into it.
	 * Repeats while VolatileImage reports its contents lost, per the
	 * standard VolatileImage render-loop contract.
	 */
	private void render(){
		int height = this.getHeight();
		int width = this.getWidth();
		if(width <= 0 || height <= 0) return;
		try {
			do {
				// Back backBuffer doesn't exist ...
				if (backBuffer == null){
					backBuffer = createBackBuffer(width, height, Transparency.TRANSLUCENT);
				// The graphics device has been changed ...
				}else if (hasGraphicsDeviceChanged()){
					backBuffer = createBackBuffer(width, height, Transparency.TRANSLUCENT);
				// Back backBuffer doesn't work with new GraphicsConfig ...
				}else if (backBuffer.validate(getGraphicsConfiguration()) == VolatileImage.IMAGE_INCOMPATIBLE){
					backBuffer = createBackBuffer(width, height, Transparency.TRANSLUCENT);
				// Back backBuffer size doesn't match anymore ...
				}else if (width != backBuffer.getWidth() || height != backBuffer.getHeight()){
					backBuffer = createBackBuffer(width, height, Transparency.TRANSLUCENT);
				// Clear back backBuffer ...
				}else{
					clearImage(backBuffer);
				}
				// Rendering ...
				renderBackBuffer();
			} while (backBuffer.contentsLost());
		} catch (Exception e) {
			e.printStackTrace();
		}
		// if (isGlowEffect()) {
		// effectsImage = getLinearBlurOp(2, 2, .2f).filter(effectsImage, null);
		// }
		//
		// if(isBlurFilter()){
		// graphImage = getLinearBlurOp(2, 2, .6f).filter(graphImage, null);
		// }
		//show rendered image
		// if (screenBuffer == null) //clearImage(screenBuffer);
		// screenBuffer = backBuffer;
		// repaint();
	}
	/**
	 * Builds a normalized box-blur convolution (each kernel cell = 1/(width*hight)).
	 */
	public ConvolveOp getLinearBlurOp(int width, int hight) {
		float value = 1.0f / (float) (width * hight);
		return getLinearBlurOp(width, hight, value);
	}
	/**
	 * Builds a uniform convolution where every kernel cell holds the given value.
	 */
	public ConvolveOp getLinearBlurOp(int width, int hight, float value) {
		float[] data = new float[width * hight];
		for (int i = 0; i < data.length; i++) {
			data[i] = value;
		}
		return new ConvolveOp(new Kernel(width, hight, data), ConvolveOp.EDGE_ZERO_FILL, null);
	}
	/**
	 * Builds a uniform blur kernel where every cell holds the given value.
	 */
	public Kernel getBlurKernel(int width, int hight, float value) {
		float[] data = new float[width * hight];
		for (int i = 0; i < data.length; i++) {
			data[i] = value;
		}
		return new Kernel(width, hight, data);
	}
	/**
	 * Draws all graphs (background line, optional glow pass, main line, FPS
	 * overlay) into the back buffer.
	 * NOTE(review): getHeightLevel()/getZoomlLevel()/getGraphs()/isGlowEffect()
	 * are inherited from GraphRender and not visible here — semantics assumed
	 * from usage; confirm in the base class.
	 */
	public void renderBackBuffer(){
		Graphics2D g = (Graphics2D) backBuffer.getGraphics();
		g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
		//*** Mathematics ************************************
		int height = this.getHeight();
		int width = this.getWidth();
		// NOTE(review): adding getHeightLevel() (a scale factor elsewhere) to the
		// pixel height before halving looks suspicious — verify intended formula.
		int heightCenter = (Math.round(height + getHeightLevel())) >> 1;
		int maxPointCount = width * getZoomlLevel();
		for (int i = 0; i < getGraphs().size(); i++) {
			AudioGraph graph = getGraphs().get(i);
			// set graph positions ...
			if (getGraphs().size() > 1){
				int index = getGraphs().indexOf(graph);
				if (index == 0){
					graph.setYOffset( height / 4 );
				}
				if (index == 1){
					graph.setYOffset( (height / 4) * -1 );
				}
			}
			//sync graph buffer size ...
			graph.syncBufferSize(this.getWidth() * getZoomlLevel());
			//*** Mathematics ************************************
			int graphcenterY = heightCenter + graph.getYOffset();
			int minIndex = graph.size() - (maxPointCount);
			minIndex = (minIndex < 0 ? 0 : minIndex);
			//*** Background *************************************
			renderBackground(graph, g, graphcenterY);
			//TODO: only once ..
			//*** Glow effect ************************************
			if (isGlowEffect()) renderGraph(graph, g, effetcStroke1, 8, maxPointCount, heightCenter, graphcenterY, minIndex);
			//*** Graph ******************************************
			renderGraph(graph, g, graphStroke, 255, maxPointCount, heightCenter, graphcenterY, minIndex);
			//*** FPS ********************************************
			if (isShowFPS()){
				//set rendering hints ...
				g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
				g.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_LCD_HRGB);
				//draw label ...
				g.setColor(Color.darkGray);
				g.setFont(new Font(Font.SANS_SERIF, Font.BOLD, 15));
				g.drawString(String.format("%s", (Math.round(getFPS()))), 10, 20);
			}
		}
	}
	/**
	 * Draws a graph's center line and its name label on the right-hand edge.
	 */
	private void renderBackground(final AudioGraph graph, Graphics2D g, int graphcenterY){
		//metric of the label text
		LineMetrics metrics = g.getFontMetrics().getLineMetrics(graph.getName(), g);
		Rectangle2D bounds = g.getFontMetrics().getStringBounds(graph.getName(), g);
		//draw center line (stops short of the label when the graph has a name)
		g.setColor(Color.darkGray);
		g.setStroke(new BasicStroke(1f));
		g.drawLine(0, graphcenterY, (int) (this.getWidth() - bounds.getWidth() - (graph.getName().length() > 0 ? 10 : 0)), graphcenterY);
		g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
		g.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_LCD_HRGB);
		//draw label ...
		g.drawString(graph.getName(), (int) (this.getWidth() - bounds.getWidth() - 5), (int) (graphcenterY + metrics.getAscent() / 2));
		g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
	}
	/**
	 * Draws one pass of a graph's samples, in the currently selected draw mode,
	 * from minIndex up to the newest sample. The alpha parameter lets the same
	 * routine draw both the faint wide glow pass and the solid main line.
	 */
	private void renderGraph(final AudioGraph graph, Graphics2D g, Stroke stroke, int alpha, int maxPointCount, int heightCenter, int graphcenterY, int minIndex){
		if(graph.size() < 1) return;
		Color c = graph.getColor();
		c = new Color(c.getRed(), c.getGreen(), c.getBlue(), alpha);
		g.setColor(c);
		g.setStroke(stroke);
		//define first point ...
		int lastPoint_x = -1;
		int lastPoint_y = -1;
		//current point
		int point_x = 0;
		int point_y = 0;
		// detailC counts samples per pixel column; x advances every getZoomlLevel() samples.
		int detailC = 0;
		int lastI = minIndex;
		int s = graph.size();
		for (int i = minIndex; i < s; i++) {
			//calculate the y coordinates of the next point
			point_y = Math.round((graph.getValue(i) * (float)(heightCenter * getHeightLevel())) + heightCenter);
			//add Y-Offset ...
			point_y += graph.getYOffset();
			//calculate x coordinate;
			detailC++;
			if (detailC >= getZoomlLevel()) {
				point_x++;
				detailC = 0;
			}
			if (lastPoint_y >= 0){
				//draw a line from the last point to the current one ...
				if (getDrawMode() == DrawMode.STRAIGHT){
					g.drawLine(lastPoint_x, lastPoint_y, point_x, point_y);
				}
				if (getDrawMode() == DrawMode.DOTS){
					g.drawLine(point_x, point_y, point_x, point_y);
				}
				if (getDrawMode() == DrawMode.LINES){
					if (detailC == 0 && (i - getZoomlLevel() * 3) == lastI){
						lastI = i;
						g.drawLine(point_x, point_y - 16/2, point_x, point_y + 16/2);
					}
				}
				if (getDrawMode() == DrawMode.DOUBLE_LINES){
					if (detailC == 0 && (i - getZoomlLevel() * 3) == lastI){
						lastI = i;
						if(point_y <= graphcenterY){
							g.drawLine(point_x, point_y - 5, point_x, point_y - 16);
						}
						if(point_y >= graphcenterY){
							g.drawLine(point_x, point_y + 5, point_x, point_y + 16);
						}
					}
				}
			}
			//Update last point ...
			lastPoint_x = point_x;
			lastPoint_y = point_y;
		}
	}
	/** No per-instance setup is required before the render loop starts. */
	@Override
	public void startup() {
	}
	/** One iteration of the base class's render loop: draw, flip, request repaint. */
	@Override
	public void renderloop() {
		render();
		switchBuffers();
		repaint();
		//TODO:
	}
	/** No shutdown work required. */
	@Override
	public void shutdown() {
	}
	/** No cleanup work required. */
	@Override
	public void cleanup() {
	}
}
|
package rajaapps.com.dimensionfitness.fragments.enquiresfragment;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.velm.testlib.asynctask.AsyCallback;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import java.util.ArrayList;
import java.util.List;
import rajaapps.com.dimensionfitness.MyApplication;
import rajaapps.com.dimensionfitness.R;
import rajaapps.com.dimensionfitness.adapters.ListEnquiresRecyclarviewAdapter;
import com.example.velm.testlib.model.Enquires;
/**
* Created by velmmuru on 7/28/2017.
*/
/**
 * Fragment that lists enquiries flagged for follow-up SMS.
 *
 * Loads the data asynchronously via {@code EnquiresDaoImpl.getFollowupSmsEnquires}
 * and populates a RecyclerView when {@link #onTaskComplete(Object)} fires.
 */
public class FollowupMessageFragment extends Fragment implements AsyCallback {
    private static final String TAG = "FollowupMessageFragment";
    RecyclerView recyclerView;
    ListEnquiresRecyclarviewAdapter adapter;
    List<Enquires> enquiresList;
    // NOTE(review): never assigned or read in this fragment — remove if no other code relies on it.
    DatabaseReference databaseReference;

    /** Required empty public constructor for the Fragment framework. */
    public FollowupMessageFragment() {
    }

    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        // Inflate the shared enquiries-list layout and grab its RecyclerView.
        View v = inflater.inflate(R.layout.list_enquires_fragment,container,false);
        recyclerView = (RecyclerView)v.findViewById(R.id.list_enquires_recyculerview);
        return v;
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        enquiresList = new ArrayList<>();
        recyclerView.setLayoutManager(new LinearLayoutManager(this.getContext()));
        try{
            // Kick off the async load; result arrives in onTaskComplete.
            MyApplication.getEnquiresDaoImpl().getFollowupSmsEnquires(this,getContext());
        }catch (Exception e){
            Log.d(TAG,e.toString());
        }
    }

    /**
     * Async-load callback: binds the loaded enquiries to the RecyclerView.
     *
     * @param result expected to be a {@code List<Enquires>}; a null result is
     *               ignored (the original code would have thrown an NPE here).
     */
    @SuppressWarnings("unchecked")
    @Override
    public void onTaskComplete(Object result) {
        if (result == null) {
            // Fixed: guard against a null callback payload instead of crashing on toString().
            Log.d(TAG, "onTaskComplete: null result, keeping current list");
            return;
        }
        enquiresList = (List<Enquires>) result;
        // Fixed: use the class TAG consistently instead of an ad-hoc tag string.
        Log.d(TAG, "callback received: " + enquiresList);
        adapter = new ListEnquiresRecyclarviewAdapter(getContext(),enquiresList);
        recyclerView.setAdapter(adapter);
    }
}
|
package com.technology.os.utilerias;
import com.vaadin.flow.component.ClickEvent;
import com.vaadin.flow.component.button.Button;
import com.vaadin.flow.component.button.ButtonVariant;
import com.vaadin.flow.component.dialog.Dialog;
import com.vaadin.flow.component.html.H1;
import com.vaadin.flow.component.html.Image;
import com.vaadin.flow.component.icon.VaadinIcon;
import com.vaadin.flow.component.notification.Notification;
import com.vaadin.flow.component.notification.NotificationVariant;
import com.vaadin.flow.component.orderedlayout.VerticalLayout;
import com.vaadin.flow.component.textfield.EmailField;
import com.vaadin.flow.router.Route;
@Route(value="recover")
/**
 * Vaadin view for the password-recovery flow, reachable at {@code /recover}.
 *
 * NOTE(review): the recover button has no click listener yet and
 * {@code recoverPassword()} is an empty stub — the flow is not wired up.
 */
@Route(value="recover")
public class RecoverPassword extends VerticalLayout {

    // View components (declaration order matches the visual order they are added in).
    private final Image coverImage = new Image("img/butterfly.jpg", "Portada");
    private final H1 headerTitle = new H1("Modulo de recuperacion de inicio de sesion");
    private final EmailField emailField = new EmailField("Correo electronico");
    private final Button recoverButton = new Button("Recuperar");
    private final Button backToLoginButton =
            new Button("Regresar al login", e -> getUI().ifPresent(ui -> ui.navigate("login")));
    private final VerticalLayout contentLayout = new VerticalLayout();

    public RecoverPassword() {
        buildLayout();
        registerListeners();
    }

    /** Configures the components and assembles them into the view. */
    private void buildLayout(){
        // Cover image sizing.
        coverImage.setWidth("100px");
        coverImage.setHeight("100px");
        // Email input with envelope icon.
        emailField.setWidth("300px");
        emailField.setPlaceholder("Email");
        emailField.setPrefixComponent(VaadinIcon.ENVELOPE_O.create());
        // Primary styling on the back-to-login button.
        backToLoginButton.addThemeVariants(ButtonVariant.LUMO_PRIMARY);
        // Center everything and stack it vertically.
        setHorizontalComponentAlignment(Alignment.CENTER, coverImage, headerTitle, emailField, recoverButton, backToLoginButton);
        contentLayout.add(coverImage, headerTitle, emailField, recoverButton, backToLoginButton);
        add(contentLayout);
    }

    /** Placeholder: click listeners for the recover flow are not implemented yet. */
    private void registerListeners(){
    }

    /** Placeholder: the actual recovery e-mail logic is not implemented yet. */
    private void recoverPassword(){
    }
}
|
package com.ocetnik.timer;
import android.app.Activity;
import android.os.Handler;
import android.os.PowerManager;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import java.lang.Runnable;
import java.lang.ref.WeakReference;
/**
 * React Native native module ("RNBackgroundTimer") that emits timer events to
 * JavaScript via the DeviceEventManager, backed by an Android Handler.
 *
 * Thread-safety note: handler/runnable are written from React method calls and
 * read from Handler callbacks; the Handler itself serializes execution on its
 * looper thread.
 */
public class BackgroundTimerModule extends ReactContextBaseJavaModule {
    // Handler that validates its Activity via a WeakReference before delivering events.
    private MyHandler handler;
    private ReactContext reactContext;
    private Runnable runnable;
    // private PowerManager powerManager;
    // private PowerManager.WakeLock wakeLock;
    // NOTE(review): onHostResume re-adds and onHostPause removes this very listener,
    // so after the first pause the listener no longer receives events — confirm
    // whether that is intentional (the wake-lock code it guarded is commented out).
    private final LifecycleEventListener listener = new LifecycleEventListener() {
        @Override
        public void onHostResume() {
            // wakeLock.acquire();
            reactContext.addLifecycleEventListener(this);
        }
        @Override
        public void onHostPause() {
            //wakeLock.release();
            reactContext.removeLifecycleEventListener(this);
        }
        @Override
        public void onHostDestroy() {
            // if (wakeLock.isHeld()) wakeLock.release();
            reactContext.removeLifecycleEventListener(this);
        }
    };
    public BackgroundTimerModule(ReactApplicationContext reactContext) {
        super(reactContext);
        this.reactContext = reactContext;
        // this.powerManager = (PowerManager) getReactApplicationContext().getSystemService(reactContext.POWER_SERVICE);
        // this.wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "rohit_bg_wakelock");
        reactContext.addLifecycleEventListener(listener);
    }
    /** Module name exposed to the JavaScript side. */
    @Override
    public String getName() {
        return "RNBackgroundTimer";
    }
    /**
     * Starts the timer: posts a runnable that emits a "backgroundTimer" event.
     * NOTE(review): the delay parameter is never used — the event fires
     * immediately via post(); confirm the JS side is responsible for scheduling.
     */
    @ReactMethod
    public void start(final int delay) {
        handler = new MyHandler(getCurrentActivity());
        runnable = new Runnable() {
            @Override
            public void run() {
                // Only emit while the originating Activity is still alive.
                if (handler.isValid()) {
                    sendEvent(reactContext, "backgroundTimer");
                }
            }
        };
        handler.post(runnable);
    }
    /** Cancels the pending timer runnable, if any. */
    @ReactMethod
    public void stop() {
        // avoid null pointer exception when stop is called without start
        if (handler != null) handler.removeCallbacks(runnable);
    }
    // Emits a device event with no payload to the JavaScript event emitter.
    private void sendEvent(ReactContext reactContext, String eventName) {
        reactContext
                .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
                .emit(eventName, null);
    }
    /**
     * One-shot timer: after {@code timeout} ms, emits "backgroundTimer.timeout"
     * carrying the caller-supplied id so JS can match the callback.
     */
    @ReactMethod
    public void setTimeout(final int id, final int timeout) {
        final MyHandler handler = new MyHandler(getCurrentActivity());
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                if (handler.isValid()) {
                    // Guard: only emit while the React (Catalyst) instance is still active.
                    if (getReactApplicationContext().hasActiveCatalystInstance()) {
                        getReactApplicationContext()
                                .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
                                .emit("backgroundTimer.timeout", id);
                    }
                }
            }
        }, timeout);
    }
    /*@ReactMethod
    public void clearTimeout(final int id) {
        // todo one day..
        // not really neccessary to have
    }*/
    /**
     * Handler holding only a WeakReference to its Activity so a pending
     * runnable cannot leak a destroyed Activity.
     */
    private static class MyHandler extends Handler {
        private final WeakReference<Activity> weakReference;
        MyHandler(Activity activity) {
            weakReference = new WeakReference<>(activity);
        }
        /** True while the referenced Activity exists and is not finishing. */
        public boolean isValid() {
            Activity activity = weakReference.get();
            return activity != null && !activity.isFinishing();
        }
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.infrastructure.dataqueries.service;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.persistence.PersistenceException;
import javax.sql.DataSource;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.fineract.infrastructure.codes.service.CodeReadPlatformService;
import org.apache.fineract.infrastructure.configuration.domain.ConfigurationDomainService;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.data.ApiParameterError;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResult;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResultBuilder;
import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder;
import org.apache.fineract.infrastructure.core.exception.GeneralPlatformDomainRuleException;
import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import org.apache.fineract.infrastructure.core.exception.PlatformDataIntegrityException;
import org.apache.fineract.infrastructure.core.exception.PlatformServiceUnavailableException;
import org.apache.fineract.infrastructure.core.serialization.DatatableCommandFromApiJsonDeserializer;
import org.apache.fineract.infrastructure.core.serialization.FromJsonHelper;
import org.apache.fineract.infrastructure.core.serialization.JsonParserHelper;
import org.apache.fineract.infrastructure.core.service.RoutingDataSource;
import org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant;
import org.apache.fineract.infrastructure.dataqueries.data.DataTableValidator;
import org.apache.fineract.infrastructure.dataqueries.data.DatatableData;
import org.apache.fineract.infrastructure.dataqueries.data.GenericResultsetData;
import org.apache.fineract.infrastructure.dataqueries.data.ResultsetColumnHeaderData;
import org.apache.fineract.infrastructure.dataqueries.data.ResultsetRowData;
import org.apache.fineract.infrastructure.dataqueries.exception.DatatableEntryRequiredException;
import org.apache.fineract.infrastructure.dataqueries.exception.DatatableNotFoundException;
import org.apache.fineract.infrastructure.dataqueries.exception.DatatableSystemErrorException;
import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext;
import org.apache.fineract.infrastructure.security.utils.ColumnValidator;
import org.apache.fineract.infrastructure.security.utils.SQLInjectionValidator;
import org.apache.fineract.useradministration.domain.AppUser;
import org.joda.time.LocalDate;
import org.joda.time.LocalDateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.BadSqlGrammarException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.jdbc.support.rowset.SqlRowSetMetaData;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
public class ReadWriteNonCoreDataServiceImpl implements ReadWriteNonCoreDataService {
private static final String DATATABLE_NAME_REGEX_PATTERN = "^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,48}[a-zA-Z0-9]$";
private static final String CODE_VALUES_TABLE = "m_code_value";
private static final Logger LOG = LoggerFactory.getLogger(ReadWriteNonCoreDataServiceImpl.class);
private static final ImmutableMap<String, String> apiTypeToMySQL = ImmutableMap.<String, String>builder().put("string", "VARCHAR")
.put("number", "INT").put("boolean", "BIT").put("decimal", "DECIMAL").put("date", "DATE").put("datetime", "DATETIME")
.put("text", "TEXT").put("dropdown", "INT").build();
private static final List<String> stringDataTypes = Arrays.asList("char", "varchar", "blob", "text", "tinyblob", "tinytext",
"mediumblob", "mediumtext", "longblob", "longtext");
private final JdbcTemplate jdbcTemplate;
private final DataSource dataSource;
private final PlatformSecurityContext context;
private final FromJsonHelper fromJsonHelper;
private final JsonParserHelper helper;
private final GenericDataService genericDataService;
private final DatatableCommandFromApiJsonDeserializer fromApiJsonDeserializer;
private final ConfigurationDomainService configurationDomainService;
private final CodeReadPlatformService codeReadPlatformService;
private final DataTableValidator dataTableValidator;
private final ColumnValidator columnValidator;
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
// private final GlobalConfigurationWritePlatformServiceJpaRepositoryImpl
// configurationWriteService;
@Autowired(required = true)
public ReadWriteNonCoreDataServiceImpl(final RoutingDataSource dataSource, final PlatformSecurityContext context,
final FromJsonHelper fromJsonHelper, final GenericDataService genericDataService,
final DatatableCommandFromApiJsonDeserializer fromApiJsonDeserializer, final CodeReadPlatformService codeReadPlatformService,
final ConfigurationDomainService configurationDomainService, final DataTableValidator dataTableValidator,
final ColumnValidator columnValidator) {
this.dataSource = dataSource;
this.jdbcTemplate = new JdbcTemplate(this.dataSource);
this.context = context;
this.fromJsonHelper = fromJsonHelper;
this.helper = new JsonParserHelper();
this.genericDataService = genericDataService;
this.fromApiJsonDeserializer = fromApiJsonDeserializer;
this.codeReadPlatformService = codeReadPlatformService;
this.configurationDomainService = configurationDomainService;
this.dataTableValidator = dataTableValidator;
this.columnValidator = columnValidator;
// this.configurationWriteService = configurationWriteService;
this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
}
@Override
public List<DatatableData> retrieveDatatableNames(final String appTable) {
String andClause;
if (appTable == null) {
andClause = "";
} else {
validateAppTable(appTable);
SQLInjectionValidator.validateSQLInput(appTable);
andClause = " and application_table_name = '" + appTable + "'";
}
// PERMITTED datatables
final String sql = "select application_table_name, registered_table_name" + " from x_registered_table " + " where exists"
+ " (select 'f'" + " from m_appuser_role ur " + " join m_role r on r.id = ur.role_id"
+ " left join m_role_permission rp on rp.role_id = r.id" + " left join m_permission p on p.id = rp.permission_id"
+ " where ur.appuser_id = " + this.context.authenticatedUser().getId()
+ " and (p.code in ('ALL_FUNCTIONS', 'ALL_FUNCTIONS_READ') or p.code = concat('READ_', registered_table_name))) "
+ andClause + " order by application_table_name, registered_table_name";
final SqlRowSet rs = this.jdbcTemplate.queryForRowSet(sql);
final List<DatatableData> datatables = new ArrayList<>();
while (rs.next()) {
final String appTableName = rs.getString("application_table_name");
final String registeredDatatableName = rs.getString("registered_table_name");
final List<ResultsetColumnHeaderData> columnHeaderData = this.genericDataService
.fillResultsetColumnHeaders(registeredDatatableName);
datatables.add(DatatableData.create(appTableName, registeredDatatableName, columnHeaderData));
}
return datatables;
}
@Override
public DatatableData retrieveDatatable(final String datatable) {
// PERMITTED datatables
SQLInjectionValidator.validateSQLInput(datatable);
final String sql = "select application_table_name, registered_table_name" + " from x_registered_table " + " where exists"
+ " (select 'f'" + " from m_appuser_role ur " + " join m_role r on r.id = ur.role_id"
+ " left join m_role_permission rp on rp.role_id = r.id" + " left join m_permission p on p.id = rp.permission_id"
+ " where ur.appuser_id = " + this.context.authenticatedUser().getId() + " and registered_table_name='" + datatable + "'"
+ " and (p.code in ('ALL_FUNCTIONS', 'ALL_FUNCTIONS_READ') or p.code = concat('READ_', registered_table_name))) "
+ " order by application_table_name, registered_table_name";
final SqlRowSet rs = this.jdbcTemplate.queryForRowSet(sql);
DatatableData datatableData = null;
while (rs.next()) {
final String appTableName = rs.getString("application_table_name");
final String registeredDatatableName = rs.getString("registered_table_name");
final List<ResultsetColumnHeaderData> columnHeaderData = this.genericDataService
.fillResultsetColumnHeaders(registeredDatatableName);
datatableData = DatatableData.create(appTableName, registeredDatatableName, columnHeaderData);
}
return datatableData;
}
    /** Logs an unexpected data-integrity exception at ERROR level before callers re-throw it as a platform exception. */
    private void logAsErrorUnexpectedDataIntegrityException(final Exception dve) {
        LOG.error("Error occured.", dve);
    }
@Transactional
@Override
public void registerDatatable(final String dataTableName, final String applicationTableName) {
Integer category = DataTableApiConstant.CATEGORY_DEFAULT;
final String permissionSql = this.getPermissionSql(dataTableName);
this.registerDataTable(applicationTableName, dataTableName, category, permissionSql);
}
@Transactional
@Override
public void registerDatatable(final JsonCommand command) {
final String applicationTableName = this.getTableName(command.getUrl());
final String dataTableName = this.getDataTableName(command.getUrl());
Integer category = this.getCategory(command);
this.dataTableValidator.validateDataTableRegistration(command.json());
final String permissionSql = this.getPermissionSql(dataTableName);
this.registerDataTable(applicationTableName, dataTableName, category, permissionSql);
}
@Transactional
@Override
public void registerDatatable(final JsonCommand command, final String permissionSql) {
final String applicationTableName = this.getTableName(command.getUrl());
final String dataTableName = this.getDataTableName(command.getUrl());
Integer category = this.getCategory(command);
this.dataTableValidator.validateDataTableRegistration(command.json());
this.registerDataTable(applicationTableName, dataTableName, category, permissionSql);
}
@Transactional
private void registerDataTable(final String applicationTableName, final String dataTableName, final Integer category,
final String permissionsSql) {
validateAppTable(applicationTableName);
validateDatatableName(dataTableName);
assertDataTableExists(dataTableName);
Map<String, Object> paramMap = new HashMap<>(3);
final String registerDatatableSql = "insert into x_registered_table (registered_table_name, application_table_name,category) values ( :dataTableName, :applicationTableName, :category)";
paramMap.put("dataTableName", dataTableName);
paramMap.put("applicationTableName", applicationTableName);
paramMap.put("category", category);
try {
this.namedParameterJdbcTemplate.update(registerDatatableSql, paramMap);
this.jdbcTemplate.update(permissionsSql);
// add the registered table to the config if it is a ppi
if (this.isSurveyCategory(category)) {
this.namedParameterJdbcTemplate
.update("insert into c_configuration (name, value, enabled ) values( :dataTableName , '0','0')", paramMap);
}
} catch (final DataIntegrityViolationException dve) {
final Throwable cause = dve.getCause();
final Throwable realCause = dve.getMostSpecificCause();
// even if duplicate is only due to permission duplicate, okay to
// show duplicate datatable error msg
if (realCause.getMessage().contains("Duplicate entry") || cause.getMessage().contains("Duplicate entry")) {
throw new PlatformDataIntegrityException("error.msg.datatable.registered",
"Datatable `" + dataTableName + "` is already registered against an application table.", "dataTableName",
dataTableName, dve);
}
logAsErrorUnexpectedDataIntegrityException(dve);
throw new PlatformDataIntegrityException("error.msg.unknown.data.integrity.issue",
"Unknown data integrity issue with resource.", dve);
} catch (final PersistenceException dve) {
final Throwable cause = dve.getCause();
if (cause.getMessage().contains("Duplicate entry")) {
throw new PlatformDataIntegrityException("error.msg.datatable.registered",
"Datatable `" + dataTableName + "` is already registered against an application table.", "dataTableName",
dataTableName, dve);
}
logAsErrorUnexpectedDataIntegrityException(dve);
throw new PlatformDataIntegrityException("error.msg.unknown.data.integrity.issue",
"Unknown data integrity issue with resource.", dve);
}
}
private String getPermissionSql(final String dataTableName) {
final String createPermission = "'CREATE_" + dataTableName + "'";
final String createPermissionChecker = "'CREATE_" + dataTableName + "_CHECKER'";
final String readPermission = "'READ_" + dataTableName + "'";
final String updatePermission = "'UPDATE_" + dataTableName + "'";
final String updatePermissionChecker = "'UPDATE_" + dataTableName + "_CHECKER'";
final String deletePermission = "'DELETE_" + dataTableName + "'";
final String deletePermissionChecker = "'DELETE_" + dataTableName + "_CHECKER'";
return "insert into m_permission (grouping, code, action_name, entity_name, can_maker_checker) values " + "('datatable', "
+ createPermission + ", 'CREATE', '" + dataTableName + "', true)," + "('datatable', " + createPermissionChecker
+ ", 'CREATE', '" + dataTableName + "', false)," + "('datatable', " + readPermission + ", 'READ', '" + dataTableName
+ "', false)," + "('datatable', " + updatePermission + ", 'UPDATE', '" + dataTableName + "', true)," + "('datatable', "
+ updatePermissionChecker + ", 'UPDATE', '" + dataTableName + "', false)," + "('datatable', " + deletePermission
+ ", 'DELETE', '" + dataTableName + "', true)," + "('datatable', " + deletePermissionChecker + ", 'DELETE', '"
+ dataTableName + "', false)";
}
private Integer getCategory(final JsonCommand command) {
Integer category = command.integerValueOfParameterNamedDefaultToNullIfZero(DataTableApiConstant.categoryParamName);
if (category == null) {
category = DataTableApiConstant.CATEGORY_DEFAULT;
}
return category;
}
private boolean isSurveyCategory(final Integer category) {
return category.equals(DataTableApiConstant.CATEGORY_PPI);
}
@Override
public String getDataTableName(String url) {
List<String> urlParts = Splitter.on('/').splitToList(url);
return urlParts.get(3);
}
@Override
public String getTableName(String url) {
List<String> urlParts = Splitter.on('/').splitToList(url);
return urlParts.get(4);
}
@Transactional
@Override
public void deregisterDatatable(final String datatable) {
validateDatatableName(datatable);
final String permissionList = "('CREATE_" + datatable + "', 'CREATE_" + datatable + "_CHECKER', 'READ_" + datatable + "', 'UPDATE_"
+ datatable + "', 'UPDATE_" + datatable + "_CHECKER', 'DELETE_" + datatable + "', 'DELETE_" + datatable + "_CHECKER')";
final String deleteRolePermissionsSql = "delete from m_role_permission where m_role_permission.permission_id in (select id from m_permission where code in "
+ permissionList + ")";
final String deletePermissionsSql = "delete from m_permission where code in " + permissionList;
final String deleteRegisteredDatatableSql = "delete from x_registered_table where registered_table_name = '" + datatable + "'";
final String deleteFromConfigurationSql = "delete from c_configuration where name ='" + datatable + "'";
String[] sqlArray = new String[4];
sqlArray[0] = deleteRolePermissionsSql;
sqlArray[1] = deletePermissionsSql;
sqlArray[2] = deleteRegisteredDatatableSql;
sqlArray[3] = deleteFromConfigurationSql;
this.jdbcTemplate.batchUpdate(sqlArray);
}
@Transactional
@Override
public CommandProcessingResult createNewDatatableEntry(final String dataTableName, final Long appTableId, final JsonCommand command) {
return createNewDatatableEntry(dataTableName, appTableId, command.json());
}
@Transactional
@Override
public CommandProcessingResult createNewDatatableEntry(final String dataTableName, final Long appTableId, final String json) {
try {
final String appTable = queryForApplicationTableName(dataTableName);
final CommandProcessingResult commandProcessingResult = checkMainResourceExistsWithinScope(appTable, appTableId);
final List<ResultsetColumnHeaderData> columnHeaders = this.genericDataService.fillResultsetColumnHeaders(dataTableName);
final Type typeOfMap = new TypeToken<Map<String, String>>() {}.getType();
final Map<String, String> dataParams = this.fromJsonHelper.extractDataMap(typeOfMap, json);
final String sql = getAddSql(columnHeaders, dataTableName, getFKField(appTable), appTableId, dataParams);
this.jdbcTemplate.update(sql);
return commandProcessingResult; //
} catch (final DataAccessException dve) {
final Throwable cause = dve.getCause();
final Throwable realCause = dve.getMostSpecificCause();
if (realCause.getMessage().contains("Duplicate entry") || cause.getMessage().contains("Duplicate entry")) {
throw new PlatformDataIntegrityException(
"error.msg.datatable.entry.duplicate", "An entry already exists for datatable `" + dataTableName
+ "` and application table with identifier `" + appTableId + "`.",
"dataTableName", dataTableName, appTableId, dve);
} else if (realCause.getMessage().contains("doesn't have a default value")
|| cause.getMessage().contains("doesn't have a default value")) {
throw new PlatformDataIntegrityException(
"error.msg.datatable.no.value.provided.for.required.fields", "No values provided for the datatable `"
+ dataTableName + "` and application table with identifier `" + appTableId + "`.",
"dataTableName", dataTableName, appTableId, dve);
}
logAsErrorUnexpectedDataIntegrityException(dve);
throw new PlatformDataIntegrityException("error.msg.unknown.data.integrity.issue",
"Unknown data integrity issue with resource.", dve);
} catch (final PersistenceException e) {
final Throwable cause = e.getCause();
if (cause.getMessage().contains("Duplicate entry")) {
throw new PlatformDataIntegrityException(
"error.msg.datatable.entry.duplicate", "An entry already exists for datatable `" + dataTableName
+ "` and application table with identifier `" + appTableId + "`.",
"dataTableName", dataTableName, appTableId, e);
} else if (cause.getMessage().contains("doesn't have a default value")) {
throw new PlatformDataIntegrityException(
"error.msg.datatable.no.value.provided.for.required.fields", "No values provided for the datatable `"
+ dataTableName + "` and application table with identifier `" + appTableId + "`.",
"dataTableName", dataTableName, appTableId, e);
}
logAsErrorUnexpectedDataIntegrityException(e);
throw new PlatformDataIntegrityException("error.msg.unknown.data.integrity.issue",
"Unknown data integrity issue with resource.", e);
}
}
@Override
public CommandProcessingResult createPPIEntry(final String dataTableName, final Long appTableId, final JsonCommand command) {
try {
final String appTable = queryForApplicationTableName(dataTableName);
final CommandProcessingResult commandProcessingResult = checkMainResourceExistsWithinScope(appTable, appTableId);
final List<ResultsetColumnHeaderData> columnHeaders = this.genericDataService.fillResultsetColumnHeaders(dataTableName);
final Type typeOfMap = new TypeToken<Map<String, String>>() {}.getType();
final Map<String, String> dataParams = this.fromJsonHelper.extractDataMap(typeOfMap, command.json());
final String sql = getAddSqlWithScore(columnHeaders, dataTableName, getFKField(appTable), appTableId, dataParams);
this.jdbcTemplate.update(sql);
return commandProcessingResult; //
} catch (final DataAccessException dve) {
final Throwable cause = dve.getCause();
final Throwable realCause = dve.getMostSpecificCause();
if (realCause.getMessage().contains("Duplicate entry") || cause.getMessage().contains("Duplicate entry")) {
throw new PlatformDataIntegrityException(
"error.msg.datatable.entry.duplicate", "An entry already exists for datatable `" + dataTableName
+ "` and application table with identifier `" + appTableId + "`.",
"dataTableName", dataTableName, appTableId, dve);
}
logAsErrorUnexpectedDataIntegrityException(dve);
throw new PlatformDataIntegrityException("error.msg.unknown.data.integrity.issue",
"Unknown data integrity issue with resource.", dve);
} catch (final PersistenceException dve) {
final Throwable cause = dve.getCause();
if (cause.getMessage().contains("Duplicate entry")) {
throw new PlatformDataIntegrityException(
"error.msg.datatable.entry.duplicate", "An entry already exists for datatable `" + dataTableName
+ "` and application table with identifier `" + appTableId + "`.",
"dataTableName", dataTableName, appTableId, dve);
}
logAsErrorUnexpectedDataIntegrityException(dve);
throw new PlatformDataIntegrityException("error.msg.unknown.data.integrity.issue",
"Unknown data integrity issue with resource.", dve);
}
}
private boolean isRegisteredDataTable(final String name) {
// PERMITTED datatables
final String sql = "select if((exists (select 1 from x_registered_table where registered_table_name = ?)) = 1, 'true', 'false')";
final String isRegisteredDataTable = this.jdbcTemplate.queryForObject(sql, String.class, new Object[] { name });
return Boolean.valueOf(isRegisteredDataTable);
}
private void assertDataTableExists(final String datatableName) {
final String sql = "select if((exists (select 1 from information_schema.tables where table_schema = schema() and table_name = ?)) = 1, 'true', 'false')";
final String dataTableExistsString = this.jdbcTemplate.queryForObject(sql, String.class, new Object[] { datatableName });
final boolean dataTableExists = Boolean.valueOf(dataTableExistsString);
if (!dataTableExists) {
throw new PlatformDataIntegrityException("error.msg.invalid.datatable", "Invalid Data Table: " + datatableName, "name",
datatableName);
}
}
private void validateDatatableName(final String name) {
if (name == null || name.isEmpty()) {
throw new PlatformDataIntegrityException("error.msg.datatables.datatable.null.name", "Data table name must not be blank.");
} else if (!name.matches(DATATABLE_NAME_REGEX_PATTERN)) {
throw new PlatformDataIntegrityException("error.msg.datatables.datatable.invalid.name.regex", "Invalid data table name.", name);
}
SQLInjectionValidator.validateSQLInput(name);
}
private String datatableColumnNameToCodeValueName(final String columnName, final String code) {
return code + "_cd_" + columnName;
}
private void throwExceptionIfValidationWarningsExist(final List<ApiParameterError> dataValidationErrors) {
if (!dataValidationErrors.isEmpty()) {
throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
dataValidationErrors);
}
}
private void parseDatatableColumnObjectForCreate(final JsonObject column, StringBuilder sqlBuilder,
final StringBuilder constrainBuilder, final String dataTableNameAlias, final Map<String, Long> codeMappings,
final boolean isConstraintApproach) {
String name = column.has("name") ? column.get("name").getAsString() : null;
final String type = column.has("type") ? column.get("type").getAsString().toLowerCase() : null;
final Integer length = column.has("length") ? column.get("length").getAsInt() : null;
final Boolean mandatory = column.has("mandatory") ? column.get("mandatory").getAsBoolean() : false;
final String code = column.has("code") ? column.get("code").getAsString() : null;
if (StringUtils.isNotBlank(code)) {
if (isConstraintApproach) {
codeMappings.put(dataTableNameAlias + "_" + name, this.codeReadPlatformService.retriveCode(code).getCodeId());
constrainBuilder.append(", CONSTRAINT `fk_").append(dataTableNameAlias).append("_").append(name).append("` ")
.append("FOREIGN KEY (`" + name + "`) ").append("REFERENCES `").append(CODE_VALUES_TABLE).append("` (`id`)");
} else {
name = datatableColumnNameToCodeValueName(name, code);
}
}
final String mysqlType = apiTypeToMySQL.get(type);
sqlBuilder = sqlBuilder.append("`" + name + "` " + mysqlType);
if (type != null) {
if (type.equalsIgnoreCase("String")) {
sqlBuilder = sqlBuilder.append("(" + length + ")");
} else if (type.equalsIgnoreCase("Decimal")) {
sqlBuilder = sqlBuilder.append("(19,6)");
} else if (type.equalsIgnoreCase("Dropdown")) {
sqlBuilder = sqlBuilder.append("(11)");
}
}
if (mandatory) {
sqlBuilder = sqlBuilder.append(" NOT NULL");
} else {
sqlBuilder = sqlBuilder.append(" DEFAULT NULL");
}
sqlBuilder = sqlBuilder.append(", ");
}
@Transactional
@Override
public CommandProcessingResult createDatatable(final JsonCommand command) {
String datatableName = null;
try {
this.context.authenticatedUser();
this.fromApiJsonDeserializer.validateForCreate(command.json());
final JsonElement element = this.fromJsonHelper.parse(command.json());
final JsonArray columns = this.fromJsonHelper.extractJsonArrayNamed("columns", element);
datatableName = this.fromJsonHelper.extractStringNamed("datatableName", element);
final String apptableName = this.fromJsonHelper.extractStringNamed("apptableName", element);
Boolean multiRow = this.fromJsonHelper.extractBooleanNamed("multiRow", element);
/***
* In cases of tables storing hierarchical entities (like m_group), different entities would end up being
* stored in the same table.
*
* Ex: Centers are a specific type of group, add abstractions for the same
***/
final String actualAppTableName = mapToActualAppTable(apptableName);
if (multiRow == null) {
multiRow = false;
}
validateDatatableName(datatableName);
validateAppTable(apptableName);
final boolean isConstraintApproach = this.configurationDomainService.isConstraintApproachEnabledForDatatables();
final String fkColumnName = apptableName.substring(2) + "_id";
final String dataTableNameAlias = datatableName.toLowerCase().replaceAll("\\s", "_");
final String fkName = dataTableNameAlias + "_" + fkColumnName;
StringBuilder sqlBuilder = new StringBuilder();
final StringBuilder constrainBuilder = new StringBuilder();
final Map<String, Long> codeMappings = new HashMap<>();
sqlBuilder = sqlBuilder.append("CREATE TABLE `" + datatableName + "` (");
if (multiRow) {
sqlBuilder = sqlBuilder.append("`id` BIGINT NOT NULL AUTO_INCREMENT, ").append("`" + fkColumnName + "` BIGINT NOT NULL, ");
} else {
sqlBuilder = sqlBuilder.append("`" + fkColumnName + "` BIGINT NOT NULL, ");
}
for (final JsonElement column : columns) {
parseDatatableColumnObjectForCreate(column.getAsJsonObject(), sqlBuilder, constrainBuilder, dataTableNameAlias,
codeMappings, isConstraintApproach);
}
// Remove trailing comma and space
sqlBuilder = sqlBuilder.delete(sqlBuilder.length() - 2, sqlBuilder.length());
if (multiRow) {
sqlBuilder = sqlBuilder.append(", PRIMARY KEY (`id`)")
.append(", KEY `fk_" + apptableName.substring(2) + "_id` (`" + fkColumnName + "`)")
.append(", CONSTRAINT `fk_" + fkName + "` ").append("FOREIGN KEY (`" + fkColumnName + "`) ")
.append("REFERENCES `" + actualAppTableName + "` (`id`)");
} else {
sqlBuilder = sqlBuilder.append(", PRIMARY KEY (`" + fkColumnName + "`)").append(", CONSTRAINT `fk_" + fkName + "` ")
.append("FOREIGN KEY (`" + fkColumnName + "`) ").append("REFERENCES `" + actualAppTableName + "` (`id`)");
}
sqlBuilder.append(constrainBuilder);
sqlBuilder = sqlBuilder.append(") ENGINE=InnoDB DEFAULT CHARSET=UTF8MB4;");
this.jdbcTemplate.execute(sqlBuilder.toString());
registerDatatable(datatableName, apptableName);
registerColumnCodeMapping(codeMappings);
} catch (final DataIntegrityViolationException e) {
final Throwable realCause = e.getCause();
final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("datatable");
if (realCause.getMessage().toLowerCase().contains("duplicate column name")) {
baseDataValidator.reset().parameter("name").failWithCode("duplicate.column.name");
} else if (realCause.getMessage().contains("Table") && realCause.getMessage().contains("already exists")) {
baseDataValidator.reset().parameter("datatableName").value(datatableName).failWithCode("datatable.already.exists");
} else if (realCause.getMessage().contains("Column") && realCause.getMessage().contains("big")) {
baseDataValidator.reset().parameter("column").failWithCode("length.too.big");
} else if (realCause.getMessage().contains("Row") && realCause.getMessage().contains("large")) {
baseDataValidator.reset().parameter("row").failWithCode("size.too.large");
}
throwExceptionIfValidationWarningsExist(dataValidationErrors);
} catch (final PersistenceException | BadSqlGrammarException ee) {
Throwable realCause = ExceptionUtils.getRootCause(ee.getCause());
final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("datatable");
if (realCause.getMessage().toLowerCase().contains("duplicate column name")) {
baseDataValidator.reset().parameter("name").failWithCode("duplicate.column.name");
} else if (realCause.getMessage().contains("Table") && realCause.getMessage().contains("already exists")) {
baseDataValidator.reset().parameter("datatableName").value(datatableName).failWithCode("datatable.already.exists");
} else if (realCause.getMessage().contains("Column") && realCause.getMessage().contains("big")) {
baseDataValidator.reset().parameter("column").failWithCode("length.too.big");
} else if (realCause.getMessage().contains("Row") && realCause.getMessage().contains("large")) {
baseDataValidator.reset().parameter("row").failWithCode("size.too.large");
}
throwExceptionIfValidationWarningsExist(dataValidationErrors);
}
return new CommandProcessingResultBuilder().withCommandId(command.commandId()).withResourceIdAsString(datatableName).build();
}
    /**
     * Appends the ALTER TABLE fragment (", CHANGE `old` `new` type ...") for one JSON
     * column spec to {@code sqlBuilder}, and maintains the code-value FK constraints
     * and mappings when a column or its backing code changes. Order of the generated
     * DROP/ADD CONSTRAINT clauses is significant and mirrors the mapping updates.
     *
     * @throws PlatformDataIntegrityException when the (possibly code-qualified)
     *             column name is not present in {@code mapColumnNameDefinition}
     */
    private void parseDatatableColumnForUpdate(final JsonObject column,
            final Map<String, ResultsetColumnHeaderData> mapColumnNameDefinition, StringBuilder sqlBuilder, final String datatableName,
            final StringBuilder constrainBuilder, final Map<String, Long> codeMappings, final List<String> removeMappings,
            final boolean isConstraintApproach) {
        String name = column.has("name") ? column.get("name").getAsString() : null;
        final String lengthStr = column.has("length") ? column.get("length").getAsString() : null;
        Integer length = StringUtils.isNotBlank(lengthStr) ? Integer.parseInt(lengthStr) : null;
        // When no rename is requested, newName defaults to the current name.
        String newName = column.has("newName") ? column.get("newName").getAsString() : name;
        final Boolean mandatory = column.has("mandatory") ? column.get("mandatory").getAsBoolean() : false;
        final String after = column.has("after") ? column.get("after").getAsString() : null;
        final String code = column.has("code") ? column.get("code").getAsString() : null;
        final String newCode = column.has("newCode") ? column.get("newCode").getAsString() : null;
        final String dataTableNameAlias = datatableName.toLowerCase().replaceAll("\\s", "_");
        if (isConstraintApproach) {
            if (StringUtils.isBlank(newName)) {
                newName = name;
            }
            // Only touch FK constraints/mappings when the code or the column name changes.
            if (!StringUtils.equalsIgnoreCase(code, newCode) || !StringUtils.equalsIgnoreCase(name, newName)) {
                if (StringUtils.equalsIgnoreCase(code, newCode)) {
                    // Same code, renamed column: move the existing mapping and re-point the FK.
                    final int codeId = getCodeIdForColumn(dataTableNameAlias, name);
                    if (codeId > 0) {
                        removeMappings.add(dataTableNameAlias + "_" + name);
                        constrainBuilder.append(", DROP FOREIGN KEY `fk_").append(dataTableNameAlias).append("_").append(name).append("` ");
                        codeMappings.put(dataTableNameAlias + "_" + newName, (long) codeId);
                        constrainBuilder.append(",ADD CONSTRAINT `fk_").append(dataTableNameAlias).append("_").append(newName).append("` ")
                                .append("FOREIGN KEY (`" + newName + "`) ").append("REFERENCES `").append(CODE_VALUES_TABLE)
                                .append("` (`id`)");
                    }
                } else {
                    // Code changed (possibly with a rename): drop the old mapping/FK, add the new.
                    if (code != null) {
                        removeMappings.add(dataTableNameAlias + "_" + name);
                        if (newCode == null || !StringUtils.equalsIgnoreCase(name, newName)) {
                            constrainBuilder.append(", DROP FOREIGN KEY `fk_").append(dataTableNameAlias).append("_").append(name)
                                    .append("` ");
                        }
                    }
                    if (newCode != null) {
                        codeMappings.put(dataTableNameAlias + "_" + newName, this.codeReadPlatformService.retriveCode(newCode).getCodeId());
                        if (code == null || !StringUtils.equalsIgnoreCase(name, newName)) {
                            constrainBuilder.append(",ADD CONSTRAINT `fk_").append(dataTableNameAlias).append("_").append(newName)
                                    .append("` ").append("FOREIGN KEY (`" + newName + "`) ").append("REFERENCES `")
                                    .append(CODE_VALUES_TABLE).append("` (`id`)");
                        }
                    }
                }
            }
        } else {
            // Legacy approach: the code is encoded into the physical column name.
            if (StringUtils.isNotBlank(code)) {
                name = datatableColumnNameToCodeValueName(name, code);
                if (StringUtils.isNotBlank(newCode)) {
                    newName = datatableColumnNameToCodeValueName(newName, newCode);
                } else {
                    newName = datatableColumnNameToCodeValueName(newName, code);
                }
            }
        }
        if (!mapColumnNameDefinition.containsKey(name)) {
            throw new PlatformDataIntegrityException("error.msg.datatable.column.missing.update.parse",
                    "Column " + name + " does not exist.", name);
        }
        final String type = mapColumnNameDefinition.get(name).getColumnType();
        // For varchar columns with no requested length, keep the current length.
        if (length == null && type.toLowerCase().equals("varchar")) {
            length = mapColumnNameDefinition.get(name).getColumnLength().intValue();
        }
        sqlBuilder = sqlBuilder.append(", CHANGE `" + name + "` `" + newName + "` " + type);
        if (length != null && length > 0) {
            if (type.toLowerCase().equals("decimal")) {
                sqlBuilder.append("(19,6)");
            } else if (type.toLowerCase().equals("varchar")) {
                sqlBuilder.append("(" + length + ")");
            }
        }
        if (mandatory) {
            sqlBuilder = sqlBuilder.append(" NOT NULL");
        } else {
            sqlBuilder = sqlBuilder.append(" DEFAULT NULL");
        }
        if (after != null) {
            sqlBuilder = sqlBuilder.append(" AFTER `" + after + "`");
        }
    }
@SuppressWarnings("deprecation")
private int getCodeIdForColumn(final String dataTableNameAlias, final String name) {
final StringBuilder checkColumnCodeMapping = new StringBuilder();
checkColumnCodeMapping.append("select ccm.code_id from x_table_column_code_mappings ccm where ccm.column_alias_name='")
.append(dataTableNameAlias).append("_").append(name).append("'");
Integer codeId = 0;
try {
codeId = this.jdbcTemplate.queryForObject(checkColumnCodeMapping.toString(), Integer.class);
} catch (final EmptyResultDataAccessException e) {
LOG.info("Error occured.", e);
}
return ObjectUtils.defaultIfNull(codeId, 0);
}
    /**
     * Appends the ALTER TABLE fragment (", ADD `name` type ...") for one JSON column
     * spec to {@code sqlBuilder}, plus any code-value FK clause to
     * {@code constrainBuilder} (constraint approach) or a code-qualified column name
     * (legacy approach).
     */
    private void parseDatatableColumnForAdd(final JsonObject column, StringBuilder sqlBuilder, final String dataTableNameAlias,
            final StringBuilder constrainBuilder, final Map<String, Long> codeMappings, final boolean isConstraintApproach) {
        String name = column.has("name") ? column.get("name").getAsString() : null;
        final String type = column.has("type") ? column.get("type").getAsString().toLowerCase() : null;
        final Integer length = column.has("length") ? column.get("length").getAsInt() : null;
        final Boolean mandatory = column.has("mandatory") ? column.get("mandatory").getAsBoolean() : false;
        final String after = column.has("after") ? column.get("after").getAsString() : null;
        final String code = column.has("code") ? column.get("code").getAsString() : null;
        if (StringUtils.isNotBlank(code)) {
            if (isConstraintApproach) {
                // Constraint approach: record the code mapping and add an FK to the code-values table.
                codeMappings.put(dataTableNameAlias + "_" + name, this.codeReadPlatformService.retriveCode(code).getCodeId());
                constrainBuilder.append(",ADD CONSTRAINT `fk_").append(dataTableNameAlias).append("_").append(name).append("` ")
                        .append("FOREIGN KEY (`" + name + "`) ").append("REFERENCES `").append(CODE_VALUES_TABLE).append("` (`id`)");
            } else {
                // Legacy approach: fold the code name into the column name.
                name = datatableColumnNameToCodeValueName(name, code);
            }
        }
        final String mysqlType = apiTypeToMySQL.get(type);
        sqlBuilder = sqlBuilder.append(", ADD `" + name + "` " + mysqlType);
        // Type-specific size suffix; note that unlike the create path, a String
        // column with no length gets no suffix here.
        if (type != null) {
            if (type.equalsIgnoreCase("String") && length != null) {
                sqlBuilder = sqlBuilder.append("(" + length + ")");
            } else if (type.equalsIgnoreCase("Decimal")) {
                sqlBuilder = sqlBuilder.append("(19,6)");
            } else if (type.equalsIgnoreCase("Dropdown")) {
                sqlBuilder = sqlBuilder.append("(11)");
            }
        }
        if (mandatory) {
            sqlBuilder = sqlBuilder.append(" NOT NULL");
        } else {
            sqlBuilder = sqlBuilder.append(" DEFAULT NULL");
        }
        if (after != null) {
            sqlBuilder = sqlBuilder.append(" AFTER `" + after + "`");
        }
    }
private void parseDatatableColumnForDrop(final JsonObject column, StringBuilder sqlBuilder, final String datatableName,
final StringBuilder constrainBuilder, final List<String> codeMappings) {
final String datatableAlias = datatableName.toLowerCase().replaceAll("\\s", "_");
final String name = column.has("name") ? column.get("name").getAsString() : null;
// sqlBuilder = sqlBuilder.append(", DROP COLUMN `" + name + "`");
final StringBuilder findFKSql = new StringBuilder();
findFKSql.append("SELECT count(*)").append("FROM information_schema.TABLE_CONSTRAINTS i")
.append(" WHERE i.CONSTRAINT_TYPE = 'FOREIGN KEY'").append(" AND i.TABLE_SCHEMA = DATABASE()")
.append(" AND i.TABLE_NAME = '").append(datatableName).append("' AND i.CONSTRAINT_NAME = 'fk_").append(datatableAlias)
.append("_").append(name).append("' ");
final int count = this.jdbcTemplate.queryForObject(findFKSql.toString(), Integer.class);
if (count > 0) {
codeMappings.add(datatableAlias + "_" + name);
constrainBuilder.append(", DROP FOREIGN KEY `fk_").append(datatableAlias).append("_").append(name).append("` ");
}
}
private void registerColumnCodeMapping(final Map<String, Long> codeMappings) {
if (codeMappings != null && !codeMappings.isEmpty()) {
final String[] addSqlList = new String[codeMappings.size()];
int i = 0;
for (final Map.Entry<String, Long> mapEntry : codeMappings.entrySet()) {
addSqlList[i++] = "insert into x_table_column_code_mappings (column_alias_name, code_id) values ('" + mapEntry.getKey()
+ "'," + mapEntry.getValue() + ");";
}
this.jdbcTemplate.batchUpdate(addSqlList);
}
}
private void deleteColumnCodeMapping(final List<String> columnNames) {
if (columnNames != null && !columnNames.isEmpty()) {
final String[] deleteSqlList = new String[columnNames.size()];
int i = 0;
for (final String columnName : columnNames) {
deleteSqlList[i++] = "DELETE FROM x_table_column_code_mappings WHERE column_alias_name='" + columnName + "';";
}
this.jdbcTemplate.batchUpdate(deleteSqlList);
}
}
/**
* Update data table, set column value to empty string where current value is NULL. Run update SQL only if the
* "mandatory" property is set to true
*
* @param datatableName
* Name of data table
* @param column
* JSON encoded array of column properties
* @see https://mifosforge.jira.com/browse/MIFOSX-1145
**/
private void removeNullValuesFromStringColumn(final String datatableName, final JsonObject column,
final Map<String, ResultsetColumnHeaderData> mapColumnNameDefinition) {
final Boolean mandatory = column.has("mandatory") ? column.get("mandatory").getAsBoolean() : false;
final String name = column.has("name") ? column.get("name").getAsString() : "";
final String type = mapColumnNameDefinition.containsKey(name) ? mapColumnNameDefinition.get(name).getColumnType() : "";
if (StringUtils.isNotEmpty(type)) {
if (mandatory && stringDataTypes.contains(type.toLowerCase())) {
StringBuilder sqlBuilder = new StringBuilder();
sqlBuilder.append("UPDATE `" + datatableName + "` SET `" + name + "` = '' WHERE `" + name + "` IS NULL");
this.jdbcTemplate.update(sqlBuilder.toString());
}
}
}
/**
 * Applies a datatable definition update described by the command JSON: optionally re-links the datatable to a
 * different application table, then drops, adds and changes columns via ALTER TABLE statements.
 *
 * @param datatableName
 *            name of the registered datatable to modify
 * @param command
 *            JSON command carrying optional "apptableName", "dropColumns", "addColumns" and "changeColumns"
 * @throws GeneralPlatformDomainRuleException
 *             when a drop/mandatory-add is attempted on a non-empty datatable
 * @throws PlatformServiceUnavailableException
 *             when a column change is rejected by the database (rename not allowed, null values present)
 */
@Transactional
@Override
public void updateDatatable(final String datatableName, final JsonCommand command) {
    try {
        this.context.authenticatedUser();
        this.fromApiJsonDeserializer.validateForUpdate(command.json());
        final JsonElement element = this.fromJsonHelper.parse(command.json());
        final JsonArray changeColumns = this.fromJsonHelper.extractJsonArrayNamed("changeColumns", element);
        final JsonArray addColumns = this.fromJsonHelper.extractJsonArrayNamed("addColumns", element);
        final JsonArray dropColumns = this.fromJsonHelper.extractJsonArrayNamed("dropColumns", element);
        final String apptableName = this.fromJsonHelper.extractStringNamed("apptableName", element);
        validateDatatableName(datatableName);
        // row count drives the "cannot drop / cannot add mandatory column on non-empty table" rules below
        int rowCount = getRowCount(datatableName);
        final List<ResultsetColumnHeaderData> columnHeaderData = this.genericDataService.fillResultsetColumnHeaders(datatableName);
        // index current column definitions by name for quick lookup during change processing
        final Map<String, ResultsetColumnHeaderData> mapColumnNameDefinition = new HashMap<>();
        for (final ResultsetColumnHeaderData columnHeader : columnHeaderData) {
            mapColumnNameDefinition.put(columnHeader.getColumnName(), columnHeader);
        }
        final boolean isConstraintApproach = this.configurationDomainService.isConstraintApproachEnabledForDatatables();
        // Re-link the datatable to a different application table: rename the FK column and rebuild key/constraint.
        if (!StringUtils.isBlank(apptableName)) {
            validateAppTable(apptableName);
            final String oldApptableName = queryForApplicationTableName(datatableName);
            if (!StringUtils.equals(oldApptableName, apptableName)) {
                // FK column name convention: application table name without the "m_" prefix plus "_id"
                final String oldFKName = oldApptableName.substring(2) + "_id";
                final String newFKName = apptableName.substring(2) + "_id";
                final String actualAppTableName = mapToActualAppTable(apptableName);
                final String oldConstraintName = datatableName.toLowerCase().replaceAll("\\s", "_") + "_" + oldFKName;
                final String newConstraintName = datatableName.toLowerCase().replaceAll("\\s", "_") + "_" + newFKName;
                StringBuilder sqlBuilder = new StringBuilder();
                // presence of an "id" column implies a one-to-many datatable with a separate unique key on the FK
                if (mapColumnNameDefinition.containsKey("id")) {
                    sqlBuilder = sqlBuilder.append("ALTER TABLE `" + datatableName + "` ").append("DROP KEY `fk_" + oldFKName + "`,")
                            .append("DROP FOREIGN KEY `fk_" + oldConstraintName + "`,")
                            .append("CHANGE COLUMN `" + oldFKName + "` `" + newFKName + "` BIGINT NOT NULL,")
                            .append("ADD KEY `fk_" + newFKName + "` (`" + newFKName + "`),")
                            .append("ADD CONSTRAINT `fk_" + newConstraintName + "` ").append("FOREIGN KEY (`" + newFKName + "`) ")
                            .append("REFERENCES `" + actualAppTableName + "` (`id`)");
                } else {
                    sqlBuilder = sqlBuilder.append("ALTER TABLE `" + datatableName + "` ")
                            .append("DROP FOREIGN KEY `fk_" + oldConstraintName + "`,")
                            .append("CHANGE COLUMN `" + oldFKName + "` `" + newFKName + "` BIGINT NOT NULL,")
                            .append("ADD CONSTRAINT `fk_" + newConstraintName + "` ").append("FOREIGN KEY (`" + newFKName + "`) ")
                            .append("REFERENCES `" + actualAppTableName + "` (`id`)");
                }
                this.jdbcTemplate.execute(sqlBuilder.toString());
                // keep the registration metadata in sync with the new application table
                deregisterDatatable(datatableName);
                registerDatatable(datatableName, apptableName);
            }
        }
        // nothing else to do when no column operations were requested
        if (changeColumns == null && addColumns == null && dropColumns == null) {
            return;
        }
        if (dropColumns != null) {
            // dropping columns would silently destroy data, so only allowed on an empty table
            if (rowCount > 0) {
                throw new GeneralPlatformDomainRuleException("error.msg.non.empty.datatable.column.cannot.be.deleted",
                        "Non-empty datatable columns can not be deleted.");
            }
            StringBuilder sqlBuilder = new StringBuilder("ALTER TABLE `" + datatableName + "`");
            final StringBuilder constrainBuilder = new StringBuilder();
            final List<String> codeMappings = new ArrayList<>();
            for (final JsonElement column : dropColumns) {
                parseDatatableColumnForDrop(column.getAsJsonObject(), sqlBuilder, datatableName, constrainBuilder, codeMappings);
            }
            // Remove the first comma, right after ALTER TABLE `datatable`
            final int indexOfFirstComma = sqlBuilder.indexOf(",");
            if (indexOfFirstComma != -1) {
                sqlBuilder = sqlBuilder.deleteCharAt(indexOfFirstComma);
            }
            sqlBuilder.append(constrainBuilder);
            this.jdbcTemplate.execute(sqlBuilder.toString());
            deleteColumnCodeMapping(codeMappings);
        }
        if (addColumns != null) {
            StringBuilder sqlBuilder = new StringBuilder("ALTER TABLE `" + datatableName + "`");
            final StringBuilder constrainBuilder = new StringBuilder();
            final Map<String, Long> codeMappings = new HashMap<>();
            for (final JsonElement column : addColumns) {
                JsonObject columnAsJson = column.getAsJsonObject();
                // a NOT NULL column cannot be added when existing rows would have no value for it
                if (rowCount > 0 && columnAsJson.has("mandatory") && columnAsJson.get("mandatory").getAsBoolean()) {
                    throw new GeneralPlatformDomainRuleException("error.msg.non.empty.datatable.mandatory.column.cannot.be.added",
                            "Non empty datatable mandatory columns can not be added.");
                }
                parseDatatableColumnForAdd(columnAsJson, sqlBuilder, datatableName.toLowerCase().replaceAll("\\s", "_"),
                        constrainBuilder, codeMappings, isConstraintApproach);
            }
            // Remove the first comma, right after ALTER TABLE `datatable`
            final int indexOfFirstComma = sqlBuilder.indexOf(",");
            if (indexOfFirstComma != -1) {
                sqlBuilder = sqlBuilder.deleteCharAt(indexOfFirstComma);
            }
            sqlBuilder.append(constrainBuilder);
            this.jdbcTemplate.execute(sqlBuilder.toString());
            registerColumnCodeMapping(codeMappings);
        }
        if (changeColumns != null) {
            StringBuilder sqlBuilder = new StringBuilder("ALTER TABLE `" + datatableName + "`");
            final StringBuilder constrainBuilder = new StringBuilder();
            final Map<String, Long> codeMappings = new HashMap<>();
            final List<String> removeMappings = new ArrayList<>();
            for (final JsonElement column : changeColumns) {
                // remove NULL values from column where mandatory is true
                removeNullValuesFromStringColumn(datatableName, column.getAsJsonObject(), mapColumnNameDefinition);
                parseDatatableColumnForUpdate(column.getAsJsonObject(), mapColumnNameDefinition, sqlBuilder, datatableName,
                        constrainBuilder, codeMappings, removeMappings, isConstraintApproach);
            }
            // Remove the first comma, right after ALTER TABLE `datatable`
            final int indexOfFirstComma = sqlBuilder.indexOf(",");
            if (indexOfFirstComma != -1) {
                sqlBuilder = sqlBuilder.deleteCharAt(indexOfFirstComma);
            }
            sqlBuilder.append(constrainBuilder);
            try {
                this.jdbcTemplate.execute(sqlBuilder.toString());
                deleteColumnCodeMapping(removeMappings);
                registerColumnCodeMapping(codeMappings);
            } catch (final Exception e) {
                if (e.getMessage().contains("Error on rename")) {
                    throw new PlatformServiceUnavailableException("error.msg.datatable.column.update.not.allowed",
                            "One of the column name modification not allowed", e);
                }
                // handle all other exceptions in here
                // check if exception message contains the
                // "invalid use of null value" SQL exception message
                // throw a 503 HTTP error -
                // PlatformServiceUnavailableException
                if (e.getMessage().toLowerCase().contains("invalid use of null value")) {
                    throw new PlatformServiceUnavailableException("error.msg.datatable.column.update.not.allowed",
                            "One of the data table columns contains null values", e);
                }
            }
        }
    } catch (final DataIntegrityViolationException e) {
        // translate raw DB errors into API validation errors with stable error codes
        final Throwable realCause = e.getCause();
        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
        final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("datatable");
        if (realCause.getMessage().toLowerCase().contains("unknown column")) {
            baseDataValidator.reset().parameter("name").failWithCode("does.not.exist");
        } else if (realCause.getMessage().toLowerCase().contains("can't drop")) {
            baseDataValidator.reset().parameter("name").failWithCode("does.not.exist");
        } else if (realCause.getMessage().toLowerCase().contains("duplicate column")) {
            baseDataValidator.reset().parameter("name").failWithCode("column.already.exists");
        }
        throwExceptionIfValidationWarningsExist(dataValidationErrors);
    } catch (final PersistenceException ee) {
        Throwable realCause = ExceptionUtils.getRootCause(ee.getCause());
        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
        final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("datatable");
        if (realCause.getMessage().toLowerCase().contains("duplicate column name")) {
            baseDataValidator.reset().parameter("name").failWithCode("duplicate.column.name");
        } else if (realCause.getMessage().contains("Table") && realCause.getMessage().contains("already exists")) {
            baseDataValidator.reset().parameter("datatableName").value(datatableName).failWithCode("datatable.already.exists");
        } else if (realCause.getMessage().contains("Column") && realCause.getMessage().contains("big")) {
            baseDataValidator.reset().parameter("column").failWithCode("length.too.big");
        } else if (realCause.getMessage().contains("Row") && realCause.getMessage().contains("large")) {
            baseDataValidator.reset().parameter("row").failWithCode("size.too.large");
        }
        throwExceptionIfValidationWarningsExist(dataValidationErrors);
    }
}
/**
 * Drops a registered datatable: verifies it is registered, valid and empty, deregisters it, then executes the
 * DROP TABLE (and, under the constraint approach, removes its column code mappings) in one batch.
 *
 * @param datatableName
 *            name of the registered datatable to drop
 */
@Transactional
@Override
public void deleteDatatable(final String datatableName) {
    try {
        this.context.authenticatedUser();
        if (!isRegisteredDataTable(datatableName)) {
            throw new DatatableNotFoundException(datatableName);
        }
        validateDatatableName(datatableName);
        assertDataTableEmpty(datatableName);
        deregisterDatatable(datatableName);
        final boolean constraintApproach = this.configurationDomainService.isConstraintApproachEnabledForDatatables();
        // one statement for the DROP, plus an optional second for code-mapping cleanup
        final String[] sqlStatements = new String[constraintApproach ? 2 : 1];
        sqlStatements[0] = "DROP TABLE `" + datatableName + "`";
        if (constraintApproach) {
            sqlStatements[1] = "delete from x_table_column_code_mappings where column_alias_name like'"
                    + datatableName.toLowerCase().replaceAll("\\s", "_") + "_%'";
        }
        this.jdbcTemplate.batchUpdate(sqlStatements);
    } catch (final DataIntegrityViolationException e) {
        final Throwable realCause = e.getCause();
        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
        final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("datatable");
        if (realCause.getMessage().contains("Unknown table")) {
            baseDataValidator.reset().parameter("datatableName").failWithCode("does.not.exist");
        }
        throwExceptionIfValidationWarningsExist(dataValidationErrors);
    }
}
/**
 * Guards destructive operations: fails unless the datatable currently holds zero rows.
 *
 * @param datatableName
 *            datatable to check
 * @throws GeneralPlatformDomainRuleException
 *             when the table is not empty
 */
private void assertDataTableEmpty(final String datatableName) {
    if (getRowCount(datatableName) != 0) {
        throw new GeneralPlatformDomainRuleException("error.msg.non.empty.datatable.cannot.be.deleted",
                "Non-empty datatable cannot be deleted.");
    }
}
/**
 * Counts the rows currently stored in the given datatable.
 *
 * @param datatableName
 *            datatable to count rows for (assumed already validated against SQL injection by callers)
 * @return number of rows in the table
 */
private int getRowCount(final String datatableName) {
    return this.jdbcTemplate.queryForObject("select count(*) from `" + datatableName + "`", Integer.class);
}
/**
 * Updates the single (one-to-one) datatable entry attached to the given application table row.
 * Delegates to {@link #updateDatatableEntry} with a null datatable row id.
 *
 * @param dataTableName
 *            registered datatable name
 * @param appTableId
 *            id of the owning application table row
 * @param command
 *            JSON command with the new column values
 * @return the processing result including the changed columns
 */
@Transactional
@Override
public CommandProcessingResult updateDatatableEntryOneToOne(final String dataTableName, final Long appTableId,
        final JsonCommand command) {
    return updateDatatableEntry(dataTableName, appTableId, null, command);
}
/**
 * Updates one specific entry of a one-to-many datatable attached to the given application table row.
 * Delegates to {@link #updateDatatableEntry} with the explicit datatable row id.
 *
 * @param dataTableName
 *            registered datatable name
 * @param appTableId
 *            id of the owning application table row
 * @param datatableId
 *            primary key of the datatable row to update
 * @param command
 *            JSON command with the new column values
 * @return the processing result including the changed columns
 */
@Transactional
@Override
public CommandProcessingResult updateDatatableEntryOneToMany(final String dataTableName, final Long appTableId, final Long datatableId,
        final JsonCommand command) {
    return updateDatatableEntry(dataTableName, appTableId, datatableId, command);
}
/**
 * Shared implementation for one-to-one and one-to-many datatable entry updates: reads the current row, computes
 * the changed columns against the incoming parameters, and issues a single UPDATE for those columns only.
 *
 * @param dataTableName
 *            registered datatable name
 * @param appTableId
 *            id of the owning application table row (scoping check)
 * @param datatableId
 *            datatable row primary key for one-to-many tables, or null for one-to-one tables
 * @param command
 *            JSON command with the new column values
 * @return the processing result carrying the scoping ids and the map of changed columns
 * @throws DatatableNotFoundException
 *             when no matching datatable row exists
 * @throws PlatformDataIntegrityException
 *             when more than one row would be updated
 */
private CommandProcessingResult updateDatatableEntry(final String dataTableName, final Long appTableId, final Long datatableId,
        final JsonCommand command) {
    final String appTable = queryForApplicationTableName(dataTableName);
    final CommandProcessingResult commandProcessingResult = checkMainResourceExistsWithinScope(appTable, appTableId);
    final GenericResultsetData grs = retrieveDataTableGenericResultSetForUpdate(appTable, dataTableName, appTableId, datatableId);
    if (grs.hasNoEntries()) {
        throw new DatatableNotFoundException(dataTableName, appTableId);
    }
    // updating must target exactly one row; multiple matches indicate a data problem
    if (grs.hasMoreThanOneEntry()) {
        throw new PlatformDataIntegrityException("error.msg.attempting.multiple.update",
                "Application table: " + dataTableName + " Foreign key id: " + appTableId);
    }
    final Type typeOfMap = new TypeToken<Map<String, String>>() {}.getType();
    final Map<String, String> dataParams = this.fromJsonHelper.extractDataMap(typeOfMap, command.json());
    String pkName = "id"; // 1:M datatable
    if (datatableId == null) {
        pkName = getFKField(appTable);
    } // 1:1 datatable
    final Map<String, Object> changes = getAffectedAndChangedColumns(grs, dataParams, pkName);
    if (!changes.isEmpty()) {
        // for 1:M tables the row is addressed by its own id, for 1:1 by the FK to the app table
        Long pkValue = appTableId;
        if (datatableId != null) {
            pkValue = datatableId;
        }
        final String sql = getUpdateSql(grs.getColumnHeaders(), dataTableName, pkName, pkValue, changes);
        LOG.info("Update sql: {}", sql);
        if (StringUtils.isNotBlank(sql)) {
            this.jdbcTemplate.update(sql);
            // echo back locale/dateFormat so the audit trail can re-interpret the changed values
            changes.put("locale", dataParams.get("locale"));
            changes.put("dateFormat", "yyyy-MM-dd");
        } else {
            LOG.info("No Changes");
        }
    }
    return new CommandProcessingResultBuilder() //
            .withOfficeId(commandProcessingResult.getOfficeId()) //
            .withGroupId(commandProcessingResult.getGroupId()) //
            .withClientId(commandProcessingResult.getClientId()) //
            .withSavingsId(commandProcessingResult.getSavingsId()) //
            .withLoanId(commandProcessingResult.getLoanId()) //
            .with(changes) //
            .build();
}
/**
 * Deletes all datatable entries belonging to the given application table row (one-to-one semantics on the FK).
 *
 * @param dataTableName
 *            registered datatable name
 * @param appTableId
 *            id of the owning application table row
 * @return the scoping result of the owning row
 * @throws DatatableEntryRequiredException
 *             when an entity-datatable check requires entries to exist
 * @throws DatatableNotFoundException
 *             when no row was deleted
 */
@Transactional
@Override
public CommandProcessingResult deleteDatatableEntries(final String dataTableName, final Long appTableId) {
    validateDatatableName(dataTableName);
    if (isDatatableAttachedToEntityDatatableCheck(dataTableName)) {
        throw new DatatableEntryRequiredException(dataTableName, appTableId);
    }
    final String appTable = queryForApplicationTableName(dataTableName);
    final CommandProcessingResult result = checkMainResourceExistsWithinScope(appTable, appTableId);
    final String deleteSql = getDeleteEntriesSql(dataTableName, getFKField(appTable), appTableId);
    // a zero count means there was nothing attached to this app-table row
    if (this.jdbcTemplate.update(deleteSql) < 1) {
        throw new DatatableNotFoundException(dataTableName, appTableId);
    }
    return result;
}
/**
 * Deletes one specific entry of a one-to-many datatable, identified by its own primary key.
 *
 * @param dataTableName
 *            registered datatable name
 * @param appTableId
 *            id of the owning application table row (scoping check)
 * @param datatableId
 *            primary key of the datatable row to delete
 * @return the scoping result of the owning row
 * @throws DatatableEntryRequiredException
 *             when an entity-datatable check requires entries to exist
 */
@Transactional
@Override
public CommandProcessingResult deleteDatatableEntry(final String dataTableName, final Long appTableId, final Long datatableId) {
    validateDatatableName(dataTableName);
    if (isDatatableAttachedToEntityDatatableCheck(dataTableName)) {
        throw new DatatableEntryRequiredException(dataTableName, appTableId);
    }
    final String appTable = queryForApplicationTableName(dataTableName);
    final CommandProcessingResult result = checkMainResourceExistsWithinScope(appTable, appTableId);
    this.jdbcTemplate.update(getDeleteEntrySql(dataTableName, datatableId));
    return result;
}
/**
 * Reads datatable rows for an application table row (or one specific datatable row) as a generic resultset,
 * with optional ordering.
 *
 * @param dataTableName
 *            registered datatable name
 * @param appTableId
 *            id of the owning application table row (also scoping check)
 * @param order
 *            optional ORDER BY clause content; validated against SQL injection before use
 * @param id
 *            optional datatable row id; only used for reading a specific entry in a one-to-many datatable
 * @return column headers plus the matching data rows
 */
@Override
public GenericResultsetData retrieveDataTableGenericResultSet(final String dataTableName, final Long appTableId, final String order,
        final Long id) {
    final String appTable = queryForApplicationTableName(dataTableName);
    checkMainResourceExistsWithinScope(appTable, appTableId);
    final List<ResultsetColumnHeaderData> columnHeaders = this.genericDataService.fillResultsetColumnHeaders(dataTableName);
    String sql;
    if (id == null) {
        // select by FK to the app table row
        final String whereClause = getFKField(appTable) + " = " + appTableId;
        SQLInjectionValidator.validateSQLInput(whereClause);
        sql = "select * from `" + dataTableName + "` where " + whereClause;
    } else {
        // select one specific datatable row by its own id
        sql = "select * from `" + dataTableName + "` where id = " + id;
    }
    if (StringUtils.isNotBlank(order)) {
        this.columnValidator.validateSqlInjection(sql, order);
        sql = sql + " order by " + order;
    }
    return new GenericResultsetData(columnHeaders, fillDatatableResultSetDataRows(sql));
}
/**
 * Reads the current datatable row(s) prior to an update, without scoping checks or ordering (the caller has
 * already scoped the application table row).
 *
 * @param appTable
 *            application table name owning the datatable
 * @param dataTableName
 *            registered datatable name
 * @param appTableId
 *            id of the owning application table row
 * @param id
 *            optional datatable row id; only used for reading a specific entry in a one-to-many datatable
 * @return column headers plus the matching data rows
 */
private GenericResultsetData retrieveDataTableGenericResultSetForUpdate(final String appTable, final String dataTableName,
        final Long appTableId, final Long id) {
    final List<ResultsetColumnHeaderData> columnHeaders = this.genericDataService.fillResultsetColumnHeaders(dataTableName);
    String sql;
    if (id == null) {
        // 1:1 case — select by FK to the app table row
        final String whereClause = getFKField(appTable) + " = " + appTableId;
        SQLInjectionValidator.validateSQLInput(whereClause);
        sql = "select * from `" + dataTableName + "` where " + whereClause;
    } else {
        // 1:M case — select one specific datatable row by its own id
        sql = "select * from `" + dataTableName + "` where id = " + id;
    }
    return new GenericResultsetData(columnHeaders, fillDatatableResultSetDataRows(sql));
}
/**
 * Verifies that the application-table row identified by {@code appTableId} exists and is visible within the
 * current user's data scope, and resolves the related office/group/client/savings/loan/entity ids.
 *
 * @param appTable
 *            application table name (e.g. m_client, m_loan)
 * @param appTableId
 *            primary key of the application table row
 * @return processing result carrying the resolved scoping ids
 * @throws DatatableNotFoundException
 *             when no row matches within the user's scope
 * @throws DatatableSystemErrorException
 *             when the scoping query unexpectedly returns more than one row
 */
private CommandProcessingResult checkMainResourceExistsWithinScope(final String appTable, final Long appTableId) {
    final String sql = dataScopedSQL(appTable, appTableId);
    LOG.info("data scoped sql: {}", sql);
    final SqlRowSet rs = this.jdbcTemplate.queryForRowSet(sql);
    if (!rs.next()) {
        throw new DatatableNotFoundException(appTable, appTableId);
    }
    final Long officeId = getLongSqlRowSet(rs, "officeId");
    final Long groupId = getLongSqlRowSet(rs, "groupId");
    final Long clientId = getLongSqlRowSet(rs, "clientId");
    final Long savingsId = getLongSqlRowSet(rs, "savingsId");
    // renamed from "LoanId": local variables follow lowerCamelCase
    final Long loanId = getLongSqlRowSet(rs, "loanId");
    final Long entityId = getLongSqlRowSet(rs, "entityId");
    if (rs.next()) {
        throw new DatatableSystemErrorException("System Error: More than one row returned from data scoping query");
    }
    return new CommandProcessingResultBuilder() //
            .withOfficeId(officeId) //
            .withGroupId(groupId) //
            .withClientId(clientId) //
            .withSavingsId(savingsId) //
            .withLoanId(loanId).withEntityId(entityId)//
            .build();
}
/**
 * Reads a nullable Long from the row set: the scoping queries emit NULL for ids that do not apply, which
 * {@code SqlRowSet.getLong} surfaces as 0, so 0 is mapped back to null here.
 *
 * @param rs
 *            row set positioned on the current row
 * @param column
 *            column label to read
 * @return the column value, or null when it was NULL/0
 */
private Long getLongSqlRowSet(final SqlRowSet rs, final String column) {
    final long raw = rs.getLong(column);
    if (raw == 0) {
        return null;
    }
    return raw;
}
/**
 * Builds the data-scoping query for the given application table and row id: the returned SQL resolves the
 * office/group/client/savings/loan/entity ids for the row, restricted to the current user's office hierarchy.
 *
 * @param appTable
 *            application table name (m_loan, m_savings_account, m_client, m_group/m_center, m_office, products)
 * @param appTableId
 *            primary key of the application table row
 * @return the scoping SQL for the supported table
 * @throws PlatformDataIntegrityException
 *             when the application table has no scoping rule defined here
 */
private String dataScopedSQL(final String appTable, final Long appTableId) {
    /*
     * unfortunately have to, one way or another, be able to restrict data to the users office hierarchy. Here, a
     * few key tables are done. But if additional fields are needed on other tables the same pattern applies
     */
    final AppUser currentUser = this.context.authenticatedUser();
    String scopedSQL = null;
    /*
     * m_loan and m_savings_account are connected to an m_office thru either an m_client or an m_group If both it
     * means it relates to an m_client that is in a group (still an m_client account)
     */
    if (appTable.equalsIgnoreCase("m_loan")) {
        // union of the client-owned and group-owned paths to the office hierarchy
        scopedSQL = "select distinctrow x.* from ("
                + " (select o.id as officeId, l.group_id as groupId, l.client_id as clientId, null as savingsId, l.id as loanId, null as entityId from m_loan l "
                + " join m_client c on c.id = l.client_id " + " join m_office o on o.id = c.office_id and o.hierarchy like '"
                + currentUser.getOffice().getHierarchy() + "%'" + " where l.id = " + appTableId + ")" + " union all "
                + " (select o.id as officeId, l.group_id as groupId, l.client_id as clientId, null as savingsId, l.id as loanId, null as entityId from m_loan l "
                + " join m_group g on g.id = l.group_id " + " join m_office o on o.id = g.office_id and o.hierarchy like '"
                + currentUser.getOffice().getHierarchy() + "%'" + " where l.id = " + appTableId + ")" + " ) x";
    }
    if (appTable.equalsIgnoreCase("m_savings_account")) {
        // same two-path union as m_loan, via client or group
        scopedSQL = "select distinctrow x.* from ("
                + " (select o.id as officeId, s.group_id as groupId, s.client_id as clientId, s.id as savingsId, null as loanId, null as entityId from m_savings_account s "
                + " join m_client c on c.id = s.client_id " + " join m_office o on o.id = c.office_id and o.hierarchy like '"
                + currentUser.getOffice().getHierarchy() + "%'" + " where s.id = " + appTableId + ")" + " union all "
                + " (select o.id as officeId, s.group_id as groupId, s.client_id as clientId, s.id as savingsId, null as loanId, null as entityId from m_savings_account s "
                + " join m_group g on g.id = s.group_id " + " join m_office o on o.id = g.office_id and o.hierarchy like '"
                + currentUser.getOffice().getHierarchy() + "%'" + " where s.id = " + appTableId + ")" + " ) x";
    }
    if (appTable.equalsIgnoreCase("m_client")) {
        scopedSQL = "select o.id as officeId, null as groupId, c.id as clientId, null as savingsId, null as loanId, null as entityId from m_client c "
                + " join m_office o on o.id = c.office_id and o.hierarchy like '" + currentUser.getOffice().getHierarchy() + "%'"
                + " where c.id = " + appTableId;
    }
    // centers are stored in m_group, so both share the same scoping rule
    if (appTable.equalsIgnoreCase("m_group") || appTable.equalsIgnoreCase("m_center")) {
        scopedSQL = "select o.id as officeId, g.id as groupId, null as clientId, null as savingsId, null as loanId, null as entityId from m_group g "
                + " join m_office o on o.id = g.office_id and o.hierarchy like '" + currentUser.getOffice().getHierarchy() + "%'"
                + " where g.id = " + appTableId;
    }
    if (appTable.equalsIgnoreCase("m_office")) {
        scopedSQL = "select o.id as officeId, null as groupId, null as clientId, null as savingsId, null as loanId, null as entityId from m_office o "
                + " where o.hierarchy like '" + currentUser.getOffice().getHierarchy() + "%'" + " and o.id = " + appTableId;
    }
    // product tables are not office-scoped; only the entity id is resolved
    if (appTable.equalsIgnoreCase("m_product_loan") || appTable.equalsIgnoreCase("m_savings_product")) {
        scopedSQL = "select null as officeId, null as groupId, null as clientId, null as savingsId, null as loanId, p.id as entityId from "
                + appTable + " as p WHERE p.id = " + appTableId;
    }
    if (scopedSQL == null) {
        throw new PlatformDataIntegrityException("error.msg.invalid.dataScopeCriteria",
                "Application Table: " + appTable + " not catered for in data Scoping");
    }
    return scopedSQL;
}
/**
 * Verifies the given application table is one of the tables datatables may attach to.
 *
 * @param appTable
 *            application table name to validate (compared case-insensitively)
 * @throws PlatformDataIntegrityException
 *             when the table is not supported
 */
private void validateAppTable(final String appTable) {
    // data-driven check replaces the previous eight-branch if/return chain
    final String[] supportedAppTables = { "m_loan", "m_savings_account", "m_client", "m_group", "m_center", "m_office",
            "m_product_loan", "m_savings_product" };
    for (final String supported : supportedAppTables) {
        if (supported.equalsIgnoreCase(appTable)) {
            return;
        }
    }
    throw new PlatformDataIntegrityException("error.msg.invalid.application.table", "Invalid Application Table: " + appTable, "name",
            appTable);
}
/**
 * Maps a logical application table name to the physical table: centers are physically stored in m_group.
 *
 * @param appTable
 *            logical application table name
 * @return the physical table name to reference in SQL
 */
private String mapToActualAppTable(final String appTable) {
    return "m_center".equalsIgnoreCase(appTable) ? "m_group" : appTable;
}
/**
 * Executes the given select and materialises every row as a list of string column values, in resultset column
 * order.
 *
 * @param sql
 *            select statement to run (already validated by callers)
 * @return one ResultsetRowData per returned row
 */
private List<ResultsetRowData> fillDatatableResultSetDataRows(final String sql) {
    final SqlRowSet rs = this.jdbcTemplate.queryForRowSet(sql);
    final SqlRowSetMetaData metaData = rs.getMetaData();
    final int columnCount = metaData.getColumnCount();
    final List<ResultsetRowData> rows = new ArrayList<>();
    while (rs.next()) {
        final List<String> values = new ArrayList<>(columnCount);
        // JDBC column indexes are 1-based
        for (int col = 1; col <= columnCount; col++) {
            values.add(rs.getString(metaData.getColumnName(col)));
        }
        rows.add(ResultsetRowData.create(values));
    }
    return rows;
}
/**
 * Looks up the application table a datatable is registered against.
 *
 * @param datatable
 *            registered datatable name (validated against SQL injection before use)
 * @return the registered application table name
 * @throws DatatableNotFoundException
 *             when the datatable is not registered
 */
private String queryForApplicationTableName(final String datatable) {
    SQLInjectionValidator.validateSQLInput(datatable);
    final String sql = "SELECT application_table_name FROM x_registered_table where registered_table_name = '" + datatable + "'";
    final SqlRowSet rs = this.jdbcTemplate.queryForRowSet(sql);
    if (!rs.next()) {
        throw new DatatableNotFoundException(datatable);
    }
    return rs.getString("application_table_name");
}
/**
 * Derives the foreign-key column name for an application table by dropping its two-character prefix and
 * appending "_id" (e.g. "m_client" -> "client_id").
 *
 * @param applicationTableName
 *            application table name, expected to carry the "m_" prefix
 * @return the FK column name
 */
private String getFKField(final String applicationTableName) {
    final String withoutPrefix = applicationTableName.substring(2);
    return withoutPrefix + "_id";
}
/**
 * Builds the INSERT ... SELECT statement for creating a datatable entry from the incoming parameters.
 * Values are rendered inline: empty values become SQL NULL, bit columns are rendered as 1/0/null, all other
 * values are single-quoted with embedded quotes doubled.
 *
 * @param columnHeaders
 *            column definitions, iterated in table order
 * @param datatable
 *            datatable to insert into
 * @param fkName
 *            FK column pointing at the application table row
 * @param appTableId
 *            FK value (the owning application table row id)
 * @param queryParams
 *            incoming column name/value pairs
 * @return the complete insert statement
 */
private String getAddSql(final List<ResultsetColumnHeaderData> columnHeaders, final String datatable, final String fkName,
        final Long appTableId, final Map<String, String> queryParams) {
    final Map<String, String> affectedColumns = getAffectedColumns(columnHeaders, queryParams, fkName);
    final String singleQuote = "'";
    // StringBuilder instead of repeated String += (avoids O(n^2) concatenation in the loop)
    final StringBuilder insertColumns = new StringBuilder();
    final StringBuilder selectColumns = new StringBuilder();
    for (final ResultsetColumnHeaderData pColumnHeader : columnHeaders) {
        final String key = pColumnHeader.getColumnName();
        if (!affectedColumns.containsKey(key)) {
            continue;
        }
        final String pValue = affectedColumns.get(key);
        final String pValueWrite;
        if (StringUtils.isEmpty(pValue)) {
            pValueWrite = "null";
        } else if ("bit".equalsIgnoreCase(pColumnHeader.getColumnType())) {
            pValueWrite = BooleanUtils.toString(BooleanUtils.toBooleanObject(pValue), "1", "0", "null");
        } else {
            // escape embedded single quotes by doubling them
            pValueWrite = singleQuote + this.genericDataService.replace(pValue, singleQuote, singleQuote + singleQuote) + singleQuote;
        }
        final String columnName = "`" + key + "`";
        insertColumns.append(", ").append(columnName);
        selectColumns.append(",").append(pValueWrite).append(" as ").append(columnName);
    }
    final String addSql = "insert into `" + datatable + "` (`" + fkName + "` " + insertColumns + ")" + " select " + appTableId
            + " as id" + selectColumns;
    LOG.info("{}", addSql);
    return addSql;
}
/**
 * This method is used special for ppi cases Where the score need to be computed: builds the INSERT ... SELECT
 * that also aggregates the PPI score from the selected code values' {@code code_score}.
 *
 * @param columnHeaders
 *            column definitions (only used to resolve the affected columns)
 * @param datatable
 *            datatable to insert into
 * @param fkName
 *            FK column pointing at the application table row
 * @param appTableId
 *            FK value (the owning application table row id)
 * @param queryParams
 *            incoming column name/value pairs; non-empty values are assumed to be m_code_value ids
 * @return the complete insert statement including the computed score column
 */
public String getAddSqlWithScore(final List<ResultsetColumnHeaderData> columnHeaders, final String datatable, final String fkName,
        final Long appTableId, final Map<String, String> queryParams) {
    final Map<String, String> affectedColumns = getAffectedColumns(columnHeaders, queryParams, fkName);
    final String singleQuote = "'";
    // StringBuilder instead of repeated String += (avoids O(n^2) concatenation in the loop);
    // entrySet iteration instead of keySet+get
    final StringBuilder insertColumns = new StringBuilder();
    final StringBuilder selectColumns = new StringBuilder();
    final StringBuilder scoreIds = new StringBuilder(" ");
    for (final Map.Entry<String, String> entry : affectedColumns.entrySet()) {
        final String key = entry.getKey();
        final String pValue = entry.getValue();
        final String pValueWrite;
        if (StringUtils.isEmpty(pValue)) {
            pValueWrite = "null";
        } else {
            pValueWrite = singleQuote + this.genericDataService.replace(pValue, singleQuote, singleQuote + singleQuote) + singleQuote;
            // only non-empty values contribute to the score lookup list
            scoreIds.append(pValueWrite).append(" ,");
        }
        final String columnName = "`" + key + "`";
        insertColumns.append(", ").append(columnName);
        selectColumns.append(",").append(pValueWrite).append(" as ").append(columnName);
    }
    // drop the trailing " ," separator, matching the original rendering exactly
    final String scoresId = scoreIds.toString().replaceAll(" ,$", "");
    final String vaddSql = "insert into `" + datatable + "` (`" + fkName + "` " + insertColumns + ", `score` )" + " select " + appTableId
            + " as id" + selectColumns + " , ( SELECT SUM( code_score ) FROM m_code_value WHERE m_code_value.id IN (" + scoresId
            + " ) ) as score";
    LOG.info("{}", vaddSql);
    return vaddSql;
}
/**
 * Builds the UPDATE statement for the columns that actually changed.
 *
 * @param columnHeaders
 *            column definitions, iterated in table order
 * @param datatable
 *            datatable to update
 * @param keyFieldName
 *            column identifying the row (FK column for 1:1, "id" for 1:M)
 * @param keyFieldValue
 *            key value of the row to update
 * @param changedColumns
 *            column name -> new value map; only these columns appear in the SET clause
 * @return the update statement, or null when nothing changed
 */
private String getUpdateSql(List<ResultsetColumnHeaderData> columnHeaders, final String datatable, final String keyFieldName,
        final Long keyFieldValue, final Map<String, Object> changedColumns) {
    // just updating fields that have changed since pre-update read - though
    // its possible these values are different from the page the user was
    // looking at and even different from the current db values (if some
    // other update got in quick) - would need a version field for
    // completeness but its okay to take this risk with additional fields
    // data
    if (changedColumns.isEmpty()) {
        return null;
    }
    final String singleQuote = "'";
    // StringBuilder instead of repeated String += (avoids O(n^2) concatenation in the loop)
    final StringBuilder sql = new StringBuilder("update `").append(datatable).append("` ");
    boolean firstColumn = true;
    for (final ResultsetColumnHeaderData pColumnHeader : columnHeaders) {
        final String key = pColumnHeader.getColumnName();
        if (!changedColumns.containsKey(key)) {
            continue;
        }
        sql.append(firstColumn ? " set " : ", ");
        firstColumn = false;
        final String pValue = (String) changedColumns.get(key);
        final String pValueWrite;
        if (StringUtils.isEmpty(pValue)) {
            pValueWrite = "null";
        } else if ("bit".equalsIgnoreCase(pColumnHeader.getColumnType())) {
            pValueWrite = BooleanUtils.toString(BooleanUtils.toBooleanObject(pValue), "1", "0", "null");
        } else {
            // escape embedded single quotes by doubling them
            pValueWrite = singleQuote + this.genericDataService.replace(pValue, singleQuote, singleQuote + singleQuote) + singleQuote;
        }
        sql.append("`").append(key).append("` = ").append(pValueWrite);
    }
    sql.append(" where ").append(keyFieldName).append(" = ").append(keyFieldValue);
    return sql.toString();
}
/**
 * Filters the affected columns down to those whose incoming value differs from the current row value.
 *
 * @param grs
 *            current row data (exactly one entry expected by callers)
 * @param queryParams
 *            incoming column name/value pairs
 * @param fkName
 *            key field name to skip when resolving affected columns
 * @return map of column name -> new value, containing only genuinely changed columns
 */
private Map<String, Object> getAffectedAndChangedColumns(final GenericResultsetData grs, final Map<String, String> queryParams,
        final String fkName) {
    final Map<String, String> affectedColumns = getAffectedColumns(grs.getColumnHeaders(), queryParams, fkName);
    final Map<String, Object> affectedAndChangedColumns = new HashMap<>();
    // iterate entries directly instead of keySet() + get() lookups
    for (final Map.Entry<String, String> entry : affectedColumns.entrySet()) {
        final String key = entry.getKey();
        final String columnValue = entry.getValue();
        final String colType = grs.getColTypeOfColumnNamed(key);
        if (columnChanged(key, columnValue, colType, grs)) {
            affectedAndChangedColumns.put(key, columnValue);
        }
    }
    return affectedAndChangedColumns;
}
/**
 * Decides whether the incoming value differs from the current value of the named column in the first data row.
 *
 * @param key
 *            column name to compare
 * @param keyValue
 *            incoming value
 * @param colType
 *            column type, forwarded to the comparison helper
 * @param grs
 *            current row data; only the first row is inspected
 * @return true when the value changed
 * @throws PlatformDataIntegrityException
 *             when the column name does not exist in the headers
 */
private boolean columnChanged(final String key, final String keyValue, final String colType, final GenericResultsetData grs) {
    final List<String> rowValues = grs.getData().get(0).getRow();
    final List<ResultsetColumnHeaderData> headers = grs.getColumnHeaders();
    for (int idx = 0; idx < headers.size(); idx++) {
        if (key.equals(headers.get(idx).getColumnName())) {
            return notTheSame(rowValues.get(idx), keyValue, colType);
        }
    }
    throw new PlatformDataIntegrityException("error.msg.invalid.columnName", "Parameter Column Name: " + key + " not found");
}
/**
 * Resolves the incoming query parameters to actual datatable columns and validates each value.
 * Parameter names are matched against column names case-insensitively, treating spaces and underscores as
 * equivalent. The id/key/locale/dateFormat parameters are control fields, not columns, and are skipped.
 *
 * @param columnHeaders
 *            column definitions of the datatable
 * @param queryParams
 *            incoming parameter name/value pairs (may include "locale" and "dateFormat" control fields)
 * @param keyFieldName
 *            the key column name to skip (FK or id)
 * @return map of resolved column name -> validated value
 * @throws PlatformDataIntegrityException
 *             when a parameter matches no column
 */
public Map<String, String> getAffectedColumns(final List<ResultsetColumnHeaderData> columnHeaders,
        final Map<String, String> queryParams, final String keyFieldName) {
    final String dateFormat = queryParams.get("dateFormat");
    final String localeQueryParam = queryParams.get("locale");
    Locale clientApplicationLocale = null;
    if (!StringUtils.isBlank(localeQueryParam)) {
        clientApplicationLocale = new Locale(queryParams.get("locale"));
    }
    final String underscore = "_";
    final String space = " ";
    final Map<String, String> affectedColumns = new HashMap<>();
    for (final String key : queryParams.keySet()) {
        // ignores id and foreign key fields, and the locale/dateFormat
        // control fields used for validating numeric and date data
        if (key.equalsIgnoreCase("id") || key.equalsIgnoreCase(keyFieldName) || key.equals("locale") || key.equals("dateFormat")) {
            continue;
        }
        // matches incoming fields with and without underscores (spaces and underscores considered the same)
        final String paramUnderscored = this.genericDataService.replace(key, space, underscore);
        boolean matched = false;
        for (final ResultsetColumnHeaderData columnHeader : columnHeaders) {
            final String headerUnderscored = this.genericDataService.replace(columnHeader.getColumnName(), space, underscore);
            if (paramUnderscored.equalsIgnoreCase(headerUnderscored)) {
                String pValue = queryParams.get(key);
                pValue = validateColumn(columnHeader, pValue, dateFormat, clientApplicationLocale);
                affectedColumns.put(columnHeader.getColumnName(), pValue);
                matched = true;
                break;
            }
        }
        if (!matched) {
            throw new PlatformDataIntegrityException("error.msg.column.not.found", "Column: " + key + " Not Found");
        }
    }
    return affectedColumns;
}
    /**
     * Validates a single incoming datatable column value against the column's
     * metadata and normalizes it to its canonical string form.
     *
     * Checks, in order: trims numeric/date/boolean values, enforces mandatory
     * columns, validates dropdown (code value / code lookup) columns against
     * their allowed values, then parses dates, date-times, integers, decimals
     * and booleans via the shared helpers, and finally enforces the maximum
     * length of string columns.
     *
     * @param columnHeader metadata describing the column being validated
     * @param pValue raw value received from the client (may be null/empty)
     * @param dateFormat client-supplied pattern used for date/date-time parsing
     * @param clientApplicationLocale locale used for number/date parsing
     * @return the normalized value (e.g. ISO date string), or {@code null}
     *         when parsing an empty/absent value
     * @throws PlatformApiDataValidationException when a validation rule fails
     * @throws PlatformDataIntegrityException when a dropdown column has an
     *         unsupported column type
     */
    private String validateColumn(final ResultsetColumnHeaderData columnHeader, final String pValue, final String dateFormat,
            final Locale clientApplicationLocale) {
        String paramValue = pValue;
        if (columnHeader.isDateDisplayType() || columnHeader.isDateTimeDisplayType() || columnHeader.isIntegerDisplayType()
                || columnHeader.isDecimalDisplayType() || columnHeader.isBooleanDisplayType()) {
            // only trim if string is not empty and is not null.
            // throws a NULL pointer exception if the check below is not applied
            paramValue = StringUtils.isNotEmpty(paramValue) ? paramValue.trim() : paramValue;
        }
        // a mandatory column may not be blank or absent
        if (StringUtils.isEmpty(paramValue) && columnHeader.isMandatory()) {
            final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
            final ApiParameterError error = ApiParameterError.parameterError("error.msg.column.mandatory", "Mandatory",
                    columnHeader.getColumnName());
            dataValidationErrors.add(error);
            throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                    dataValidationErrors);
        }
        if (StringUtils.isNotEmpty(paramValue)) {
            // dropdown columns: the value must be one of the configured entries
            if (columnHeader.hasColumnValues()) {
                if (columnHeader.isCodeValueDisplayType()) {
                    if (columnHeader.isColumnValueNotAllowed(paramValue)) {
                        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
                        final ApiParameterError error = ApiParameterError.parameterError("error.msg.invalid.columnValue",
                                "Value not found in Allowed Value list", columnHeader.getColumnName(), paramValue);
                        dataValidationErrors.add(error);
                        throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                                dataValidationErrors);
                    }
                    return paramValue;
                } else if (columnHeader.isCodeLookupDisplayType()) {
                    // lookup columns carry the code id, so the value must parse as an integer
                    final Integer codeLookup = Integer.valueOf(paramValue);
                    if (columnHeader.isColumnCodeNotAllowed(codeLookup)) {
                        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
                        final ApiParameterError error = ApiParameterError.parameterError("error.msg.invalid.columnValue",
                                "Value not found in Allowed Value list", columnHeader.getColumnName(), paramValue);
                        dataValidationErrors.add(error);
                        throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                                dataValidationErrors);
                    }
                    return paramValue;
                } else {
                    throw new PlatformDataIntegrityException("error.msg.invalid.columnType.", "Code: " + columnHeader.getColumnName()
                            + " - Invalid Type " + columnHeader.getColumnType() + " (neither varchar nor int)");
                }
            }
            // scalar columns: parse and normalize according to the display type
            if (columnHeader.isDateDisplayType()) {
                final LocalDate tmpDate = JsonParserHelper.convertFrom(paramValue, columnHeader.getColumnName(), dateFormat,
                        clientApplicationLocale);
                if (tmpDate == null) {
                    paramValue = null;
                } else {
                    paramValue = tmpDate.toString();
                }
            } else if (columnHeader.isDateTimeDisplayType()) {
                final LocalDateTime tmpDateTime = JsonParserHelper.convertDateTimeFrom(paramValue, columnHeader.getColumnName(), dateFormat,
                        clientApplicationLocale);
                if (tmpDateTime == null) {
                    paramValue = null;
                } else {
                    paramValue = tmpDateTime.toString();
                }
            } else if (columnHeader.isIntegerDisplayType()) {
                final Integer tmpInt = this.helper.convertToInteger(paramValue, columnHeader.getColumnName(), clientApplicationLocale);
                if (tmpInt == null) {
                    paramValue = null;
                } else {
                    paramValue = tmpInt.toString();
                }
            } else if (columnHeader.isDecimalDisplayType()) {
                final BigDecimal tmpDecimal = this.helper.convertFrom(paramValue, columnHeader.getColumnName(), clientApplicationLocale);
                if (tmpDecimal == null) {
                    paramValue = null;
                } else {
                    paramValue = tmpDecimal.toString();
                }
            } else if (columnHeader.isBooleanDisplayType()) {
                final Boolean tmpBoolean = BooleanUtils.toBooleanObject(paramValue);
                if (tmpBoolean == null) {
                    final ApiParameterError error = ApiParameterError
                            .parameterError(
                                    "validation.msg.invalid.boolean.format", "The parameter " + columnHeader.getColumnName()
                                            + " has value: " + paramValue + " which is invalid boolean value.",
                                    columnHeader.getColumnName(), paramValue);
                    final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
                    dataValidationErrors.add(error);
                    throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                            dataValidationErrors);
                }
                paramValue = tmpBoolean.toString();
            } else if (columnHeader.isString()) {
                // varchar columns: enforce the declared maximum length
                if (paramValue.length() > columnHeader.getColumnLength()) {
                    final ApiParameterError error = ApiParameterError.parameterError(
                            "validation.msg.datatable.entry.column.exceeds.maxlength",
                            "The column `" + columnHeader.getColumnName() + "` exceeds its defined max-length ",
                            columnHeader.getColumnName(), paramValue);
                    final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
                    dataValidationErrors.add(error);
                    throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                            dataValidationErrors);
                }
            }
        }
        return paramValue;
    }
private String getDeleteEntriesSql(final String datatable, final String FKField, final Long appTableId) {
return "delete from `" + datatable + "` where `" + FKField + "` = " + appTableId;
}
private String getDeleteEntrySql(final String datatable, final Long datatableId) {
return "delete from `" + datatable + "` where `id` = " + datatableId;
}
private boolean notTheSame(final String currValue, final String pValue, final String colType) {
if (StringUtils.isEmpty(currValue) && StringUtils.isEmpty(pValue)) {
return false;
}
if (StringUtils.isEmpty(currValue)) {
return true;
}
if (StringUtils.isEmpty(pValue)) {
return true;
}
if ("DECIMAL".equalsIgnoreCase(colType)) {
final BigDecimal currentDecimal = BigDecimal.valueOf(Double.valueOf(currValue));
final BigDecimal newDecimal = BigDecimal.valueOf(Double.valueOf(pValue));
return currentDecimal.compareTo(newDecimal) != 0;
}
if (currValue.equals(pValue)) {
return false;
}
return true;
}
@Override
public Long countDatatableEntries(final String datatableName, final Long appTableId, String foreignKeyColumn) {
final String sqlString = "SELECT COUNT(`" + foreignKeyColumn + "`) FROM `" + datatableName + "` WHERE `" + foreignKeyColumn + "`="
+ appTableId;
final Long count = this.jdbcTemplate.queryForObject(sqlString, Long.class);
return count;
}
public boolean isDatatableAttachedToEntityDatatableCheck(final String datatableName) {
StringBuilder builder = new StringBuilder();
builder.append(" SELECT COUNT(edc.`x_registered_table_name`) FROM `x_registered_table` xrt ");
builder.append(" JOIN m_entity_datatable_check edc ON edc.`x_registered_table_name` = xrt.`registered_table_name`");
builder.append(" WHERE edc.`x_registered_table_name` = '" + datatableName + "'");
final Long count = this.jdbcTemplate.queryForObject(builder.toString(), Long.class);
return count > 0 ? true : false;
}
}
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotthingsgraph.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.iotthingsgraph.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * UpdateSystemTemplateResult JSON Unmarshaller
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateSystemTemplateResultJsonUnmarshaller implements Unmarshaller<UpdateSystemTemplateResult, JsonUnmarshallerContext> {
    // NOTE: code-generated class (see @Generated) — regenerate rather than hand-edit.

    /**
     * Reads an UpdateSystemTemplateResult from the JSON stream held by the
     * given context, consuming tokens until the parser climbs back out of the
     * result object.
     */
    public UpdateSystemTemplateResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        UpdateSystemTemplateResult updateSystemTemplateResult = new UpdateSystemTemplateResult();
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // only fields exactly one level below the starting depth belong to this result
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // a JSON null stands for an empty result object
        if (token == VALUE_NULL) {
            return updateSystemTemplateResult;
        }
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("summary", targetDepth)) {
                    context.nextToken();
                    updateSystemTemplateResult.setSummary(SystemTemplateSummaryJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // stop once the parser has closed the object we started in
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return updateSystemTemplateResult;
    }
    private static UpdateSystemTemplateResultJsonUnmarshaller instance;

    /**
     * Returns the shared unmarshaller. The unsynchronized lazy initialization
     * is benign for this stateless class: a race can at worst construct an
     * extra throwaway instance.
     */
    public static UpdateSystemTemplateResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new UpdateSystemTemplateResultJsonUnmarshaller();
        return instance;
    }
}
|
package com.electronicpanopticon.potemkin.haist;
import com.electronicpanopticon.potemkin.example.Example;
import com.electronicpanopticon.potemkin.example.ExampleFake;
import com.electronicpanopticon.potemkin.example.ImmutableExample;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* Created by chris on 12/4/16.
*/
public class HaistTest {

    /** Classpath location of the example fixture file. */
    private static final String EXAMPLES = "raw/examples.json";

    /** Fixture entry expected to be present in {@link #EXAMPLES}. */
    private static final ImmutableExample WALLACE = ImmutableExample.builder()
            .id("0ef536ef-4b39-4212-ac4b-809110038de7")
            .age(65)
            .firstName("Wallace")
            .height(204.8)
            .lastName("Keeling")
            .weight(202.5)
            .build();

    private final Haist<ImmutableExample> subject = new Haist<>(ImmutableExample.class, EXAMPLES);

    @Test
    public void getFilename() {
        assertEquals(EXAMPLES, subject.getFilename());
    }

    // Disabled test kept for reference; WALLACE above is its expected value.
    // @Test
    // public void laish() throws IOException {
    //     Map<String, ImmutableExample> map = subject.read();
    //     assertEquals(WALLACE, map.get("0ef536ef-4b39-4212-ac4b-809110038de7"));
    // }

    /**
     * Ad-hoc generator: prints one fake example and a pretty-printed batch of
     * fourteen, for refreshing the fixture file by hand.
     */
    public static void main(String... args) throws JsonProcessingException {
        Example ex = ImmutableExample.builder()
                .id("boop")
                .age(19)
                .firstName("Joe")
                .height(6.2)
                .lastName("Bob")
                .weight(254.2)
                .build();

        ObjectMapper objectMapper = new ObjectMapper();
        // Jdk8Module teaches Jackson about Optional and friends used by the immutables.
        objectMapper.registerModule(new Jdk8Module());

        String json = objectMapper.writeValueAsString(ExampleFake.fake());
        System.out.println(json);

        json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(ExampleFake.fakes(14));
        System.out.println(json);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.dbcp2.datasources;
import java.sql.SQLException;
import javax.sql.PooledConnection;
/**
 * Methods to manage PoolableConnections and the connection pools that source them.
 *
 * @since 2.0
 */
interface PooledConnectionManager {
    /**
     * Closes the connection pool associated with the given user.
     *
     * @param userName
     *            user name
     * @throws SQLException
     *             if an error occurs closing idle connections in the pool
     */
    void closePool(String userName) throws SQLException;
    // /**
    // * Sets the database password used when creating connections.
    // *
    // * @param password password used when authenticating to the database
    // * @since 3.0.0
    // */
    // void setPassword(char[] password);
    /**
     * Closes the PooledConnection and remove it from the connection pool to which it belongs, adjusting pool counters.
     *
     * @param pc
     *            PooledConnection to be invalidated
     * @throws SQLException
     *             if an SQL error occurs closing the connection
     */
    void invalidate(PooledConnection pc) throws SQLException;
    /**
     * Sets the database password used when creating connections.
     * <p>
     * NOTE(review): a {@code char[]} overload is sketched in the commented-out
     * block above (slated for 3.0.0); prefer it when introduced, since char
     * arrays can be wiped after use while Strings cannot.
     *
     * @param password
     *            password used when authenticating to the database
     */
    void setPassword(String password);
}
|
// Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.api.model;
import java.util.List;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
/** A list of snapshot policies. */
@XmlRootElement(name = "snapshotPolicyList")
public class ApiSnapshotPolicyList extends ApiListBase<ApiSnapshotPolicy> {

  /** No-arg constructor required by JAX-B. */
  public ApiSnapshotPolicyList() {
  }

  /** Creates a list wrapping the given policies. */
  public ApiSnapshotPolicyList(List<ApiSnapshotPolicy> policies) {
    super(policies);
  }

  /** Returns the wrapped policies; serialized under the standard items wrapper. */
  @XmlElementWrapper(name = ApiListBase.ITEMS_ATTR)
  public List<ApiSnapshotPolicy> getPolicies() {
    return values;
  }

  /** Replaces the wrapped policies. */
  public void setPolicies(List<ApiSnapshotPolicy> policies) {
    this.values = policies;
  }
}
|
/*
* Copyright 2016-2019 The jetcd authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.etcd.jetcd.common.exception;
/**
 * Thrown when an operation is attempted on a snapshot that has already been
 * closed. Mapped to the {@code CANCELLED} error code.
 */
public class ClosedSnapshotException extends EtcdException {

    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    public ClosedSnapshotException() {
        super(ErrorCode.CANCELLED, "Snapshot has been closed", null);
    }
}
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2016 Dirk Beyer
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.arg.witnessexport;
import com.google.common.base.Preconditions;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.MapDifference;
import com.google.common.collect.MapDifference.ValueDifference;
import com.google.common.collect.Maps;
import java.util.Map;
import java.util.Optional;
import org.sosy_lab.cpachecker.cpa.arg.witnessexport.TransitionCondition.Scope;
import org.sosy_lab.cpachecker.util.automaton.AutomatonGraphmlCommon.KeyDef;
/**
 * An edge corresponds to the transfer from one node to another.
 * This class is intended to be immutable.
 */
class Edge implements Comparable<Edge> {
  // graph node identifiers; non-null (checked in the constructor)
  private final String source;
  private final String target;
  // condition under which this transition is taken
  private final TransitionCondition label;
  // lazily computed hash; 0 means "not yet computed" (a genuine hash of 0
  // would just be recomputed on each call, which is harmless)
  private int hashCode = 0;
  public Edge(String pSource, String pTarget, TransitionCondition pLabel) {
    Preconditions.checkNotNull(pSource);
    Preconditions.checkNotNull(pTarget);
    Preconditions.checkNotNull(pLabel);
    this.source = pSource;
    this.target = pTarget;
    this.label = pLabel;
  }
  @Override
  public String toString() {
    return String.format("{%s -- %s --> %s}", source, label, target);
  }
  // Orders by source, then target, then label.
  @Override
  public int compareTo(Edge pO) {
    return ComparisonChain.start()
        .compare(source, pO.source)
        .compare(target, pO.target)
        .compare(label, pO.label)
        .result();
  }
  @Override
  public int hashCode() {
    if (hashCode == 0) {
      final int prime = 31;
      // fields are never null here, so the null guards are defensive only
      hashCode = prime + ((label == null) ? 0 : label.hashCode());
      hashCode = prime * hashCode + ((source == null) ? 0 : source.hashCode());
      hashCode = prime * hashCode + ((target == null) ? 0 : target.hashCode());
    }
    return hashCode;
  }
  @Override
  public boolean equals(Object pOther) {
    if (this == pOther) {
      return true;
    }
    if (pOther instanceof Edge) {
      Edge other = (Edge) pOther;
      return source.equals(other.source)
          && target.equals(other.target)
          && label.equals(other.label);
    }
    return false;
  }
  public String getSource() {
    return source;
  }
  public String getTarget() {
    return target;
  }
  public TransitionCondition getLabel() {
    return label;
  }
  /**
   * Attempts to merge this edge with another edge between the same pair of
   * nodes. Merging succeeds only when both labels carry exactly the same keys
   * and their scopes can be merged; values may differ only for line/offset
   * keys, which are widened to cover both ranges (minimum of the start keys,
   * maximum of the end keys). Any other differing key aborts the merge.
   *
   * @return the merged edge, or {@link Optional#empty()} when unmergeable
   */
  public Optional<Edge> tryMerge(Edge pOther) {
    if (!source.equals(pOther.source)) {
      return Optional.empty();
    }
    if (!target.equals(pOther.target)) {
      return Optional.empty();
    }
    MapDifference<KeyDef, String> difference =
        Maps.difference(label.getMapping(), pOther.label.getMapping());
    // keys present on only one side make the labels incompatible
    if (!difference.entriesOnlyOnLeft().isEmpty() || !difference.entriesOnlyOnRight().isEmpty()) {
      return Optional.empty();
    }
    TransitionCondition newLabel = pOther.label;
    Optional<Scope> newScope = label.getScope().mergeWith(newLabel.getScope());
    if (!newScope.isPresent()) {
      return Optional.empty();
    }
    newLabel = newLabel.withScope(newScope.get());
    newLabel = newLabel.putAllAndCopy(label);
    for (Map.Entry<KeyDef, ValueDifference<String>> diffEntry :
        difference.entriesDiffering().entrySet()) {
      KeyDef key = diffEntry.getKey();
      ValueDifference<String> diff = diffEntry.getValue();
      final String result;
      switch (key) {
        case STARTLINE:
        case OFFSET:
          // widen downward: earliest start wins
          int lowA = Integer.parseInt(diff.leftValue());
          int lowB = Integer.parseInt(diff.rightValue());
          result = Integer.toString(Math.min(lowA, lowB));
          break;
        case ENDLINE:
        case ENDOFFSET:
          // widen upward: latest end wins
          int highA = Integer.parseInt(diff.leftValue());
          int highB = Integer.parseInt(diff.rightValue());
          result = Integer.toString(Math.max(highA, highB));
          break;
        default:
          return Optional.empty();
      }
      newLabel = newLabel.putAndCopy(key, result);
    }
    return Optional.of(new Edge(source, target, newLabel));
  }
}
|
package ch.spacebase.mcprotocol.net;
import java.util.ArrayList;
import java.util.List;
import ch.spacebase.mcprotocol.event.ProtocolEvent;
import ch.spacebase.mcprotocol.event.ProtocolListener;
/**
 * A basic connection class.
 */
public abstract class BaseConnection implements Connection {

    /** Remote host this connection targets. */
    private final String host;

    /** Remote port this connection targets. */
    private final int port;

    /** Username associated with this connection; write-once, initially null. */
    private String username;

    /** Registry describing the packets this connection understands. */
    private final PacketRegistry packets;

    /** Listeners notified of protocol events fired on this connection. */
    private final List<ProtocolListener> listeners = new ArrayList<ProtocolListener>();

    /**
     * Creates a new connection.
     *
     * @param packets Packet registry used by this connection.
     * @param host Host to connect to.
     * @param port Port to connect to.
     */
    public BaseConnection(PacketRegistry packets, String host, int port) {
        this.host = host;
        this.port = port;
        this.packets = packets;
    }

    @Override
    public String getRemoteHost() {
        return this.host;
    }

    @Override
    public int getRemotePort() {
        return this.port;
    }

    @Override
    public PacketRegistry getPacketRegistry() {
        return this.packets;
    }

    @Override
    public String getUsername() {
        return this.username;
    }

    @Override
    public void setUsername(String name) {
        // Write-once: once a username is set, later calls are silently ignored.
        if (this.username == null) {
            this.username = name;
        }
    }

    @Override
    public void listen(ProtocolListener listener) {
        this.listeners.add(listener);
    }

    @Override
    public <T extends ProtocolEvent<ProtocolListener>> T call(T event) {
        // Dispatch the event to every registered listener, then hand it back
        // so callers can inspect any state the listeners changed.
        for (ProtocolListener registered : this.listeners) {
            event.call(registered);
        }
        return event;
    }
}
|
package com.google.code.kaptcha.text.impl;
import java.util.Random;
import com.google.code.kaptcha.text.TextProducer;
import com.google.code.kaptcha.util.Configurable;
/**
 * {@link DefaultTextCreator} creates random text from an array of characters
 * with specified length.
 */
public class DefaultTextCreator extends Configurable implements TextProducer
{
	/**
	 * Builds a random string by picking {@code length} characters uniformly
	 * from the configured character array.
	 *
	 * @return the random text
	 */
	public String getText()
	{
		int length = getConfig().getTextProducerCharLength();
		char[] chars = getConfig().getTextProducerCharString();
		Random rand = new Random();
		// StringBuilder (unsynchronized, presized) instead of StringBuffer:
		// no other thread touches this local, so the locking and resizing of
		// the previous StringBuffer were pure overhead.
		StringBuilder text = new StringBuilder(length);
		for (int i = 0; i < length; i++)
		{
			text.append(chars[rand.nextInt(chars.length)]);
		}
		return text.toString();
	}
}
|
package io.typefox.yang.utils;
import com.google.common.base.Objects;
import com.google.common.collect.Iterables;
import com.google.inject.Singleton;
import io.typefox.yang.yang.AbstractModule;
import io.typefox.yang.yang.BelongsTo;
import io.typefox.yang.yang.Import;
import io.typefox.yang.yang.OtherStatement;
import io.typefox.yang.yang.Prefix;
import io.typefox.yang.yang.Statement;
import io.typefox.yang.yang.Submodule;
import io.typefox.yang.yang.YangVersion;
import java.util.Arrays;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtext.EcoreUtil2;
import org.eclipse.xtext.xbase.lib.IterableExtensions;
/**
 * Convenient extension methods for the YANG language.
 *
 * @author akos.kitta
 */
@Singleton
@SuppressWarnings("all")
public class YangExtensions {
  // NOTE(review): Xtend-generated Java (note the synthesized _-prefixed locals
  // and dispatch methods) — prefer editing the .xtend source and regenerating.

  /**
   * The {@code 1.0} YANG version.
   */
  public static final String YANG_1 = "1";
  /**
   * The {@code 1.1} YANG version.
   */
  public static final String YANG_1_1 = "1.1";
  /**
   * Returns with the YANG version of the module where the AST node element is contained.
   * <p>
   * Returns with version {@code 1} if the container module does not declare the version or the version equals
   * with {@code 1}.
   * <p>
   * Returns with {@code 1.1} if the container module has declared YANG version, and that equals to {@code 1.1},
   * otherwise returns with {@code null}. Also returns with {@code null}, if the argument is not contained in a module.
   */
  public String getYangVersion(final EObject it) {
    final AbstractModule module = EcoreUtil2.<AbstractModule>getContainerOfType(it, AbstractModule.class);
    if ((module == null)) {
      return null;
    }
    // null-safe navigation generated from the Xtend `?.` operator
    YangVersion _firstSubstatementsOfType = this.<YangVersion>firstSubstatementsOfType(module, YangVersion.class);
    String _yangVersion = null;
    if (_firstSubstatementsOfType!=null) {
      _yangVersion=_firstSubstatementsOfType.getYangVersion();
    }
    final String version = _yangVersion;
    if (((null == version) || Objects.equal(YangExtensions.YANG_1, version))) {
      return YangExtensions.YANG_1;
    }
    String _xifexpression = null;
    boolean _equals = Objects.equal(YangExtensions.YANG_1_1, version);
    if (_equals) {
      _xifexpression = YangExtensions.YANG_1_1;
    } else {
      _xifexpression = null;
    }
    return _xifexpression;
  }
  /**
   * Returns with all sub-statements of a given type for the statement argument.
   */
  public <S extends Statement> Iterable<? extends S> substatementsOfType(final Statement it, final Class<? extends S> clazz) {
    return Iterables.filter(it.getSubstatements(), clazz);
  }
  /**
   * Returns with the first sub-statement of a given type for the statement argument or {@code null}.
   */
  public <S extends Statement> S firstSubstatementsOfType(final Statement it, final Class<? extends S> clazz) {
    return IterableExtensions.head(this.<S>substatementsOfType(it, clazz));
  }
  /**
   * Returns with the last sub-statement of a given type for the statement argument or {@code null}.
   */
  public <S extends Statement> S lastSubstatementsOfType(final Statement it, final Class<? extends S> clazz) {
    return IterableExtensions.last(this.<S>substatementsOfType(it, clazz));
  }
  /**
   * Returns the main module this element belongs to.
   * Returns the containing module, or the belongs-to module if this element is contained in a submodule;
   * {@code null} when neither applies.
   */
  public io.typefox.yang.yang.Module getMainModule(final EObject obj) {
    final AbstractModule module = EcoreUtil2.<AbstractModule>getContainerOfType(obj, AbstractModule.class);
    boolean _matched = false;
    // submodules resolve through their first belongs-to statement
    if (module instanceof Submodule) {
      _matched=true;
      BelongsTo _head = IterableExtensions.<BelongsTo>head(Iterables.<BelongsTo>filter(((Submodule)module).getSubstatements(), BelongsTo.class));
      io.typefox.yang.yang.Module _module = null;
      if (_head!=null) {
        _module=_head.getModule();
      }
      return _module;
    }
    if (!_matched) {
      if (module instanceof io.typefox.yang.yang.Module) {
        _matched=true;
        return ((io.typefox.yang.yang.Module)module);
      }
    }
    return null;
  }
  /**
   * Returns the prefix of an element (dispatch case for modules:
   * the module's own prefix statement, or {@code null}).
   */
  protected String _getPrefix(final io.typefox.yang.yang.Module it) {
    Prefix _head = IterableExtensions.<Prefix>head(Iterables.<Prefix>filter(it.getSubstatements(), Prefix.class));
    String _prefix = null;
    if (_head!=null) {
      _prefix=_head.getPrefix();
    }
    return _prefix;
  }
  // dispatch case: a submodule's prefix comes from its belongs-to statement
  protected String _getPrefix(final Submodule it) {
    BelongsTo _head = IterableExtensions.<BelongsTo>head(Iterables.<BelongsTo>filter(it.getSubstatements(), BelongsTo.class));
    String _prefix = null;
    if (_head!=null) {
      _prefix=this.getPrefix(_head);
    }
    return _prefix;
  }
  // dispatch case: prefix statement nested directly under belongs-to
  protected String _getPrefix(final BelongsTo it) {
    Prefix _head = IterableExtensions.<Prefix>head(Iterables.<Prefix>filter(it.getSubstatements(), Prefix.class));
    String _prefix = null;
    if (_head!=null) {
      _prefix=_head.getPrefix();
    }
    return _prefix;
  }
  // dispatch case: prefix statement nested under an import
  protected String _getPrefix(final Import it) {
    Prefix _head = IterableExtensions.<Prefix>head(Iterables.<Prefix>filter(it.getSubstatements(), Prefix.class));
    String _prefix = null;
    if (_head!=null) {
      _prefix=_head.getPrefix();
    }
    return _prefix;
  }
  /**
   * Extracts the revision date from a {@code name@revision} file name,
   * or returns {@code null} when the file name carries no {@code @} part.
   */
  public String getRevisionFromFileName(final AbstractModule module) {
    String _xblockexpression = null;
    {
      final String rawFileName = module.eResource().getURI().trimFileExtension().lastSegment();
      final int index = rawFileName.indexOf("@");
      String _xifexpression = null;
      if ((index > (-1))) {
        _xifexpression = rawFileName.substring((index + 1));
      } else {
        _xifexpression = null;
      }
      _xblockexpression = _xifexpression;
    }
    return _xblockexpression;
  }
  /**
   * Generated dispatcher for the {@code _getPrefix} cases above.
   */
  public String getPrefix(final OtherStatement it) {
    if (it instanceof Import) {
      return _getPrefix((Import)it);
    } else if (it instanceof io.typefox.yang.yang.Module) {
      return _getPrefix((io.typefox.yang.yang.Module)it);
    } else if (it instanceof Submodule) {
      return _getPrefix((Submodule)it);
    } else if (it instanceof BelongsTo) {
      return _getPrefix((BelongsTo)it);
    } else {
      throw new IllegalArgumentException("Unhandled parameter types: " +
        Arrays.<Object>asList(it).toString());
    }
  }
}
|
package colecao;
import java.util.ArrayList;
import java.util.List;
public class TestaArrayList2 {

    /**
     * Demonstrates filling a list with names and printing each entry.
     */
    public static void main(String[] args) {
        // Program to the List interface rather than the ArrayList implementation.
        List<String> lista = new ArrayList<String>();
        lista.add("Ana");
        lista.add("Joao");
        lista.add("Maria");
        lista.add("Pedro");
        // Iterate with the element type (String) instead of the raw Object.
        for (String nome : lista) {
            System.out.println(nome);
        }
    }
}
|
package ru.innopolis.stc12.booksharing.model.dao.interfaces;
import ru.innopolis.stc12.booksharing.model.dao.entity.BookCopy;
import ru.innopolis.stc12.booksharing.model.dao.entity.BookEdition;
import java.io.Serializable;
import java.util.List;
/**
 * DAO for physical book copies, extending the generic CRUD operations of
 * {@code AbstractDao}.
 */
public interface BookCopiesDao<T extends Serializable> extends AbstractDao<T> {
    /**
     * Returns the copies registered for the given book edition.
     *
     * @param bookEdition the edition whose copies are looked up
     * @return the matching copies — presumably an empty list when none exist;
     *         TODO confirm null-vs-empty contract against the implementation
     */
    List<BookCopy> getBookCopiesOfBookEdition(BookEdition bookEdition);
}
|
/* ====================================================================
Copyright 2003-2004 Apache Software Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.formula;
/**
 * Marker base class for control parsed-things (Ptg) in formula token streams.
 * Adds no members of its own; it exists to group the control tokens under a
 * common type.
 */
public abstract class ControlPtg
    extends Ptg
{
}
|
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright (c) 2005, 2014, Oracle and/or its affiliates. All rights reserved.
*/
/*
* $Id: ApacheCanonicalizer.java 1333869 2012-05-04 10:42:44Z coheigea $
*/
package org.jcp.xml.dsig.internal.dom;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.security.spec.AlgorithmParameterSpec;
import java.security.InvalidAlgorithmParameterException;
import java.util.Set;
import javax.xml.crypto.*;
import javax.xml.crypto.dom.DOMCryptoContext;
import javax.xml.crypto.dsig.TransformException;
import javax.xml.crypto.dsig.TransformService;
import javax.xml.crypto.dsig.spec.C14NMethodParameterSpec;
import com.sun.org.apache.xml.internal.security.c14n.Canonicalizer;
import com.sun.org.apache.xml.internal.security.c14n.InvalidCanonicalizerException;
import com.sun.org.apache.xml.internal.security.signature.XMLSignatureInput;
import com.sun.org.apache.xml.internal.security.transforms.Transform;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
public abstract class ApacheCanonicalizer extends TransformService {
static {
com.sun.org.apache.xml.internal.security.Init.init();
}
private static java.util.logging.Logger log =
java.util.logging.Logger.getLogger("org.jcp.xml.dsig.internal.dom");
protected Canonicalizer apacheCanonicalizer;
private Transform apacheTransform;
protected String inclusiveNamespaces;
protected C14NMethodParameterSpec params;
protected Document ownerDoc;
protected Element transformElem;
    /**
     * Returns the algorithm parameters supplied when this service was
     * initialized, or {@code null} if none were set.
     */
    public final AlgorithmParameterSpec getParameterSpec()
    {
        return params;
    }
    /**
     * Initializes this TransformService from the given parent structure.
     * Only the DOM mechanism is supported: {@code context}, when non-null,
     * must be a {@link DOMCryptoContext}, and {@code parent} must be a
     * {@link javax.xml.crypto.dom.DOMStructure}; the wrapped element and its
     * owner document are captured for later use.
     */
    public void init(XMLStructure parent, XMLCryptoContext context)
        throws InvalidAlgorithmParameterException
    {
        // reject contexts from non-DOM XML mechanisms
        if (context != null && !(context instanceof DOMCryptoContext)) {
            throw new ClassCastException
                ("context must be of type DOMCryptoContext");
        }
        if (parent == null) {
            throw new NullPointerException();
        }
        if (!(parent instanceof javax.xml.crypto.dom.DOMStructure)) {
            throw new ClassCastException("parent must be of type DOMStructure");
        }
        transformElem = (Element)
            ((javax.xml.crypto.dom.DOMStructure)parent).getNode();
        ownerDoc = DOMUtils.getOwnerDocument(transformElem);
    }
    /**
     * Marshals this service's parameters under the given parent structure.
     * Requirements mirror {@code init}: a DOM context (when non-null) and a
     * {@link javax.xml.crypto.dom.DOMStructure} parent.
     *
     * NOTE(review): the body is currently identical to {@code init(...)} —
     * presumably intentional for these canonicalizers (capturing the element
     * is all that is needed); confirm before deduplicating.
     */
    public void marshalParams(XMLStructure parent, XMLCryptoContext context)
        throws MarshalException
    {
        if (context != null && !(context instanceof DOMCryptoContext)) {
            throw new ClassCastException
                ("context must be of type DOMCryptoContext");
        }
        if (parent == null) {
            throw new NullPointerException();
        }
        if (!(parent instanceof javax.xml.crypto.dom.DOMStructure)) {
            throw new ClassCastException("parent must be of type DOMStructure");
        }
        transformElem = (Element)
            ((javax.xml.crypto.dom.DOMStructure)parent).getNode();
        ownerDoc = DOMUtils.getOwnerDocument(transformElem);
    }
public Data canonicalize(Data data, XMLCryptoContext xc)
throws TransformException
{
return canonicalize(data, xc, null);
}
public Data canonicalize(Data data, XMLCryptoContext xc, OutputStream os)
throws TransformException
{
if (apacheCanonicalizer == null) {
try {
apacheCanonicalizer = Canonicalizer.getInstance(getAlgorithm());
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "Created canonicalizer for algorithm: " + getAlgorithm());
}
} catch (InvalidCanonicalizerException ice) {
throw new TransformException
("Couldn't find Canonicalizer for: " + getAlgorithm() +
": " + ice.getMessage(), ice);
}
}
if (os != null) {
apacheCanonicalizer.setWriter(os);
} else {
apacheCanonicalizer.setWriter(new ByteArrayOutputStream());
}
try {
Set<Node> nodeSet = null;
if (data instanceof ApacheData) {
XMLSignatureInput in =
((ApacheData)data).getXMLSignatureInput();
if (in.isElement()) {
if (inclusiveNamespaces != null) {
return new OctetStreamData(new ByteArrayInputStream
(apacheCanonicalizer.canonicalizeSubtree
(in.getSubNode(), inclusiveNamespaces)));
} else {
return new OctetStreamData(new ByteArrayInputStream
(apacheCanonicalizer.canonicalizeSubtree
(in.getSubNode())));
}
} else if (in.isNodeSet()) {
nodeSet = in.getNodeSet();
} else {
return new OctetStreamData(new ByteArrayInputStream(
apacheCanonicalizer.canonicalize(
Utils.readBytesFromStream(in.getOctetStream()))));
}
} else if (data instanceof DOMSubTreeData) {
DOMSubTreeData subTree = (DOMSubTreeData)data;
if (inclusiveNamespaces != null) {
return new OctetStreamData(new ByteArrayInputStream
(apacheCanonicalizer.canonicalizeSubtree
(subTree.getRoot(), inclusiveNamespaces)));
} else {
return new OctetStreamData(new ByteArrayInputStream
(apacheCanonicalizer.canonicalizeSubtree
(subTree.getRoot())));
}
} else if (data instanceof NodeSetData) {
NodeSetData<?> nsd = (NodeSetData<?>)data;
// convert Iterator to Set<Node>
nodeSet = Utils.toNodeSet(nsd.iterator());
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "Canonicalizing " + nodeSet.size() + " nodes");
}
} else {
return new OctetStreamData(new ByteArrayInputStream(
apacheCanonicalizer.canonicalize(
Utils.readBytesFromStream(
((OctetStreamData)data).getOctetStream()))));
}
if (inclusiveNamespaces != null) {
return new OctetStreamData(new ByteArrayInputStream(
apacheCanonicalizer.canonicalizeXPathNodeSet
(nodeSet, inclusiveNamespaces)));
} else {
return new OctetStreamData(new ByteArrayInputStream(
apacheCanonicalizer.canonicalizeXPathNodeSet(nodeSet)));
}
} catch (Exception e) {
throw new TransformException(e);
}
}
public Data transform(Data data, XMLCryptoContext xc, OutputStream os)
throws TransformException
{
if (data == null) {
throw new NullPointerException("data must not be null");
}
if (os == null) {
throw new NullPointerException("output stream must not be null");
}
if (ownerDoc == null) {
throw new TransformException("transform must be marshalled");
}
if (apacheTransform == null) {
try {
apacheTransform =
new Transform(ownerDoc, getAlgorithm(), transformElem.getChildNodes());
apacheTransform.setElement(transformElem, xc.getBaseURI());
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "Created transform for algorithm: " + getAlgorithm());
}
} catch (Exception ex) {
throw new TransformException
("Couldn't find Transform for: " + getAlgorithm(), ex);
}
}
XMLSignatureInput in;
if (data instanceof ApacheData) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "ApacheData = true");
}
in = ((ApacheData)data).getXMLSignatureInput();
} else if (data instanceof NodeSetData) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "isNodeSet() = true");
}
if (data instanceof DOMSubTreeData) {
DOMSubTreeData subTree = (DOMSubTreeData)data;
in = new XMLSignatureInput(subTree.getRoot());
in.setExcludeComments(subTree.excludeComments());
} else {
Set<Node> nodeSet =
Utils.toNodeSet(((NodeSetData)data).iterator());
in = new XMLSignatureInput(nodeSet);
}
} else {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "isNodeSet() = false");
}
try {
in = new XMLSignatureInput
(((OctetStreamData)data).getOctetStream());
} catch (Exception ex) {
throw new TransformException(ex);
}
}
try {
in = apacheTransform.performTransform(in, os);
if (!in.isNodeSet() && !in.isElement()) {
return null;
}
if (in.isOctetStream()) {
return new ApacheOctetStreamData(in);
} else {
return new ApacheNodeSetData(in);
}
} catch (Exception ex) {
throw new TransformException(ex);
}
}
public final boolean isFeatureSupported(String feature) {
if (feature == null) {
throw new NullPointerException();
} else {
return false;
}
}
}
|
/*
* Copyright 2018 sukawasatoru
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package jp.tinyport.example.featurecoordinator.dependency.dependency;
import android.content.Context;
import jp.tinyport.featurecoordinator.Dependency;
/**
 * Sample {@code Dependency} implementation whose availability check is
 * deliberately expensive: it spins through one hundred million no-op calls
 * before reporting the dependency as unavailable.
 */
public class SampleDependency implements Dependency {
    public SampleDependency() {
    }

    /**
     * Burns CPU on a long no-op loop, then reports unavailable.
     *
     * @param context unused
     * @return always {@code false}
     */
    @Override
    public boolean isAvailable(Context context) {
        int iteration = 0;
        while (iteration < 100000000) {
            hoge(iteration);
            iteration++;
        }
        return false;
    }

    /** Intentionally empty; exists only to give the loop body a call target. */
    static void hoge(int i) {
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.