language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/NewInstanceTest.java | {
"start": 2627,
"end": 3139
} | class ____ {
private static int counter;
private final int id;
public MyBean() {
id = generateId();
}
protected static synchronized int generateId() {
return ++counter;
}
@Override
public String toString() {
return "MyBean[" + id + "]";
}
public int read(@Body String body) {
LOG.info("read() method called with: {} on {}", body, this);
return id;
}
}
}
| MyBean |
java | apache__flink | flink-clients/src/main/java/org/apache/flink/client/cli/CliFrontendParser.java | {
"start": 1584,
"end": 2237
} | class ____ {
static final Option HELP_OPTION =
new Option(
"h",
"help",
false,
"Show the help message for the CLI Frontend or the action.");
static final Option JAR_OPTION = new Option("j", "jarfile", true, "Flink program JAR file.");
static final Option CLASS_OPTION =
new Option(
"c",
"class",
true,
"Class with the program entry point (\"main()\" method). Only needed if the "
+ "JAR file does not specify the | CliFrontendParser |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/event/connection/ReauthenticationEvent.java | {
"start": 325,
"end": 724
} | class ____ implements AuthenticationEvent {
private final String epId;
/**
* Create a new {@link ReauthenticationEvent} given a connection endpoint ID
*
* @param epId the connection endpoint ID
*/
public ReauthenticationEvent(String epId) {
this.epId = epId;
}
@Override
public String getEpId() {
return epId;
}
}
| ReauthenticationEvent |
java | google__guava | android/guava-tests/test/com/google/common/reflect/TypeTokenTest.java | {
"start": 88412,
"end": 90807
} | class ____<From, To> {
boolean isAssignable() {
return new TypeToken<To>(getClass()) {}.isSupertypeOf(new TypeToken<From>(getClass()) {});
}
static <From, To> Assignability<From, To> of() {
return new Assignability<>();
}
}
private static void assertAssignable(TypeToken<?> from, TypeToken<?> to) {
assertTrue(
from.getType() + " is expected to be assignable to " + to.getType(),
to.isSupertypeOf(from));
assertTrue(
to.getType() + " is expected to be a supertype of " + from.getType(),
to.isSupertypeOf(from));
assertTrue(
from.getType() + " is expected to be a subtype of " + to.getType(), from.isSubtypeOf(to));
}
private static void assertNotAssignable(TypeToken<?> from, TypeToken<?> to) {
assertFalse(
from.getType() + " shouldn't be assignable to " + to.getType(), to.isSupertypeOf(from));
assertFalse(
to.getType() + " shouldn't be a supertype of " + from.getType(), to.isSupertypeOf(from));
assertFalse(
from.getType() + " shouldn't be a subtype of " + to.getType(), from.isSubtypeOf(to));
}
private static void assertHasArrayInterfaces(TypeToken<?> arrayType) {
assertEquals(arrayInterfaces(), ImmutableSet.copyOf(arrayType.getGenericInterfaces()));
}
private static ImmutableSet<TypeToken<?>> arrayInterfaces() {
ImmutableSet.Builder<TypeToken<?>> builder = ImmutableSet.builder();
for (Class<?> interfaceType : Object[].class.getInterfaces()) {
builder.add(TypeToken.of(interfaceType));
}
return builder.build();
}
private static void assertIsPrimitive(TypeToken<?> type) {
assertTrue(type.isPrimitive());
assertNotWrapper(type);
assertEquals(TypeToken.of(Primitives.wrap((Class<?>) type.getType())), type.wrap());
}
private static void assertNotPrimitive(TypeToken<?> type) {
assertFalse(type.isPrimitive());
assertSame(type, type.wrap());
}
private static void assertIsWrapper(TypeToken<?> type) {
assertNotPrimitive(type);
assertEquals(TypeToken.of(Primitives.unwrap((Class<?>) type.getType())), type.unwrap());
}
private static void assertNotWrapper(TypeToken<?> type) {
assertSame(type, type.unwrap());
}
private static void assertNotPrimitiveNorWrapper(TypeToken<?> type) {
assertNotPrimitive(type);
assertNotWrapper(type);
}
private | Assignability |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/method/annotation/CsrfTokenArgumentResolver.java | {
"start": 1666,
"end": 2231
} | class ____ implements HandlerMethodArgumentResolver {
@Override
public boolean supportsParameter(MethodParameter parameter) {
return CsrfToken.class.equals(parameter.getParameterType());
}
@Override
public @Nullable Object resolveArgument(MethodParameter parameter, @Nullable ModelAndViewContainer mavContainer,
NativeWebRequest webRequest, @Nullable WebDataBinderFactory binderFactory) {
CsrfToken token = (CsrfToken) webRequest.getAttribute(CsrfToken.class.getName(),
RequestAttributes.SCOPE_REQUEST);
return token;
}
}
| CsrfTokenArgumentResolver |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TaggedInputSplit.java | {
"start": 3331,
"end": 5347
} | class ____ use
*/
@SuppressWarnings("unchecked")
public Class<? extends Mapper> getMapperClass() {
return mapperClass;
}
public long getLength() throws IOException, InterruptedException {
return inputSplit.getLength();
}
public String[] getLocations() throws IOException, InterruptedException {
return inputSplit.getLocations();
}
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
inputSplitClass = (Class<? extends InputSplit>) readClass(in);
inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
mapperClass = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
inputSplit = (InputSplit) ReflectionUtils
.newInstance(inputSplitClass, conf);
SerializationFactory factory = new SerializationFactory(conf);
Deserializer deserializer = factory.getDeserializer(inputSplitClass);
deserializer.open((DataInputStream)in);
inputSplit = (InputSplit)deserializer.deserialize(inputSplit);
}
private Class<?> readClass(DataInput in) throws IOException {
String className = StringInterner.weakIntern(Text.readString(in));
try {
return conf.getClassByName(className);
} catch (ClassNotFoundException e) {
throw new RuntimeException("readObject can't find class", e);
}
}
@SuppressWarnings("unchecked")
public void write(DataOutput out) throws IOException {
Text.writeString(out, inputSplitClass.getName());
Text.writeString(out, inputFormatClass.getName());
Text.writeString(out, mapperClass.getName());
SerializationFactory factory = new SerializationFactory(conf);
Serializer serializer =
factory.getSerializer(inputSplitClass);
serializer.open((DataOutputStream)out);
serializer.serialize(inputSplit);
}
public Configuration getConf() {
return conf;
}
public void setConf(Configuration conf) {
this.conf = conf;
}
@Override
public String toString() {
return inputSplit.toString();
}
}
| to |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/elementcollection/ElementCollectionTypeUseTest.java | {
"start": 831,
"end": 1805
} | class ____ {
@Test
@TestForIssue(jiraKey = "HHH-12612")
@WithClasses(OfficeBuildingValidated.class)
void testAnnotatedCollectionElements() {
System.out.println( TestUtil.getMetaModelSourceAsString( OfficeBuildingValidated.class ) );
assertMetamodelClassGeneratedFor( OfficeBuildingValidated.class );
assertMapAttributesInMetaModelFor(
OfficeBuildingValidated.class,
"doorCodes",
Integer.class,
byte[].class,
"Wrong type in map attributes."
);
assertSetAttributeTypeInMetaModelFor(
OfficeBuildingValidated.class,
"computerSerialNumbers",
String.class,
"Wrong type in set attribute."
);
assertListAttributeTypeInMetaModelFor(
OfficeBuildingValidated.class,
"employeeNames",
String.class,
"Wrong type in list attributes."
);
assertListAttributeTypeInMetaModelFor(
OfficeBuildingValidated.class,
"rooms",
Room.class,
"Wrong type in list attributes."
);
}
}
| ElementCollectionTypeUseTest |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/issues/MyHelloBean.java | {
"start": 862,
"end": 1072
} | class ____ {
private String greet = "Hello";
public void setGreet(String greet) {
this.greet = greet;
}
public String hello(String s) {
return greet + " " + s;
}
}
| MyHelloBean |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/ClassLevelMediaTypeResource.java | {
"start": 257,
"end": 361
} | class ____ {
@GET
public String test() {
return "test";
}
}
| ClassLevelMediaTypeResource |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java | {
"start": 2080,
"end": 6515
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(FunctionContext.class);
private static final UnregisteredMetricsGroup defaultMetricsGroup =
new UnregisteredMetricsGroup();
private final @Nullable RuntimeContext context;
private final @Nullable ClassLoader userClassLoader;
private final @Nullable Map<String, String> jobParameters;
public FunctionContext(
@Nullable RuntimeContext context,
@Nullable ClassLoader userClassLoader,
@Nullable OpenContext openContext) {
this.context = context;
this.userClassLoader = userClassLoader;
if (openContext instanceof WithConfigurationOpenContext) {
Configuration configuration =
((WithConfigurationOpenContext) openContext).getConfiguration();
this.jobParameters = configuration.toMap();
} else {
this.jobParameters = null;
}
}
public FunctionContext(RuntimeContext context) {
this(context, null, null);
}
/**
* Get the {@link TaskInfo} for this parallel subtask.
*
* @return task info for this parallel subtask.
*/
public TaskInfo getTaskInfo() {
if (context == null) {
throw new TableException(
"Calls to FunctionContext.getTaskInfo are not available "
+ "at the current location.");
}
return context.getTaskInfo();
}
/**
* Returns the metric group for this parallel subtask.
*
* @return metric group for this parallel subtask.
*/
public MetricGroup getMetricGroup() {
if (context == null) {
LOG.warn(
"Calls to FunctionContext.getMetricGroup will have no effect "
+ "at the current location.");
return defaultMetricsGroup;
}
return context.getMetricGroup();
}
/**
* Gets the local temporary file copy of a distributed cache files.
*
* @param name distributed cache file name
* @return local temporary file copy of a distributed cache file.
*/
public File getCachedFile(String name) {
if (context == null) {
throw new TableException(
"Calls to FunctionContext.getCachedFile are not available "
+ "at the current location.");
}
return context.getDistributedCache().getFile(name);
}
/**
* Gets the global job parameter value associated with the given key as a string.
*
* @param key key pointing to the associated value
* @param defaultValue default value which is returned in case global job parameter is null or
* there is no value associated with the given key
* @return (default) value associated with the given key
*/
public String getJobParameter(String key, String defaultValue) {
if (context == null && jobParameters == null) {
throw new TableException(
"Calls to FunctionContext.getJobParameter are not available "
+ "at the current location.");
} else if (context == null) {
return jobParameters.getOrDefault(key, defaultValue);
}
return context.getGlobalJobParameters().getOrDefault(key, defaultValue);
}
/** Get the external resource information. */
public Set<ExternalResourceInfo> getExternalResourceInfos(String resourceName) {
if (context == null) {
throw new TableException(
"Calls to FunctionContext.getExternalResourceInfos are not available "
+ "at the current location.");
}
return context.getExternalResourceInfos(resourceName);
}
/**
* Gets the {@link ClassLoader} to load classes that are not in system's classpath, but are part
* of the JAR file of a user job.
*/
public ClassLoader getUserCodeClassLoader() {
if (context == null && userClassLoader == null) {
throw new TableException(
"Calls to FunctionContext.getUserCodeClassLoader are not available "
+ "at the current location.");
} else if (context == null) {
return userClassLoader;
}
return context.getUserCodeClassLoader();
}
}
| FunctionContext |
java | google__gson | proto/src/main/java/com/google/gson/protobuf/ProtoTypeAdapter.java | {
"start": 17517,
"end": 17731
} | enum ____
EnumValueDescriptor fieldValue = desc.findValueByNumber(jsonElement.getAsInt());
if (fieldValue == null) {
throw new IllegalArgumentException(
String.format("Unrecognized | value |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java | {
"start": 23028,
"end": 23653
} | class ____ extends TermSuggestionBuilder {
TimeoutSuggestionBuilder() {
super("field");
}
TimeoutSuggestionBuilder(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return "timeout";
}
@Override
public SuggestionSearchContext.SuggestionContext build(SearchExecutionContext context) {
return new TimeoutSuggestionContext(new TimeoutSuggester((ContextIndexSearcher) context.searcher()), context);
}
}
private static final | TimeoutSuggestionBuilder |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBTimeOrderedSessionStoreWithIndexTest.java | {
"start": 857,
"end": 1064
} | class ____ extends AbstractSessionBytesStoreTest {
@Override
StoreType storeType() {
return StoreType.RocksDBTimeOrderedSessionStoreWithIndex;
}
}
| RocksDBTimeOrderedSessionStoreWithIndexTest |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/AsyncAppenderLog4j1Benchmark.java | {
"start": 2388,
"end": 5826
} | class ____ {
Logger logger;
@Setup(Level.Trial)
public void up() {
System.setProperty("log4j.configuration", "perf-log4j12-async-noOpAppender.xml");
logger = LogManager.getLogger(getClass());
}
@TearDown(Level.Trial)
public void down() {
LogManager.shutdown();
new File("perftest.log").delete();
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughputSimple() {
logger.info(BenchmarkMessageParams.TEST);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput1Param() {
logger.info("p1=" + one);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput2Params() {
logger.info("p1=" + one + ", p2=" + two);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput3Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput4Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput5Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four + ", p5=" + five);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput6Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four + ", p5=" + five + ", p6=" + six);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput7Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four + ", p5=" + five + ", p6=" + six
+ ", p7=" + seven);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput8Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four + ", p5=" + five + ", p6=" + six
+ ", p7=" + seven + ", p8=" + eight);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput9Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four + ", p5=" + five + ", p6=" + six
+ ", p7=" + seven + ", p8=" + eight + ", p9=" + nine);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput10Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four + ", p5=" + five + ", p6=" + six
+ ", p7=" + seven + ", p8=" + eight + ", p9=" + nine + ", p10=" + ten);
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public void throughput11Params() {
logger.info("p1=" + one + ", p2=" + two + ", p3=" + three + ", p4=" + four + ", p5=" + five + ", p6=" + six
+ ", p7=" + seven + ", p8=" + eight + ", p9=" + nine + ", p10=" + ten + ", p11=" + eleven);
}
}
| AsyncAppenderLog4j1Benchmark |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-21/src/main/java/org/redisson/spring/data/connection/RedissonClusterConnection.java | {
"start": 2525,
"end": 20913
} | class ____ extends RedissonConnection implements DefaultedRedisClusterConnection {
private static final RedisStrictCommand<List<RedisClusterNode>> CLUSTER_NODES =
new RedisStrictCommand<List<RedisClusterNode>>("CLUSTER", "NODES", new ObjectDecoder(new RedisClusterNodeDecoder()));
public RedissonClusterConnection(RedissonClient redisson) {
super(redisson);
}
@Override
public Iterable<RedisClusterNode> clusterGetNodes() {
return read(null, StringCodec.INSTANCE, CLUSTER_NODES);
}
@Override
public Collection<RedisClusterNode> clusterGetSlaves(RedisClusterNode master) {
Iterable<RedisClusterNode> res = clusterGetNodes();
RedisClusterNode masterNode = null;
for (Iterator<RedisClusterNode> iterator = res.iterator(); iterator.hasNext();) {
RedisClusterNode redisClusterNode = iterator.next();
if (master.getHost().equals(redisClusterNode.getHost())
&& master.getPort().equals(redisClusterNode.getPort())) {
masterNode = redisClusterNode;
break;
}
}
if (masterNode == null) {
throw new IllegalStateException("Unable to find master node: " + master);
}
for (Iterator<RedisClusterNode> iterator = res.iterator(); iterator.hasNext();) {
RedisClusterNode redisClusterNode = iterator.next();
if (redisClusterNode.getMasterId() == null
|| !redisClusterNode.getMasterId().equals(masterNode.getId())) {
iterator.remove();
}
}
return (Collection<RedisClusterNode>) res;
}
@Override
public Map<RedisClusterNode, Collection<RedisClusterNode>> clusterGetMasterSlaveMap() {
Iterable<RedisClusterNode> res = clusterGetNodes();
Set<RedisClusterNode> masters = new HashSet<RedisClusterNode>();
for (Iterator<RedisClusterNode> iterator = res.iterator(); iterator.hasNext();) {
RedisClusterNode redisClusterNode = iterator.next();
if (redisClusterNode.isMaster()) {
masters.add(redisClusterNode);
}
}
Map<RedisClusterNode, Collection<RedisClusterNode>> result = new HashMap<RedisClusterNode, Collection<RedisClusterNode>>();
for (Iterator<RedisClusterNode> iterator = res.iterator(); iterator.hasNext();) {
RedisClusterNode redisClusterNode = iterator.next();
for (RedisClusterNode masterNode : masters) {
if (redisClusterNode.getMasterId() != null
&& redisClusterNode.getMasterId().equals(masterNode.getId())) {
Collection<RedisClusterNode> list = result.get(masterNode);
if (list == null) {
list = new ArrayList<RedisClusterNode>();
result.put(masterNode, list);
}
list.add(redisClusterNode);
}
}
}
return result;
}
@Override
public Integer clusterGetSlotForKey(byte[] key) {
RFuture<Integer> f = executorService.readAsync((String)null, StringCodec.INSTANCE, RedisCommands.KEYSLOT, key);
return syncFuture(f);
}
@Override
public RedisClusterNode clusterGetNodeForSlot(int slot) {
Iterable<RedisClusterNode> res = clusterGetNodes();
for (RedisClusterNode redisClusterNode : res) {
if (redisClusterNode.isMaster() && redisClusterNode.getSlotRange().contains(slot)) {
return redisClusterNode;
}
}
return null;
}
@Override
public RedisClusterNode clusterGetNodeForKey(byte[] key) {
int slot = executorService.getConnectionManager().calcSlot(key);
return clusterGetNodeForSlot(slot);
}
@Override
public ClusterInfo clusterGetClusterInfo() {
RFuture<Map<String, String>> f = executorService.readAsync((String)null, StringCodec.INSTANCE, RedisCommands.CLUSTER_INFO);
Map<String, String> entries = syncFuture(f);
Properties props = new Properties();
for (Entry<String, String> entry : entries.entrySet()) {
props.setProperty(entry.getKey(), entry.getValue());
}
return new ClusterInfo(props);
}
@Override
public void clusterAddSlots(RedisClusterNode node, int... slots) {
RedisClient entry = getEntry(node);
List<Integer> params = convert(slots);
RFuture<Map<String, String>> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_ADDSLOTS, params.toArray());
syncFuture(f);
}
protected List<Integer> convert(int... slots) {
List<Integer> params = new ArrayList<Integer>();
for (int slot : slots) {
params.add(slot);
}
return params;
}
@Override
public void clusterAddSlots(RedisClusterNode node, SlotRange range) {
clusterAddSlots(node, range.getSlotsArray());
}
@Override
public Long clusterCountKeysInSlot(int slot) {
RedisClusterNode node = clusterGetNodeForSlot(slot);
MasterSlaveEntry entry = executorService.getConnectionManager().getEntry(new InetSocketAddress(node.getHost(), node.getPort()));
RFuture<Long> f = executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_COUNTKEYSINSLOT, slot);
return syncFuture(f);
}
@Override
public void clusterDeleteSlots(RedisClusterNode node, int... slots) {
RedisClient entry = getEntry(node);
List<Integer> params = convert(slots);
RFuture<Long> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_DELSLOTS, params.toArray());
syncFuture(f);
}
@Override
public void clusterDeleteSlotsInRange(RedisClusterNode node, SlotRange range) {
clusterDeleteSlots(node, range.getSlotsArray());
}
@Override
public void clusterForget(RedisClusterNode node) {
RFuture<Void> f = executorService.writeAsync((String)null, StringCodec.INSTANCE, RedisCommands.CLUSTER_FORGET, node.getId());
syncFuture(f);
}
@Override
public void clusterMeet(RedisClusterNode node) {
Assert.notNull(node, "Cluster node must not be null for CLUSTER MEET command!");
Assert.hasText(node.getHost(), "Node to meet cluster must have a host!");
Assert.isTrue(node.getPort() > 0, "Node to meet cluster must have a port greater 0!");
RFuture<Void> f = executorService.writeAsync((String)null, StringCodec.INSTANCE, RedisCommands.CLUSTER_MEET, node.getHost(), node.getPort());
syncFuture(f);
}
@Override
public void clusterSetSlot(RedisClusterNode node, int slot, AddSlots mode) {
RedisClient entry = getEntry(node);
RFuture<Map<String, String>> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_SETSLOT, slot, mode);
syncFuture(f);
}
private static final RedisStrictCommand<List<String>> CLUSTER_GETKEYSINSLOT = new RedisStrictCommand<List<String>>("CLUSTER", "GETKEYSINSLOT", new ObjectListReplayDecoder<String>());
@Override
public List<byte[]> clusterGetKeysInSlot(int slot, Integer count) {
RFuture<List<byte[]>> f = executorService.readAsync((String)null, ByteArrayCodec.INSTANCE, CLUSTER_GETKEYSINSLOT, slot, count);
return syncFuture(f);
}
@Override
public void clusterReplicate(RedisClusterNode master, RedisClusterNode slave) {
RedisClient entry = getEntry(master);
RFuture<Long> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_REPLICATE, slave.getId());
syncFuture(f);
}
@Override
public String ping(RedisClusterNode node) {
return execute(node, RedisCommands.PING);
}
@Override
public void bgReWriteAof(RedisClusterNode node) {
execute(node, RedisCommands.BGREWRITEAOF);
}
@Override
public void bgSave(RedisClusterNode node) {
execute(node, RedisCommands.BGSAVE);
}
@Override
public Long lastSave(RedisClusterNode node) {
return execute(node, RedisCommands.LASTSAVE);
}
@Override
public void save(RedisClusterNode node) {
execute(node, RedisCommands.SAVE);
}
@Override
public Long dbSize(RedisClusterNode node) {
return execute(node, RedisCommands.DBSIZE);
}
private <T> T execute(RedisClusterNode node, RedisCommand<T> command) {
RedisClient entry = getEntry(node);
RFuture<T> f = executorService.writeAsync(entry, StringCodec.INSTANCE, command);
return syncFuture(f);
}
protected RedisClient getEntry(RedisClusterNode node) {
InetSocketAddress addr = new InetSocketAddress(node.getHost(), node.getPort());
MasterSlaveEntry entry = executorService.getConnectionManager().getEntry(addr);
ClientConnectionsEntry e = entry.getEntry(addr);
return e.getClient();
}
@Override
public void flushDb(RedisClusterNode node) {
execute(node, RedisCommands.FLUSHDB);
}
@Override
public void flushAll(RedisClusterNode node) {
execute(node, RedisCommands.FLUSHALL);
}
@Override
public Properties info(RedisClusterNode node) {
Map<String, String> info = execute(node, RedisCommands.INFO_ALL);
Properties result = new Properties();
for (Entry<String, String> entry : info.entrySet()) {
result.setProperty(entry.getKey(), entry.getValue());
}
return result;
}
@Override
public Properties info(RedisClusterNode node, String section) {
RedisStrictCommand<Map<String, String>> command = new RedisStrictCommand<Map<String, String>>("INFO", section, new StringMapDataDecoder());
Map<String, String> info = execute(node, command);
Properties result = new Properties();
for (Entry<String, String> entry : info.entrySet()) {
result.setProperty(entry.getKey(), entry.getValue());
}
return result;
}
private final RedisStrictCommand<List<byte[]>> KEYS = new RedisStrictCommand<>("KEYS");
@Override
public Set<byte[]> keys(RedisClusterNode node, byte[] pattern) {
RedisClient entry = getEntry(node);
RFuture<Collection<byte[]>> f = executorService.readAsync(entry, ByteArrayCodec.INSTANCE, KEYS, pattern);
Collection<byte[]> keys = syncFuture(f);
return new HashSet<>(keys);
}
@Override
public byte[] randomKey(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<byte[]> f = executorService.readRandomAsync(entry, ByteArrayCodec.INSTANCE, RedisCommands.RANDOM_KEY);
return syncFuture(f);
}
@Override
public void shutdown(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<Void> f = executorService.readAsync(entry, ByteArrayCodec.INSTANCE, RedisCommands.SHUTDOWN);
syncFuture(f);
}
@Override
public Properties getConfig(RedisClusterNode node, String pattern) {
RedisClient entry = getEntry(node);
RFuture<List<String>> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_GET, pattern);
List<String> r = syncFuture(f);
if (r != null) {
return Converters.toProperties(r);
}
return null;
}
@Override
public void setConfig(RedisClusterNode node, String param, String value) {
RedisClient entry = getEntry(node);
RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_SET, param, value);
syncFuture(f);
}
@Override
public void resetConfigStats(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_RESETSTAT);
syncFuture(f);
}
@Override
public Long time(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<Long> f = executorService.readAsync(entry, LongCodec.INSTANCE, RedisCommands.TIME_LONG);
return syncFuture(f);
}
private static final StringToRedisClientInfoConverter CONVERTER = new StringToRedisClientInfoConverter();
@Override
public List<RedisClientInfo> getClientList(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<List<String>> f = executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLIENT_LIST);
List<String> list = syncFuture(f);
return CONVERTER.convert(list.toArray(new String[list.size()]));
}
@Override
public Cursor<byte[]> scan(RedisClusterNode node, ScanOptions options) {
return new ScanCursor<byte[]>(0, options) {
private RedisClient client = getEntry(node);
@Override
protected ScanIteration<byte[]> doScan(long cursorId, ScanOptions options) {
if (isQueueing() || isPipelined()) {
throw new UnsupportedOperationException("'SSCAN' cannot be called in pipeline / transaction mode.");
}
if (client == null) {
return null;
}
List<Object> args = new ArrayList<Object>();
if (cursorId == 101010101010101010L) {
cursorId = 0;
}
args.add(Long.toUnsignedString(cursorId));
if (options.getPattern() != null) {
args.add("MATCH");
args.add(options.getPattern());
}
if (options.getCount() != null) {
args.add("COUNT");
args.add(options.getCount());
}
RFuture<ListScanResult<byte[]>> f = executorService.readAsync(client, ByteArrayCodec.INSTANCE, RedisCommands.SCAN, args.toArray());
ListScanResult<byte[]> res = syncFuture(f);
String pos = res.getPos();
client = res.getRedisClient();
if ("0".equals(pos)) {
client = null;
}
return new ScanIteration<byte[]>(Long.parseUnsignedLong(pos), res.getValues());
}
}.open();
}
@Override
public void rename(byte[] oldName, byte[] newName) {
if (isPipelined()) {
throw new InvalidDataAccessResourceUsageException("Clustered rename is not supported in a pipeline");
}
if (executorService.getConnectionManager().calcSlot(oldName) == executorService.getConnectionManager().calcSlot(newName)) {
super.rename(oldName, newName);
return;
}
final byte[] value = dump(oldName);
if (null != value) {
final Long sourceTtlInSeconds = ttl(oldName);
final long ttlInMilliseconds;
if (null != sourceTtlInSeconds && sourceTtlInSeconds > 0) {
ttlInMilliseconds = sourceTtlInSeconds * 1000;
} else {
ttlInMilliseconds = 0;
}
restore(newName, ttlInMilliseconds, value);
del(oldName);
}
}
@Override
public Boolean renameNX(byte[] oldName, byte[] newName) {
if (isPipelined()) {
throw new InvalidDataAccessResourceUsageException("Clustered rename is not supported in a pipeline");
}
if (executorService.getConnectionManager().calcSlot(oldName) == executorService.getConnectionManager().calcSlot(newName)) {
return super.renameNX(oldName, newName);
}
final byte[] value = dump(oldName);
if (null != value && !exists(newName)) {
final Long sourceTtlInSeconds = ttl(oldName);
final long ttlInMilliseconds;
if (null != sourceTtlInSeconds && sourceTtlInSeconds > 0) {
ttlInMilliseconds = sourceTtlInSeconds * 1000;
} else {
ttlInMilliseconds = 0;
}
restore(newName, ttlInMilliseconds, value);
del(oldName);
return true;
}
return false;
}
@Override
public Long del(byte[]... keys) {
if (isQueueing() || isPipelined()) {
for (byte[] key: keys) {
write(key, LongCodec.INSTANCE, RedisCommands.DEL, key);
}
return null;
}
CommandBatchService es = new CommandBatchService(executorService);
for (byte[] key: keys) {
es.writeAsync(key, StringCodec.INSTANCE, RedisCommands.DEL, key);
}
BatchResult<Long> b = (BatchResult<Long>) es.execute();
return b.getResponses().stream().collect(Collectors.summarizingLong(v -> v)).getSum();
}
@Override
public List<byte[]> mGet(byte[]... keys) {
if (isQueueing() || isPipelined()) {
for (byte[] key : keys) {
read(key, ByteArrayCodec.INSTANCE, RedisCommands.GET, key);
}
return null;
}
CommandBatchService es = new CommandBatchService(executorService);
for (byte[] key: keys) {
es.readAsync(key, ByteArrayCodec.INSTANCE, RedisCommands.GET, key);
}
BatchResult<byte[]> r = (BatchResult<byte[]>) es.execute();
return r.getResponses();
}
@Override
public Boolean mSet(Map<byte[], byte[]> tuple) {
if (isQueueing() || isPipelined()) {
for (Entry<byte[], byte[]> entry: tuple.entrySet()) {
write(entry.getKey(), StringCodec.INSTANCE, RedisCommands.SET, entry.getKey(), entry.getValue());
}
return true;
}
CommandBatchService es = new CommandBatchService(executorService);
for (Entry<byte[], byte[]> entry: tuple.entrySet()) {
es.writeAsync(entry.getKey(), StringCodec.INSTANCE, RedisCommands.SET, entry.getKey(), entry.getValue());
}
es.execute();
return true;
}
}
| RedissonClusterConnection |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobDiagnosticsUpdateEvent.java | {
"start": 913,
"end": 1249
} | class ____ extends JobEvent {
private String diagnosticUpdate;
public JobDiagnosticsUpdateEvent(JobId jobID, String diagnostic) {
super(jobID, JobEventType.JOB_DIAGNOSTIC_UPDATE);
this.diagnosticUpdate = diagnostic;
}
public String getDiagnosticUpdate() {
return this.diagnosticUpdate;
}
}
| JobDiagnosticsUpdateEvent |
java | spring-projects__spring-framework | spring-core-test/src/test/java/org/springframework/core/test/tools/ResourceFileTests.java | {
"start": 837,
"end": 1507
} | class ____ {
@Test
void ofPathAndCharSequenceCreatesResource() {
ResourceFile file = ResourceFile.of("path", "test");
assertThat(file.getPath()).isEqualTo("path");
assertThat(file.getContent()).isEqualTo("test");
}
@Test
void ofPathAndWritableContentCreatesResource() {
ResourceFile file = ResourceFile.of("path", appendable -> appendable.append("test"));
assertThat(file.getPath()).isEqualTo("path");
assertThat(file.getContent()).isEqualTo("test");
}
@Test
void assertThatUsesResourceFileAssert() {
ResourceFile file = ResourceFile.of("path", appendable -> appendable.append("test"));
assertThat(file).hasContent("test");
}
}
| ResourceFileTests |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/LoopThousandTest.java | {
"start": 1008,
"end": 1730
} | class ____ extends ContextTestSupport {
@Test
public void testLoopThousand() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
final Processor loopTest = new LoopTestProcessor(1000);
return new RouteBuilder() {
public void configure() {
from("direct:start").loop(1000)
.process(loopTest)
.to("log:loop")
.end()
.to("mock:result");
}
};
}
}
| LoopThousandTest |
java | apache__camel | components/camel-shiro/src/test/java/org/apache/camel/component/shiro/security/ShiroAuthenticationBase64Test.java | {
"start": 1350,
"end": 3936
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:success")
protected MockEndpoint successEndpoint;
@EndpointInject("mock:authenticationException")
protected MockEndpoint failureEndpoint;
private byte[] passPhrase = {
(byte) 0x08, (byte) 0x09, (byte) 0x0A, (byte) 0x0B,
(byte) 0x0C, (byte) 0x0D, (byte) 0x0E, (byte) 0x0F,
(byte) 0x10, (byte) 0x11, (byte) 0x12, (byte) 0x13,
(byte) 0x14, (byte) 0x15, (byte) 0x16, (byte) 0x17 };
@Test
public void testShiroAuthenticationFailure() throws Exception {
//Incorrect password
ShiroSecurityToken shiroSecurityToken = new ShiroSecurityToken("ringo", "stirr");
TestShiroSecurityTokenInjector shiroSecurityTokenInjector
= new TestShiroSecurityTokenInjector(shiroSecurityToken, passPhrase);
successEndpoint.expectedMessageCount(0);
failureEndpoint.expectedMessageCount(1);
template.send("direct:secureEndpoint", shiroSecurityTokenInjector);
successEndpoint.assertIsSatisfied();
failureEndpoint.assertIsSatisfied();
}
@Test
public void testSuccessfulShiroAuthenticationWithNoAuthorization() throws Exception {
ShiroSecurityToken shiroSecurityToken = new ShiroSecurityToken("ringo", "starr");
TestShiroSecurityTokenInjector shiroSecurityTokenInjector
= new TestShiroSecurityTokenInjector(shiroSecurityToken, passPhrase);
successEndpoint.expectedMessageCount(2);
failureEndpoint.expectedMessageCount(0);
template.send("direct:secureEndpoint", shiroSecurityTokenInjector);
template.send("direct:secureEndpoint", shiroSecurityTokenInjector);
successEndpoint.assertIsSatisfied();
failureEndpoint.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
final ShiroSecurityPolicy securityPolicy = new ShiroSecurityPolicy("src/test/resources/securityconfig.ini", passPhrase);
securityPolicy.setBase64(true);
return new RouteBuilder() {
@SuppressWarnings("unchecked")
public void configure() {
onException(UnknownAccountException.class, IncorrectCredentialsException.class,
LockedAccountException.class, AuthenticationException.class).to("mock:authenticationException");
from("direct:secureEndpoint").policy(securityPolicy).to("log:incoming payload").to("mock:success");
}
};
}
private static | ShiroAuthenticationBase64Test |
java | netty__netty | transport-rxtx/src/main/java/io/netty/channel/rxtx/RxtxChannelConfig.java | {
"start": 4156,
"end": 9821
} | enum ____ {
/**
* No parity bit will be sent with each data character at all
*/
NONE(SerialPort.PARITY_NONE),
/**
* An odd parity bit will be sent with each data character, ie. will be set
* to 1 if the data character contains an even number of bits set to 1.
*/
ODD(SerialPort.PARITY_ODD),
/**
* An even parity bit will be sent with each data character, ie. will be set
* to 1 if the data character contains an odd number of bits set to 1.
*/
EVEN(SerialPort.PARITY_EVEN),
/**
* A mark parity bit (ie. always 1) will be sent with each data character
*/
MARK(SerialPort.PARITY_MARK),
/**
* A space parity bit (ie. always 0) will be sent with each data character
*/
SPACE(SerialPort.PARITY_SPACE);
private final int value;
Paritybit(int value) {
this.value = value;
}
public int value() {
return value;
}
public static Paritybit valueOf(int value) {
for (Paritybit paritybit : Paritybit.values()) {
if (paritybit.value == value) {
return paritybit;
}
}
throw new IllegalArgumentException("unknown " + Paritybit.class.getSimpleName() + " value: " + value);
}
}
/**
* Sets the baud rate (ie. bits per second) for communication with the serial device.
* The baud rate will include bits for framing (in the form of stop bits and parity),
* such that the effective data rate will be lower than this value.
*
* @param baudrate The baud rate (in bits per second)
*/
RxtxChannelConfig setBaudrate(int baudrate);
/**
* Sets the number of stop bits to include at the end of every character to aid the
* serial device in synchronising with the data.
*
* @param stopbits The number of stop bits to use
*/
RxtxChannelConfig setStopbits(Stopbits stopbits);
/**
* Sets the number of data bits to use to make up each character sent to the serial
* device.
*
* @param databits The number of data bits to use
*/
RxtxChannelConfig setDatabits(Databits databits);
/**
* Sets the type of parity bit to be used when communicating with the serial device.
*
* @param paritybit The type of parity bit to be used
*/
RxtxChannelConfig setParitybit(Paritybit paritybit);
/**
* @return The configured baud rate, defaulting to 115200 if unset
*/
int getBaudrate();
/**
* @return The configured stop bits, defaulting to {@link Stopbits#STOPBITS_1} if unset
*/
Stopbits getStopbits();
/**
* @return The configured data bits, defaulting to {@link Databits#DATABITS_8} if unset
*/
Databits getDatabits();
/**
* @return The configured parity bit, defaulting to {@link Paritybit#NONE} if unset
*/
Paritybit getParitybit();
/**
* @return true if the serial device should support the Data Terminal Ready signal
*/
boolean isDtr();
/**
* Sets whether the serial device supports the Data Terminal Ready signal, used for
* flow control
*
* @param dtr true if DTR is supported, false otherwise
*/
RxtxChannelConfig setDtr(boolean dtr);
/**
* @return true if the serial device should support the Ready to Send signal
*/
boolean isRts();
/**
* Sets whether the serial device supports the Request To Send signal, used for flow
* control
*
* @param rts true if RTS is supported, false otherwise
*/
RxtxChannelConfig setRts(boolean rts);
/**
* @return The number of milliseconds to wait between opening the serial port and
* initialising.
*/
int getWaitTimeMillis();
/**
* Sets the time to wait after opening the serial port and before sending it any
* configuration information or data. A value of 0 indicates that no waiting should
* occur.
*
* @param waitTimeMillis The number of milliseconds to wait, defaulting to 0 (no
* wait) if unset
* @throws IllegalArgumentException if the supplied value is < 0
*/
RxtxChannelConfig setWaitTimeMillis(int waitTimeMillis);
/**
* Sets the maximal time (in ms) to block while try to read from the serial port. Default is 1000ms
*/
RxtxChannelConfig setReadTimeout(int readTimeout);
/**
* Return the maximal time (in ms) to block and wait for something to be ready to read.
*/
int getReadTimeout();
@Override
RxtxChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis);
@Override
@Deprecated
RxtxChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead);
@Override
RxtxChannelConfig setWriteSpinCount(int writeSpinCount);
@Override
RxtxChannelConfig setAllocator(ByteBufAllocator allocator);
@Override
RxtxChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator);
@Override
RxtxChannelConfig setAutoRead(boolean autoRead);
@Override
RxtxChannelConfig setAutoClose(boolean autoClose);
@Override
RxtxChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark);
@Override
RxtxChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark);
@Override
RxtxChannelConfig setWriteBufferWaterMark(WriteBufferWaterMark writeBufferWaterMark);
@Override
RxtxChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator);
}
| Paritybit |
java | elastic__elasticsearch | modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java | {
"start": 15231,
"end": 15997
} | class ____ extends ChannelInitializer<Channel> {
@Override
protected void initChannel(Channel ch) throws Exception {
assert ch instanceof Netty4NioSocketChannel;
NetUtils.tryEnsureReasonableKeepAliveConfig(((Netty4NioSocketChannel) ch).javaChannel());
setupPipeline(ch, false);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
Netty4TcpChannel channel = ctx.channel().attr(CHANNEL_KEY).get();
channel.setCloseException(exceptionFromThrowable(cause));
ExceptionsHelper.maybeDieOnAnotherThread(cause);
super.exceptionCaught(ctx, cause);
}
}
protected | ClientChannelInitializer |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java | {
"start": 1787,
"end": 7597
} | class ____ extends ESTestCase {
private Directory directory;
private DirectoryReader directoryReader;
private DocumentSubsetBitsetCache bitsetCache;
@Before
public void setUpDirectory() {
// We check it is empty at the end of the test, so make sure it is empty in the
// beginning as well so that we can easily distinguish from garbage added by
// this test and garbage not cleaned up by other tests.
assertTrue(DocumentSubsetReader.NUM_DOCS_CACHE.toString(), DocumentSubsetReader.NUM_DOCS_CACHE.isEmpty());
directory = newDirectory();
bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY);
}
@After
public void cleanDirectory() throws Exception {
if (directoryReader != null) {
directoryReader.close();
}
assertTrue(DocumentSubsetReader.NUM_DOCS_CACHE.toString(), DocumentSubsetReader.NUM_DOCS_CACHE.isEmpty());
directory.close();
bitsetCache.close();
}
public void testSearch() throws Exception {
IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random())));
Document document = new Document();
document.add(new StringField("field", "value1", Field.Store.NO));
iw.addDocument(document);
document = new Document();
document.add(new StringField("field", "value2", Field.Store.NO));
iw.addDocument(document);
document = new Document();
document.add(new StringField("field", "value3", Field.Store.NO));
iw.addDocument(document);
document = new Document();
document.add(new StringField("field", "value4", Field.Store.NO));
iw.addDocument(document);
iw.forceMerge(1);
iw.deleteDocuments(new Term("field", "value3"));
iw.close();
openDirectoryReader();
IndexSearcher indexSearcher = newSearcher(
DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value1")))
);
assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1));
TopDocs result = indexSearcher.search(new MatchAllDocsQuery(), 1);
assertThat(result.totalHits.value(), equalTo(1L));
assertThat(result.scoreDocs[0].doc, equalTo(0));
indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value2"))));
assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1));
result = indexSearcher.search(new MatchAllDocsQuery(), 1);
assertThat(result.totalHits.value(), equalTo(1L));
assertThat(result.scoreDocs[0].doc, equalTo(1));
// this doc has been marked as deleted:
indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value3"))));
assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0));
result = indexSearcher.search(new MatchAllDocsQuery(), 1);
assertThat(result.totalHits.value(), equalTo(0L));
indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value4"))));
assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1));
result = indexSearcher.search(new MatchAllDocsQuery(), 1);
assertThat(result.totalHits.value(), equalTo(1L));
assertThat(result.scoreDocs[0].doc, equalTo(3));
}
public void testLiveDocs() throws Exception {
int numDocs = scaledRandomIntBetween(16, 128);
IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE));
for (int i = 0; i < numDocs; i++) {
Document document = new Document();
document.add(new StringField("field", "value" + i, Field.Store.NO));
iw.addDocument(document);
}
iw.forceMerge(1);
iw.close();
openDirectoryReader();
assertThat("should have one segment after force merge", directoryReader.leaves().size(), equalTo(1));
for (int i = 0; i < numDocs; i++) {
Query roleQuery = new TermQuery(new Term("field", "value" + i));
DirectoryReader wrappedReader = DocumentSubsetReader.wrap(directoryReader, bitsetCache, roleQuery);
LeafReader leafReader = wrappedReader.leaves().get(0).reader();
assertThat(leafReader.hasDeletions(), is(true));
assertThat(leafReader.numDocs(), equalTo(1));
Bits liveDocs = leafReader.getLiveDocs();
assertThat(liveDocs.length(), equalTo(numDocs));
for (int docId = 0; docId < numDocs; docId++) {
if (docId == i) {
assertThat("docId [" + docId + "] should match", liveDocs.get(docId), is(true));
} else {
assertThat("docId [" + docId + "] should not match", liveDocs.get(docId), is(false));
}
}
}
}
public void testWrapTwice() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(null);
IndexWriter iw = new IndexWriter(dir, iwc);
iw.close();
DirectoryReader dirReader = DocumentSubsetReader.wrap(DirectoryReader.open(dir), bitsetCache, new MatchAllDocsQuery());
try {
DocumentSubsetReader.wrap(dirReader, bitsetCache, new MatchAllDocsQuery());
fail("shouldn't be able to wrap DocumentSubsetDirectoryReader twice");
} catch (IllegalArgumentException e) {
assertThat(
e.getMessage(),
equalTo(
"Can't wrap [ | DocumentSubsetReaderTests |
java | apache__flink | flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/result/CollectResultBase.java | {
"start": 1325,
"end": 2742
} | class ____ implements DynamicResult {
private final CloseableIterator<RowData> resultIterator;
protected final Object resultLock;
protected AtomicReference<SqlExecutionException> executionException = new AtomicReference<>();
protected final ResultRetrievalThread retrievalThread;
public CollectResultBase(StatementResult tableResult) {
this.resultIterator = tableResult;
this.resultLock = new Object();
this.retrievalThread = new ResultRetrievalThread();
}
@Override
public void close() {
retrievalThread.isRunning = false;
retrievalThread.interrupt();
try {
// cancel the job if it is not terminated
resultIterator.close();
} catch (Exception e) {
// ignore
}
}
protected <T> TypedResult<T> handleMissingResult() {
if (executionException.get() != null) {
throw executionException.get();
}
// we assume that a bounded job finished
return TypedResult.endOfStream();
}
protected abstract void processRecord(RowData row);
protected boolean isRetrieving() {
return retrievalThread.isRunning;
}
// --------------------------------------------------------------------------------------------
/** Thread to retrieve data from the {@link CloseableIterator} and process. */
protected | CollectResultBase |
java | elastic__elasticsearch | x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java | {
"start": 954,
"end": 1305
} | class ____ the state of an index at a point in time. It should
* be constructed when an index has transitioned into a new step. Construction is done through the
* {@link #success(String, String, long, Long, LifecycleExecutionState)} and
* {@link #failure(String, String, long, Long, LifecycleExecutionState, Exception)} methods.
*/
public | encapsulates |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/floatarrays/FloatArrays_assertContainsOnly_Test.java | {
"start": 1772,
"end": 7095
} | class ____ extends FloatArraysBaseTest {
@Test
void should_pass_if_actual_contains_given_values_only() {
arrays.assertContainsOnly(someInfo(), actual, arrayOf(6f, 8f, 10f));
}
@Test
void should_pass_if_actual_contains_given_values_only_in_different_order() {
arrays.assertContainsOnly(someInfo(), actual, arrayOf(10f, 8f, 6f));
}
@Test
void should_pass_if_actual_contains_given_values_only_more_than_once() {
actual = arrayOf(6f, 8f, 10f, 8f, 8f, 8f);
arrays.assertContainsOnly(someInfo(), actual, arrayOf(6f, 8f, 10f));
}
@Test
void should_pass_if_actual_contains_given_values_only_even_if_duplicated() {
arrays.assertContainsOnly(someInfo(), actual, arrayOf(6f, 8f, 10f, 6f, 8f, 10f));
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual = emptyArray();
arrays.assertContainsOnly(someInfo(), actual, emptyArray());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsOnly(someInfo(), actual, emptyArray()));
}
@Test
void should_throw_error_if_array_of_values_to_look_for_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertContainsOnly(someInfo(), actual, null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsOnly(someInfo(), null, arrayOf(6f)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_contain_given_values_only() {
AssertionInfo info = someInfo();
float[] expected = { 6f, 8f, 20f };
Throwable error = catchThrowable(() -> arrays.assertContainsOnly(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainOnly(actual, expected, newArrayList(20f), newArrayList(10f)));
}
@Test
void should_pass_if_actual_contains_given_values_only_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(6f, -8f, 10f));
}
@Test
void should_pass_if_actual_contains_given_values_only_in_different_order_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(10f, -8f, 6f));
}
@Test
void should_pass_if_actual_contains_given_values_only_more_than_once_according_to_custom_comparison_strategy() {
actual = arrayOf(6f, -8f, 10f, -8f, -8f, -8f);
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(6f, -8f, 10f));
}
@Test
void should_pass_if_actual_contains_given_values_only_even_if_duplicated_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(6f, -8f, 10f, 6f, -8f, 10f));
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(),
actual,
emptyArray()));
}
@Test
void should_throw_error_if_array_of_values_to_look_for_is_null_whatever_custom_comparison_strategy_is() {
assertThatNullPointerException().isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(),
actual,
null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(),
null,
arrayOf(6f)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_contain_given_values_only_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
float[] expected = { 6f, -8f, 20f };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainOnly(actual, expected, newArrayList(20f), newArrayList(10f),
absValueComparisonStrategy));
}
}
| FloatArrays_assertContainsOnly_Test |
java | spring-projects__spring-boot | module/spring-boot-jsonb/src/test/java/org/springframework/boot/jsonb/autoconfigure/jsontest/JsonbAutoConfigureJsonIntegrationTests.java | {
"start": 1318,
"end": 1535
} | class ____ {
@Autowired
private ApplicationContext context;
@Test
void jsonbIsAvailable() {
assertThatNoException().isThrownBy(() -> this.context.getBean(Jsonb.class));
}
}
| JsonbAutoConfigureJsonIntegrationTests |
java | apache__maven | impl/maven-di/src/main/java/org/apache/maven/di/Scope.java | {
"start": 1417,
"end": 1903
} | class ____ implements Scope {
* private final Map<Key<?>, Object> cache = new ConcurrentHashMap<>();
*
* {@literal @}Override
* public <T> Supplier<T> scope(Key<T> key, Supplier<T> unscoped) {
* return () -> {
* return (T) cache.computeIfAbsent(key, k -> unscoped.get());
* };
* }
* }
* </pre>
*
* @see org.apache.maven.api.di.Scope
* @since 4.0.0
*/
@Experimental
public | CachingScope |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DevClusterHelper.java | {
"start": 3435,
"end": 11875
} | class ____ {
public static final String DEFAULT_HASH_ALGORITHM = "SHA-256";
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
public static List<DecoratorBuildItem> createDecorators(String clusterKind,
String deploymentTarget,
ApplicationInfoBuildItem applicationInfo,
OutputTargetBuildItem outputTarget,
KubernetesConfig config,
PackageConfig packageConfig,
Optional<MetricsCapabilityBuildItem> metricsConfiguration,
Optional<KubernetesClientCapabilityBuildItem> kubernetesClientConfiguration,
List<KubernetesNamespaceBuildItem> namespaces,
List<KubernetesInitContainerBuildItem> initContainers,
List<KubernetesJobBuildItem> jobs,
List<KubernetesAnnotationBuildItem> annotations,
List<KubernetesLabelBuildItem> labels,
List<KubernetesEnvBuildItem> envs,
@Deprecated Optional<BaseImageInfoBuildItem> baseImage,
Optional<ContainerImageInfoBuildItem> image,
Optional<KubernetesCommandBuildItem> command,
List<KubernetesPortBuildItem> ports,
Optional<KubernetesProbePortNameBuildItem> portName,
Optional<KubernetesHealthLivenessPathBuildItem> livenessPath,
Optional<KubernetesHealthReadinessPathBuildItem> readinessPath,
Optional<KubernetesHealthStartupPathBuildItem> startupPath,
List<KubernetesRoleBuildItem> roles,
List<KubernetesClusterRoleBuildItem> clusterRoles,
List<KubernetesEffectiveServiceAccountBuildItem> serviceAccounts,
List<KubernetesRoleBindingBuildItem> roleBindings,
List<KubernetesClusterRoleBindingBuildItem> clusterRoleBindings,
Optional<CustomProjectRootBuildItem> customProjectRoot) {
String name = ResourceNameUtil.getResourceName(config, applicationInfo);
final var namespace = Targetable.filteredByTarget(namespaces, deploymentTarget, true)
.findFirst();
Optional<Project> project = KubernetesCommonHelper.createProject(applicationInfo, customProjectRoot, outputTarget,
packageConfig);
Optional<Port> port = KubernetesCommonHelper.getPort(ports, config);
List<DecoratorBuildItem> result = new ArrayList<>(
KubernetesCommonHelper.createDecorators(project, clusterKind, name, namespace, config,
metricsConfiguration, kubernetesClientConfiguration,
annotations, labels, image, command,
port, livenessPath, readinessPath, startupPath, roles, clusterRoles, serviceAccounts, roleBindings,
clusterRoleBindings));
image.ifPresent(
i -> result.add(new DecoratorBuildItem(clusterKind, new ApplyContainerImageDecorator(name, i.getImage()))));
var stream = Stream.concat(config.convertToBuildItems().stream(), Targetable.filteredByTarget(envs, KUBERNETES));
if (config.idempotent()) {
stream = stream.sorted(Comparator.comparing(e -> EnvConverter.convertName(e.getName())));
}
stream.forEach(e -> result.add(new DecoratorBuildItem(clusterKind,
new AddEnvVarDecorator(ApplicationContainerDecorator.ANY, name, new EnvBuilder()
.withName(EnvConverter.convertName(e.getName()))
.withValue(e.getValue())
.withSecret(e.getSecret())
.withConfigmap(e.getConfigMap())
.withField(e.getField())
.withPrefix(e.getPrefix())
.build()))));
result.add(new DecoratorBuildItem(clusterKind, new ApplyImagePullPolicyDecorator(name, "IfNotPresent")));
//Service handling
result.add(new DecoratorBuildItem(clusterKind, new ApplyServiceTypeDecorator(name, ServiceType.NodePort.name())));
List<Map.Entry<String, PortConfig>> nodeConfigPorts = config.ports().entrySet().stream()
.filter(e -> e.getValue().nodePort().isPresent())
.toList();
if (!nodeConfigPorts.isEmpty()) {
for (Map.Entry<String, PortConfig> entry : nodeConfigPorts) {
result.add(new DecoratorBuildItem(KUBERNETES,
new AddNodePortDecorator(name, entry.getValue().nodePort().getAsInt(), entry.getKey())));
}
} else {
result.add(new DecoratorBuildItem(clusterKind,
new AddNodePortDecorator(name,
config.nodePort().orElseGet(
() -> getStablePortNumberInRange(name, MIN_NODE_PORT_VALUE, MAX_NODE_PORT_VALUE)),
config.ingress().targetPort())));
}
//Probe port handling
result.add(
KubernetesCommonHelper.createProbeHttpPortDecorator(name, clusterKind, LIVENESS_PROBE, config.livenessProbe(),
portName,
ports,
config.ports()));
result.add(
KubernetesCommonHelper.createProbeHttpPortDecorator(name, clusterKind, READINESS_PROBE, config.readinessProbe(),
portName,
ports,
config.ports()));
result.add(
KubernetesCommonHelper.createProbeHttpPortDecorator(name, clusterKind, STARTUP_PROBE, config.startupProbe(),
portName,
ports,
config.ports()));
for (Map.Entry<String, String> annotation : config.ingress().annotations().entrySet()) {
result.add(new DecoratorBuildItem(clusterKind,
new AddAnnotationDecorator(name, annotation.getKey(), annotation.getValue(), INGRESS)));
}
for (IngressConfig.IngressRuleConfig rule : config.ingress().rules().values()) {
result.add(new DecoratorBuildItem(clusterKind, new AddIngressRuleDecorator(name, port,
new IngressRuleBuilder()
.withHost(rule.host())
.withPath(rule.path())
.withPathType(rule.pathType())
.withServiceName(rule.serviceName().orElse(null))
.withServicePortName(rule.servicePortName().orElse(null))
.withServicePortNumber(rule.servicePortNumber().orElse(-1))
.build())));
}
// Handle init Containers
result.addAll(KubernetesCommonHelper.createInitContainerDecorators(clusterKind, name, initContainers, result));
result.addAll(KubernetesCommonHelper.createInitJobDecorators(clusterKind, name, jobs, result));
// Do not bind the Management port to the Service resource unless it's explicitly used by the user.
if (managementPortIsEnabled()
&& (config.ingress() == null
|| !config.ingress().expose()
|| !config.ingress().targetPort().equals(MANAGEMENT_PORT_NAME))) {
result.add(new DecoratorBuildItem(clusterKind, new RemovePortFromServiceDecorator(name, MANAGEMENT_PORT_NAME)));
}
return result;
}
/**
* Given a string, generate a port number within the supplied range
* The output is always the same (between {@code min} and {@code max})
* given the same input and it's useful when we need to generate a port number
* which needs to stay the same but we don't care about the exact value
*/
private static int getStablePortNumberInRange(String input, int min, int max) {
if (min < MIN_PORT_NUMBER || max > MAX_PORT_NUMBER) {
throw new IllegalArgumentException(
String.format("Port number range must be within [%d-%d]", MIN_PORT_NUMBER, MAX_PORT_NUMBER));
}
try {
byte[] hash = MessageDigest.getInstance(DEFAULT_HASH_ALGORITHM).digest(input.getBytes(StandardCharsets.UTF_8));
return min + new BigInteger(hash).mod(BigInteger.valueOf(max - min)).intValue();
} catch (Exception e) {
throw new RuntimeException("Unable to generate stable port number from input string: '" + input + "'", e);
}
}
}
| DevClusterHelper |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RIdGeneratorReactive.java | {
"start": 829,
"end": 1380
} | interface ____ extends RExpirableReactive {
/**
* Initializes Id generator params.
*
* @param value - initial value
* @param allocationSize - values range allocation size
* @return <code>true</code> if Id generator initialized
* <code>false</code> if Id generator already initialized
*/
Mono<Boolean> tryInit(long value, long allocationSize);
/**
* Returns next unique number but not monotonically increased
*
* @return number
*/
Mono<Long> nextId();
}
| RIdGeneratorReactive |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestCachingKeyProvider.java | {
"start": 1280,
"end": 6745
} | class ____ {
@Test
public void testCurrentKey() throws Exception {
KeyProvider.KeyVersion mockKey = mock(KeyProvider.KeyVersion.class);
KeyProvider mockProv = mock(KeyProvider.class);
when(mockProv.getCurrentKey(eq("k1"))).thenReturn(mockKey);
when(mockProv.getCurrentKey(eq("k2"))).thenReturn(null);
when(mockProv.getConf()).thenReturn(new Configuration());
KeyProvider cache = new CachingKeyProvider(mockProv, 100, 100);
// asserting caching
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(1)).getCurrentKey(eq("k1"));
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(1)).getCurrentKey(eq("k1"));
Thread.sleep(1200);
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(2)).getCurrentKey(eq("k1"));
// asserting no caching when key is not known
cache = new CachingKeyProvider(mockProv, 100, 100);
assertEquals(null, cache.getCurrentKey("k2"));
verify(mockProv, times(1)).getCurrentKey(eq("k2"));
assertEquals(null, cache.getCurrentKey("k2"));
verify(mockProv, times(2)).getCurrentKey(eq("k2"));
}
@Test
public void testKeyVersion() throws Exception {
KeyProvider.KeyVersion mockKey = mock(KeyProvider.KeyVersion.class);
KeyProvider mockProv = mock(KeyProvider.class);
when(mockProv.getKeyVersion(eq("k1@0")))
.thenReturn(mockKey);
when(mockProv.getKeyVersion(eq("k2@0"))).thenReturn(null);
when(mockProv.getConf()).thenReturn(new Configuration());
KeyProvider cache = new CachingKeyProvider(mockProv, 100, 100);
// asserting caching
assertEquals(mockKey, cache.getKeyVersion("k1@0"));
verify(mockProv, times(1))
.getKeyVersion(eq("k1@0"));
assertEquals(mockKey, cache.getKeyVersion("k1@0"));
verify(mockProv, times(1))
.getKeyVersion(eq("k1@0"));
Thread.sleep(200);
assertEquals(mockKey, cache.getKeyVersion("k1@0"));
verify(mockProv, times(2))
.getKeyVersion(eq("k1@0"));
// asserting no caching when key is not known
cache = new CachingKeyProvider(mockProv, 100, 100);
assertEquals(null, cache.getKeyVersion("k2@0"));
verify(mockProv, times(1))
.getKeyVersion(eq("k2@0"));
assertEquals(null, cache.getKeyVersion("k2@0"));
verify(mockProv, times(2))
.getKeyVersion(eq("k2@0"));
}
@Test
public void testMetadata() throws Exception {
KeyProvider.Metadata mockMeta = mock(KeyProvider.Metadata.class);
KeyProvider mockProv = mock(KeyProvider.class);
when(mockProv.getMetadata(eq("k1"))).thenReturn(mockMeta);
when(mockProv.getMetadata(eq("k2"))).thenReturn(null);
when(mockProv.getConf()).thenReturn(new Configuration());
KeyProvider cache = new CachingKeyProvider(mockProv, 100, 100);
// asserting caching
assertEquals(mockMeta, cache.getMetadata("k1"));
verify(mockProv, times(1)).getMetadata(eq("k1"));
assertEquals(mockMeta, cache.getMetadata("k1"));
verify(mockProv, times(1)).getMetadata(eq("k1"));
Thread.sleep(200);
assertEquals(mockMeta, cache.getMetadata("k1"));
verify(mockProv, times(2)).getMetadata(eq("k1"));
// asserting no caching when key is not known
cache = new CachingKeyProvider(mockProv, 100, 100);
assertEquals(null, cache.getMetadata("k2"));
verify(mockProv, times(1)).getMetadata(eq("k2"));
assertEquals(null, cache.getMetadata("k2"));
verify(mockProv, times(2)).getMetadata(eq("k2"));
}
@Test
public void testRollNewVersion() throws Exception {
KeyProvider.KeyVersion mockKey = mock(KeyProvider.KeyVersion.class);
KeyProvider mockProv = mock(KeyProvider.class);
when(mockProv.getCurrentKey(eq("k1"))).thenReturn(mockKey);
when(mockProv.getConf()).thenReturn(new Configuration());
KeyProvider cache = new CachingKeyProvider(mockProv, 100, 100);
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(1)).getCurrentKey(eq("k1"));
cache.rollNewVersion("k1");
// asserting the cache is purged
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(2)).getCurrentKey(eq("k1"));
cache.rollNewVersion("k1", new byte[0]);
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(3)).getCurrentKey(eq("k1"));
}
@Test
public void testDeleteKey() throws Exception {
KeyProvider.KeyVersion mockKey = mock(KeyProvider.KeyVersion.class);
KeyProvider mockProv = mock(KeyProvider.class);
when(mockProv.getCurrentKey(eq("k1"))).thenReturn(mockKey);
when(mockProv.getKeyVersion(eq("k1@0")))
.thenReturn(mockKey);
when(mockProv.getMetadata(eq("k1"))).thenReturn(
new KMSClientProvider.KMSMetadata("c", 0, "l", null, new Date(), 1));
when(mockProv.getConf()).thenReturn(new Configuration());
KeyProvider cache = new CachingKeyProvider(mockProv, 100, 100);
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(1)).getCurrentKey(eq("k1"));
assertEquals(mockKey, cache.getKeyVersion("k1@0"));
verify(mockProv, times(1))
.getKeyVersion(eq("k1@0"));
cache.deleteKey("k1");
// asserting the cache is purged
assertEquals(mockKey, cache.getCurrentKey("k1"));
verify(mockProv, times(2)).getCurrentKey(eq("k1"));
assertEquals(mockKey, cache.getKeyVersion("k1@0"));
verify(mockProv, times(2))
.getKeyVersion(eq("k1@0"));
}
}
| TestCachingKeyProvider |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableIntervalTest.java | {
"start": 1014,
"end": 1692
} | class ____ extends RxJavaTest {
@Test
public void dispose() {
TestHelper.checkDisposed(Observable.interval(1, TimeUnit.MILLISECONDS, new TestScheduler()));
}
@Test
public void cancel() {
Observable.interval(1, TimeUnit.MILLISECONDS, Schedulers.trampoline())
.take(10)
.test()
.assertResult(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L);
}
@Test
public void cancelledOnRun() {
TestObserver<Long> to = new TestObserver<>();
IntervalObserver is = new IntervalObserver(to);
to.onSubscribe(is);
is.dispose();
is.run();
to.assertEmpty();
}
}
| ObservableIntervalTest |
java | quarkusio__quarkus | test-framework/observability/src/main/java/io/quarkus/observability/test/utils/QueryResult.java | {
"start": 1391,
"end": 1820
} | class ____ {
@JsonProperty("__name__")
public String name;
public String job;
public String test;
// getters and setters
@Override
public String toString() {
return "Metric{" +
"name='" + name + '\'' +
", job='" + job + '\'' +
", test='" + test + '\'' +
'}';
}
}
}
| Metric |
java | quarkusio__quarkus | independent-projects/qute/debug/src/main/java/io/quarkus/qute/debug/StoppedEvent.java | {
"start": 611,
"end": 861
} | class ____ {
/**
* Represents the possible reasons why the debuggee execution stopped.
* <p>
* Each reason corresponds to a standard value defined by the
* Debug Adapter Protocol (DAP).
* </p>
*/
public | StoppedEvent |
java | google__auto | value/src/test/java/com/google/auto/value/processor/TypeVariablesTest.java | {
"start": 4910,
"end": 5036
} | class ____<T, U extends T> {
abstract Map<T, U> getFoo();
abstract List<? extends T> getBar();
abstract static | Outer |
java | processing__processing4 | core/src/processing/opengl/PShapeOpenGL.java | {
"start": 2407,
"end": 181405
} | class ____ extends PShape {
// Testing these constants, not use as they might go away...
static public final int POSITION = 0;
static public final int NORMAL = 1;
static public final int TEXCOORD = 2;
static public final int DIRECTION = 3;
static public final int OFFSET = 4;
static protected final int TRANSLATE = 0;
static protected final int ROTATE = 1;
static protected final int SCALE = 2;
static protected final int MATRIX = 3;
protected PGraphicsOpenGL pg;
protected PGL pgl;
protected int context; // The context that created this shape.
protected PShapeOpenGL root;
// ........................................................
// Input, tessellated geometry
protected InGeometry inGeo;
protected TessGeometry tessGeo;
protected Tessellator tessellator;
protected AttributeMap polyAttribs;
// ........................................................
// Texturing
protected HashSet<PImage> textures;
protected boolean strokedTexture;
protected boolean untexChild;
// ........................................................
// OpenGL buffers
protected VertexBuffer bufPolyVertex;
protected VertexBuffer bufPolyColor;
protected VertexBuffer bufPolyNormal;
protected VertexBuffer bufPolyTexCoord;
protected VertexBuffer bufPolyAmbient;
protected VertexBuffer bufPolySpecular;
protected VertexBuffer bufPolyEmissive;
protected VertexBuffer bufPolyShininess;
protected VertexBuffer bufPolyIndex;
protected VertexBuffer bufLineVertex;
protected VertexBuffer bufLineColor;
protected VertexBuffer bufLineAttrib;
protected VertexBuffer bufLineIndex;
protected VertexBuffer bufPointVertex;
protected VertexBuffer bufPointColor;
protected VertexBuffer bufPointAttrib;
protected VertexBuffer bufPointIndex;
// ........................................................
// Offsets for geometry aggregation and update.
protected int polyVertCopyOffset;
protected int polyIndCopyOffset;
protected int lineVertCopyOffset;
protected int lineIndCopyOffset;
protected int pointVertCopyOffset;
protected int pointIndCopyOffset;
protected int polyIndexOffset;
protected int polyVertexOffset;
protected int polyVertexAbs;
protected int polyVertexRel;
protected int lineIndexOffset;
protected int lineVertexOffset;
protected int lineVertexAbs;
protected int lineVertexRel;
protected int pointIndexOffset;
protected int pointVertexOffset;
protected int pointVertexAbs;
protected int pointVertexRel;
protected int firstPolyIndexCache;
protected int lastPolyIndexCache;
protected int firstLineIndexCache;
protected int lastLineIndexCache;
protected int firstPointIndexCache;
protected int lastPointIndexCache;
protected int firstPolyVertex;
protected int lastPolyVertex;
protected int firstLineVertex;
protected int lastLineVertex;
protected int firstPointVertex;
protected int lastPointVertex;
// ........................................................
// Geometric transformations.
protected PMatrix transform;
protected PMatrix transformInv;
protected PMatrix matrixInv;
// ........................................................
// State/rendering flags
protected boolean tessellated;
protected boolean needBufferInit = false;
// Flag to indicate if the shape can have holes or not.
protected boolean solid = true;
protected boolean breakShape = false;
protected boolean shapeCreated = false;
// These variables indicate if the shape contains
// polygon, line and/or point geometry. In the case of
// 3D shapes, poly geometry is coincident with the fill
// triangles, as the lines and points are stored separately.
// However, for 2D shapes the poly geometry contains all of
// the three since the same rendering shader applies to
// fill, line and point geometry.
protected boolean hasPolys;
protected boolean hasLines;
protected boolean hasPoints;
// ........................................................
// Bezier and Catmull-Rom curves
protected int bezierDetail;
protected int curveDetail;
protected float curveTightness;
protected int savedBezierDetail;
protected int savedCurveDetail;
protected float savedCurveTightness;
// ........................................................
// Normals
protected float normalX, normalY, normalZ;
// normal calculated per triangle
static protected final int NORMAL_MODE_AUTO = 0;
// one normal manually specified per shape
static protected final int NORMAL_MODE_SHAPE = 1;
// normals specified for each shape vertex
static protected final int NORMAL_MODE_VERTEX = 2;
// Current mode for normals, one of AUTO, SHAPE, or VERTEX
protected int normalMode;
// ........................................................
// Modification variables (used only by the root shape)
protected boolean modified;
protected boolean modifiedPolyVertices;
protected boolean modifiedPolyColors;
protected boolean modifiedPolyNormals;
protected boolean modifiedPolyTexCoords;
protected boolean modifiedPolyAmbient;
protected boolean modifiedPolySpecular;
protected boolean modifiedPolyEmissive;
protected boolean modifiedPolyShininess;
protected boolean modifiedLineVertices;
protected boolean modifiedLineColors;
protected boolean modifiedLineAttributes;
protected boolean modifiedPointVertices;
protected boolean modifiedPointColors;
protected boolean modifiedPointAttributes;
protected int firstModifiedPolyVertex;
protected int lastModifiedPolyVertex;
protected int firstModifiedPolyColor;
protected int lastModifiedPolyColor;
protected int firstModifiedPolyNormal;
protected int lastModifiedPolyNormal;
protected int firstModifiedPolyTexCoord;
protected int lastModifiedPolyTexCoord;
protected int firstModifiedPolyAmbient;
protected int lastModifiedPolyAmbient;
protected int firstModifiedPolySpecular;
protected int lastModifiedPolySpecular;
protected int firstModifiedPolyEmissive;
protected int lastModifiedPolyEmissive;
protected int firstModifiedPolyShininess;
protected int lastModifiedPolyShininess;
protected int firstModifiedLineVertex;
protected int lastModifiedLineVertex;
protected int firstModifiedLineColor;
protected int lastModifiedLineColor;
protected int firstModifiedLineAttribute;
protected int lastModifiedLineAttribute;
protected int firstModifiedPointVertex;
protected int lastModifiedPointVertex;
protected int firstModifiedPointColor;
protected int lastModifiedPointColor;
protected int firstModifiedPointAttribute;
protected int lastModifiedPointAttribute;
// ........................................................
// Saved style variables to style can be re-enabled after disableStyle,
// although it won't work if properties are defined on a per-vertex basis.
protected boolean savedStroke;
protected int savedStrokeColor;
protected float savedStrokeWeight;
protected int savedStrokeCap;
protected int savedStrokeJoin;
protected boolean savedFill;
protected int savedFillColor;
protected boolean savedTint;
protected int savedTintColor;
protected int savedAmbientColor;
protected int savedSpecularColor;
protected int savedEmissiveColor;
protected float savedShininess;
protected int savedTextureMode;
// ........................................................
// variables controlling tessellation update
private boolean tessUpdate = false;
private int tessKind;
// Temporary array for performance
private float[] selVertices;
PShapeOpenGL() {
}
public PShapeOpenGL(PGraphicsOpenGL pg, int family) {
this.pg = pg;
this.family = family;
pgl = pg.pgl;
context = pgl.createEmptyContext();
bufPolyVertex = null;
bufPolyColor = null;
bufPolyNormal = null;
bufPolyTexCoord = null;
bufPolyAmbient = null;
bufPolySpecular = null;
bufPolyEmissive = null;
bufPolyShininess = null;
bufPolyIndex = null;
bufLineVertex = null;
bufLineColor = null;
bufLineAttrib = null;
bufLineIndex = null;
bufPointVertex = null;
bufPointColor = null;
bufPointAttrib = null;
bufPointIndex = null;
this.tessellator = pg.tessellator;
this.root = this;
this.parent = null;
this.tessellated = false;
if (family == GEOMETRY || family == PRIMITIVE || family == PATH) {
polyAttribs = PGraphicsOpenGL.newAttributeMap();
inGeo = PGraphicsOpenGL.newInGeometry(pg, polyAttribs, PGraphicsOpenGL.RETAINED);
}
// Style parameters are retrieved from the current values in the renderer.
textureMode = pg.textureMode;
colorMode(pg.colorMode,
pg.colorModeX, pg.colorModeY, pg.colorModeZ, pg.colorModeA);
// Initial values for fill, stroke and tint colors are also imported from
// the renderer. This is particular relevant for primitive shapes, since is
// not possible to set their color separately when creating them, and their
// input vertices are actually generated at rendering time, by which the
// color configuration of the renderer might have changed.
fill = pg.fill;
fillColor = pg.fillColor;
stroke = pg.stroke;
strokeColor = pg.strokeColor;
strokeWeight = pg.strokeWeight;
strokeCap = pg.strokeCap;
strokeJoin = pg.strokeJoin;
tint = pg.tint;
tintColor = pg.tintColor;
setAmbient = pg.setAmbient;
ambientColor = pg.ambientColor;
specularColor = pg.specularColor;
emissiveColor = pg.emissiveColor;
shininess = pg.shininess;
sphereDetailU = pg.sphereDetailU;
sphereDetailV = pg.sphereDetailV;
bezierDetail = pg.bezierDetail;
curveDetail = pg.curveDetail;
curveTightness = pg.curveTightness;
rectMode = pg.rectMode;
ellipseMode = pg.ellipseMode;
normalX = normalY = 0;
normalZ = 1;
normalMode = NORMAL_MODE_AUTO;
// To make sure that the first vertex is marked as a break.
// Same behavior as in the immediate mode.
breakShape = false;
if (family == GROUP) {
// GROUP shapes are always marked as ended.
shapeCreated = true;
}
// OpenGL supports per-vertex coloring (unlike Java2D)
perVertexStyles = true;
}
/** Create a shape from the PRIMITIVE family, using this kind and these params */
public PShapeOpenGL(PGraphicsOpenGL pg, int kind, float... p) {
this(pg, PRIMITIVE);
setKind(kind);
setParams(p);
}
@Override
public void addChild(PShape who) {
if (who instanceof PShapeOpenGL) {
if (family == GROUP) {
PShapeOpenGL c3d = (PShapeOpenGL)who;
super.addChild(c3d);
c3d.updateRoot(root);
markForTessellation();
if (c3d.family == GROUP) {
if (c3d.textures != null) {
for (PImage tex: c3d.textures) {
addTexture(tex);
}
} else {
untexChild(true);
}
if (c3d.strokedTexture) {
strokedTexture(true);
}
} else {
if (c3d.image != null) {
addTexture(c3d.image);
if (c3d.stroke) {
strokedTexture(true);
}
} else {
untexChild(true);
}
}
} else {
PGraphics.showWarning("Cannot add child shape to non-group shape.");
}
} else {
PGraphics.showWarning("Shape must be OpenGL to be added to the group.");
}
}
@Override
public void addChild(PShape who, int idx) {
if (who instanceof PShapeOpenGL) {
if (family == GROUP) {
PShapeOpenGL c3d = (PShapeOpenGL)who;
super.addChild(c3d, idx);
c3d.updateRoot(root);
markForTessellation();
if (c3d.family == GROUP) {
if (c3d.textures != null) {
for (PImage tex: c3d.textures) {
addTexture(tex);
}
} else {
untexChild(true);
}
if (c3d.strokedTexture) {
strokedTexture(true);
}
} else {
if (c3d.image != null) {
addTexture(c3d.image);
if (c3d.stroke) {
strokedTexture(true);
}
} else {
untexChild(true);
}
}
} else {
PGraphics.showWarning("Cannot add child shape to non-group shape.");
}
} else {
PGraphics.showWarning("Shape must be OpenGL to be added to the group.");
}
}
@Override
public void removeChild(int idx) {
super.removeChild(idx);
strokedTexture(false);
untexChild(false);
markForTessellation();
}
protected void updateRoot(PShape root) {
this.root = (PShapeOpenGL) root;
if (family == GROUP) {
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL)children[i];
child.updateRoot(root);
}
}
}
///////////////////////////////////////////////////////////
//
// Shape creation (temporary hack)
public static PShapeOpenGL createShape(PGraphicsOpenGL pg, PShape src) {
PShapeOpenGL dest = null;
if (src.getFamily() == GROUP) {
//dest = PGraphics3D.createShapeImpl(pg, GROUP);
dest = (PShapeOpenGL) pg.createShapeFamily(GROUP);
copyGroup(pg, src, dest);
} else if (src.getFamily() == PRIMITIVE) {
//dest = PGraphics3D.createShapeImpl(pg, src.getKind(), src.getParams());
dest = (PShapeOpenGL) pg.createShapePrimitive(src.getKind(), src.getParams());
PShape.copyPrimitive(src, dest);
} else if (src.getFamily() == GEOMETRY) {
//dest = PGraphics3D.createShapeImpl(pg, PShape.GEOMETRY);
dest = (PShapeOpenGL) pg.createShapeFamily(PShape.GEOMETRY);
PShape.copyGeometry(src, dest);
} else if (src.getFamily() == PATH) {
dest = (PShapeOpenGL) pg.createShapeFamily(PShape.PATH);
//dest = PGraphics3D.createShapeImpl(pg, PATH);
PShape.copyPath(src, dest);
}
dest.setName(src.getName());
dest.width = src.width;
dest.height = src.height;
dest.depth = src.depth;
return dest;
}
/*
static public PShapeOpenGL createShape2D(PGraphicsOpenGL pg, PShape src) {
PShapeOpenGL dest = null;
if (src.getFamily() == GROUP) {
//dest = PGraphics2D.createShapeImpl(pg, GROUP);
dest = (PShapeOpenGL) pg.createShapeFamily(GROUP);
copyGroup2D(pg, src, dest);
} else if (src.getFamily() == PRIMITIVE) {
//dest = PGraphics2D.createShapeImpl(pg, src.getKind(), src.getParams());
dest = (PShapeOpenGL) pg.createShapePrimitive(src.getKind(), src.getParams());
PShape.copyPrimitive(src, dest);
} else if (src.getFamily() == GEOMETRY) {
//dest = PGraphics2D.createShapeImpl(pg, PShape.GEOMETRY);
dest = (PShapeOpenGL) pg.createShapeFamily(PShape.GEOMETRY);
PShape.copyGeometry(src, dest);
} else if (src.getFamily() == PATH) {
//dest = PGraphics2D.createShapeImpl(pg, PATH);
dest = (PShapeOpenGL) pg.createShapeFamily(PShape.PATH);
PShape.copyPath(src, dest);
}
dest.setName(src.getName());
dest.width = src.width;
dest.height = src.height;
return dest;
}
*/
static public void copyGroup(PGraphicsOpenGL pg, PShape src, PShape dest) {
copyMatrix(src, dest);
copyStyles(src, dest);
copyImage(src, dest);
for (int i = 0; i < src.getChildCount(); i++) {
PShape c = createShape(pg, src.getChild(i));
dest.addChild(c);
}
}
/*
static public void copyGroup2D(PGraphicsOpenGL pg, PShape src, PShape dest) {
copyMatrix(src, dest);
copyStyles(src, dest);
copyImage(src, dest);
for (int i = 0; i < src.getChildCount(); i++) {
PShape c = createShape2D(pg, src.getChild(i));
dest.addChild(c);
}
}
*/
///////////////////////////////////////////////////////////
//
// Query methods
@Override
public float getWidth() {
PVector min = new PVector(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY,
Float.POSITIVE_INFINITY);
PVector max = new PVector(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY,
Float.NEGATIVE_INFINITY);
if (shapeCreated) {
getVertexMin(min);
getVertexMax(max);
}
width = max.x - min.x;
return width;
}
@Override
public float getHeight() {
PVector min = new PVector(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY,
Float.POSITIVE_INFINITY);
PVector max = new PVector(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY,
Float.NEGATIVE_INFINITY);
if (shapeCreated) {
getVertexMin(min);
getVertexMax(max);
}
height = max.y - min.y;
return height;
}
@Override
public float getDepth() {
PVector min = new PVector(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY,
Float.POSITIVE_INFINITY);
PVector max = new PVector(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY,
Float.NEGATIVE_INFINITY);
if (shapeCreated) {
getVertexMin(min);
getVertexMax(max);
}
depth = max.z - min.z;
return depth;
}
protected void getVertexMin(PVector min) {
updateTessellation();
if (family == GROUP) {
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
child.getVertexMin(min);
}
} else {
if (hasPolys) {
tessGeo.getPolyVertexMin(min, firstPolyVertex, lastPolyVertex);
}
if (is3D()) {
if (hasLines) {
tessGeo.getLineVertexMin(min, firstLineVertex, lastLineVertex);
}
if (hasPoints) {
tessGeo.getPointVertexMin(min, firstPointVertex, lastPointVertex);
}
}
}
}
protected void getVertexMax(PVector max) {
updateTessellation();
if (family == GROUP) {
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
child.getVertexMax(max);
}
} else {
if (hasPolys) {
tessGeo.getPolyVertexMax(max, firstPolyVertex, lastPolyVertex);
}
if (is3D()) {
if (hasLines) {
tessGeo.getLineVertexMax(max, firstLineVertex, lastLineVertex);
}
if (hasPoints) {
tessGeo.getPointVertexMax(max, firstPointVertex, lastPointVertex);
}
}
}
}
protected int getVertexSum(PVector sum, int count) {
updateTessellation();
if (family == GROUP) {
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
count += child.getVertexSum(sum, count);
}
} else {
if (hasPolys) {
count += tessGeo.getPolyVertexSum(sum, firstPolyVertex, lastPolyVertex);
}
if (is3D()) {
if (hasLines) {
count += tessGeo.getLineVertexSum(sum, firstLineVertex,
lastLineVertex);
}
if (hasPoints) {
count += tessGeo.getPointVertexSum(sum, firstPointVertex,
lastPointVertex);
}
}
}
return count;
}
///////////////////////////////////////////////////////////
//
// Drawing methods
@Override
public void setTextureMode(int mode) {
if (openShape) {
PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setTextureMode()");
return;
}
if (family == GROUP) {
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
child.setTextureMode(mode);
}
} else {
setTextureModeImpl(mode);
}
}
protected void setTextureModeImpl(int mode) {
if (textureMode == mode) return;
textureMode = mode;
if (image != null) {
float uFactor = image.width;
float vFactor = image.height;
if (textureMode == NORMAL) {
uFactor = 1.0f / uFactor;
vFactor = 1.0f / vFactor;
}
scaleTextureUV(uFactor, vFactor);
}
}
@Override
public void setTexture(PImage tex) {
if (openShape) {
PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setTexture()");
return;
}
if (family == GROUP) {
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
child.setTexture(tex);
}
} else {
setTextureImpl(tex);
}
}
protected void setTextureImpl(PImage tex) {
PImage image0 = image;
image = tex;
if (textureMode == IMAGE && image0 != image) {
// Need to rescale the texture coordinates
float uFactor = 1;
float vFactor = 1;
if (image != null) {
uFactor /= image.width;
vFactor /= image.height;
}
if (image0 != null) {
uFactor *= image0.width;
vFactor *= image0.height;
}
scaleTextureUV(uFactor, vFactor);
}
if (image0 != tex && parent != null) {
((PShapeOpenGL)parent).removeTexture(image0, this);
}
if (parent != null) {
((PShapeOpenGL)parent).addTexture(image);
if (is2D() && stroke) {
((PShapeOpenGL)parent).strokedTexture(true);
}
}
}
protected void scaleTextureUV(float uFactor, float vFactor) {
if (PGraphicsOpenGL.same(uFactor, 1) &&
PGraphicsOpenGL.same(vFactor, 1)) return;
for (int i = 0; i < inGeo.vertexCount; i++) {
float u = inGeo.texcoords[2 * i + 0];
float v = inGeo.texcoords[2 * i + 1];
inGeo.texcoords[2 * i + 0] = PApplet.min(1, u * uFactor);
inGeo.texcoords[2 * i + 1] = PApplet.min(1, v * uFactor);
}
if (shapeCreated && tessellated && hasPolys) {
int last1 = 0;
if (is3D()) {
last1 = lastPolyVertex + 1;
} else if (is2D()) {
last1 = lastPolyVertex + 1;
if (-1 < firstLineVertex) last1 = firstLineVertex;
if (-1 < firstPointVertex) last1 = firstPointVertex;
}
for (int i = firstLineVertex; i < last1; i++) {
float u = tessGeo.polyTexCoords[2 * i + 0];
float v = tessGeo.polyTexCoords[2 * i + 1];
tessGeo.polyTexCoords[2 * i + 0] = PApplet.min(1, u * uFactor);
tessGeo.polyTexCoords[2 * i + 1] = PApplet.min(1, v * uFactor);
}
root.setModifiedPolyTexCoords(firstPolyVertex, last1 - 1);
}
}
protected void addTexture(PImage tex) {
if (textures == null) {
textures = new HashSet<>();
}
textures.add(tex);
if (parent != null) {
((PShapeOpenGL)parent).addTexture(tex);
}
}
protected void removeTexture(PImage tex, PShapeOpenGL caller) {
if (textures == null || !textures.contains(tex)) return; // Nothing to remove.
// First check that none of the child shapes have texture tex...
boolean childHasTex = false;
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
if (child == caller) continue;
if (child.hasTexture(tex)) {
childHasTex = true;
break;
}
}
if (!childHasTex) {
// ...if not, it is safe to remove from this shape.
textures.remove(tex);
if (textures.size() == 0) {
textures = null;
}
}
// Since this shape and all its child shapes don't contain
// tex anymore, we now can remove it from the parent.
if (parent != null) {
((PShapeOpenGL)parent).removeTexture(tex, this);
}
}
protected void strokedTexture(boolean newValue) {
strokedTexture(newValue, null);
}
protected void strokedTexture(boolean newValue, PShapeOpenGL caller) {
if (strokedTexture == newValue) return; // Nothing to change.
if (newValue) {
strokedTexture = true;
} else {
// Check that none of the child shapes have a stroked texture...
strokedTexture = false;
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
if (child == caller) continue;
if (child.hasStrokedTexture()) {
strokedTexture = true;
break;
}
}
}
// Now we can update the parent shape.
if (parent != null) {
((PShapeOpenGL)parent).strokedTexture(newValue, this);
}
}
protected void untexChild(boolean newValue) {
untexChild(newValue, null);
}
protected void untexChild(boolean newValue, PShapeOpenGL caller) {
if (untexChild == newValue) return; // Nothing to change.
if (newValue) {
untexChild = true;
} else {
// Check if any of the child shapes is not textured...
untexChild = false;
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
if (child == caller) continue;
if (!child.hasTexture()) {
untexChild = true;
break;
}
}
}
// Now we can update the parent shape.
if (parent != null) {
((PShapeOpenGL)parent).untexChild(newValue, this);
}
}
protected boolean hasTexture() {
if (family == GROUP) {
return textures != null && 0 < textures.size();
} else {
return image != null;
}
}
protected boolean hasTexture(PImage tex) {
if (family == GROUP) {
return textures != null && textures.contains(tex);
} else {
return image == tex;
}
}
protected boolean hasStrokedTexture() {
if (family == GROUP) {
return strokedTexture;
} else {
return image != null && stroke;
}
}
@Override
public void solid(boolean solid) {
if (family == GROUP) {
for (int i = 0; i < childCount; i++) {
PShapeOpenGL child = (PShapeOpenGL) children[i];
child.solid(solid);
}
} else {
this.solid = solid;
}
}
@Override
protected void beginContourImpl() {
if (family == PShape.PATH) {
super.beginContourImpl();
return;
}
breakShape = true;
}
@Override
protected void endContourImpl() {
if (family == PShape.PATH) {
super.endContourImpl();
}
}
@Override
public void vertex(float x, float y) {
if (family == PShape.PATH) {
super.vertex(x, y);
return;
}
vertexImpl(x, y, 0, 0, 0);
if (image != null) {
PGraphics.showWarning(PGraphicsOpenGL.MISSING_UV_TEXCOORDS_ERROR);
}
}
@Override
public void vertex(float x, float y, float u, float v) {
vertexImpl(x, y, 0, u, v);
}
@Override
public void vertex(float x, float y, float z) {
if (family == PShape.PATH) {
super.vertex(x, y);
return;
}
vertexImpl(x, y, z, 0, 0);
if (image != null) {
PGraphics.showWarning(PGraphicsOpenGL.MISSING_UV_TEXCOORDS_ERROR);
}
}
@Override
public void vertex(float x, float y, float z, float u, float v) {
vertexImpl(x, y, z, u, v);
}
protected void vertexImpl(float x, float y, float z, float u, float v) {
if (!openShape) {
PGraphics.showWarning(OUTSIDE_BEGIN_END_ERROR, "vertex()");
return;
}
if (family == GROUP) {
PGraphics.showWarning("Cannot add vertices to GROUP shape");
return;
}
boolean textured = image != null;
int fcolor = 0x00;
if (fill || textured) {
if (!textured) {
fcolor = fillColor;
} else {
if (tint) {
fcolor = tintColor;
} else {
fcolor = 0xffFFFFFF;
}
}
}
if (textureMode == IMAGE && image != null) {
u /= image.width;
v /= image.height;
}
int scolor = 0x00;
float sweight = 0;
if (stroke) {
scolor = strokeColor;
sweight = strokeWeight;
}
inGeo.addVertex(x, y, z,
fcolor,
normalX, normalY, normalZ,
u, v,
scolor, sweight,
ambientColor, specularColor, emissiveColor, shininess,
VERTEX, vertexBreak());
markForTessellation();
}
protected boolean vertexBreak() {
if (breakShape) {
breakShape = false;
return true;
}
return false;
}
@Override
public void normal(float nx, float ny, float nz) {
if (!openShape) {
PGraphics.showWarning(OUTSIDE_BEGIN_END_ERROR, "normal()");
return;
}
if (family == GROUP) {
PGraphics.showWarning("Cannot set normal in GROUP shape");
return;
}
normalX = nx;
normalY = ny;
normalZ = nz;
// if drawing a shape and the normal hasn't been set yet,
// then we need to set the normals for each vertex so far
if (normalMode == NORMAL_MODE_AUTO) {
// One normal per begin/end shape
normalMode = NORMAL_MODE_SHAPE;
} else if (normalMode == NORMAL_MODE_SHAPE) {
// a separate normal for each vertex
normalMode = NORMAL_MODE_VERTEX;
}
}
@Override
public void attribPosition(String name, float x, float y, float z) {
VertexAttribute attrib = attribImpl(name, VertexAttribute.POSITION, PGL.FLOAT, 3);
if (attrib != null) attrib.set(x, y, z);
}
@Override
public void attribNormal(String name, float nx, float ny, float nz) {
VertexAttribute attrib = attribImpl(name, VertexAttribute.NORMAL, PGL.FLOAT, 3);
if (attrib != null) attrib.set(nx, ny, nz);
}
@Override
public void attribColor(String name, int color) {
VertexAttribute attrib = attribImpl(name, VertexAttribute.COLOR, PGL.INT, 1);
if (attrib != null) attrib.set(new int[] {color});
}
@Override
public void attrib(String name, float... values) {
VertexAttribute attrib = attribImpl(name, VertexAttribute.OTHER, PGL.FLOAT, values.length);
if (attrib != null) attrib.set(values);
}
@Override
public void attrib(String name, int... values) {
VertexAttribute attrib = attribImpl(name, VertexAttribute.OTHER, PGL.INT, values.length);
if (attrib != null) attrib.set(values);
}
@Override
public void attrib(String name, boolean... values) {
VertexAttribute attrib = attribImpl(name, VertexAttribute.OTHER, PGL.BOOL, values.length);
if (attrib != null) attrib.set(values);
}
protected VertexAttribute attribImpl(String name, int kind, int type, int size) {
if (4 < size) {
PGraphics.showWarning("Vertex attributes cannot have more than 4 values");
return null;
}
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null) {
attrib = new VertexAttribute(pg, name, kind, type, size, PGL.bufferUsageRetained);
polyAttribs.put(name, attrib);
inGeo.initAttrib(attrib);
}
if (attrib.kind != kind) {
PGraphics.showWarning("The attribute kind cannot be changed after creation");
return null;
}
if (attrib.type != type) {
PGraphics.showWarning("The attribute type cannot be changed after creation");
return null;
}
if (attrib.size != size) {
PGraphics.showWarning("New value for vertex attribute has wrong number of values");
return null;
}
return attrib;
}
@Override
public void endShape(int mode) {
super.endShape(mode);
// Input arrays are trimmed since they are expanded by doubling their old
// size, which might lead to arrays larger than the vertex counts.
inGeo.trim();
close = mode == CLOSE;
markForTessellation();
shapeCreated = true;
}
@Override
public void setParams(float[] source) {
if (family != PRIMITIVE) {
PGraphics.showWarning("Parameters can only be set to PRIMITIVE shapes");
return;
}
super.setParams(source);
markForTessellation();
shapeCreated = true;
}
@Override
public void setPath(int vcount, float[][] verts, int ccount, int[] codes) {
if (family != PATH) {
PGraphics.showWarning("Vertex coordinates and codes can only be set to " +
"PATH shapes");
return;
}
super.setPath(vcount, verts, ccount, codes);
markForTessellation();
shapeCreated = true;
}
///////////////////////////////////////////////////////////
//
// Geometric transformations
@Override
public void translate(float tx, float ty) {
if (is3D) {
transform(TRANSLATE, tx, ty, 0);
} else {
transform(TRANSLATE, tx, ty);
}
}
@Override
public void translate(float tx, float ty, float tz) {
transform(TRANSLATE, tx, ty, tz);
}
@Override
public void rotate(float angle) {
  // In 3D, a plain rotate() is a rotation about the Z axis.
  if (is3D) {
    transform(ROTATE, angle, 0, 0, 1);
  } else {
    transform(ROTATE, angle);
  }
}
@Override
public void rotateX(float angle) {
  // Rotation about the X axis.
  rotate(angle, 1, 0, 0);
}
@Override
public void rotateY(float angle) {
  // Rotation about the Y axis.
  rotate(angle, 0, 1, 0);
}
@Override
public void rotateZ(float angle) {
  // Rotation about the Z axis.
  rotate(angle, 0, 0, 1);
}
@Override
public void rotate(float angle, float v0, float v1, float v2) {
  // Rotation about an arbitrary axis (v0, v1, v2).
  transform(ROTATE, angle, v0, v1, v2);
}
@Override
public void scale(float s) {
  // Uniform scale; expanded to 3 components in 3D.
  if (is3D) {
    transform(SCALE, s, s, s);
  } else {
    transform(SCALE, s, s);
  }
}
@Override
public void scale(float x, float y) {
  // 2-axis scale; z is kept at 1 in 3D.
  if (is3D) {
    transform(SCALE, x, y, 1);
  } else {
    transform(SCALE, x, y);
  }
}
@Override
public void scale(float x, float y, float z) {
  // Full 3-axis scale.
  transform(SCALE, x, y, z);
}
@Override
public void applyMatrix(PMatrix2D source) {
  // Apply an arbitrary 2D affine matrix.
  transform(MATRIX, source.m00, source.m01, source.m02,
                    source.m10, source.m11, source.m12);
}
@Override
public void applyMatrix(float n00, float n01, float n02,
                        float n10, float n11, float n12) {
  // Apply a 2D affine matrix given as six coefficients (row-major).
  transform(MATRIX, n00, n01, n02,
                    n10, n11, n12);
}
@Override
public void applyMatrix(float n00, float n01, float n02, float n03,
                        float n10, float n11, float n12, float n13,
                        float n20, float n21, float n22, float n23,
                        float n30, float n31, float n32, float n33) {
  // Apply a full 4x4 matrix given as sixteen coefficients (row-major).
  transform(MATRIX, n00, n01, n02, n03,
                    n10, n11, n12, n13,
                    n20, n21, n22, n23,
                    n30, n31, n32, n33);
}
@Override
public void resetMatrix() {
  // Undo the accumulated transform: if geometry was already tessellated,
  // apply the stored inverse in place before clearing both matrices.
  if (shapeCreated && matrix != null && matrixInv != null) {
    if (family == GROUP) {
      updateTessellation();
    }
    if (tessellated) {
      applyMatrixImpl(matrixInv);
    }
    matrix.reset();
    matrixInv.reset();
  }
}
// Core transform dispatcher: builds the requested transform and its inverse,
// pre-applies the transform to the accumulated matrix, applies the inverse to
// the accumulated inverse, and, if already tessellated, pushes the transform
// onto the tessellated geometry in place. The inverse is kept so that
// resetMatrix() can undo everything without re-tessellating.
protected void transform(int type, float... args) {
  int dimensions = is3D ? 3 : 2;
  boolean invertible = true;
  checkMatrix(dimensions);
  if (transform == null) {
    // Lazily create scratch matrices of the right dimensionality.
    if (dimensions == 2) {
      transform = new PMatrix2D();
      transformInv = new PMatrix2D();
    } else {
      transform = new PMatrix3D();
      transformInv = new PMatrix3D();
    }
  } else {
    transform.reset();
    transformInv.reset();
  }
  // Infer coordinate dimensionality from the argument count where it is
  // ambiguous (ROTATE: 1 arg = 2D; MATRIX: 6 args = 2D affine).
  int ncoords = args.length;
  if (type == ROTATE) {
    ncoords = args.length == 1 ? 2 : 3;
  } else if (type == MATRIX) {
    ncoords = args.length == 6 ? 2 : 3;
  }
  switch (type) {
  case TRANSLATE:
    if (ncoords == 3) {
      transform.translate(args[0], args[1], args[2]);
      PGraphicsOpenGL.invTranslate((PMatrix3D)transformInv, args[0], args[1], args[2]);
    } else {
      transform.translate(args[0], args[1]);
      PGraphicsOpenGL.invTranslate((PMatrix2D)transformInv, args[0], args[1]);
    }
    break;
  case ROTATE:
    if (ncoords == 3) {
      transform.rotate(args[0], args[1], args[2], args[3]);
      PGraphicsOpenGL.invRotate((PMatrix3D)transformInv, args[0], args[1], args[2], args[3]);
    } else {
      transform.rotate(args[0]);
      PGraphicsOpenGL.invRotate((PMatrix2D)transformInv, -args[0]);
    }
    break;
  case SCALE:
    if (ncoords == 3) {
      transform.scale(args[0], args[1], args[2]);
      PGraphicsOpenGL.invScale((PMatrix3D)transformInv, args[0], args[1], args[2]);
    } else {
      transform.scale(args[0], args[1]);
      PGraphicsOpenGL.invScale((PMatrix2D)transformInv, args[0], args[1]);
    }
    break;
  case MATRIX:
    // Arbitrary matrix: the inverse must be computed numerically and may
    // not exist (singular matrix).
    if (ncoords == 3) {
      transform.set(args[ 0], args[ 1], args[ 2], args[ 3],
                    args[ 4], args[ 5], args[ 6], args[ 7],
                    args[ 8], args[ 9], args[10], args[11],
                    args[12], args[13], args[14], args[15]);
    } else {
      transform.set(args[0], args[1], args[2],
                    args[3], args[4], args[5]);
    }
    transformInv.set(transform);
    invertible = transformInv.invert();
    break;
  }
  matrix.preApply(transform);
  if (invertible) {
    matrixInv.apply(transformInv);
  } else {
    PGraphics.showWarning("Transformation applied on the shape cannot be inverted");
  }
  if (tessellated) applyMatrixImpl(transform);
}
// Applies a matrix directly to this shape's slice of the tessellated
// geometry and flags the touched ranges so the root re-uploads them.
// Line/point geometry only has separate buffers in 3D; in 2D it is folded
// into the poly buffers handled by the first branch.
protected void applyMatrixImpl(PMatrix matrix) {
  if (hasPolys) {
    tessGeo.applyMatrixOnPolyGeometry(matrix,
                                      firstPolyVertex, lastPolyVertex);
    root.setModifiedPolyVertices(firstPolyVertex, lastPolyVertex);
    root.setModifiedPolyNormals(firstPolyVertex, lastPolyVertex);
    for (VertexAttribute attrib: polyAttribs.values()) {
      // Only position- and normal-like attributes change under a transform.
      if (attrib.isPosition() || attrib.isNormal()) {
        root.setModifiedPolyAttrib(attrib, firstPolyVertex, lastPolyVertex);
      }
    }
  }
  if (is3D()) {
    if (hasLines) {
      tessGeo.applyMatrixOnLineGeometry(matrix,
                                        firstLineVertex, lastLineVertex);
      root.setModifiedLineVertices(firstLineVertex, lastLineVertex);
      root.setModifiedLineAttributes(firstLineVertex, lastLineVertex);
    }
    if (hasPoints) {
      tessGeo.applyMatrixOnPointGeometry(matrix,
                                         firstPointVertex, lastPointVertex);
      root.setModifiedPointVertices(firstPointVertex, lastPointVertex);
      root.setModifiedPointAttributes(firstPointVertex, lastPointVertex);
    }
  }
}
@Override
protected void checkMatrix(int dimensions) {
  // Ensure matrix/matrixInv exist with at least the requested
  // dimensionality, promoting 2D matrices to 3D when needed.
  if (matrix == null) {
    if (dimensions == 2) {
      matrix = new PMatrix2D();
      matrixInv = new PMatrix2D();
    } else {
      matrix = new PMatrix3D();
      matrixInv = new PMatrix3D();
    }
  } else if (dimensions == 3 && (matrix instanceof PMatrix2D)) {
    // Promote while preserving the accumulated transform.
    matrix = new PMatrix3D(matrix);
    matrixInv = new PMatrix3D(matrixInv);
  }
}
///////////////////////////////////////////////////////////
//
// Bezier curves
@Override
public void bezierDetail(int detail) {
  bezierDetail = detail;
  // Only re-tessellate if there are curve vertices that depend on detail.
  if (0 < inGeo.codeCount) {
    markForTessellation();
  }
  //pg.bezierDetail(detail); // setting the detail in the renderer, WTF??
}
@Override
public void bezierVertex(float x2, float y2,
                         float x3, float y3,
                         float x4, float y4) {
  // PATH shapes keep the PShape code/vertex representation; other
  // families go straight into the input geometry with z = 0.
  if (family == PShape.PATH) {
    super.bezierVertex(x2, y2, x3, y3, x4, y4);
    return;
  }
  bezierVertexImpl(x2, y2, 0,
                   x3, y3, 0,
                   x4, y4, 0);
}
@Override
public void bezierVertex(float x2, float y2, float z2,
                         float x3, float y3, float z3,
                         float x4, float y4, float z4) {
  // 3D bezier vertex: two control points plus the anchor.
  bezierVertexImpl(x2, y2, z2,
                   x3, y3, z3,
                   x4, y4, z4);
}
// Records a bezier vertex into the input geometry, stamping it with the
// current material state and normal.
protected void bezierVertexImpl(float x2, float y2, float z2,
                                float x3, float y3, float z3,
                                float x4, float y4, float z4) {
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addBezierVertex(x2, y2, z2,
                        x3, y3, z3,
                        x4, y4, z4, vertexBreak());
}
@Override
public void quadraticVertex(float cx, float cy,
                            float x3, float y3) {
  // PATH shapes keep the PShape representation; other families feed
  // the input geometry with z = 0.
  if (family == PShape.PATH) {
    super.quadraticVertex(cx, cy, x3, y3);
    return;
  }
  quadraticVertexImpl(cx, cy, 0,
                      x3, y3, 0);
}
@Override
public void quadraticVertex(float cx, float cy, float cz,
                            float x3, float y3, float z3) {
  // 3D quadratic vertex: one control point plus the anchor.
  quadraticVertexImpl(cx, cy, cz,
                      x3, y3, z3);
}
// Records a quadratic vertex into the input geometry with the current
// material state and normal.
protected void quadraticVertexImpl(float cx, float cy, float cz,
                                   float x3, float y3, float z3) {
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addQuadraticVertex(cx, cy, cz,
                           x3, y3, z3, vertexBreak());
}
///////////////////////////////////////////////////////////
//
// Catmull-Rom curves
@Override
public void curveDetail(int detail) {
  curveDetail = detail;
  // pg.curveDetail(detail);
  // Re-tessellate only when curve vertices exist that depend on detail.
  if (0 < inGeo.codeCount) {
    markForTessellation();
  }
}
@Override
public void curveTightness(float tightness) {
  curveTightness = tightness;
  // pg.curveTightness(tightness);
  // Re-tessellate only when curve vertices exist that depend on tightness.
  if (0 < inGeo.codeCount) {
    markForTessellation();
  }
}
@Override
public void curveVertex(float x, float y) {
  // PATH shapes keep the PShape representation; others use z = 0.
  if (family == PShape.PATH) {
    super.curveVertex(x, y);
    return;
  }
  curveVertexImpl(x, y, 0);
}
@Override
public void curveVertex(float x, float y, float z) {
  // 3D Catmull-Rom curve vertex.
  curveVertexImpl(x, y, z);
}
// Records a curve vertex into the input geometry with the current
// material state and normal.
protected void curveVertexImpl(float x, float y, float z) {
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addCurveVertex(x, y, z, vertexBreak());
}
///////////////////////////////////////////////////////////
//
// Setters/getters of individual vertices
// Convenience overload: defaults includeChildren to false, so callers that
// do not care about child-shape vertices need not pass the flag explicitly.
@Override
public int getVertexCount() {
  return getVertexCount(false); // delegate with default includeChildren = false
}
@Override
public int getVertexCount(boolean includeChildren) {
  int count = 0;
  // If the shape is a group, recursively count the vertices of its children
  if (family == GROUP) {
    if(!includeChildren){
      return 0;
    }
    // Iterate through all the child shapes and count their vertices
    for (int i = 0; i < getChildCount(); i++) {
      count += getChild(i).getVertexCount(true); // Recursive call to get the vertex count of child shapes
    }
  } else {
    if (root.tessUpdate) {
      // During a tessellated-geometry edit, report the size of this
      // shape's slice of the active tessellated buffer.
      if (root.tessKind == TRIANGLES) {
        count += lastPolyVertex - firstPolyVertex + 1;
      } else if (root.tessKind == LINES) {
        count += lastLineVertex - firstLineVertex + 1;
      } else if (root.tessKind == POINTS) {
        count += lastPointVertex - firstPointVertex + 1;
      } else {
        count += 0; // Handle other cases
      }
    } else {
      if (family == PRIMITIVE || family == PATH) {
        // the input geometry of primitive and path shapes is built during
        // tessellation
        updateTessellation();
      }
      count += inGeo.vertexCount;
    }
  }
  return count;
}
@Override
public PVector getVertex(int index, PVector vec) {
  // Reads a vertex either from the root's tessellated buffer (4 floats per
  // vertex) during a tess edit, or from the input geometry (3 floats).
  if (vec == null) vec = new PVector();
  if (root.tessUpdate) {
    int tessIdx = getFirstTessVertex() + index;
    vec.x = root.selVertices[4 * tessIdx + 0];
    vec.y = root.selVertices[4 * tessIdx + 1];
    vec.z = root.selVertices[4 * tessIdx + 2];
  } else {
    vec.x = inGeo.vertices[3 * index + 0];
    vec.y = inGeo.vertices[3 * index + 1];
    vec.z = inGeo.vertices[3 * index + 2];
  }
  return vec;
}
@Override
public float getVertexX(int index) {
  // X component; tessellated buffer is 4-float stride, input is 3-float.
  if (root.tessUpdate) {
    int tessIdx = getFirstTessVertex() + index;
    return root.selVertices[4 * tessIdx + 0];
  } else {
    return inGeo.vertices[3 * index + 0];
  }
}
@Override
public float getVertexY(int index) {
  // Y component; tessellated buffer is 4-float stride, input is 3-float.
  if (root.tessUpdate) {
    int tessIdx = getFirstTessVertex() + index;
    return root.selVertices[4 * tessIdx + 1];
  } else {
    return inGeo.vertices[3 * index + 1];
  }
}
@Override
public float getVertexZ(int index) {
  // Z component; tessellated buffer is 4-float stride, input is 3-float.
  if (root.tessUpdate) {
    int tessIdx = getFirstTessVertex() + index;
    return root.selVertices[4 * tessIdx + 2];
  } else {
    return inGeo.vertices[3 * index + 2];
  }
}
@Override
public void setVertex(int index, float x, float y) {
  // 2D convenience: z defaults to 0.
  setVertex(index, x, y, 0);
}
@Override
public void setVertex(int index, float x, float y, float z) {
  // Writes a vertex either into the tessellated buffer (during a tess
  // edit, flagging it for re-upload) or into the input geometry
  // (triggering re-tessellation). Not allowed inside begin/endShape.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setVertex()");
    return;
  }
  if (root.tessUpdate) {
    int tessIdx = getFirstTessVertex() + index;
    root.selVertices[4 * tessIdx + 0] = x;
    root.selVertices[4 * tessIdx + 1] = y;
    root.selVertices[4 * tessIdx + 2] = z;
    root.setModifiedTessVertex(tessIdx, tessIdx);
  } else {
    if (family == PATH) {
      // PATH shapes store vertices in the PShape arrays; only plain
      // VERTEX entries can be edited (not curve control points).
      if (vertexCodes != null && vertexCodeCount > 0 &&
          vertexCodes[index] != VERTEX) {
        PGraphics.showWarning(NOT_A_SIMPLE_VERTEX, "setVertex()");
        return;
      }
      vertices[index][X] = x;
      vertices[index][Y] = y;
      if (is3D && vertices[index].length > 2) {
        // P3D allows to modify 2D shapes, ignoring the Z coordinate.
        vertices[index][Z] = z;
      }
    } else {
      inGeo.vertices[3 * index + 0] = x;
      inGeo.vertices[3 * index + 1] = y;
      inGeo.vertices[3 * index + 2] = z;
    }
    markForTessellation();
  }
}
@Override
public void setVertex(int index, PVector vec) {
  // PVector variant of setVertex(index, x, y, z); same branching on
  // tessellated vs. input geometry. Not allowed inside begin/endShape.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setVertex()");
    return;
  }
  if (root.tessUpdate) {
    int tessIdx = getFirstTessVertex() + index;
    root.selVertices[4 * tessIdx + 0] = vec.x;
    root.selVertices[4 * tessIdx + 1] = vec.y;
    root.selVertices[4 * tessIdx + 2] = vec.z;
    root.setModifiedTessVertex(tessIdx, tessIdx);
  } else {
    if (family == PATH) {
      // Only plain VERTEX entries can be edited on PATH shapes.
      if (vertexCodes != null && vertexCodeCount > 0 &&
          vertexCodes[index] != VERTEX) {
        PGraphics.showWarning(NOT_A_SIMPLE_VERTEX, "setVertex()");
        return;
      }
      vertices[index][X] = vec.x;
      vertices[index][Y] = vec.y;
      if (is3D && vertices[index].length > 2) {
        vertices[index][Z] = vec.z;
      }
    } else {
      inGeo.vertices[3 * index + 0] = vec.x;
      inGeo.vertices[3 * index + 1] = vec.y;
      inGeo.vertices[3 * index + 2] = vec.z;
    }
    markForTessellation();
  }
}
// Index of this shape's first vertex inside the root's tessellated buffer
// for the geometry kind currently being edited (root.tessKind).
private int getFirstTessVertex() {
  return root.tessKind == TRIANGLES ? firstPolyVertex
       : root.tessKind == LINES     ? firstLineVertex
       : firstPointVertex;
}
// Flags the [first, last] range of the active tessellated buffer as
// modified so the root re-uploads it to the GPU on next draw.
private void setModifiedTessVertex(int first, int last) {
  if (root.tessKind == TRIANGLES) {
    root.setModifiedPolyVertices(first, last);
  } else if (root.tessKind == LINES) {
    root.setModifiedLineVertices(first, last);
  } else {
    root.setModifiedPointVertices(first, last);
  }
}
@Override
public PVector getNormal(int index, PVector vec) {
  // Reads a normal from the tessellated poly buffer during a tess edit,
  // or from the input geometry otherwise (both 3-float stride).
  if (vec == null) vec = new PVector();
  if (root.tessUpdate) {
    int tessIdx = firstPolyVertex + index;
    vec.x = tessGeo.polyNormals[3 * tessIdx + 0];
    vec.y = tessGeo.polyNormals[3 * tessIdx + 1];
    vec.z = tessGeo.polyNormals[3 * tessIdx + 2];
  } else {
    vec.x = inGeo.normals[3 * index + 0];
    vec.y = inGeo.normals[3 * index + 1];
    vec.z = inGeo.normals[3 * index + 2];
  }
  return vec;
}
@Override
public float getNormalX(int index) {
  // X component of the normal; see getNormal() for source selection.
  if (root.tessUpdate) {
    return tessGeo.polyNormals[3 * (firstPolyVertex + index) + 0];
  }
  return inGeo.normals[3 * index + 0];
}
@Override
public float getNormalY(int index) {
  // Y component of the normal; see getNormal() for source selection.
  if (root.tessUpdate) {
    return tessGeo.polyNormals[3 * (firstPolyVertex + index) + 1];
  }
  return inGeo.normals[3 * index + 1];
}
@Override
public float getNormalZ(int index) {
  // Z component of the normal; see getNormal() for source selection.
  if (root.tessUpdate) {
    return tessGeo.polyNormals[3 * (firstPolyVertex + index) + 2];
  }
  return inGeo.normals[3 * index + 2];
}
@Override
public void setNormal(int index, float nx, float ny, float nz) {
  // Writes a per-vertex normal into either the tessellated poly buffer
  // (flagging it for re-upload) or the input geometry.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setNormal()");
    return;
  }
  if (root.tessUpdate) {
    int tessIdx = firstPolyVertex + index;
    tessGeo.polyNormals[3 * tessIdx + 0] = nx;
    tessGeo.polyNormals[3 * tessIdx + 1] = ny;
    tessGeo.polyNormals[3 * tessIdx + 2] = nz;
    root.setModifiedPolyNormals(tessIdx, tessIdx);
  } else {
    inGeo.normals[3 * index + 0] = nx;
    inGeo.normals[3 * index + 1] = ny;
    inGeo.normals[3 * index + 2] = nz;
  }
  // NOTE(review): markForTessellation() runs in BOTH branches here, unlike
  // setVertex()/setTextureUV() where it is confined to the non-tessUpdate
  // branch — confirm whether re-tessellating after a direct tess edit is
  // intended, as it would discard the edit just made above.
  markForTessellation();
}
@Override
public float getTextureU(int index) {
  // U texture coordinate (2-float stride in both buffers).
  if (root.tessUpdate) {
    return tessGeo.polyTexCoords[2 * (firstPolyVertex + index) + 0];
  }
  return inGeo.texcoords[2 * index + 0];
}
@Override
public float getTextureV(int index) {
  // V texture coordinate (2-float stride in both buffers).
  if (root.tessUpdate) {
    return tessGeo.polyTexCoords[2 * (firstPolyVertex + index) + 1];
  }
  return inGeo.texcoords[2 * index + 1];
}
@Override
public void setTextureUV(int index, float u, float v) {
  // Sets a per-vertex texture coordinate. In IMAGE texture mode the
  // pixel coordinates are normalized by the image dimensions first.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setTextureUV()");
    return;
  }
  if (textureMode == IMAGE && image != null) {
    u /= image.width;
    v /= image.height;
  }
  if (root.tessUpdate) {
    int tessIdx = firstPolyVertex + index;
    tessGeo.polyTexCoords[2 * tessIdx + 0] = u;
    tessGeo.polyTexCoords[2 * tessIdx + 1] = v;
    root.setModifiedPolyTexCoords(tessIdx, tessIdx);
  } else {
    inGeo.texcoords[2 * index + 0] = u;
    inGeo.texcoords[2 * index + 1] = v;
    markForTessellation();
  }
}
@Override
public int getFill(int index) {
  // Per-vertex fill color; only meaningful on non-GROUP, untextured
  // shapes (textured shapes use tint instead). Returns 0 otherwise.
  if (family != GROUP && image == null) {
    if (root.tessUpdate) {
      return PGL.nativeToJavaARGB(tessGeo.polyColors[firstPolyVertex + index]);
    } else {
      return PGL.nativeToJavaARGB(inGeo.colors[index]);
    }
  } else {
    return 0;
  }
}
@Override
public void setFill(boolean fill) {
  // Toggles fill; GROUP shapes recurse into children, others
  // re-tessellate only on an actual change.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setFill()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setFill(fill);
    }
  } else if (this.fill != fill) {
    markForTessellation();
  }
  this.fill = fill;
}
@Override
public void setFill(int fill) {
  // Sets the fill color on this shape (or recursively on all children
  // when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setFill()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setFill(fill);
    }
  } else {
    setFillImpl(fill);
  }
}
// Applies the fill color to all input vertices and, when already
// tessellated, directly to the tessellated poly colors (flagged for
// re-upload) — no re-tessellation needed for a pure color change.
protected void setFillImpl(int fill) {
  if (fillColor == fill) return;
  fillColor = fill;
  if (image == null) {
    Arrays.fill(inGeo.colors, 0, inGeo.vertexCount,
                PGL.javaToNativeARGB(fillColor));
    if (shapeCreated && tessellated && hasPolys) {
      if (is3D()) {
        Arrays.fill(tessGeo.polyColors, firstPolyVertex, lastPolyVertex + 1,
                    PGL.javaToNativeARGB(fillColor));
        root.setModifiedPolyColors(firstPolyVertex, lastPolyVertex);
      } else if (is2D()) {
        // In 2D, line/point geometry lives in the poly buffer after the
        // fill vertices; stop before the first stroke vertex.
        int last1 = lastPolyVertex + 1;
        if (-1 < firstLineVertex) last1 = firstLineVertex;
        if (-1 < firstPointVertex) last1 = firstPointVertex;
        Arrays.fill(tessGeo.polyColors, firstPolyVertex, last1,
                    PGL.javaToNativeARGB(fillColor));
        root.setModifiedPolyColors(firstPolyVertex, last1 - 1);
      }
    }
  }
  if (!setAmbient) {
    // Setting the ambient color from the current fill
    // is what the old P3D did and allows to have an
    // default ambient color when the user doesn't specify
    // it explicitly.
    setAmbientImpl(fill);
    // setAmbientImpl() flips setAmbient to true; reset it so the ambient
    // keeps tracking future fill changes until set explicitly.
    setAmbient = false;
  }
}
@Override
public void setFill(int index, int fill) {
  // Sets the fill color of a single vertex (untextured shapes only).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setFill()");
    return;
  }
  if (image == null) {
    if (root.tessUpdate) {
      int tessIdx = firstPolyVertex + index;
      tessGeo.polyColors[tessIdx] = PGL.javaToNativeARGB(fill);
      root.setModifiedPolyColors(tessIdx, tessIdx);
    } else {
      inGeo.colors[index] = PGL.javaToNativeARGB(fill);
      markForTessellation();
    }
  }
}
@Override
public int getTint(int index) {
  // Per-vertex tint; only meaningful on textured, non-GROUP shapes
  // (the shared color array holds fill OR tint depending on texturing).
  if (family != GROUP && image != null) {
    if (root.tessUpdate) {
      return PGL.nativeToJavaARGB(tessGeo.polyColors[firstPolyVertex + index]);
    } else {
      return PGL.nativeToJavaARGB(inGeo.colors[index]);
    }
  } else {
    return 0;
  }
}
@Override
public void setTint(boolean tint) {
  // Toggles tinting; GROUP shapes recurse into children. Turning tint
  // off restores the neutral (opaque white) tint color.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setTint()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      // BUG FIX: previously forwarded the unrelated `fill` flag
      // (child.setTint(fill)), so children received the fill state
      // instead of the requested tint state.
      child.setTint(tint);
    }
  } else if (this.tint && !tint) {
    // Tint switched off: reset the vertex colors to neutral white.
    setTintImpl(0xFFFFFFFF);
  }
  this.tint = tint;
}
@Override
public void setTint(int tint) {
  // Sets the tint color on this shape (or recursively on all children
  // when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setTint()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setTint(tint);
    }
  } else {
    setTintImpl(tint);
  }
}
// Applies the tint color to all input vertices and, when tessellated,
// directly to the tessellated poly colors (textured shapes only).
// Mirrors setFillImpl(); a pure color change needs no re-tessellation.
protected void setTintImpl(int tint) {
  if (tintColor == tint) return;
  tintColor = tint;
  if (image != null) {
    Arrays.fill(inGeo.colors, 0, inGeo.vertexCount,
                PGL.javaToNativeARGB(tintColor));
    if (shapeCreated && tessellated && hasPolys) {
      if (is3D()) {
        Arrays.fill(tessGeo.polyColors, firstPolyVertex, lastPolyVertex + 1,
                    PGL.javaToNativeARGB(tintColor));
        root.setModifiedPolyColors(firstPolyVertex, lastPolyVertex);
      } else if (is2D()) {
        // In 2D, stop before the first stroke (line/point) vertex.
        int last1 = lastPolyVertex + 1;
        if (-1 < firstLineVertex) last1 = firstLineVertex;
        if (-1 < firstPointVertex) last1 = firstPointVertex;
        Arrays.fill(tessGeo.polyColors, firstPolyVertex, last1,
                    PGL.javaToNativeARGB(tintColor));
        root.setModifiedPolyColors(firstPolyVertex, last1 - 1);
      }
    }
  }
}
@Override
public void setTint(int index, int tint) {
  // Sets the tint color of a single vertex (textured shapes only).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setTint()");
    return;
  }
  if (image != null) {
    if (root.tessUpdate) {
      int tessIdx = firstPolyVertex + index;
      tessGeo.polyColors[tessIdx] = PGL.javaToNativeARGB(tint);
      root.setModifiedPolyColors(tessIdx, tessIdx);
    } else {
      inGeo.colors[index] = PGL.javaToNativeARGB(tint);
      markForTessellation();
    }
  }
}
@Override
public int getStroke(int index) {
  // Per-vertex stroke colors only exist on geometry shapes, not GROUPs.
  if (family == GROUP) return 0;
  return PGL.nativeToJavaARGB(inGeo.strokeColors[index]);
}
@Override
public void setStroke(boolean stroke) {
  // Toggles stroke; GROUP shapes recurse and record the flag themselves.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setStroke()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setStroke(stroke);
    }
    this.stroke = stroke;
  } else {
    setStrokeImpl(stroke);
  }
}
// Toggles stroke on a geometry shape. Enabling stroke re-stamps the
// current stroke color onto the input geometry and always triggers a
// re-tessellation (stroke geometry must be generated or dropped).
protected void setStrokeImpl(boolean stroke) {
  if (this.stroke != stroke) {
    if (stroke) {
      // Before there was no stroke, now there is stroke, so current stroke
      // color should be copied to the input geometry, and geometry should
      // be marked as modified in case it needs to be re-tessellated.
      int color = strokeColor;
      strokeColor += 1; // Forces a color change
      setStrokeImpl(color);
    }
    markForTessellation();
    if (is2D() && parent != null) {
      // In 2D, a stroked textured child affects how the parent batches.
      ((PShapeOpenGL)parent).strokedTexture(stroke && image != null);
    }
    this.stroke = stroke;
  }
}
@Override
public void setStroke(int stroke) {
  // Sets the stroke color on this shape (or recursively on all children
  // when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setStroke()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setStroke(stroke);
    }
  } else {
    setStrokeImpl(stroke);
  }
}
// Applies the stroke color to all input vertices and, when tessellated,
// directly to the line/point color buffers. In 2D, stroke geometry is
// folded into the poly buffer, hence the polyColors writes there.
protected void setStrokeImpl(int stroke) {
  if (strokeColor == stroke) return;
  strokeColor = stroke;
  Arrays.fill(inGeo.strokeColors, 0, inGeo.vertexCount,
              PGL.javaToNativeARGB(strokeColor));
  if (shapeCreated && tessellated && (hasLines || hasPoints)) {
    if (hasLines) {
      if (is3D()) {
        Arrays.fill(tessGeo.lineColors, firstLineVertex, lastLineVertex + 1,
                    PGL.javaToNativeARGB(strokeColor));
        root.setModifiedLineColors(firstLineVertex, lastLineVertex);
      } else if (is2D()) {
        Arrays.fill(tessGeo.polyColors, firstLineVertex, lastLineVertex + 1,
                    PGL.javaToNativeARGB(strokeColor));
        root.setModifiedPolyColors(firstLineVertex, lastLineVertex);
      }
    }
    if (hasPoints) {
      if (is3D()) {
        Arrays.fill(tessGeo.pointColors, firstPointVertex, lastPointVertex + 1,
                    PGL.javaToNativeARGB(strokeColor));
        root.setModifiedPointColors(firstPointVertex, lastPointVertex);
      } else if (is2D()) {
        Arrays.fill(tessGeo.polyColors, firstPointVertex, lastPointVertex + 1,
                    PGL.javaToNativeARGB(strokeColor));
        root.setModifiedPolyColors(firstPointVertex, lastPointVertex);
      }
    }
  }
}
@Override
public void setStroke(int index, int stroke) {
  // Sets the stroke color of a single vertex, writing either into the
  // tessellated buffers (during a tess edit) or the input geometry.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setStroke()");
    return;
  }
  if (root.tessUpdate) {
    if (hasLines) {
      if (is3D()) {
        int tessIdx = firstLineVertex + index;
        tessGeo.lineColors[tessIdx] = PGL.javaToNativeARGB(stroke);
        root.setModifiedLineColors(tessIdx, tessIdx);
      } else if (is2D()) {
        // 2D stroke geometry lives in the poly buffer.
        int tessIdx = firstLineVertex + index;
        tessGeo.polyColors[tessIdx] = PGL.javaToNativeARGB(stroke);
        root.setModifiedPolyColors(tessIdx, tessIdx);
      }
    }
    if (hasPoints) {
      if (is3D()) {
        int tessIdx = firstPointVertex + index;
        // BUG FIX: this previously wrote tessGeo.lineColors while marking
        // the POINT color range modified, so the point color never changed
        // (and an unrelated line color could be clobbered).
        tessGeo.pointColors[tessIdx] = PGL.javaToNativeARGB(stroke);
        root.setModifiedPointColors(tessIdx, tessIdx);
      } else if (is2D()) {
        int tessIdx = firstPointVertex + index;
        tessGeo.polyColors[tessIdx] = PGL.javaToNativeARGB(stroke);
        root.setModifiedPolyColors(tessIdx, tessIdx);
      }
    }
  } else {
    inGeo.strokeColors[index] = PGL.javaToNativeARGB(stroke);
    markForTessellation();
  }
}
@Override
public float getStrokeWeight(int index) {
  // Per-vertex stroke weights only exist on geometry shapes, not GROUPs.
  return family == GROUP ? 0 : inGeo.strokeWeights[index];
}
@Override
public void setStrokeWeight(float weight) {
  // Sets the stroke weight on this shape (or recursively on all
  // children when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setStrokeWeight()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setStrokeWeight(weight);
    }
  } else {
    setStrokeWeightImpl(weight);
  }
}
// Applies a new stroke weight. In 3D the tessellated line directions /
// point offsets can be rescaled in place; in 2D the stroke geometry is
// baked into the poly mesh, so a full re-tessellation is required.
protected void setStrokeWeightImpl(float weight) {
  if (PGraphicsOpenGL.same(strokeWeight, weight)) return;
  float oldWeight = strokeWeight;
  strokeWeight = weight;
  Arrays.fill(inGeo.strokeWeights, 0, inGeo.vertexCount, strokeWeight);
  if (shapeCreated && tessellated && (hasLines || hasPoints)) {
    // NOTE(review): if oldWeight were 0 this ratio is Inf/NaN — presumably
    // a zero weight never produces line/point geometry, but confirm.
    float resizeFactor = weight / oldWeight;
    if (hasLines) {
      if (is3D()) {
        for (int i = firstLineVertex; i <= lastLineVertex; i++) {
          // The weight is stored in the 4th component of the direction.
          tessGeo.lineDirections[4 * i + 3] *= resizeFactor;
        }
        root.setModifiedLineAttributes(firstLineVertex, lastLineVertex);
      } else if (is2D()) {
        // Changing the stroke weight on a 2D shape needs a
        // re-tessellation in order to replace the old line
        // geometry.
        markForTessellation();
      }
    }
    if (hasPoints) {
      if (is3D()) {
        for (int i = firstPointVertex; i <= lastPointVertex; i++) {
          tessGeo.pointOffsets[2 * i + 0] *= resizeFactor;
          tessGeo.pointOffsets[2 * i + 1] *= resizeFactor;
        }
        root.setModifiedPointAttributes(firstPointVertex, lastPointVertex);
      } else if (is2D()) {
        // Changing the stroke weight on a 2D shape needs a
        // re-tessellation in order to replace the old point
        // geometry.
        markForTessellation();
      }
    }
  }
}
@Override
public void setStrokeWeight(int index, float weight) {
  // Sets the stroke weight of a single input vertex and re-tessellates.
  // NOTE(review): unlike the other per-index setters this has no
  // root.tessUpdate fast path — confirm whether that is intentional.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setStrokeWeight()");
    return;
  }
  inGeo.strokeWeights[index] = weight;
  markForTessellation();
}
@Override
public void setStrokeJoin(int join) {
  // Sets the stroke join style; GROUPs recurse into children.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setStrokeJoin()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setStrokeJoin(join);
    }
  } else {
    if (is2D() && strokeJoin != join) {
      // Changing the stroke join on a 2D shape needs a
      // re-tessellation in order to replace the old join
      // geometry.
      markForTessellation();
    }
    strokeJoin = join;
  }
}
@Override
public void setStrokeCap(int cap) {
  // Sets the stroke cap style; GROUPs recurse into children.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setStrokeCap()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setStrokeCap(cap);
    }
  } else {
    if (is2D() && strokeCap != cap) {
      // Changing the stroke cap on a 2D shape needs a
      // re-tessellation in order to replace the old cap
      // geometry.
      markForTessellation();
    }
    strokeCap = cap;
  }
}
@Override
public int getAmbient(int index) {
  // Per-vertex ambient color; 0 for GROUP shapes.
  if (family != GROUP) {
    if (root.tessUpdate) {
      return PGL.nativeToJavaARGB(tessGeo.polyAmbient[firstPolyVertex + index]);
    } else {
      return PGL.nativeToJavaARGB(inGeo.ambient[index]);
    }
  } else {
    return 0;
  }
}
@Override
public void setAmbient(int ambient) {
  // Sets the ambient color on this shape (or recursively on all
  // children when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAmbient()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setAmbient(ambient);
    }
  } else {
    setAmbientImpl(ambient);
  }
}
// Applies the ambient color to all input vertices and, when tessellated,
// directly to the tessellated polyAmbient buffer (no re-tessellation).
// Marks setAmbient so setFillImpl() stops mirroring fill into ambient.
protected void setAmbientImpl(int ambient) {
  if (ambientColor == ambient) return;
  ambientColor = ambient;
  Arrays.fill(inGeo.ambient, 0, inGeo.vertexCount,
              PGL.javaToNativeARGB(ambientColor));
  if (shapeCreated && tessellated && hasPolys) {
    if (is3D()) {
      Arrays.fill(tessGeo.polyAmbient, firstPolyVertex, lastPolyVertex + 1,
                  PGL.javaToNativeARGB(ambientColor));
      root.setModifiedPolyAmbient(firstPolyVertex, lastPolyVertex);
    } else if (is2D()) {
      // In 2D, stop before the first stroke (line/point) vertex.
      int last1 = lastPolyVertex + 1;
      if (-1 < firstLineVertex) last1 = firstLineVertex;
      if (-1 < firstPointVertex) last1 = firstPointVertex;
      Arrays.fill(tessGeo.polyAmbient, firstPolyVertex, last1,
                  PGL.javaToNativeARGB(ambientColor));
      // BUG FIX: the 2D branch filled polyAmbient but flagged the poly
      // COLOR range as modified, so the ambient change was never uploaded.
      root.setModifiedPolyAmbient(firstPolyVertex, last1 - 1);
    }
  }
  setAmbient = true;
}
@Override
public void setAmbient(int index, int ambient) {
  // Sets the ambient color of a single vertex.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAmbient()");
    return;
  }
  if (root.tessUpdate) {
    int tessIdx = firstPolyVertex + index;
    tessGeo.polyAmbient[tessIdx] = PGL.javaToNativeARGB(ambient);
    root.setModifiedPolyAmbient(tessIdx, tessIdx);
  } else {
    inGeo.ambient[index] = PGL.javaToNativeARGB(ambient);
    markForTessellation();
  }
  // Ambient was set explicitly; stop mirroring it from the fill color.
  setAmbient = true;
}
@Override
public int getSpecular(int index) {
  // Per-vertex specular color; 0 for GROUP shapes.
  if (family != GROUP) {
    if (root.tessUpdate) {
      return PGL.nativeToJavaARGB(tessGeo.polySpecular[firstPolyVertex + index]);
    } else {
      return PGL.nativeToJavaARGB(inGeo.specular[index]);
    }
  } else {
    return 0;
  }
}
@Override
public void setSpecular(int specular) {
  // Sets the specular color on this shape (or recursively on all
  // children when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setSpecular()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setSpecular(specular);
    }
  } else {
    setSpecularImpl(specular);
  }
}
// Applies the specular color to all input vertices and, when
// tessellated, directly to the tessellated polySpecular buffer.
protected void setSpecularImpl(int specular) {
  if (specularColor == specular) return;
  specularColor = specular;
  Arrays.fill(inGeo.specular, 0, inGeo.vertexCount,
              PGL.javaToNativeARGB(specularColor));
  if (shapeCreated && tessellated && hasPolys) {
    if (is3D()) {
      Arrays.fill(tessGeo.polySpecular, firstPolyVertex, lastPolyVertex + 1,
                  PGL.javaToNativeARGB(specularColor));
      root.setModifiedPolySpecular(firstPolyVertex, lastPolyVertex);
    } else if (is2D()) {
      // In 2D, stop before the first stroke (line/point) vertex.
      int last1 = lastPolyVertex + 1;
      if (-1 < firstLineVertex) last1 = firstLineVertex;
      if (-1 < firstPointVertex) last1 = firstPointVertex;
      Arrays.fill(tessGeo.polySpecular, firstPolyVertex, last1,
                  PGL.javaToNativeARGB(specularColor));
      // BUG FIX: the 2D branch filled polySpecular but flagged the poly
      // COLOR range as modified, so the specular change was never uploaded.
      root.setModifiedPolySpecular(firstPolyVertex, last1 - 1);
    }
  }
}
@Override
public void setSpecular(int index, int specular) {
  // Sets the specular color of a single vertex.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setSpecular()");
    return;
  }
  if (root.tessUpdate) {
    int tessIdx = firstPolyVertex + index;
    tessGeo.polySpecular[tessIdx] = PGL.javaToNativeARGB(specular);
    root.setModifiedPolySpecular(tessIdx, tessIdx);
  } else {
    inGeo.specular[index] = PGL.javaToNativeARGB(specular);
    markForTessellation();
  }
}
@Override
public int getEmissive(int index) {
  // Per-vertex emissive color; 0 for GROUP shapes.
  if (family != GROUP) {
    if (root.tessUpdate) {
      return PGL.nativeToJavaARGB(tessGeo.polyEmissive[firstPolyVertex + index]);
    } else {
      return PGL.nativeToJavaARGB(inGeo.emissive[index]);
    }
  } else {
    return 0;
  }
}
@Override
public void setEmissive(int emissive) {
  // Sets the emissive color on this shape (or recursively on all
  // children when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setEmissive()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setEmissive(emissive);
    }
  } else {
    setEmissiveImpl(emissive);
  }
}
// Applies the emissive color to all input vertices and, when
// tessellated, directly to the tessellated polyEmissive buffer.
// NOTE(review): guards on 0 < tessGeo.polyVertexCount where the sibling
// impls use hasPolys — kept as-is, but confirm they are equivalent here.
protected void setEmissiveImpl(int emissive) {
  if (emissiveColor == emissive) return;
  emissiveColor = emissive;
  Arrays.fill(inGeo.emissive, 0, inGeo.vertexCount,
              PGL.javaToNativeARGB(emissiveColor));
  if (shapeCreated && tessellated && 0 < tessGeo.polyVertexCount) {
    if (is3D()) {
      Arrays.fill(tessGeo.polyEmissive, firstPolyVertex, lastPolyVertex + 1,
                  PGL.javaToNativeARGB(emissiveColor));
      root.setModifiedPolyEmissive(firstPolyVertex, lastPolyVertex);
    } else if (is2D()) {
      // In 2D, stop before the first stroke (line/point) vertex.
      int last1 = lastPolyVertex + 1;
      if (-1 < firstLineVertex) last1 = firstLineVertex;
      if (-1 < firstPointVertex) last1 = firstPointVertex;
      Arrays.fill(tessGeo.polyEmissive, firstPolyVertex, last1,
                  PGL.javaToNativeARGB(emissiveColor));
      // BUG FIX: the 2D branch filled polyEmissive but flagged the poly
      // COLOR range as modified, so the emissive change was never uploaded.
      root.setModifiedPolyEmissive(firstPolyVertex, last1 - 1);
    }
  }
}
@Override
public void setEmissive(int index, int emissive) {
  // Sets the emissive color of a single vertex.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setEmissive()");
    return;
  }
  if (root.tessUpdate) {
    int tessIdx = firstPolyVertex + index;
    tessGeo.polyEmissive[tessIdx] = PGL.javaToNativeARGB(emissive);
    root.setModifiedPolyEmissive(tessIdx, tessIdx);
  } else {
    inGeo.emissive[index] = PGL.javaToNativeARGB(emissive);
    markForTessellation();
  }
}
@Override
public float getShininess(int index) {
  // Per-vertex shininess; 0 for GROUP shapes.
  if (family != GROUP) {
    if (root.tessUpdate) {
      return tessGeo.polyShininess[firstPolyVertex + index];
    } else {
      return inGeo.shininess[index];
    }
  } else {
    return 0;
  }
}
@Override
public void setShininess(float shininess) {
  // Sets the shininess on this shape (or recursively on all children
  // when this is a GROUP).
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setShininess()");
    return;
  }
  if (family == GROUP) {
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.setShininess(shininess);
    }
  } else {
    setShininessImpl(shininess);
  }
}
// Applies the shininess to all input vertices and, when tessellated,
// directly to the tessellated polyShininess buffer.
protected void setShininessImpl(float shininess) {
  if (PGraphicsOpenGL.same(this.shininess, shininess)) return;
  this.shininess = shininess;
  Arrays.fill(inGeo.shininess, 0, inGeo.vertexCount, shininess);
  if (shapeCreated && tessellated && hasPolys) {
    if (is3D()) {
      Arrays.fill(tessGeo.polyShininess, firstPolyVertex, lastPolyVertex + 1,
                  shininess);
      root.setModifiedPolyShininess(firstPolyVertex, lastPolyVertex);
    } else if (is2D()) {
      // In 2D, stop before the first stroke (line/point) vertex.
      int last1 = lastPolyVertex + 1;
      if (-1 < firstLineVertex) last1 = firstLineVertex;
      if (-1 < firstPointVertex) last1 = firstPointVertex;
      Arrays.fill(tessGeo.polyShininess, firstPolyVertex, last1, shininess);
      // BUG FIX: the 2D branch filled polyShininess but flagged the poly
      // COLOR range as modified, so the shininess change was never uploaded.
      root.setModifiedPolyShininess(firstPolyVertex, last1 - 1);
    }
  }
}
@Override
public void setShininess(int index, float shine) {
  // Sets the shininess of a single vertex.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setShininess()");
    return;
  }
  if (root.tessUpdate) {
    int tessIdx = firstPolyVertex + index;
    // BUG FIX: previously wrote the shape-level `shininess` field instead
    // of the `shine` parameter, so per-vertex updates during a tess edit
    // silently reapplied the old shape-wide value.
    tessGeo.polyShininess[tessIdx] = shine;
    root.setModifiedPolyShininess(tessIdx, tessIdx);
  } else {
    inGeo.shininess[index] = shine;
    markForTessellation();
  }
}
///////////////////////////////////////////////////////////
//
// Attribute getters and setters
  /**
   * Reads the (x, y, z) values of the named positional attribute at the given
   * vertex. In tessellation-update mode the value comes from the tessellated
   * arrays, which pack 4 floats per vertex; otherwise from the input geometry,
   * which packs 3 floats per vertex.
   *
   * @param vec output vector; a new PVector is allocated when null
   * @throws RuntimeException if no attribute with that name exists
   */
  public PVector getAttribPosition(String name, int index, PVector vec) {
    VertexAttribute attrib = polyAttribs.get(name);
    if (attrib == null)
      throw new RuntimeException("Trying to get values of non existing attribute");
    if (vec == null) vec = new PVector();
    if (root.tessUpdate) {
      float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      vec.x = tessAttrib[4 * tessIdx + 0];
      vec.y = tessAttrib[4 * tessIdx + 1];
      vec.z = tessAttrib[4 * tessIdx + 2];
    } else {
      float[] array = inGeo.fattribs.get(name);
      vec.x = array[3 * index + 0];
      vec.y = array[3 * index + 1];
      vec.z = array[3 * index + 2];
    }
    return vec;
  }
public float getAttribPositionX(String name, int index) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
return tessAttrib[4 * (firstPolyVertex + index) + 0];
} else {
float[] array = inGeo.fattribs.get(name);
return array[3 * index + 0];
}
}
public float getAttribPositionY(String name, int index) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
return tessAttrib[4 * (firstPolyVertex + index) + 1];
} else {
float[] array = inGeo.fattribs.get(name);
return array[3 * index + 1];
}
}
public float getAttribPositionZ(String name, int index) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
return tessAttrib[4 * (firstPolyVertex + index) + 2];
} else {
float[] array = inGeo.fattribs.get(name);
return array[3 * index + 2];
}
}
  /**
   * Reads the (x, y, z) values of the named normal attribute at the given
   * vertex. Normal attributes pack 3 floats per vertex in both the
   * tessellated arrays and the input geometry.
   *
   * @param vec output vector; a new PVector is allocated when null
   * @throws RuntimeException if no attribute with that name exists
   */
  public PVector getAttribNormal(String name, int index, PVector vec) {
    VertexAttribute attrib = polyAttribs.get(name);
    if (attrib == null)
      throw new RuntimeException("Trying to get values of non existing attribute");
    if (vec == null) vec = new PVector();
    if (root.tessUpdate) {
      float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      vec.x = tessAttrib[3 * tessIdx + 0];
      vec.y = tessAttrib[3 * tessIdx + 1];
      vec.z = tessAttrib[3 * tessIdx + 2];
    } else {
      float[] array = inGeo.fattribs.get(name);
      vec.x = array[3 * index + 0];
      vec.y = array[3 * index + 1];
      vec.z = array[3 * index + 2];
    }
    return vec;
  }
public float getAttribNormalX(String name, int index) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
return tessAttrib[3 * (firstPolyVertex + index) + 0];
} else {
float[] array = inGeo.fattribs.get(name);
return array[3 * index + 0];
}
}
public float getAttribNormalY(String name, int index) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
return tessAttrib[3 * (firstPolyVertex + index) + 1];
} else {
float[] array = inGeo.fattribs.get(name);
return array[3 * index + 1];
}
}
public float getAttribNormalZ(String name, int index) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
return tessAttrib[3 * (firstPolyVertex + index) + 2];
} else {
float[] array = inGeo.fattribs.get(name);
return array[3 * index + 2];
}
}
public int getAttribColor(String name, int index) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
int[] tessAttrib = tessGeo.ipolyAttribs.get(name);
int color = tessAttrib[firstPolyVertex + index];
return PGL.nativeToJavaARGB(color);
} else {
int[] array = inGeo.iattribs.get(name);
return PGL.nativeToJavaARGB(array[index]);
}
}
  /**
   * Copies the float components of the named attribute at the given vertex
   * into values, allocating a new array when values is null or too short.
   * In tessellation-update mode the per-vertex stride is attrib.tessSize,
   * otherwise attrib.size.
   *
   * @return the filled (possibly newly allocated) array
   * @throws RuntimeException if no attribute with that name exists
   */
  public float[] getAttrib(String name, int index, float[] values) {
    VertexAttribute attrib = polyAttribs.get(name);
    if (attrib == null)
      throw new RuntimeException("Trying to get values of non existing attribute");
    if (root.tessUpdate) {
      if (values == null || values.length < attrib.tessSize) values = new float[attrib.tessSize];
      float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      PApplet.arrayCopy(tessAttrib, attrib.tessSize * tessIdx, values, 0, attrib.tessSize);
    } else {
      if (values == null || values.length < attrib.size) values = new float[attrib.size];
      float[] array = inGeo.fattribs.get(name);
      PApplet.arrayCopy(array, attrib.size * index, values, 0, attrib.size);
    }
    return values;
  }
  /**
   * Copies the int components of the named attribute at the given vertex
   * into values, allocating a new array when values is null or too short.
   * In tessellation-update mode the per-vertex stride is attrib.tessSize,
   * otherwise attrib.size.
   *
   * @return the filled (possibly newly allocated) array
   * @throws RuntimeException if no attribute with that name exists
   */
  public int[] getAttrib(String name, int index, int[] values) {
    VertexAttribute attrib = polyAttribs.get(name);
    if (attrib == null)
      throw new RuntimeException("Trying to get values of non existing attribute");
    if (root.tessUpdate) {
      if (values == null || values.length < attrib.tessSize) values = new int[attrib.tessSize];
      int[] tessAttrib = tessGeo.ipolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      PApplet.arrayCopy(tessAttrib, attrib.tessSize * tessIdx, values, 0, attrib.tessSize);
    } else {
      if (values == null || values.length < attrib.size) values = new int[attrib.size];
      int[] array = inGeo.iattribs.get(name);
      PApplet.arrayCopy(array, attrib.size * index, values, 0, attrib.size);
    }
    return values;
  }
public boolean[] getAttrib(String name, int index, boolean[] values) {
VertexAttribute attrib = polyAttribs.get(name);
if (attrib == null)
throw new RuntimeException("Trying to get values of non existing attribute");
if (root.tessUpdate) {
if (values == null || values.length < attrib.tessSize) values = new boolean[attrib.tessSize];
byte[] tessAttrib = tessGeo.bpolyAttribs.get(name);
int tessIdx = firstPolyVertex + index;
for (int i = 0; i < attrib.tessSize; i++) {
values[i] = (tessAttrib[tessIdx + i]!=0);
}
} else {
if (values == null || values.length < attrib.size) values = new boolean[attrib.size];
byte[] array = inGeo.battribs.get(name);
for (int i = 0; i < attrib.size; i++) {
values[i] = (array[attrib.size * index + i]!=0);
}
}
return values;
}
  /**
   * Sets the (x, y, z) values of the named positional attribute for one
   * vertex, creating the attribute if needed. In tessellation-update mode
   * the tessellated arrays (4 floats per vertex) are patched directly;
   * otherwise the input geometry (3 floats per vertex) is updated and the
   * shape is marked for re-tessellation.
   *
   * @throws RuntimeException if the attribute could not be resolved
   */
  public void setAttribPosition(String name, int index, float x, float y, float z) {
    if (openShape) {
      PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAttribPosition()");
      return;
    }
    VertexAttribute attrib = attribImpl(name, VertexAttribute.POSITION, PGL.FLOAT, 3);
    if (attrib == null)
      throw new RuntimeException("Trying to set values of non existing attribute");
    if (root.tessUpdate) {
      float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      tessAttrib[4 * tessIdx + 0] = x;
      tessAttrib[4 * tessIdx + 1] = y;
      tessAttrib[4 * tessIdx + 2] = z;
    } else {
      float[] array = inGeo.fattribs.get(name);
      array[3 * index + 0] = x;
      array[3 * index + 1] = y;
      array[3 * index + 2] = z;
      markForTessellation();
    }
  }
  /**
   * Sets the (nx, ny, nz) values of the named normal attribute for one
   * vertex, creating the attribute if needed. Normals pack 3 floats per
   * vertex in both the tessellated and input arrays. Outside tessellation
   * update mode, the shape is marked for re-tessellation.
   *
   * @throws RuntimeException if the attribute could not be resolved
   */
  public void setAttribNormal(String name, int index, float nx, float ny, float nz) {
    if (openShape) {
      PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAttribNormal()");
      return;
    }
    VertexAttribute attrib = attribImpl(name, VertexAttribute.NORMAL, PGL.FLOAT, 3);
    if (attrib == null)
      throw new RuntimeException("Trying to set values of non existing attribute");
    if (root.tessUpdate) {
      float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      tessAttrib[3 * tessIdx + 0] = nx;
      tessAttrib[3 * tessIdx + 1] = ny;
      tessAttrib[3 * tessIdx + 2] = nz;
    } else {
      float[] array = inGeo.fattribs.get(name);
      array[3 * index + 0] = nx;
      array[3 * index + 1] = ny;
      array[3 * index + 2] = nz;
      markForTessellation();
    }
  }
public void setAttribColor(String name, int index, int color) {
if (openShape) {
PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAttribColor()");
return;
}
VertexAttribute attrib = attribImpl(name, VertexAttribute.COLOR, PGL.INT, 1);
if (attrib == null)
throw new RuntimeException("Trying to set values of non existing attribute");
if (root.tessUpdate) {
int[] tessAttrib = tessGeo.ipolyAttribs.get(name);
tessAttrib[firstPolyVertex + index] = PGL.javaToNativeARGB(color);
} else {
float[] array = inGeo.fattribs.get(name);
array[index] = PGL.javaToNativeARGB(color);
markForTessellation();
}
}
  /**
   * Sets the float components of a generic (OTHER) attribute for one vertex.
   * The length of values must match the attribute's per-vertex stride:
   * attrib.tessSize in tessellation-update mode, attrib.size otherwise.
   *
   * @throws RuntimeException if the attribute could not be resolved or the
   *         values length does not match the attribute stride
   */
  public void setAttrib(String name, int index, float... values) {
    if (openShape) {
      PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAttrib()");
      return;
    }
    VertexAttribute attrib = attribImpl(name, VertexAttribute.OTHER, PGL.FLOAT, values.length);
    if (attrib == null)
      throw new RuntimeException("Trying to set values of non existing attribute");
    if (root.tessUpdate) {
      if (attrib.tessSize != values.length)
        throw new RuntimeException("Length of values array is different from attribute tesselated size");
      float[] tessAttrib = tessGeo.fpolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      PApplet.arrayCopy(values, 0, tessAttrib, attrib.tessSize * tessIdx, attrib.tessSize);
    } else {
      if (attrib.size != values.length)
        throw new RuntimeException("Length of values array is different from attribute size");
      float[] array = inGeo.fattribs.get(name);
      PApplet.arrayCopy(values, 0, array, attrib.size * index, attrib.size);
      markForTessellation();
    }
  }
  /**
   * Sets the int components of a generic (OTHER) attribute for one vertex.
   * The length of values must match the attribute's per-vertex stride:
   * attrib.tessSize in tessellation-update mode, attrib.size otherwise.
   *
   * @throws RuntimeException if the attribute could not be resolved or the
   *         values length does not match the attribute stride
   */
  public void setAttrib(String name, int index, int... values) {
    if (openShape) {
      PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAttrib()");
      return;
    }
    VertexAttribute attrib = attribImpl(name, VertexAttribute.OTHER, PGL.INT, values.length);
    if (attrib == null)
      throw new RuntimeException("Trying to set values of non existing attribute");
    if (root.tessUpdate) {
      if (attrib.tessSize != values.length)
        throw new RuntimeException("Length of values array is different from attribute tesselated size");
      int[] tessAttrib = tessGeo.ipolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      PApplet.arrayCopy(values, 0, tessAttrib, attrib.tessSize * tessIdx, attrib.tessSize);
    } else {
      if (attrib.size != values.length)
        throw new RuntimeException("Length of values array is different from attribute size");
      int[] array = inGeo.iattribs.get(name);
      PApplet.arrayCopy(values, 0, array, attrib.size * index, attrib.size);
      markForTessellation();
    }
  }
  /**
   * Sets the boolean components of a generic (OTHER) attribute for one
   * vertex, storing each flag as a 0/1 byte. The length of values must match
   * the attribute's per-vertex stride: attrib.tessSize in tessellation-update
   * mode, attrib.size otherwise.
   *
   * @throws RuntimeException if the attribute could not be resolved or the
   *         values length does not match the attribute stride
   */
  public void setAttrib(String name, int index, boolean... values) {
    if (openShape) {
      PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "setAttrib()");
      return;
    }
    VertexAttribute attrib = attribImpl(name, VertexAttribute.OTHER, PGL.BOOL, values.length);
    if (attrib == null)
      throw new RuntimeException("Trying to set values of non existing attribute");
    if (root.tessUpdate) {
      if (attrib.tessSize != values.length)
        throw new RuntimeException("Length of values array is different from attribute tesselated size");
      byte[] tessAttrib = tessGeo.bpolyAttribs.get(name);
      int tessIdx = firstPolyVertex + index;
      for (int i = 0; i < attrib.tessSize; i++) {
        tessAttrib[attrib.tessSize * tessIdx + i] = (byte)(values[i]?1:0);
      }
    } else {
      if (attrib.size != values.length)
        throw new RuntimeException("Length of values array is different from attribute size");
      byte[] array = inGeo.battribs.get(name);
      for (int i = 0; i < attrib.size; i++) {
        array[attrib.size * index + i] = (byte)(values[i]?1:0);
      }
      markForTessellation();
    }
  }
///////////////////////////////////////////////////////////
//
// Vertex codes
@Override
public int[] getVertexCodes() {
if (family == GROUP) return null;
else {
if (family == PRIMITIVE || family == PATH) {
// the input geometry of primitive and path shapes is built during
// tessellation
updateTessellation();
}
if (inGeo.codes == null) return null;
return inGeo.codes;
}
}
@Override
public int getVertexCodeCount() {
if (family == GROUP) return 0;
else {
if (family == PRIMITIVE || family == PATH) {
// the input geometry of primitive and path shapes is built during
// tessellation
updateTessellation();
}
return inGeo.codeCount;
}
}
  /**
   * Returns the code of the vertex at the given index.
   * One of VERTEX, BEZIER_VERTEX, CURVE_VERTEX, or BREAK.
   */
  @Override
  public int getVertexCode(int index) {
    return inGeo.codes[index];
  }
///////////////////////////////////////////////////////////
//
// Tessellated geometry getter.
  /**
   * Builds and returns a new PShape that mirrors the tessellated geometry of
   * this shape: fill triangles, stroke lines, and stroke points become up to
   * three GEOMETRY child shapes (wrapped in a GROUP when more than one kind
   * is present). Colors are converted from native order back to Java ARGB.
   * Returns an empty GEOMETRY shape when there is no tessellated geometry.
   */
  @Override
  public PShape getTessellation() {
    updateTessellation();
    PShape polyTess = null;
    PShape lineTess = null;
    PShape pointTess = null;
    float[] vertices;
    float[] attribs;
    int[] color;
    float[] uv;
    short[] indices;
    // Fill geometry: walk the poly index cache and re-emit each triangle.
    if (0 < tessGeo.polyVertexCount) {
      polyTess = pg.createShapeFamily(PShape.GEOMETRY);
      polyTess.set3D(is3D);  // if this is a 3D shape, make the new shape 3D as well
      polyTess.beginShape(TRIANGLES);
      polyTess.noStroke();
      vertices = tessGeo.polyVertices;
      attribs = tessGeo.polyNormals;
      color = tessGeo.polyColors;
      uv = tessGeo.polyTexCoords;
      indices = tessGeo.polyIndices;
      IndexCache cache = tessGeo.polyIndexCache;
      for (int n = firstPolyIndexCache; n <= lastPolyIndexCache; n++) {
        int ioffset = cache.indexOffset[n];
        int icount = cache.indexCount[n];
        int voffset = cache.vertexOffset[n];
        // Indices are relative to the cache entry's vertex offset.
        for (int tr = ioffset / 3; tr < (ioffset + icount) / 3; tr++) {
          int i0 = voffset + indices[3 * tr + 0];
          int i1 = voffset + indices[3 * tr + 1];
          int i2 = voffset + indices[3 * tr + 2];
          if (is3D()) {
            // Positions pack 4 floats per vertex, normals 3, texcoords 2.
            float x0 = vertices[4 * i0 + 0];
            float y0 = vertices[4 * i0 + 1];
            float z0 = vertices[4 * i0 + 2];
            float x1 = vertices[4 * i1 + 0];
            float y1 = vertices[4 * i1 + 1];
            float z1 = vertices[4 * i1 + 2];
            float x2 = vertices[4 * i2 + 0];
            float y2 = vertices[4 * i2 + 1];
            float z2 = vertices[4 * i2 + 2];
            float nx0 = attribs[3 * i0 + 0];
            float ny0 = attribs[3 * i0 + 1];
            float nz0 = attribs[3 * i0 + 2];
            float nx1 = attribs[3 * i1 + 0];
            float ny1 = attribs[3 * i1 + 1];
            float nz1 = attribs[3 * i1 + 2];
            float nx2 = attribs[3 * i2 + 0];
            float ny2 = attribs[3 * i2 + 1];
            float nz2 = attribs[3 * i2 + 2];
            int argb0 = PGL.nativeToJavaARGB(color[i0]);
            int argb1 = PGL.nativeToJavaARGB(color[i1]);
            int argb2 = PGL.nativeToJavaARGB(color[i2]);
            polyTess.fill(argb0);
            polyTess.normal(nx0, ny0, nz0);
            polyTess.vertex(x0, y0, z0, uv[2 * i0 + 0], uv[2 * i0 + 1]);
            polyTess.fill(argb1);
            polyTess.normal(nx1, ny1, nz1);
            polyTess.vertex(x1, y1, z1, uv[2 * i1 + 0], uv[2 * i1 + 1]);
            polyTess.fill(argb2);
            polyTess.normal(nx2, ny2, nz2);
            polyTess.vertex(x2, y2, z2, uv[2 * i2 + 0], uv[2 * i2 + 1]);
          } else if (is2D()) {
            float x0 = vertices[4 * i0 + 0], y0 = vertices[4 * i0 + 1];
            float x1 = vertices[4 * i1 + 0], y1 = vertices[4 * i1 + 1];
            float x2 = vertices[4 * i2 + 0], y2 = vertices[4 * i2 + 1];
            int argb0 = PGL.nativeToJavaARGB(color[i0]);
            int argb1 = PGL.nativeToJavaARGB(color[i1]);
            int argb2 = PGL.nativeToJavaARGB(color[i2]);
            polyTess.fill(argb0);
            polyTess.vertex(x0, y0, uv[2 * i0 + 0], uv[2 * i0 + 1]);
            polyTess.fill(argb1);
            polyTess.vertex(x1, y1, uv[2 * i1 + 0], uv[2 * i1 + 1]);
            polyTess.fill(argb2);
            polyTess.vertex(x2, y2, uv[2 * i2 + 0], uv[2 * i2 + 1]);
          }
        }
      }
      polyTess.endShape();
    }
    // Stroke-line geometry: each line uses 6 indices; endpoints sit at
    // positions 0 and 5 of the group.
    if (0 < tessGeo.lineVertexCount) {
      lineTess = pg.createShapeFamily(PShape.GEOMETRY);
      lineTess.set3D(is3D);  // if this is a 3D shape, make the new shape 3D as well
      lineTess.beginShape(LINES);
      lineTess.noFill();
      vertices = tessGeo.lineVertices;
      attribs = tessGeo.lineDirections;
      color = tessGeo.lineColors;
      indices = tessGeo.lineIndices;
      IndexCache cache = tessGeo.lineIndexCache;
      for (int n = firstLineIndexCache; n <= lastLineIndexCache; n++) {
        int ioffset = cache.indexOffset[n];
        int icount = cache.indexCount[n];
        int voffset = cache.vertexOffset[n];
        for (int ln = ioffset / 6; ln < (ioffset + icount) / 6; ln++) {
          // Same as in rawLines()
          int i0 = voffset + indices[6 * ln + 0];
          int i1 = voffset + indices[6 * ln + 5];
          float sw0 = 2 * attribs[4 * i0 + 3];
          float sw1 = 2 * attribs[4 * i1 + 3];
          if (PGraphicsOpenGL.zero(sw0)) continue;  // skip zero-weight strokes
          float[] pt0 = {0, 0, 0, 0};
          float[] pt1 = {0, 0, 0, 0};
          int argb0 = PGL.nativeToJavaARGB(color[i0]);
          int argb1 = PGL.nativeToJavaARGB(color[i1]);
          PApplet.arrayCopy(vertices, 4 * i0, pt0, 0, 4);
          PApplet.arrayCopy(vertices, 4 * i1, pt1, 0, 4);
          lineTess.strokeWeight(sw0);
          lineTess.stroke(argb0);
          lineTess.vertex(pt0[X], pt0[Y], pt0[Z]);
          lineTess.strokeWeight(sw1);
          lineTess.stroke(argb1);
          lineTess.vertex(pt1[X], pt1[Y], pt1[Z]);
        }
      }
      lineTess.endShape();
    }
    // Stroke-point geometry: each point expands into a small fan whose size
    // (perim) depends on the stroke weight; positive size means round caps.
    if (0 < tessGeo.pointVertexCount) {
      pointTess = pg.createShapeFamily(PShape.GEOMETRY);
      pointTess.set3D(is3D);  // if this is a 3D shape, make the new shape 3D as well
      pointTess.beginShape(POINTS);
      pointTess.noFill();
      vertices = tessGeo.pointVertices;
      attribs = tessGeo.pointOffsets;
      color = tessGeo.pointColors;
      indices = tessGeo.pointIndices;
      IndexCache cache = tessGeo.pointIndexCache;
      for (int n = 0; n < cache.size; n++) {
        int ioffset = cache.indexOffset[n];
        int icount = cache.indexCount[n];
        int voffset = cache.vertexOffset[n];
        int pt = ioffset;
        while (pt < (ioffset + icount) / 3) {
          float size = attribs[2 * pt + 2];
          float weight;
          int perim;
          if (0 < size) { // round point
            weight = +size / 0.5f;
            perim = PApplet.min(PGraphicsOpenGL.MAX_POINT_ACCURACY,
                                PApplet.max(PGraphicsOpenGL.MIN_POINT_ACCURACY,
                                            (int) (TWO_PI * weight /
                                            PGraphicsOpenGL.POINT_ACCURACY_FACTOR))) + 1;
          } else { // Square point
            weight = -size / 0.5f;
            perim = 5;
          }
          int i0 = voffset + indices[3 * pt];
          int argb0 = PGL.nativeToJavaARGB(color[i0]);
          float[] pt0 = {0, 0, 0, 0};
          PApplet.arrayCopy(vertices, 4 * i0, pt0, 0, 4);
          pointTess.strokeWeight(weight);
          pointTess.stroke(argb0);
          pointTess.vertex(pt0[X], pt0[Y], pt0[Z]);
          pt += perim;  // jump over the vertices belonging to this point
        }
      }
      pointTess.endShape();
    }
    // Wrap results: single kind returns directly, multiple kinds are grouped.
    if (polyTess == null && lineTess == null && pointTess == null) {
      return pg.createShapeFamily(PShape.GEOMETRY);
    } else if (polyTess != null && lineTess == null && pointTess == null) {
      return polyTess;
    } else if (polyTess == null && lineTess != null && pointTess == null) {
      return lineTess;
    } else if (polyTess == null && lineTess == null && pointTess != null) {
      return pointTess;
    } else {
      PShape group = pg.createShape(GROUP);
      if (polyTess != null) group.addChild(polyTess);
      if (lineTess != null) group.addChild(lineTess);
      if (pointTess != null) group.addChild(pointTess);
      return group;
    }
  }
///////////////////////////////////////////////////////////
//
// Tessellation update mode
  /**
   * Enters tessellation-update mode for the given kind of geometry
   * (TRIANGLES, LINES, or POINTS), so tessellated vertex data can be edited
   * directly and streamed back to the GPU. For 2D shapes everything is
   * tessellated as triangles, so the kind is forced to TRIANGLES. Creates
   * any missing vertex buffers and (re)initializes their backing stores.
   * Requires buffer-object streaming support from the renderer.
   *
   * @throws IllegalArgumentException for any other kind value
   * @throws RuntimeException when buffer object streaming is unavailable
   */
  @Override
  public void beginTessellation(int kind) {
    if (kind != TRIANGLES && kind != LINES && kind != POINTS) {
      throw new IllegalArgumentException("The only valid kinds of geometry for tessellation update are TRIANGLES, LINES, or POINTS.");
    }
    if (!root.tessUpdate) {
      updateTessellation();
      if (!tessGeo.bufObjStreaming) {
        throw new RuntimeException("Buffer object streaming is not available in the OpenGL renderer, so tessellation update cannot be used.");
      }
      root.tessUpdate = true;
      root.tessKind = is2D() ? TRIANGLES : kind;
      boolean createBuffer;
      if (root.tessKind == TRIANGLES && hasPolys) {
        // One VBO per poly vertex attribute: position, color, normal,
        // texcoord, material channels, plus any custom attributes. Each
        // buffer is bound before its backing store is initialized.
        createBuffer = bufPolyVertex == null;
        if (createBuffer) bufPolyVertex = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyVertex.glId);
        tessGeo.initPolyVerticesBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        root.selVertices = tessGeo.polyVertices;
        createBuffer = bufPolyColor == null;
        if (createBuffer) bufPolyColor = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyColor.glId);
        tessGeo.initPolyColorsBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufPolyNormal == null;
        if (createBuffer) bufPolyNormal = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 3, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyNormal.glId);
        tessGeo.initPolyNormalsBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufPolyTexCoord == null;
        if (createBuffer) bufPolyTexCoord = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 3, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyTexCoord.glId);
        tessGeo.initPolyTexCoordsBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufPolyAmbient == null;
        if (createBuffer) bufPolyAmbient = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyAmbient.glId);
        tessGeo.initPolyAmbientBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufPolySpecular == null;
        if (createBuffer) bufPolySpecular = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolySpecular.glId);
        tessGeo.initPolySpecularBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufPolyEmissive == null;
        if (createBuffer) bufPolyEmissive = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyEmissive.glId);
        tessGeo.initPolyEmissiveBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufPolyShininess == null;
        if (createBuffer) bufPolyShininess = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyShininess.glId);
        tessGeo.initPolyShininessBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        for (String name: polyAttribs.keySet()) {
          VertexAttribute attrib = polyAttribs.get(name);
          createBuffer = !attrib.bufferCreated();
          if (createBuffer) attrib.createBuffer(pgl);
          pgl.bindBuffer(PGL.ARRAY_BUFFER, attrib.buf.glId);
          tessGeo.initPolyAttribsBuffer(attrib, !createBuffer, false, PGL.bufferUsageRetained);
        }
      } else if (root.tessKind == LINES && hasLines) {
        createBuffer = bufLineVertex == null;
        if (createBuffer) bufLineVertex = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineVertex.glId);
        tessGeo.initLineVerticesBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        root.selVertices = tessGeo.lineVertices;
        createBuffer = bufLineColor == null;
        if (createBuffer) bufLineColor = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineColor.glId);
        tessGeo.initLineColorsBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufLineAttrib == null;
        if (createBuffer) bufLineAttrib = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineAttrib.glId);
        tessGeo.initLineDirectionsBuffer(!createBuffer, false, PGL.bufferUsageRetained);
      } else if (root.tessKind == POINTS && hasPoints) {
        createBuffer = bufPointVertex == null;
        if (createBuffer) bufPointVertex = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointVertex.glId);
        tessGeo.initPointVerticesBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        root.selVertices = tessGeo.pointVertices;
        createBuffer = bufPointColor == null;
        if (createBuffer) bufPointColor = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointColor.glId);
        tessGeo.initPointColorsBuffer(!createBuffer, false, PGL.bufferUsageRetained);
        createBuffer = bufPointAttrib == null;
        if (createBuffer) bufPointAttrib = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 2, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointAttrib.glId);
        tessGeo.initPointOffsetsBuffer(!createBuffer, false, PGL.bufferUsageRetained);
      }
      // Unbind so later GL calls don't accidentally target the last buffer.
      pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
    }
  }
  /**
   * Leaves tessellation-update mode. For the kind of geometry being edited,
   * each VBO is bound and the modified vertex range accumulated during the
   * edit session is uploaded to the GPU. Clears the update flags so drawing
   * does not trigger a fresh tessellation.
   */
  @Override
  public void endTessellation() {
    if (root.tessUpdate) {
      if (root.tessKind == TRIANGLES && hasPolys) {
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyVertex.glId);
        tessGeo.finalPolyVerticesBuffer(firstModifiedPolyVertex, lastModifiedPolyVertex);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyColor.glId);
        tessGeo.finalPolyColorsBuffer(firstModifiedPolyColor, lastModifiedPolyColor);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyNormal.glId);
        tessGeo.finalPolyNormalsBuffer(firstModifiedPolyNormal, lastModifiedPolyNormal);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyTexCoord.glId);
        tessGeo.finalPolyTexCoordsBuffer(firstModifiedPolyTexCoord, lastModifiedPolyTexCoord);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyAmbient.glId);
        tessGeo.finalPolyAmbientBuffer(firstModifiedPolyAmbient, lastModifiedPolyAmbient);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolySpecular.glId);
        tessGeo.finalPolySpecularBuffer(firstModifiedPolySpecular, lastModifiedPolySpecular);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyEmissive.glId);
        tessGeo.finalPolyEmissiveBuffer(firstModifiedPolyEmissive, lastModifiedPolyEmissive);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyShininess.glId);
        tessGeo.finalPolyShininessBuffer(firstModifiedPolyShininess, lastModifiedPolyShininess);
        for (String name: polyAttribs.keySet()) {
          VertexAttribute attrib = polyAttribs.get(name);
          pgl.bindBuffer(PGL.ARRAY_BUFFER, attrib.buf.glId);
          tessGeo.finalPolyAttribsBuffer(attrib, attrib.firstModified, attrib.lastModified);
        }
      } else if (root.tessKind == LINES && hasLines) {
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineVertex.glId);
        tessGeo.finalLineVerticesBuffer(firstModifiedLineVertex, lastModifiedLineVertex);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineColor.glId);
        tessGeo.finalLineColorsBuffer(firstModifiedLineColor, lastModifiedLineColor);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineAttrib.glId);
        tessGeo.finalLineDirectionsBuffer(firstModifiedLineAttribute, lastModifiedLineAttribute);
      } else if (root.tessKind == POINTS && hasPoints) {
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointVertex.glId);
        tessGeo.finalPointVerticesBuffer(firstModifiedPointVertex, lastModifiedPointVertex);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointColor.glId);
        tessGeo.finalPointColorsBuffer(firstModifiedPointColor, lastModifiedPointColor);
        pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointAttrib.glId);
        tessGeo.finalPointOffsetsBuffer(firstModifiedPointAttribute, lastModifiedPointAttribute);
      }
      root.selVertices = null;
      root.tessUpdate = false;
      // To avoid triggering a new tessellation in the draw method.
      root.modified = false;
    }
  }
///////////////////////////////////////////////////////////
//
  /**
   * Return true if this x, y coordinate is part of this shape. Only works
   * with PATH shapes or GROUP shapes that contain other GROUPs or PATHs.
   * This method is imperfect and doesn't account for all cases
   * (not all complex shapes: concave shapes or holes may have issues).
   */
  @Override
  public boolean contains(float x, float y) {
    if (family == PATH || family == GEOMETRY) {
      // Ray-casting point-in-polygon test over the input vertices (3 floats
      // per vertex); parity of edge crossings determines containment.
      // http://www.ecse.rpi.edu/Homepages/wrf/Research/Short_Notes/pnpoly.html
      boolean c = false;
      for (int i = 0, j = inGeo.vertexCount-1; i < inGeo.vertexCount; j = i++) {
        if (((inGeo.vertices[3 * i + 1] > y) != (inGeo.vertices[3 * j + 1] > y)) &&
            (x <
                (inGeo.vertices[3 * j]-inGeo.vertices[3 * i]) *
                (y-inGeo.vertices[3 * i + 1]) /
                (inGeo.vertices[3 * j + 1]-inGeo.vertices[3 * i + 1]) +
                inGeo.vertices[3 * i])) {
          c = !c;
        }
      }
      return c;
    } else {
      throw new IllegalArgumentException("The contains() method is only implemented for paths.");
    }
  }
///////////////////////////////////////////////////////////
//
// Tessellation
protected void updateTessellation() {
if (!root.tessellated) {
root.tessellate();
root.aggregate();
root.initModified();
root.needBufferInit = true;
}
}
protected void markForTessellation() {
root.tessellated = false;
tessellated = false;
}
  /**
   * Resets all modification tracking: clears every per-channel "modified"
   * flag and collapses each modified vertex range to an empty interval
   * (first = MAX_INT, last = MIN_INT) so subsequent edits can grow the
   * ranges with simple min/max updates.
   */
  protected void initModified() {
    modified = false;
    modifiedPolyVertices = false;
    modifiedPolyColors = false;
    modifiedPolyNormals = false;
    modifiedPolyTexCoords = false;
    modifiedPolyAmbient = false;
    modifiedPolySpecular = false;
    modifiedPolyEmissive = false;
    modifiedPolyShininess = false;
    modifiedLineVertices = false;
    modifiedLineColors = false;
    modifiedLineAttributes = false;
    modifiedPointVertices = false;
    modifiedPointColors = false;
    modifiedPointAttributes = false;
    firstModifiedPolyVertex = PConstants.MAX_INT;
    lastModifiedPolyVertex = PConstants.MIN_INT;
    firstModifiedPolyColor = PConstants.MAX_INT;
    lastModifiedPolyColor = PConstants.MIN_INT;
    firstModifiedPolyNormal = PConstants.MAX_INT;
    lastModifiedPolyNormal = PConstants.MIN_INT;
    firstModifiedPolyTexCoord = PConstants.MAX_INT;
    lastModifiedPolyTexCoord = PConstants.MIN_INT;
    firstModifiedPolyAmbient = PConstants.MAX_INT;
    lastModifiedPolyAmbient = PConstants.MIN_INT;
    firstModifiedPolySpecular = PConstants.MAX_INT;
    lastModifiedPolySpecular = PConstants.MIN_INT;
    firstModifiedPolyEmissive = PConstants.MAX_INT;
    lastModifiedPolyEmissive = PConstants.MIN_INT;
    firstModifiedPolyShininess = PConstants.MAX_INT;
    lastModifiedPolyShininess = PConstants.MIN_INT;
    firstModifiedLineVertex = PConstants.MAX_INT;
    lastModifiedLineVertex = PConstants.MIN_INT;
    firstModifiedLineColor = PConstants.MAX_INT;
    lastModifiedLineColor = PConstants.MIN_INT;
    firstModifiedLineAttribute = PConstants.MAX_INT;
    lastModifiedLineAttribute = PConstants.MIN_INT;
    firstModifiedPointVertex = PConstants.MAX_INT;
    lastModifiedPointVertex = PConstants.MIN_INT;
    firstModifiedPointColor = PConstants.MAX_INT;
    lastModifiedPointColor = PConstants.MIN_INT;
    firstModifiedPointAttribute = PConstants.MAX_INT;
    lastModifiedPointAttribute = PConstants.MIN_INT;
  }
  /**
   * Tessellates the whole shape tree. Only effective on the root shape:
   * lazily creates the shared attribute map and tessellated geometry,
   * clears previous results, gathers custom poly attributes from children
   * (first run only), runs the recursive tessellateImpl() pass, and trims
   * the oversized result arrays.
   */
  protected void tessellate() {
    if (root == this && parent == null) { // Root shape
      boolean initAttr = false;
      if (polyAttribs == null) {
        polyAttribs = PGraphicsOpenGL.newAttributeMap();
        initAttr = true;
      }
      if (tessGeo == null) {
        tessGeo = PGraphicsOpenGL.newTessGeometry(pg, polyAttribs, PGraphicsOpenGL.RETAINED,
                                                  PGL.bufferStreamingRetained);
      }
      tessGeo.clear();
      if (initAttr) {
        collectPolyAttribs();
      }
      for (int i = 0; i < polyAttribs.size(); i++) {
        VertexAttribute attrib = polyAttribs.get(i);
        tessGeo.initAttrib(attrib);
      }
      tessellateImpl();
      // Tessellated arrays are trimmed since they are expanded
      // by doubling their old size, which might lead to arrays
      // larger than the vertex counts.
      tessGeo.trim();
    }
  }
  /**
   * Recursively merges the custom poly attributes of this subtree into the
   * root's attribute map, initializing each attribute in the shared
   * tessellated geometry along the way. Attributes sharing a name across
   * children must have identical definitions.
   *
   * @throws RuntimeException when two children declare conflicting
   *         attributes with the same name
   */
  protected void collectPolyAttribs() {
    AttributeMap rootAttribs = root.polyAttribs;
    tessGeo = root.tessGeo;
    if (family == GROUP) {
      for (int i = 0; i < childCount; i++) {
        PShapeOpenGL child = (PShapeOpenGL) children[i];
        child.collectPolyAttribs();
      }
    } else {
      for (int i = 0; i < polyAttribs.size(); i++) {
        VertexAttribute attrib = polyAttribs.get(i);
        tessGeo.initAttrib(attrib);
        if (rootAttribs.containsKey(attrib.name)) {
          VertexAttribute rattrib = rootAttribs.get(attrib.name);
          if (rattrib.diff(attrib)) {
            throw new RuntimeException("Children shapes cannot have different attributes with same name");
          }
        } else {
          rootAttribs.put(attrib.name, attrib);
        }
      }
    }
  }
protected void tessellateImpl() {
  // Recursive tessellation step: converts this shape's input geometry
  // (and its children's, for groups) into the poly/line/point arrays of
  // the root's shared TessGeometry, and records the index-cache ranges
  // this shape occupies so it can later be drawn on its own.
  tessGeo = root.tessGeo;
  // Reset this shape's index-cache ranges; they are filled in below for
  // leaf shapes (a -1 means "no geometry of that kind").
  firstPolyIndexCache = -1;
  lastPolyIndexCache = -1;
  firstLineIndexCache = -1;
  lastLineIndexCache = -1;
  firstPointIndexCache = -1;
  lastPointIndexCache = -1;
  if (family == GROUP) {
    if (polyAttribs == null) {
      polyAttribs = PGraphicsOpenGL.newAttributeMap();
      collectPolyAttribs();
    }
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.tessellateImpl();
    }
  } else {
    if (shapeCreated) {
      // If the geometry was tessellated previously, then
      // the edges information will still be stored in the
      // input object, so it needs to be removed to avoid
      // duplication.
      inGeo.clearEdges();
      // Configure the tessellator with this shape's current style state
      // before generating any geometry.
      tessellator.setInGeometry(inGeo);
      tessellator.setTessGeometry(tessGeo);
      tessellator.setFill(fill || image != null);
      tessellator.setTexCache(null, null);
      tessellator.setStroke(stroke);
      tessellator.setStrokeColor(strokeColor);
      tessellator.setStrokeWeight(strokeWeight);
      tessellator.setStrokeCap(strokeCap);
      tessellator.setStrokeJoin(strokeJoin);
      tessellator.setRenderer(pg);
      tessellator.setTransform(matrix);
      tessellator.set3D(is3D());
      if (family == GEOMETRY) {
        // Raw vertex geometry: dispatch on the primitive kind. For the
        // stroked kinds, edges are added before tessellation, and normals
        // are computed only when in automatic normal mode.
        if (kind == POINTS) {
          tessellator.tessellatePoints();
        } else if (kind == LINES) {
          tessellator.tessellateLines();
        } else if (kind == LINE_STRIP) {
          tessellator.tessellateLineStrip();
        } else if (kind == LINE_LOOP) {
          tessellator.tessellateLineLoop();
        } else if (kind == TRIANGLE || kind == TRIANGLES) {
          if (stroke) inGeo.addTrianglesEdges();
          if (normalMode == NORMAL_MODE_AUTO) inGeo.calcTrianglesNormals();
          tessellator.tessellateTriangles();
        } else if (kind == TRIANGLE_FAN) {
          if (stroke) inGeo.addTriangleFanEdges();
          if (normalMode == NORMAL_MODE_AUTO) inGeo.calcTriangleFanNormals();
          tessellator.tessellateTriangleFan();
        } else if (kind == TRIANGLE_STRIP) {
          if (stroke) inGeo.addTriangleStripEdges();
          if (normalMode == NORMAL_MODE_AUTO) inGeo.calcTriangleStripNormals();
          tessellator.tessellateTriangleStrip();
        } else if (kind == QUAD || kind == QUADS) {
          if (stroke) inGeo.addQuadsEdges();
          if (normalMode == NORMAL_MODE_AUTO) inGeo.calcQuadsNormals();
          tessellator.tessellateQuads();
        } else if (kind == QUAD_STRIP) {
          if (stroke) inGeo.addQuadStripEdges();
          if (normalMode == NORMAL_MODE_AUTO) inGeo.calcQuadStripNormals();
          tessellator.tessellateQuadStrip();
        } else if (kind == POLYGON) {
          // Polygons may contain bezier/quadratic/curve vertices; the
          // renderer's detail settings are temporarily swapped for this
          // shape's own settings while tessellating.
          boolean bez = inGeo.hasBezierVertex();
          boolean quad = inGeo.hasQuadraticVertex();
          boolean curv = inGeo.hasCurveVertex();
          if (bez || quad) saveBezierVertexSettings();
          if (curv) {
            saveCurveVertexSettings();
            tessellator.resetCurveVertexCount();
          }
          tessellator.tessellatePolygon(solid, close,
                                        normalMode == NORMAL_MODE_AUTO);
          if (bez ||quad) restoreBezierVertexSettings();
          if (curv) restoreCurveVertexSettings();
        }
      } else if (family == PRIMITIVE) {
        // The input geometry needs to be cleared because the geometry
        // generation methods in InGeometry add the vertices of the
        // new primitive to what is already stored.
        inGeo.clear();
        if (kind == POINT) {
          tessellatePoint();
        } else if (kind == LINE) {
          tessellateLine();
        } else if (kind == TRIANGLE) {
          tessellateTriangle();
        } else if (kind == QUAD) {
          tessellateQuad();
        } else if (kind == RECT) {
          tessellateRect();
        } else if (kind == ELLIPSE) {
          tessellateEllipse();
        } else if (kind == ARC) {
          tessellateArc();
        } else if (kind == BOX) {
          tessellateBox();
        } else if (kind == SPHERE) {
          tessellateSphere();
        }
      } else if (family == PATH) {
        inGeo.clear();
        tessellatePath();
      }
      // Textured leaf shapes register their texture with the parent so
      // group-level texture state can be tracked.
      if (image != null && parent != null) {
        ((PShapeOpenGL)parent).addTexture(image);
      }
      // Copy the index-cache ranges the tessellator produced for this
      // shape's geometry.
      firstPolyIndexCache = tessellator.firstPolyIndexCache;
      lastPolyIndexCache = tessellator.lastPolyIndexCache;
      firstLineIndexCache = tessellator.firstLineIndexCache;
      lastLineIndexCache = tessellator.lastLineIndexCache;
      firstPointIndexCache = tessellator.firstPointIndexCache;
      lastPointIndexCache = tessellator.lastPointIndexCache;
    }
  }
  // Vertex ranges are computed later, during aggregation.
  firstPolyVertex = lastPolyVertex = -1;
  firstLineVertex = lastLineVertex = -1;
  firstPointVertex = lastPointVertex = -1;
  tessellated = true;
}
protected void tessellatePoint() {
  // Tessellates a POINT primitive. The params array carries either
  // (x, y) for a 2D point or (x, y, z) for a 3D one.
  float px = 0, py = 0, pz = 0;
  if (params.length == 3) {
    px = params[0];
    py = params[1];
    pz = params[2];
  } else if (params.length == 2) {
    px = params[0];
    py = params[1];
  }
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addPoint(px, py, pz, fill, stroke);
  tessellator.tessellatePoints();
}
protected void tessellateLine() {
  // Tessellates a LINE primitive. The params array carries either the two
  // 2D endpoints (x1, y1, x2, y2) or the two 3D endpoints
  // (x1, y1, z1, x2, y2, z2).
  float ax = 0, ay = 0, az = 0;
  float bx = 0, by = 0, bz = 0;
  if (params.length == 6) {
    ax = params[0];
    ay = params[1];
    az = params[2];
    bx = params[3];
    by = params[4];
    bz = params[5];
  } else if (params.length == 4) {
    ax = params[0];
    ay = params[1];
    bx = params[2];
    by = params[3];
  }
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addLine(ax, ay, az,
                bx, by, bz,
                fill, stroke);
  tessellator.tessellateLines();
}
protected void tessellateTriangle() {
  // Tessellates a TRIANGLE primitive from the three 2D corners stored in
  // params as (x1, y1, x2, y2, x3, y3); z is always 0 for this primitive.
  float ax = 0, ay = 0;
  float bx = 0, by = 0;
  float cx = 0, cy = 0;
  if (params.length == 6) {
    ax = params[0];
    ay = params[1];
    bx = params[2];
    by = params[3];
    cx = params[4];
    cy = params[5];
  }
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addTriangle(ax, ay, 0,
                    bx, by, 0,
                    cx, cy, 0,
                    fill, stroke);
  tessellator.tessellateTriangles();
}
protected void tessellateQuad() {
  // Tessellates a QUAD primitive from the four 2D corners stored in
  // params as (x1, y1, ..., x4, y4); z is always 0 for this primitive.
  float ax = 0, ay = 0;
  float bx = 0, by = 0;
  float cx = 0, cy = 0;
  float dx = 0, dy = 0;
  if (params.length == 8) {
    ax = params[0];
    ay = params[1];
    bx = params[2];
    by = params[3];
    cx = params[4];
    cy = params[5];
    dx = params[6];
    dy = params[7];
  }
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addQuad(ax, ay, 0,
                bx, by, 0,
                cx, cy, 0,
                dx, dy, 0,
                stroke);
  tessellator.tessellateQuads();
}
protected void tessellateRect() {
  // Tessellates a RECT primitive. params holds 4 values (plain rect),
  // 5 values (single corner radius) or 8 values (one radius per corner).
  float a = 0, b = 0, c = 0, d = 0;
  float tl = 0, tr = 0, br = 0, bl = 0;
  boolean rounded = false;
  int mode = rectMode;
  if (params.length == 4 || params.length == 5) {
    a = params[0];
    b = params[1];
    c = params[2];
    d = params[3];
    rounded = false;
    if (params.length == 5) {
      // A single fifth parameter applies the same rounding to all corners.
      tl = params[4];
      tr = params[4];
      br = params[4];
      bl = params[4];
      rounded = true;
    }
  } else if (params.length == 8) {
    a = params[0];
    b = params[1];
    c = params[2];
    d = params[3];
    tl = params[4];
    tr = params[5];
    br = params[6];
    bl = params[7];
    rounded = true;
  }
  // Normalize (a, b, c, d) into the two opposite corners of the rectangle
  // according to the current rectMode.
  float hradius, vradius;
  switch (mode) {
  case CORNERS:
    break;
  case CORNER:
    c += a; d += b;
    break;
  case RADIUS:
    hradius = c;
    vradius = d;
    c = a + hradius;
    d = b + vradius;
    a -= hradius;
    b -= vradius;
    break;
  case CENTER:
    hradius = c / 2.0f;
    vradius = d / 2.0f;
    c = a + hradius;
    d = b + vradius;
    a -= hradius;
    b -= vradius;
  }
  // Swap corners so that (a, b) is always the top-left and (c, d) the
  // bottom-right, which the rounding clamp below relies on.
  if (a > c) {
    float temp = a; a = c; c = temp;
  }
  if (b > d) {
    float temp = b; b = d; d = temp;
  }
  // No corner radius may exceed half of the shorter rectangle side.
  float maxRounding = PApplet.min((c - a) / 2, (d - b) / 2);
  if (tl > maxRounding) tl = maxRounding;
  if (tr > maxRounding) tr = maxRounding;
  if (br > maxRounding) br = maxRounding;
  if (bl > maxRounding) bl = maxRounding;
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  if (rounded) {
    // Rounded corners are built from bezier segments, so the renderer's
    // bezier detail is temporarily swapped for this shape's setting.
    saveBezierVertexSettings();
    inGeo.addRect(a, b, c, d, tl, tr, br, bl, stroke);
    tessellator.tessellatePolygon(true, true, true);
    restoreBezierVertexSettings();
  } else {
    inGeo.addRect(a, b, c, d, stroke);
    tessellator.tessellateQuads();
  }
}
protected void tessellateEllipse() {
  // Tessellates an ELLIPSE primitive. params holds (a, b, c, d) whose
  // meaning depends on the current ellipseMode.
  float a = 0, b = 0, c = 0, d = 0;
  int mode = ellipseMode;
  if (4 <= params.length) {
    a = params[0];
    b = params[1];
    c = params[2];
    d = params[3];
  }
  // Normalize to top-left corner (x, y) plus width/height (w, h).
  float x = a;
  float y = b;
  float w = c;
  float h = d;
  if (mode == CORNERS) {
    w = c - a;
    h = d - b;
  } else if (mode == RADIUS) {
    x = a - c;
    y = b - d;
    w = c * 2;
    h = d * 2;
  } else if (mode == DIAMETER) {
    // DIAMETER: (a, b) is the center, (c, d) the full width/height.
    x = a - c/2f;
    y = b - d/2f;
  }
  if (w < 0) { // undo negative width
    x += w;
    w = -w;
  }
  if (h < 0) { // undo negative height
    y += h;
    h = -h;
  }
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.setNormal(normalX, normalY, normalZ);
  inGeo.addEllipse(x, y, w, h, fill, stroke);
  tessellator.tessellateTriangleFan();
}
protected void tessellateArc() {
  // Tessellates an ARC primitive. params holds (a, b, c, d, start, stop)
  // plus an optional seventh value selecting the arc drawing mode
  // (e.g. OPEN/CHORD/PIE).
  float a = 0, b = 0, c = 0, d = 0;
  float start = 0, stop = 0;
  int mode = ellipseMode;
  int arcMode = 0;
  if (6 <= params.length) {
    a = params[0];
    b = params[1];
    c = params[2];
    d = params[3];
    start = params[4];
    stop = params[5];
    if (params.length == 7) {
      arcMode = (int)(params[6]);
    }
  }
  // Normalize to top-left corner (x, y) plus width/height (w, h),
  // interpreting (a, b, c, d) according to the current ellipseMode.
  float x = a;
  float y = b;
  float w = c;
  float h = d;
  if (mode == CORNERS) {
    w = c - a;
    h = d - b;
  } else if (mode == RADIUS) {
    x = a - c;
    y = b - d;
    w = c * 2;
    h = d * 2;
  } else if (mode == CENTER) {
    x = a - c/2f;
    y = b - d/2f;
  }
  // make sure the loop will exit before starting while
  if (!Float.isInfinite(start) && !Float.isInfinite(stop)) {
    // ignore equal and degenerate cases
    if (stop > start) {
      // make sure that we're starting at a useful point
      while (start < 0) {
        start += TWO_PI;
        stop += TWO_PI;
      }
      if (stop - start > TWO_PI) {
        // don't change start, it is visible in PIE mode
        stop = start + TWO_PI;
      }
      inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                        ambientColor, specularColor, emissiveColor, shininess);
      inGeo.setNormal(normalX, normalY, normalZ);
      inGeo.addArc(x, y, w, h, start, stop, fill, stroke, arcMode);
      tessellator.tessellateTriangleFan();
    }
  }
}
protected void tessellateBox() {
  // Tessellates a BOX primitive. A single parameter means a cube; three
  // parameters give width, height and depth separately.
  float boxW = 0, boxH = 0, boxD = 0;
  switch (params.length) {
  case 1:
    boxW = boxH = boxD = params[0];
    break;
  case 3:
    boxW = params[0];
    boxH = params[1];
    boxD = params[2];
    break;
  }
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  inGeo.addBox(boxW, boxH, boxD, fill, stroke);
  tessellator.tessellateQuads();
}
protected void tessellateSphere() {
  // Tessellates a SPHERE primitive. params holds the radius and,
  // optionally, the sphere detail (one value for both directions, or
  // separate U and V values).
  float r = 0;
  int nu = sphereDetailU;
  int nv = sphereDetailV;
  if (1 <= params.length) {
    r = params[0];
    if (params.length == 2) {
      nu = nv = (int)params[1];
    } else if (params.length == 3) {
      nu = (int)params[1];
      nv = (int)params[2];
    }
  }
  // Degenerate detail values fall back to the default resolution.
  if (nu < 3 || nv < 2) {
    nu = nv = 30;
  }
  // The renderer's sphere detail is temporarily switched to this shape's
  // detail (sphere coordinates are cached per-detail in the renderer),
  // then restored below.
  int savedDetailU = pg.sphereDetailU;
  int savedDetailV = pg.sphereDetailV;
  if (pg.sphereDetailU != nu || pg.sphereDetailV != nv) {
    pg.sphereDetail(nu, nv);
  }
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  int[] indices = inGeo.addSphere(r, nu, nv, fill, stroke);
  tessellator.tessellateTriangles(indices);
  if ((0 < savedDetailU && savedDetailU != nu) ||
      (0 < savedDetailV && savedDetailV != nv)) {
    pg.sphereDetail(savedDetailU, savedDetailV);
  }
}
protected void tessellatePath() {
  // Tessellates a PATH-family shape from its stored vertices and vertex
  // codes (VERTEX / QUADRATIC_VERTEX / BEZIER_VERTEX / CURVE_VERTEX /
  // BREAK). NOTE(review): only 2D vertices (vertices[i].length == 2) are
  // handled here; other vertex widths are silently skipped — confirm
  // whether 3D paths are expected to reach this method.
  if (vertices == null) return;
  inGeo.setMaterial(fillColor, strokeColor, strokeWeight,
                    ambientColor, specularColor, emissiveColor, shininess);
  if (vertexCodeCount == 0) { // each point is a simple vertex
    if (vertices[0].length == 2) { // tessellating 2D vertices
      for (int i = 0; i < vertexCount; i++) {
        inGeo.addVertex(vertices[i][X], vertices[i][Y], VERTEX, false);
      }
    }
  } else { // coded set of vertices
    int idx = 0;
    // brk marks the start of a new contour; it is set by BREAK codes and
    // consumed by the next vertex added.
    boolean brk = true;
    if (vertices[0].length == 2) { // tessellating a 2D path
      for (int j = 0; j < vertexCodeCount; j++) {
        switch (vertexCodes[j]) {
        case VERTEX:
          inGeo.addVertex(vertices[idx][X], vertices[idx][Y], VERTEX, brk);
          brk = false;
          idx++;
          break;
        case QUADRATIC_VERTEX:
          // Consumes two stored vertices: control point and anchor.
          inGeo.addQuadraticVertex(vertices[idx+0][X], vertices[idx+0][Y], 0,
                                   vertices[idx+1][X], vertices[idx+1][Y], 0,
                                   brk);
          brk = false;
          idx += 2;
          break;
        case BEZIER_VERTEX:
          // Consumes three stored vertices: two control points and anchor.
          inGeo.addBezierVertex(vertices[idx+0][X], vertices[idx+0][Y], 0,
                                vertices[idx+1][X], vertices[idx+1][Y], 0,
                                vertices[idx+2][X], vertices[idx+2][Y], 0,
                                brk);
          brk = false;
          idx += 3;
          break;
        case CURVE_VERTEX:
          inGeo.addCurveVertex(vertices[idx][X], vertices[idx][Y], 0, brk);
          brk = false;
          idx++;
          break;
        case BREAK:
          brk = true;
        }
      }
    }
  }
  // Temporarily apply this shape's bezier/curve detail settings to the
  // renderer while tessellating, then restore them.
  boolean bez = inGeo.hasBezierVertex();
  boolean quad = inGeo.hasQuadraticVertex();
  boolean curv = inGeo.hasCurveVertex();
  if (bez || quad) saveBezierVertexSettings();
  if (curv) {
    saveCurveVertexSettings();
    tessellator.resetCurveVertexCount();
  }
  tessellator.tessellatePolygon(true, close, true);
  if (bez || quad) restoreBezierVertexSettings();
  if (curv) restoreCurveVertexSettings();
}
protected void saveBezierVertexSettings() {
  // Remember the renderer's current bezier detail and switch to this
  // shape's own detail for the duration of tessellation.
  final int current = pg.bezierDetail;
  savedBezierDetail = current;
  if (current != bezierDetail) {
    pg.bezierDetail(bezierDetail);
  }
}
protected void restoreBezierVertexSettings() {
  // Put back the bezier detail remembered by saveBezierVertexSettings(),
  // but only if it actually differed from this shape's setting.
  final boolean changed = savedBezierDetail != bezierDetail;
  if (changed) {
    pg.bezierDetail(savedBezierDetail);
  }
}
protected void saveCurveVertexSettings() {
  // Remember the renderer's curve detail and tightness, then switch both
  // to this shape's own values for the duration of tessellation.
  final int currentDetail = pg.curveDetail;
  final float currentTightness = pg.curveTightness;
  savedCurveDetail = currentDetail;
  savedCurveTightness = currentTightness;
  if (currentDetail != curveDetail) {
    pg.curveDetail(curveDetail);
  }
  if (currentTightness != curveTightness) {
    pg.curveTightness(curveTightness);
  }
}
protected void restoreCurveVertexSettings() {
  // Put back the curve detail and tightness remembered by
  // saveCurveVertexSettings(), skipping values that did not change.
  if (curveDetail != savedCurveDetail) {
    pg.curveDetail(savedCurveDetail);
  }
  if (curveTightness != savedCurveTightness) {
    pg.curveTightness(savedCurveTightness);
  }
}
///////////////////////////////////////////////////////////
//
// Aggregation
protected void aggregate() {
  // Entry point for index/vertex aggregation: only the root shape starts
  // it, resetting the running offsets used while walking the tree.
  if (root == this && parent == null) {
    // Initializing auxiliary variables in root node
    // needed for aggregation.
    polyIndexOffset = 0;
    polyVertexOffset = 0;
    polyVertexAbs = 0;
    polyVertexRel = 0;
    lineIndexOffset = 0;
    lineVertexOffset = 0;
    lineVertexAbs = 0;
    lineVertexRel = 0;
    pointIndexOffset = 0;
    pointVertexOffset = 0;
    pointVertexAbs = 0;
    pointVertexRel = 0;
    // Recursive aggregation.
    aggregateImpl();
  }
}
// This method is very important, as it is responsible of generating the
// correct vertex and index offsets for each level of the shape hierarchy.
// This is the core of the recursive algorithm that calculates the indices
// for the vertices accumulated in a single VBO.
// Basically, the algorithm traverses all the shapes in the hierarchy and
// updates the index cache for each child shape holding geometry (those being
// the leaf nodes in the hierarchy tree), and creates index caches for the
// group shapes so that the draw() method can be called from any shape in the
// hierarchy and the correct piece of geometry will be rendered.
//
// For example, in the following hierarchy:
//
// ROOT GROUP
// |
// /-----------------0-----------------\
// | |
// CHILD GROUP 0 CHILD GROUP 1
// | |
// | /---------------0-----------------\
// | | | |
// GEO SHAPE 0 GEO SHAPE 0 GEO SHAPE 1 GEO SHAPE 2
// 4 vertices 5 vertices 6 vertices 3 vertices
//
// calling draw() from the root group should result in all the
// vertices (4 + 5 + 6 + 3 = 18) being rendered, while calling
// draw() from either child groups 0 or 1 should result in the first
// 4 vertices or the last 14 vertices being rendered, respectively.
protected void aggregateImpl() {
  // Recursive step of aggregation: determines which geometry kinds this
  // shape contains, updates the shared index caches, and bakes any
  // pre-tessellation transform into the tessellated vertices.
  if (family == GROUP) {
    // Recursively aggregating the child shapes.
    hasPolys = false;
    hasLines = false;
    hasPoints = false;
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      child.aggregateImpl();
      hasPolys |= child.hasPolys;
      hasLines |= child.hasLines;
      hasPoints |= child.hasPoints;
    }
  } else { // LEAF SHAPE (family either GEOMETRY, PATH or PRIMITIVE)
    // A leaf has geometry of a kind iff tessellation produced a valid
    // index-cache range for it.
    hasPolys = -1 < firstPolyIndexCache && -1 < lastPolyIndexCache;
    hasLines = -1 < firstLineIndexCache && -1 < lastLineIndexCache;
    hasPoints = -1 < firstPointIndexCache && -1 < lastPointIndexCache;
  }
  if (hasPolys) {
    updatePolyIndexCache();
  }
  // In 2D mode, strokes and points are emitted as poly geometry, so the
  // line/point caches only exist in 3D.
  if (is3D()) {
    if (hasLines) updateLineIndexCache();
    if (hasPoints) updatePointIndexCache();
  }
  if (matrix != null) {
    // Some geometric transformations were applied on
    // this shape before tessellation, so they are applied now.
    if (hasPolys) {
      tessGeo.applyMatrixOnPolyGeometry(matrix,
                                        firstPolyVertex, lastPolyVertex);
    }
    if (is3D()) {
      if (hasLines) {
        tessGeo.applyMatrixOnLineGeometry(matrix,
                                          firstLineVertex, lastLineVertex);
      }
      if (hasPoints) {
        tessGeo.applyMatrixOnPointGeometry(matrix,
                                           firstPointVertex, lastPointVertex);
      }
    }
  }
}
// Updates the index cache for the range that corresponds to this shape.
// Updates the index cache for the range that corresponds to this shape.
protected void updatePolyIndexCache() {
  IndexCache cache = tessGeo.polyIndexCache;
  if (family == GROUP) {
    // Updates the index cache to include the elements corresponding to
    // a group shape, using the cache entries of the child shapes. The
    // index cache has a pyramidal structure where the base is formed
    // by the entries corresponding to the leaf (geometry) shapes, and
    // each subsequent level is determined by the higher-level group shapes
    // The index pyramid is flattened into arrays in order to use simple
    // data structures, so each shape needs to store the positions in the
    // cache that corresponds to itself.
    // The index ranges of the child shapes that share the vertex offset
    // are unified into a single range in the parent level.
    firstPolyIndexCache = lastPolyIndexCache = -1;
    int gindex = -1;
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      int first = child.firstPolyIndexCache;
      int count = -1 < first ? child.lastPolyIndexCache - first + 1 : -1;
      for (int n = first; n < first + count; n++) {
        if (gindex == -1) {
          gindex = cache.addNew(n);
          firstPolyIndexCache = gindex;
        } else {
          if (cache.vertexOffset[gindex] == cache.vertexOffset[n]) {
            // When the vertex offsets are the same, this means that the
            // current index range in the group shape can be extended to
            // include the index range in the current child shape.
            // This is a result of how the indices are updated for the
            // leaf shapes.
            cache.incCounts(gindex,
                            cache.indexCount[n], cache.vertexCount[n]);
          } else {
            gindex = cache.addNew(n);
          }
        }
      }
      // Updating the first and last poly vertices for this group shape.
      if (-1 < child.firstPolyVertex) {
        if (firstPolyVertex == -1) {
          firstPolyVertex = Integer.MAX_VALUE;
        }
        firstPolyVertex = PApplet.min(firstPolyVertex, child.firstPolyVertex);
      }
      if (-1 < child.lastPolyVertex) {
        lastPolyVertex = PApplet.max(lastPolyVertex, child.lastPolyVertex);
      }
    }
    lastPolyIndexCache = gindex;
  } else {
    // The index cache is updated in order to reflect the fact that all
    // the vertices will be stored in a single VBO in the root shape.
    // This update works as follows (the methodology is the same for
    // poly, line and point): the VertexAbs variable in the root shape
    // stores the index of the last vertex up to this shape (plus one)
    // without taking into consideration the MAX_VERTEX_INDEX limit, so
    // it effectively runs over the entire range.
    // VertexRel, on the other hand, is reset every time the limit is
    // exceeded, therefore creating the start of a new index group in the
    // root shape. When this happens, the indices in the child shape need
    // to be restarted as well to reflect the new index offset.
    firstPolyVertex = lastPolyVertex =
      cache.vertexOffset[firstPolyIndexCache];
    for (int n = firstPolyIndexCache; n <= lastPolyIndexCache; n++) {
      int ioffset = cache.indexOffset[n];
      int icount = cache.indexCount[n];
      int vcount = cache.vertexCount[n];
      if (PGL.MAX_VERTEX_INDEX1 <= root.polyVertexRel + vcount || // Too many vertices already signal the start of a new cache...
          (is2D() && startStrokedTex(n))) { // ... or, in 2D, the beginning of line or points.
        root.polyVertexRel = 0;
        root.polyVertexOffset = root.polyVertexAbs;
        cache.indexOffset[n] = root.polyIndexOffset;
      } else {
        // Same vertex group: shift the indices by the relative offset
        // accumulated so far in the root.
        tessGeo.incPolyIndices(ioffset, ioffset + icount - 1,
                               root.polyVertexRel);
      }
      cache.vertexOffset[n] = root.polyVertexOffset;
      if (is2D()) {
        setFirstStrokeVertex(n, lastPolyVertex);
      }
      root.polyIndexOffset += icount;
      root.polyVertexAbs += vcount;
      root.polyVertexRel += vcount;
      lastPolyVertex += vcount;
    }
    // lastPolyVertex ran one past the final vertex in the loop above.
    lastPolyVertex--;
    if (is2D()) {
      setLastStrokeVertex(lastPolyVertex);
    }
  }
}
protected boolean startStrokedTex(int n) {
  // For a textured shape, reports whether cache position n is where the
  // stroked line or point geometry begins.
  if (image == null) {
    return false;
  }
  return n == firstLineIndexCache || n == firstPointIndexCache;
}
protected void setFirstStrokeVertex(int n, int vert) {
  // Records the first in-VBO vertex of the line/point (stroke) geometry
  // for 2D shapes, the first time cache position n is reached.
  final boolean linesStartHere = n == firstLineIndexCache && firstLineVertex == -1;
  if (linesStartHere) {
    firstLineVertex = vert;
    lastLineVertex = vert;
  }
  final boolean pointsStartHere = n == firstPointIndexCache && firstPointVertex == -1;
  if (pointsStartHere) {
    firstPointVertex = vert;
    lastPointVertex = vert;
  }
}
protected void setLastStrokeVertex(int vert) {
  // Records the last in-VBO vertex of the line/point (stroke) geometry
  // for 2D shapes. vert is an absolute vertex index (callers pass
  // lastPolyVertex), so it must be assigned, not accumulated.
  if (-1 < lastLineVertex) {
    lastLineVertex = vert;
  }
  if (-1 < lastPointVertex) {
    // Fix: was "lastPointVertex += vert", which added the absolute index
    // to the previous one and produced an out-of-range value; mirror the
    // line branch above and assign it instead.
    lastPointVertex = vert;
  }
}
protected void updateLineIndexCache() {
  // Same cache-flattening scheme as updatePolyIndexCache(), applied to
  // the line geometry: groups merge the ranges of their children, leaves
  // re-base their indices against the root's running offsets.
  IndexCache cache = tessGeo.lineIndexCache;
  if (family == GROUP) {
    firstLineIndexCache = lastLineIndexCache = -1;
    int gindex = -1;
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      int first = child.firstLineIndexCache;
      int count = -1 < first ? child.lastLineIndexCache - first + 1 : -1;
      for (int n = first; n < first + count; n++) {
        if (gindex == -1) {
          gindex = cache.addNew(n);
          firstLineIndexCache = gindex;
        } else {
          // Child ranges sharing the vertex offset merge into the
          // current group entry; otherwise a new entry is opened.
          if (cache.vertexOffset[gindex] == cache.vertexOffset[n]) {
            cache.incCounts(gindex, cache.indexCount[n],
                            cache.vertexCount[n]);
          } else {
            gindex = cache.addNew(n);
          }
        }
      }
      // Updating the first and last line vertices for this group shape.
      if (-1 < child.firstLineVertex) {
        if (firstLineVertex == -1) firstLineVertex = Integer.MAX_VALUE;
        firstLineVertex = PApplet.min(firstLineVertex, child.firstLineVertex);
      }
      if (-1 < child.lastLineVertex) {
        lastLineVertex = PApplet.max(lastLineVertex, child.lastLineVertex);
      }
    }
    lastLineIndexCache = gindex;
  } else {
    firstLineVertex = lastLineVertex =
      cache.vertexOffset[firstLineIndexCache];
    for (int n = firstLineIndexCache; n <= lastLineIndexCache; n++) {
      int ioffset = cache.indexOffset[n];
      int icount = cache.indexCount[n];
      int vcount = cache.vertexCount[n];
      // Exceeding the max index value starts a new vertex group in the
      // root; otherwise the indices are shifted by the relative offset.
      if (PGL.MAX_VERTEX_INDEX1 <= root.lineVertexRel + vcount) {
        root.lineVertexRel = 0;
        root.lineVertexOffset = root.lineVertexAbs;
        cache.indexOffset[n] = root.lineIndexOffset;
      } else {
        tessGeo.incLineIndices(ioffset, ioffset + icount - 1,
                               root.lineVertexRel);
      }
      cache.vertexOffset[n] = root.lineVertexOffset;
      root.lineIndexOffset += icount;
      root.lineVertexAbs += vcount;
      root.lineVertexRel += vcount;
      lastLineVertex += vcount;
    }
    // lastLineVertex ran one past the final vertex in the loop above.
    lastLineVertex--;
  }
}
protected void updatePointIndexCache() {
  // Same cache-flattening scheme as updatePolyIndexCache(), applied to
  // the point geometry: groups merge the ranges of their children, leaves
  // re-base their indices against the root's running offsets.
  IndexCache cache = tessGeo.pointIndexCache;
  if (family == GROUP) {
    firstPointIndexCache = lastPointIndexCache = -1;
    int gindex = -1;
    for (int i = 0; i < childCount; i++) {
      PShapeOpenGL child = (PShapeOpenGL) children[i];
      int first = child.firstPointIndexCache;
      int count = -1 < first ? child.lastPointIndexCache - first + 1 : -1;
      for (int n = first; n < first + count; n++) {
        if (gindex == -1) {
          gindex = cache.addNew(n);
          firstPointIndexCache = gindex;
        } else {
          if (cache.vertexOffset[gindex] == cache.vertexOffset[n]) {
            // When the vertex offsets are the same, this means that the
            // current index range in the group shape can be extended to
            // include either the index range in the current child shape.
            // This is a result of how the indices are updated for the
            // leaf shapes in aggregateImpl().
            cache.incCounts(gindex, cache.indexCount[n],
                            cache.vertexCount[n]);
          } else {
            gindex = cache.addNew(n);
          }
        }
      }
      // Updating the first and last point vertices for this group shape.
      if (-1 < child.firstPointVertex) {
        if (firstPointVertex == -1) firstPointVertex = Integer.MAX_VALUE;
        firstPointVertex = PApplet.min(firstPointVertex,
                                       child.firstPointVertex);
      }
      if (-1 < child.lastPointVertex) {
        lastPointVertex = PApplet.max(lastPointVertex, child.lastPointVertex);
      }
    }
    lastPointIndexCache = gindex;
  } else {
    firstPointVertex = lastPointVertex =
      cache.vertexOffset[firstPointIndexCache];
    for (int n = firstPointIndexCache; n <= lastPointIndexCache; n++) {
      int ioffset = cache.indexOffset[n];
      int icount = cache.indexCount[n];
      int vcount = cache.vertexCount[n];
      // Exceeding the max index value starts a new vertex group in the
      // root; otherwise the indices are shifted by the relative offset.
      if (PGL.MAX_VERTEX_INDEX1 <= root.pointVertexRel + vcount) {
        root.pointVertexRel = 0;
        root.pointVertexOffset = root.pointVertexAbs;
        cache.indexOffset[n] = root.pointIndexOffset;
      } else {
        tessGeo.incPointIndices(ioffset, ioffset + icount - 1,
                                root.pointVertexRel);
      }
      cache.vertexOffset[n] = root.pointVertexOffset;
      root.pointIndexOffset += icount;
      root.pointVertexAbs += vcount;
      root.pointVertexRel += vcount;
      lastPointVertex += vcount;
    }
    // lastPointVertex ran one past the final vertex in the loop above.
    lastPointVertex--;
  }
}
///////////////////////////////////////////////////////////
//
// Buffer initialization
protected void initBuffers() {
  // Creates or refreshes the VBOs for whichever geometry kinds this shape
  // has. Note: contextIsOutdated() has the side effect of disposing the
  // stale buffers when the GL context changed, and the current context is
  // recorded here for the next check.
  boolean outdated = contextIsOutdated();
  context = pgl.getCurrentContext();
  if (hasPolys && (needBufferInit || outdated)) {
    initPolyBuffers();
  }
  // Line and point buffers only exist in 3D; in 2D strokes and points are
  // folded into the poly geometry.
  if (is3D()) {
    if (hasLines && (needBufferInit || outdated)) {
      initLineBuffers();
    }
    if (hasPoints && (needBufferInit || outdated)) {
      initPointBuffers();
    }
  }
  needBufferInit = false;
}
protected void initPolyBuffers() {
  // Creates (first time) or refreshes the VBOs holding the tessellated
  // poly geometry: vertices, colors, normals, texture coordinates,
  // material components, custom attributes and indices. For each buffer,
  // the first argument of the corresponding init call tells tessGeo
  // whether the GL buffer already existed (!createBuffer).
  boolean createBuffer = bufPolyVertex == null;
  if (createBuffer) bufPolyVertex = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyVertex.glId);
  // Fix: was hard-coded "false", inconsistent with every other init call
  // in this method (and with initLineBuffers()/initPointBuffers(), which
  // pass !createBuffer for their vertex buffers).
  tessGeo.initPolyVerticesBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  createBuffer = bufPolyColor == null;
  if (createBuffer) bufPolyColor = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyColor.glId);
  tessGeo.initPolyColorsBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  createBuffer = bufPolyNormal == null;
  if (createBuffer) bufPolyNormal = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 3, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyNormal.glId);
  tessGeo.initPolyNormalsBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  createBuffer = bufPolyTexCoord == null;
  if (createBuffer) bufPolyTexCoord = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 2, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyTexCoord.glId);
  tessGeo.initPolyTexCoordsBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  createBuffer = bufPolyAmbient == null;
  if (createBuffer) bufPolyAmbient = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyAmbient.glId);
  tessGeo.initPolyAmbientBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  createBuffer = bufPolySpecular == null;
  if (createBuffer) bufPolySpecular = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolySpecular.glId);
  tessGeo.initPolySpecularBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  createBuffer = bufPolyEmissive == null;
  if (createBuffer) bufPolyEmissive = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyEmissive.glId);
  tessGeo.initPolyEmissiveBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  createBuffer = bufPolyShininess == null;
  if (createBuffer) bufPolyShininess = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyShininess.glId);
  tessGeo.initPolyShininessBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  // Custom per-vertex attributes declared on the shape.
  for (String name: polyAttribs.keySet()) {
    VertexAttribute attrib = polyAttribs.get(name);
    createBuffer = !attrib.bufferCreated();
    if (createBuffer) attrib.createBuffer(pgl);
    pgl.bindBuffer(PGL.ARRAY_BUFFER, attrib.buf.glId);
    tessGeo.initPolyAttribsBuffer(attrib, !createBuffer, true, PGL.bufferUsageRetained);
  }
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
  // Index buffer goes on the ELEMENT_ARRAY_BUFFER binding point.
  createBuffer = bufPolyIndex == null;
  if (createBuffer) bufPolyIndex = new VertexBuffer(pg, PGL.ELEMENT_ARRAY_BUFFER, 1, PGL.SIZEOF_INDEX, PGL.bufferUsageRetained, true);
  pgl.bindBuffer(PGL.ELEMENT_ARRAY_BUFFER, bufPolyIndex.glId);
  tessGeo.initPolyIndicesBuffer(!createBuffer, true, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ELEMENT_ARRAY_BUFFER, 0);
}
protected void initLineBuffers() {
  // Creates (first time) or refreshes the VBOs holding the tessellated
  // line geometry: vertices, colors, direction attributes and indices.
  boolean needNew = bufLineVertex == null;
  if (needNew) {
    bufLineVertex = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  }
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineVertex.glId);
  tessGeo.initLineVerticesBuffer(!needNew, true, PGL.bufferUsageRetained);

  needNew = bufLineColor == null;
  if (needNew) {
    bufLineColor = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
  }
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineColor.glId);
  tessGeo.initLineColorsBuffer(!needNew, true, PGL.bufferUsageRetained);

  needNew = bufLineAttrib == null;
  if (needNew) {
    bufLineAttrib = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  }
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineAttrib.glId);
  tessGeo.initLineDirectionsBuffer(!needNew, true, PGL.bufferUsageRetained);

  // Unbind the array buffer before moving to the element array buffer.
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);

  needNew = bufLineIndex == null;
  if (needNew) {
    bufLineIndex = new VertexBuffer(pg, PGL.ELEMENT_ARRAY_BUFFER, 1, PGL.SIZEOF_INDEX, PGL.bufferUsageRetained, true);
  }
  pgl.bindBuffer(PGL.ELEMENT_ARRAY_BUFFER, bufLineIndex.glId);
  tessGeo.initLineIndicesBuffer(!needNew, true, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ELEMENT_ARRAY_BUFFER, 0);
}
protected void initPointBuffers() {
  // Creates (first time) or refreshes the VBOs holding the tessellated
  // point geometry: vertices, colors, offset attributes and indices.
  boolean needNew = bufPointVertex == null;
  if (needNew) {
    bufPointVertex = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 4, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  }
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointVertex.glId);
  tessGeo.initPointVerticesBuffer(!needNew, true, PGL.bufferUsageRetained);

  needNew = bufPointColor == null;
  if (needNew) {
    bufPointColor = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 1, PGL.SIZEOF_INT, PGL.bufferUsageRetained);
  }
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointColor.glId);
  tessGeo.initPointColorsBuffer(!needNew, true, PGL.bufferUsageRetained);

  needNew = bufPointAttrib == null;
  if (needNew) {
    bufPointAttrib = new VertexBuffer(pg, PGL.ARRAY_BUFFER, 2, PGL.SIZEOF_FLOAT, PGL.bufferUsageRetained);
  }
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointAttrib.glId);
  tessGeo.initPointOffsetsBuffer(!needNew, true, PGL.bufferUsageRetained);

  // Unbind the array buffer before moving to the element array buffer.
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);

  needNew = bufPointIndex == null;
  if (needNew) {
    bufPointIndex = new VertexBuffer(pg, PGL.ELEMENT_ARRAY_BUFFER, 1, PGL.SIZEOF_INDEX, PGL.bufferUsageRetained, true);
  }
  pgl.bindBuffer(PGL.ELEMENT_ARRAY_BUFFER, bufPointIndex.glId);
  tessGeo.initPointIndicesBuffer(!needNew, true, PGL.bufferUsageRetained);
  pgl.bindBuffer(PGL.ELEMENT_ARRAY_BUFFER, 0);
}
protected boolean contextIsOutdated() {
  // Returns true when the GL context this shape's buffers were created in
  // is no longer current; in that case all buffer objects are disposed so
  // they get recreated in the new context.
  // NOTE(review): the disposals below are unconditional — if some buffers
  // were never created (e.g. line/point buffers for a 2D shape) this
  // would NPE; confirm callers guarantee all buffers exist before a
  // context change can be observed.
  boolean outdated = !pgl.contextIsCurrent(context);
  if (outdated) {
    bufPolyVertex.dispose();
    bufPolyColor.dispose();
    bufPolyNormal.dispose();
    bufPolyTexCoord.dispose();
    bufPolyAmbient.dispose();
    bufPolySpecular.dispose();
    bufPolyEmissive.dispose();
    bufPolyShininess.dispose();
    for (VertexAttribute attrib: polyAttribs.values()) {
      attrib.buf.dispose();
    }
    bufPolyIndex.dispose();
    bufLineVertex.dispose();
    bufLineColor.dispose();
    bufLineAttrib.dispose();
    bufLineIndex.dispose();
    bufPointVertex.dispose();
    bufPointColor.dispose();
    bufPointAttrib.dispose();
    bufPointIndex.dispose();
  }
  return outdated;
}
///////////////////////////////////////////////////////////
//
// Geometry update
protected void updateGeometry() {
  // Makes sure the root's buffers exist, then pushes any modified
  // tessellated data to the GPU.
  root.initBuffers();
  if (!root.modified) {
    return;
  }
  root.updateGeometryImpl();
}
/**
 * Copies every modified range of the tessellated geometry into the
 * corresponding VBO and clears the per-stream dirty flags/ranges.
 * Runs on the root shape only (see updateGeometry()).
 */
protected void updateGeometryImpl() {
  if (modifiedPolyVertices) {
    int offset = firstModifiedPolyVertex;
    int size = lastModifiedPolyVertex - offset + 1;
    copyPolyVertices(offset, size);
    modifiedPolyVertices = false;
    firstModifiedPolyVertex = PConstants.MAX_INT;
    lastModifiedPolyVertex = PConstants.MIN_INT;
  }
  if (modifiedPolyColors) {
    int offset = firstModifiedPolyColor;
    int size = lastModifiedPolyColor - offset + 1;
    copyPolyColors(offset, size);
    modifiedPolyColors = false;
    firstModifiedPolyColor = PConstants.MAX_INT;
    lastModifiedPolyColor = PConstants.MIN_INT;
  }
  if (modifiedPolyNormals) {
    int offset = firstModifiedPolyNormal;
    int size = lastModifiedPolyNormal - offset + 1;
    copyPolyNormals(offset, size);
    modifiedPolyNormals = false;
    firstModifiedPolyNormal = PConstants.MAX_INT;
    lastModifiedPolyNormal = PConstants.MIN_INT;
  }
  if (modifiedPolyTexCoords) {
    int offset = firstModifiedPolyTexCoord;
    int size = lastModifiedPolyTexCoord - offset + 1;
    copyPolyTexCoords(offset, size);
    modifiedPolyTexCoords = false;
    firstModifiedPolyTexCoord = PConstants.MAX_INT;
    lastModifiedPolyTexCoord = PConstants.MIN_INT;
  }
  if (modifiedPolyAmbient) {
    int offset = firstModifiedPolyAmbient;
    int size = lastModifiedPolyAmbient - offset + 1;
    copyPolyAmbient(offset, size);
    modifiedPolyAmbient = false;
    firstModifiedPolyAmbient = PConstants.MAX_INT;
    lastModifiedPolyAmbient = PConstants.MIN_INT;
  }
  if (modifiedPolySpecular) {
    int offset = firstModifiedPolySpecular;
    int size = lastModifiedPolySpecular - offset + 1;
    copyPolySpecular(offset, size);
    modifiedPolySpecular = false;
    firstModifiedPolySpecular = PConstants.MAX_INT;
    lastModifiedPolySpecular = PConstants.MIN_INT;
  }
  if (modifiedPolyEmissive) {
    int offset = firstModifiedPolyEmissive;
    int size = lastModifiedPolyEmissive - offset + 1;
    copyPolyEmissive(offset, size);
    modifiedPolyEmissive = false;
    firstModifiedPolyEmissive = PConstants.MAX_INT;
    lastModifiedPolyEmissive = PConstants.MIN_INT;
  }
  if (modifiedPolyShininess) {
    int offset = firstModifiedPolyShininess;
    int size = lastModifiedPolyShininess - offset + 1;
    copyPolyShininess(offset, size);
    modifiedPolyShininess = false;
    firstModifiedPolyShininess = PConstants.MAX_INT;
    lastModifiedPolyShininess = PConstants.MIN_INT;
  }
  for (String name: polyAttribs.keySet()) {
    VertexAttribute attrib = polyAttribs.get(name);
    if (attrib.modified) {
      // FIX: use the attribute's own dirty range. The previous code read
      // firstModifiedPolyVertex/lastModifiedPolyVertex, which are never set
      // by setModifiedPolyAttrib() and are reset to MAX_INT/MIN_INT by the
      // vertex branch above, producing a bogus offset/size here. The
      // per-attribute fields were reset below but never read.
      int offset = attrib.firstModified;
      int size = attrib.lastModified - offset + 1;
      copyPolyAttrib(attrib, offset, size);
      attrib.modified = false;
      attrib.firstModified = PConstants.MAX_INT;
      attrib.lastModified = PConstants.MIN_INT;
    }
  }
  if (modifiedLineVertices) {
    int offset = firstModifiedLineVertex;
    int size = lastModifiedLineVertex - offset + 1;
    copyLineVertices(offset, size);
    modifiedLineVertices = false;
    firstModifiedLineVertex = PConstants.MAX_INT;
    lastModifiedLineVertex = PConstants.MIN_INT;
  }
  if (modifiedLineColors) {
    int offset = firstModifiedLineColor;
    int size = lastModifiedLineColor - offset + 1;
    copyLineColors(offset, size);
    modifiedLineColors = false;
    firstModifiedLineColor = PConstants.MAX_INT;
    lastModifiedLineColor = PConstants.MIN_INT;
  }
  if (modifiedLineAttributes) {
    int offset = firstModifiedLineAttribute;
    int size = lastModifiedLineAttribute - offset + 1;
    copyLineAttributes(offset, size);
    modifiedLineAttributes = false;
    firstModifiedLineAttribute = PConstants.MAX_INT;
    lastModifiedLineAttribute = PConstants.MIN_INT;
  }
  if (modifiedPointVertices) {
    int offset = firstModifiedPointVertex;
    int size = lastModifiedPointVertex - offset + 1;
    copyPointVertices(offset, size);
    modifiedPointVertices = false;
    firstModifiedPointVertex = PConstants.MAX_INT;
    lastModifiedPointVertex = PConstants.MIN_INT;
  }
  if (modifiedPointColors) {
    int offset = firstModifiedPointColor;
    int size = lastModifiedPointColor - offset + 1;
    copyPointColors(offset, size);
    modifiedPointColors = false;
    firstModifiedPointColor = PConstants.MAX_INT;
    lastModifiedPointColor = PConstants.MIN_INT;
  }
  if (modifiedPointAttributes) {
    int offset = firstModifiedPointAttribute;
    int size = lastModifiedPointAttribute - offset + 1;
    copyPointAttributes(offset, size);
    modifiedPointAttributes = false;
    firstModifiedPointAttribute = PConstants.MAX_INT;
    lastModifiedPointAttribute = PConstants.MIN_INT;
  }
  modified = false;
}
// Per-stream VBO upload helpers. Each one binds the stream's buffer object,
// asks the tessellated geometry to copy the dirty vertex range
// [offset, offset + size) into it, and unbinds the buffer again so no
// GL binding state leaks out.
protected void copyPolyVertices(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyVertex.glId);
  tessGeo.copyPolyVertices(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPolyColors(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyColor.glId);
  tessGeo.copyPolyColors(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPolyNormals(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyNormal.glId);
  tessGeo.copyPolyNormals(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPolyTexCoords(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyTexCoord.glId);
  tessGeo.copyPolyTexCoords(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPolyAmbient(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyAmbient.glId);
  tessGeo.copyPolyAmbient(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPolySpecular(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolySpecular.glId);
  tessGeo.copyPolySpecular(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPolyEmissive(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyEmissive.glId);
  tessGeo.copyPolyEmissive(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPolyShininess(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPolyShininess.glId);
  tessGeo.copyPolyShininess(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
// Generic user-defined vertex attribute; the buffer lives on the attribute.
protected void copyPolyAttrib(VertexAttribute attrib, int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, attrib.buf.glId);
  tessGeo.copyPolyAttribs(attrib, offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyLineVertices(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineVertex.glId);
  tessGeo.copyLineVertices(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyLineColors(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineColor.glId);
  tessGeo.copyLineColors(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
// Line "attributes" are the direction vectors used by the line shader.
protected void copyLineAttributes(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufLineAttrib.glId);
  tessGeo.copyLineDirections(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPointVertices(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointVertex.glId);
  tessGeo.copyPointVertices(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
protected void copyPointColors(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointColor.glId);
  tessGeo.copyPointColors(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
// Point "attributes" are the per-vertex screen-space offsets.
protected void copyPointAttributes(int offset, int size) {
  pgl.bindBuffer(PGL.ARRAY_BUFFER, bufPointAttrib.glId);
  tessGeo.copyPointOffsets(offset, size);
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
}
// Dirty-range trackers. Each grows the modified [first, last] interval of
// one vertex stream (intervals start as [MAX_INT, MIN_INT], i.e. empty)
// and raises both the stream's flag and the shape-wide 'modified' flag so
// updateGeometryImpl() uploads the range on the next draw.
protected void setModifiedPolyVertices(int first, int last) {
  if (first < firstModifiedPolyVertex) firstModifiedPolyVertex = first;
  if (last > lastModifiedPolyVertex) lastModifiedPolyVertex = last;
  modifiedPolyVertices = true;
  modified = true;
}
protected void setModifiedPolyColors(int first, int last) {
  if (first < firstModifiedPolyColor) firstModifiedPolyColor = first;
  if (last > lastModifiedPolyColor) lastModifiedPolyColor = last;
  modifiedPolyColors = true;
  modified = true;
}
protected void setModifiedPolyNormals(int first, int last) {
  if (first < firstModifiedPolyNormal) firstModifiedPolyNormal = first;
  if (last > lastModifiedPolyNormal) lastModifiedPolyNormal = last;
  modifiedPolyNormals = true;
  modified = true;
}
protected void setModifiedPolyTexCoords(int first, int last) {
  if (first < firstModifiedPolyTexCoord) firstModifiedPolyTexCoord = first;
  if (last > lastModifiedPolyTexCoord) lastModifiedPolyTexCoord = last;
  modifiedPolyTexCoords = true;
  modified = true;
}
protected void setModifiedPolyAmbient(int first, int last) {
  if (first < firstModifiedPolyAmbient) firstModifiedPolyAmbient = first;
  if (last > lastModifiedPolyAmbient) lastModifiedPolyAmbient = last;
  modifiedPolyAmbient = true;
  modified = true;
}
protected void setModifiedPolySpecular(int first, int last) {
  if (first < firstModifiedPolySpecular) firstModifiedPolySpecular = first;
  if (last > lastModifiedPolySpecular) lastModifiedPolySpecular = last;
  modifiedPolySpecular = true;
  modified = true;
}
protected void setModifiedPolyEmissive(int first, int last) {
  if (first < firstModifiedPolyEmissive) firstModifiedPolyEmissive = first;
  if (last > lastModifiedPolyEmissive) lastModifiedPolyEmissive = last;
  modifiedPolyEmissive = true;
  modified = true;
}
protected void setModifiedPolyShininess(int first, int last) {
  if (first < firstModifiedPolyShininess) firstModifiedPolyShininess = first;
  if (last > lastModifiedPolyShininess) lastModifiedPolyShininess = last;
  modifiedPolyShininess = true;
  modified = true;
}
// Custom attributes keep their dirty range on the attribute object itself.
protected void setModifiedPolyAttrib(VertexAttribute attrib, int first, int last) {
  if (first < attrib.firstModified) attrib.firstModified = first;
  if (last > attrib.lastModified) attrib.lastModified = last;
  attrib.modified = true;
  modified = true;
}
protected void setModifiedLineVertices(int first, int last) {
  if (first < firstModifiedLineVertex) firstModifiedLineVertex = first;
  if (last > lastModifiedLineVertex) lastModifiedLineVertex = last;
  modifiedLineVertices = true;
  modified = true;
}
protected void setModifiedLineColors(int first, int last) {
  if (first < firstModifiedLineColor) firstModifiedLineColor = first;
  if (last > lastModifiedLineColor) lastModifiedLineColor = last;
  modifiedLineColors = true;
  modified = true;
}
protected void setModifiedLineAttributes(int first, int last) {
  if (first < firstModifiedLineAttribute) firstModifiedLineAttribute = first;
  if (last > lastModifiedLineAttribute) lastModifiedLineAttribute = last;
  modifiedLineAttributes = true;
  modified = true;
}
protected void setModifiedPointVertices(int first, int last) {
  if (first < firstModifiedPointVertex) firstModifiedPointVertex = first;
  if (last > lastModifiedPointVertex) lastModifiedPointVertex = last;
  modifiedPointVertices = true;
  modified = true;
}
protected void setModifiedPointColors(int first, int last) {
  if (first < firstModifiedPointColor) firstModifiedPointColor = first;
  if (last > lastModifiedPointColor) lastModifiedPointColor = last;
  modifiedPointColors = true;
  modified = true;
}
protected void setModifiedPointAttributes(int first, int last) {
  if (first < firstModifiedPointAttribute) firstModifiedPointAttribute = first;
  if (last > lastModifiedPointAttribute) lastModifiedPointAttribute = last;
  modifiedPointAttributes = true;
  modified = true;
}
///////////////////////////////////////////////////////////
//
// Style handling
@Override
public void disableStyle() {
  // Not allowed while a beginShape()/endShape() edit is in progress.
  if (openShape) {
    PGraphics.showWarning(INSIDE_BEGIN_END_ERROR, "disableStyle()");
    return;
  }
  // Saving the current values to use if the style is re-enabled later
  savedStroke = stroke;
  savedStrokeColor = strokeColor;
  savedStrokeWeight = strokeWeight;
  savedStrokeCap = strokeCap;
  savedStrokeJoin = strokeJoin;
  savedFill = fill;
  savedFillColor = fillColor;
  savedTint = tint;
  savedTintColor = tintColor;
  savedAmbientColor = ambientColor;
  savedSpecularColor = specularColor;
  savedEmissiveColor = emissiveColor;
  savedShininess = shininess;
  savedTextureMode = textureMode;
  super.disableStyle();
}
@Override
public void enableStyle() {
  // Restores the style snapshot taken by disableStyle().
  if (savedStroke) {
    setStroke(true);
    setStroke(savedStrokeColor);
    setStrokeWeight(savedStrokeWeight);
    setStrokeCap(savedStrokeCap);
    setStrokeJoin(savedStrokeJoin);
  } else {
    setStroke(false);
  }
  if (savedFill) {
    setFill(true);
    setFill(savedFillColor);
  } else {
    setFill(false);
  }
  // NOTE(review): unlike stroke/fill there is no setTint(false) branch when
  // savedTint is false — confirm this asymmetry is intentional.
  if (savedTint) {
    setTint(true);
    setTint(savedTintColor);
  }
  setAmbient(savedAmbientColor);
  setSpecular(savedSpecularColor);
  setEmissive(savedEmissiveColor);
  setShininess(savedShininess);
  // Texture mode is only meaningful when a texture is attached.
  if (image != null) {
    setTextureMode(savedTextureMode);
  }
  super.enableStyle();
}
@Override
protected void styles(PGraphics g) {
  // Copies the renderer's current drawing style into this shape. The
  // OpenGL path uses the setters (which mark tessellated geometry dirty);
  // other renderers fall back to the base-class field copy.
  if (g instanceof PGraphicsOpenGL) {
    if (g.stroke) {
      setStroke(true);
      setStroke(g.strokeColor);
      setStrokeWeight(g.strokeWeight);
      setStrokeCap(g.strokeCap);
      setStrokeJoin(g.strokeJoin);
    } else {
      setStroke(false);
    }
    if (g.fill) {
      setFill(true);
      setFill(g.fillColor);
    } else {
      setFill(false);
    }
    // NOTE(review): tint is only applied when enabled; there is no
    // setTint(false) branch — mirrors enableStyle(), confirm intentional.
    if (g.tint) {
      setTint(true);
      setTint(g.tintColor);
    }
    setAmbient(g.ambientColor);
    setSpecular(g.specularColor);
    setEmissive(g.emissiveColor);
    setShininess(g.shininess);
    if (image != null) {
      setTextureMode(g.textureMode);
    }
  } else {
    super.styles(g);
  }
}
///////////////////////////////////////////////////////////
//
// Rendering methods
@Override
public void draw(PGraphics g) {
  // OpenGL path: tessellate/upload lazily, then render either the whole
  // group in one call or each child separately when they cannot share a
  // draw call (see fragmentedGroup()). Non-OpenGL renderers fall back to
  // the base-class immediate-mode drawing.
  if (g instanceof PGraphicsOpenGL) {
    PGraphicsOpenGL gl = (PGraphicsOpenGL)g;
    if (visible) {
      pre(gl);
      updateTessellation();
      updateGeometry();
      if (family == GROUP) {
        if (fragmentedGroup(gl)) {
          for (int i = 0; i < childCount; i++) {
            children[i].draw(gl);
          }
        } else {
          PImage tex = null;
          // A single shared texture across the group can be bound once.
          if (textures != null && textures.size() == 1) {
            tex = (PImage)textures.toArray()[0];
          }
          render(gl, tex);
        }
      } else {
        render(gl, image);
      }
      post(gl);
    }
  } else {
    if (family == GEOMETRY) {
      // Convert the retained input geometry back into PShape vertices so
      // the software renderer can draw it.
      inGeoToVertices();
    }
    pre(g);
    drawImpl(g);
    post(g);
  }
}
// Replays the retained input geometry (inGeo) through the base PShape
// vertex API so non-OpenGL renderers can draw this shape. Vertices are
// stored flat as x,y,z triplets; only x,y are used here (2D replay).
private void inGeoToVertices() {
  vertexCount = 0;
  vertexCodeCount = 0;
  if (inGeo.codeCount == 0) {
    // No vertex codes: plain polygon, every entry is a simple vertex.
    for (int i = 0; i < inGeo.vertexCount; i++) {
      int index = 3 * i;
      float x = inGeo.vertices[index++];
      float y = inGeo.vertices[index ];
      super.vertex(x, y);
    }
  } else {
    // Codes present: walk the code stream and re-issue each primitive
    // (vertex, quadratic/bezier/curve vertex, contour break).
    int v;
    float x, y;
    float cx, cy;
    float x2, y2, x3, y3, x4, y4;
    int idx = 0;  // index of the next unconsumed vertex
    boolean insideContour = false;
    for (int j = 0; j < inGeo.codeCount; j++) {
      switch (inGeo.codes[j]) {
      case VERTEX:
        v = 3 * idx;
        x = inGeo.vertices[v++];
        y = inGeo.vertices[v ];
        super.vertex(x, y);
        idx++;
        break;
      case QUADRATIC_VERTEX:
        // Consumes two vertices: control point + anchor.
        v = 3 * idx;
        cx = inGeo.vertices[v++];
        cy = inGeo.vertices[v];
        v = 3 * (idx + 1);
        x3 = inGeo.vertices[v++];
        y3 = inGeo.vertices[v];
        super.quadraticVertex(cx, cy, x3, y3);
        idx += 2;
        break;
      case BEZIER_VERTEX:
        // Consumes three vertices: two control points + anchor.
        v = 3 * idx;
        x2 = inGeo.vertices[v++];
        y2 = inGeo.vertices[v ];
        v = 3 * (idx + 1);
        x3 = inGeo.vertices[v++];
        y3 = inGeo.vertices[v ];
        v = 3 * (idx + 2);
        x4 = inGeo.vertices[v++];
        y4 = inGeo.vertices[v ];
        super.bezierVertex(x2, y2, x3, y3, x4, y4);
        idx += 3;
        break;
      case CURVE_VERTEX:
        v = 3 * idx;
        x = inGeo.vertices[v++];
        y = inGeo.vertices[v ];
        super.curveVertex(x, y);
        idx++;
        break;
      case BREAK:
        // Close the previous contour (if any) and open a new one.
        if (insideContour) {
          super.endContourImpl();
        }
        super.beginContourImpl();
        insideContour = true;
      }
    }
    if (insideContour) {
      super.endContourImpl();
    }
  }
}
// Returns true if some child shapes below this one either
// use different texture maps (or only one texture is used by some while
// others are untextured), or have stroked textures,
// so they cannot rendered in a single call.
// Or accurate 2D mode is enabled, which forces each
// shape to be rendered separately.
protected boolean fragmentedGroup(PGraphicsOpenGL g) {
  // Optimized single-call stroke rendering disabled: always fragment.
  if (g.getHint(DISABLE_OPTIMIZED_STROKE)) {
    return true;
  }
  // Stroked textures cannot share a draw call.
  if (strokedTexture) {
    return true;
  }
  // Mixed texturing (several textures, or textured + untextured children)
  // also forces per-child rendering.
  return textures != null && (1 < textures.size() || untexChild);
}
@Override
protected void pre(PGraphics g) {
  // Non-OpenGL renderers use the base-class matrix/style push.
  if (!(g instanceof PGraphicsOpenGL)) {
    super.pre(g);
    return;
  }
  // OpenGL path: when per-shape styling is disabled, adopt the renderer's
  // current style before drawing.
  if (!style) {
    styles(g);
  }
}
@Override
protected void post(PGraphics g) {
  // The OpenGL path has nothing to undo here; other renderers need the
  // base-class matrix/style pop.
  if (!(g instanceof PGraphicsOpenGL)) {
    super.post(g);
  }
}
@Override
protected void drawGeometry(PGraphics g) {
  // Temporarily exposes the retained input geometry through the PShape
  // vertices/vertexCount fields so the base-class renderer can draw it,
  // then clears them again to avoid holding a stale reference.
  vertexCount = inGeo.vertexCount;
  vertices = inGeo.getVertexData();
  super.drawGeometry(g);
  vertexCount = 0;
  vertices = null;
}
// Render the geometry stored in the root shape as VBOs, for the vertices
// corresponding to this shape. Sometimes we can have root == this.
// Draws this shape's slice of the root VBOs: fill triangles first, then
// (3D only) stroke lines and points, which need their own shaders. Each
// pass is optionally mirrored into the raw recorder for vector export.
protected void render(PGraphicsOpenGL g, PImage texture) {
  if (root == null) {
    // Some error. Root should never be null. At least it should be 'this'.
    throw new RuntimeException("Error rendering PShapeOpenGL, root shape is null");
  }
  if (hasPolys) {
    renderPolys(g, texture);
    if (g.haveRaw()) {
      rawPolys(g, texture);
    }
  }
  if (is3D()) {
    // In 3D mode, the lines and points need to be rendered separately
    // as they require their own shaders.
    if (hasLines) {
      renderLines(g);
      if (g.haveRaw()) {
        rawLines(g);
      }
    }
    if (hasPoints) {
      renderPoints(g);
      if (g.haveRaw()) {
        rawPoints(g);
      }
    }
  }
}
// Draws the poly (fill) triangles of this shape from the root VBOs, one
// index-cache segment at a time. In 2D mode, stroke geometry is also
// tessellated into the poly buffers, so the shader may be swapped
// mid-loop between a lit/textured fill shader and a plain one.
protected void renderPolys(PGraphicsOpenGL g, PImage textureImage) {
  boolean customShader = g.polyShader != null;
  boolean needNormals = customShader ? g.polyShader.accessNormals() : false;
  boolean needTexCoords = customShader ? g.polyShader.accessTexCoords() : false;
  Texture tex = textureImage != null ? g.getTexture(textureImage) : null;
  boolean renderingFill = false, renderingStroke = false;
  PShader shader = null;
  IndexCache cache = tessGeo.polyIndexCache;
  for (int n = firstPolyIndexCache; n <= lastPolyIndexCache; n++) {
    // Segments before the first line/point segment are fill geometry;
    // in 3D everything in the poly buffers is fill.
    if (is3D() || (tex != null && (firstLineIndexCache == -1 ||
                                   n < firstLineIndexCache) &&
                                  (firstPointIndexCache == -1 ||
                                   n < firstPointIndexCache))) {
      // Rendering fill triangles, which can be lit and textured.
      if (!renderingFill) {
        shader = g.getPolyShader(g.lights, tex != null);
        shader.bind();
        renderingFill = true;
      }
    } else {
      // Rendering line or point triangles, which are never lit nor textured.
      if (!renderingStroke) {
        if (tex != null) {
          tex.unbind();
          tex = null;
        }
        if (shader != null && shader.bound()) {
          shader.unbind();
        }
        // If the renderer is 2D, then g.lights should always be false,
        // so no need to worry about that.
        shader = g.getPolyShader(g.lights, false);
        shader.bind();
        renderingFill = false;
        renderingStroke = true;
      }
    }
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    // Vertex data lives in the root shape's buffers; voffset selects
    // this shape's slice.
    shader.setVertexAttribute(root.bufPolyVertex.glId, 4, PGL.FLOAT,
                              0, 4 * voffset * PGL.SIZEOF_FLOAT);
    shader.setColorAttribute(root.bufPolyColor.glId, 4, PGL.UNSIGNED_BYTE,
                             0, 4 * voffset * PGL.SIZEOF_BYTE);
    if (g.lights) {
      // NOTE(review): the normal attribute is set both here and in the
      // (g.lights || needNormals) block below when lights are on — the
      // second call appears redundant; confirm before removing.
      shader.setNormalAttribute(root.bufPolyNormal.glId, 3, PGL.FLOAT,
                                0, 3 * voffset * PGL.SIZEOF_FLOAT);
      shader.setAmbientAttribute(root.bufPolyAmbient.glId, 4, PGL.UNSIGNED_BYTE,
                                 0, 4 * voffset * PGL.SIZEOF_BYTE);
      shader.setSpecularAttribute(root.bufPolySpecular.glId, 4, PGL.UNSIGNED_BYTE,
                                  0, 4 * voffset * PGL.SIZEOF_BYTE);
      shader.setEmissiveAttribute(root.bufPolyEmissive.glId, 4, PGL.UNSIGNED_BYTE,
                                  0, 4 * voffset * PGL.SIZEOF_BYTE);
      shader.setShininessAttribute(root.bufPolyShininess.glId, 1, PGL.FLOAT,
                                   0, voffset * PGL.SIZEOF_FLOAT);
    }
    if (g.lights || needNormals) {
      shader.setNormalAttribute(root.bufPolyNormal.glId, 3, PGL.FLOAT,
                                0, 3 * voffset * PGL.SIZEOF_FLOAT);
    }
    if (tex != null || needTexCoords) {
      shader.setTexcoordAttribute(root.bufPolyTexCoord.glId, 2, PGL.FLOAT,
                                  0, 2 * voffset * PGL.SIZEOF_FLOAT);
      shader.setTexture(tex);
    }
    // Bind any user-defined vertex attributes the shader actually uses.
    for (VertexAttribute attrib: polyAttribs.values()) {
      if (!attrib.active(shader)) continue;
      attrib.bind(pgl);
      shader.setAttributeVBO(attrib.glLoc, attrib.buf.glId,
                             attrib.tessSize, attrib.type,
                             attrib.isColor(), 0, attrib.sizeInBytes(voffset));
    }
    shader.draw(root.bufPolyIndex.glId, icount, ioffset);
  }
  for (VertexAttribute attrib: polyAttribs.values()) {
    if (attrib.active(shader)) attrib.unbind(pgl);
  }
  if (shader != null && shader.bound()) {
    shader.unbind();
  }
}
/**
 * Emits this shape's tessellated fill triangles to the raw recorder
 * (vector export such as PDF/DXF). Each vertex is transformed by the
 * renderer's current modelview; colors are converted from the native
 * packed format back to Java ARGB. In 2D recorders the transformed
 * points are further projected to screen coordinates.
 */
protected void rawPolys(PGraphicsOpenGL g, PImage textureImage) {
  PGraphics raw = g.getRaw();
  raw.colorMode(RGB);
  raw.noStroke();
  raw.beginShape(TRIANGLES);
  float[] vertices = tessGeo.polyVertices;
  int[] color = tessGeo.polyColors;
  float[] uv = tessGeo.polyTexCoords;
  short[] indices = tessGeo.polyIndices;
  IndexCache cache = tessGeo.polyIndexCache;
  for (int n = firstPolyIndexCache; n <= lastPolyIndexCache; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    for (int tr = ioffset / 3; tr < (ioffset + icount) / 3; tr++) {
      int i0 = voffset + indices[3 * tr + 0];
      int i1 = voffset + indices[3 * tr + 1];
      int i2 = voffset + indices[3 * tr + 2];
      float[] src0 = {0, 0, 0, 0};
      float[] src1 = {0, 0, 0, 0};
      float[] src2 = {0, 0, 0, 0};
      float[] pt0 = {0, 0, 0, 0};
      float[] pt1 = {0, 0, 0, 0};
      float[] pt2 = {0, 0, 0, 0};
      int argb0 = PGL.nativeToJavaARGB(color[i0]);
      int argb1 = PGL.nativeToJavaARGB(color[i1]);
      int argb2 = PGL.nativeToJavaARGB(color[i2]);
      PApplet.arrayCopy(vertices, 4 * i0, src0, 0, 4);
      PApplet.arrayCopy(vertices, 4 * i1, src1, 0, 4);
      PApplet.arrayCopy(vertices, 4 * i2, src2, 0, 4);
      // Applying any transformation is currently stored in the
      // modelview matrix of the renderer.
      g.modelview.mult(src0, pt0);
      g.modelview.mult(src1, pt1);
      g.modelview.mult(src2, pt2);
      if (textureImage != null) {
        raw.texture(textureImage);
        if (raw.is3D()) {
          raw.fill(argb0);
          raw.vertex(pt0[X], pt0[Y], pt0[Z], uv[2 * i0 + 0], uv[2 * i0 + 1]);
          raw.fill(argb1);
          raw.vertex(pt1[X], pt1[Y], pt1[Z], uv[2 * i1 + 0], uv[2 * i1 + 1]);
          raw.fill(argb2);
          raw.vertex(pt2[X], pt2[Y], pt2[Z], uv[2 * i2 + 0], uv[2 * i2 + 1]);
        } else if (raw.is2D()) {
          float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sx1 = g.screenXImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sy1 = g.screenYImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sx2 = g.screenXImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          float sy2 = g.screenYImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          raw.fill(argb0);
          raw.vertex(sx0, sy0, uv[2 * i0 + 0], uv[2 * i0 + 1]);
          raw.fill(argb1);
          raw.vertex(sx1, sy1, uv[2 * i1 + 0], uv[2 * i1 + 1]);
          // FIX: third vertex must use argb2 (was argb1), consistent with
          // the 3D branch above and the untextured 2D branch below.
          raw.fill(argb2);
          raw.vertex(sx2, sy2, uv[2 * i2 + 0], uv[2 * i2 + 1]);
        }
      } else {
        if (raw.is3D()) {
          raw.fill(argb0);
          raw.vertex(pt0[X], pt0[Y], pt0[Z]);
          raw.fill(argb1);
          raw.vertex(pt1[X], pt1[Y], pt1[Z]);
          raw.fill(argb2);
          raw.vertex(pt2[X], pt2[Y], pt2[Z]);
        } else if (raw.is2D()) {
          float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sx1 = g.screenXImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sy1 = g.screenYImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sx2 = g.screenXImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          float sy2 = g.screenYImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          raw.fill(argb0);
          raw.vertex(sx0, sy0);
          raw.fill(argb1);
          raw.vertex(sx1, sy1);
          raw.fill(argb2);
          raw.vertex(sx2, sy2);
        }
      }
    }
  }
  raw.endShape();
}
// Draws this shape's stroke-line geometry (3D mode) from the root VBOs
// using the dedicated line shader, one index-cache segment at a time.
protected void renderLines(PGraphicsOpenGL g) {
  PShader shader = g.getLineShader();
  shader.bind();
  IndexCache cache = tessGeo.lineIndexCache;
  for (int n = firstLineIndexCache; n <= lastLineIndexCache; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    shader.setVertexAttribute(root.bufLineVertex.glId, 4, PGL.FLOAT,
                              0, 4 * voffset * PGL.SIZEOF_FLOAT);
    shader.setColorAttribute(root.bufLineColor.glId, 4, PGL.UNSIGNED_BYTE,
                             0, 4 * voffset * PGL.SIZEOF_BYTE);
    // Line "attribute" = direction vector + weight used by the shader to
    // extrude screen-space quads.
    shader.setLineAttribute(root.bufLineAttrib.glId, 4, PGL.FLOAT,
                            0, 4 * voffset * PGL.SIZEOF_FLOAT);
    shader.draw(root.bufLineIndex.glId, icount, ioffset);
  }
  shader.unbind();
}
// Emits this shape's stroke lines to the raw recorder. Each tessellated
// segment occupies six indices (two triangles); only the first and last
// referenced vertices are needed to reconstruct the original line.
protected void rawLines(PGraphicsOpenGL g) {
  PGraphics raw = g.getRaw();
  raw.colorMode(RGB);
  raw.noFill();
  raw.strokeCap(strokeCap);
  raw.strokeJoin(strokeJoin);
  raw.beginShape(LINES);
  float[] vertices = tessGeo.lineVertices;
  int[] color = tessGeo.lineColors;
  float[] attribs = tessGeo.lineDirections;
  short[] indices = tessGeo.lineIndices;
  IndexCache cache = tessGeo.lineIndexCache;
  for (int n = firstLineIndexCache; n <= lastLineIndexCache; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    for (int ln = ioffset / 6; ln < (ioffset + icount) / 6; ln++) {
      // Each line segment is defined by six indices since it's
      // formed by two triangles. We only need the first and last
      // vertices.
      // This bunch of vertices could also be the bevel triangles,
      // with we detect this situation by looking at the line weight.
      int i0 = voffset + indices[6 * ln + 0];
      int i1 = voffset + indices[6 * ln + 5];
      // The 4th attribute component holds half the stroke weight.
      float sw0 = 2 * attribs[4 * i0 + 3];
      float sw1 = 2 * attribs[4 * i1 + 3];
      if (PGraphicsOpenGL.zero(sw0)) continue; // Bevel triangles, skip.
      float[] src0 = {0, 0, 0, 0};
      float[] src1 = {0, 0, 0, 0};
      float[] pt0 = {0, 0, 0, 0};
      float[] pt1 = {0, 0, 0, 0};
      int argb0 = PGL.nativeToJavaARGB(color[i0]);
      int argb1 = PGL.nativeToJavaARGB(color[i1]);
      PApplet.arrayCopy(vertices, 4 * i0, src0, 0, 4);
      PApplet.arrayCopy(vertices, 4 * i1, src1, 0, 4);
      // Applying any transformation is currently stored in the
      // modelview matrix of the renderer.
      g.modelview.mult(src0, pt0);
      g.modelview.mult(src1, pt1);
      if (raw.is3D()) {
        raw.strokeWeight(sw0);
        raw.stroke(argb0);
        raw.vertex(pt0[X], pt0[Y], pt0[Z]);
        raw.strokeWeight(sw1);
        raw.stroke(argb1);
        raw.vertex(pt1[X], pt1[Y], pt1[Z]);
      } else if (raw.is2D()) {
        float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        float sx1 = g.screenXImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
        float sy1 = g.screenYImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
        raw.strokeWeight(sw0);
        raw.stroke(argb0);
        raw.vertex(sx0, sy0);
        raw.strokeWeight(sw1);
        raw.stroke(argb1);
        raw.vertex(sx1, sy1);
      }
    }
  }
  raw.endShape();
}
// Draws this shape's point geometry (3D mode) from the root VBOs using
// the dedicated point shader, one index-cache segment at a time.
protected void renderPoints(PGraphicsOpenGL g) {
  PShader shader = g.getPointShader();
  shader.bind();
  IndexCache cache = tessGeo.pointIndexCache;
  for (int n = firstPointIndexCache; n <= lastPointIndexCache; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    shader.setVertexAttribute(root.bufPointVertex.glId, 4, PGL.FLOAT,
                              0, 4 * voffset * PGL.SIZEOF_FLOAT);
    shader.setColorAttribute(root.bufPointColor.glId, 4, PGL.UNSIGNED_BYTE,
                             0, 4 * voffset * PGL.SIZEOF_BYTE);
    // Point "attribute" = 2D screen-space offset used to expand the point
    // into a quad or fan.
    shader.setPointAttribute(root.bufPointAttrib.glId, 2, PGL.FLOAT,
                             0, 2 * voffset * PGL.SIZEOF_FLOAT);
    shader.draw(root.bufPointIndex.glId, icount, ioffset);
  }
  shader.unbind();
}
// Emits this shape's points to the raw recorder, reconstructing each
// point's stroke weight from the tessellated offset attribute and
// skipping over the triangles that expanded it (perim per point).
protected void rawPoints(PGraphicsOpenGL g) {
  PGraphics raw = g.getRaw();
  raw.colorMode(RGB);
  raw.noFill();
  raw.strokeCap(strokeCap);
  raw.beginShape(POINTS);
  float[] vertices = tessGeo.pointVertices;
  int[] color = tessGeo.pointColors;
  float[] attribs = tessGeo.pointOffsets;
  short[] indices = tessGeo.pointIndices;
  IndexCache cache = tessGeo.pointIndexCache;
  // NOTE(review): unlike rawPolys/rawLines, this iterates ALL cache
  // segments (0..cache.size) rather than firstPointIndexCache..
  // lastPointIndexCache — confirm whether this is intentional or should
  // be restricted to this shape's segments.
  for (int n = 0; n < cache.size; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    int pt = ioffset;
    while (pt < (ioffset + icount) / 3) {
      // Sign of the size attribute distinguishes round from square points.
      float size = attribs[2 * pt + 2];
      float weight;
      int perim;
      if (0 < size) { // round point
        weight = +size / 0.5f;
        perim = PApplet.min(PGraphicsOpenGL.MAX_POINT_ACCURACY,
                            PApplet.max(PGraphicsOpenGL.MIN_POINT_ACCURACY,
                                        (int) (TWO_PI * weight /
                                        PGraphicsOpenGL.POINT_ACCURACY_FACTOR))) + 1;
      } else { // Square point
        weight = -size / 0.5f;
        perim = 5;
      }
      int i0 = voffset + indices[3 * pt];
      int argb0 = PGL.nativeToJavaARGB(color[i0]);
      float[] pt0 = {0, 0, 0, 0};
      float[] src0 = {0, 0, 0, 0};
      PApplet.arrayCopy(vertices, 4 * i0, src0, 0, 4);
      g.modelview.mult(src0, pt0);
      if (raw.is3D()) {
        raw.strokeWeight(weight);
        raw.stroke(argb0);
        raw.vertex(pt0[X], pt0[Y], pt0[Z]);
      } else if (raw.is2D()) {
        float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        raw.strokeWeight(weight);
        raw.stroke(argb0);
        raw.vertex(sx0, sy0);
      }
      // Advance past all the triangles generated for this point.
      pt += perim;
    }
  }
  raw.endShape();
}
}
| PShapeOpenGL |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/redshift/parser/RedshiftExprParser.java | {
"start": 704,
"end": 3995
} | class ____
extends PGExprParser {
// Builds a parser over a fresh Redshift-aware lexer and primes the first
// token so parsing can start immediately.
public RedshiftExprParser(String sql, SQLParserFeature... features) {
  super(new RedshiftLexer(sql, features));
  lexer.nextToken();
  dbType = DbType.redshift;
}
// Wraps an externally managed lexer; the caller is responsible for having
// advanced it to the first token.
public RedshiftExprParser(Lexer lexer) {
  super(lexer);
  dbType = DbType.redshift;
}
@Override
protected SQLColumnDefinition parseColumnSpecific(SQLColumnDefinition column) {
  // Handles Redshift-specific column options: ENCODE <codec|AUTO>,
  // SORTKEY/DISTKEY markers, and IDENTITY(seed, step). Anything else is
  // returned unchanged for the generic column parser to finish.
  switch (lexer.token()) {
    case ENCODE: {
      lexer.nextToken();
      SQLExpr codecExpr;
      // AUTO is a keyword token here, so it cannot go through expr().
      if (lexer.token() == Token.AUTO) {
        codecExpr = new SQLIdentifierExpr("AUTO");
        lexer.nextToken();
      } else {
        codecExpr = expr();
      }
      RedshiftColumnEncode sqlColumnEncode = new RedshiftColumnEncode();
      sqlColumnEncode.setExpr(codecExpr);
      column.addConstraint(sqlColumnEncode);
      return parseColumnRest(column);
    }
    case SORTKEY:
    case DISTKEY:
      RedshiftColumnKey key = new RedshiftColumnKey();
      if (lexer.token() == Token.DISTKEY) {
        key.setDistKey(true);
      } else {
        key.setSortKey(true);
      }
      lexer.nextToken();
      column.addConstraint(key);
      return parseColumnRest(column);
    case IDENTITY:
      lexer.nextToken();
      SQLColumnDefinition.Identity identity = parseIdentity();
      column.setIdentity(identity);
      return parseColumnRest(column);
    default:
      return column;
  }
}
@Override
public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) {
  // Redshift extensions on top of the PostgreSQL column grammar:
  // GENERATED BY DEFAULT AS IDENTITY(seed, step) and COLLATE <expr>.
  if (lexer.identifierEquals(FnvHash.Constants.GENERATED)) {
    lexer.nextToken();
    accept(Token.BY);
    accept(Token.DEFAULT);
    accept(Token.AS);
    accept(Token.IDENTITY);
    SQLColumnDefinition.Identity id = parseIdentity();
    column.setGeneratedAlwaysAs(id);
  } else if (lexer.identifierEquals(FnvHash.Constants.COLLATE)) {
    lexer.nextToken();
    column.setCollateExpr(expr());
  }
  // Delegate remaining (common) column clauses to the PG parser.
  return super.parseColumnRest(column);
}
@Override
protected SQLColumnDefinition.Identity parseIdentity() {
  // Parses "(seed [, increment])". The seed is mandatory; a non-integer
  // in either position is a parse error.
  accept(Token.LPAREN);
  SQLColumnDefinition.Identity ident = new SQLColumnDefinition.Identity();
  parseIdentifySpecific();
  if (lexer.token() == Token.LITERAL_INT) {
    ident.setSeed(lexer.integerValue().intValue());
    lexer.nextToken();
  } else {
    throw new ParserException("TODO : " + lexer.info());
  }
  if (lexer.token() == Token.COMMA) {
    lexer.nextToken();
    if (lexer.token() == Token.LITERAL_INT) {
      ident.setIncrement(lexer.integerValue().intValue());
      lexer.nextToken();
    } else {
      throw new ParserException("TODO : " + lexer.info());
    }
  }
  accept(Token.RPAREN);
  return ident;
}
}
| RedshiftExprParser |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonListMultimapValues.java | {
"start": 1483,
"end": 34505
} | class ____<V> extends RedissonExpirable implements RList<V> {
private final RList<V> list;
private final Object key;
private final String timeoutSetName;
// View over one multimap entry's value list. 'key' is the multimap key this
// list belongs to; 'timeoutSetName' is the zset holding per-key expirations
// consulted by every read/remove operation below.
public RedissonListMultimapValues(Codec codec, CommandAsyncExecutor commandExecutor, String name, String timeoutSetName, Object key) {
  super(codec, commandExecutor, name);
  this.timeoutSetName = timeoutSetName;
  this.key = key;
  this.list = new RedissonList<V>(codec, commandExecutor, name, null);
}
@Override
public <KOut, VOut> RCollectionMapReduce<V, KOut, VOut> mapReduce() {
  // NOTE(review): returns null instead of throwing; callers must
  // null-check — confirm this is the intended contract.
  return null;
}
// Expiration and rename operations are managed by the owning multimap
// object, not by this per-key view, hence unsupported here.
@Override
public RFuture<Boolean> clearExpireAsync() {
  throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
protected RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
  throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
  throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Long> remainTimeToLiveAsync() {
  throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Void> renameAsync(String newName) {
  throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Boolean> renamenxAsync(String newName) {
  throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Long> sizeInMemoryAsync() {
  // Memory usage covers both the value list and the expiration zset entry.
  List<Object> keys = Arrays.<Object>asList(getRawName(), timeoutSetName);
  return super.sizeInMemoryAsync(keys);
}
// Deletes this key's value list, honoring the multimap's per-key expiry:
// if the key's score in the timeout zset is already in the past, the entry
// is considered gone and 0 is returned; otherwise the zset entry is removed
// and, if it existed, the list itself is deleted.
// KEYS[1] = timeout zset, KEYS[2] = value list;
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key.
public RFuture<Boolean> deleteAsync() {
  return commandExecutor.evalWriteAsync(getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
          "local expireDate = 92233720368547758; " +
          "local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
        + "if expireDateScore ~= false then "
            + "expireDate = tonumber(expireDateScore) "
        + "end; "
        + "if expireDate <= tonumber(ARGV[1]) then "
            + "return 0;"
        + "end; " +
          "local res = redis.call('zrem', KEYS[1], ARGV[2]); " +
          "if res > 0 then " +
              "redis.call('del', KEYS[2]); " +
          "end; " +
          "return res; ",
      Arrays.<Object>asList(timeoutSetName, getRawName()), System.currentTimeMillis(), key);
}
@Override
public RFuture<Boolean> copyAsync(List<Object> keys, int database, boolean replace) {
  // Copying a single multimap entry's list in isolation is not meaningful.
  throw new UnsupportedOperationException();
}
@Override
public int size() {
  return get(sizeAsync());
}
// Returns the list length, or 0 when the multimap key has expired
// according to the timeout zset.
// KEYS[1] = timeout zset, KEYS[2] = value list;
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key.
public RFuture<Integer> sizeAsync() {
  return commandExecutor.evalReadAsync(getRawName(), codec, RedisCommands.EVAL_INTEGER,
          "local expireDate = 92233720368547758; " +
          "local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
        + "if expireDateScore ~= false then "
            + "expireDate = tonumber(expireDateScore) "
        + "end; "
        + "if expireDate <= tonumber(ARGV[1]) then "
            + "return 0;"
        + "end; "
        + "return redis.call('llen', KEYS[2]);",
     Arrays.<Object>asList(timeoutSetName, getRawName()),
     System.currentTimeMillis(), encodeMapKey(key));
}
@Override
public boolean isEmpty() {
  return size() == 0;
}
@Override
public boolean contains(Object o) {
  return get(containsAsync(o));
}
@Override
public Iterator<V> iterator() {
  return listIterator();
}
@Override
public Object[] toArray() {
  // Snapshot-based: reads the whole list once, then converts.
  List<V> list = readAll();
  return list.toArray();
}
@Override
public List<V> readAll() {
  return get(readAllAsync());
}
@Override
public RFuture<List<V>> readAllAsync() {
  // Full range read; expiry handling happens inside rangeAsync.
  return rangeAsync(0, -1);
}
@Override
public <T> T[] toArray(T[] a) {
  List<V> list = readAll();
  return list.toArray(a);
}
// Mutating operations do not need the expiry check, so they delegate
// straight to the plain list view.
@Override
public boolean add(V e) {
  return list.add(e);
}
@Override
public RFuture<Boolean> addAsync(V e) {
  return list.addAsync(e);
}
@Override
public RFuture<Boolean> addAsync(int index, V element) {
  return list.addAsync(index, element);
}
@Override
public boolean remove(Object o) {
return get(removeAsync(o));
}
@Override
public RFuture<Boolean> removeAsync(Object o) {
return removeAsync(o, 1);
}
@Override
public RFuture<Boolean> removeAsync(Object o, int count) {
return commandExecutor.evalWriteAsync(getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[3]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; "
+ "return redis.call('lrem', KEYS[2], ARGV[2], ARGV[4]) > 0 and 1 or 0;",
Arrays.<Object>asList(timeoutSetName, getRawName()),
System.currentTimeMillis(), count, encodeMapKey(key), encodeMapValue(o));
}
@Override
public boolean remove(Object o, int count) {
return get(removeAsync(o, count));
}
@Override
public RFuture<Boolean> containsAllAsync(Collection<?> c) {
List<Object> args = new ArrayList<Object>(c.size() + 2);
args.add(System.currentTimeMillis());
args.add(encodeMapKey(key));
encodeMapValues(args, c);
return commandExecutor.evalReadAsync(getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; " +
"local items = redis.call('lrange', KEYS[2], 0, -1);" +
"for i = 1, #items, 1 do " +
"for j = #ARGV, 3, -1 do "
+ "if ARGV[j] == items[i] "
+ "then table.remove(ARGV, j) end "
+ "end; "
+ "end;"
+ "return #ARGV == 2 and 1 or 0; ",
Arrays.<Object>asList(timeoutSetName, getRawName()), args.toArray());
}
@Override
public boolean containsAll(Collection<?> c) {
return get(containsAllAsync(c));
}
@Override
public boolean addAll(Collection<? extends V> c) {
return list.addAll(c);
}
@Override
public RFuture<Boolean> addAllAsync(final Collection<? extends V> c) {
return list.addAllAsync(c);
}
@Override
public RFuture<Boolean> addAllAsync(int index, Collection<? extends V> coll) {
return list.addAllAsync(index, coll);
}
@Override
public boolean addAll(int index, Collection<? extends V> coll) {
return list.addAll(index, coll);
}
@Override
public RFuture<Boolean> removeAllAsync(Collection<?> c) {
List<Object> args = new ArrayList<Object>(c.size() + 2);
args.add(System.currentTimeMillis());
args.add(encodeMapKey(key));
encodeMapValues(args, c);
return commandExecutor.evalWriteAsync(getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; " +
"local v = 0 " +
"for i = 3, #ARGV, 1 do "
+ "if redis.call('lrem', KEYS[2], 0, ARGV[i]) == 1 "
+ "then v = 1 end "
+"end "
+ "return v ",
Arrays.<Object>asList(timeoutSetName, getRawName()), args.toArray());
}
@Override
public boolean removeAll(Collection<?> c) {
return get(removeAllAsync(c));
}
@Override
public boolean retainAll(Collection<?> c) {
return get(retainAllAsync(c));
}
@Override
public RFuture<Boolean> retainAllAsync(Collection<?> c) {
List<Object> args = new ArrayList<Object>(c.size() + 2);
args.add(System.currentTimeMillis());
args.add(encodeMapKey(key));
encodeMapValues(args, c);
return commandExecutor.evalWriteAsync(getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; " +
"local changed = 0; " +
"local s = redis.call('lrange', KEYS[2], 0, -1); "
+ "local i = 1; "
+ "while i <= #s do "
+ "local element = s[i]; "
+ "local isInAgrs = false; "
+ "for j = 3, #ARGV, 1 do "
+ "if ARGV[j] == element then "
+ "isInAgrs = true; "
+ "break; "
+ "end; "
+ "end; "
+ "if isInAgrs == false then "
+ "redis.call('lrem', KEYS[2], 0, element); "
+ "changed = 1; "
+ "end; "
+ "i = i + 1; "
+ "end; "
+ "return changed; ",
Arrays.<Object>asList(timeoutSetName, getRawName()), args.toArray());
}
@Override
public void clear() {
delete();
}
@Override
public List<V> get(int... indexes) {
return get(getAsync(indexes));
}
@Override
public Iterator<V> distributedIterator(final int count) {
String iteratorName = "__redisson_set_cursor_{" + getRawName() + "}";
return distributedIterator(iteratorName, count);
}
@Override
public Iterator<V> distributedIterator(final String iteratorName, final int count) {
return new RedissonBaseIterator<V>() {
@Override
protected ScanResult<Object> iterator(RedisClient client, String nextIterPos) {
return distributedScanIterator(iteratorName, count);
}
@Override
protected void remove(Object value) {
RedissonListMultimapValues.this.remove((V) value);
}
};
}
private ScanResult<Object> distributedScanIterator(String iteratorName, int count) {
return get(distributedScanIteratorAsync(iteratorName, count));
}
private RFuture<ScanResult<Object>> distributedScanIteratorAsync(String iteratorName, int count) {
return commandExecutor.evalWriteAsync(getRawName(), codec, RedisCommands.EVAL_SCAN,
"local cursor = redis.call('get', KEYS[3]); "
+ "if cursor ~= false then "
+ "cursor = tonumber(cursor); "
+ "else "
+ "cursor = 0;"
+ "end;"
+ "if start_index == -1 then "
+ "return {0, {}}; "
+ "end;"
+ "local end_index = start_index + ARGV[1];"
+ "local result; "
+ "result = redis.call('lrange', KEYS[1], start_index, end_index - 1); "
+ "if end_index > redis.call('llen', KEYS[1]) then "
+ "end_index = -1;"
+ "end; "
+ "redis.call('setex', KEYS[2], 3600, end_index);"
+ "local expireDate = 92233720368547758; "
+ "local expirations = redis.call('zmscore', KEYS[1], result[2])"
+ "for i = #expirations, 1, -1 do "
+ "if expirations[i] ~= false then "
+ "local expireDate = tonumber(expireDateScore) "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "table.remove(result[2], i);"
+ "end; "
+ "end; "
+ "end; "
+ "return {end_index, result[2]};",
Arrays.<Object>asList(timeoutSetName, getRawName(), iteratorName), System.currentTimeMillis(), count);
}
@Override
public RFuture<List<V>> getAsync(int... indexes) {
List<Object> params = new ArrayList<Object>();
params.add(System.currentTimeMillis());
params.add(encodeMapKey(key));
for (Integer index : indexes) {
params.add(index);
}
return commandExecutor.evalReadAsync(getRawName(), codec, RedisCommands.EVAL_LIST,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore); "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return nil;"
+ "end; " +
"local result = {}; " +
"for i = 3, #ARGV, 1 do "
+ "local value = redis.call('lindex', KEYS[1], ARGV[i]);"
+ "table.insert(result, value);" +
"end; " +
"return result;",
Collections.<Object>singletonList(getRawName()), params.toArray());
}
@Override
public RFuture<V> getAsync(int index) {
return commandExecutor.evalReadAsync(getRawName(), codec, RedisCommands.EVAL_MAP_VALUE,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[3]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore); "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return nil;"
+ "end; "
+ "return redis.call('lindex', KEYS[2], ARGV[2]);",
Arrays.<Object>asList(timeoutSetName, getRawName()),
System.currentTimeMillis(), index, encodeMapKey(key));
}
@Override
public V get(int index) {
return getValue(index);
}
V getValue(int index) {
return get(getAsync(index));
}
public V set(int index, V element) {
return list.set(index, element);
}
@Override
public RFuture<V> setAsync(int index, V element) {
return list.setAsync(index, element);
}
@Override
public void fastSet(int index, V element) {
list.fastSet(index, element);
}
@Override
public RFuture<Void> fastSetAsync(int index, V element) {
return list.fastSetAsync(index, element);
}
@Override
public void add(int index, V element) {
addAll(index, Collections.singleton(element));
}
@Override
public V remove(int index) {
return list.remove(index);
}
@Override
public RFuture<V> removeAsync(int index) {
return list.removeAsync(index);
}
@Override
public void fastRemove(int index) {
list.fastRemove(index);
}
@Override
public RFuture<Void> fastRemoveAsync(int index) {
return list.fastRemoveAsync(index);
}
@Override
public int indexOf(Object o) {
return get(indexOfAsync(o));
}
@Override
public RFuture<Boolean> containsAsync(Object o) {
return indexOfAsync(o, new BooleanNumberReplayConvertor(-1L));
}
private <R> RFuture<R> indexOfAsync(Object o, Convertor<R> convertor) {
return commandExecutor.evalReadAsync(getRawName(), codec, new RedisCommand<R>("EVAL", convertor),
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore); "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return -1;"
+ "end; " +
"local items = redis.call('lrange', KEYS[2], 0, -1); " +
"for i=1,#items do " +
"if items[i] == ARGV[3] then " +
"return i - 1; " +
"end; " +
"end; " +
"return -1;",
Arrays.<Object>asList(timeoutSetName, getRawName()),
System.currentTimeMillis(), encodeMapKey(key), encodeMapValue(o));
}
@Override
public RFuture<Integer> indexOfAsync(Object o) {
return indexOfAsync(o, new IntegerReplayConvertor());
}
@Override
public int lastIndexOf(Object o) {
return get(lastIndexOfAsync(o));
}
@Override
public RFuture<Integer> lastIndexOfAsync(Object o) {
return commandExecutor.evalReadAsync(getRawName(), codec, RedisCommands.EVAL_INTEGER,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore); "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return -1;"
+ "end; " +
"local items = redis.call('lrange', KEYS[1], 0, -1) " +
"for i = #items, 1, -1 do " +
"if items[i] == ARGV[1] then " +
"return i - 1 " +
"end " +
"end " +
"return -1",
Arrays.<Object>asList(timeoutSetName, getRawName()),
System.currentTimeMillis(), encodeMapKey(key), encodeMapValue(o));
}
@Override
public void trim(int fromIndex, int toIndex) {
list.trim(fromIndex, toIndex);
}
@Override
public RFuture<Void> trimAsync(int fromIndex, int toIndex) {
return list.trimAsync(fromIndex, toIndex);
}
@Override
public ListIterator<V> listIterator() {
return listIterator(0);
}
@Override
public ListIterator<V> listIterator(final int ind) {
return new ListIterator<V>() {
private V prevCurrentValue;
private V nextCurrentValue;
private V currentValueHasRead;
private int currentIndex = ind - 1;
private boolean hasBeenModified = true;
@Override
public boolean hasNext() {
V val = RedissonListMultimapValues.this.getValue(currentIndex+1);
if (val != null) {
nextCurrentValue = val;
}
return val != null;
}
@Override
public V next() {
if (nextCurrentValue == null && !hasNext()) {
throw new NoSuchElementException("No such element at index " + currentIndex);
}
currentIndex++;
currentValueHasRead = nextCurrentValue;
nextCurrentValue = null;
hasBeenModified = false;
return currentValueHasRead;
}
@Override
public void remove() {
if (currentValueHasRead == null) {
throw new IllegalStateException("Neither next nor previous have been called");
}
if (hasBeenModified) {
throw new IllegalStateException("Element been already deleted");
}
RedissonListMultimapValues.this.remove(currentIndex);
currentIndex--;
hasBeenModified = true;
currentValueHasRead = null;
}
@Override
public boolean hasPrevious() {
if (currentIndex < 0) {
return false;
}
V val = RedissonListMultimapValues.this.getValue(currentIndex);
if (val != null) {
prevCurrentValue = val;
}
return val != null;
}
@Override
public V previous() {
if (prevCurrentValue == null && !hasPrevious()) {
throw new NoSuchElementException("No such element at index " + currentIndex);
}
currentIndex--;
hasBeenModified = false;
currentValueHasRead = prevCurrentValue;
prevCurrentValue = null;
return currentValueHasRead;
}
@Override
public int nextIndex() {
return currentIndex + 1;
}
@Override
public int previousIndex() {
return currentIndex;
}
@Override
public void set(V e) {
if (hasBeenModified) {
throw new IllegalStateException();
}
RedissonListMultimapValues.this.fastSet(currentIndex, e);
}
@Override
public void add(V e) {
RedissonListMultimapValues.this.add(currentIndex+1, e);
currentIndex++;
hasBeenModified = true;
}
};
}
@Override
public RList<V> subList(int fromIndex, int toIndex) {
int size = size();
if (fromIndex < 0 || toIndex > size) {
throw new IndexOutOfBoundsException("fromIndex: " + fromIndex + " toIndex: " + toIndex + " size: " + size);
}
if (fromIndex > toIndex) {
throw new IllegalArgumentException("fromIndex: " + fromIndex + " toIndex: " + toIndex);
}
return new RedissonSubList<V>(codec, commandExecutor, getRawName(), fromIndex, toIndex);
}
@Override
@SuppressWarnings("AvoidInlineConditionals")
public String toString() {
Iterator<V> it = iterator();
if (! it.hasNext())
return "[]";
StringBuilder sb = new StringBuilder();
sb.append('[');
for (;;) {
V e = it.next();
sb.append(e == this ? "(this Collection)" : e);
if (! it.hasNext())
return sb.append(']').toString();
sb.append(',').append(' ');
}
}
@Override
@SuppressWarnings("AvoidInlineConditionals")
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof List))
return false;
Iterator<V> e1 = iterator();
Iterator<?> e2 = ((List<?>) o).iterator();
while (e1.hasNext() && e2.hasNext()) {
V o1 = e1.next();
Object o2 = e2.next();
if (!(o1==null ? o2==null : o1.equals(o2)))
return false;
}
return !(e1.hasNext() || e2.hasNext());
}
@Override
@SuppressWarnings("AvoidInlineConditionals")
public int hashCode() {
int hashCode = 1;
for (V e : this) {
hashCode = 31*hashCode + (e==null ? 0 : e.hashCode());
}
return hashCode;
}
@Override
public RFuture<Integer> addAfterAsync(V elementToFind, V element) {
return list.addAfterAsync(elementToFind, element);
}
@Override
public RFuture<Integer> addBeforeAsync(V elementToFind, V element) {
return list.addBeforeAsync(elementToFind, element);
}
@Override
public int addAfter(V elementToFind, V element) {
return list.addAfter(elementToFind, element);
}
@Override
public int addBefore(V elementToFind, V element) {
return list.addBefore(elementToFind, element);
}
@Override
public RFuture<List<V>> readSortAsync(SortOrder order) {
return list.readSortAsync(order);
}
@Override
public List<V> readSort(SortOrder order) {
return list.readSort(order);
}
@Override
public RFuture<List<V>> readSortAsync(SortOrder order, int offset, int count) {
return list.readSortAsync(order, offset, count);
}
@Override
public List<V> readSort(SortOrder order, int offset, int count) {
return list.readSort(order, offset, count);
}
@Override
public List<V> readSort(String byPattern, SortOrder order, int offset, int count) {
return list.readSort(byPattern, order, offset, count);
}
@Override
public RFuture<List<V>> readSortAsync(String byPattern, SortOrder order, int offset, int count) {
return list.readSortAsync(byPattern, order, offset, count);
}
@Override
public <T> Collection<T> readSort(String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
return list.readSort(byPattern, getPatterns, order, offset, count);
}
@Override
public <T> RFuture<Collection<T>> readSortAsync(String byPattern, List<String> getPatterns, SortOrder order, int offset,
int count) {
return list.readSortAsync(byPattern, getPatterns, order, offset, count);
}
@Override
public List<V> readSortAlpha(SortOrder order) {
return list.readSortAlpha(order);
}
@Override
public List<V> readSortAlpha(SortOrder order, int offset, int count) {
return list.readSortAlpha(order, offset, count);
}
@Override
public List<V> readSortAlpha(String byPattern, SortOrder order) {
return list.readSortAlpha(byPattern, order);
}
@Override
public List<V> readSortAlpha(String byPattern, SortOrder order, int offset, int count) {
return list.readSortAlpha(byPattern, order, offset, count);
}
@Override
public <T> Collection<T> readSortAlpha(String byPattern, List<String> getPatterns, SortOrder order) {
return list.readSortAlpha(byPattern, getPatterns, order);
}
@Override
public <T> Collection<T> readSortAlpha(String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
return list.readSortAlpha(byPattern, getPatterns, order, offset, count);
}
@Override
public RFuture<List<V>> readSortAlphaAsync(SortOrder order) {
return list.readSortAlphaAsync(order);
}
@Override
public RFuture<List<V>> readSortAlphaAsync(SortOrder order, int offset, int count) {
return list.readSortAlphaAsync(order, offset, count);
}
@Override
public RFuture<List<V>> readSortAlphaAsync(String byPattern, SortOrder order) {
return list.readSortAlphaAsync(byPattern, order);
}
@Override
public RFuture<List<V>> readSortAlphaAsync(String byPattern, SortOrder order, int offset, int count) {
return list.readSortAlphaAsync(byPattern, order, offset, count);
}
@Override
public <T> RFuture<Collection<T>> readSortAlphaAsync(String byPattern, List<String> getPatterns, SortOrder order) {
return list.readSortAlphaAsync(byPattern, getPatterns, order);
}
@Override
public <T> RFuture<Collection<T>> readSortAlphaAsync(String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
return list.readSortAlphaAsync(byPattern, getPatterns, order, offset, count);
}
@Override
public int sortTo(String destName, SortOrder order) {
return list.sortTo(destName, order);
}
@Override
public RFuture<Integer> sortToAsync(String destName, SortOrder order) {
return list.sortToAsync(destName, order);
}
public List<V> readSort(String byPattern, SortOrder order) {
return list.readSort(byPattern, order);
}
public RFuture<List<V>> readSortAsync(String byPattern, SortOrder order) {
return list.readSortAsync(byPattern, order);
}
public <T> Collection<T> readSort(String byPattern, List<String> getPatterns, SortOrder order) {
return list.readSort(byPattern, getPatterns, order);
}
public <T> RFuture<Collection<T>> readSortAsync(String byPattern, List<String> getPatterns, SortOrder order) {
return list.readSortAsync(byPattern, getPatterns, order);
}
public int sortTo(String destName, SortOrder order, int offset, int count) {
return list.sortTo(destName, order, offset, count);
}
public int sortTo(String destName, String byPattern, SortOrder order) {
return list.sortTo(destName, byPattern, order);
}
public RFuture<Integer> sortToAsync(String destName, SortOrder order, int offset, int count) {
return list.sortToAsync(destName, order, offset, count);
}
public int sortTo(String destName, String byPattern, SortOrder order, int offset, int count) {
return list.sortTo(destName, byPattern, order, offset, count);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, SortOrder order) {
return list.sortToAsync(destName, byPattern, order);
}
public int sortTo(String destName, String byPattern, List<String> getPatterns, SortOrder order) {
return list.sortTo(destName, byPattern, getPatterns, order);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, SortOrder order, int offset,
int count) {
return list.sortToAsync(destName, byPattern, order, offset, count);
}
public int sortTo(String destName, String byPattern, List<String> getPatterns, SortOrder order, int offset,
int count) {
return list.sortTo(destName, byPattern, getPatterns, order, offset, count);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, List<String> getPatterns,
SortOrder order) {
return list.sortToAsync(destName, byPattern, getPatterns, order);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, List<String> getPatterns,
SortOrder order, int offset, int count) {
return list.sortToAsync(destName, byPattern, getPatterns, order, offset, count);
}
@Override
public RFuture<List<V>> rangeAsync(int toIndex) {
return rangeAsync(0, toIndex);
}
@Override
public RFuture<List<V>> rangeAsync(int fromIndex, int toIndex) {
return commandExecutor.evalReadAsync(getRawName(), codec, RedisCommands.EVAL_MAP_VALUE_LIST,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return {};"
+ "end; "
+ "return redis.call('lrange', KEYS[2], ARGV[3], ARGV[4]);",
Arrays.<Object>asList(timeoutSetName, getRawName()),
System.currentTimeMillis(), encodeMapKey(key), fromIndex, toIndex);
}
@Override
public List<V> range(int toIndex) {
return get(rangeAsync(toIndex));
}
@Override
public List<V> range(int fromIndex, int toIndex) {
return get(rangeAsync(fromIndex, toIndex));
}
}
| RedissonListMultimapValues |
java | apache__flink | flink-architecture-tests/flink-architecture-tests-production/src/main/java/org/apache/flink/architecture/rules/CheckpointingConfigurationAccessRules.java | {
"start": 3491,
"end": 5774
} | class ____ to a set of their whitelisted method names.
* @throws ExceptionInInitializerError if a method cannot be found.
*/
private static Map<String, Set<String>> buildAllowedAccessMap() {
try {
Map<String, Set<String>> map = new HashMap<>();
// Whitelist for CheckpointingOptions
Set<String> checkpointingOptionsMethods =
new HashSet<>(
List.of(
CheckpointingOptions.class
.getMethod(
"isUnalignedCheckpointEnabled",
Configuration.class)
.getName(),
CheckpointingOptions.class
.getMethod(
"isCheckpointingEnabled", Configuration.class)
.getName(),
CheckpointingOptions.class
.getMethod("getCheckpointingMode", Configuration.class)
.getName(),
CheckpointingOptions.class
.getMethod(
"isUnalignedCheckpointInterruptibleTimersEnabled",
Configuration.class)
.getName()));
map.put(CheckpointingOptions.class.getName(), checkpointingOptionsMethods);
// Whitelist for CheckpointConfig
Set<String> checkpointConfigMethods =
new HashSet<>(
List.of(
CheckpointConfig.class
.getMethod("configure", ReadableConfig.class)
.getName()));
map.put(CheckpointConfig.class.getName(), checkpointConfigMethods);
return map;
} catch (NoSuchMethodException e) {
// This makes the test | names |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/CheckedTemplateRequireTypeSafeTest.java | {
"start": 2591,
"end": 2720
} | class ____ {
static native TemplateInstance hola(String name);
}
@Singleton
@Named
public static | Templates |
java | quarkusio__quarkus | independent-projects/qute/debug/src/main/java/io/quarkus/qute/debug/agent/evaluations/EvaluationSupport.java | {
"start": 839,
"end": 4128
} | class ____ {
/** Sentinel value used to ignore evaluation results in hover context. */
private static final Object IGNORE_RESULT = new Object();
/** Reference to the debuggee agent that manages threads, frames, and variables. */
private final DebuggeeAgent agent;
public EvaluationSupport(DebuggeeAgent agent) {
this.agent = agent;
}
/**
* Evaluates a string expression in the context of a given stack frame.
*
* @param frameId the ID of the stack frame where the expression should be evaluated
* @param expression the expression to evaluate
* @param context the evaluation context (e.g., HOVER, WATCH)
* @return a CompletableFuture resolving to an {@link EvaluateResponse}
*/
public CompletableFuture<EvaluateResponse> evaluate(Integer frameId, String expression, String context) {
if (!agent.isEnabled()) {
// Debugger not enabled: return an error immediately
ResponseError re = new ResponseError();
re.setCode(ResponseErrorCode.InvalidRequest);
re.setMessage("Debuggee agent is not enabled.");
throw new ResponseErrorException(re);
}
// Find the stack frame
RemoteStackFrame frame = agent.findStackFrame(frameId);
if (frame == null) {
// Frame not found: return null
return CompletableFuture.completedFuture(null);
}
// Evaluate the expression asynchronously
return frame.evaluate(expression)
.handle((result, error) -> {
// Handle evaluation errors
if (error != null) {
if (EvaluateArgumentsContext.HOVER.equals(context)) {
// Ignore errors in hover context
return IGNORE_RESULT;
}
// Otherwise, propagate as JSON-RPC error
ResponseError re = new ResponseError();
re.setCode(ResponseErrorCode.InvalidRequest);
re.setMessage(error.getMessage());
throw new ResponseErrorException(re);
}
return result;
})
.thenApply(result -> {
// Build EvaluateResponse
EvaluateResponse response = new EvaluateResponse();
if (result != null) {
if (result != IGNORE_RESULT) {
response.setResult(result.toString());
// If the result can be expanded (object/array), populate variables reference
if (VariablesHelper.shouldBeExpanded(result, frame)) {
var variable = VariablesHelper.fillVariable("", result, frame, null,
agent.getVariablesRegistry());
response.setVariablesReference(variable.getVariablesReference());
}
}
} else {
response.setResult("null");
}
return response;
});
}
}
| EvaluationSupport |
java | quarkusio__quarkus | independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/ValueResolverGenerator.java | {
"start": 53915,
"end": 55520
} | class ____. Therefore, this method would
* return "Foo$Bar" for the
* parameter "com.foo.Foo$Bar". Use {@link #simpleName(ClassInfo)} when you need to distinguish the nested classes.
*
* @param name
* @return the simple name
*/
static String simpleName(String name) {
return name.contains(".") ? name.substring(name.lastIndexOf(".") + 1, name.length()) : name;
}
static String packageName(DotName dotName) {
String name = dotName.toString();
int index = name.lastIndexOf('.');
if (index == -1) {
return "";
}
return name.substring(0, index);
}
static String generatedNameFromTarget(String targetPackage, String baseName, String suffix) {
if (targetPackage == null || targetPackage.isEmpty()) {
return baseName + suffix;
} else if (targetPackage.startsWith("java")) {
return "io/quarkus/qute" + "/" + baseName + suffix;
} else {
return targetPackage.replace('.', '/') + "/" + baseName + suffix;
}
}
private static boolean noneMethodMatches(List<MethodKey> methods, String name) {
for (MethodKey method : methods) {
if (method.name.equals(name)) {
return false;
}
}
return true;
}
public static boolean isVarArgs(MethodInfo method) {
return (method.flags() & 0x00000080) != 0;
}
public static String underscoredFullyQualifiedName(String name) {
return name.replace(".", "_").replace("$", "_");
}
private static | here |
java | google__dagger | javatests/dagger/internal/codegen/ModuleValidationTest.java | {
"start": 5967,
"end": 6049
} | interface ____ {",
" @ProductionSubcomponent.Builder",
" | Sub |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/RegistryOperationsClient.java | {
"start": 1795,
"end": 2064
} | class ____ extends RegistryOperationsService {
public RegistryOperationsClient(String name) {
super(name);
}
public RegistryOperationsClient(String name,
RegistryBindingSource bindingSource) {
super(name, bindingSource);
}
}
| RegistryOperationsClient |
java | elastic__elasticsearch | libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java | {
"start": 855,
"end": 1167
} | enum ____ {
PLUGINS,
ES_MODULES_ONLY,
SERVER_ONLY,
ALWAYS_DENIED,
ALWAYS_ALLOWED
}
ExpectedAccess expectedAccess();
Class<? extends Exception> expectedExceptionIfDenied() default NotEntitledException.class;
int fromJavaVersion() default -1;
}
| ExpectedAccess |
java | apache__kafka | streams/test-utils/src/test/java/org/apache/kafka/streams/MockTimeTest.java | {
"start": 1000,
"end": 2072
} | class ____ {
@Test
public void shouldSetStartTime() {
final TopologyTestDriver.MockTime time = new TopologyTestDriver.MockTime(42L);
assertEquals(42L, time.milliseconds());
assertEquals(42L * 1000L * 1000L, time.nanoseconds());
}
@Test
public void shouldGetNanosAsMillis() {
final TopologyTestDriver.MockTime time = new TopologyTestDriver.MockTime(42L);
assertEquals(42L, time.hiResClockMs());
}
@Test
public void shouldNotAllowNegativeSleep() {
assertThrows(IllegalArgumentException.class,
() -> new TopologyTestDriver.MockTime(42).sleep(-1L));
}
@Test
public void shouldAdvanceTimeOnSleep() {
final TopologyTestDriver.MockTime time = new TopologyTestDriver.MockTime(42L);
assertEquals(42L, time.milliseconds());
time.sleep(1L);
assertEquals(43L, time.milliseconds());
time.sleep(0L);
assertEquals(43L, time.milliseconds());
time.sleep(3L);
assertEquals(46L, time.milliseconds());
}
}
| MockTimeTest |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoSingleCallable.java | {
"start": 1181,
"end": 3150
} | class ____<T> extends Mono<T>
implements Callable<T>, SourceProducer<T> {
final Callable<? extends @Nullable T> callable;
final @Nullable T defaultValue;
MonoSingleCallable(Callable<? extends @Nullable T> source) {
this.callable = Objects.requireNonNull(source, "source");
this.defaultValue = null;
}
MonoSingleCallable(Callable<? extends T> source, T defaultValue) {
this.callable = Objects.requireNonNull(source, "source");
this.defaultValue = Objects.requireNonNull(defaultValue, "defaultValue");
}
@Override
public void subscribe(CoreSubscriber<? super T> actual) {
Operators.MonoInnerProducerBase<T>
sds = new Operators.MonoInnerProducerBase<>(actual);
actual.onSubscribe(sds);
if (sds.isCancelled()) {
return;
}
try {
T t = callable.call();
if (t != null) {
sds.complete(t);
} else if (defaultValue != null) {
sds.complete(defaultValue);
} else {
actual.onError(new NoSuchElementException("Source was empty"));
}
}
catch (Throwable e) {
actual.onError(Operators.onOperatorError(e, actual.currentContext()));
}
}
@Override
public T block() {
//duration is ignored below
return block(Duration.ZERO);
}
@Override
public T block(Duration m) {
final T v;
try {
v = callable.call();
}
catch (Throwable e) {
throw Exceptions.propagate(e);
}
if (v != null) {
return v;
} else {
if (this.defaultValue != null) {
return this.defaultValue;
}
throw new NoSuchElementException("Source was empty");
}
}
@Override
public T call() throws Exception {
final T v = callable.call();
if (v != null) {
return v;
} else {
if (this.defaultValue != null) {
return this.defaultValue;
}
throw new NoSuchElementException("Source was empty");
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return SourceProducer.super.scanUnsafe(key);
}
}
| MonoSingleCallable |
java | netty__netty | resolver-dns/src/main/java/io/netty/resolver/dns/DnsQueryLifecycleObserver.java | {
"start": 1499,
"end": 1799
} | interface ____ be used to track metrics for individual DNS servers. Methods which may lead to another DNS query
* return an object of type {@link DnsQueryLifecycleObserver}. Implementations may use this to build a query tree to
* understand the "sub queries" generated by a single query.
*/
public | can |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnThreadingTests.java | {
"start": 2686,
"end": 2949
} | class ____ {
@Bean
@ConditionalOnThreading(Threading.VIRTUAL)
ThreadType virtual() {
return ThreadType.VIRTUAL;
}
@Bean
@ConditionalOnThreading(Threading.PLATFORM)
ThreadType platform() {
return ThreadType.PLATFORM;
}
}
}
| BasicConfiguration |
java | square__retrofit | retrofit-converters/scalars/src/main/java/retrofit2/converter/scalars/ScalarsConverterFactory.java | {
"start": 1847,
"end": 4005
} | class ____ extends Converter.Factory {
public static ScalarsConverterFactory create() {
return new ScalarsConverterFactory();
}
private ScalarsConverterFactory() {}
@Override
public @Nullable Converter<?, RequestBody> requestBodyConverter(
Type type,
Annotation[] parameterAnnotations,
Annotation[] methodAnnotations,
Retrofit retrofit) {
if (type == String.class
|| type == boolean.class
|| type == Boolean.class
|| type == byte.class
|| type == Byte.class
|| type == char.class
|| type == Character.class
|| type == double.class
|| type == Double.class
|| type == float.class
|| type == Float.class
|| type == int.class
|| type == Integer.class
|| type == long.class
|| type == Long.class
|| type == short.class
|| type == Short.class) {
return ScalarRequestBodyConverter.INSTANCE;
}
return null;
}
@Override
public @Nullable Converter<ResponseBody, ?> responseBodyConverter(
Type type, Annotation[] annotations, Retrofit retrofit) {
if (type == String.class) {
return StringResponseBodyConverter.INSTANCE;
}
if (type == Boolean.class || type == boolean.class) {
return BooleanResponseBodyConverter.INSTANCE;
}
if (type == Byte.class || type == byte.class) {
return ByteResponseBodyConverter.INSTANCE;
}
if (type == Character.class || type == char.class) {
return CharacterResponseBodyConverter.INSTANCE;
}
if (type == Double.class || type == double.class) {
return DoubleResponseBodyConverter.INSTANCE;
}
if (type == Float.class || type == float.class) {
return FloatResponseBodyConverter.INSTANCE;
}
if (type == Integer.class || type == int.class) {
return IntegerResponseBodyConverter.INSTANCE;
}
if (type == Long.class || type == long.class) {
return LongResponseBodyConverter.INSTANCE;
}
if (type == Short.class || type == short.class) {
return ShortResponseBodyConverter.INSTANCE;
}
return null;
}
}
| ScalarsConverterFactory |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticInjectionPointListAllTest.java | {
"start": 2373,
"end": 2704
} | class ____ implements BeanCreator<SyntheticBean> {
@Override
public SyntheticBean create(SyntheticCreationalContext<SyntheticBean> context) {
return new SyntheticBean(context.getInjectedReference(new TypeLiteral<List<SomeBean>>() {
}, All.Literal.INSTANCE));
}
}
}
| SynthBeanCreator |
java | quarkusio__quarkus | extensions/funqy/funqy-amazon-lambda/deployment/src/test/java/io/quarkus/funqy/test/SnsFunctionTest.java | {
"start": 418,
"end": 1577
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource("item-function.properties", "application.properties")
.addAsResource("events/sns", "events")
.addClasses(TestFunctions.class, Item.class,
EventDataProvider.class));
@Test
public void should_return_no_failures_if_processing_is_ok() {
// given
var body = getData("ok.json");
// when
var response = RestAssured.given().contentType("application/json")
.body(body)
.post("/");
// then
response.then().statusCode(204);
}
@Test
public void should_return_one_failure_if_processing_fails() {
// given
var body = getData("fail.json");
// when
var response = RestAssured.given().contentType("application/json")
.body(body)
.post("/");
// then
// SNS triggers have no error handling.
response.then().statusCode(204);
}
}
| SnsFunctionTest |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/HttpUtil.java | {
"start": 1282,
"end": 31178
} | class ____ {
private static final AsciiString CHARSET_EQUALS = AsciiString.of(HttpHeaderValues.CHARSET + "=");
private static final AsciiString SEMICOLON = AsciiString.cached(";");
private static final String COMMA_STRING = String.valueOf(COMMA);
private HttpUtil() { }
/**
* Determine if a uri is in origin-form according to
* <a href="https://tools.ietf.org/html/rfc7230#section-5.3">rfc7230, 5.3</a>.
*/
public static boolean isOriginForm(URI uri) {
return isOriginForm(uri.toString());
}
/**
* Determine if a string uri is in origin-form according to
* <a href="https://tools.ietf.org/html/rfc7230#section-5.3">rfc7230, 5.3</a>.
*/
public static boolean isOriginForm(String uri) {
return uri.startsWith("/");
}
/**
* Determine if a uri is in asterisk-form according to
* <a href="https://tools.ietf.org/html/rfc7230#section-5.3">rfc7230, 5.3</a>.
*/
public static boolean isAsteriskForm(URI uri) {
return isAsteriskForm(uri.toString());
}
/**
* Determine if a string uri is in asterisk-form according to
* <a href="https://tools.ietf.org/html/rfc7230#section-5.3">rfc7230, 5.3</a>.
*/
public static boolean isAsteriskForm(String uri) {
return "*".equals(uri);
}
/**
* Returns {@code true} if and only if the connection can remain open and
* thus 'kept alive'. This methods respects the value of the.
*
* {@code "Connection"} header first and then the return value of
* {@link HttpVersion#isKeepAliveDefault()}.
*/
public static boolean isKeepAlive(HttpMessage message) {
return !message.headers().containsValue(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE, true) &&
(message.protocolVersion().isKeepAliveDefault() ||
message.headers().containsValue(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE, true));
}
/**
* Sets the value of the {@code "Connection"} header depending on the
* protocol version of the specified message. This getMethod sets or removes
* the {@code "Connection"} header depending on what the default keep alive
* mode of the message's protocol version is, as specified by
* {@link HttpVersion#isKeepAliveDefault()}.
* <ul>
* <li>If the connection is kept alive by default:
* <ul>
* <li>set to {@code "close"} if {@code keepAlive} is {@code false}.</li>
* <li>remove otherwise.</li>
* </ul></li>
* <li>If the connection is closed by default:
* <ul>
* <li>set to {@code "keep-alive"} if {@code keepAlive} is {@code true}.</li>
* <li>remove otherwise.</li>
* </ul></li>
* </ul>
* @see #setKeepAlive(HttpHeaders, HttpVersion, boolean)
*/
public static void setKeepAlive(HttpMessage message, boolean keepAlive) {
setKeepAlive(message.headers(), message.protocolVersion(), keepAlive);
}
/**
* Sets the value of the {@code "Connection"} header depending on the
* protocol version of the specified message. This getMethod sets or removes
* the {@code "Connection"} header depending on what the default keep alive
* mode of the message's protocol version is, as specified by
* {@link HttpVersion#isKeepAliveDefault()}.
* <ul>
* <li>If the connection is kept alive by default:
* <ul>
* <li>set to {@code "close"} if {@code keepAlive} is {@code false}.</li>
* <li>remove otherwise.</li>
* </ul></li>
* <li>If the connection is closed by default:
* <ul>
* <li>set to {@code "keep-alive"} if {@code keepAlive} is {@code true}.</li>
* <li>remove otherwise.</li>
* </ul></li>
* </ul>
*/
public static void setKeepAlive(HttpHeaders h, HttpVersion httpVersion, boolean keepAlive) {
if (httpVersion.isKeepAliveDefault()) {
if (keepAlive) {
h.remove(HttpHeaderNames.CONNECTION);
} else {
h.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
}
} else {
if (keepAlive) {
h.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
} else {
h.remove(HttpHeaderNames.CONNECTION);
}
}
}
/**
* Returns the length of the content. Please note that this value is
* not retrieved from {@link HttpContent#content()} but from the
* {@code "Content-Length"} header, and thus they are independent from each
* other.
*
* @return the content length
*
* @throws NumberFormatException
* if the message does not have the {@code "Content-Length"} header
* or its value is not a number
*/
public static long getContentLength(HttpMessage message) {
String value = message.headers().get(HttpHeaderNames.CONTENT_LENGTH);
if (value != null) {
return Long.parseLong(value);
}
// We know the content length if it's a Web Socket message even if
// Content-Length header is missing.
long webSocketContentLength = getWebSocketContentLength(message);
if (webSocketContentLength >= 0) {
return webSocketContentLength;
}
// Otherwise we don't.
throw new NumberFormatException("header not found: " + HttpHeaderNames.CONTENT_LENGTH);
}
/**
* Returns the length of the content or the specified default value if the message does not have the {@code
* "Content-Length" header}. Please note that this value is not retrieved from {@link HttpContent#content()} but
* from the {@code "Content-Length"} header, and thus they are independent from each other.
*
* @param message the message
* @param defaultValue the default value
* @return the content length or the specified default value
* @throws NumberFormatException if the {@code "Content-Length"} header does not parse as a long
*/
public static long getContentLength(HttpMessage message, long defaultValue) {
String value = message.headers().get(HttpHeaderNames.CONTENT_LENGTH);
if (value != null) {
return Long.parseLong(value);
}
// We know the content length if it's a Web Socket message even if
// Content-Length header is missing.
long webSocketContentLength = getWebSocketContentLength(message);
if (webSocketContentLength >= 0) {
return webSocketContentLength;
}
// Otherwise we don't.
return defaultValue;
}
/**
* Get an {@code int} representation of {@link #getContentLength(HttpMessage, long)}.
*
* @return the content length or {@code defaultValue} if this message does
* not have the {@code "Content-Length"} header.
*
* @throws NumberFormatException if the {@code "Content-Length"} header does not parse as an int
*/
public static int getContentLength(HttpMessage message, int defaultValue) {
return (int) Math.min(Integer.MAX_VALUE, getContentLength(message, (long) defaultValue));
}
/**
* Returns the content length of the specified web socket message. If the
* specified message is not a web socket message, {@code -1} is returned.
*/
static int getWebSocketContentLength(HttpMessage message) {
// WebSocket messages have constant content-lengths.
HttpHeaders h = message.headers();
if (message instanceof HttpRequest) {
HttpRequest req = (HttpRequest) message;
if (HttpMethod.GET.equals(req.method()) &&
h.contains(HttpHeaderNames.SEC_WEBSOCKET_KEY1) &&
h.contains(HttpHeaderNames.SEC_WEBSOCKET_KEY2)) {
return 8;
}
} else if (message instanceof HttpResponse) {
HttpResponse res = (HttpResponse) message;
if (res.status().code() == 101 &&
h.contains(HttpHeaderNames.SEC_WEBSOCKET_ORIGIN) &&
h.contains(HttpHeaderNames.SEC_WEBSOCKET_LOCATION)) {
return 16;
}
}
// Not a web socket message
return -1;
}
/**
* Sets the {@code "Content-Length"} header.
*/
public static void setContentLength(HttpMessage message, long length) {
message.headers().set(HttpHeaderNames.CONTENT_LENGTH, length);
}
public static boolean isContentLengthSet(HttpMessage m) {
return m.headers().contains(HttpHeaderNames.CONTENT_LENGTH);
}
/**
* Returns {@code true} if and only if the specified message contains an expect header and the only expectation
* present is the 100-continue expectation. Note that this method returns {@code false} if the expect header is
* not valid for the message (e.g., the message is a response, or the version on the message is HTTP/1.0).
*
* @param message the message
* @return {@code true} if and only if the expectation 100-continue is present and it is the only expectation
* present
*/
public static boolean is100ContinueExpected(HttpMessage message) {
return isExpectHeaderValid(message)
// unquoted tokens in the expect header are case-insensitive, thus 100-continue is case insensitive
&& message.headers().contains(HttpHeaderNames.EXPECT, HttpHeaderValues.CONTINUE, true);
}
/**
* Returns {@code true} if the specified message contains an expect header specifying an expectation that is not
* supported. Note that this method returns {@code false} if the expect header is not valid for the message
* (e.g., the message is a response, or the version on the message is HTTP/1.0).
*
* @param message the message
* @return {@code true} if and only if an expectation is present that is not supported
*/
static boolean isUnsupportedExpectation(HttpMessage message) {
if (!isExpectHeaderValid(message)) {
return false;
}
final String expectValue = message.headers().get(HttpHeaderNames.EXPECT);
return expectValue != null && !HttpHeaderValues.CONTINUE.toString().equalsIgnoreCase(expectValue);
}
private static boolean isExpectHeaderValid(final HttpMessage message) {
/*
* Expect: 100-continue is for requests only and it works only on HTTP/1.1 or later. Note further that RFC 7231
* section 5.1.1 says "A server that receives a 100-continue expectation in an HTTP/1.0 request MUST ignore
* that expectation."
*/
return message instanceof HttpRequest &&
message.protocolVersion().compareTo(HttpVersion.HTTP_1_1) >= 0;
}
/**
* Sets or removes the {@code "Expect: 100-continue"} header to / from the
* specified message. If {@code expected} is {@code true},
* the {@code "Expect: 100-continue"} header is set and all other previous
* {@code "Expect"} headers are removed. Otherwise, all {@code "Expect"}
* headers are removed completely.
*/
public static void set100ContinueExpected(HttpMessage message, boolean expected) {
if (expected) {
message.headers().set(HttpHeaderNames.EXPECT, HttpHeaderValues.CONTINUE);
} else {
message.headers().remove(HttpHeaderNames.EXPECT);
}
}
/**
* Checks to see if the transfer encoding in a specified {@link HttpMessage} is chunked
*
* @param message The message to check
* @return True if transfer encoding is chunked, otherwise false
*/
public static boolean isTransferEncodingChunked(HttpMessage message) {
return message.headers().containsValue(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED, true);
}
/**
* Set the {@link HttpHeaderNames#TRANSFER_ENCODING} to either include {@link HttpHeaderValues#CHUNKED} if
* {@code chunked} is {@code true}, or remove {@link HttpHeaderValues#CHUNKED} if {@code chunked} is {@code false}.
*
* @param m The message which contains the headers to modify.
* @param chunked if {@code true} then include {@link HttpHeaderValues#CHUNKED} in the headers. otherwise remove
* {@link HttpHeaderValues#CHUNKED} from the headers.
*/
public static void setTransferEncodingChunked(HttpMessage m, boolean chunked) {
if (chunked) {
m.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED);
m.headers().remove(HttpHeaderNames.CONTENT_LENGTH);
} else {
List<String> encodings = m.headers().getAll(HttpHeaderNames.TRANSFER_ENCODING);
if (encodings.isEmpty()) {
return;
}
List<CharSequence> values = new ArrayList<CharSequence>(encodings);
Iterator<CharSequence> valuesIt = values.iterator();
while (valuesIt.hasNext()) {
CharSequence value = valuesIt.next();
if (HttpHeaderValues.CHUNKED.contentEqualsIgnoreCase(value)) {
valuesIt.remove();
}
}
if (values.isEmpty()) {
m.headers().remove(HttpHeaderNames.TRANSFER_ENCODING);
} else {
m.headers().set(HttpHeaderNames.TRANSFER_ENCODING, values);
}
}
}
/**
* Fetch charset from message's Content-Type header.
*
* @param message entity to fetch Content-Type header from
* @return the charset from message's Content-Type header or {@link CharsetUtil#ISO_8859_1}
* if charset is not presented or unparsable
*/
public static Charset getCharset(HttpMessage message) {
return getCharset(message, CharsetUtil.ISO_8859_1);
}
/**
* Fetch charset from Content-Type header value.
*
* @param contentTypeValue Content-Type header value to parse
* @return the charset from message's Content-Type header or {@link CharsetUtil#ISO_8859_1}
* if charset is not presented or unparsable
*/
public static Charset getCharset(CharSequence contentTypeValue) {
if (contentTypeValue != null) {
return getCharset(contentTypeValue, CharsetUtil.ISO_8859_1);
} else {
return CharsetUtil.ISO_8859_1;
}
}
/**
* Fetch charset from message's Content-Type header.
*
* @param message entity to fetch Content-Type header from
* @param defaultCharset result to use in case of empty, incorrect or doesn't contain required part header value
* @return the charset from message's Content-Type header or {@code defaultCharset}
* if charset is not presented or unparsable
*/
public static Charset getCharset(HttpMessage message, Charset defaultCharset) {
CharSequence contentTypeValue = message.headers().get(HttpHeaderNames.CONTENT_TYPE);
if (contentTypeValue != null) {
return getCharset(contentTypeValue, defaultCharset);
} else {
return defaultCharset;
}
}
/**
* Fetch charset from Content-Type header value.
*
* @param contentTypeValue Content-Type header value to parse
* @param defaultCharset result to use in case of empty, incorrect or doesn't contain required part header value
* @return the charset from message's Content-Type header or {@code defaultCharset}
* if charset is not presented or unparsable
*/
public static Charset getCharset(CharSequence contentTypeValue, Charset defaultCharset) {
if (contentTypeValue != null) {
CharSequence charsetRaw = getCharsetAsSequence(contentTypeValue);
if (charsetRaw != null) {
if (charsetRaw.length() > 2) { // at least contains 2 quotes(")
if (charsetRaw.charAt(0) == '"' && charsetRaw.charAt(charsetRaw.length() - 1) == '"') {
charsetRaw = charsetRaw.subSequence(1, charsetRaw.length() - 1);
}
}
try {
return Charset.forName(charsetRaw.toString());
} catch (IllegalCharsetNameException | UnsupportedCharsetException ignored) {
// just return the default charset
}
}
}
return defaultCharset;
}
/**
* Fetch charset from message's Content-Type header as a char sequence.
*
* A lot of sites/possibly clients have charset="CHARSET", for example charset="utf-8". Or "utf8" instead of "utf-8"
* This is not according to standard, but this method provide an ability to catch desired mistakes manually in code
*
* @param message entity to fetch Content-Type header from
* @return the {@code CharSequence} with charset from message's Content-Type header
* or {@code null} if charset is not presented
* @deprecated use {@link #getCharsetAsSequence(HttpMessage)}
*/
@Deprecated
public static CharSequence getCharsetAsString(HttpMessage message) {
return getCharsetAsSequence(message);
}
/**
* Fetch charset from message's Content-Type header as a char sequence.
*
* A lot of sites/possibly clients have charset="CHARSET", for example charset="utf-8". Or "utf8" instead of "utf-8"
* This is not according to standard, but this method provide an ability to catch desired mistakes manually in code
*
* @return the {@code CharSequence} with charset from message's Content-Type header
* or {@code null} if charset is not presented
*/
public static CharSequence getCharsetAsSequence(HttpMessage message) {
CharSequence contentTypeValue = message.headers().get(HttpHeaderNames.CONTENT_TYPE);
if (contentTypeValue != null) {
return getCharsetAsSequence(contentTypeValue);
} else {
return null;
}
}
/**
* Fetch charset from Content-Type header value as a char sequence.
*
* A lot of sites/possibly clients have charset="CHARSET", for example charset="utf-8". Or "utf8" instead of "utf-8"
* This is not according to standard, but this method provide an ability to catch desired mistakes manually in code
*
* @param contentTypeValue Content-Type header value to parse
* @return the {@code CharSequence} with charset from message's Content-Type header
* or {@code null} if charset is not presented
* @throws NullPointerException in case if {@code contentTypeValue == null}
*/
public static CharSequence getCharsetAsSequence(CharSequence contentTypeValue) {
ObjectUtil.checkNotNull(contentTypeValue, "contentTypeValue");
int indexOfCharset = AsciiString.indexOfIgnoreCaseAscii(contentTypeValue, CHARSET_EQUALS, 0);
if (indexOfCharset == AsciiString.INDEX_NOT_FOUND) {
return null;
}
int indexOfEncoding = indexOfCharset + CHARSET_EQUALS.length();
if (indexOfEncoding < contentTypeValue.length()) {
CharSequence charsetCandidate = contentTypeValue.subSequence(indexOfEncoding, contentTypeValue.length());
int indexOfSemicolon = AsciiString.indexOfIgnoreCaseAscii(charsetCandidate, SEMICOLON, 0);
if (indexOfSemicolon == AsciiString.INDEX_NOT_FOUND) {
return charsetCandidate;
}
return charsetCandidate.subSequence(0, indexOfSemicolon);
}
return null;
}
/**
* Fetch MIME type part from message's Content-Type header as a char sequence.
*
* @param message entity to fetch Content-Type header from
* @return the MIME type as a {@code CharSequence} from message's Content-Type header
* or {@code null} if content-type header or MIME type part of this header are not presented
* <p/>
* "content-type: text/html; charset=utf-8" - "text/html" will be returned <br/>
* "content-type: text/html" - "text/html" will be returned <br/>
* "content-type: " or no header - {@code null} we be returned
*/
public static CharSequence getMimeType(HttpMessage message) {
CharSequence contentTypeValue = message.headers().get(HttpHeaderNames.CONTENT_TYPE);
if (contentTypeValue != null) {
return getMimeType(contentTypeValue);
} else {
return null;
}
}
/**
* Fetch MIME type part from Content-Type header value as a char sequence.
*
* @param contentTypeValue Content-Type header value to parse
* @return the MIME type as a {@code CharSequence} from message's Content-Type header
* or {@code null} if content-type header or MIME type part of this header are not presented
* <p/>
* "content-type: text/html; charset=utf-8" - "text/html" will be returned <br/>
* "content-type: text/html" - "text/html" will be returned <br/>
* "content-type: empty header - {@code null} we be returned
* @throws NullPointerException in case if {@code contentTypeValue == null}
*/
public static CharSequence getMimeType(CharSequence contentTypeValue) {
ObjectUtil.checkNotNull(contentTypeValue, "contentTypeValue");
int indexOfSemicolon = AsciiString.indexOfIgnoreCaseAscii(contentTypeValue, SEMICOLON, 0);
if (indexOfSemicolon != AsciiString.INDEX_NOT_FOUND) {
return contentTypeValue.subSequence(0, indexOfSemicolon);
} else {
return contentTypeValue.length() > 0 ? contentTypeValue : null;
}
}
/**
* Formats the host string of an address so it can be used for computing an HTTP component
* such as a URL or a Host header
*
* @param addr the address
* @return the formatted String
*/
public static String formatHostnameForHttp(InetSocketAddress addr) {
String hostString = NetUtil.getHostname(addr);
if (NetUtil.isValidIpV6Address(hostString)) {
if (!addr.isUnresolved()) {
hostString = NetUtil.toAddressString(addr.getAddress());
} else if (hostString.charAt(0) == '[' && hostString.charAt(hostString.length() - 1) == ']') {
// If IPv6 address already contains brackets, let's return as is.
return hostString;
}
return '[' + hostString + ']';
}
return hostString;
}
/**
* Validates, and optionally extracts the content length from headers. This method is not intended for
* general use, but is here to be shared between HTTP/1 and HTTP/2 parsing.
*
* @param contentLengthFields the content-length header fields.
* @param isHttp10OrEarlier {@code true} if we are handling HTTP/1.0 or earlier
* @param allowDuplicateContentLengths {@code true} if multiple, identical-value content lengths should be allowed.
* @return the normalized content length from the headers or {@code -1} if the fields were empty.
* @throws IllegalArgumentException if the content-length fields are not valid
*/
public static long normalizeAndGetContentLength(
List<? extends CharSequence> contentLengthFields, boolean isHttp10OrEarlier,
boolean allowDuplicateContentLengths) {
if (contentLengthFields.isEmpty()) {
return -1;
}
// Guard against multiple Content-Length headers as stated in
// https://tools.ietf.org/html/rfc7230#section-3.3.2:
//
// If a message is received that has multiple Content-Length header
// fields with field-values consisting of the same decimal value, or a
// single Content-Length header field with a field value containing a
// list of identical decimal values (e.g., "Content-Length: 42, 42"),
// indicating that duplicate Content-Length header fields have been
// generated or combined by an upstream message processor, then the
// recipient MUST either reject the message as invalid or replace the
// duplicated field-values with a single valid Content-Length field
// containing that decimal value prior to determining the message body
// length or forwarding the message.
String firstField = contentLengthFields.get(0).toString();
boolean multipleContentLengths =
contentLengthFields.size() > 1 || firstField.indexOf(COMMA) >= 0;
if (multipleContentLengths && !isHttp10OrEarlier) {
if (allowDuplicateContentLengths) {
// Find and enforce that all Content-Length values are the same
String firstValue = null;
for (CharSequence field : contentLengthFields) {
String[] tokens = field.toString().split(COMMA_STRING, -1);
for (String token : tokens) {
String trimmed = token.trim();
if (firstValue == null) {
firstValue = trimmed;
} else if (!trimmed.equals(firstValue)) {
throw new IllegalArgumentException(
"Multiple Content-Length values found: " + contentLengthFields);
}
}
}
// Replace the duplicated field-values with a single valid Content-Length field
firstField = firstValue;
} else {
// Reject the message as invalid
throw new IllegalArgumentException(
"Multiple Content-Length values found: " + contentLengthFields);
}
}
// Ensure we not allow sign as part of the content-length:
// See https://github.com/squid-cache/squid/security/advisories/GHSA-qf3v-rc95-96j5
if (firstField.isEmpty() || !Character.isDigit(firstField.charAt(0))) {
// Reject the message as invalid
throw new IllegalArgumentException(
"Content-Length value is not a number: " + firstField);
}
try {
final long value = Long.parseLong(firstField);
return checkPositiveOrZero(value, "Content-Length value");
} catch (NumberFormatException e) {
// Reject the message as invalid
throw new IllegalArgumentException(
"Content-Length value is not a number: " + firstField, e);
}
}
/**
* Validate a <a href="https://tools.ietf.org/html/rfc7230#section-3.2.6">token</a> contains only allowed
* characters.
* <p>
* The <a href="https://tools.ietf.org/html/rfc2616#section-2.2">token</a> format is used for variety of HTTP
* components, like <a href="https://tools.ietf.org/html/rfc6265#section-4.1.1">cookie-name</a>,
* <a href="https://tools.ietf.org/html/rfc7230#section-3.2.6">field-name</a> of a
* <a href="https://tools.ietf.org/html/rfc7230#section-3.2">header-field</a>, or
* <a href="https://tools.ietf.org/html/rfc7231#section-4">request method</a>.
*
* @param token the token to validate.
* @return the index of the first invalid token character found, or {@code -1} if there are none.
*/
static int validateToken(CharSequence token) {
if (token instanceof AsciiString) {
return validateAsciiStringToken((AsciiString) token);
}
return validateCharSequenceToken(token);
}
/**
* Validate that an {@link AsciiString} contain onlu valid
* <a href="https://tools.ietf.org/html/rfc7230#section-3.2.6">token</a> characters.
*
* @param token the ascii string to validate.
*/
private static int validateAsciiStringToken(AsciiString token) {
byte[] array = token.array();
for (int i = token.arrayOffset(), len = token.arrayOffset() + token.length(); i < len; i++) {
if (!isValidTokenChar(array[i])) {
return i - token.arrayOffset();
}
}
return -1;
}
/**
* Validate that a {@link CharSequence} contain onlu valid
* <a href="https://tools.ietf.org/html/rfc7230#section-3.2.6">token</a> characters.
*
* @param token the character sequence to validate.
*/
private static int validateCharSequenceToken(CharSequence token) {
for (int i = 0, len = token.length(); i < len; i++) {
byte value = (byte) token.charAt(i);
if (!isValidTokenChar(value)) {
return i;
}
}
return -1;
}
// HEADER
// header-field = field-name ":" OWS field-value OWS
//
// field-name = token
// token = 1*tchar
//
// tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
// / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
// / DIGIT / ALPHA
// ; any VCHAR, except delimiters.
// Delimiters are chosen
// from the set of US-ASCII visual characters not allowed in a token
// (DQUOTE and "(),/:;<=>?@[\]{}")
//
// COOKIE
// cookie-pair = cookie-name "=" cookie-value
// cookie-name = token
// token = 1*<any CHAR except CTLs or separators>
// CTL = <any US-ASCII control character
// (octets 0 - 31) and DEL (127)>
// separators = "(" | ")" | "<" | ">" | "@"
// | "," | ";" | ":" | "\" | <">
// | "/" | "[" | "]" | "?" | "="
// | "{" | "}" | SP | HT
//
// field-name's token is equivalent to cookie-name's token, we can reuse the tchar mask for both:
// private static final | HttpUtil |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/junit4/statements/RunBeforeTestClassCallbacks.java | {
"start": 1400,
"end": 2321
} | class ____ extends Statement {
private final Statement next;
private final TestContextManager testContextManager;
/**
* Construct a new {@code RunBeforeTestClassCallbacks} statement.
* @param next the next {@code Statement} in the execution chain
* @param testContextManager the TestContextManager upon which to call
* {@code beforeTestClass()}
*/
public RunBeforeTestClassCallbacks(Statement next, TestContextManager testContextManager) {
this.next = next;
this.testContextManager = testContextManager;
}
/**
* Invoke {@link TestContextManager#beforeTestClass()} and then evaluate
* the next {@link Statement} in the execution chain (typically an instance
* of {@link org.junit.internal.runners.statements.RunBefores RunBefores}).
*/
@Override
public void evaluate() throws Throwable {
this.testContextManager.beforeTestClass();
this.next.evaluate();
}
}
| RunBeforeTestClassCallbacks |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/statistics/impl/StatisticsFromAwsSdkImpl.java | {
"start": 2370,
"end": 4387
} | class ____ implements
StatisticsFromAwsSdk {
private final CountersAndGauges countersAndGauges;
public StatisticsFromAwsSdkImpl(
final CountersAndGauges countersAndGauges) {
this.countersAndGauges = countersAndGauges;
}
@Override
public void updateAwsRequestCount(final long count) {
countersAndGauges.incrementCounter(STORE_IO_REQUEST, count);
}
@Override
public void updateAwsRetryCount(final long count) {
countersAndGauges.incrementCounter(STORE_IO_RETRY, count);
}
@Override
public void updateAwsThrottleExceptionsCount(final long count) {
countersAndGauges.incrementCounter(STORE_IO_THROTTLED, count);
countersAndGauges.addValueToQuantiles(STORE_IO_THROTTLE_RATE, count);
}
@Override
public void noteAwsRequestTime(final Duration duration) {
}
@Override
public void noteAwsClientExecuteTime(final Duration duration) {
}
@Override
public void noteRequestMarshallTime(final Duration duration) {
}
@Override
public void noteRequestSigningTime(final Duration duration) {
}
@Override
public void noteResponseProcessingTime(final Duration duration) {
}
/**
* Map error status codes to statistic names, excluding 404.
* 429 (google throttle events) are mapped to the 503 statistic.
* @param sc status code.
* @return a statistic name or null.
*/
public static String mapErrorStatusCodeToStatisticName(int sc) {
String stat = null;
switch (sc) {
case SC_400_BAD_REQUEST:
stat = HTTP_RESPONSE_400;
break;
case SC_404_NOT_FOUND:
/* do not map; not measured */
break;
case SC_500_INTERNAL_SERVER_ERROR:
stat = HTTP_RESPONSE_500;
break;
case SC_503_SERVICE_UNAVAILABLE:
case SC_429_TOO_MANY_REQUESTS_GCS:
stat = HTTP_RESPONSE_503;
break;
default:
if (sc > 500) {
stat = HTTP_RESPONSE_5XX;
} else if (sc > 400) {
stat = HTTP_RESPONSE_4XX;
}
}
return stat;
}
}
| StatisticsFromAwsSdkImpl |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/sockjs/client/InfoReceiver.java | {
"start": 1210,
"end": 1685
} | interface ____ {
/**
* Perform an HTTP request to the SockJS "Info" URL.
* and return the resulting JSON response content, or raise an exception.
* <p>Note that as of 4.2 this method accepts a {@code headers} parameter.
* @param infoUrl the URL to obtain SockJS server information from
* @param headers the headers to use for the request
* @return the body of the response
*/
String executeInfoRequest(URI infoUrl, @Nullable HttpHeaders headers);
}
| InfoReceiver |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/typeRef/TypeReferenceTest6.java | {
"start": 257,
"end": 807
} | class ____ extends TestCase {
public void test_typeRef() throws Exception {
TypeReference<Map<String, Entity>> typeRef = new TypeReference<Map<String, Entity>>() {
};
Map<String, Entity> map = JSON.parseObject(
"{\"value\":{\"id\":\"abc\",\"list\":[{\"id\":123}]}}", typeRef);
Entity entity = map.get("value");
Assert.assertNotNull(entity);
Assert.assertEquals("abc", entity.getId());
Assert.assertEquals(1, entity.getList().size());
Assert.assertEquals(123, entity.getList().get(0).getId());
}
public static | TypeReferenceTest6 |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/matchers/CapturingMatcher.java | {
"start": 587,
"end": 2354
} | class ____<T> implements ArgumentMatcher<T>, CapturesArguments, Serializable {
private final Class<? extends T> clazz;
private final List<T> arguments = new ArrayList<>();
private final ReadWriteLock lock = new ReentrantReadWriteLock();
private final Lock readLock = lock.readLock();
private final Lock writeLock = lock.writeLock();
public CapturingMatcher(final Class<? extends T> clazz) {
this.clazz = Objects.requireNonNull(clazz);
}
@Override
public boolean matches(Object argument) {
if (argument == null) {
return true;
}
if (Primitives.isPrimitiveOrWrapper(clazz)) {
return Primitives.isAssignableFromWrapper(clazz, argument.getClass());
}
return clazz.isAssignableFrom(argument.getClass());
}
@Override
public String toString() {
return "<Capturing argument: " + clazz.getSimpleName() + ">";
}
public T getLastValue() {
readLock.lock();
try {
if (arguments.isEmpty()) {
throw noArgumentValueWasCaptured();
}
return arguments.get(arguments.size() - 1);
} finally {
readLock.unlock();
}
}
public List<T> getAllValues() {
readLock.lock();
try {
return new ArrayList<>(arguments);
} finally {
readLock.unlock();
}
}
@SuppressWarnings("unchecked")
@Override
public void captureFrom(Object argument) {
writeLock.lock();
try {
this.arguments.add((T) argument);
} finally {
writeLock.unlock();
}
}
@Override
public Class<?> type() {
return clazz;
}
}
| CapturingMatcher |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/sql/TestFederationMySQLScriptAccuracy.java | {
"start": 1109,
"end": 2103
} | class ____ extends FederationSQLAccuracyTest {
private static final Logger LOG =
LoggerFactory.getLogger(TestFederationMySQLScriptAccuracy.class);
private static final String MYSQL_COMPATIBILITY = ";sql.syntax_mys=true";
@Override
protected MySQLFederationStateStore createStateStore() {
return new MySQLFederationStateStore();
}
@Override
protected String getSQLURL() {
return DATABASE_URL + System.currentTimeMillis() + MYSQL_COMPATIBILITY;
}
@Test
public void checkMysqlScriptAccuracy() throws SQLException {
MySQLFederationStateStore federationStateStore = this.createStateStore();
federationStateStore.initConnection(this.getConf());
// get a list of tables
List<String> tables = federationStateStore.getTables();
for (String table : tables) {
federationStateStore.getConn().prepareStatement(table).execute();
}
LOG.info("FederationStateStore create {} tables.", tables.size());
}
}
| TestFederationMySQLScriptAccuracy |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregateCompletionOnNewCorrelationGroupWithCanPreCompleteTest.java | {
"start": 2857,
"end": 5442
} | class ____ implements AggregationStrategy {
private static final Logger LOG = LoggerFactory.getLogger(CanPreCompleteAggregationStrategy.class);
public CanPreCompleteAggregationStrategy() {
}
@Override
public boolean canPreComplete() {
return true;
}
@Override
public boolean preComplete(Exchange oldExchange, Exchange newExchange) {
boolean preComplete = false;
String body1;
String body2;
String oldExchangeId;
String newExchangeId;
if (oldExchange == null) {
oldExchangeId = null;
newExchangeId = newExchange.getExchangeId();
body1 = null;
body2 = newExchange.getIn().getBody(String.class);
} else {
body1 = oldExchange.getIn().getBody(String.class);
body2 = newExchange.getIn().getBody(String.class);
oldExchangeId = oldExchange.getExchangeId();
newExchangeId = newExchange.getExchangeId();
}
LOG.debug("preComplete body1[{}] body2[{}] [{}] [{}]", body1, body2,
oldExchangeId, newExchangeId);
if (newExchange.getIn().getBody().equals("end")) {
preComplete = true;
}
LOG.debug("preComplete[{}]", preComplete);
return preComplete;
}
@Override
public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
LOG.debug("aggregate");
if (oldExchange == null) {
LOG.debug("aggregate oldExchange[{}] newExchangeId[{}]",
oldExchange,
newExchange.getExchangeId());
return newExchange;
}
String body1 = oldExchange.getIn().getBody(String.class);
String body2 = newExchange.getIn().getBody(String.class);
LOG.debug("aggregate body1[{}] body2[{}] [{}] [{}]", body1, body2,
oldExchange.getExchangeId(), newExchange.getExchangeId());
oldExchange.getIn().setBody(body1 + body2);
LOG.debug("aggregate [{}] [{}] [{}]", oldExchange.getIn().getBody(),
oldExchange.getExchangeId(), newExchange.getExchangeId());
return oldExchange;
}
@Override
public void onCompletion(Exchange exchange) {
LOG.debug("onCompletion[{}]", exchange.getExchangeId());
}
}
}
| CanPreCompleteAggregationStrategy |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/path/JSONPath_field_wildcard_filter_double.java | {
"start": 288,
"end": 1585
} | class ____ extends TestCase {
public void test_list_map_0() throws Exception {
Map<String, Value> jsonObject = JSON.parseObject(text, new TypeReference<Map<String, Value>>(){}, Feature.OrderedField);
Collection array = (Collection) JSONPath.eval(jsonObject, "$.*[score>0]");
assertEquals("[{\"score\":0.8951322155668501},{\"score\":0.7237896928683851},{\"score\":0.3467174233072834}]", JSON.toJSONString(array));
}
public void test_list_map_1() throws Exception {
Map<String, Value> jsonObject = JSON.parseObject(text, new TypeReference<Map<String, Value>>(){}, Feature.OrderedField);
Collection array = (Collection) JSONPath.eval(jsonObject, "$.*[score<0]");
assertEquals("[{\"score\":-0.3453003960431523}]", JSON.toJSONString(array));
}
public void test_list_map_2() throws Exception {
Map<String, Value> jsonObject = JSON.parseObject(text, new TypeReference<Map<String, Value>>(){}, Feature.OrderedField);
Collection array = (Collection) JSONPath.eval(jsonObject, "$.*[score=0]");
assertEquals("[{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0}]", JSON.toJSONString(array));
}
public static | JSONPath_field_wildcard_filter_double |
java | apache__camel | components/camel-reactive-streams/src/test/java/org/apache/camel/component/reactive/streams/BasicSubscriberTest.java | {
"start": 1288,
"end": 3209
} | class ____ extends BaseReactiveTest {
@Test
public void testWorking() throws Exception {
int count = 2;
MockEndpoint e1 = getMockEndpoint("mock:sub1");
e1.expectedMinimumMessageCount(count);
e1.assertIsSatisfied();
MockEndpoint e2 = getMockEndpoint("mock:sub2");
e2.expectedMinimumMessageCount(count);
e2.assertIsSatisfied();
MockEndpoint e3 = getMockEndpoint("mock:sub3");
e3.expectedMinimumMessageCount(count);
e3.assertIsSatisfied();
for (int i = 0; i < count; i++) {
Exchange ex1 = e1.getExchanges().get(i);
Exchange ex2 = e2.getExchanges().get(i);
Exchange ex3 = e3.getExchanges().get(i);
assertEquals(ex1.getIn().getBody(), ex2.getIn().getBody());
assertEquals(ex1.getIn().getBody(), ex3.getIn().getBody());
}
}
@Override
protected void doPostSetup() {
Subscriber<Integer> sub = CamelReactiveStreams.get(context()).streamSubscriber("sub", Integer.class);
Subscriber<Integer> sub2 = CamelReactiveStreams.get(context()).streamSubscriber("sub2", Integer.class);
Publisher<Integer> pub = CamelReactiveStreams.get(context()).fromStream("pub", Integer.class);
pub.subscribe(sub);
pub.subscribe(sub2);
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("reactive-streams:sub")
.to("mock:sub1");
from("reactive-streams:sub2")
.to("mock:sub2");
from("timer:tick?period=50")
.setBody().simple("${random(500)}")
.wireTap("mock:sub3")
.to("reactive-streams:pub");
}
};
}
}
| BasicSubscriberTest |
java | google__guice | extensions/grapher/test/com/google/inject/grapher/demo/DancePartyImpl.java | {
"start": 721,
"end": 858
} | class ____ implements DanceParty {
@Inject
public DancePartyImpl(@Assisted String thatNewSound, MartyMcFly guitarist) {}
}
| DancePartyImpl |
java | google__guice | core/test/com/google/inject/BinderTestSuite.java | {
"start": 18852,
"end": 21909
} | class ____ extends TestCase {
final String name;
final Key<?> key;
final Class<? extends Injectable> injectsKey;
final ImmutableList<Module> modules;
final ImmutableList<Object> expectedValues;
final CreationTime creationTime;
public UserExceptionsTest(Builder builder) {
super("test");
name = builder.name;
key = builder.key;
injectsKey = builder.injectsKey;
modules = ImmutableList.copyOf(builder.modules);
expectedValues = ImmutableList.copyOf(builder.expectedValues);
creationTime = builder.creationTime;
}
@Override
public String getName() {
return "provision errors:" + name;
}
Injector newInjector() {
return Guice.createInjector(modules);
}
public void test() throws IllegalAccessException, InstantiationException {
nextId.set(-1);
try {
newInjector();
assertEquals(CreationTime.LAZY, creationTime);
} catch (CreationException expected) {
assertEquals(CreationTime.EAGER, creationTime);
assertContains(expected.getMessage(), "Illegal value: -1");
return;
}
Provider<?> provider = newInjector().getProvider(key);
Provider<?> bindingProvider = newInjector().getBinding(key).getProvider();
nextId.set(-1);
try {
newInjector().getInstance(key);
fail();
} catch (ProvisionException expected) {
assertContains(expected.getMessage(), "Illegal value: -1");
}
nextId.set(-1);
try {
provider.get();
fail();
} catch (ProvisionException expected) {
assertContains(expected.getMessage(), "Illegal value: -1");
}
nextId.set(-1);
try {
bindingProvider.get();
fail();
} catch (ProvisionException expected) {
assertContains(expected.getMessage(), "Illegal value: -1");
}
try {
nextId.set(-1);
newInjector().getInstance(injectsKey);
fail("Expected ProvisionException");
} catch (ProvisionException expected) {
assertContains(expected.getMessage(), "Illegal value: -1", "for 1st parameter");
}
nextId.set(201);
Injectable injectable = injectsKey.newInstance();
try {
nextId.set(-1);
newInjector().injectMembers(injectable);
} catch (ProvisionException expected) {
assertContains(expected.getMessage(), "Illegal value: -1", "for 1st parameter");
}
nextId.set(201);
Injectable hasProvider = newInjector().getInstance(injectsKey);
hasProvider.provider.get();
try {
nextId.set(-1);
hasProvider.provider.get();
// TODO(lukes): insert fail() call here
} catch (ProvisionException expected) {
assertContains(expected.getMessage(), "Illegal value: -1");
}
}
}
/** negative to throw, 101... for eager singletons, 201... for everything else */
static final AtomicInteger nextId = new AtomicInteger();
@ProvidedBy(PlainAProvider.class)
| UserExceptionsTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsAnyOf_with_Long_array_Test.java | {
"start": 1288,
"end": 1991
} | class ____ extends LongArrayAssertBaseTest {
@Test
void should_fail_if_values_is_null() {
// GIVEN
Long[] values = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertions.containsAnyOf(values));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("values").create());
}
@Override
protected LongArrayAssert invoke_api_method() {
return assertions.containsAnyOf(new Long[] { 6L, 8L });
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContainsAnyOf(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L));
}
}
| LongArrayAssert_containsAnyOf_with_Long_array_Test |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/SplitAggregateInOutTest.java | {
"start": 3907,
"end": 4874
} | class ____ {
private static int counter;
/**
* We just handle the order by returning a id line for the order
*/
public String handleOrder(String line) {
LOG.debug("HandleOrder: {}", line);
return "(id=" + ++counter + ",item=" + line + ")";
}
/**
* We use the same bean for building the combined response to send back to the original caller
*/
public String buildCombinedResponse(String line) {
LOG.debug("BuildCombinedResponse: {}", line);
return "Response[" + line + "]";
}
}
// END SNIPPET: e2
// START SNIPPET: e3
/**
* This is our own order aggregation strategy where we can control how each split message should be combined. As we
* do not want to loos any message we copy from the new to the old to preserve the order lines as long we process
* them
*/
public static | MyOrderService |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformStats.java | {
"start": 1118,
"end": 1165
} | class ____ cluster state or an index.
*/
public | to |
java | apache__flink | flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/materializedtable/scheduler/DeleteEmbeddedSchedulerWorkflowHandler.java | {
"start": 1920,
"end": 3476
} | class ____
extends AbstractSqlGatewayRestHandler<
EmbeddedSchedulerWorkflowRequestBody, EmptyResponseBody, EmptyMessageParameters> {
private final EmbeddedQuartzScheduler quartzScheduler;
public DeleteEmbeddedSchedulerWorkflowHandler(
SqlGatewayService service,
EmbeddedQuartzScheduler quartzScheduler,
Map<String, String> responseHeaders,
MessageHeaders<
EmbeddedSchedulerWorkflowRequestBody,
EmptyResponseBody,
EmptyMessageParameters>
messageHeaders) {
super(service, responseHeaders, messageHeaders);
this.quartzScheduler = quartzScheduler;
}
@Override
protected CompletableFuture<EmptyResponseBody> handleRequest(
@Nullable SqlGatewayRestAPIVersion version,
@Nonnull HandlerRequest<EmbeddedSchedulerWorkflowRequestBody> request)
throws RestHandlerException {
String workflowName = request.getRequestBody().getWorkflowName();
String workflowGroup = request.getRequestBody().getWorkflowGroup();
try {
quartzScheduler.deleteScheduleWorkflow(workflowName, workflowGroup);
return CompletableFuture.completedFuture(EmptyResponseBody.getInstance());
} catch (Exception e) {
throw new RestHandlerException(
e.getMessage(), HttpResponseStatus.INTERNAL_SERVER_ERROR, e);
}
}
}
| DeleteEmbeddedSchedulerWorkflowHandler |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/CastFunctionTest.java | {
"start": 1498,
"end": 2063
} | class ____
session.createQuery( "select cast(e.theLostNumber as java.lang.Integer) from MyEntity e" ).list();
// using the fqn Hibernate Type name
session.createQuery( "select cast(e.theLostNumber as org.hibernate.type.IntegerType) from MyEntity e" ).list();
}
);
}
@Test
public void testLongCasting(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
// using the short name (also the primitive name)
session.createQuery( "select cast(e.theLostNumber as long) from MyEntity e" ).list();
// using the java | name |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CassandraEndpointBuilderFactory.java | {
"start": 56283,
"end": 57341
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final CassandraHeaderNameBuilder INSTANCE = new CassandraHeaderNameBuilder();
/**
* The CQL query to execute.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code CqlQuery}.
*/
public String cqlQuery() {
return "CamelCqlQuery";
}
/**
* The resume action to execute when resuming.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code CqlResumeAction}.
*/
public String cqlResumeAction() {
return "CamelCqlResumeAction";
}
}
static CassandraEndpointBuilder endpointBuilder(String componentName, String path) {
| CassandraHeaderNameBuilder |
java | apache__camel | components/camel-telemetry-dev/src/test/java/org/apache/camel/telemetrydev/TelemetryDevTracerTestSupport.java | {
"start": 1432,
"end": 3465
} | class ____ extends ExchangeTestSupport {
private final ObjectMapper mapper = new ObjectMapper();
protected Map<String, DevTrace> tracesFromLog() throws IOException {
Map<String, DevTrace> answer = new HashMap<>();
Path path = Paths.get("target/telemetry-traces.log");
List<String> allTraces = Files.readAllLines(path);
for (String trace : allTraces) {
DevTrace st = mapper.readValue(trace, DevTrace.class);
if (answer.get(st.getTraceId()) != null) {
// Multiple traces exists for this traceId: this may happen
// when we deal with async events (like wiretap and the like)
DevTrace existing = answer.get(st.getTraceId());
List<DevSpanAdapter> mergedSpans = st.getSpans();
mergedSpans.addAll(existing.getSpans());
st = new DevTrace(st.getTraceId(), mergedSpans);
}
answer.put(st.getTraceId(), st);
}
return answer;
}
/*
* This one is required to rollover the log traces database file and make sure each test has its own
* set of fresh data.
*/
@AfterEach
public synchronized void clearLogTraces() throws IOException {
final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
RollingFileAppender appender = (RollingFileAppender) ctx.getConfiguration().getAppenders().get("file2");
if (appender != null) {
appender.getManager().rollover();
}
}
protected static DevSpanAdapter getSpan(List<DevSpanAdapter> trace, String uri, Op op) {
for (DevSpanAdapter span : trace) {
if (span.getTag("camel.uri") != null && span.getTag("camel.uri").equals(uri)) {
if (span.getTag(TagConstants.OP).equals(op.toString())) {
return span;
}
}
}
throw new IllegalArgumentException("Trying to get a non existing span!");
}
}
| TelemetryDevTracerTestSupport |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/CallablesTest.java | {
"start": 1514,
"end": 4520
} | class ____ extends TestCase {
@J2ktIncompatible // TODO(b/324550390): Enable
public void testReturning() throws Exception {
assertThat(Callables.returning(null).call()).isNull();
Object value = new Object();
Callable<Object> callable = Callables.returning(value);
assertSame(value, callable.call());
// Expect the same value on subsequent calls
assertSame(value, callable.call());
}
@J2ktIncompatible
@GwtIncompatible
public void testAsAsyncCallable() throws Exception {
String expected = "MyCallableString";
Callable<String> callable =
new Callable<String>() {
@Override
public String call() throws Exception {
return expected;
}
};
AsyncCallable<String> asyncCallable =
Callables.asAsyncCallable(callable, newDirectExecutorService());
ListenableFuture<String> future = asyncCallable.call();
assertSame(expected, future.get());
}
@J2ktIncompatible
@GwtIncompatible
public void testAsAsyncCallable_exception() throws Exception {
Exception expected = new IllegalArgumentException();
Callable<String> callable =
new Callable<String>() {
@Override
public String call() throws Exception {
throw expected;
}
};
AsyncCallable<String> asyncCallable =
Callables.asAsyncCallable(callable, newDirectExecutorService());
ListenableFuture<String> future = asyncCallable.call();
ExecutionException e = assertThrows(ExecutionException.class, () -> future.get());
assertThat(e).hasCauseThat().isSameInstanceAs(expected);
}
@J2ktIncompatible
@GwtIncompatible // threads
public void testRenaming() throws Exception {
String oldName = Thread.currentThread().getName();
Supplier<String> newName = Suppliers.ofInstance("MyCrazyThreadName");
Callable<@Nullable Void> callable =
new Callable<@Nullable Void>() {
@Override
public @Nullable Void call() throws Exception {
assertEquals(Thread.currentThread().getName(), newName.get());
return null;
}
};
Callables.threadRenaming(callable, newName).call();
assertEquals(oldName, Thread.currentThread().getName());
}
@J2ktIncompatible
@GwtIncompatible // threads
public void testRenaming_exceptionalReturn() throws Exception {
String oldName = Thread.currentThread().getName();
Supplier<String> newName = Suppliers.ofInstance("MyCrazyThreadName");
Callable<@Nullable Void> callable =
new Callable<@Nullable Void>() {
@Override
public @Nullable Void call() throws Exception {
assertEquals(Thread.currentThread().getName(), newName.get());
throw new SomeCheckedException();
}
};
assertThrows(
SomeCheckedException.class, () -> Callables.threadRenaming(callable, newName).call());
assertEquals(oldName, Thread.currentThread().getName());
}
}
| CallablesTest |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/query/AuditAssociationQuery.java | {
"start": 567,
"end": 1782
} | interface ____<Q extends AuditQuery> extends AuditQuery {
@Override
AuditAssociationQuery<? extends AuditAssociationQuery<Q>> traverseRelation(String associationName, JoinType joinType);
@Override
AuditAssociationQuery<? extends AuditAssociationQuery<Q>> traverseRelation(String associationName, JoinType joinType,
String alias);
@Override
AuditAssociationQuery<Q> add(AuditCriterion criterion);
@Override
AuditAssociationQuery<Q> addOrder(AuditOrder order);
@Override
AuditAssociationQuery<Q> addProjection(AuditProjection projection);
@Override
AuditAssociationQuery<Q> setMaxResults(int maxResults);
@Override
AuditAssociationQuery<Q> setFirstResult(int firstResult);
@Override
AuditAssociationQuery<Q> setCacheable(boolean cacheable);
@Override
AuditAssociationQuery<Q> setCacheRegion(String cacheRegion);
@Override
AuditAssociationQuery<Q> setComment(String comment);
@Override
AuditAssociationQuery<Q> setFlushMode(FlushMode flushMode);
@Override
AuditAssociationQuery<Q> setCacheMode(CacheMode cacheMode);
@Override
AuditAssociationQuery<Q> setTimeout(int timeout);
@Override
AuditAssociationQuery<Q> setLockMode(LockMode lockMode);
Q up();
}
| AuditAssociationQuery |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_isEmpty_Test.java | {
"start": 929,
"end": 1359
} | class ____ extends LongArrayAssertBaseTest {
@Override
protected LongArrayAssert invoke_api_method() {
assertions.isEmpty();
return null;
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEmpty(getInfo(assertions), getActual(assertions));
}
@Override
@Test
public void should_return_this() {
// Disable this test because isEmpty is void
}
}
| LongArrayAssert_isEmpty_Test |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/struct/FormatFeatureAcceptSingleTest.java | {
"start": 4510,
"end": 10668
} | class ____ {
final Role role;
private WrapperWithStringFactory(Role role) {
this.role = role;
}
@JsonCreator
static WrapperWithStringFactory from(String value) {
Role role = new Role();
role.ID = "1";
role.Name = value;
return new WrapperWithStringFactory(role);
}
}
private final ObjectMapper MAPPER = newJsonMapper();
/*
/**********************************************************
/* Test methods, reading with single-element unwrapping
/**********************************************************
*/
@Test
public void testSingleStringArrayRead() throws Exception {
String json = a2q(
"{ 'values': 'first' }");
StringArrayWrapper result = MAPPER.readValue(json, StringArrayWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.length);
assertEquals("first", result.values[0]);
// and then without annotation, but with global override
ObjectMapper mapper = jsonMapperBuilder()
.withConfigOverride(String[].class,
o -> o.setFormat(JsonFormat.Value.empty()
.withFeature(JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY)))
.build();
StringArrayNotAnnoted result2 = mapper.readValue(json, StringArrayNotAnnoted.class);
assertNotNull(result2.values);
assertEquals(1, result2.values.length);
assertEquals("first", result2.values[0]);
}
@Test
public void testSingleIntArrayRead() throws Exception {
String json = a2q(
"{ 'values': 123 }");
IntArrayWrapper result = MAPPER.readValue(json, IntArrayWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.length);
assertEquals(123, result.values[0]);
}
@Test
public void testSingleLongArrayRead() throws Exception {
String json = a2q(
"{ 'values': -205 }");
LongArrayWrapper result = MAPPER.readValue(json, LongArrayWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.length);
assertEquals(-205L, result.values[0]);
}
@Test
public void testSingleBooleanArrayRead() throws Exception {
String json = a2q(
"{ 'values': true }");
BooleanArrayWrapper result = MAPPER.readValue(json, BooleanArrayWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.length);
assertTrue(result.values[0]);
}
@Test
public void testSingleDoubleArrayRead() throws Exception {
String json = a2q(
"{ 'values': -0.5 }");
DoubleArrayWrapper result = MAPPER.readValue(json, DoubleArrayWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.length);
assertEquals(-0.5, result.values[0]);
}
@Test
public void testSingleFloatArrayRead() throws Exception {
String json = a2q(
"{ 'values': 0.25 }");
FloatArrayWrapper result = MAPPER.readValue(json, FloatArrayWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.length);
assertEquals(0.25f, result.values[0]);
}
@Test
public void testSingleElementArrayRead() throws Exception {
String json = a2q(
"{ 'roles': { 'Name': 'User', 'ID': '333' } }");
RolesInArray response = MAPPER.readValue(json, RolesInArray.class);
assertNotNull(response.roles);
assertEquals(1, response.roles.length);
assertEquals("333", response.roles[0].ID);
}
@Test
public void testSingleStringListRead() throws Exception {
String json = a2q(
"{ 'values': 'first' }");
StringListWrapper result = MAPPER.readValue(json, StringListWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.size());
assertEquals("first", result.values.get(0));
}
@Test
public void testSingleStringListReadWithBuilder() throws Exception {
String json = a2q(
"{ 'values': 'first' }");
StringListWrapperWithBuilder result =
MAPPER.readValue(json, StringListWrapperWithBuilder.class);
assertNotNull(result.values);
assertEquals(1, result.values.size());
assertEquals("first", result.values.get(0));
}
@Test
public void testSingleElementListRead() throws Exception {
String json = a2q(
"{ 'roles': { 'Name': 'User', 'ID': '333' } }");
RolesInList response = MAPPER.readValue(json, RolesInList.class);
assertNotNull(response.roles);
assertEquals(1, response.roles.size());
assertEquals("333", response.roles.get(0).ID);
}
@Test
public void testSingleElementListReadWithBuilder() throws Exception {
String json = a2q(
"{ 'roles': { 'Name': 'User', 'ID': '333' } }");
RolesInListWithBuilder response = MAPPER.readValue(json, RolesInListWithBuilder.class);
assertNotNull(response.roles);
assertEquals(1, response.roles.size());
assertEquals("333", response.roles.get(0).ID);
}
@Test
public void testSingleElementWithStringFactoryRead() throws Exception {
String json = a2q(
"{ 'values': '333' }");
WrapperWithStringFactoryInList response = MAPPER.readValue(json, WrapperWithStringFactoryInList.class);
assertNotNull(response.values);
assertEquals(1, response.values.size());
assertEquals("333", response.values.get(0).role.Name);
}
@Test
public void testSingleEnumSetRead() throws Exception {
EnumSetWrapper result = MAPPER.readValue(a2q("{ 'values': 'B' }"),
EnumSetWrapper.class);
assertNotNull(result.values);
assertEquals(1, result.values.size());
assertEquals(ABC.B, result.values.iterator().next());
}
}
| WrapperWithStringFactory |
java | alibaba__nacos | client/src/test/java/com/alibaba/nacos/client/naming/event/InstancesChangeNotifierTest.java | {
"start": 1867,
"end": 7976
} | class ____ {
private static final String EVENT_SCOPE_CASE = "scope-001";
private static final String GROUP_CASE = "a";
private static final String SERVICE_NAME_CASE = "b";
private static final String CLUSTER_STR_CASE = "c";
InstancesChangeNotifier instancesChangeNotifier;
@BeforeEach
public void setUp() {
instancesChangeNotifier = new InstancesChangeNotifier(EVENT_SCOPE_CASE);
}
@Test
void testRegisterListener() {
List<String> clusters = Collections.singletonList(CLUSTER_STR_CASE);
EventListener listener = Mockito.mock(EventListener.class);
NamingSelector selector = NamingSelectorFactory.newClusterSelector(clusters);
NamingSelectorWrapper wrapper = new NamingSelectorWrapper(SERVICE_NAME_CASE, GROUP_CASE, CLUSTER_STR_CASE,
selector, listener);
instancesChangeNotifier.registerListener(GROUP_CASE, SERVICE_NAME_CASE, wrapper);
List<ServiceInfo> subscribeServices = instancesChangeNotifier.getSubscribeServices();
assertEquals(1, subscribeServices.size());
assertEquals(GROUP_CASE, subscribeServices.get(0).getGroupName());
assertEquals(SERVICE_NAME_CASE, subscribeServices.get(0).getName());
assertNull(subscribeServices.get(0).getClusters());
List<Instance> hosts = new ArrayList<>();
Instance ins = new Instance();
hosts.add(ins);
InstancesDiff diff = new InstancesDiff();
diff.setAddedInstances(hosts);
InstancesChangeEvent event = new InstancesChangeEvent(EVENT_SCOPE_CASE, SERVICE_NAME_CASE, GROUP_CASE,
CLUSTER_STR_CASE, hosts, diff);
assertTrue(instancesChangeNotifier.scopeMatches(event));
}
@Test
void testDeregisterListener() {
List<String> clusters = Collections.singletonList(CLUSTER_STR_CASE);
EventListener listener = Mockito.mock(EventListener.class);
NamingSelector selector = NamingSelectorFactory.newClusterSelector(clusters);
NamingSelectorWrapper wrapper = new NamingSelectorWrapper(selector, listener);
instancesChangeNotifier.registerListener(GROUP_CASE, SERVICE_NAME_CASE, wrapper);
List<ServiceInfo> subscribeServices = instancesChangeNotifier.getSubscribeServices();
assertEquals(1, subscribeServices.size());
instancesChangeNotifier.deregisterListener(GROUP_CASE, SERVICE_NAME_CASE, wrapper);
List<ServiceInfo> subscribeServices2 = instancesChangeNotifier.getSubscribeServices();
assertEquals(0, subscribeServices2.size());
}
@Test
void testIsSubscribed() {
List<String> clusters = Collections.singletonList(CLUSTER_STR_CASE);
EventListener listener = Mockito.mock(EventListener.class);
NamingSelector selector = NamingSelectorFactory.newClusterSelector(clusters);
assertFalse(instancesChangeNotifier.isSubscribed(GROUP_CASE, SERVICE_NAME_CASE));
NamingSelectorWrapper wrapper = new NamingSelectorWrapper(SERVICE_NAME_CASE, GROUP_CASE, CLUSTER_STR_CASE,
selector, listener);
instancesChangeNotifier.registerListener(GROUP_CASE, SERVICE_NAME_CASE, wrapper);
assertTrue(instancesChangeNotifier.isSubscribed(GROUP_CASE, SERVICE_NAME_CASE));
}
@Test
void testOnEvent() {
List<String> clusters = Collections.singletonList(CLUSTER_STR_CASE);
NamingSelector selector = NamingSelectorFactory.newClusterSelector(clusters);
EventListener listener = Mockito.mock(EventListener.class);
NamingSelectorWrapper wrapper = new NamingSelectorWrapper(SERVICE_NAME_CASE, GROUP_CASE, CLUSTER_STR_CASE,
selector, listener);
instancesChangeNotifier.registerListener(GROUP_CASE, SERVICE_NAME_CASE, wrapper);
Instance instance = new Instance();
InstancesDiff diff = new InstancesDiff(null, Collections.singletonList(instance), null);
instance.setClusterName(CLUSTER_STR_CASE);
InstancesChangeEvent event1 = new InstancesChangeEvent(null, SERVICE_NAME_CASE, GROUP_CASE, CLUSTER_STR_CASE,
Collections.emptyList(), diff);
instancesChangeNotifier.onEvent(event1);
Mockito.verify(listener, times(1)).onEvent(any());
}
@Test
void testOnEventWithoutListener() {
InstancesChangeEvent event1 = Mockito.mock(InstancesChangeEvent.class);
when(event1.getClusters()).thenReturn(CLUSTER_STR_CASE);
when(event1.getGroupName()).thenReturn(GROUP_CASE);
when(event1.getServiceName()).thenReturn(SERVICE_NAME_CASE);
EventListener listener = Mockito.mock(EventListener.class);
instancesChangeNotifier.registerListener(GROUP_CASE, SERVICE_NAME_CASE + "c", new NamingSelectorWrapper(
NamingSelectorFactory.newClusterSelector(Collections.singletonList(CLUSTER_STR_CASE)), listener));
instancesChangeNotifier.onEvent(event1);
Mockito.verify(listener, never()).onEvent(any());
}
    @Test
    void testOnEventByExecutor() {
        // AbstractEventListener exposes an Executor; notification must be
        // dispatched through it rather than run inline.
        AbstractEventListener listener = Mockito.mock(AbstractEventListener.class);
        Executor executor = mock(Executor.class);
        when(listener.getExecutor()).thenReturn(executor);
        // DefaultNamingSelector with an always-true predicate matches any instance.
        instancesChangeNotifier.registerListener(GROUP_CASE, SERVICE_NAME_CASE,
                new NamingSelectorWrapper(new DefaultNamingSelector(instance -> true), listener));
        InstancesDiff instancesDiff = new InstancesDiff();
        instancesDiff.setRemovedInstances(Collections.singletonList(new Instance()));
        InstancesChangeEvent event = new InstancesChangeEvent(EVENT_SCOPE_CASE, SERVICE_NAME_CASE, GROUP_CASE,
                CLUSTER_STR_CASE, new ArrayList<>(), instancesDiff);
        instancesChangeNotifier.onEvent(event);
        // The listener callback is handed to the listener's executor.
        Mockito.verify(executor).execute(any());
    }
@Test
void testSubscribeType() {
assertEquals(InstancesChangeEvent.class, instancesChangeNotifier.subscribeType());
}
} | InstancesChangeNotifierTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/RandomTextWriterJob.java | {
"start": 2338,
"end": 3364
} | enum ____ { RECORDS_WRITTEN, BYTES_WRITTEN }
public Job createJob(Configuration conf) throws IOException {
long numBytesToWritePerMap = conf.getLong(BYTES_PER_MAP, 10 * 1024);
long totalBytesToWrite = conf.getLong(TOTAL_BYTES, numBytesToWritePerMap);
int numMaps = (int) (totalBytesToWrite / numBytesToWritePerMap);
if (numMaps == 0 && totalBytesToWrite > 0) {
numMaps = 1;
conf.setLong(BYTES_PER_MAP, totalBytesToWrite);
}
conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
Job job = Job.getInstance(conf);
job.setJarByClass(RandomTextWriterJob.class);
job.setJobName("random-text-writer");
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
job.setInputFormatClass(RandomInputFormat.class);
job.setMapperClass(RandomTextMapper.class);
job.setOutputFormatClass(SequenceFileOutputFormat.class);
//FileOutputFormat.setOutputPath(job, new Path("random-output"));
job.setNumReduceTasks(0);
return job;
}
public static | Counters |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/parser/SQLExprParser.java | {
"start": 1499,
"end": 231998
} | class ____ extends SQLParser {
public static final String[] AGGREGATE_FUNCTIONS;
public static final long[] AGGREGATE_FUNCTIONS_CODES;
private static final List<String> NESTED_DATA_TYPE;
static {
String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM"};
AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
for (String str : strings) {
long hash = FnvHash.fnv1a_64_lower(str);
int index = Arrays.binarySearch(AGGREGATE_FUNCTIONS_CODES, hash);
AGGREGATE_FUNCTIONS[index] = str;
}
NESTED_DATA_TYPE = Collections.singletonList("array");
}
protected String[] aggregateFunctions = AGGREGATE_FUNCTIONS;
protected long[] aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
protected List<String> nestedDataType = NESTED_DATA_TYPE;
protected boolean allowIdentifierMethod = true;
    /** Creates a parser over raw SQL text using the default dialect. */
    public SQLExprParser(String sql) {
        super(sql);
    }

    /** Creates a parser over raw SQL text for a specific dialect with optional features. */
    public SQLExprParser(String sql, DbType dbType, SQLParserFeature... features) {
        super(sql, dbType, features);
    }

    /** Wraps an existing lexer (caller controls tokenization state). */
    public SQLExprParser(Lexer lexer) {
        super(lexer);
    }

    /** Wraps an existing lexer for a specific dialect. */
    public SQLExprParser(Lexer lexer, DbType dbType) {
        super(lexer, dbType);
    }

    /** Toggles whether bare identifiers may be parsed as method invocations. */
    public void setAllowIdentifierMethod(boolean allowIdentifierMethod) {
        this.allowIdentifierMethod = allowIdentifierMethod;
    }
    /**
     * Parses one full SQL expression: a leading '*' (select-all, optionally
     * qualified or with an EXCEPT column list), or a primary expression
     * followed by binary-operator tails via {@link #exprRest}.
     * Also tracks whether the primary was parenthesized so the flag can be
     * propagated onto the resulting node.
     */
    public SQLExpr expr() {
        if (lexer.token == Token.STAR) {
            lexer.nextToken();
            SQLAllColumnExpr expr = new SQLAllColumnExpr();
            if (lexer.token == Token.DOT) {
                // "*.*" — wrap the all-column expr as a property access
                lexer.nextToken();
                accept(Token.STAR);
                return new SQLPropertyExpr(expr, "*");
            }
            if (lexer.token == Token.EXCEPT) {
                // "* EXCEPT (col, ...)" — columns excluded from the star
                lexer.nextToken();
                accept(Token.LPAREN);
                List<SQLExpr> except = new ArrayList<>();
                this.exprList(except, expr);
                expr.setExcept(except);
                accept(Token.RPAREN);
            }
            return expr;
        }
        boolean parenthesized = (lexer.token == Token.LPAREN);
        SQLExpr expr = primary();
        // If primary() already recorded the parens on the node, don't re-mark it.
        if (parenthesized && expr instanceof SQLBinaryOpExpr) {
            if (((SQLBinaryOpExpr) expr).isParenthesized()) {
                parenthesized = false;
            }
        }
        if (parenthesized && expr instanceof SQLCaseExpr) {
            parenthesized = false;
            ((SQLCaseExpr) expr).setParenthesized(true);
        }
        if (parenthesized && expr instanceof SQLUnaryExpr) {
            if (((SQLUnaryExpr) expr).isParenthesized()) {
                parenthesized = false;
            }
        }
        if (parenthesized && expr instanceof SQLQueryExpr) {
            parenthesized = false;
            ((SQLQueryExpr) expr).setParenthesized(true);
        }
        if (parenthesized && expr instanceof SQLIdentifierExpr) {
            parenthesized = false;
            ((SQLIdentifierExpr) expr).setParenthesized(true);
        }
        Lexer.SavePoint mark = lexer.mark();
        Token token = lexer.token;
        if (token == Token.COMMA) {
            return expr;
        } else if (token == Token.EQ || token == Token.EQEQ) {
            // start directly at relational precedence for '='/'=='
            expr = relationalRest(expr);
            expr = andRest(expr);
            expr = xorRest(expr);
            expr = orRest(expr);
            return expr;
        }
        if (token == Token.IN) {
            // Peek past IN: "IN PARTITION" belongs to the statement, not this expr.
            lexer.nextToken();
            if (lexer.token == Token.PARTITION) {
                lexer.reset(mark);
                return expr;
            } else {
                lexer.reset(mark);
                return exprRest(expr);
            }
        } else {
            SQLExpr sqlExpr = exprRest(expr);
            if (token == SUBGT) {
                // '->' results do not carry the paren flag
                parenthesized = false;
            }
            if (parenthesized && sqlExpr instanceof SQLBinaryOpExpr) {
                ((SQLBinaryOpExpr) sqlExpr).setParenthesized(true);
            }
            if (parenthesized && sqlExpr instanceof SQLUnaryExpr) {
                ((SQLUnaryExpr) sqlExpr).setParenthesized(true);
            }
            if (parenthesized && sqlExpr instanceof MySqlCharExpr) {
                ((MySqlCharExpr) sqlExpr).setParenthesized(true);
            }
            return sqlExpr;
        }
    }
    /**
     * Applies every binary-operator "rest" level to an already-parsed left
     * operand, from highest to lowest precedence. The call order IS the
     * precedence table — do not reorder.
     */
    public SQLExpr exprRest(SQLExpr expr) {
        expr = bitXorRest(expr);
        expr = multiplicativeRest(expr);
        expr = additiveRest(expr);
        expr = shiftRest(expr);
        expr = bitAndRest(expr);
        expr = bitOrRest(expr);
        expr = inRest(expr);
        expr = relationalRest(expr);
        // expr = equalityRest(expr);
        expr = andRest(expr);
        expr = xorRest(expr);
        expr = orRest(expr);
        return expr;
    }
    /** Parses a primary and folds any bit-xor-level operators onto it. */
    public final SQLExpr bitXor() {
        SQLExpr expr = primary();
        return bitXorRest(expr);
    }

    /**
     * Hook for the right operand of '->'; dialect subclasses may override.
     * Default: a plain primary.
     */
    protected SQLExpr bitXorRestSUBGT() {
        return primary();
    }
public SQLExpr bitXorRest(SQLExpr expr) {
Token token = lexer.token;
switch (token) {
case CARET: {
lexer.nextToken();
SQLBinaryOperator op;
if (lexer.token == Token.EQ) {
lexer.nextToken();
op = SQLBinaryOperator.BitwiseXorEQ;
} else {
op = SQLBinaryOperator.BitwiseXor;
}
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, op, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case SUBGT: {
lexer.nextToken();
SQLExpr rightExp = bitXorRestSUBGT();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.SubGt, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case LT_SUB_GT: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.PG_ST_DISTANCE, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case SUBGTGT: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.SubGtGt, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case POUNDGT: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.PoundGt, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case POUNDGTGT: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.PoundGtGt, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case QUESQUES: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.QuesQues, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case QUESBAR: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.QuesBar, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case QUESQUESBAR: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.QuesQuesBar, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
case QUESAMP: {
lexer.nextToken();
SQLExpr rightExp = primary();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.QuesAmp, rightExp, dbType);
expr = bitXorRest(expr);
break;
}
default:
break;
}
return expr;
}
public final SQLExpr multiplicative() {
SQLExpr expr = bitXor();
return multiplicativeRest(expr);
}
    /**
     * Folds multiplicative-level operators ('*', '/', '%', DIV, MOD) onto
     * {@code expr}. Two special cases: MOD is disambiguated with a lexer
     * savepoint (it may end the expression), and a negative integer literal
     * directly after an expression is re-interpreted as subtraction
     * (e.g. "a -1" => a - 1).
     */
    public SQLExpr multiplicativeRest(SQLExpr expr) {
        final Token token = lexer.token;
        if (token == Token.STAR) {
            lexer.nextToken();
            SQLExpr rightExp = bitXor();
            expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Multiply, rightExp, getDbType());
            expr = multiplicativeRest(expr);
        } else if (token == Token.SLASH) {
            lexer.nextToken();
            SQLExpr rightExp = bitXor();
            expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Divide, rightExp, getDbType());
            expr = multiplicativeRest(expr);
        } else if (token == Token.PERCENT) {
            lexer.nextToken();
            SQLExpr rightExp = bitXor();
            expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, getDbType());
            expr = multiplicativeRest(expr);
        } else if (token == Token.DIV) {
            lexer.nextToken();
            SQLExpr rightExp = bitXor();
            expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.DIV, rightExp, getDbType());
            expr = multiplicativeRest(expr);
        } else if (lexer.identifierEquals(FnvHash.Constants.MOD) || lexer.token == Token.MOD) {
            // MOD might be a trailing keyword rather than an operator; peek and
            // roll back if no right operand follows.
            Lexer.SavePoint savePoint = lexer.mark();
            lexer.nextToken();
            if (lexer.token == Token.COMMA
                    || lexer.token == Token.EOF
                    || lexer.token == Token.ON
                    || lexer.token == Token.WHERE
                    || lexer.token == Token.RPAREN) {
                lexer.reset(savePoint);
                return expr;
            }
            SQLExpr rightExp = bitXor();
            expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, dbType);
            expr = multiplicativeRest(expr);
        } else if (token == Token.LITERAL_INT && lexer.isNegativeIntegerValue()) {
            // "expr -N": negate the literal and emit a subtraction instead.
            Number number = lexer.integerValue();
            if (number instanceof Integer) {
                number = -number.intValue();
            } else if (number instanceof Long) {
                number = -number.longValue();
            } else if (number instanceof BigInteger) {
                // NOTE(review): abs() assumes the BigInteger is negative here —
                // guaranteed by isNegativeIntegerValue(); verify for edge lexers.
                number = ((BigInteger) number).abs();
            } else {
                throw new ParserException("not support value : " + number + ", " + lexer.info());
            }
            SQLIntegerExpr rightExp = new SQLIntegerExpr(number);
            expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Subtract, rightExp, dbType);
            lexer.nextToken();
            expr = multiplicativeRest(expr);
        }
        return expr;
    }
    /**
     * Parses an (optionally '-'-prefixed) integer literal into a
     * {@link SQLIntegerExpr}. The negative form narrows through longValue().
     */
    public SQLIntegerExpr integerExpr() {
        SQLIntegerExpr intExpr = null;
        if (lexer.token() == Token.SUB) {
            lexer.nextToken();
            intExpr = new SQLIntegerExpr(lexer.integerValue().longValue() * -1);
        } else {
            intExpr = new SQLIntegerExpr(lexer.integerValue());
        }
        accept(Token.LITERAL_INT);
        return intExpr;
    }

    /** Parses a quoted string literal token into a {@link SQLCharExpr}. */
    public SQLCharExpr charExpr() {
        SQLCharExpr charExpr = new SQLCharExpr(lexer.stringVal());
        accept(Token.LITERAL_CHARS);
        return charExpr;
    }
public int parseIntValue() {
if (lexer.token == Token.LITERAL_INT) {
Number number = this.lexer.integerValue();
int intVal = ((Integer) number).intValue();
lexer.nextToken();
return intVal;
} else {
throw new ParserException("not int. " + lexer.info());
}
}
public void parseAssignItems(List<? super SQLAssignItem> items, SQLObject parent, boolean variant) {
for (; ; ) {
SQLAssignItem item = parseAssignItem(variant, parent);
item.setParent(parent);
items.add(item);
if (lexer.token == Token.COMMA) {
lexer.nextToken();
continue;
} else {
break;
}
}
}
    /**
     * Dialect hook invoked after an identifier token in primary():
     * VARCHAR 'x' / CHAR 'x' collapse to a char literal. Returns null when
     * the identifier needs no special handling.
     */
    protected SQLExpr primaryIdentifierRest(long hash_lower, String ident) {
        SQLExpr sqlExpr = null;
        if (hash_lower == FnvHash.Constants.VARCHAR && lexer.token == Token.LITERAL_CHARS) {
            sqlExpr = new SQLCharExpr(lexer.stringVal());
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.CHAR && lexer.token == Token.LITERAL_CHARS) {
            sqlExpr = new SQLCharExpr(lexer.stringVal());
            lexer.nextToken();
        }
        return sqlExpr;
    }

    /**
     * Splits a quoted dotted name into a chained SQLPropertyExpr.
     * Assumes ident is wrapped in quote chars and contains at least two
     * dot-separated parts — TODO confirm callers guarantee this; a single
     * part would throw ArrayIndexOutOfBoundsException.
     */
    protected SQLPropertyExpr topPropertyExpr(String ident) {
        String substring = ident.substring(1, ident.length() - 1);
        String[] items = substring.split("\\.");
        SQLPropertyExpr expr = new SQLPropertyExpr(items[0], items[1]);
        for (int i = 2; i < items.length; i++) {
            expr = new SQLPropertyExpr(expr, items[i]);
        }
        return expr;
    }
    // The primary*() methods below are dialect extension points called from
    // primary(); base behavior is either a pass-through or a hard parse error.

    /** Consumes the char-literal token; dialects may append adjacent literals. */
    protected SQLExpr primaryLiteralCharsRest(SQLExpr sqlExpr) {
        lexer.nextToken();
        return sqlExpr;
    }

    /** Post-processing hook for N'...' literals; base is a no-op. */
    protected SQLExpr primaryLiteralNCharsRest(SQLExpr sqlExpr) {
        return sqlExpr;
    }

    /** Hook for the DEFAULT keyword as an expression. */
    protected SQLExpr primaryDefaultRest() {
        return new SQLDefaultExpr();
    }

    /** Hook for a string literal following unary '-'. */
    protected SQLExpr primarySubLiteralAliasRest() {
        return new SQLIdentifierExpr(lexer.stringVal());
    }

    /** Hook for the '?' placeholder token; base just consumes it. */
    protected void primaryQues() {
        lexer.nextToken();
    }

    /** Keyword-as-expression hook; unsupported in the base dialect. */
    protected SQLExpr primaryCommon(SQLExpr sqlExpr) {
        throw new ParserException("ERROR. " + lexer.info());
    }

    /** Hook for AS in primary position; base is a no-op. */
    protected SQLExpr primaryAs(SQLExpr sqlExpr) {
        return sqlExpr;
    }

    /** Hook for DISTINCT in primary position; unsupported in the base dialect. */
    protected SQLExpr primaryDistinct(SQLExpr sqlExpr) {
        throw new ParserException("ERROR. " + lexer.info());
    }

    /** Hook for BETWEEN/IN in primary position; unsupported in the base dialect. */
    protected SQLExpr primaryIn(SQLExpr sqlExpr) {
        throw new ParserException("ERROR. " + lexer.info());
    }

    /** Hook for ON in primary position; unsupported in the base dialect. */
    protected SQLExpr primaryOn(SQLExpr sqlExpr) {
        throw new ParserException("ERROR. " + lexer.info());
    }

    /** Hook for '::' casts in primary position; unsupported in the base dialect. */
    protected SQLExpr primaryColonColon(SQLExpr sqlExpr) {
        throw new ParserException("ERROR. " + lexer.info());
    }
public SQLExpr primary() {
List<String> beforeComments = null;
if (lexer.isKeepComments() && lexer.hasComment()) {
beforeComments = lexer.readAndResetComments();
}
SQLExpr sqlExpr = null;
final Token tok = lexer.token;
switch (tok) {
case LPAREN:
lexer.nextToken();
if (lexer.token == Token.RPAREN) {
lexer.nextToken();
sqlExpr = new SQLMethodInvokeExpr();
break;
}
sqlExpr = expr();
if (lexer.token == Token.COMMA) {
SQLListExpr listExpr = new SQLListExpr();
listExpr.addItem(sqlExpr);
do {
lexer.nextToken();
listExpr.addItem(expr());
} while (lexer.token == Token.COMMA);
sqlExpr = listExpr;
}
if (sqlExpr instanceof SQLExprImpl) {
((SQLExprImpl) sqlExpr).setParenthesized(true);
}
if ((lexer.token == Token.UNION || lexer.token == Token.MINUS || lexer.token == Token.EXCEPT)
&& sqlExpr instanceof SQLQueryExpr) {
SQLQueryExpr queryExpr = (SQLQueryExpr) sqlExpr;
SQLSelectQuery query = this.createSelectParser().queryRest(queryExpr.getSubQuery().getQuery(), true);
queryExpr.getSubQuery().setQuery(query);
}
accept(Token.RPAREN);
break;
case INSERT:
lexer.nextToken();
if (lexer.token != Token.LPAREN) {
throw new ParserException("syntax error. " + lexer.info());
}
sqlExpr = new SQLIdentifierExpr("INSERT");
break;
case IDENTIFIER:
String ident = lexer.stringVal();
long hash_lower = lexer.hashLCase();
int sourceLine = -1, sourceColumn = -1;
if (lexer.keepSourceLocation) {
lexer.computeRowAndColumn();
sourceLine = lexer.posLine;
sourceColumn = lexer.posColumn;
}
lexer.nextToken();
if (lexer.identifierEquals("COLLATE")) {
acceptIdentifier("COLLATE");
String collateValue = lexer.stringVal();
if (lexer.token == Token.IDENTIFIER || lexer.token == Token.LITERAL_ALIAS || lexer.token == Token.LITERAL_CHARS) {
SQLIdentifierExpr identifierExpr = new SQLIdentifierExpr(ident);
identifierExpr.setCollate(collateValue);
lexer.nextToken();
sqlExpr = identifierExpr;
} else {
throw new ParserException("syntax error. " + lexer.info());
}
} else if (hash_lower == FnvHash.Constants.TRY_CAST) {
accept(Token.LPAREN);
SQLCastExpr cast = new SQLCastExpr();
cast.setTry(true);
cast.setExpr(expr());
accept(Token.AS);
cast.setDataType(parseDataType(false));
accept(Token.RPAREN);
sqlExpr = cast;
} else if (hash_lower == FnvHash.Constants.DATE
&& (lexer.token == Token.LITERAL_CHARS || lexer.token == Token.VARIANT)
&& (dialectFeatureEnabled(SQLDateExpr))) {
String literal = lexer.token == Token.LITERAL_CHARS ? lexer.stringVal() : "?";
lexer.nextToken();
SQLDateExpr dateExpr = new SQLDateExpr();
dateExpr.setValue(literal);
sqlExpr = dateExpr;
} else if (hash_lower == FnvHash.Constants.TIMESTAMP
&& (lexer.token == Token.LITERAL_CHARS || lexer.token == Token.VARIANT)
&& dialectFeatureEnabled(SQLTimestampExpr)) {
SQLTimestampExpr dateExpr = new SQLTimestampExpr(lexer.stringVal());
lexer.nextToken();
sqlExpr = dateExpr;
} else if (hash_lower == FnvHash.Constants.TIME
&& (lexer.token == Token.LITERAL_CHARS || lexer.token == Token.VARIANT)) {
SQLTimeExpr dateExpr = new SQLTimeExpr(lexer.stringVal());
lexer.nextToken();
sqlExpr = dateExpr;
} else if (hash_lower == FnvHash.Constants.TIME && lexer.token == Token.LITERAL_ALIAS) {
SQLTimeExpr dateExpr = new SQLTimeExpr(SQLUtils.normalize(lexer.stringVal()));
lexer.nextToken();
sqlExpr = dateExpr;
} else if (hash_lower == FnvHash.Constants.DATETIME
&& (lexer.token == Token.LITERAL_CHARS || lexer.token == Token.VARIANT)) {
SQLDateTimeExpr dateExpr = new SQLDateTimeExpr(lexer.stringVal());
lexer.nextToken();
sqlExpr = dateExpr;
} else if (hash_lower == FnvHash.Constants.DATETIME && lexer.token == Token.LITERAL_ALIAS) {
SQLDateTimeExpr dateExpr = new SQLDateTimeExpr(SQLUtils.normalize(lexer.stringVal()));
lexer.nextToken();
sqlExpr = dateExpr;
} else if (hash_lower == FnvHash.Constants.BOOLEAN && lexer.token == Token.LITERAL_CHARS) {
sqlExpr = new SQLBooleanExpr(Boolean.valueOf(lexer.stringVal()));
lexer.nextToken();
} else if (hash_lower == FnvHash.Constants.DATE && lexer.token == Token.LITERAL_ALIAS) {
sqlExpr = new SQLDateExpr(lexer.stringVal());
lexer.nextToken();
} else if (hash_lower == FnvHash.Constants.DATETIME && lexer.token == Token.LITERAL_ALIAS) {
sqlExpr = new SQLDateTimeExpr(lexer.stringVal());
lexer.nextToken();
} else if (hash_lower == FnvHash.Constants.TIMESTAMP && lexer.token == Token.LITERAL_ALIAS) {
sqlExpr = new SQLTimestampExpr(lexer.stringVal());
lexer.nextToken();
} else if (hash_lower == FnvHash.Constants.ARRAY && (lexer.token == Token.LBRACKET || lexer.token == Token.LT)) {
sqlExpr = parseArrayExpr(ident);
} else {
sqlExpr = primaryIdentifierRest(hash_lower, ident);
if (sqlExpr == null) {
char c0 = ident.charAt(0);
if (c0 == '`' || c0 == '[' || c0 == '"') {
if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
ident = ident.substring(1, ident.length() - 1);
}
hash_lower = FnvHash.hashCode64(ident);
}
SQLIdentifierExpr identifierExpr = new SQLIdentifierExpr(ident, hash_lower);
if (sourceLine != -1) {
identifierExpr.setSource(sourceLine, sourceColumn);
}
sqlExpr = identifierExpr;
}
}
break;
case NEW:
throw new ParserException("TODO " + lexer.info());
case LITERAL_INT:
Number number = lexer.integerValue();
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.BD)) {
SQLDecimalExpr decimalExpr = new SQLDecimalExpr();
decimalExpr.setValue(new BigDecimal(number.intValue()));
lexer.nextToken();
sqlExpr = decimalExpr;
} else {
sqlExpr = new SQLIntegerExpr(number);
}
break;
case LITERAL_FLOAT:
sqlExpr = lexer.numberExpr();
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.BD)) {
SQLDecimalExpr decimalExpr = new SQLDecimalExpr();
decimalExpr.setValue(new BigDecimal(sqlExpr.toString()));
lexer.nextToken();
sqlExpr = decimalExpr;
}
break;
case LITERAL_CHARS: {
sqlExpr = new SQLCharExpr(lexer.stringVal());
sqlExpr = primaryLiteralCharsRest(sqlExpr);
break;
}
case LITERAL_NCHARS:
sqlExpr = new SQLNCharExpr(lexer.stringVal());
lexer.nextToken();
sqlExpr = primaryLiteralNCharsRest(sqlExpr);
break;
case VARIANT: {
if (lexer.keepSourceLocation) {
lexer.computeRowAndColumn();
}
int line = lexer.getPosLine(), column = lexer.getPosColumn();
String varName = lexer.stringVal();
lexer.nextToken();
if (varName.equals(":") && lexer.token == Token.IDENTIFIER && dialectFeatureEnabled(PrimaryVariantColon)) {
String part2 = lexer.stringVal();
lexer.nextToken();
varName += part2;
}
SQLVariantRefExpr varRefExpr = new SQLVariantRefExpr(varName);
varRefExpr.setSource(line, column);
if (varName.startsWith(":")) {
varRefExpr.setIndex(lexer.nextVarIndex());
}
if (varRefExpr.getName().equals("@")
&& (lexer.token == Token.LITERAL_CHARS || (lexer.token == Token.VARIANT && lexer.stringVal().startsWith("@")))) {
varRefExpr.setName("@'" + lexer.stringVal() + "'");
lexer.nextToken();
} else if (varRefExpr.getName().equals("@@") && lexer.token == Token.LITERAL_CHARS) {
varRefExpr.setName("@@'" + lexer.stringVal() + "'");
lexer.nextToken();
}
sqlExpr = varRefExpr;
}
break;
case DEFAULT:
sqlExpr = primaryDefaultRest();
lexer.nextToken();
break;
case DUAL:
case KEY:
// case DISTINCT:
case LIMIT:
case SCHEMA:
case COLUMN:
case IF:
case END:
case COMMENT:
case COMPUTE:
case ENABLE:
case DISABLE:
case INITIALLY:
case SEQUENCE:
case USER:
case EXPLAIN:
case WITH:
case GRANT:
case REPLACE:
case INDEX:
// case MODEL:
case PCTFREE:
case INITRANS:
case MAXTRANS:
case SEGMENT:
case CREATION:
case IMMEDIATE:
case DEFERRED:
case STORAGE:
case NEXT:
case MINEXTENTS:
case MAXEXTENTS:
case MAXSIZE:
case PCTINCREASE:
case FLASH_CACHE:
case CELL_FLASH_CACHE:
case NONE:
case LOB:
case STORE:
case ROW:
case CHUNK:
case CACHE:
case NOCACHE:
case LOGGING:
case NOCOMPRESS:
case KEEP_DUPLICATES:
case EXCEPTIONS:
case PURGE:
case FULL:
case TO:
case IDENTIFIED:
case PASSWORD:
case BINARY:
case WINDOW:
case OFFSET:
case SHARE:
case START:
case CONNECT:
case MATCHED:
case ERRORS:
case REJECT:
case UNLIMITED:
case BEGIN:
case EXCLUSIVE:
case MODE:
case ADVISE:
case VIEW:
case ESCAPE:
case OVER:
case ORDER:
case CONSTRAINT:
case TYPE:
case OPEN:
case REPEAT:
case TABLE:
case TRUNCATE:
case EXCEPTION:
case FUNCTION:
case IDENTITY:
case EXTRACT:
case DESC:
case DO:
case GROUP:
case MOD:
case CONCAT:
case PRIMARY:
case PARTITION:
case LEAVE:
case CLOSE:
case CONDITION:
case OUT:
case USE:
case EXCEPT:
case INTERSECT:
case MERGE:
case MINUS:
case UNTIL:
case TOP:
case SHOW:
case INOUT:
case OUTER:
case QUALIFY:
case GET:
sqlExpr = new SQLIdentifierExpr(lexer.stringVal());
lexer.nextToken();
break;
case CASE:
SQLCaseExpr caseExpr = new SQLCaseExpr();
lexer.nextToken();
if (lexer.token != Token.WHEN) {
caseExpr.setValueExpr(expr());
}
accept(Token.WHEN);
SQLExpr testExpr = expr();
accept(Token.THEN);
SQLExpr valueExpr = expr();
SQLCaseExpr.Item caseItem = new SQLCaseExpr.Item(testExpr, valueExpr);
caseExpr.addItem(caseItem);
while (lexer.token == Token.WHEN) {
lexer.nextToken();
testExpr = expr();
accept(Token.THEN);
valueExpr = expr();
caseItem = new SQLCaseExpr.Item(testExpr, valueExpr);
caseExpr.addItem(caseItem);
}
if (lexer.token == Token.ELSE) {
lexer.nextToken();
caseExpr.setElseExpr(expr());
}
accept(Token.END);
sqlExpr = caseExpr;
break;
case EXISTS: {
String strVal = lexer.stringVal();
lexer.nextToken();
switch (lexer.token) {
case COMMA:
case DOT:
sqlExpr = new SQLIdentifierExpr(strVal);
break;
default:
accept(Token.LPAREN);
sqlExpr = new SQLExistsExpr(createSelectParser().select());
accept(Token.RPAREN);
parseQueryPlanHint(sqlExpr);
break;
}
break;
}
case NOT:
lexer.nextToken();
if (lexer.token == Token.EXISTS) {
lexer.nextToken();
accept(Token.LPAREN);
SQLExistsExpr exists = new SQLExistsExpr(createSelectParser().select(), true);
accept(Token.RPAREN);
parseQueryPlanHint(exists);
if (lexer.token == Token.EQ) {
exists.setNot(false);
SQLExpr relational = this.relationalRest(exists);
sqlExpr = new SQLNotExpr(relational);
} else {
sqlExpr = exists;
}
} else if (lexer.token == Token.LPAREN) {
lexer.nextToken();
SQLExpr notTarget = expr();
if (notTarget instanceof SQLBinaryOpExpr) {
((SQLBinaryOpExpr) notTarget).setParenthesized(true);
}
if (notTarget instanceof SQLUnaryExpr) {
((SQLUnaryExpr) notTarget).setParenthesized(true);
}
accept(Token.RPAREN);
notTarget = bitXorRest(notTarget);
notTarget = multiplicativeRest(notTarget);
notTarget = additiveRest(notTarget);
notTarget = shiftRest(notTarget);
notTarget = bitAndRest(notTarget);
notTarget = bitOrRest(notTarget);
notTarget = inRest(notTarget);
notTarget = relationalRest(notTarget);
sqlExpr = new SQLNotExpr(notTarget);
parseQueryPlanHint(sqlExpr);
return primaryRest(sqlExpr);
} else {
SQLExpr restExpr = relational();
sqlExpr = new SQLNotExpr(restExpr);
parseQueryPlanHint(sqlExpr);
}
break;
case FROM:
case SELECT:
sqlExpr = parseQueryExpr();
break;
case CAST:
sqlExpr = parseCast();
break;
case SUB:
lexer.nextToken();
switch (lexer.token) {
case LITERAL_INT:
Number integerValue = lexer.integerValue();
if (integerValue instanceof Integer) {
int intVal = integerValue.intValue();
if (intVal == Integer.MIN_VALUE) {
integerValue = Long.valueOf(((long) intVal) * -1);
} else {
integerValue = Integer.valueOf(intVal * -1);
}
} else if (integerValue instanceof Long) {
long longVal = ((Long) integerValue).longValue();
if (longVal == 2147483648L) {
integerValue = Integer.valueOf((int) (((long) longVal) * -1));
} else {
integerValue = Long.valueOf(longVal * -1);
}
} else {
integerValue = ((BigInteger) integerValue).negate();
}
sqlExpr = new SQLIntegerExpr(integerValue);
lexer.nextToken();
break;
case LITERAL_FLOAT:
sqlExpr = lexer.numberExpr(true);
lexer.nextToken();
break;
case LITERAL_CHARS:
case LITERAL_ALIAS:
sqlExpr = primarySubLiteralAliasRest();
lexer.nextToken();
if (lexer.token == Token.LPAREN
|| lexer.token == Token.LBRACKET
|| lexer.token == Token.DOT) {
sqlExpr = primaryRest(sqlExpr);
}
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Negative, sqlExpr);
break;
case QUES: {
SQLVariantRefExpr variantRefExpr = new SQLVariantRefExpr("?");
variantRefExpr.setIndex(lexer.nextVarIndex());
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Negative, variantRefExpr);
lexer.nextToken();
break;
}
case PLUS:
case SUB:
case LPAREN:
case IDENTIFIER:
case BANG:
case CASE:
case CAST:
case NULL:
case INTERVAL:
case LBRACE:
case IF:
case CHECK:
case INDEX:
case PRIMARY:
case KEY:
case REPLACE:
sqlExpr = primary();
while (lexer.token == Token.HINT) {
lexer.nextToken();
}
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Negative, sqlExpr);
break;
case VARIANT:
sqlExpr = primary();
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Negative, sqlExpr);
break;
default:
throw new ParserException("TODO : " + lexer.info());
}
break;
case PLUS:
lexer.nextToken();
switch (lexer.token) {
case LITERAL_CHARS:
case LITERAL_ALIAS:
sqlExpr = new SQLIdentifierExpr(lexer.stringVal());
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Plus, sqlExpr);
lexer.nextToken();
break;
case QUES: {
SQLVariantRefExpr variantRefExpr = new SQLVariantRefExpr("?");
variantRefExpr.setIndex(lexer.nextVarIndex());
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Plus, variantRefExpr);
lexer.nextToken();
break;
}
case PLUS:
case SUB:
case LITERAL_FLOAT:
case LITERAL_INT:
case LPAREN:
case IDENTIFIER:
case BANG:
case CASE:
case CAST:
case NULL:
case INTERVAL:
case LBRACE:
case IF:
case CHECK:
case REPLACE:
sqlExpr = primary();
while (lexer.token == Token.HINT) {
lexer.nextToken();
}
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Plus, sqlExpr);
break;
default:
throw new ParserException("TODO " + lexer.info());
}
break;
case TILDE:
lexer.nextToken();
SQLExpr unaryValueExpr = primary();
SQLUnaryExpr unary = new SQLUnaryExpr(SQLUnaryOperator.Compl, unaryValueExpr);
sqlExpr = unary;
break;
case QUES:
primaryQues();
SQLVariantRefExpr quesVarRefExpr = new SQLVariantRefExpr("?");
quesVarRefExpr.setIndex(lexer.nextVarIndex());
sqlExpr = quesVarRefExpr;
break;
case LEFT:
sqlExpr = new SQLIdentifierExpr("LEFT");
lexer.nextToken();
break;
case RIGHT:
sqlExpr = new SQLIdentifierExpr("RIGHT");
lexer.nextToken();
break;
case INNER:
sqlExpr = new SQLIdentifierExpr("INNER");
lexer.nextToken();
break;
case DATABASE:
sqlExpr = new SQLIdentifierExpr("DATABASE");
lexer.nextToken();
break;
case CASCADE:
sqlExpr = new SQLIdentifierExpr("CASCADE");
lexer.nextToken();
break;
case LOCK:
sqlExpr = new SQLIdentifierExpr("LOCK");
lexer.nextToken();
break;
case NULL:
sqlExpr = new SQLNullExpr();
lexer.nextToken();
break;
case BANG:
lexer.nextToken();
sqlExpr = new SQLUnaryExpr(
SQLUnaryOperator.Not,
primary()
);
break;
case BANGBANG: {
if (!dialectFeatureEnabled(PrimaryBangBangSupport)) {
throw new ParserException(lexer.info());
}
lexer.nextToken();
sqlExpr = new SQLUnaryExpr(
SQLUnaryOperator.Not,
primary()
);
break;
}
case BANG_TILDE: {
lexer.nextToken();
SQLExpr bangExpr = primary();
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Not, new SQLUnaryExpr(SQLUnaryOperator.Compl, bangExpr));
break;
}
case LITERAL_HEX:
String hex = lexer.hexString();
sqlExpr = new SQLHexExpr(hex);
lexer.nextToken();
break;
case INTERVAL:
sqlExpr = parseInterval();
break;
case COLON:
lexer.nextToken();
if (lexer.token == Token.LITERAL_ALIAS) {
sqlExpr = new SQLVariantRefExpr(":\"" + lexer.stringVal() + "\"");
lexer.nextToken();
}
break;
case ANY:
sqlExpr = parseAny();
break;
case SOME:
sqlExpr = parseSome();
break;
case ALL:
sqlExpr = parseAll();
break;
case LITERAL_ALIAS:
sqlExpr = parseAliasExpr(lexer.stringVal());
lexer.nextToken();
break;
case EOF:
throw new EOFParserException();
case TRUE:
lexer.nextToken();
sqlExpr = new SQLBooleanExpr(true);
break;
case FALSE:
lexer.nextToken();
sqlExpr = new SQLBooleanExpr(false);
break;
case BITS: {
String strVal = lexer.stringVal();
lexer.nextToken();
sqlExpr = new SQLBinaryExpr(strVal);
break;
}
case GLOBAL:
case CONTAINS:
sqlExpr = inRest(null);
break;
case SET: {
Lexer.SavePoint savePoint = lexer.mark();
lexer.nextToken();
if (lexer.token == Token.SET && dialectFeatureEnabled(PrimaryTwoConsecutiveSet)) {
lexer.nextToken();
}
if (lexer.token() == Token.LPAREN) {
sqlExpr = new SQLIdentifierExpr("SET");
} else if (lexer.token == Token.DOT) {
sqlExpr = new SQLIdentifierExpr("SET");
sqlExpr = this.primaryRest(sqlExpr);
} else {
lexer.reset(savePoint);
throw new ParserException("ERROR. " + lexer.info());
}
break;
}
case LBRACE: {
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.TS)) {
lexer.nextToken();
String literal = lexer.stringVal();
lexer.nextToken();
sqlExpr = new SQLTimestampExpr(literal);
} else if (lexer.identifierEquals(FnvHash.Constants.D)
|| lexer.identifierEquals(FnvHash.Constants.DATE)
) {
lexer.nextToken();
String literal = lexer.stringVal();
if (literal.length() > 2
&& literal.charAt(0) == '"'
&& literal.charAt(literal.length() - 1) == '"') {
literal = literal.substring(1, literal.length() - 1);
}
lexer.nextToken();
sqlExpr = new SQLDateExpr(literal);
} else if (lexer.identifierEquals(FnvHash.Constants.T)) {
lexer.nextToken();
String literal = lexer.stringVal();
lexer.nextToken();
sqlExpr = new SQLTimeExpr(literal);
} else if (lexer.identifierEquals(FnvHash.Constants.FN)) {
lexer.nextToken();
sqlExpr = this.expr();
} else if (dialectFeatureEnabled(PrimaryLbraceOdbcEscape)) {
sqlExpr = this.expr(); // {identifier expr} is ODBC escape syntax and is accepted for ODBC compatibility.
} else if (lexer.nextIf(LBRACE)) {
// support dbt style: {{ source(a,b) }} as identifier
sqlExpr = new SQLPatternExpr(this.expr());
// skip }
accept(RBRACE);
} else {
throw new ParserException("ERROR. " + lexer.info());
}
accept(Token.RBRACE);
break;
}
case VALUES:
case TRIGGER:
case FOR:
case CHECK:
case DELETE:
case BY:
case UPDATE:
case LOOP:
case LIKE:
case UNION:
case CREATE:
case COMMA:
case STAR:
case DIV:
case DISTRIBUTE:
case UNIQUE:
case PROCEDURE:
case REFERENCES:
case IS:
case REVOKE:
case DECLARE:
case DROP:
case RLIKE:
case FOREIGN:
case FETCH:
case ASC:
case CURSOR:
case ALTER:
sqlExpr = primaryCommon(sqlExpr);
break;
case AS: {
sqlExpr = primaryAs(sqlExpr);
break;
}
case DISTINCT:
sqlExpr = primaryDistinct(sqlExpr);
break;
case BETWEEN:
case IN:
sqlExpr = primaryIn(sqlExpr);
break;
case LBRACKET:
SQLArrayExpr arrayTmp = new SQLArrayExpr();
lexer.nextToken();
this.exprList(arrayTmp.getValues(), arrayTmp);
accept(Token.RBRACKET);
sqlExpr = arrayTmp;
break;
case ON:
sqlExpr = primaryOn(sqlExpr);
break;
case COLONCOLON:
sqlExpr = primaryColonColon(sqlExpr);
break;
case ARRAY: {
SQLArrayExpr array = new SQLArrayExpr();
array.setExpr(new SQLIdentifierExpr("ARRAY"));
lexer.nextToken();
if (lexer.nextIf(Token.LT)) {
SQLDataType sqlDataType = this.parseDataType();
array.setDataType(sqlDataType);
accept(Token.GT);
}
if (lexer.nextIf(Token.LBRACKET)) {
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
} else {
throw new ParserException("Syntax error. " + lexer.info());
}
sqlExpr = array;
break;
}
case LITERAL_TEXT_BLOCK:
sqlExpr = new SQLCharExpr(lexer.stringVal());
lexer.nextToken();
break;
default:
throw new ParserException("ERROR. " + lexer.info());
}
SQLExpr expr = primaryRest(sqlExpr);
if (beforeComments != null) {
expr.addBeforeComment(beforeComments);
}
if (lexer.hasComment() && lexer.isKeepComments()) {
expr.addAfterComment(lexer.readAndResetComments());
}
return expr;
}
protected SQLExpr parseArrayExpr(String ident) {
SQLExpr sqlExpr;
SQLArrayExpr array = new SQLArrayExpr();
array.setExpr(new SQLIdentifierExpr(ident));
if (lexer.nextIf(Token.LT)) {
SQLDataType sqlDataType = this.parseDataType();
array.setDataType(sqlDataType);
accept(Token.GT);
}
if (lexer.nextIf(Token.LBRACKET)) {
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
} else {
throw new ParserException("Syntax error. " + lexer.info());
}
sqlExpr = array;
return sqlExpr;
}
protected SQLExpr parseCast() {
String castStr = lexer.stringVal();
lexer.nextToken();
if (lexer.token != Token.LPAREN) {
return new SQLIdentifierExpr(castStr);
} else {
lexer.nextToken();
SQLCastExpr cast = new SQLCastExpr();
cast.setExpr(
expr());
accept(Token.AS);
cast.setDataType(
parseDataType(false));
if (cast.getDataType() instanceof SQLArrayDataType) {
SQLArrayDataType arrayDataType = (SQLArrayDataType) cast.getDataType();
if (arrayDataType.getDbType() == null) {
arrayDataType.setDbType(dbType);
}
arrayDataType.setUsedForCast(true);
}
cast = parseCastFormat(cast);
accept(Token.RPAREN);
return cast;
}
}
    /**
     * Hook for dialects that extend CAST syntax (e.g. a FORMAT clause);
     * the base implementation returns the expression unchanged.
     */
    protected SQLCastExpr parseCastFormat(SQLCastExpr cast) {
        return cast;
    }
    /**
     * Parses a subquery at expression position and wraps the resulting
     * SELECT in a {@link SQLQueryExpr}.
     */
    protected SQLExpr parseQueryExpr() {
        return new SQLQueryExpr(
                createSelectParser()
                        .select());
    }
    /**
     * Parses an ALL expression: either {@code ALL (subquery)}, an
     * {@code ALL(arg, ...)} function call, or — when the following token
     * shows ALL is being used as an ordinary name — a plain identifier.
     * The ALL keyword itself has been read into {@code lexer.stringVal()}.
     */
    protected SQLExpr parseAll() {
        SQLExpr sqlExpr;
        String str = lexer.stringVal();
        lexer.nextToken();
        switch (lexer.token) {
            // Followed by an operator: ALL is an identifier in a larger
            // expression, so continue via primaryRest.
            case DOT:
            case SLASH:
            case EQ:
            case GT:
            case GTEQ:
            case LT:
            case LTEQ:
            case STAR:
            case DIV:
                return primaryRest(new SQLIdentifierExpr(str));
            // Followed by a separator/clause keyword: ALL is a bare identifier.
            case COMMA:
            case PLUS:
            case SUB:
            case RPAREN:
            case WHERE:
            case GROUP:
            case SEMI:
            case AS:
            case FROM:
            case ORDER:
            case LIMIT:
            case UNION:
                return new SQLIdentifierExpr(str);
            case IDENTIFIER:
                // Some dialects allow ALL as an identifier before another name.
                if (dialectFeatureEnabled(ParseAllIdentifier)) {
                    return new SQLIdentifierExpr(str);
                }
                break;
            default:
                break;
        }
        SQLAllExpr allExpr = new SQLAllExpr();
        accept(Token.LPAREN);
        // Not a subquery inside the parens: parse ALL(...) as a method call.
        if (lexer.token != Token.SELECT && lexer.token != Token.VALUES && lexer.token != Token.LPAREN) {
            SQLExpr expr = this.expr();
            SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("ALL");
            methodInvokeExpr.addArgument(expr);
            while (lexer.token == Token.COMMA) {
                lexer.nextToken();
                methodInvokeExpr.addArgument(expr());
            }
            accept(Token.RPAREN);
            return methodInvokeExpr;
        }
        // ALL (SELECT ...): quantified subquery.
        SQLSelect allSubQuery = createSelectParser().select();
        allExpr.setSubQuery(allSubQuery);
        accept(Token.RPAREN);
        allSubQuery.setParent(allExpr);
        sqlExpr = allExpr;
        return sqlExpr;
    }
    /**
     * Parses a SOME expression: {@code SOME (subquery)}, a
     * {@code SOME(arg, ...)} function call, or a plain identifier when no
     * '(' follows. Mirrors {@link #parseAll()}.
     */
    protected SQLExpr parseSome() {
        SQLExpr sqlExpr;
        String str = lexer.stringVal();
        lexer.nextToken();
        // No '(' after SOME: it is an ordinary identifier.
        if (lexer.token != Token.LPAREN) {
            return new SQLIdentifierExpr(str);
        }
        lexer.nextToken();
        // Not a subquery: parse SOME(...) as a method call.
        if (lexer.token != Token.SELECT && lexer.token != Token.VALUES && lexer.token != Token.LPAREN) {
            SQLExpr expr = this.expr();
            SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("SOME");
            methodInvokeExpr.addArgument(expr);
            while (lexer.token == Token.COMMA) {
                lexer.nextToken();
                methodInvokeExpr.addArgument(expr());
            }
            accept(Token.RPAREN);
            return methodInvokeExpr;
        }
        // SOME (SELECT ...): quantified subquery.
        SQLSomeExpr someExpr = new SQLSomeExpr();
        SQLSelect someSubQuery = createSelectParser().select();
        someExpr.setSubQuery(someSubQuery);
        accept(Token.RPAREN);
        someSubQuery.setParent(someExpr);
        sqlExpr = someExpr;
        return sqlExpr;
    }
    /**
     * Parses an ANY expression: {@code ANY (subquery)}, an
     * {@code ANY(arg, ...)} function call, or the bare identifier "ANY"
     * when no '(' follows. Mirrors {@link #parseAll()}.
     */
    protected SQLExpr parseAny() {
        SQLExpr sqlExpr;
        lexer.nextToken();
        if (lexer.token == Token.LPAREN) {
            accept(Token.LPAREN);
            // Not a subquery: parse ANY(...) as a method call.
            if (lexer.token != Token.SELECT && lexer.token != Token.VALUES && lexer.token != Token.LPAREN) {
                SQLExpr expr = this.expr();
                SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("ANY");
                methodInvokeExpr.addArgument(expr);
                while (lexer.token == Token.COMMA) {
                    lexer.nextToken();
                    methodInvokeExpr.addArgument(expr());
                }
                accept(Token.RPAREN);
                return methodInvokeExpr;
            }
            // ANY (SELECT ...): quantified subquery.
            SQLSelect anySubQuery = createSelectParser().select();
            SQLAnyExpr anyExpr = new SQLAnyExpr(anySubQuery);
            accept(Token.RPAREN);
            sqlExpr = anyExpr;
        } else {
            sqlExpr = new SQLIdentifierExpr("ANY");
        }
        return sqlExpr;
    }
    /**
     * Hook for dialect-specific alias handling; the base implementation
     * treats the alias text as a plain identifier.
     */
    protected SQLExpr parseAliasExpr(String alias) {
        return new SQLIdentifierExpr(alias);
    }
protected SQLExpr parseInterval() {
String str = lexer.stringVal();
accept(Token.INTERVAL);
switch (lexer.token) {
case COMMA:
case IS:
case BETWEEN:
case IN:
case RPAREN:
case EQ:
case BANGEQ:
case LTGT:
case LT:
case LTEQ:
case GT:
case GTEQ:
case STAR:
case DIV:
case SLASH:
case DOT:
case FROM:
case ORDER:
case THEN:
case END:
case AS:
case UNION:
case SEMI:
case ASC:
case DESC:
case ELSE:
return new SQLIdentifierExpr(str);
case PLUS:
case SUB: {
break;
}
default:
if (lexer.identifierEquals(FnvHash.Constants.GROUPING)) {
return new SQLIdentifierExpr(str);
}
break;
}
SQLExpr value = expr();
if (value instanceof SQLCharExpr) {
String literal = ((SQLCharExpr) value).getText();
int space = literal.indexOf(' ');
if (space != -1) {
int intervalValue = Integer.valueOf(literal.substring(0, space));
String unitStr = literal.substring(space + 1).toUpperCase();
SQLIntervalUnit unit = SQLIntervalUnit.of(unitStr);
return new SQLIntervalExpr(new SQLIntegerExpr(intervalValue), unit);
}
}
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("Syntax error. " + lexer.info());
}
String unit = lexer.stringVal().toUpperCase();
lexer.nextToken();
SQLIntervalExpr intervalExpr = new SQLIntervalExpr();
intervalExpr.setValue(value);
intervalExpr.setUnit(SQLIntervalUnit.of(unit));
return intervalExpr;
}
    /**
     * Creates the SELECT parser bound to this expression parser; dialect
     * subclasses override this to supply their own select parser.
     */
    public SQLSelectParser createSelectParser() {
        return new SQLSelectParser(this);
    }
    /**
     * Consumes suffixes of a primary expression — member access ('.'),
     * subscripts ('['), calls ('('), sequence pseudo-columns, GROUPING SETS,
     * etc. — and returns the fully extended expression. Called recursively
     * until no recognized suffix follows.
     *
     * @param expr the already-parsed primary expression; must not be null
     * @return the expression with all suffixes applied
     */
    public SQLExpr primaryRest(SQLExpr expr) {
        if (expr == null) {
            throw new ParserException("expr" + ", " + lexer.info());
        }
        Token token = lexer.token;
        if (token == Token.OF) {
            // CURRENT OF <cursor>
            if (expr instanceof SQLIdentifierExpr) {
                long hashCode64 = ((SQLIdentifierExpr) expr).hashCode64();
                if (hashCode64 == FnvHash.Constants.CURRENT) {
                    lexer.nextToken();
                    SQLName cursorName = this.name();
                    return new SQLCurrentOfCursorExpr(cursorName);
                }
            }
        } else if (token == Token.FOR) {
            // NEXTVAL/CURRVAL/PREVVAL FOR <sequence>
            if (expr instanceof SQLIdentifierExpr) {
                SQLIdentifierExpr idenExpr = (SQLIdentifierExpr) expr;
                if (idenExpr.hashCode64() == FnvHash.Constants.NEXTVAL) {
                    lexer.nextToken();
                    SQLName seqName = this.name();
                    SQLSequenceExpr seqExpr = new SQLSequenceExpr(seqName, SQLSequenceExpr.Function.NextVal);
                    return seqExpr;
                } else if (idenExpr.hashCode64() == FnvHash.Constants.CURRVAL) {
                    lexer.nextToken();
                    SQLName seqName = this.name();
                    SQLSequenceExpr seqExpr = new SQLSequenceExpr(seqName, SQLSequenceExpr.Function.CurrVal);
                    return seqExpr;
                } else if (idenExpr.hashCode64() == FnvHash.Constants.PREVVAL) {
                    lexer.nextToken();
                    SQLName seqName = this.name();
                    SQLSequenceExpr seqExpr = new SQLSequenceExpr(seqName, SQLSequenceExpr.Function.PrevVal);
                    return seqExpr;
                }
            }
        } else if (token == Token.LBRACKET) {
            // Subscript access: expr[e1, e2, ...]
            SQLArrayExpr array = new SQLArrayExpr();
            array.setExpr(expr);
            lexer.nextToken();
            this.exprList(array.getValues(), array);
            accept(Token.RBRACKET);
            expr = array;
            return primaryRest(expr);
        } else if (token == Token.DOT) {
            // Member access; char/DEFAULT owners are re-wrapped as identifiers.
            lexer.nextToken();
            if (expr instanceof SQLCharExpr) {
                String text = ((SQLCharExpr) expr).getText();
                expr = new SQLIdentifierExpr(text);
            }
            if (expr instanceof SQLDefaultExpr) {
                expr = new SQLIdentifierExpr(expr.toString());
            }
            expr = dotRest(expr);
            return primaryRest(expr);
        } else if (lexer.identifierEquals(FnvHash.Constants.SETS) //
                && expr.getClass() == SQLIdentifierExpr.class //
                && "GROUPING".equalsIgnoreCase(((SQLIdentifierExpr) expr).getName())) {
            // GROUPING SETS ((a, b), c, ...)
            SQLGroupingSetExpr groupingSets = new SQLGroupingSetExpr();
            lexer.nextToken();
            accept(Token.LPAREN);
            for (; ; ) {
                SQLExpr item;
                if (lexer.token == Token.LPAREN) {
                    lexer.nextToken();
                    if (lexer.token == Token.COMMA && dialectFeatureEnabled(PrimaryRestCommaAfterLparen)) {
                        lexer.nextToken();
                    }
                    SQLListExpr listExpr = new SQLListExpr();
                    this.exprList(listExpr.getItems(), listExpr);
                    item = listExpr;
                    accept(Token.RPAREN);
                } else {
                    item = this.expr();
                }
                item.setParent(groupingSets);
                groupingSets.addParameter(item);
                if (lexer.token == Token.RPAREN) {
                    break;
                }
                accept(Token.COMMA);
            }
            this.exprList(groupingSets.getParameters(), groupingSets);
            accept(Token.RPAREN);
            return groupingSets;
        } else if (lexer.token == Token.LITERAL_CHARS && expr instanceof SQLIdentifierExpr && ((SQLIdentifierExpr) expr).hashCode64() == FnvHash.Constants.DECIMAL) {
            // DECIMAL '123.45' literal form.
            expr = new SQLDecimalExpr(lexer.stringVal());
            lexer.nextToken();
        } else {
            // A '(' after anything but a numeric/hex/variant literal starts a call.
            if (lexer.token == Token.LPAREN &&
                    !(expr instanceof SQLIntegerExpr) && !(expr instanceof SQLHexExpr) && !(expr instanceof SQLVariantRefExpr)
            ) {
                SQLExpr method = methodRest(expr, true);
                if (lexer.token == Token.LBRACKET || lexer.token == Token.DOT) {
                    method = primaryRest(method);
                }
                return method;
            }
        }
        return expr;
    }
    /**
     * Parses EXTRACT(...); only dialects that support it override this.
     */
    protected SQLExpr parseExtract() {
        throw new ParserException("not supported.");
    }
    /**
     * Parses POSITION(...); only dialects that support it override this.
     */
    protected SQLExpr parsePosition() {
        throw new ParserException("not supported.");
    }
    /**
     * Parses a MySQL full-text search expression:
     * {@code MATCH (col, ...) AGAINST (expr [modifier])}.
     * The MATCH keyword and its opening '(' have already been consumed
     * by the caller — TODO confirm against call sites.
     */
    protected SQLExpr parseMatch() {
        SQLMatchAgainstExpr matchAgainstExpr = new SQLMatchAgainstExpr();
        if (lexer.token() == Token.RPAREN) {
            lexer.nextToken();
        } else {
            exprList(matchAgainstExpr.getColumns(), matchAgainstExpr);
            accept(Token.RPAREN);
        }
        if (lexer.identifierEquals(FnvHash.Constants.AGAINST)) {
            lexer.nextToken();
        }
        accept(Token.LPAREN);
        SQLExpr against = primary();
        matchAgainstExpr.setAgainst(against);
        // Optional search modifier clause.
        if (lexer.token() == Token.IN) {
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.NATURAL)) {
                lexer.nextToken();
                acceptIdentifier("LANGUAGE");
                acceptIdentifier("MODE");
                if (lexer.token() == Token.WITH) {
                    lexer.nextToken();
                    acceptIdentifier("QUERY");
                    acceptIdentifier("EXPANSION");
                    matchAgainstExpr.setSearchModifier(SQLMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE_WITH_QUERY_EXPANSION);
                } else {
                    matchAgainstExpr.setSearchModifier(SQLMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE);
                }
            } else if (lexer.identifierEquals(FnvHash.Constants.BOOLEAN)) {
                lexer.nextToken();
                acceptIdentifier("MODE");
                matchAgainstExpr.setSearchModifier(SQLMatchAgainstExpr.SearchModifier.IN_BOOLEAN_MODE);
            } else {
                throw new ParserException("syntax error. " + lexer.info());
            }
        } else if (lexer.token() == Token.WITH) {
            lexer.nextToken();
            acceptIdentifier("QUERY");
            acceptIdentifier("EXPANSION");
            matchAgainstExpr.setSearchModifier(SQLMatchAgainstExpr.SearchModifier.WITH_QUERY_EXPANSION);
        }
        accept(Token.RPAREN);
        return primaryRest(matchAgainstExpr);
    }
    /**
     * Dialect hook invoked from {@code methodRest} for identifier methods
     * that need special parsing; returning null means "no special handling".
     */
    protected SQLExpr methodRestAllowIdentifierMethodSpecific(String methodName, long hash_lower, SQLMethodInvokeExpr methodInvokeExpr) {
        return null;
    }
    /**
     * Dialect hook invoked after a method invocation has been fully parsed;
     * the base implementation does nothing.
     */
    protected void methodRestUsing(SQLMethodInvokeExpr methodInvokeExpr) {
    }
protected SQLExpr methodRest(SQLExpr expr, boolean acceptLPAREN) {
if (acceptLPAREN) {
accept(Token.LPAREN);
}
boolean distinct = false;
if (lexer.token == Token.DISTINCT) {
lexer.nextToken();
distinct = true;
if (lexer.token == Token.RPAREN || lexer.token == Token.COMMA) {
throw new ParserException(lexer.info());
}
}
String methodName = null;
String aggMethodName = null;
SQLMethodInvokeExpr methodInvokeExpr = null;
SQLExpr owner = null;
String trimOption = null;
long hash_lower = 0L;
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identifierExpr = (SQLIdentifierExpr) expr;
methodName = identifierExpr.getName();
hash_lower = identifierExpr.nameHashCode64();
if (allowIdentifierMethod) {
if (hash_lower == FnvHash.Constants.TRIM) {
if (lexer.identifierEquals(FnvHash.Constants.LEADING)) {
trimOption = lexer.stringVal();
lexer.nextToken();
} else if (lexer.identifierEquals(FnvHash.Constants.BOTH)) {
trimOption = lexer.stringVal();
lexer.nextToken();
} else if (lexer.identifierEquals(FnvHash.Constants.TRAILING)) {
trimOption = lexer.stringVal();
lexer.nextToken();
}
} else if (hash_lower == FnvHash.Constants.TRY_CAST) {
SQLCastExpr cast = new SQLCastExpr();
cast.setTry(true);
cast.setExpr(expr());
accept(Token.AS);
cast.setDataType(parseDataType(false));
accept(Token.RPAREN);
return cast;
} else {
SQLExpr resultExpr = methodRestAllowIdentifierMethodSpecific(methodName, hash_lower, methodInvokeExpr);
if (resultExpr != null) {
return resultExpr;
}
}
}
if (distinct) {
aggMethodName = methodName;
} else {
aggMethodName = getAggregateFunction(hash_lower);
}
} else if (expr instanceof SQLPropertyExpr) {
methodName = ((SQLPropertyExpr) expr).getSimpleName();
aggMethodName = SQLUtils.normalize(methodName);
hash_lower = FnvHash.fnv1a_64_lower(aggMethodName);
aggMethodName = getAggregateFunction(hash_lower);
owner = ((SQLPropertyExpr) expr).getOwner();
} else if (expr instanceof SQLDefaultExpr) {
methodName = "DEFAULT";
} else if (expr instanceof SQLCharExpr) {
methodName = ((SQLCharExpr) expr).getText();
if (isAggregateFunction(methodName)) {
aggMethodName = methodName;
}
} else if (expr instanceof SQLDbLinkExpr) {
SQLDbLinkExpr dbLinkExpr = (SQLDbLinkExpr) expr;
methodName = dbLinkExpr.toString();
}
if (aggMethodName != null) {
SQLAggregateExpr aggregateExpr = parseAggregateExpr(methodName);
if (distinct) {
aggregateExpr.setOption(SQLAggregateOption.DISTINCT);
}
if (lexer.token == Token.COLONCOLON) {
return primaryRest(aggregateExpr);
}
return primaryRest(aggregateExpr);
}
methodInvokeExpr = new SQLMethodInvokeExpr(methodName, hash_lower);
if (lexer.keepSourceLocation) {
int line, column;
if (lexer.keepSourceLocation) {
lexer.computeRowAndColumn();
}
if (expr instanceof SQLObjectImpl) {
line = expr.getSourceLine();
column = expr.getSourceColumn();
} else {
line = lexer.getPosLine();
column = lexer.getPosColumn();
}
methodInvokeExpr.setSource(line, column);
}
if (owner != null) {
methodInvokeExpr.setOwner(owner);
}
if (trimOption != null) {
methodInvokeExpr.setTrimOption(trimOption);
}
Token token = lexer.token;
if ("XMLSERIALIZE".equals(methodName) && lexer.identifierEquals("CONTENT")) {
SQLExpr contentExpr = expr();
methodInvokeExpr.setContent(contentExpr);
}
if ("XMLELEMENT".equals(methodName) && lexer.identifierEquals("NAME")) {
Lexer.SavePoint mark = lexer.markOut();
lexer.nextToken(); // Skip NAME if it is a keyword
if (lexer.token != Token.IDENTIFIER) {
// No other identifier name comes after NAME, so NAME itself is
// the xml element name. Reset lexer to NAME
lexer.reset(mark);
}
}
if (token != Token.RPAREN && token != Token.FROM) {
exprList(methodInvokeExpr.getArguments(), methodInvokeExpr);
if (lexer.token == Token.RPAREN) {
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
if (lexer.token == Token.LPAREN) {
//for clickhouse parametric functions
//处理类似偏函数的语法 func(x,y)(a,b,c)
lexer.nextToken();
SQLParametricMethodInvokeExpr parametricExpr =
new SQLParametricMethodInvokeExpr(methodName, hash_lower);
methodInvokeExpr.cloneTo(parametricExpr);
methodInvokeExpr = parametricExpr;
exprList(((SQLParametricMethodInvokeExpr) methodInvokeExpr).getSecondArguments(), methodInvokeExpr);
} else {
lexer.reset(mark);
}
}
}
if (hash_lower == FnvHash.Constants.EXIST
&& methodInvokeExpr.getArguments().size() == 1
&& methodInvokeExpr.getArguments().get(0) instanceof SQLQueryExpr) {
throw new ParserException("exists syntax error.");
}
if (lexer.token == Token.FROM) {
lexer.nextToken();
SQLExpr from = this.expr();
methodInvokeExpr.setFrom(from);
if (lexer.token == Token.FOR) {
lexer.nextToken();
SQLExpr forExpr = expr();
methodInvokeExpr.setFor(forExpr);
}
}
if (lexer.token == Token.USING || lexer.identifierEquals(FnvHash.Constants.USING)) {
lexer.nextToken();
SQLExpr using;
if (lexer.token == Token.STAR) {
lexer.nextToken();
using = new SQLAllColumnExpr();
} else if (lexer.token == Token.BINARY) {
using = new SQLIdentifierExpr(lexer.stringVal());
lexer.nextToken();
} else {
using = this.primary();
}
methodInvokeExpr.setUsing(using);
}
// mysql
if (hash_lower == FnvHash.Constants.WEIGHT_STRING) {
if (lexer.token == Token.AS) {
lexer.nextToken();
SQLDataType as = this.parseDataType();
methodInvokeExpr.putAttribute("as", as);
}
if (lexer.identifierEquals(FnvHash.Constants.LEVEL)) {
lexer.nextToken();
List<SQLSelectOrderByItem> levels = new ArrayList<SQLSelectOrderByItem>();
for (; ; ) {
SQLSelectOrderByItem level = this.parseSelectOrderByItem();
levels.add(level);
if (lexer.token == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
methodInvokeExpr.putAttribute("levels", levels);
}
if (lexer.identifierEquals(FnvHash.Constants.REVERSE)) {
lexer.nextToken();
methodInvokeExpr.putAttribute("reverse", true);
}
}
if (lexer.token == Token.AS || lexer.identifierEquals(FnvHash.Constants.AS)) {
lexer.nextToken();
final SQLExpr as = this.primary();
methodInvokeExpr.setAs(as);
}
SQLAggregateExpr aggregateExpr = null;
if (lexer.token == Token.ORDER) {
lexer.nextToken();
accept(Token.BY);
aggregateExpr = new SQLAggregateExpr(methodName);
aggregateExpr.getArguments().addAll(methodInvokeExpr.getArguments());
SQLOrderBy orderBy = new SQLOrderBy();
this.orderBy(orderBy.getItems(), orderBy);
aggregateExpr.setOrderBy(orderBy);
}
accept(Token.RPAREN);
methodRestUsing(methodInvokeExpr);
if (lexer.identifierEquals(FnvHash.Constants.FILTER)) {
if (aggregateExpr == null) {
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
Token nextToken = lexer.token;
lexer.reset(mark);
if (nextToken == Token.LPAREN) {
aggregateExpr = new SQLAggregateExpr(methodName);
aggregateExpr.getArguments().addAll(methodInvokeExpr.getArguments());
filter(aggregateExpr);
}
} else {
filter(aggregateExpr);
}
}
if (lexer.token == Token.OVER) {
if (aggregateExpr == null) {
aggregateExpr = new SQLAggregateExpr(methodName);
aggregateExpr.addArguments(methodInvokeExpr.getArguments());
}
over(aggregateExpr);
}
if (aggregateExpr != null) {
return primaryRest(aggregateExpr);
}
if (lexer.token == Token.LPAREN) {
return methodInvokeExpr;
}
return primaryRest(methodInvokeExpr);
//throw new ParserException("not support token:" + lexer.token + ", " + lexer.info());
}
    /**
     * Dialect hook for resolving an unusual token after '.'; returning null
     * makes {@code dotRest} report a parse error.
     */
    protected String doRestSpecific(SQLExpr expr) {
        return null;
    }
protected void aliasedItems(List<SQLAliasedExpr> items, SQLObject parent) {
while (true) {
SQLAliasedExpr aliasedExpr = aliasedExpr();
aliasedExpr.setParent(parent);
items.add(aliasedExpr);
if (lexer.nextIfComma()) {
if (lexer.token() == Token.FROM || lexer.token() == Token.RPAREN) {
break;
}
continue;
}
break;
}
}
    /**
     * Parses a single expression followed by an optional alias.
     */
    public SQLAliasedExpr aliasedExpr() {
        return new SQLAliasedExpr(expr(), as());
    }
    /**
     * Parses what follows a '.' after {@code expr}: a wildcard
     * (with optional EXCEPT/REPLACE lists), a sequence pseudo-column
     * (NEXTVAL/CURRVAL/PREVVAL), a method call, or a property access.
     * The '.' itself has already been consumed.
     */
    protected SQLExpr dotRest(SQLExpr expr) {
        if (lexer.nextIf(STAR)) {
            // owner.* [EXCEPT (...)] [REPLACE (...)]
            SQLAllColumnExpr allColumnExpr = new SQLAllColumnExpr();
            allColumnExpr.setOwner(expr);
            if (lexer.nextIf(EXCEPT)) {
                accept(Token.LPAREN);
                List<SQLExpr> except = new ArrayList<>();
                this.exprList(except, allColumnExpr);
                allColumnExpr.setExcept(except);
                accept(Token.RPAREN);
            }
            if (lexer.nextIf(REPLACE)) {
                accept(Token.LPAREN);
                this.aliasedItems(allColumnExpr.getReplace(), allColumnExpr);
                accept(Token.RPAREN);
            }
            expr = allColumnExpr;
        } else {
            String name;
            long hash_lower = 0L;
            if (lexer.token == Token.IDENTIFIER) {
                name = lexer.stringVal();
                hash_lower = lexer.hashLCase;
                lexer.nextToken();
                // seq.NEXTVAL / seq.CURRVAL / seq.PREVVAL pseudo-columns.
                if (hash_lower == FnvHash.Constants.NEXTVAL) {
                    expr = new SQLSequenceExpr((SQLName) expr, SQLSequenceExpr.Function.NextVal);
                    return primaryRest(expr);
                } else if (hash_lower == FnvHash.Constants.CURRVAL) {
                    expr = new SQLSequenceExpr((SQLName) expr, SQLSequenceExpr.Function.CurrVal);
                    return primaryRest(expr);
                } else if (hash_lower == FnvHash.Constants.PREVVAL) {
                    expr = new SQLSequenceExpr((SQLName) expr, SQLSequenceExpr.Function.PrevVal);
                    return primaryRest(expr);
                }
            } else if (lexer.token == Token.LITERAL_CHARS
                    || lexer.token == Token.LITERAL_ALIAS) {
                name = lexer.stringVal();
                lexer.nextToken();
            } else if (lexer.getKeywords().containsValue(lexer.token)) {
                // Keywords are allowed as property names after '.'.
                name = lexer.stringVal();
                lexer.nextToken();
            } else if (lexer.token == Token.VARIANT && lexer.stringVal().startsWith("$")) {
                name = lexer.stringVal();
                lexer.nextToken();
            } else {
                // Last resort: let the dialect resolve the token.
                name = doRestSpecific(expr);
                if (name == null) {
                    throw new ParserException("error : " + lexer.info());
                }
            }
            if (lexer.token == Token.LPAREN) {
                // owner.method(...) — WMSYS.WM_CONCAT is parsed as an aggregate.
                boolean aggregate = hash_lower == FnvHash.Constants.WM_CONCAT
                        && expr instanceof SQLIdentifierExpr
                        && ((SQLIdentifierExpr) expr).nameHashCode64() == FnvHash.Constants.WMSYS;
                expr = methodRest(expr, name, aggregate);
            } else {
                // Backquoted names may be unquoted when the feature is enabled.
                if (name.length() > 0 && name.charAt(0) == '`') {
                    if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
                        name = name.substring(1, name.length() - 1);
                    }
                    hash_lower = FnvHash.hashCode64(name);
                }
                expr = new SQLPropertyExpr(expr, name, hash_lower);
            }
        }
        expr = primaryRest(expr);
        return expr;
    }
    /**
     * Parses an owner-qualified call {@code owner.name(...)} whose '(' is the
     * current token. Three shapes: DISTINCT aggregate, forced aggregate
     * (e.g. WMSYS.WM_CONCAT), or a plain method invocation that may still be
     * promoted to an aggregate by a following OVER clause.
     */
    private SQLExpr methodRest(SQLExpr expr, String name, boolean aggregate) {
        lexer.nextToken();
        if (lexer.token == Token.DISTINCT) {
            lexer.nextToken();
            SQLAggregateExpr aggregateExpr = new SQLAggregateExpr(name, SQLAggregateOption.DISTINCT);
            aggregateExpr.setOwner(expr);
            if (lexer.token == Token.RPAREN) {
                lexer.nextToken();
            } else {
                // A lone '+' argument (Oracle outer-join style) is kept as an identifier.
                if (lexer.token == Token.PLUS) {
                    aggregateExpr.getArguments().add(new SQLIdentifierExpr("+"));
                    lexer.nextToken();
                } else {
                    exprList(aggregateExpr.getArguments(), aggregateExpr);
                }
                accept(Token.RPAREN);
            }
            expr = aggregateExpr;
        } else if (aggregate) {
            SQLAggregateExpr methodInvokeExpr = new SQLAggregateExpr(name);
            methodInvokeExpr.setMethodName(expr.toString() + "." + name);
            if (lexer.token == Token.RPAREN) {
                lexer.nextToken();
            } else {
                if (lexer.token == Token.PLUS) {
                    methodInvokeExpr.addArgument(new SQLIdentifierExpr("+"));
                    lexer.nextToken();
                } else {
                    exprList(methodInvokeExpr.getArguments(), methodInvokeExpr);
                }
                accept(Token.RPAREN);
            }
            if (lexer.token == Token.OVER) {
                over(methodInvokeExpr);
            }
            expr = methodInvokeExpr;
        } else {
            SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(name);
            methodInvokeExpr.setOwner(expr);
            if (lexer.token == Token.RPAREN) {
                lexer.nextToken();
            } else {
                if (lexer.token == Token.PLUS) {
                    methodInvokeExpr.addArgument(new SQLIdentifierExpr("+"));
                    lexer.nextToken();
                } else {
                    exprList(methodInvokeExpr.getArguments(), methodInvokeExpr);
                }
                accept(Token.RPAREN);
            }
            if (lexer.token == Token.OVER) {
                // OVER turns the plain call into a window aggregate.
                SQLAggregateExpr aggregateExpr = new SQLAggregateExpr(methodInvokeExpr.getMethodName());
                aggregateExpr.setOwner(methodInvokeExpr.getOwner());
                aggregateExpr.getArguments().addAll(methodInvokeExpr.getArguments());
                over(aggregateExpr);
                methodInvokeExpr = aggregateExpr;
            }
            expr = methodInvokeExpr;
        }
        return expr;
    }
    /**
     * Hook for group comparison suffixes; the base implementation returns
     * the expression unchanged.
     */
    public final SQLExpr groupComparisionRest(SQLExpr expr) {
        return expr;
    }
    /**
     * Parses a comma-separated name list without attaching a parent node.
     */
    public final void names(Collection<SQLName> exprCol) {
        names(exprCol, null);
    }
public final void names(Collection<SQLName> exprCol, SQLObject parent) {
if (lexer.token == Token.RBRACE) {
return;
}
if (lexer.token == Token.EOF) {
return;
}
SQLName name = name();
name.setParent(parent);
exprCol.add(name);
while (lexer.token == Token.COMMA) {
lexer.nextToken();
if (parent instanceof SQLLateralViewTableSource && lexer.token == Token.NULL) {
name = new SQLIdentifierExpr(lexer.stringVal());
lexer.nextToken();
} else {
name = name();
}
name.setParent(parent);
exprCol.add(name);
}
}
    /**
     * @deprecated use {@link #exprList(Collection, SQLObject)} so parsed
     * expressions receive a parent node.
     */
    @Deprecated
    public final void exprList(Collection<SQLExpr> exprCol) {
        exprList(exprCol, null);
    }
    /**
     * Consumes the comma between list elements; overridable so dialects can
     * customize separator handling.
     */
    protected void exprListComma() {
        lexer.nextToken();
    }
    /**
     * Parses a comma-separated expression list; delegates to the
     * three-argument overload with nested-data-type handling disabled.
     */
    public final void exprList(Collection<SQLExpr> exprCol, SQLObject parent) {
        exprList(exprCol, parent, false);
    }
    /**
     * Parses a comma-separated expression list into {@code exprCol}, attaching
     * {@code parent} to each element. Returns immediately on ')', ']', ';' or
     * EOF. When the parent is a data type, ROW (and, with
     * {@code isNestDataType}, identifiers) are parsed as nested data types.
     */
    public final void exprList(Collection<SQLExpr> exprCol, SQLObject parent, boolean isNestDataType) {
        if (lexer.token == Token.RPAREN
                || lexer.token == Token.RBRACKET
                || lexer.token == Token.SEMI) {
            return;
        }
        if (lexer.token == Token.EOF) {
            return;
        }
        for (; ; ) {
            SQLExpr expr;
            // Nested type element, e.g. ROW(...) inside a STRUCT data type.
            if ((lexer.token == Token.ROW || (isNestDataType && lexer.token == IDENTIFIER)) && parent instanceof SQLDataType) {
                SQLDataType dataType = this.parseDataType();
                expr = new SQLDataTypeRefExpr(dataType);
            } else {
                expr = expr();
            }
            if (expr != null) {
                expr.setParent(parent);
                exprCol.add(expr);
                // Attach a single trailing "--" line comment to the element;
                // https://github.com/alibaba/druid/issues/5709
                if (lexer.hasComment()
                        && lexer.isKeepComments()
                        && lexer.getComments().size() == 1
                        && lexer.getComments().get(0).startsWith("--")) {
                    expr.addAfterComment(lexer.readAndResetComments());
                }
            }
            if (lexer.token == Token.COMMA) {
                exprListComma();
                continue;
            }
            break;
        }
    }
public SQLIdentifierExpr identifier() {
SQLName name = name();
if (name instanceof SQLIdentifierExpr) {
return (SQLIdentifierExpr) name;
}
throw new ParserException("identifier excepted, " + lexer.info());
}
    /**
     * Dialect hook for keyword tokens that may act as identifiers; the base
     * implementation rejects them.
     */
    protected String nameCommon() {
        throw new ParserException("illegal name, " + lexer.info());
    }
    /**
     * Parses a (possibly qualified) SQL name. Accepts quoted aliases, plain
     * identifiers, string literals (re-quoted), variants, and a large set of
     * keyword tokens that are legal as names; dot-qualification is handled
     * by {@link #nameRest(SQLName)}.
     *
     * @throws ParserException when the current token cannot start a name
     */
    public SQLName name() {
        String identName;
        long hash = 0;
        if (lexer.token == Token.LITERAL_ALIAS) {
            identName = lexer.stringVal();
            lexer.nextToken();
        } else if (lexer.token == Token.IDENTIFIER) {
            identName = lexer.stringVal();
            char c0 = identName.charAt(0);
            // '['-quoted names get no precomputed hash.
            if (c0 != '[') {
                hash = lexer.hashLCase();
            }
            lexer.nextToken();
        } else if (lexer.token == Token.LITERAL_CHARS) {
            // String literal used as a name: keep it single-quoted.
            identName = '\'' + lexer.stringVal() + '\'';
            lexer.nextToken();
        } else if (lexer.token == Token.VARIANT) {
            identName = lexer.stringVal();
            lexer.nextToken();
        } else {
            switch (lexer.token) {
                // Keywords accepted as plain names in the base parser:
                // case MODEL:
                case MODE:
                case ERRORS:
                case NOWAIT:
                case COMMIT:
                case PCTFREE:
                case INITRANS:
                case MAXTRANS:
                case SEGMENT:
                case CREATION:
                case IMMEDIATE:
                case DEFERRED:
                case STORAGE:
                case NEXT:
                case MINEXTENTS:
                case MAXEXTENTS:
                case MAXSIZE:
                case PCTINCREASE:
                case FLASH_CACHE:
                case CELL_FLASH_CACHE:
                case NONE:
                case LOB:
                case STORE:
                case ROW:
                case CHUNK:
                case CACHE:
                case NOCACHE:
                case LOGGING:
                case NOCOMPRESS:
                case KEEP_DUPLICATES:
                case EXCEPTIONS:
                case PURGE:
                case INITIALLY:
                case END:
                case COMMENT:
                case ENABLE:
                case DISABLE:
                case SEQUENCE:
                case USER:
                case ANALYZE:
                case OPTIMIZE:
                case GRANT:
                case REVOKE:
                // BINARY has many meanings; the lexer produces this token,
                // but here it should be treated as a plain IDENTIFIER.
                case BINARY:
                case OVER:
                case ORDER:
                case DO:
                case INNER:
                case JOIN:
                case TYPE:
                case FUNCTION:
                case KEY:
                case UNIQUE:
                case SCHEMA:
                case INTERVAL:
                case EXPLAIN:
                case SET:
                case TABLESPACE:
                case PARTITION:
                case CLOSE:
                case INOUT:
                case GOTO:
                case DEFAULT:
                case FULLTEXT:
                case WITH:
                case ANY:
                case BEGIN:
                case CAST:
                case COMPUTE:
                case ESCAPE:
                case EXCEPT:
                case FULL:
                case INTERSECT:
                case MERGE:
                case MINUS:
                case OPEN:
                case SOME:
                case TRUNCATE:
                case UNTIL:
                case VIEW:
                case GROUP:
                case INDEX:
                case DESC:
                case ALL:
                case SHOW:
                case FOR:
                case LEAVE:
                case REPEAT:
                case LOOP:
                case IS:
                case LOCK:
                case REFERENCES:
                case EXCEPTION:
                    identName = lexer.stringVal();
                    lexer.nextToken();
                    break;
                // Keywords whose acceptance is dialect-dependent:
                case CONSTRAINT:
                case CHECK:
                case VALUES:
                case IN:
                case OUT:
                case LIMIT:
                case TRIGGER:
                case USE:
                case LIKE:
                case DISTRIBUTE:
                case DELETE:
                case UPDATE:
                case PROCEDURE:
                case LEFT:
                case RIGHT:
                case TABLE:
                case RLIKE:
                case CREATE:
                case PARTITIONED:
                case UNION:
                case PRIMARY:
                case TO:
                case DECLARE:
                case AS:
                case BY:
                case EXISTS:
                case FOREIGN:
                case ALTER:
                case ASC:
                case NULL:
                case CURSOR:
                case FETCH:
                case BITMAP:
                case NGRAMBF:
                case INVERTED:
                case DATABASE:
                    identName = nameCommon();
                    break;
                default:
                    throw new ParserException("illegal name, " + lexer.info());
            }
        }
        SQLName identifierExpr = null;
        if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
            if (identName.indexOf('.') == -1) {
                identName = SQLUtils.forcedNormalize(identName, dbType);
                // Hash no longer matches the normalized text.
                hash = 0;
            } else {
                // Dotted text: re-split into a property chain.
                identifierExpr = (SQLName) primaryIdentifierRest(hash, identName);
            }
        }
        if (identifierExpr == null) {
            identifierExpr = new SQLIdentifierExpr(identName, hash);
        }
        if (lexer.keepSourceLocation) {
            lexer.computeRowAndColumn(identifierExpr);
        }
        SQLName name = identifierExpr;
        name = nameRest(name);
        return name;
    }
    /**
     * Consumes dot-qualification after a name, building a
     * {@link SQLPropertyExpr} chain recursively ({@code a.b.c}). Keywords,
     * variants and string literals are accepted as property names.
     */
    public SQLName nameRest(SQLName name) {
        if (lexer.token == Token.DOT) {
            lexer.nextToken();
            // ".KEY" is special-cased and terminates the chain.
            if (lexer.token == Token.KEY) {
                name = new SQLPropertyExpr(name, "KEY");
                lexer.nextToken();
                return name;
            }
            if (lexer.token != Token.LITERAL_ALIAS
                    && lexer.token != Token.IDENTIFIER
                    && lexer.token != Token.VARIANT
                    && lexer.token != Token.LITERAL_CHARS
                    && (!lexer.getKeywords().containsValue(lexer.token))) {
                throw new ParserException("error, " + lexer.info());
            }
            String propertyName;
            if (lexer.token == Token.LITERAL_CHARS) {
                // String literal property: keep it single-quoted.
                propertyName = '\'' + lexer.stringVal() + '\'';
            } else {
                propertyName = lexer.stringVal();
            }
            if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
                propertyName = SQLUtils.forcedNormalize(propertyName, dbType);
            }
            name = new SQLPropertyExpr(name, propertyName);
            lexer.nextToken();
            name = nameRest(name);
        }
        return name;
    }
public boolean isAggregateFunction(String word) {
long hash_lower = FnvHash.fnv1a_64_lower(word);
return isAggregateFunction(hash_lower);
}
    /**
     * Returns true when the lowercase FNV hash matches a known aggregate
     * function; relies on {@code aggregateFunctionHashCodes} being sorted.
     */
    protected boolean isAggregateFunction(long hash_lower) {
        return Arrays.binarySearch(aggregateFunctionHashCodes, hash_lower) >= 0;
    }
protected String getAggregateFunction(long hash_lower) {
int index = Arrays.binarySearch(aggregateFunctionHashCodes, hash_lower);
if (index < 0) {
return null;
}
return aggregateFunctions[index];
}
    /**
     * Parses the body of an aggregate call after its name and '(' have been
     * consumed: ALL/DISTINCT option, arguments, inner ORDER BY/LIMIT, then
     * post-paren IGNORE/RESPECT NULLS, WITHIN GROUP, FILTER and OVER clauses.
     * IGNORE/RESPECT NULLS is checked both inside and after the parentheses
     * because dialects place it differently.
     */
    protected SQLAggregateExpr parseAggregateExpr(String methodName) {
        SQLAggregateExpr aggregateExpr;
        if (lexer.token == Token.ALL) {
            // "ALL.x" means ALL is an owner name, not the aggregate option.
            Lexer.SavePoint mark = lexer.mark();
            lexer.nextToken();
            if (lexer.token == Token.DOT) {
                aggregateExpr = new SQLAggregateExpr(methodName);
                lexer.reset(mark);
            } else {
                aggregateExpr = new SQLAggregateExpr(methodName, SQLAggregateOption.ALL);
            }
        } else if (lexer.token == Token.DISTINCT) {
            aggregateExpr = new SQLAggregateExpr(methodName, SQLAggregateOption.DISTINCT);
            lexer.nextToken();
        } else if (lexer.identifierEquals(FnvHash.Constants.DEDUPLICATION)) { // just for nut
            aggregateExpr = new SQLAggregateExpr(methodName, SQLAggregateOption.DEDUPLICATION);
            lexer.nextToken();
        } else {
            aggregateExpr = new SQLAggregateExpr(methodName);
        }
        exprList(aggregateExpr.getArguments(), aggregateExpr);
        // IGNORE/RESPECT NULLS inside the parentheses.
        if (lexer.identifierEquals(FnvHash.Constants.IGNORE)) {
            lexer.nextToken();
            acceptIdentifier("NULLS");
            aggregateExpr.setIgnoreNulls(true);
        }
        if (lexer.identifierEquals(FnvHash.Constants.RESPECT)) {
            lexer.nextToken();
            acceptIdentifier("NULLS");
            aggregateExpr.setRespectNulls(true);
        }
        if (lexer.token != Token.RPAREN) {
            parseAggregateExprRest(aggregateExpr);
        }
        // ORDER BY / LIMIT inside the argument list (e.g. GROUP_CONCAT).
        if (lexer.token == ORDER) {
            aggregateExpr.setOrderBy(
                    this.parseOrderBy());
        }
        if (lexer.nextIf(LIMIT)) {
            aggregateExpr.setLimit(
                    expr()
            );
        }
        accept(Token.RPAREN);
        // IGNORE/RESPECT NULLS after the closing parenthesis.
        if (lexer.identifierEquals(FnvHash.Constants.IGNORE)) {
            lexer.nextToken();
            acceptIdentifier("NULLS");
            aggregateExpr.setIgnoreNulls(true);
        }
        if (lexer.identifierEquals(FnvHash.Constants.RESPECT)) {
            lexer.nextToken();
            acceptIdentifier("NULLS");
            aggregateExpr.setRespectNulls(true);
        }
        // WITHIN GROUP (ORDER BY ...) for ordered-set aggregates.
        if (lexer.nextIfIdentifier(FnvHash.Constants.WITHIN)) {
            accept(Token.GROUP);
            accept(Token.LPAREN);
            SQLOrderBy orderBy = this.parseOrderBy();
            aggregateExpr.setWithinGroup(true);
            aggregateExpr.setOrderBy(orderBy);
            accept(Token.RPAREN);
        }
        if (lexer.identifierEquals(FnvHash.Constants.FILTER)) {
            filter(aggregateExpr);
        }
        if (lexer.token == Token.OVER) {
            over(aggregateExpr);
        }
        return aggregateExpr;
    }
protected void filter(SQLAggregateExpr x) {
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
switch (lexer.token) {
case COMMA:
case FROM:
lexer.reset(mark);
return;
default:
break;
}
accept(Token.LPAREN);
accept(Token.WHERE);
SQLExpr filter = this.expr();
accept(Token.RPAREN);
x.setFilter(filter);
}
protected void over(SQLAggregateExpr aggregateExpr) {
lexer.nextToken();
if (lexer.token != Token.LPAREN) {
SQLName overRef = this.name();
aggregateExpr.setOverRef(overRef);
return;
}
SQLOver over = new SQLOver();
over(over);
aggregateExpr.setOver(over);
}
    /**
     * Parses the body of an inline window specification starting at the token
     * after OVER: optional PARTITION BY, then ORDER/DISTRIBUTE/SORT/CLUSTER BY,
     * an optional OF name, an optional ROWS/RANGE frame (with or without
     * BETWEEN), and an optional EXCLUDE CURRENT ROW; consumes the closing ')'.
     */
    protected void over(SQLOver over) {
        lexer.nextToken();
        if (lexer.token == Token.PARTITION || lexer.identifierEquals("PARTITION")) {
            lexer.nextToken();
            accept(Token.BY);
            if (lexer.token == (Token.LPAREN)) {
                lexer.nextToken();
                exprList(over.getPartitionBy(), over);
                accept(Token.RPAREN);
                // A single parenthesized item may actually begin an arithmetic
                // expression, e.g. PARTITION BY (a) + b — continue parsing it.
                if (over.getPartitionBy().size() == 1) {
                    switch (lexer.token) {
                        case SLASH:
                        case DIV:
                        case STAR:
                        case PLUS:
                        case SUB:
                            SQLExpr first = this.exprRest(over.getPartitionBy().get(0));
                            first.setParent(over);
                            over.getPartitionBy().set(0, first);
                            break;
                        default:
                            break;
                    }
                    if (lexer.token == Token.COMMA) {
                        lexer.nextToken();
                        exprList(over.getPartitionBy(), over);
                    }
                }
            } else if (lexer.token == Token.ALL) {
                // ALL is a keyword token but here it is used as a column name.
                SQLName name = this.name();
                name.setParent(over);
                over.getPartitionBy().add(name);
                if (lexer.token == Token.COMMA) {
                    lexer.nextToken();
                    exprList(over.getPartitionBy(), over);
                }
            } else {
                exprList(over.getPartitionBy(), over);
            }
        }
        // Each of these parse* methods returns null if its clause is absent.
        over.setOrderBy(parseOrderBy());
        over.setDistributeBy(parseDistributeBy());
        over.setSortBy(parseSortBy());
        over.setClusterBy(parseClusterBy());
        if (lexer.token == Token.OF) {
            lexer.nextToken();
            SQLName of = this.name();
            over.setOf(of);
        }
        SQLOver.WindowingType windowingType = null;
        if (lexer.identifierEquals(FnvHash.Constants.ROWS) || lexer.token == Token.ROWS) {
            windowingType = SQLOver.WindowingType.ROWS;
        } else if (lexer.identifierEquals(FnvHash.Constants.RANGE)) {
            windowingType = SQLOver.WindowingType.RANGE;
        }
        if (windowingType != null) {
            over.setWindowingType(windowingType);
            lexer.nextToken();
            if (lexer.token == Token.BETWEEN) {
                // Frame form: BETWEEN <begin> AND <end>.
                lexer.nextToken();
                if (lexer.token == Token.LITERAL_INT
                        || lexer.token == Token.LITERAL_FLOAT
                        || lexer.token == Token.LITERAL_CHARS
                        || lexer.token == Token.CAST
                ) {
                    SQLExpr betweenBegin = this.additive();
                    over.setWindowingBetweenBegin(betweenBegin);
                } else if (lexer.token == Token.IDENTIFIER) {
                    long hash = lexer.hashLCase();
                    // Only a non-bound identifier can be a begin expression;
                    // PRECEDING/FOLLOWING/CURRENT/UNBOUNDED are bound keywords.
                    if (hash != FnvHash.Constants.PRECEDING
                            && hash != FnvHash.Constants.FOLLOWING
                            && hash != FnvHash.Constants.CURRENT
                            && hash != FnvHash.Constants.UNBOUNDED) {
                        SQLExpr betweenBegin = this.primary();
                        over.setWindowingBetweenBegin(betweenBegin);
                    }
                } else if (lexer.token == Token.INTERVAL) {
                    SQLExpr betweenBegin = this.primary();
                    over.setWindowingBetweenBegin(betweenBegin);
                }
                final SQLOver.WindowingBound beginBound = parseWindowingBound();
                if (beginBound != null) {
                    over.setWindowingBetweenBeginBound(beginBound);
                }
                accept(Token.AND);
                if (lexer.token == Token.LITERAL_INT
                        || lexer.token == Token.LITERAL_FLOAT
                        || lexer.token == Token.LITERAL_CHARS
                ) {
                    SQLExpr betweenEnd = this.additive();
                    over.setWindowingBetweenEnd(betweenEnd);
                } else if (lexer.token == Token.INTERVAL) {
                    SQLExpr betweenBegin = this.additive();
                    over.setWindowingBetweenEnd(betweenBegin);
                } else if (lexer.token == Token.IDENTIFIER) {
                    long hash = lexer.hashLCase();
                    if (hash != FnvHash.Constants.PRECEDING
                            && hash != FnvHash.Constants.FOLLOWING
                            && hash != FnvHash.Constants.CURRENT
                            && hash != FnvHash.Constants.UNBOUNDED) {
                        SQLExpr betweenBegin = this.additive();
                        over.setWindowingBetweenEnd(betweenBegin);
                    }
                }
                final SQLOver.WindowingBound endBound = parseWindowingBound();
                if (endBound != null) {
                    over.setWindowingBetweenEndBound(endBound);
                }
            } else {
                // Frame form without BETWEEN: a single begin bound.
                if (lexer.token == Token.LITERAL_INT
                        || lexer.token == Token.LITERAL_FLOAT
                        || lexer.token == Token.LITERAL_CHARS
                        || lexer.token == Token.INTERVAL
                ) {
                    SQLExpr betweenBegin = this.additive();
                    over.setWindowingBetweenBegin(betweenBegin);
                } else if (lexer.token == Token.IDENTIFIER) {
                    long hash = lexer.hashLCase();
                    if (hash != FnvHash.Constants.PRECEDING
                            && hash != FnvHash.Constants.FOLLOWING
                            && hash != FnvHash.Constants.CURRENT
                            && hash != FnvHash.Constants.UNBOUNDED) {
                        SQLExpr betweenBegin = this.additive();
                        over.setWindowingBetweenBegin(betweenBegin);
                    }
                }
                final SQLOver.WindowingBound beginBound = parseWindowingBound();
                if (beginBound != null) {
                    over.setWindowingBetweenBeginBound(beginBound);
                }
            }
        }
        if (lexer.identifierEquals(FnvHash.Constants.EXCLUDE)) {
            lexer.nextToken();
            acceptIdentifier("CURRENT");
            acceptIdentifier("ROW");
            over.setExcludeCurrentRow(true);
        }
        accept(Token.RPAREN);
    }
    /**
     * Parses a window-frame bound keyword: PRECEDING, FOLLOWING, CURRENT ROW,
     * UNBOUNDED PRECEDING, or UNBOUNDED FOLLOWING.
     *
     * @return the parsed bound, or {@code null} if the current token does not
     *         start a bound (in which case nothing is consumed)
     */
    protected SQLOver.WindowingBound parseWindowingBound() {
        if (lexer.identifierEquals(FnvHash.Constants.PRECEDING)) {
            lexer.nextToken();
            return SQLOver.WindowingBound.PRECEDING;
        } else if (lexer.identifierEquals(FnvHash.Constants.FOLLOWING)) {
            lexer.nextToken();
            return SQLOver.WindowingBound.FOLLOWING;
        } else if (lexer.identifierEquals(FnvHash.Constants.CURRENT) || lexer.token == Token.CURRENT) {
            lexer.nextToken();
            // ROW may arrive as either an identifier or the ROW keyword token.
            if (lexer.identifierEquals(FnvHash.Constants.ROW)) {
                lexer.nextToken();
            } else {
                accept(Token.ROW);
            }
            return SQLOver.WindowingBound.CURRENT_ROW;
        } else if (lexer.identifierEquals(FnvHash.Constants.UNBOUNDED)) {
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.PRECEDING)) {
                lexer.nextToken();
                return SQLOver.WindowingBound.UNBOUNDED_PRECEDING;
            } else {
                acceptIdentifier("FOLLOWING");
                return SQLOver.WindowingBound.UNBOUNDED_FOLLOWING;
            }
        }
        return null;
    }
    // Extension hook: dialect subclasses may parse extra syntax inside an
    // aggregate call's parentheses; the base implementation is a no-op.
    protected SQLAggregateExpr parseAggregateExprRest(SQLAggregateExpr aggregateExpr) {
        return aggregateExpr;
    }
public SQLOrderBy parseOrderBy() {
if (lexer.token == Token.ORDER) {
SQLOrderBy orderBy = new SQLOrderBy();
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.SIBLINGS)) {
lexer.nextToken();
orderBy.setSiblings(true);
}
accept(Token.BY);
orderBy(orderBy.getItems(), orderBy);
if (lexer.token == Token.ORDER) {
throw new ParserException(lexer.info()); // dual order by
}
return orderBy;
}
return null;
}
    /**
     * Parses a ZORDER [SIBLINGS] BY clause.
     *
     * @return the parsed clause, or {@code null} when the current identifier
     *         is not ZORDER
     */
    public SQLZOrderBy parseZOrderBy() {
        if (lexer.identifierEquals("ZORDER")) {
            SQLZOrderBy orderBy = new SQLZOrderBy();
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.SIBLINGS)) {
                lexer.nextToken();
                // NOTE(review): "setSibings" looks misspelled but matches the
                // SQLZOrderBy API name — confirm before renaming anywhere.
                orderBy.setSibings(true);
            }
            accept(Token.BY);
            orderBy(orderBy.getItems(), orderBy);
            if (lexer.token == Token.ORDER) {
                throw new ParserException(lexer.info()); // dual order by
            }
            return orderBy;
        }
        return null;
    }
public SQLOrderBy parseDistributeBy() {
if (lexer.token == Token.DISTRIBUTE || lexer.identifierEquals("DISTRIBUTE")) {
SQLOrderBy orderBy = new SQLOrderBy();
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.SIBLINGS)) {
lexer.nextToken();
orderBy.setSiblings(true);
}
accept(Token.BY);
orderBy(orderBy.getItems(), orderBy);
if (lexer.token == Token.ORDER) {
throw new ParserException(lexer.info()); // dual order by
}
return orderBy;
}
return null;
}
    /**
     * Parses a SORT [SIBLINGS] BY clause.
     *
     * @return the parsed clause, or {@code null} when the current token does
     *         not start one
     */
    public SQLOrderBy parseSortBy() {
        if (lexer.token == Token.SORT || lexer.identifierEquals(FnvHash.Constants.SORT)) {
            SQLOrderBy orderBy = new SQLOrderBy();
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.SIBLINGS)) {
                lexer.nextToken();
                orderBy.setSiblings(true);
            }
            accept(Token.BY);
            orderBy(orderBy.getItems(), orderBy);
            if (lexer.token == Token.ORDER) {
                throw new ParserException(lexer.info()); // dual order by
            }
            return orderBy;
        }
        return null;
    }
public SQLOrderBy parseClusterBy() {
if (lexer.identifierEquals(FnvHash.Constants.CLUSTER)) {
SQLOrderBy orderBy = new SQLOrderBy();
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.SIBLINGS)) {
lexer.nextToken();
orderBy.setSiblings(true);
}
accept(Token.BY);
orderBy(orderBy.getItems(), orderBy);
if (lexer.token == Token.ORDER) {
throw new ParserException(lexer.info()); // dual order by
}
return orderBy;
}
return null;
}
public void orderBy(List<SQLSelectOrderByItem> items, SQLObject parent) {
SQLSelectOrderByItem item = parseSelectOrderByItem();
item.setParent(parent);
items.add(item);
while (lexer.nextIf(Token.COMMA) || (item.getExpr() instanceof SQLVariantRefExpr && lexer.token == IDENTIFIER)) {
item = parseSelectOrderByItem();
item.setParent(parent);
items.add(item);
}
}
    /**
     * Parses one order-by item: the key expression, then optional COLLATE,
     * opclass, ASC/DESC, NULLS FIRST/LAST, and a trailing hint. Identifier
     * methods are disabled while the key expression is parsed so function-like
     * identifiers are not mis-read.
     */
    public SQLSelectOrderByItem parseSelectOrderByItem() {
        SQLSelectOrderByItem item = new SQLSelectOrderByItem();
        setAllowIdentifierMethod(false);
        try {
            SQLExpr expr;
            if (lexer.token() == Token.LITERAL_ALIAS) {
                expr = name();
                expr = primaryRest(expr);
                expr = exprRest(expr);
            } else if (lexer.token == Token.LPAREN) {
                // A parenthesized list may itself contain ASC/DESC markers,
                // e.g. (a ASC, b DESC); otherwise rewind and parse normally.
                Lexer.SavePoint mark = lexer.mark();
                lexer.nextToken();
                for (int i = 0; ; ++i) {
                    expr = this.expr();
                    if (lexer.token == Token.ASC) {
                        lexer.nextToken();
                        item.setType(SQLOrderingSpecification.ASC);
                        if (lexer.token == Token.COMMA) {
                            lexer.nextToken();
                            continue;
                        }
                        accept(Token.RPAREN);
                    } else if (lexer.token == Token.DESC) {
                        lexer.nextToken();
                        item.setType(SQLOrderingSpecification.DESC);
                        if (lexer.token == Token.COMMA) {
                            lexer.nextToken();
                            continue;
                        }
                        accept(Token.RPAREN);
                    } else {
                        if (i > 0 && lexer.token == Token.RPAREN) {
                            lexer.nextToken();
                            break;
                        }
                        // No ordering markers inside the parens: treat the
                        // whole parenthesized text as a single expression.
                        lexer.reset(mark);
                        expr = expr();
                    }
                    break;
                }
            } else {
                expr = expr();
            }
            // Optionally strip quote characters from qualified names when the
            // IgnoreNameQuotes parser feature is on.
            if (isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
                if (expr instanceof SQLPropertyExpr) {
                    SQLPropertyExpr propertyExpr = (SQLPropertyExpr) expr;
                    SQLExpr owner = propertyExpr.getOwner();
                    if (owner != null) {
                        String ownerStr = SQLUtils.toSQLString(owner);
                        if (ownerStr.length() > 1) {
                            ownerStr = StringUtils.removeNameQuotes(ownerStr);
                        }
                        propertyExpr.setOwner(ownerStr);
                    }
                    String name = propertyExpr.getName();
                    if (name.length() > 1) {
                        name = StringUtils.removeNameQuotes(name);
                        propertyExpr.setName(name);
                    }
                    expr = propertyExpr;
                }
            }
            item.setExpr(expr);
        } finally {
            setAllowIdentifierMethod(true);
        }
        if (lexer.identifierEquals(FnvHash.Constants.COLLATE)) {
            lexer.nextToken();
            String collate = lexer.stringVal();
            item.setCollate(collate);
            lexer.nextToken();
            // Collation may be schema-qualified: COLLATE schema.name.
            if (lexer.token == Token.DOT) {
                lexer.nextToken();
                String collateOther = lexer.stringVal();
                item.setCollate(collate + "." + collateOther);
                lexer.nextToken();
            }
        }
        if (lexer.token == Token.LITERAL_ALIAS) {
            SQLExpr name = this.expr();
            item.setOpclass(name);
        }
        if (lexer.token == Token.ASC) {
            lexer.nextToken();
            item.setType(SQLOrderingSpecification.ASC);
        } else if (lexer.token == Token.DESC) {
            lexer.nextToken();
            item.setType(SQLOrderingSpecification.DESC);
        }
        if (lexer.identifierEquals(FnvHash.Constants.NULLS)) {
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.FIRST) || lexer.token == Token.FIRST) {
                lexer.nextToken();
                item.setNullsOrderType(SQLSelectOrderByItem.NullsOrderType.NullsFirst);
            } else if (lexer.identifierEquals(FnvHash.Constants.LAST)) {
                lexer.nextToken();
                item.setNullsOrderType(SQLSelectOrderByItem.NullsOrderType.NullsLast);
            } else {
                throw new ParserException("TODO " + lexer.info());
            }
        }
        if (lexer.token == Token.HINT) {
            item.setHint(this.parseHint());
        }
        return item;
    }
    // Extension hook: called when an UPDATE SET column is followed by '[';
    // the base implementation ignores it (dialects may parse array subscripts).
    protected void parseUpdateSetItemLbracket(SQLUpdateSetItem item) {
    }
    /**
     * Parses one UPDATE SET assignment: either a parenthesized column list
     * {@code (a, b) = ...} or a (possibly dotted) column name, followed by
     * '=' or ':=' and the value expression.
     */
    public SQLUpdateSetItem parseUpdateSetItem() {
        SQLUpdateSetItem item = new SQLUpdateSetItem();
        if (lexer.token == (Token.LPAREN)) {
            lexer.nextToken();
            SQLListExpr list = new SQLListExpr();
            this.exprList(list.getItems(), list);
            accept(Token.RPAREN);
            item.setColumn(list);
        } else {
            String identName;
            long hash;
            Token token = lexer.token();
            if (token == Token.IDENTIFIER) {
                identName = lexer.stringVal();
                hash = lexer.hashLCase();
            } else if (token == Token.LITERAL_CHARS) {
                // A quoted string used as a column name keeps its quotes.
                identName = '\'' + lexer.stringVal() + '\'';
                hash = 0;
            } else {
                identName = lexer.stringVal();
                hash = 0;
            }
            // nextTokenEq primes the lexer to expect '=' next.
            lexer.nextTokenEq();
            SQLExpr expr = new SQLIdentifierExpr(identName, hash);
            while (lexer.token() == Token.DOT) {
                lexer.nextToken();
                String propertyName = lexer.stringVal();
                lexer.nextTokenEq();
                expr = new SQLPropertyExpr(expr, propertyName);
            }
            item.setColumn(expr);
        }
        if (lexer.token == Token.LBRACKET) {
            parseUpdateSetItemLbracket(item);
        }
        // Both '=' and ':=' are accepted as the assignment operator.
        if (lexer.token == Token.COLONEQ) {
            lexer.nextTokenValue();
        } else if (lexer.token == Token.EQ) {
            lexer.nextTokenValue();
        } else {
            throw new ParserException("syntax error, expect EQ, actual " + lexer.token + " "
                    + lexer.info());
        }
        item.setValue(this.expr());
        return item;
    }
public final SQLExpr bitAnd() {
SQLExpr expr = shift();
if (lexer.token == Token.AMP) {
expr = bitAndRest(expr);
}
return expr;
}
public final SQLExpr bitAndRest(SQLExpr expr) {
while (lexer.token == Token.AMP) {
lexer.nextToken();
SQLExpr rightExp = shift();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BitwiseAnd, rightExp, getDbType());
}
return expr;
}
public final SQLExpr bitOr() {
SQLExpr expr = bitAnd();
if (lexer.token == Token.BAR) {
expr = bitOrRest(expr);
}
return expr;
}
public final SQLExpr bitOrRest(SQLExpr expr) {
while (lexer.token == Token.BAR) {
lexer.nextToken();
SQLBinaryOperator op = SQLBinaryOperator.BitwiseOr;
if (lexer.token == Token.BAR) {
lexer.nextToken();
op = SQLBinaryOperator.Concat;
}
SQLExpr rightExp = bitAnd();
expr = new SQLBinaryOpExpr(expr, op, rightExp, getDbType());
expr = bitAndRest(expr);
}
return expr;
}
    /**
     * Parses the rest of an IN / CONTAINS predicate whose left side is
     * {@code expr}: an optional GLOBAL prefix, then either an IN list, an
     * IN (subquery), or a CONTAINS list. Rewinds and returns {@code expr}
     * unchanged when the tokens do not actually form such a predicate.
     */
    public final SQLExpr inRest(SQLExpr expr) {
        boolean global = false;
        // for clickhouse
        Lexer.SavePoint globalMark = lexer.mark();
        if (lexer.token == Token.GLOBAL) {
            global = true;
            lexer.nextToken();
            // global not in logic
            if (lexer.token == Token.NOT) {
                lexer.nextToken();
                return notRationalRest(expr, true);
            }
        }
        if (lexer.token == Token.IN) {
            Lexer.SavePoint mark = lexer.mark();
            lexer.nextTokenLParen();
            // "IN ," cannot start an IN list — back out without consuming.
            if (lexer.token == Token.COMMA) {
                lexer.reset(mark);
                return expr;
            }
            SQLInListExpr inListExpr = new SQLInListExpr(expr);
            List<SQLExpr> targetList = inListExpr.getTargetList();
            if (lexer.token == Token.LPAREN) {
                lexer.nextTokenValue();
                List<SQLCommentHint> hints = null;
                if (lexer.token == Token.HINT) {
                    hints = this.parseHints();
                }
                // IN (WITH ... SELECT ...) — a CTE-headed subquery.
                if (lexer.token == Token.WITH) {
                    SQLSelect select = this.createSelectParser().select();
                    SQLInSubQueryExpr queryExpr = new SQLInSubQueryExpr(select);
                    queryExpr.setExpr(expr);
                    accept(Token.RPAREN);
                    return queryExpr;
                }
                if (lexer.token != Token.RPAREN) {
                    for (; ; ) {
                        SQLExpr item;
                        // Fast path for plain integer literals in the list.
                        if (lexer.token == Token.LITERAL_INT) {
                            item = new SQLIntegerExpr(lexer.integerValue());
                            lexer.nextToken();
                            if (lexer.token != Token.COMMA && lexer.token != Token.RPAREN) {
                                item = this.primaryRest(item);
                                item = this.exprRest(item);
                            }
                        } else {
                            item = this.expr();
                        }
                        item.setParent(inListExpr);
                        targetList.add(item);
                        if (item instanceof SQLCharExpr
                                && lexer.token == Token.LITERAL_CHARS
                                && dialectFeatureEnabled(InRestSpecificOperation)
                        ) {
                            continue;
                        }
                        if (lexer.token == Token.COMMA) {
                            lexer.nextTokenValue();
                            // Some dialects allow a trailing comma before ')'.
                            if (lexer.token == Token.RPAREN && dialectFeatureEnabled(InRestSpecificOperation)) {
                                break;
                            }
                            continue;
                        }
                        item.addAfterComment(lexer.comments);
                        break;
                    }
                    // A single subquery item may continue as a compound query
                    // (UNION / EXCEPT / MINUS) — extend it in place.
                    switch (lexer.token) {
                        case MINUS:
                        case EXCEPT:
                        case UNION: {
                            if (targetList.size() == 1
                                    && targetList.get(0) instanceof SQLQueryExpr) {
                                SQLQueryExpr queryExpr = (SQLQueryExpr) targetList.get(0);
                                SQLSelectQuery query = this.createSelectParser().queryRest(queryExpr.getSubQuery().getQuery(), true);
                                if (query != queryExpr.getSubQuery()) {
                                    queryExpr.getSubQuery().setQuery(query);
                                }
                                if (hints != null && hints.size() > 0) {
                                    queryExpr.getSubQuery().setHeadHint(hints.get(0));
                                }
                            }
                            break;
                        }
                        default:
                            break;
                    }
                }
                int line = lexer.line;
                accept(Token.RPAREN);
                // Attach a same/next-line trailing "--" comment to the list.
                if (line + 1 == lexer.line
                        && lexer.hasComment()
                        && lexer.getComments().get(0).startsWith("--")) {
                    inListExpr.addAfterComment(lexer.readAndResetComments());
                }
            } else {
                // IN without parentheses: a single value.
                SQLExpr itemExpr = primary();
                itemExpr.setParent(inListExpr);
                targetList.add(itemExpr);
            }
            parseQueryPlanHint(inListExpr);
            expr = inListExpr;
            // IN (subquery) is represented as SQLInSubQueryExpr, not a list.
            if (targetList.size() == 1) {
                SQLExpr targetExpr = targetList.get(0);
                if (targetExpr instanceof SQLQueryExpr) {
                    SQLInSubQueryExpr inSubQueryExpr = new SQLInSubQueryExpr();
                    inSubQueryExpr.setExpr(inListExpr.getExpr());
                    inSubQueryExpr.setSubQuery(((SQLQueryExpr) targetExpr).getSubQuery());
                    inSubQueryExpr.setHint(inListExpr.getHint());
                    if (global) {
                        inSubQueryExpr.setGlobal(true);
                    }
                    expr = inSubQueryExpr;
                }
            }
        } else if (lexer.token == Token.CONTAINS) {
            lexer.nextTokenLParen();
            SQLContainsExpr containsExpr = new SQLContainsExpr(expr);
            List<SQLExpr> targetList = containsExpr.getTargetList();
            if (lexer.token == Token.LPAREN) {
                lexer.nextTokenValue();
                if (lexer.token == Token.WITH) {
                    SQLSelect select = this.createSelectParser().select();
                    SQLInSubQueryExpr queryExpr = new SQLInSubQueryExpr(select);
                    queryExpr.setExpr(expr);
                    accept(Token.RPAREN);
                    return queryExpr;
                }
                for (; ; ) {
                    SQLExpr item;
                    if (lexer.token == Token.LITERAL_INT) {
                        item = new SQLIntegerExpr(lexer.integerValue());
                        lexer.nextToken();
                        if (lexer.token != Token.COMMA && lexer.token != Token.RPAREN) {
                            item = this.primaryRest(item);
                            item = this.exprRest(item);
                        }
                    } else {
                        item = this.expr();
                    }
                    item.setParent(containsExpr);
                    targetList.add(item);
                    if (lexer.token == Token.COMMA) {
                        lexer.nextTokenValue();
                        continue;
                    }
                    break;
                }
                accept(Token.RPAREN);
            } else {
                SQLExpr itemExpr = primary();
                itemExpr.setParent(containsExpr);
                targetList.add(itemExpr);
            }
            expr = containsExpr;
        } else {
            // Not IN/CONTAINS after all — undo the GLOBAL consumption.
            lexer.reset(globalMark);
        }
        return expr;
    }
public final SQLExpr additive() {
SQLExpr expr = multiplicative();
if (lexer.token == Token.PLUS
|| lexer.token == Token.BARBAR
|| lexer.token == Token.CONCAT
|| lexer.token == Token.SUB) {
expr = additiveRest(expr);
}
return expr;
}
    /**
     * Folds one '+', '-', or pipes-as-concat operator onto {@code expr} and
     * recurses, producing a left-associative chain. Returns {@code expr}
     * unchanged when the current token is not an additive operator.
     */
    public final SQLExpr additiveRest(SQLExpr expr) {
        Token token = lexer.token;
        SQLBinaryOperator operator;
        if (token == Token.PLUS) {
            lexer.nextToken();
            // Hints between '+' and its right operand attach to the left expr.
            while (lexer.token == Token.HINT) {
                SQLCommentHint hint = parseHint();
                if (expr instanceof SQLObjectImpl) {
                    ((SQLObjectImpl) expr).setHint(hint);
                }
            }
            operator = SQLBinaryOperator.Add;
        } else if ((token == Token.BARBAR || token == Token.CONCAT)
                && (isEnabled(SQLParserFeature.PipesAsConcat) || dialectFeatureEnabled(AdditiveRestPipesAsConcat))) {
            // '||' is concat only when the feature/dialect says so; otherwise
            // it is handled as boolean OR elsewhere.
            lexer.nextToken();
            operator = SQLBinaryOperator.Concat;
        } else if (token == Token.SUB) {
            lexer.nextToken();
            operator = SQLBinaryOperator.Subtract;
        } else {
            return expr;
        }
        SQLExpr rightExp = multiplicative();
        return additiveRest(
                new SQLBinaryOpExpr(expr, operator, rightExp, dbType)
        );
    }
public final SQLExpr shift() {
SQLExpr expr = additive();
if (lexer.token == Token.LTLT || lexer.token == Token.GTGT || lexer.token == Token.GTGTGT) {
expr = shiftRest(expr);
}
return expr;
}
public SQLExpr shiftRest(SQLExpr expr) {
SQLBinaryOperator operator = null;
if (lexer.token == Token.LTLT) {
lexer.nextToken();
operator = SQLBinaryOperator.LeftShift;
} else if (lexer.token == Token.GTGT) {
lexer.nextToken();
operator = SQLBinaryOperator.RightShift;
} else if (lexer.token == Token.GTGTGT) {
lexer.nextToken();
operator = SQLBinaryOperator.RightShiftUnsigned;
}
if (operator != null) {
SQLExpr rightExp = additive();
expr = new SQLBinaryOpExpr(expr, operator, rightExp, dbType);
expr = shiftRest(expr);
}
return expr;
}
public SQLExpr and() {
SQLExpr expr = relational();
if (lexer.token == Token.AND || lexer.token == Token.AMPAMP) {
expr = andRest(expr);
}
return expr;
}
    //for ads
    /**
     * Attaches an optimizer hint following a predicate to that predicate's AST
     * node. Only hints whose text starts with '+' are accepted; for AND/OR
     * expressions without parentheses the hint attaches to the right operand.
     */
    public void parseQueryPlanHint(SQLExpr expr) {
        if (lexer.token == Token.HINT && (expr instanceof SQLInListExpr
                || expr instanceof SQLBinaryOpExpr
                || expr instanceof SQLInSubQueryExpr
                || expr instanceof SQLExistsExpr
                || expr instanceof SQLNotExpr
                || expr instanceof SQLBetweenExpr)) {
            String text = lexer.stringVal().trim();
            // Re-lex the hint text to check its first token safely.
            Lexer hintLex = SQLParserUtils.createLexer(text, dbType);
            hintLex.nextToken();
            // prevent SQL injection
            if (hintLex.token == Token.PLUS) {
                if (expr instanceof SQLBinaryOpExpr) {
                    SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr;
                    SQLBinaryOperator operator = binaryOpExpr.getOperator();
                    if (operator == SQLBinaryOperator.BooleanAnd
                            || operator == SQLBinaryOperator.BooleanOr) {
                        if (binaryOpExpr.isParenthesized()) {
                            binaryOpExpr.setHint(new SQLCommentHint(text));
                        } else {
                            // Without parens the hint belongs to the nearest
                            // (rightmost) predicate, not the whole AND/OR.
                            SQLExpr right = binaryOpExpr.getRight();
                            if (right instanceof SQLBinaryOpExpr
                                    || right instanceof SQLBetweenExpr) {
                                ((SQLExprImpl) right).setHint(new SQLCommentHint(text));
                            }
                        }
                    } else {
                        binaryOpExpr.setHint(new SQLCommentHint(text));
                    }
                } else if (expr instanceof SQLObjectImpl) {
                    ((SQLExprImpl) expr).setHint(new SQLCommentHint(text));
                } else {
                    throw new ParserException("TODO : " + lexer.info());
                }
                this.lexer.nextToken();
            }
        }
    }
    // Operator used for '&&' in andRest(); dialect subclasses may override.
    protected SQLBinaryOperator andRestGetAndOperator() {
        return SQLBinaryOperator.BooleanAnd;
    }
    /**
     * Folds a chain of AND / '&&' operators onto {@code expr}. When the
     * EnableSQLBinaryOpExprGroup feature is on, three or more AND operands are
     * collected into a single flat SQLBinaryOpExprGroup instead of a nested
     * binary tree. Comments preceding an AND are attached to the operand that
     * precedes them when comment-keeping is enabled.
     */
    public SQLExpr andRest(SQLExpr expr) {
        for (; ; ) {
            if (expr instanceof SQLBinaryOpExpr) {
                parseQueryPlanHint(expr);
            }
            Token token = lexer.token;
            if (token == Token.AND) {
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    expr.addAfterComment(lexer.readAndResetComments());
                }
                lexer.nextToken();
                SQLExpr rightExp = relational();
                if (expr instanceof SQLBinaryOpExpr) {
                    parseQueryPlanHint(rightExp);
                }
                if (lexer.token == Token.AND
                        && lexer.isEnabled(SQLParserFeature.EnableSQLBinaryOpExprGroup)) {
                    // Flatten a >=3-operand AND chain into one group node.
                    SQLBinaryOpExprGroup group = new SQLBinaryOpExprGroup(SQLBinaryOperator.BooleanAnd, dbType);
                    group.add(expr);
                    group.add(rightExp);
                    if (lexer.isKeepComments() && lexer.hasComment()) {
                        rightExp.addAfterComment(lexer.readAndResetComments());
                    }
                    for (; ; ) {
                        lexer.nextToken();
                        SQLExpr more = relational();
                        if (more instanceof SQLBinaryOpExpr) {
                            parseQueryPlanHint(more);
                        }
                        group.add(more);
                        if (lexer.token == Token.AND) {
                            if (lexer.isKeepComments() && lexer.hasComment()) {
                                more.addAfterComment(lexer.readAndResetComments());
                            }
                            continue;
                        }
                        break;
                    }
                    expr = group;
                } else {
                    expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanAnd, rightExp, dbType);
                }
            } else if (token == Token.AMPAMP) {
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    expr.addAfterComment(lexer.readAndResetComments());
                }
                lexer.nextToken();
                SQLExpr rightExp = relational();
                SQLBinaryOperator operator = andRestGetAndOperator();
                expr = new SQLBinaryOpExpr(expr, operator, rightExp, dbType);
            } else if (token == Token.VARIANT) {
                // A variant ref may follow the predicate; stop if unchanged.
                SQLExpr expr1 = relationalRestVariant(expr);
                if (expr1 == expr) {
                    break;
                }
                expr = expr1;
            } else {
                break;
            }
        }
        return expr;
    }
public SQLExpr xor() {
SQLExpr expr = and();
if (lexer.token == Token.XOR) {
expr = xorRest(expr);
}
return expr;
}
public SQLExpr xorRest(SQLExpr expr) {
for (; ; ) {
if (lexer.token == Token.XOR) {
lexer.nextToken();
SQLExpr rightExp = and();
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanXor, rightExp, dbType);
} else {
break;
}
}
return expr;
}
public SQLExpr or() {
SQLExpr expr = xor();
if (lexer.token == Token.OR || lexer.token == Token.BARBAR) {
expr = orRest(expr);
}
return expr;
}
    // Operator used for '||' in orRest(); the base parser treats it as string
    // concatenation, dialect subclasses may override.
    protected SQLBinaryOperator orRestGetOrOperator() {
        return SQLBinaryOperator.Concat;
    }
    /**
     * Folds a chain of OR / '||' operators onto {@code expr}. When the
     * EnableSQLBinaryOpExprGroup feature is on, three or more OR operands are
     * collected into a single flat SQLBinaryOpExprGroup. '||' uses
     * {@link #orRestGetOrOperator()} (concat in the base parser).
     */
    public SQLExpr orRest(SQLExpr expr) {
        for (; ; ) {
            if (lexer.token == Token.OR) {
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    expr.addAfterComment(lexer.readAndResetComments());
                }
                lexer.nextToken();
                SQLExpr rightExp = xor();
                if (lexer.token == Token.OR
                        && lexer.isEnabled(SQLParserFeature.EnableSQLBinaryOpExprGroup)) {
                    // Flatten a >=3-operand OR chain into one group node.
                    SQLBinaryOpExprGroup group = new SQLBinaryOpExprGroup(SQLBinaryOperator.BooleanOr, dbType);
                    group.add(expr);
                    group.add(rightExp);
                    if (lexer.isKeepComments() && lexer.hasComment()) {
                        rightExp.addAfterComment(lexer.readAndResetComments());
                    }
                    for (; ; ) {
                        lexer.nextToken();
                        SQLExpr more = xor();
                        group.add(more);
                        if (lexer.token == Token.OR) {
                            if (lexer.isKeepComments() && lexer.hasComment()) {
                                more.addAfterComment(lexer.readAndResetComments());
                            }
                            continue;
                        }
                        break;
                    }
                    expr = group;
                } else {
                    expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanOr, rightExp, dbType);
                }
            } else if (lexer.token == Token.BARBAR) {
                lexer.nextToken();
                SQLExpr rightExp = xor();
                SQLBinaryOperator op = orRestGetOrOperator();
                expr = new SQLBinaryOpExpr(expr, op, rightExp, dbType);
            } else if (lexer.token == Token.VARIANT) {
                // A variant ref may follow the predicate; stop if unchanged.
                SQLExpr expr1 = relationalRestVariant(expr);
                if (expr == expr1) {
                    break;
                }
                expr = expr1;
            } else {
                break;
            }
        }
        return expr;
    }
public SQLExpr relational() {
SQLExpr expr = bitOr();
return relationalRest(expr);
}
    // Extension hook for a '?' operator after an expression; base is a no-op.
    protected SQLExpr relationalRestQues(SQLExpr expr) {
        return expr;
    }
    // Extension hook for a '!' operator after an expression; base is a no-op.
    protected SQLExpr relationalRestBang(SQLExpr expr) {
        return expr;
    }
protected SQLExpr relationalRestEqeq(SQLExpr expr) {
lexer.nextToken();
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.EqEq, expr());
}
    // Extension hook for a '~' (regex-match) operator; base is a no-op.
    protected SQLExpr relationalRestTilde(SQLExpr expr) {
        return expr;
    }
    // Extension hook for a '~*' operator; base is a no-op.
    protected SQLExpr relationalRestTildeStar(SQLExpr expr) {
        return expr;
    }
    // Extension hook for a '!~' operator; base is a no-op.
    protected SQLExpr relationalRestBangTilde(SQLExpr expr) {
        return expr;
    }
    // Extension hook for a '!~*' operator; base is a no-op.
    protected SQLExpr relationalRestBangTildeStar(SQLExpr expr) {
        return expr;
    }
    // Extension hook for a '~=' operator; base is a no-op.
    protected SQLExpr relationalRestTildeEq(SQLExpr expr) {
        return expr;
    }
    // Extension hook for a SIMILAR [TO] predicate; base is a no-op.
    protected SQLExpr relationalRestIdentifierSimilar(SQLExpr expr) {
        return expr;
    }
    /**
     * Folds one comparison/predicate operator onto {@code expr}, dispatching
     * on the current token (=, IS, <, >, LIKE, BETWEEN, IN, ...). If a fold
     * happened and the next token is again a comparison operator, the method
     * re-enters itself so chained predicates are consumed.
     */
    public SQLExpr relationalRest(SQLExpr expr) {
        final SQLExpr initExpr = expr;
        SQLExpr rightExp = null;
        Token token = lexer.token;
        switch (token) {
            case EQ: {
                lexer.nextToken();
                try {
                    rightExp = bitOr();
                } catch (EOFParserException e) {
                    throw new ParserException("EOF, " + expr + "=", e);
                }
                // a = b := c — the right side is itself an assignment
                if (lexer.token == Token.COLONEQ) {
                    lexer.nextToken();
                    SQLExpr colonExpr = expr();
                    rightExp = new SQLBinaryOpExpr(rightExp, SQLBinaryOperator.Assignment, colonExpr, dbType);
                }
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Equality, rightExp, dbType);
            }
            break;
            case IS: {
                lexer.nextTokenNotOrNull();
                SQLBinaryOperator op;
                if (lexer.token == Token.NOT) {
                    op = SQLBinaryOperator.IsNot;
                    lexer.nextTokenNotOrNull();
                } else {
                    op = SQLBinaryOperator.Is;
                }
                // IS [NOT] JSON [VALUE|OBJECT|ARRAY|SCALAR]
                if (lexer.identifierEquals(FnvHash.Constants.JSON)) {
                    lexer.nextToken();
                    String name = "JSON";
                    if (lexer.identifierEquals(FnvHash.Constants.VALUE)) {
                        lexer.nextToken();
                        name = "JSON VALUE";
                    } else if (lexer.identifierEquals(FnvHash.Constants.OBJECT)) {
                        lexer.nextToken();
                        name = "JSON OBJECT";
                    } else if (lexer.identifierEquals(FnvHash.Constants.ARRAY)) {
                        lexer.nextToken();
                        name = "JSON ARRAY";
                    } else if (lexer.identifierEquals(FnvHash.Constants.SCALAR)) {
                        lexer.nextToken();
                        name = "JSON SCALAR";
                    }
                    rightExp = new SQLIdentifierExpr(name);
                } else if (lexer.token == Token.DISTINCT) {
                    // IS [NOT] DISTINCT FROM
                    lexer.nextToken();
                    accept(Token.FROM);
                    if (op == SQLBinaryOperator.Is) {
                        op = SQLBinaryOperator.IsDistinctFrom;
                    } else {
                        op = SQLBinaryOperator.IsNotDistinctFrom;
                    }
                    rightExp = bitOr();
                } else {
                    rightExp = primary();
                }
                expr = new SQLBinaryOpExpr(expr, op, rightExp, dbType);
            }
            break;
            case EQGT: {
                // name => value — named procedure argument (Oracle style)
                lexer.nextToken();
                rightExp = expr();
                String argumentName = ((SQLIdentifierExpr) expr).getName();
                expr = new OracleArgumentExpr(argumentName, rightExp);
            }
            break;
            case BANGEQ:
            case CARETEQ: {
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotEqual, rightExp, dbType);
            }
            break;
            case COLONEQ: {
                lexer.nextToken();
                rightExp = expr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Assignment, rightExp, dbType);
            }
            break;
            case LT: {
                SQLBinaryOperator op = SQLBinaryOperator.LessThan;
                lexer.nextToken();
                if (lexer.token == Token.EQ) {
                    lexer.nextToken();
                    op = SQLBinaryOperator.LessThanOrEqual;
                } else if (lexer.token == Token.LT) {
                    // two LT tokens form '<<'
                    lexer.nextToken();
                    op = SQLBinaryOperator.LeftShift;
                    rightExp = additive();
                }
                if (rightExp == null) {
                    rightExp = bitOr();
                }
                expr = new SQLBinaryOpExpr(expr, op, rightExp, getDbType());
            }
            break;
            case LTEQ:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.LessThanOrEqual, rightExp, getDbType());
                break;
            case LTLTLT:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.LeftShiftUnsigned, rightExp, getDbType());
                break;
            case LTEQGT: {
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.LessThanOrEqualOrGreaterThan, rightExp, getDbType());
            }
            break;
            case GT: {
                SQLBinaryOperator op = SQLBinaryOperator.GreaterThan;
                lexer.nextToken();
                if (lexer.token == Token.EQ) {
                    lexer.nextToken();
                    op = SQLBinaryOperator.GreaterThanOrEqual;
                }
                if (lexer.token == Token.GT) {
                    // '>' followed by '>' forms '>>'
                    lexer.nextToken();
                    op = SQLBinaryOperator.RightShift;
                    rightExp = additive();
                } else if (lexer.token == Token.GTGT) {
                    // '>' followed by '>>' forms '>>>'
                    lexer.nextToken();
                    op = SQLBinaryOperator.RightShiftUnsigned;
                    rightExp = additive();
                }
                if (rightExp == null) {
                    rightExp = bitOr();
                }
                expr = new SQLBinaryOpExpr(expr, op, rightExp, dbType);
            }
            break;
            case GTEQ: {
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.GreaterThanOrEqual, rightExp, dbType);
            }
            break;
            case BANGLT: {
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotLessThan, rightExp, dbType);
            }
            break;
            case BANGGT:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotGreaterThan, rightExp, dbType);
                break;
            case LTGT:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.LessThanOrGreater, rightExp, dbType);
                break;
            case LIKE: {
                Lexer.SavePoint mark = lexer.mark();
                lexer.nextTokenValue();
                // "LIKE ," means LIKE was an identifier — back out.
                if (lexer.token == Token.COMMA) {
                    lexer.reset(mark);
                    return expr;
                }
                rightExp = bitOr();
                // A pattern that lexed as a quoted identifier (same char at
                // both ends, not a backquote) is really a string literal.
                if (rightExp.getClass() == SQLIdentifierExpr.class) {
                    String name = ((SQLIdentifierExpr) rightExp).getName();
                    int length = name.length();
                    if (length > 1
                            && name.charAt(0) == name.charAt(length - 1)
                            && name.charAt(0) != '`'
                    ) {
                        rightExp = new SQLCharExpr(name.substring(1, length - 1));
                    }
                }
                // rightExp = relationalRest(rightExp);
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Like, rightExp, dbType);
                if (lexer.token == Token.ESCAPE) {
                    lexer.nextToken();
                    rightExp = primary();
                    expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Escape, rightExp, dbType);
                }
                break;
            }
            case ILIKE:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.ILike, rightExp, dbType);
                break;
            case MONKEYS_AT_AT:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.AT_AT, rightExp, dbType);
                break;
            case MONKEYS_AT_GT:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Array_Contains, rightExp, dbType);
                break;
            case LT_MONKEYS_AT:
                lexer.nextToken();
                rightExp = bitOr();
                rightExp = relationalRest(rightExp);
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Array_ContainedBy, rightExp, dbType);
                break;
            case QUES:
                expr = relationalRestQues(expr);
                break;
            case NOT:
                lexer.nextToken();
                expr = notRationalRest(expr, false);
                break;
            case BANG:
                expr = relationalRestBang(expr);
                break;
            case BETWEEN:
                lexer.nextToken();
                SQLExpr beginExpr = relational();
                accept(Token.AND);
                SQLExpr endExpr = relational();
                expr = new SQLBetweenExpr(expr, beginExpr, endExpr);
                parseQueryPlanHint(expr);
                break;
            case IN:
            case CONTAINS:
            case GLOBAL:
                expr = inRest(expr);
                break;
            case EQEQ:
                expr = relationalRestEqeq(expr);
                break;
            case TILDE:
                expr = relationalRestTilde(expr);
                break;
            case TILDE_STAR:
                expr = relationalRestTildeStar(expr);
                break;
            case BANG_TILDE:
                expr = relationalRestBangTilde(expr);
                break;
            case BANG_TILDE_STAR:
                expr = relationalRestBangTildeStar(expr);
                break;
            case TILDE_EQ:
                expr = relationalRestTildeEq(expr);
                break;
            case RLIKE: {
                Lexer.SavePoint mark = lexer.mark();
                lexer.nextToken();
                switch (lexer.token) {
                    case COMMA:
                        // "RLIKE ," — RLIKE was an identifier; back out.
                        lexer.reset(mark);
                        break;
                    default:
                        rightExp = bitOr();
                        expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.RLike, rightExp, dbType);
                        break;
                }
                break;
            }
            case IDENTIFIER:
                // Soft keywords: SOUNDS LIKE, REGEXP, SIMILAR.
                long hash = lexer.hashLCase;
                if (hash == FnvHash.Constants.SOUNDS) {
                    lexer.nextToken();
                    accept(Token.LIKE);
                    rightExp = bitOr();
                    expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.SoudsLike, rightExp, dbType);
                } else if (hash == FnvHash.Constants.REGEXP) {
                    lexer.nextToken();
                    rightExp = bitOr();
                    expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.RegExp, rightExp, dbType);
                } else if (hash == FnvHash.Constants.SIMILAR) {
                    expr = relationalRestIdentifierSimilar(expr);
                } else {
                    return expr;
                }
                break;
            case VARIANT:
                return relationalRestVariant(expr);
            default:
                return expr;
        }
        // Nothing was consumed — do not re-enter.
        if (expr == initExpr) {
            return expr;
        }
        // Chained predicates, e.g. "a = b BETWEEN x AND y": keep folding.
        switch (lexer.token) {
            case BETWEEN:
            case IS:
            case EQ:
            case EQEQ:
            case IN:
            case CONTAINS:
            case BANG_TILDE_STAR:
            case TILDE_EQ:
            case LT:
            case LTEQ:
            case LTEQGT:
            case GT:
            case GTEQ:
            case LTGT:
            case BANGEQ:
            case LIKE:
            case NOT:
                expr = relationalRest(expr);
                break;
            default:
                break;
        }
        return expr;
    }
protected SQLExpr relationalRestVariant(SQLExpr expr) {
String value = lexer.stringVal();
lexer.nextToken();
SQLExpr variantExpr = new SQLVariantRefExpr(value);
if (lexer.token == Token.IN) {
variantExpr = inRest(variantExpr);
}
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Blank, variantExpr, dbType);
}
    /**
     * Parses the remainder of a negated relational expression after NOT has been
     * consumed: NOT LIKE / NOT IN / NOT CONTAINS / NOT BETWEEN / NOT ILIKE /
     * NOT RLIKE / NOT REGEXP.
     *
     * @param expr   the already-parsed left-hand operand
     * @param global whether the GLOBAL modifier was seen (propagated to IN-subquery nodes)
     * @return the combined negated expression
     * @throws ParserException if the token after NOT starts none of the supported forms
     */
    public SQLExpr notRationalRest(SQLExpr expr, boolean global) {
        switch (lexer.token) {
            case LIKE:
                lexer.nextTokenValue();
                SQLExpr rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotLike, rightExp, dbType);
                // Optional ESCAPE clause wraps the NOT LIKE result.
                if (lexer.token == Token.ESCAPE) {
                    lexer.nextToken();
                    rightExp = bitOr();
                    expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Escape, rightExp, dbType);
                }
                break;
            case IN:
                lexer.nextToken();
                SQLInListExpr inListExpr = new SQLInListExpr(expr, true);
                if (lexer.token == Token.LPAREN) {
                    lexer.nextToken();
                    exprList(inListExpr.getTargetList(), inListExpr);
                    expr = inListExpr;
                    switch (lexer.token) {
                        case MINUS:
                        case UNION: {
                            // A single subquery target may continue as a set operation
                            // (UNION/MINUS); extend the subquery in place.
                            List<SQLExpr> targetList = inListExpr.getTargetList();
                            if (targetList.size() == 1
                                    && targetList.get(0) instanceof SQLQueryExpr) {
                                SQLQueryExpr queryExpr = (SQLQueryExpr) targetList.get(0);
                                queryExpr.getSubQuery().getQuery().setParenthesized(queryExpr.isParenthesized());
                                SQLSelectQuery query = this.createSelectParser().queryRest(queryExpr.getSubQuery().getQuery(), true);
                                if (query != queryExpr.getSubQuery()) {
                                    queryExpr.getSubQuery().setQuery(query);
                                }
                            }
                            break;
                        }
                        default:
                            break;
                    }
                    accept(Token.RPAREN);
                } else {
                    // NOT IN without parentheses: a single value target.
                    SQLExpr valueExpr = this.primary();
                    valueExpr.setParent(inListExpr);
                    inListExpr.getTargetList().add(valueExpr);
                    expr = inListExpr;
                }
                parseQueryPlanHint(inListExpr);
                // Normalize a one-element subquery target into a NOT IN (subquery) node.
                if (inListExpr.getTargetList().size() == 1) {
                    SQLExpr targetExpr = inListExpr.getTargetList().get(0);
                    if (targetExpr instanceof SQLQueryExpr) {
                        SQLInSubQueryExpr inSubQueryExpr = new SQLInSubQueryExpr();
                        inSubQueryExpr.setNot(true);
                        inSubQueryExpr.setExpr(inListExpr.getExpr());
                        inSubQueryExpr.setSubQuery(((SQLQueryExpr) targetExpr).getSubQuery());
                        inSubQueryExpr.setGlobal(global);
                        expr = inSubQueryExpr;
                    }
                }
                break;
            case CONTAINS:
                lexer.nextToken();
                SQLContainsExpr containsExpr = new SQLContainsExpr(expr, true);
                if (lexer.token == Token.LPAREN) {
                    lexer.nextToken();
                    exprList(containsExpr.getTargetList(), containsExpr);
                    expr = containsExpr;
                    switch (lexer.token) {
                        case MINUS:
                        case UNION: {
                            // Same set-operation continuation handling as the IN branch.
                            List<SQLExpr> targetList = containsExpr.getTargetList();
                            if (targetList.size() == 1
                                    && targetList.get(0) instanceof SQLQueryExpr) {
                                SQLQueryExpr queryExpr = (SQLQueryExpr) targetList.get(0);
                                SQLSelectQuery query = this.createSelectParser().queryRest(queryExpr.getSubQuery().getQuery(), true);
                                if (query != queryExpr.getSubQuery()) {
                                    queryExpr.getSubQuery().setQuery(query);
                                }
                            }
                            break;
                        }
                        default:
                            break;
                    }
                    accept(Token.RPAREN);
                } else {
                    SQLExpr valueExpr = this.primary();
                    valueExpr.setParent(containsExpr);
                    containsExpr.getTargetList().add(valueExpr);
                    expr = containsExpr;
                }
                // Normalize a one-element subquery target into a NOT IN (subquery) node.
                if (containsExpr.getTargetList().size() == 1) {
                    SQLExpr targetExpr = containsExpr.getTargetList().get(0);
                    if (targetExpr instanceof SQLQueryExpr) {
                        SQLInSubQueryExpr inSubQueryExpr = new SQLInSubQueryExpr();
                        inSubQueryExpr.setNot(true);
                        inSubQueryExpr.setExpr(containsExpr.getExpr());
                        inSubQueryExpr.setSubQuery(((SQLQueryExpr) targetExpr).getSubQuery());
                        expr = inSubQueryExpr;
                    }
                }
                break;
            case BETWEEN:
                lexer.nextToken();
                SQLExpr beginExpr = relational();
                accept(Token.AND);
                SQLExpr endExpr = relational();
                expr = new SQLBetweenExpr(expr, true, beginExpr, endExpr);
                break;
            case ILIKE:
                lexer.nextToken();
                rightExp = bitOr();
                return new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotILike, rightExp, dbType);
            case LPAREN:
                expr = this.primary();
                break;
            case RLIKE:
                lexer.nextToken();
                rightExp = bitOr();
                expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRLike, rightExp, dbType);
                expr = relationalRest(expr);
                break;
            case IDENTIFIER:
                // Only REGEXP is recognized here; any other identifier falls through
                // and leaves expr unchanged.
                long hash = lexer.hashLCase();
                if (hash == FnvHash.Constants.REGEXP) {
                    lexer.nextToken();
                    rightExp = bitOr();
                    expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRegExp, rightExp, dbType);
                    expr = relationalRest(expr);
                }
                break;
            default:
                throw new ParserException("TODO " + lexer.info());
        }
        return expr;
    }
    /**
     * Parses a data type with strict mode enabled (see {@link #parseDataType(boolean)}).
     */
    public SQLDataType parseDataType() {
        return parseDataType(true);
    }
    /**
     * Extension hook for dialects supporting a NESTED data type; the base
     * implementation returns null so the caller continues generic parsing.
     */
    protected SQLDataType parseDataTypeNested() {
        return null;
    }
    /**
     * Extension hook invoked when "LONG" is followed by "BYTE"; dialect subclasses
     * may append to {@code typeName}. Base implementation does nothing.
     */
    protected void parseDataTypeByte(StringBuilder typeName) {
    }
    /**
     * Extension hook for DOUBLE type-name handling; dialect subclasses may append
     * to {@code typeName}. Base implementation does nothing.
     */
    protected void parseDataTypeDouble(StringBuilder typeName) {
    }
    /**
     * Extension hook for a PRECISION suffix after DOUBLE; dialect subclasses may
     * append to {@code typeName}. Base implementation does nothing.
     */
    protected void parseDataTypePrecision(StringBuilder typeName) {
    }
    /**
     * Extension hook for dialect-specific multi-word type names; called just before
     * the generic SQLDataTypeImpl is built. Base implementation does nothing.
     */
    protected void parseDataTypeComplex(StringBuilder typeName) {
    }
protected SQLDataType parseDataTypeDate(StringBuilder typeName, int sourceLine, int sourceColumn) {
SQLDataType dataType = new SQLDataTypeImpl(typeName.toString());
dataType.setDbType(dbType);
dataType.setSource(sourceLine, sourceColumn);
return dataType;
}
    /**
     * Parses a SQL data type at the current lexer position: ARRAY/MAP/STRUCT/ROW/
     * TABLE/UNIONTYPE composites, character types with charset/collate suffixes,
     * and plain named types with optional argument lists.
     *
     * @param restrict strict-mode flag (passed by callers; note it is not consulted
     *                 in this base implementation — presumably for dialect overrides)
     * @return the parsed data type, or null when the current token cannot start a
     *         data type (DEFAULT/NOT/NULL, or GENERATED/RENAME identifiers)
     */
    public SQLDataType parseDataType(boolean restrict) {
        // Record the source position up front so composite types report where they began.
        if (lexer.keepSourceLocation) {
            lexer.computeRowAndColumn();
        }
        int sourceLine = lexer.getPosLine(), sourceColumn = lexer.getPosColumn();
        Token token = lexer.token;
        if (token == Token.DEFAULT || token == Token.NOT || token == Token.NULL) {
            return null;
        }
        if (lexer.identifierEquals(FnvHash.Constants.ARRAY) || lexer.token() == Token.ARRAY) {
            return parseArrayDataType();
        }
        if (lexer.identifierEquals(FnvHash.Constants.MAP)) {
            lexer.nextToken();
            if (lexer.token == Token.LPAREN) { // presto
                lexer.nextToken();
                SQLDataType keyType = parseDataType();
                accept(Token.COMMA);
                SQLDataType valueType = parseDataType();
                accept(Token.RPAREN);
                return new SQLMapDataType(keyType, valueType, dbType);
            }
            // MAP<k, v>: a trailing GTGT must be split so the outer type still sees a GT.
            accept(Token.LT);
            SQLDataType keyType = parseDataType();
            accept(Token.COMMA);
            SQLDataType valueType = parseDataType();
            if (lexer.token == Token.GTGT) {
                lexer.token = Token.GT;
            } else {
                accept(Token.GT);
            }
            return new SQLMapDataType(keyType, valueType, dbType);
        }
        if (lexer.identifierEquals(FnvHash.Constants.STRUCT)) {
            return parseDataTypeStruct();
        } else if (lexer.token == Token.TABLE) {
            // TABLE (col defs...): column list where '*' ANY stands for a wildcard column.
            lexer.nextToken();
            SQLTableDataType table = new SQLTableDataType();
            accept(Token.LPAREN);
            for (; ; ) {
                SQLColumnDefinition column;
                if (lexer.token == Token.STAR) {
                    lexer.nextToken();
                    column = new SQLColumnDefinition();
                    column.setName("*");
                    accept(Token.ANY);
                } else {
                    column = this.parseColumn();
                }
                column.setParent(table);
                table.getColumns().add(column);
                if (lexer.token == Token.COMMA) {
                    lexer.nextToken();
                    continue;
                }
                break;
            }
            accept(Token.RPAREN);
            return table;
        } else if (lexer.identifierEquals(FnvHash.Constants.ROW) || lexer.token == Token.ROW) {
            lexer.nextToken();
            return parseDataTypeRow();
        } else if (lexer.identifierEquals(FnvHash.Constants.NESTED)) {
            // Dialect hook; null means fall through to generic name parsing.
            SQLDataType dataType = parseDataTypeNested();
            if (dataType != null) {
                return dataType;
            }
        }
        if (lexer.identifierEquals(FnvHash.Constants.UNIONTYPE)) {
            lexer.nextToken();
            accept(Token.LT);
            SQLUnionDataType unionType = new SQLUnionDataType();
            for (; ; ) {
                SQLDataType item = this.parseDataType();
                unionType.add(item);
                if (lexer.token == Token.COMMA) {
                    lexer.nextToken();
                    continue;
                }
                break;
            }
            accept(Token.GT);
            return unionType;
        }
        if (lexer.identifierEquals(FnvHash.Constants.GENERATED)
                || lexer.identifierEquals(FnvHash.Constants.RENAME)) {
            // These identifiers start column options, not data types.
            return null;
        }
        SQLName typeExpr = name();
        final long typeNameHashCode = typeExpr.nameHashCode64();
        StringBuilder typeName = new StringBuilder(typeExpr.toString());
        // Dialect hooks for multi-word names (LONG BYTE, DOUBLE PRECISION).
        if (typeNameHashCode == FnvHash.Constants.LONG
                && lexer.identifierEquals(FnvHash.Constants.BYTE)) {
            parseDataTypeByte(typeName);
        } else if (typeNameHashCode == FnvHash.Constants.DOUBLE) {
            parseDataTypeDouble(typeName);
            parseDataTypePrecision(typeName);
        }
        if (typeNameHashCode == FnvHash.Constants.UNSIGNED) {
            // UNSIGNED/SIGNED prefixing a base type name: fold the next identifier in.
            if (lexer.token == Token.IDENTIFIER) {
                typeName.append(' ').append(lexer.stringVal());
                lexer.nextToken();
            }
        } else if (typeNameHashCode == FnvHash.Constants.SIGNED) {
            if (lexer.token == Token.IDENTIFIER) {
                typeName.append(' ').append(lexer.stringVal());
                lexer.nextToken();
            }
        } else if (isCharType(typeNameHashCode)) {
            SQLCharacterDataType charType = new SQLCharacterDataType(typeName.toString());
            //for ads
            if (lexer.token == Token.LBRACKET) {
                SQLArrayDataType arrayDataType = new SQLArrayDataType(charType, dbType);
                lexer.nextToken();
                accept(Token.RBRACKET);
                arrayDataType.putAttribute("ads.arrayDataType", Boolean.TRUE);
                return arrayDataType;
            }
            if (lexer.token == Token.LPAREN) {
                lexer.nextToken();
                // ENUM takes a value list; other char types take a single length argument.
                if (typeNameHashCode == FnvHash.Constants.ENUM) {
                    exprList(charType.getArguments(), charType);
                } else {
                    SQLExpr arg = this.expr();
                    arg.setParent(charType);
                    charType.addArgument(arg);
                }
                accept(Token.RPAREN);
            }
            charType = (SQLCharacterDataType) parseCharTypeRest(charType);
            if (lexer.token == Token.HINT) {
                List<SQLCommentHint> hints = this.parseHints();
                charType.setHints(hints);
            }
            if (lexer.identifierEquals(FnvHash.Constants.ARRAY)) {
                return parseDataTypeRest(charType);
            } else if (lexer.token == Token.LBRACKET) {
                return parseDataTypeRest(charType);
            }
            return charType;
        } else if (typeNameHashCode == FnvHash.Constants.DATE) {
            return parseDataTypeRest(parseDataTypeDate(typeName, sourceLine, sourceColumn));
        }
        // NATIONAL CHAR / NATIONAL VARCHAR: two-word character type.
        if ("national".equalsIgnoreCase(typeName.toString()) &&
                (lexer.identifierEquals(FnvHash.Constants.CHAR)
                        || lexer.identifierEquals(FnvHash.Constants.VARCHAR))) {
            typeName.append(' ').append(lexer.stringVal());
            lexer.nextToken();
            SQLCharacterDataType charType = new SQLCharacterDataType(typeName.toString());
            if (lexer.token == Token.LPAREN) {
                lexer.nextToken();
                SQLExpr arg = this.expr();
                arg.setParent(charType);
                charType.addArgument(arg);
                accept(Token.RPAREN);
            }
            charType = (SQLCharacterDataType) parseCharTypeRest(charType);
            if (lexer.token == Token.HINT) {
                List<SQLCommentHint> hints = this.parseHints();
                charType.setHints(hints);
            }
            if (lexer.identifierEquals(FnvHash.Constants.ARRAY)) {
                return parseDataTypeRest(charType);
            }
            return charType;
        }
        // CHARACTER VARYING: fold the second word into the type name.
        if ("character".equalsIgnoreCase(typeName.toString()) && "varying".equalsIgnoreCase(lexer.stringVal())) {
            typeName.append(' ').append(lexer.stringVal());
            lexer.nextToken();
        }
        parseDataTypeComplex(typeName);
        SQLDataType dataType = new SQLDataTypeImpl(typeName.toString());
        dataType.setDbType(dbType);
        //for ads
        if (lexer.token == Token.LBRACKET) {
            // type[n]: bracket-style array with an optional integer size.
            dataType = new SQLArrayDataType(dataType, dbType);
            lexer.nextToken();
            if (lexer.token == Token.LITERAL_INT) {
                SQLExpr arg = this.expr();
                arg.setParent(dataType);
                dataType.getArguments().add(arg);
            }
            accept(Token.RBRACKET);
            dataType.putAttribute("ads.arrayDataType", Boolean.TRUE);
        }
        dataType.setSource(sourceLine, sourceColumn);
        return parseDataTypeRest(dataType);
    }
    /**
     * Parses an ARRAY data type in either the function form ARRAY(args...) or the
     * generic form ARRAY&lt;itemType&gt;, optionally followed by an argument list.
     */
    protected SQLArrayDataType parseArrayDataType() {
        lexer.nextToken();
        if (lexer.token == Token.LPAREN) {
            // ARRAY(arg, ...): no element type, only arguments.
            lexer.nextToken();
            SQLArrayDataType array = new SQLArrayDataType(null, dbType);
            this.exprList(array.getArguments(), array, true);
            accept(Token.RPAREN);
            return array;
        }
        accept(Token.LT);
        SQLDataType itemType = parseDataType();
        // Nested generics lex '>>'/'>>>' as one token; peel one '>' off and leave the
        // rest for the enclosing type.
        if (lexer.token == Token.GTGTGT) {
            lexer.token = Token.GTGT;
        } else if (lexer.token == Token.GTGT) {
            lexer.token = Token.GT;
        } else {
            accept(Token.GT);
        }
        SQLArrayDataType array = new SQLArrayDataType(itemType, dbType);
        if (lexer.token == Token.LPAREN) {
            lexer.nextToken();
            this.exprList(array.getArguments(), array, true);
            accept(Token.RPAREN);
        }
        return array;
    }
    /**
     * Parses STRUCT&lt;name: type [COMMENT 'text'], ...&gt;. A few keyword tokens
     * (GROUP/ORDER/FROM/TO) are accepted as field names.
     */
    protected SQLStructDataType parseDataTypeStruct() {
        acceptIdentifier("STRUCT");
        SQLStructDataType struct = new SQLStructDataType(dbType);
        accept(Token.LT);
        for (; ; ) {
            SQLName name;
            switch (lexer.token) {
                case GROUP:
                case ORDER:
                case FROM:
                case TO:
                    // These keywords are legal field names inside a struct.
                    name = new SQLIdentifierExpr(lexer.stringVal());
                    lexer.nextToken();
                    break;
                default:
                    name = this.name();
                    break;
            }
            accept(Token.COLON);
            SQLDataType dataType = this.parseDataType();
            SQLStructDataType.Field field = struct.addField(name, dataType);
            if (lexer.token == Token.COMMENT) {
                lexer.nextToken();
                SQLCharExpr chars = (SQLCharExpr) this.primary();
                field.setComment(chars.getText());
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        // Peel a single '>' off a combined '>>'/'>>>' token (nested generics).
        if (lexer.token == Token.GTGTGT) {
            lexer.token = Token.GTGT;
        } else if (lexer.token == Token.GTGT) {
            lexer.token = Token.GT;
        } else {
            accept(Token.GT);
        }
        return struct;
    }
    /**
     * Parses ROW(name type [COMMENT 'text'], ...) where each field may be either a
     * named field, a nested ROW, or an anonymous bare type. Uses mark/reset to
     * disambiguate "name," (anonymous type) from "name type".
     */
    protected SQLRowDataType parseDataTypeRow() {
        SQLRowDataType struct = new SQLRowDataType(dbType);
        accept(Token.LPAREN);
        for (; ; ) {
            SQLDataType dataType = null;
            Lexer.SavePoint mark = lexer.mark();
            SQLName name;
            switch (lexer.token) {
                case GROUP:
                case ORDER:
                case FROM:
                case TO:
                    // Keywords accepted as field names.
                    name = new SQLIdentifierExpr(lexer.stringVal());
                    lexer.nextToken();
                    break;
                case ROW:
                    lexer.nextToken();
                    name = null;
                    dataType = this.parseDataTypeRow();
                    break;
                default:
                    name = this.name();
                    break;
            }
            if (lexer.token == Token.COMMA) {
                // What we read was a bare type, not a name: re-parse it as a type.
                lexer.reset(mark);
                dataType = this.parseDataType();
                struct.addField(null, dataType);
                lexer.nextToken();
                continue;
            }
            if (lexer.token != Token.RPAREN) {
                dataType = this.parseDataType();
            }
            SQLStructDataType.Field field = struct.addField(name, dataType);
            if (lexer.token == Token.COMMENT) {
                lexer.nextToken();
                SQLCharExpr chars = (SQLCharExpr) this.primary();
                field.setComment(chars.getText());
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
        return struct;
    }
protected void parseDataTypeParams(Collection<SQLExpr> exprCol, SQLDataType parent) {
if (nestedDataType.contains(parent.getName().toLowerCase())) {
exprList(exprCol, parent, true);
} else {
exprList(exprCol, parent);
}
}
    /**
     * Parses suffixes after a data-type name: a parenthesized argument list,
     * DOUBLE PRECISION, WITH/WITHOUT TIME ZONE for TIMESTAMP/TIME, and
     * INTERVAL unit [TO unit] qualifiers.
     *
     * @param dataType the data type parsed so far; mutated and returned
     */
    protected SQLDataType parseDataTypeRest(SQLDataType dataType) {
        if (lexer.token == Token.LPAREN) {
            lexer.nextToken();
            parseDataTypeParams(dataType.getArguments(), dataType);
            accept(Token.RPAREN);
        }
        if (lexer.identifierEquals(FnvHash.Constants.PRECISION)
                && dataType.nameHashCode64() == FnvHash.Constants.DOUBLE) {
            lexer.nextToken();
            dataType.setName("DOUBLE PRECISION");
        }
        long nameHash = dataType.nameHashCode64();
        if (FnvHash.Constants.TIMESTAMP == nameHash || FnvHash.Constants.TIME == nameHash) {
            if (lexer.identifierEquals(FnvHash.Constants.WITHOUT)) {
                lexer.nextToken();
                acceptIdentifier("TIME");
                acceptIdentifier("ZONE");
                dataType.setWithTimeZone(false);
            } else if (lexer.token == Token.WITH) {
                // WITH may start other clauses; only commit when followed by TIME.
                Lexer.SavePoint mark = lexer.mark();
                lexer.nextToken();
                if (lexer.identifierEquals(FnvHash.Constants.TIME)) {
                    lexer.nextToken();
                    acceptIdentifier("ZONE");
                    dataType.setWithTimeZone(true);
                } else {
                    lexer.reset(mark);
                }
            }
        } else if (FnvHash.Constants.INTERVAL == nameHash) {
            // INTERVAL YEAR [TO MONTH] etc.: fold the unit words into the type name.
            if (lexer.token == Token.IDENTIFIER) {
                String interval = dataType.getName();
                if (SQLIntervalUnit.of(lexer.stringVal) != null) {
                    interval += ' ' + lexer.stringVal;
                    lexer.nextToken();
                    if (lexer.nextIf(Token.TO)) {
                        interval += " to " + lexer.stringVal;
                        lexer.nextToken();
                        dataType.setName(interval);
                    }
                }
            }
        }
        return dataType;
    }
protected boolean isCharType(String dataTypeName) {
long hash = FnvHash.hashCode64(dataTypeName);
return isCharType(hash);
}
protected boolean isCharType(long hash) {
return hash == FnvHash.Constants.CHAR
|| hash == FnvHash.Constants.VARCHAR
|| hash == FnvHash.Constants.NCHAR
|| hash == FnvHash.Constants.NVARCHAR
|| hash == FnvHash.Constants.TINYTEXT
|| hash == FnvHash.Constants.TEXT
|| hash == FnvHash.Constants.MEDIUMTEXT
|| hash == FnvHash.Constants.LONGTEXT
|| hash == FnvHash.Constants.STRING
|| hash == FnvHash.Constants.ENUM;
}
    /**
     * Parses suffixes of a character data type: BINARY, CHARACTER SET / CHARSET,
     * and COLLATE (with an optional dotted qualifier).
     *
     * @throws ParserException when a charset or collation name token is missing
     */
    protected SQLDataType parseCharTypeRest(SQLCharacterDataType charType) {
        if (lexer.token == Token.BINARY) {
            charType.setHasBinary(true);
            lexer.nextToken();
        }
        if (lexer.identifierEquals(FnvHash.Constants.CHARACTER)) {
            lexer.nextToken();
            accept(Token.SET);
            if (lexer.token != Token.IDENTIFIER
                    && lexer.token != Token.LITERAL_CHARS
                    && lexer.token != Token.BINARY) {
                throw new ParserException(lexer.info());
            }
            charType.setCharSetName(lexer.stringVal());
            lexer.nextToken();
        } else if (lexer.identifierEquals(FnvHash.Constants.CHARSET)) {
            lexer.nextToken();
            if (lexer.token != Token.IDENTIFIER
                    && lexer.token != Token.LITERAL_CHARS
                    && lexer.token != Token.BINARY) {
                throw new ParserException(lexer.info());
            }
            charType.setCharSetName(lexer.stringVal());
            lexer.nextToken();
        }
        // BINARY may appear before or after the charset clause.
        if (lexer.token == Token.BINARY) {
            charType.setHasBinary(true);
            lexer.nextToken();
        }
        if (lexer.identifierEquals(FnvHash.Constants.COLLATE)) {
            lexer.nextToken();
            if (lexer.token == Token.LITERAL_ALIAS
                    || lexer.token == Token.IDENTIFIER
                    || lexer.token == Token.LITERAL_CHARS) {
                StringBuilder collate = new StringBuilder(lexer.stringVal());
                lexer.nextToken();
                // Qualified collation name: schema.collation
                if (lexer.token == Token.DOT) {
                    lexer.nextToken();
                    if (lexer.token == Token.LITERAL_ALIAS
                            || lexer.token == Token.IDENTIFIER
                            || lexer.token == Token.LITERAL_CHARS) {
                        collate.append(".").append(lexer.stringVal());
                        lexer.nextToken();
                    } else {
                        throw new ParserException(lexer.info());
                    }
                }
                charType.setCollate(collate.toString());
            } else {
                throw new ParserException(lexer.info());
            }
        }
        return charType;
    }
@Override
public void accept(Token token) {
if (lexer.token == token) {
lexer.nextToken();
} else {
StringBuilder sb = new StringBuilder();
sb.append("syntax error, expect ");
sb.append((token.name != null ? token.name : token.toString()));
sb.append(", actual ");
sb.append((lexer.token.name != null ? lexer.token.name : lexer.token.toString()));
sb.append(" ");
sb.append(lexer.info());
throw new ParserException(sb.toString());
}
}
    /**
     * Parses a column definition with no parent node (see {@link #parseColumn(SQLObject)}).
     */
    public SQLColumnDefinition parseColumn() {
        return parseColumn(null);
    }
    /**
     * Parses one column definition: optional IF NOT EXISTS, the column name, an
     * optional data type, then any constraints/options via parseColumnRest.
     *
     * @param parent AST parent to attach the column to (may be null)
     */
    public SQLColumnDefinition parseColumn(SQLObject parent) {
        SQLColumnDefinition column = createColumnDefinition();
        column.setParent(parent);
        column.setDbType(dbType);
        if (Token.IF == lexer.token) {
            lexer.nextToken();
            accept(Token.NOT);
            accept(Token.EXISTS);
            column.setIfNotExists(true);
        }
        column.setName(
                name());
        final Token token = lexer.token;
        // These tokens mean the data type is absent (e.g. ALTER forms or a bare name).
        if (token != Token.SET //
                && token != Token.DROP
                && token != Token.PRIMARY
                && token != Token.RPAREN
                && token != Token.COMMA
                && token != Token.COMMENT
        ) {
            column.setDataType(
                    parseDataType());
        }
        return parseColumnRest(column);
    }
public SQLColumnDefinition createColumnDefinition() {
SQLColumnDefinition column = new SQLColumnDefinition();
column.setDbType(dbType);
return column;
}
    /**
     * Extension hook called from parseColumnRest's default branch; dialect
     * subclasses handle dialect-specific column options here. Base: no-op.
     */
    protected SQLColumnDefinition parseColumnSpecific(SQLColumnDefinition column) {
        return column;
    }
    /**
     * Parses the expression following DEFAULT in a column definition; dialect
     * subclasses may override the expression grammar used.
     */
    protected SQLExpr parseColumnRestDefault() {
        return bitOr();
    }
    /**
     * Extension hook for continuing a COMMENT string literal (e.g. adjacent
     * literals in some dialects); may append to {@code stringVal}. Base: no-op.
     */
    protected void parseColumnCommentLiteralCharsRest(StringBuilder stringVal) {
    }
    /**
     * Parses the trailing options of a column definition — DEFAULT, NOT NULL,
     * NULL, PRIMARY KEY, UNIQUE, KEY, REFERENCES, named CONSTRAINT forms, CHECK,
     * AUTO_INCREMENT (with BY GROUP/TIME/SIMPLE sequence variants), UNIT
     * COUNT/INDEX/STEP, identity "(seed, increment)", and COMMENT. Recurses after
     * each option until no more options match.
     *
     * @param column the column being built; mutated and returned
     * @throws ParserException on malformed constraint or sequence clauses
     */
    public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) {
        switch (lexer.token) {
            case DEFAULT:
                lexer.nextToken();
                SQLExpr defaultExpr = parseColumnRestDefault();
                column.setDefaultExpr(defaultExpr);
                return parseColumnRest(column);
            case NOT: {
                lexer.nextToken();
                accept(Token.NULL);
                SQLNotNullConstraint notNull = new SQLNotNullConstraint();
                if (lexer.token == Token.HINT) {
                    List<SQLCommentHint> hints = this.parseHints();
                    notNull.setHints(hints);
                }
                column.addConstraint(notNull);
                return parseColumnRest(column);
            }
            case NULL:
                lexer.nextToken();
                column.getConstraints().add(new SQLNullConstraint());
                return parseColumnRest(column);
            case PRIMARY:
                lexer.nextToken();
                accept(Token.KEY);
                column.addConstraint(new SQLColumnPrimaryKey());
                return parseColumnRest(column);
            case UNIQUE:
                lexer.nextToken();
                // UNIQUE KEY is accepted as a synonym of UNIQUE.
                if (lexer.token == Token.KEY) {
                    lexer.nextToken();
                }
                column.addConstraint(new SQLColumnUniqueKey());
                return parseColumnRest(column);
            case DISABLE:
                lexer.nextToken();
                if (lexer.stringVal.equalsIgnoreCase("novalidate")) {
                    column.setDisableNovalidate(true);
                }
                lexer.nextToken();
                return parseColumnRest(column);
            case KEY:
                lexer.nextToken();
                column.addConstraint(new SQLColumnPrimaryKey());
                return parseColumnRest(column);
            case REFERENCES: {
                SQLColumnReference ref = parseReference();
                column.addConstraint(ref);
                return parseColumnRest(column);
            }
            case CONSTRAINT:
                // Named constraint: CONSTRAINT name {PRIMARY KEY|UNIQUE|REFERENCES|NOT NULL|CHECK|DEFAULT}
                lexer.nextToken();
                SQLName name = this.name();
                if (lexer.token == Token.PRIMARY) {
                    lexer.nextToken();
                    accept(Token.KEY);
                    SQLColumnPrimaryKey pk = new SQLColumnPrimaryKey();
                    pk.setName(name);
                    column.addConstraint(pk);
                    return parseColumnRest(column);
                }
                if (lexer.token == Token.UNIQUE) {
                    lexer.nextToken();
                    SQLColumnUniqueKey uk = new SQLColumnUniqueKey();
                    uk.setName(name);
                    column.addConstraint(uk);
                    return parseColumnRest(column);
                }
                if (lexer.token == Token.REFERENCES) {
                    SQLColumnReference ref = parseReference();
                    ref.setName(name);
                    column.addConstraint(ref);
                    return parseColumnRest(column);
                }
                if (lexer.token == Token.NOT) {
                    lexer.nextToken();
                    accept(Token.NULL);
                    SQLNotNullConstraint notNull = new SQLNotNullConstraint();
                    notNull.setName(name);
                    column.addConstraint(notNull);
                    return parseColumnRest(column);
                }
                if (lexer.token == Token.CHECK) {
                    SQLColumnCheck check = parseColumnCheck();
                    check.setName(name);
                    check.setParent(column);
                    column.addConstraint(check);
                    return parseColumnRest(column);
                }
                if (lexer.token == Token.DEFAULT) {
                    lexer.nextToken();
                    SQLExpr expr = this.expr();
                    column.setDefaultExpr(expr);
                    return parseColumnRest(column);
                }
                throw new ParserException("TODO : " + lexer.info());
            case CHECK:
                SQLColumnCheck check = parseColumnCheck();
                column.addConstraint(check);
                return parseColumnRest(column);
            case IDENTIFIER:
                long hash = lexer.hashLCase();
                if (hash == FnvHash.Constants.AUTO_INCREMENT) {
                    lexer.nextToken();
                    column.setAutoIncrement(true);
                    //sequence parser
                    // AUTO_INCREMENT BY {GROUP [UNIT COUNT n [INDEX i] [STEP s]] | TIME | SIMPLE [WITH CACHE]}
                    if (lexer.token == Token.BY) {
                        lexer.nextToken();
                        if (lexer.hashLCase() == FnvHash.Constants.GROUP) {
                            lexer.nextToken();
                            column.setSequenceType(AutoIncrementType.GROUP);
                            if (lexer.identifierEquals(FnvHash.Constants.UNIT)) {
                                lexer.nextToken();
                                if (lexer.identifierEquals(FnvHash.Constants.COUNT)) {
                                    lexer.nextToken();
                                    SQLExpr unitCount = primary();
                                    column.setUnitCount(unitCount);
                                }
                                if (lexer.token == Token.INDEX) {
                                    lexer.nextToken();
                                    SQLExpr unitIndex = primary();
                                    column.setUnitIndex(unitIndex);
                                }
                                if (lexer.hashLCase() == FnvHash.Constants.STEP) {
                                    lexer.nextToken();
                                    SQLExpr step = primary();
                                    column.setStep(step);
                                }
                            } else {
                                return parseColumnRest(column);
                            }
                        } else if (lexer.hashLCase() == FnvHash.Constants.TIME) {
                            lexer.nextToken();
                            column.setSequenceType(AutoIncrementType.TIME);
                            return parseColumnRest(column);
                        } else if (lexer.hashLCase() == FnvHash.Constants.SIMPLE) {
                            lexer.nextToken();
                            if (lexer.hashLCase() == FnvHash.Constants.WITH) {
                                lexer.nextToken();
                                if (lexer.hashLCase() == FnvHash.Constants.CACHE) {
                                    column.setSequenceType(AutoIncrementType.SIMPLE_CACHE);
                                } else {
                                    throw new ParserException("TODO : " + lexer.info());
                                }
                                lexer.nextToken();
                                return parseColumnRest(column);
                            } else {
                                column.setSequenceType(AutoIncrementType.SIMPLE);
                                return parseColumnRest(column);
                            }
                        }
                        return parseColumnRest(column);
                    } else if (lexer.identifierEquals(FnvHash.Constants.UNIT)) {
                        lexer.nextToken();
                        if (lexer.identifierEquals(FnvHash.Constants.COUNT)) {
                            lexer.nextToken();
                            SQLExpr unitCount = primary();
                            column.setUnitCount(unitCount);
                        }
                        if (lexer.token == Token.INDEX) {
                            lexer.nextToken();
                            SQLExpr unitIndex = primary();
                            column.setUnitIndex(unitIndex);
                        }
                        if (lexer.hashLCase() == FnvHash.Constants.STEP) {
                            lexer.nextToken();
                            // NOTE(review): local is named unitIndex but is stored as the step value.
                            SQLExpr unitIndex = primary();
                            column.setStep(unitIndex);
                        }
                    } else if (lexer.token == Token.LPAREN) {
                        // identity "(seed, increment)" form
                        column.setIdentity(parseIdentity());
                    }
                    return parseColumnRest(column);
                }
                break;
            case COMMENT:
                lexer.nextToken();
                if (lexer.token == Token.LITERAL_ALIAS) {
                    // Strip surrounding double quotes from an alias-style comment.
                    String alias = lexer.stringVal();
                    if (alias.length() > 2 && alias.charAt(0) == '"' && alias.charAt(alias.length() - 1) == '"') {
                        alias = alias.substring(1, alias.length() - 1);
                    }
                    column.setComment(alias);
                    lexer.nextToken();
                } else if (lexer.token == Token.LITERAL_CHARS) {
                    StringBuilder stringVal = new StringBuilder(lexer.stringVal());
                    lexer.nextToken();
                    parseColumnCommentLiteralCharsRest(stringVal);
                    column.setComment(stringVal.toString());
                } else {
                    column.setComment(primary());
                }
                return parseColumnRest(column);
            default:
                return parseColumnSpecific(column);
        }
        return column;
    }
    /**
     * Extension hook called at the start of parseIdentity, before the seed value;
     * dialect subclasses may consume extra tokens. Base: no-op.
     */
    protected void parseIdentifySpecific() {
    }
    /**
     * Extension hook called at the end of parsePrimaryKey for dialect-specific
     * trailing options. Base: no-op.
     */
    protected void parsePrimaryKeyRest(SQLPrimaryKeyImpl primaryKey){
    }
    /**
     * Extension hook for dialect-specific foreign-key trailing options. Base: no-op.
     */
    protected void parseForeignKeyRest(SQLForeignKeyImpl foreignKey){
    }
    /**
     * Parses an identity clause of the form "(seed [, increment])" where both
     * values must be integer literals.
     *
     * @throws ParserException when a non-integer appears where a value is expected
     */
    protected SQLColumnDefinition.Identity parseIdentity() {
        accept(Token.LPAREN);
        SQLColumnDefinition.Identity ident = new SQLColumnDefinition.Identity();
        parseIdentifySpecific();
        if (lexer.token == Token.LITERAL_INT) {
            ident.setSeed(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            throw new ParserException("TODO : " + lexer.info());
        }
        if (lexer.token == Token.COMMA) {
            lexer.nextToken();
            if (lexer.token == Token.LITERAL_INT) {
                ident.setIncrement(lexer.integerValue().intValue());
                lexer.nextToken();
            } else {
                throw new ParserException("TODO : " + lexer.info());
            }
        }
        accept(Token.RPAREN);
        return ident;
    }
    /**
     * Parses a column-level REFERENCES clause: target table, optional column list,
     * optional MATCH FULL/PARTIAL/SIMPLE, and any number of
     * ON DELETE/ON UPDATE reference options.
     *
     * @throws ParserException on an unknown MATCH keyword or ON target
     */
    protected SQLColumnReference parseReference() {
        SQLColumnReference fk = new SQLColumnReference();
        lexer.nextToken();
        fk.setTable(this.name());
        if (lexer.nextIf(LPAREN)) {
            this.names(fk.getColumns(), fk);
            accept(Token.RPAREN);
        }
        if (lexer.identifierEquals(FnvHash.Constants.MATCH)) {
            lexer.nextToken();
            if (lexer.identifierEquals("FULL") || lexer.token() == Token.FULL) {
                fk.setReferenceMatch(SQLForeignKeyImpl.Match.FULL);
                lexer.nextToken();
            } else if (lexer.identifierEquals(FnvHash.Constants.PARTIAL)) {
                fk.setReferenceMatch(SQLForeignKeyImpl.Match.PARTIAL);
                lexer.nextToken();
            } else if (lexer.identifierEquals(FnvHash.Constants.SIMPLE)) {
                fk.setReferenceMatch(SQLForeignKeyImpl.Match.SIMPLE);
                lexer.nextToken();
            } else {
                throw new ParserException("TODO : " + lexer.info());
            }
        }
        // Both ON DELETE and ON UPDATE may appear, in either order.
        while (lexer.token() == Token.ON) {
            lexer.nextToken();
            if (lexer.token() == Token.DELETE) {
                lexer.nextToken();
                SQLForeignKeyImpl.Option option = parseReferenceOption();
                fk.setOnDelete(option);
            } else if (lexer.token() == Token.UPDATE) {
                lexer.nextToken();
                SQLForeignKeyImpl.Option option = parseReferenceOption();
                fk.setOnUpdate(option);
            } else {
                throw new ParserException("syntax error, expect DELETE or UPDATE, actual " + lexer.token() + " "
                        + lexer.info());
            }
        }
        return fk;
    }
    /**
     * Parses a single referential action: RESTRICT, CASCADE, SET NULL,
     * SET DEFAULT, or NO ACTION.
     *
     * @throws ParserException on any other token sequence
     */
    protected SQLForeignKeyImpl.Option parseReferenceOption() {
        SQLForeignKeyImpl.Option option;
        if (lexer.token() == Token.RESTRICT || lexer.identifierEquals(FnvHash.Constants.RESTRICT)) {
            option = SQLForeignKeyImpl.Option.RESTRICT;
            lexer.nextToken();
        } else if (lexer.identifierEquals(FnvHash.Constants.CASCADE) || lexer.token == CASCADE) {
            option = SQLForeignKeyImpl.Option.CASCADE;
            lexer.nextToken();
        } else if (lexer.token() == Token.SET) {
            lexer.nextToken();
            if (lexer.token() == Token.NULL) {
                accept(Token.NULL);
                option = SQLForeignKeyImpl.Option.SET_NULL;
            } else if (lexer.token == Token.DEFAULT) {
                accept(Token.DEFAULT);
                option = SQLForeignKeyImpl.Option.SET_DEFAULT;
            } else {
                throw new ParserException("syntax error," + lexer.info());
            }
        } else if (lexer.identifierEquals(FnvHash.Constants.NO)) {
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.ACTION)) {
                option = SQLForeignKeyImpl.Option.NO_ACTION;
                lexer.nextToken();
            } else {
                throw new ParserException("syntax error, expect ACTION, actual " + lexer.token() + " "
                        + lexer.info());
            }
        } else {
            throw new ParserException("syntax error, expect ACTION, actual " + lexer.token() + " "
                    + lexer.info());
        }
        return option;
    }
    /**
     * Parses a CHECK constraint: the check expression plus optional state keywords
     * DISABLE/ENABLE, VALIDATE/NOVALIDATE, RELY/NORELY, and [NOT] ENFORCED.
     */
    protected SQLColumnCheck parseColumnCheck() {
        lexer.nextToken();
        SQLExpr expr = this.primary();
        SQLColumnCheck check = new SQLColumnCheck(expr);
        if (lexer.token == Token.DISABLE) {
            lexer.nextToken();
            check.setEnable(false);
        } else if (lexer.token == Token.ENABLE) {
            lexer.nextToken();
            check.setEnable(true);
        } else if (lexer.identifierEquals(FnvHash.Constants.VALIDATE)) {
            lexer.nextToken();
            check.setValidate(Boolean.TRUE);
        } else if (lexer.identifierEquals(FnvHash.Constants.NOVALIDATE)) {
            lexer.nextToken();
            check.setValidate(Boolean.FALSE);
        } else if (lexer.identifierEquals(FnvHash.Constants.RELY)) {
            lexer.nextToken();
            check.setRely(Boolean.TRUE);
        } else if (lexer.identifierEquals(FnvHash.Constants.NORELY)) {
            lexer.nextToken();
            check.setRely(Boolean.FALSE);
        } else if (lexer.identifierEquals("ENFORCED")) {
            lexer.nextToken();
            check.setEnforced(true);
        } else if (lexer.token == Token.NOT) {
            // NOT is only committed when followed by ENFORCED; otherwise roll back.
            Lexer.SavePoint mark = lexer.mark();
            lexer.nextToken();
            if (lexer.identifierEquals("ENFORCED")) {
                lexer.nextToken();
                check.setEnforced(false);
            } else {
                lexer.reset(mark);
            }
        }
        return check;
    }
    /**
     * Parses a table-level PRIMARY KEY constraint: optional CLUSTERED, the ordered
     * column list, and an optional DISABLE NOVALIDATE suffix; then delegates to
     * the dialect hook parsePrimaryKeyRest.
     */
    public SQLPrimaryKey parsePrimaryKey() {
        accept(Token.PRIMARY);
        accept(Token.KEY);
        SQLPrimaryKeyImpl pk = new SQLPrimaryKeyImpl();
        if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
            lexer.nextToken();
            pk.setClustered(true);
        }
        accept(Token.LPAREN);
        orderBy(pk.getColumns(), pk);
        accept(Token.RPAREN);
        if (lexer.token == Token.DISABLE) {
            lexer.nextToken();
            acceptIdentifier("NOVALIDATE");
            pk.setDisableNovalidate(true);
        }
        parsePrimaryKeyRest(pk);
        return pk;
    }
    /**
     * Parses a table-level UNIQUE constraint: the ordered column list plus
     * optional state keywords DISABLE NOVALIDATE, ENABLE, VALIDATE/NOVALIDATE,
     * and RELY/NORELY.
     */
    public SQLUnique parseUnique() {
        accept(Token.UNIQUE);
        SQLUnique unique = new SQLUnique();
        accept(Token.LPAREN);
        orderBy(unique.getColumns(), unique);
        accept(Token.RPAREN);
        if (lexer.token == Token.DISABLE) {
            lexer.nextToken();
            // DISABLE alone is consumed; only commit when followed by NOVALIDATE.
            SavePoint savePoint = lexer.mark();
            if ("NOVALIDATE".equalsIgnoreCase(lexer.stringVal())) {
                unique.setDisableNovalidate(true);
                lexer.nextToken();
            } else {
                lexer.reset(savePoint);
            }
        } else if (lexer.token == Token.ENABLE) {
            lexer.nextToken();
            unique.setEnable(true);
        } else if (lexer.identifierEquals(FnvHash.Constants.VALIDATE)) {
            lexer.nextToken();
            unique.setValidate(Boolean.TRUE);
        } else if (lexer.identifierEquals(FnvHash.Constants.NOVALIDATE)) {
            lexer.nextToken();
            unique.setValidate(Boolean.FALSE);
        } else if (lexer.identifierEquals(FnvHash.Constants.RELY)) {
            lexer.nextToken();
            unique.setRely(Boolean.TRUE);
        } else if (lexer.identifierEquals(FnvHash.Constants.NORELY)) {
            lexer.nextToken();
            unique.setRely(Boolean.FALSE);
        }
        return unique;
    }
public void parseAssignItem(List<SQLAssignItem> outList, SQLObject parent) {
accept(Token.LPAREN);
if (lexer.token() != Token.RPAREN) {
for (; ; ) {
SQLAssignItem item = this.parseAssignItem(true, parent);
item.setParent(parent);
outList.add(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
}
accept(Token.RPAREN);
}
    /**
     * Parses one assignment item with variant-name detection enabled and no parent.
     */
    public SQLAssignItem parseAssignItem() {
        return parseAssignItem(true, null);
    }
    /**
     * Parses one assignment item with no parent node.
     *
     * @param variant whether identifier targets may be reinterpreted as variant refs
     */
    public SQLAssignItem parseAssignItem(boolean variant) {
        return parseAssignItem(variant, null);
    }
    /**
     * Extension hook invoked at the start of parseAssignItem; dialect subclasses
     * may consume a leading dot form. Base: no-op.
     */
    protected void parseAssignItemDot() {
    }
    /**
     * Extension hook invoked before the assignment target is parsed. Base: no-op.
     */
    protected void parseAssignItemNcToBeExecuted() {
    }
    /**
     * Extension hook for a TBLPROPERTIES target; returning true means the item is
     * fully parsed and parseAssignItem should return it immediately. Base: false.
     */
    protected boolean parseAssignItemTblProperties(SQLAssignItem item) {
        return false;
    }
    /**
     * Extension hook allowing dialects to rewrite a property-expression target;
     * the base implementation returns the expression unchanged.
     */
    protected SQLExpr parseAssignItemSQLPropertyExprAndSub(SQLExpr sqlExpr) {
        return sqlExpr;
    }
    /**
     * Extension hook invoked on an identifier target; dialect subclasses may
     * mutate it in place. Base: no-op.
     */
    protected void parseAssignItemSQLIdentifierExpr(SQLExpr sqlExpr) {
    }
    /**
     * Extension hook allowing dialects to rewrite a property-expression target;
     * the base implementation returns the expression unchanged.
     */
    protected SQLExpr parseAssignItemSQLPropertyExpr(SQLExpr sqlExpr) {
        return sqlExpr;
    }
    /**
     * Extension hook invoked on an identifier target when variant detection is
     * active; dialect subclasses may mutate the identifier. Base: no-op.
     */
    protected void parseAssignItemSQLIdentifierExprAndVariant(SQLIdentifierExpr ident) {
    }
    /**
     * Extension hook for method-invocation targets; returning true means the item
     * is fully parsed and should be returned immediately. Base: false.
     */
    protected boolean parseAssignItemSQLMethodInvokeExpr(SQLExpr sqlExpr, SQLAssignItem item) {
        return false;
    }
    /**
     * Consumes the '=' between target and value; dialect subclasses may override
     * to handle extra tokens around it.
     */
    protected void parseAssignItemEq(SQLObject parent) {
        lexer.nextToken();
    }
    /**
     * Extension hook allowing dialects to rewrite a parsed value expression
     * (float-literal handling); base returns it unchanged.
     */
    protected SQLExpr parseAssignItemOnLiteralFloat(SQLExpr sqlExpr) {
        return sqlExpr;
    }
    /**
     * Extension hook allowing dialects to rewrite a parsed value expression
     * (colon handling); base returns it unchanged.
     */
    protected SQLExpr parseAssignItemOnColon(SQLExpr sqlExpr) {
        return sqlExpr;
    }
    /**
     * Stores the parsed value expression on the item; dialect subclasses may
     * override to post-process comma-separated values.
     */
    protected void parseAssignItemOnComma(SQLExpr sqlExpr, SQLAssignItem item, SQLObject parent) {
        item.setValue(sqlExpr);
    }
    /**
     * Parses one target[=value] assignment item. The target may be rewritten by
     * dialect hooks; identifiers containing '@', '#', ':' or ' ' become variant
     * refs when {@code variant} is true. The separator accepted is ':=', '=',
     * '==' (dialect-gated) or — for some dialects — none at all; special values
     * TRUE and ON short-circuit.
     *
     * @param variant whether identifier targets may become variant refs
     * @param parent  AST parent passed through to hooks (may be null)
     */
    public SQLAssignItem parseAssignItem(boolean variant, SQLObject parent) {
        SQLAssignItem item = new SQLAssignItem();
        parseAssignItemDot();
        SQLExpr var;
        parseAssignItemNcToBeExecuted();
        if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
            var = new SQLIdentifierExpr(lexer.stringVal());
            lexer.nextToken();
            if (parseAssignItemTblProperties(item)) {
                return item;
            }
        } else {
            var = primary();
        }
        // Dialect hooks may rewrite the parsed target.
        var = parseAssignItemSQLPropertyExprAndSub(var);
        parseAssignItemSQLIdentifierExpr(var);
        var = parseAssignItemSQLPropertyExpr(var);
        if (variant && var instanceof SQLIdentifierExpr) {
            SQLIdentifierExpr ident = (SQLIdentifierExpr) var;
            parseAssignItemSQLIdentifierExprAndVariant(ident);
            String identName = (ident).getName();
            // Names containing these characters are variant references, not identifiers.
            if (identName.indexOf('@') != -1
                    || identName.indexOf('#') != -1
                    || identName.indexOf(':') != -1
                    || identName.indexOf(' ') != -1) {
                var = new SQLVariantRefExpr(identName);
            }
        }
        if (parseAssignItemSQLMethodInvokeExpr(var, item)) {
            return item;
        }
        item.setTarget(var);
        if (lexer.token == Token.COLONEQ) {
            lexer.nextToken();
        } else if (lexer.token == Token.TRUE || lexer.identifierEquals(FnvHash.Constants.TRUE)) {
            lexer.nextToken();
            item.setValue(new SQLBooleanExpr(true));
            return item;
        } else if (lexer.token == Token.ON) {
            lexer.nextToken();
            item.setValue(new SQLIdentifierExpr("ON"));
            return item;
        } else if ((lexer.token == Token.RPAREN || lexer.token == Token.COMMA || lexer.token == Token.SET)
                && dialectFeatureEnabled(ParseAssignItemRparenCommaSetReturn)) {
            // Dialects that allow a bare target with no value.
            return item;
        } else {
            if (lexer.token == Token.EQ) {
                parseAssignItemEq(parent);
                if (lexer.token == Token.SEMI && dialectFeatureEnabled(ParseAssignItemEqSemiReturn)) {
                    return item;
                }
            } else if (dialectFeatureEnabled(ParseAssignItemSkip)
                    || lexer.token == Token.QUES
                    || lexer.token == Token.LITERAL_CHARS
                    || lexer.token == Token.LITERAL_ALIAS
                    || lexer.identifierEquals("utf8mb4")
            ) {
                // skip
            } else if (lexer.token == Token.EQEQ && dialectFeatureEnabled(ParseAssignItemEqeq)) {
                lexer.nextToken();
            } else {
                accept(Token.EQ);
            }
        }
        // Value: ON and ALL are taken verbatim as identifiers; otherwise a full
        // expression, post-processed by dialect hooks.
        if (lexer.token == Token.ON) {
            item.setValue(new SQLIdentifierExpr(lexer.stringVal()));
            lexer.nextToken();
        } else {
            if (lexer.token == Token.ALL) {
                item.setValue(new SQLIdentifierExpr(lexer.stringVal()));
                lexer.nextToken();
            } else {
                SQLExpr expr = expr();
                expr = parseAssignItemOnLiteralFloat(expr);
                expr = parseAssignItemOnColon(expr);
                parseAssignItemOnComma(expr, item, parent);
            }
        }
        return item;
    }
/**
 * Parses all consecutive hints at the current lexer position.
 *
 * @return a freshly allocated (possibly empty) list of parsed hints
 */
public List<SQLCommentHint> parseHints() {
    final List<SQLCommentHint> result = new ArrayList<SQLCommentHint>();
    parseHints(result);
    return result;
}
/**
 * Consumes consecutive HINT tokens from the lexer and appends each parsed
 * {@link SQLCommentHint} to the supplied list.
 *
 * <p>Delegates per-hint parsing to {@link #parseHint()} so the TDDL-hint
 * detection and comment attachment logic lives in a single place (the former
 * inline loop body duplicated it verbatim).
 *
 * @param hints target list the parsed hints are appended to
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public void parseHints(List hints) {
    // parseHint() builds the hint (TDDLHint vs SQLCommentHint), attaches any
    // pending lexer comments and advances the lexer — identical behavior to
    // the previous inline implementation.
    while (lexer.token == Token.HINT) {
        hints.add(parseHint());
    }
}
/**
 * Parses a single HINT token into a {@code SQLCommentHint}. Hints whose text
 * starts with a TDDL marker are materialized as {@code TDDLHint} when the
 * TDDLHint parser feature is enabled. Any comments the lexer collected before
 * the hint are attached to it.
 *
 * @return the parsed hint; the lexer is advanced past the HINT token
 */
public SQLCommentHint parseHint() {
    String text = lexer.stringVal();
    SQLCommentHint hint;
    if (lexer.isEnabled(SQLParserFeature.TDDLHint)
            && (text.startsWith("+ TDDL")
            || text.startsWith("+TDDL")
            || text.startsWith("!TDDL")
            || text.startsWith("TDDL"))) {
        hint = new TDDLHint(text);
    } else {
        hint = new SQLCommentHint(text);
    }
    if (lexer.commentCount > 0) {
        hint.addBeforeComment(lexer.comments);
    }
    lexer.nextToken();
    return hint;
}
/**
 * Parses one index-specific token between the INDEX/KEY keyword and the column
 * list; the base implementation only reads the index name. Called in a loop by
 * {@link #parseIndex} until LPAREN or ON is reached; dialects override to
 * consume extra modifiers.
 */
protected void parseIndexSpecific(SQLIndexDefinition indexDefinition) {
    indexDefinition.setName(name());
}
/**
 * Parses trailing index options. The base implementation only understands
 * COMMENT clauses; dialect subclasses override for richer option sets.
 */
protected void parseIndexOptions(SQLIndexDefinition indexDefinition) {
    // Consume any number of "COMMENT <expr>" clauses; stop at the first
    // token that is not COMMENT.
    while (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        indexDefinition.getOptions().setComment(primary());
    }
}
/**
 * Parses an index/key definition: optional CONSTRAINT [symbol], scope
 * (GLOBAL/LOCAL), index type (FULLTEXT/UNIQUE/PRIMARY/SPATIAL/...), the
 * INDEX or KEY keyword, dialect-specific name tokens, an optional ON table
 * clause, the column list, and trailing options.
 */
public void parseIndex(SQLIndexDefinition indexDefinition) {
    if (lexer.token() == Token.CONSTRAINT) {
        indexDefinition.setHasConstraint(true);
        lexer.nextToken();
        // An identifier here is the constraint symbol — unless it is actually
        // a scope/type keyword (GLOBAL/LOCAL/SPATIAL).
        if (lexer.token() == Token.IDENTIFIER
                && !lexer.identifierEquals(FnvHash.Constants.GLOBAL)
                && !lexer.identifierEquals(FnvHash.Constants.LOCAL)
                && !lexer.identifierEquals(FnvHash.Constants.SPATIAL)) {
            indexDefinition.setSymbol(name());
        }
    }
    if (lexer.identifierEquals(FnvHash.Constants.GLOBAL)) {
        indexDefinition.setGlobal(true);
        lexer.nextToken();
    } else if (lexer.identifierEquals(FnvHash.Constants.LOCAL)) {
        indexDefinition.setLocal(true);
        lexer.nextToken();
    }
    if (lexer.token() == Token.FULLTEXT
            || lexer.token() == Token.UNIQUE
            || lexer.token() == Token.PRIMARY
            || lexer.identifierEquals(FnvHash.Constants.SPATIAL)
            || lexer.identifierEquals(FnvHash.Constants.CLUSTERED)
            || lexer.identifierEquals(FnvHash.Constants.CLUSTERING)
            || lexer.identifierEquals(FnvHash.Constants.ANN)) {
        indexDefinition.setType(lexer.stringVal());
        lexer.nextToken();
    }
    // GLOBAL/LOCAL may also appear after the index type, so check again.
    if (lexer.identifierEquals(FnvHash.Constants.GLOBAL)) {
        indexDefinition.setGlobal(true);
        lexer.nextToken();
    } else if (lexer.identifierEquals(FnvHash.Constants.LOCAL)) {
        indexDefinition.setLocal(true);
        lexer.nextToken();
    }
    if (lexer.token() == Token.INDEX) {
        indexDefinition.setIndex(true);
        lexer.nextToken();
    } else if (lexer.token() == Token.KEY) {
        indexDefinition.setKey(true);
        lexer.nextToken();
    }
    // Everything before the column list / ON clause is dialect-specific.
    while (lexer.token() != Token.LPAREN && lexer.token() != Token.ON) {
        parseIndexSpecific(indexDefinition);
    }
    if (lexer.token() == Token.ON) {
        lexer.nextToken();
        indexDefinition.setTable(new SQLExprTableSource(name()));
    }
    parseIndexRest(indexDefinition, indexDefinition.getParent());
    // Options, partitions.
    parseIndexOptions(indexDefinition);
}
/**
 * Parses a table constraint: [CONSTRAINT [name]] followed by a PRIMARY KEY,
 * UNIQUE/KEY, FOREIGN KEY, CHECK or DEFAULT definition.
 *
 * @return the parsed constraint with its name (if any) attached
 * @throws ParserException if the token after CONSTRAINT is not a known
 *                         constraint kind
 */
public SQLConstraint parseConstraint() {
    SQLName name = null;
    boolean nextIfConstraint = lexer.nextIf(CONSTRAINT);
    if (nextIfConstraint) {
        // Optional constraint name; absent when a constraint keyword follows
        // CONSTRAINT directly.
        if (lexer.token != PRIMARY
                && lexer.token != UNIQUE
                && lexer.token != KEY
                && lexer.token != FOREIGN
                && lexer.token != CHECK
                && lexer.token != DEFAULT
        ) {
            name = this.name();
        }
    }
    SQLConstraint constraint;
    switch (lexer.token) {
        case PRIMARY:
            constraint = parsePrimaryKey();
            break;
        case UNIQUE:
            constraint = parseUnique();
            break;
        case KEY:
            // KEY is parsed through the same path as UNIQUE.
            constraint = parseUnique();
            break;
        case FOREIGN:
            constraint = parseForeignKey();
            break;
        case CHECK:
            constraint = parseCheck();
            break;
        case DEFAULT:
            constraint = parseDefault();
            break;
        default:
            throw new ParserException("TODO : " + lexer.info());
    }
    // A following identifier can still supply the constraint name when none
    // was given after CONSTRAINT (e.g. "KEY idx_name" dialect forms).
    if (name == null && lexer.token == Token.IDENTIFIER) {
        name = name();
    }
    if (name != null) {
        constraint.setName(name);
    }
    if (nextIfConstraint) {
        constraint.setHasConstraint(true);
    }
    return constraint;
}
/**
 * Parses a CHECK constraint body: CHECK ( expr ). The expression is flagged
 * as parenthesized so the original formatting survives round-tripping.
 */
public SQLCheck parseCheck() {
    accept(Token.CHECK);
    SQLCheck result = createCheck();
    accept(Token.LPAREN);
    result.setExpr(this.expr());
    SQLExpr parsed = result.getExpr();
    if (parsed instanceof SQLExprImpl) {
        ((SQLExprImpl) parsed).setParenthesized(true);
    }
    accept(Token.RPAREN);
    return result;
}
/**
 * Parses a DEFAULT constraint of the form
 * "DEFAULT [(]expr[)] FOR column [WITH VALUES]" (SQL Server style).
 */
public SQLDefault parseDefault() {
    accept(Token.DEFAULT);
    SQLDefault sqlDefault = new SQLDefault();
    if (lexer.token == Token.LPAREN) {
        // NOTE(review): opening and closing parentheses are consumed greedily
        // without balancing the counts — this assumes well-formed input;
        // confirm whether mismatched parens should be rejected instead.
        while (lexer.token == Token.LPAREN) {
            accept(Token.LPAREN);
        }
        sqlDefault.setExpr(this.expr());
        while (lexer.token == Token.RPAREN) {
            accept(Token.RPAREN);
        }
    } else {
        sqlDefault.setExpr(this.expr());
    }
    accept(Token.FOR);
    sqlDefault.setColumn(this.expr());
    if (lexer.token == Token.WITH) {
        lexer.nextToken();
        accept(Token.VALUES);
        sqlDefault.setWithValues(true);
    }
    return sqlDefault;
}
/**
 * Factory hook for CHECK constraint nodes; dialect parsers override to return
 * a dialect-specific subclass.
 */
protected SQLCheck createCheck() {
    return new SQLCheck();
}
/**
 * Parses "FOREIGN KEY (cols) REFERENCES table [(cols)]" with the optional
 * ON DELETE CASCADE / ON DELETE SET NULL and DISABLE NOVALIDATE clauses,
 * then delegates dialect-specific trailing syntax to parseForeignKeyRest.
 */
public SQLForeignKeyConstraint parseForeignKey() {
    accept(Token.FOREIGN);
    accept(Token.KEY);
    SQLForeignKeyImpl fk = createForeignKey();
    accept(Token.LPAREN);
    this.names(fk.getReferencingColumns(), fk);
    accept(Token.RPAREN);
    accept(Token.REFERENCES);
    fk.setReferencedTableName(this.name());
    // Referenced column list is optional.
    if (lexer.token == Token.LPAREN) {
        lexer.nextToken();
        this.names(fk.getReferencedColumns(), fk);
        accept(Token.RPAREN);
    }
    if (lexer.token == Token.ON) {
        lexer.nextToken();
        accept(Token.DELETE);
        if (lexer.identifierEquals(FnvHash.Constants.CASCADE) || lexer.token == Token.CASCADE) {
            lexer.nextToken();
            fk.setOnDeleteCascade(true);
        } else {
            accept(Token.SET);
            accept(Token.NULL);
            fk.setOnDeleteSetNull(true);
        }
    }
    if (lexer.token == Token.DISABLE) {
        // Only treat DISABLE as part of the FK when followed by NOVALIDATE;
        // otherwise rewind so the caller can handle it.
        Lexer.SavePoint mark = lexer.mark();
        lexer.nextToken();
        if (lexer.identifierEquals(FnvHash.Constants.NOVALIDATE)) {
            lexer.nextToken();
            fk.setDisableNovalidate(true);
        } else {
            lexer.reset(mark);
        }
    }
    parseForeignKeyRest(fk);
    return fk;
}
/**
 * Factory hook for FOREIGN KEY constraint nodes; dialect parsers override to
 * return a dialect-specific subclass.
 */
protected SQLForeignKeyImpl createForeignKey() {
    return new SQLForeignKeyImpl();
}
/**
 * Dialect hook giving "TIMESTAMP '...'" and "DATETIME '...'" literals a chance
 * to be recognized at the start of a select item.
 *
 * @param ident      identifier text already consumed from the lexer
 * @param hash_lower its lower-case FNV hash
 * @return the literal expression, or null when the identifier does not start
 *         a recognized literal (caller then treats it as a plain identifier)
 */
protected SQLExpr parseSelectItemRest(String ident, long hash_lower) {
    SQLExpr expr = null;
    if (FnvHash.Constants.TIMESTAMP == hash_lower
            && lexer.stringVal().charAt(0) != '`'
            && lexer.token == Token.LITERAL_CHARS) {
        String literal = lexer.stringVal();
        lexer.nextToken();
        SQLTimestampExpr ts = new SQLTimestampExpr(literal);
        expr = ts;
        // Optional "AT TIME ZONE <tz>" suffix; the lexer is rewound when the
        // suffix turns out to be incomplete.
        if (lexer.identifierEquals(FnvHash.Constants.AT)) {
            Lexer.SavePoint mark = lexer.mark();
            lexer.nextToken();
            String timeZone = null;
            if (lexer.identifierEquals(FnvHash.Constants.TIME)) {
                lexer.nextToken();
                if (lexer.identifierEquals(FnvHash.Constants.ZONE)) {
                    lexer.nextToken();
                    timeZone = lexer.stringVal();
                    lexer.nextToken();
                }
            }
            if (timeZone == null) {
                lexer.reset(mark);
            } else {
                ts.setTimeZone(timeZone);
            }
        }
    } else if (FnvHash.Constants.DATETIME == hash_lower
            && lexer.stringVal().charAt(0) != '`'
            && lexer.token == Token.LITERAL_CHARS) {
        String literal = lexer.stringVal();
        lexer.nextToken();
        SQLDateTimeExpr ts = new SQLDateTimeExpr(literal);
        expr = ts;
    }
    return expr;
}
/**
 * Reads the current identifier token as an alias for the given select-item
 * expression, honoring the IgnoreNameQuotes parser feature.
 *
 * @return pair of (alias, expression); the lexer is advanced past the alias
 */
protected Pair<String, SQLExpr> parseSelectItemIdentifier(SQLExpr expr) {
    String alias = lexer.stringVal();
    if (alias.length() > 1 && isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
        alias = StringUtils.removeNameQuotes(alias);
    }
    lexer.nextTokenComma();
    return Pair.of(alias, expr);
}
/**
 * Dialect hook parsing the parenthesized call that follows an identifier in a
 * select item; delegates to methodRest (second argument true — see methodRest
 * for its meaning).
 */
protected SQLExpr parseSelectItemMethod(SQLExpr expr) {
    return this.methodRest(expr, true);
}
/**
 * Post-processes a generated select-item alias (see
 * SQLParserFeature.SelectItemGenerateAlias): when the alias contains any
 * character that is not a plain identifier character, embedded double quotes
 * are escaped so the alias can later be emitted as a quoted name.
 *
 * @param alias raw alias text sliced out of the SQL source
 * @return the alias, with inner double quotes escaped when special characters
 *         are present
 */
protected String parseSelectItemAlias(String alias) {
    boolean specialChar = false;
    for (int i = 0; i < alias.length(); ++i) {
        char ch = alias.charAt(i);
        if (!CharTypes.isIdentifierChar(ch)) {
            specialChar = true;
            break;
        }
    }
    if (specialChar) {
        // BUGFIX: the previous code used alias.replaceAll("\"", "\\\"") — in a
        // regex *replacement* string the sequence \" collapses to a plain '"',
        // making the call a no-op. Literal String.replace actually prefixes
        // each embedded quote with a backslash, which is the evident intent.
        alias = alias.replace("\"", "\\\"");
    }
    return alias;
}
/**
 * Parses one select-list item: an expression plus an optional alias (or alias
 * list), handling the many identifier-prefixed literal forms (DATE/TIME/
 * DECIMAL/... '<literal>'), CONNECT_BY_ROOT, TRY_CAST, star expressions with
 * EXCEPT/REPLACE, dotted property/method chains, and alias generation when
 * SQLParserFeature.SelectItemGenerateAlias is enabled.
 */
public SQLSelectItem parseSelectItem() {
    SQLExpr expr;
    boolean connectByRoot = false;
    Token token = lexer.token;
    int startPos = lexer.startPos;
    // The magic hash below appears to be the FNV hash of "x"/"X"; combined
    // with a following quote it detects hex literals — TODO confirm.
    if (token == Token.IDENTIFIER
            && !(lexer.hashLCase() == -5808529385363204345L && lexer.charAt(lexer.pos) == '\'' && (dialectFeatureEnabled(
            ParseSelectItemPrefixX))) // x'123' X'123'
    ) {
        int sourceLine = -1, sourceColumn = -1;
        if (lexer.isKeepSourceLocation()) {
            lexer.computeRowAndColumn();
            sourceLine = lexer.getPosLine();
            sourceColumn = lexer.getPosColumn();
        }
        String ident = lexer.stringVal();
        long hash_lower = lexer.hashLCase();
        lexer.nextTokenComma();
        if (hash_lower == FnvHash.Constants.CONNECT_BY_ROOT) {
            // CONNECT_BY_ROOT followed by '(' is a function call, otherwise a
            // hierarchical-query operator applied to the next identifier.
            connectByRoot = lexer.token != Token.LPAREN;
            if (connectByRoot) {
                expr = new SQLIdentifierExpr(lexer.stringVal());
                lexer.nextToken();
            } else {
                expr = new SQLIdentifierExpr(ident);
            }
        } else if (FnvHash.Constants.DATE == hash_lower
                && lexer.stringVal().charAt(0) != '`'
                && (lexer.token == Token.LITERAL_CHARS || lexer.token == LITERAL_ALIAS)
                && (dialectFeatureEnabled(SQLDateExpr))
        ) {
            String literal = lexer.stringVal();
            lexer.nextToken();
            SQLDateExpr dateExpr = new SQLDateExpr();
            dateExpr.setValue(literal);
            expr = dateExpr;
        } else if (FnvHash.Constants.TIME == hash_lower
                && lexer.token == Token.LITERAL_CHARS) {
            String literal = lexer.stringVal();
            lexer.nextToken();
            expr = new SQLTimeExpr(literal);
        } else if (hash_lower == FnvHash.Constants.DECIMAL
                && lexer.token == Token.LITERAL_CHARS) {
            String decimal = lexer.stringVal();
            expr = new SQLDecimalExpr(decimal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.REAL
                && lexer.token == Token.LITERAL_CHARS) {
            String decimal = lexer.stringVal();
            expr = new SQLRealExpr(decimal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.DOUBLE
                && lexer.token == Token.LITERAL_CHARS) {
            String decimal = lexer.stringVal();
            expr = new SQLDoubleExpr(decimal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.FLOAT
                && lexer.token == Token.LITERAL_CHARS) {
            String decimal = lexer.stringVal();
            expr = new SQLFloatExpr(decimal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.BIGINT
                && lexer.token == Token.LITERAL_CHARS) {
            // A leading "--" in the literal is stripped; presumably encodes a
            // doubly-negated value — TODO confirm.
            String strVal = lexer.stringVal();
            if (strVal.startsWith("--")) {
                strVal = strVal.substring(2);
            }
            expr = new SQLBigIntExpr(strVal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.INTEGER
                && lexer.token == Token.LITERAL_CHARS) {
            String strVal = lexer.stringVal();
            if (strVal.startsWith("--")) {
                strVal = strVal.substring(2);
            }
            SQLIntegerExpr integerExpr = SQLIntegerExpr.ofIntOrLong(Long.parseLong(strVal));
            integerExpr.setType("INTEGER");
            expr = integerExpr;
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.SMALLINT
                && lexer.token == Token.LITERAL_CHARS) {
            String decimal = lexer.stringVal();
            expr = new SQLSmallIntExpr(decimal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.TINYINT
                && lexer.token == Token.LITERAL_CHARS) {
            String decimal = lexer.stringVal();
            expr = new SQLTinyIntExpr(decimal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.JSON
                && lexer.token == Token.LITERAL_CHARS) {
            String decimal = lexer.stringVal();
            expr = new SQLJSONExpr(decimal);
            lexer.nextToken();
        } else if (hash_lower == FnvHash.Constants.TRY_CAST) {
            accept(Token.LPAREN);
            SQLCastExpr cast = new SQLCastExpr();
            cast.setTry(true);
            cast.setExpr(expr());
            accept(Token.AS);
            cast.setDataType(parseDataType(false));
            accept(Token.RPAREN);
            expr = cast;
        } else if (FnvHash.Constants.CURRENT_USER == hash_lower
                && ident.charAt(0) != '`'
                && lexer.token != Token.LPAREN && isEnabled(SQLParserFeature.EnableCurrentUserExpr)) {
            expr = new SQLCurrentUserExpr();
        } else {
            // Dialect hook first; otherwise a plain identifier.
            expr = parseSelectItemRest(ident, hash_lower);
            if (expr == null) {
                if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
                    ident = SQLUtils.normalize(ident, dbType);
                }
                // Quoted identifiers get their hash recomputed from the quoted form.
                if (ident.charAt(0) == '"' && ident.charAt(ident.length() - 1) == '"') {
                    hash_lower = FnvHash.hashCode64(ident);
                }
                SQLIdentifierExpr identifierExpr = new SQLIdentifierExpr(ident, hash_lower);
                if (lexer.keepSourceLocation) {
                    lexer.computeRowAndColumn();
                    identifierExpr.setSource(lexer.posLine, lexer.posColumn);
                }
                expr = identifierExpr;
            }
        }
        token = lexer.token;
        if (token == Token.DOT) {
            // Dotted continuation: owner.*, owner.method(...), owner.property,
            // or sequence pseudo-columns (NEXTVAL/CURRVAL/PREVVAL).
            lexer.nextTokenIdent();
            if (lexer.token == Token.STAR) {
                expr = dotRest(expr);
            } else {
                String name = lexer.stringVal();
                long name_hash_lower = lexer.hashLCase();
                lexer.nextTokenComma();
                token = lexer.token;
                if (token == Token.LPAREN) {
                    // WMSYS.WM_CONCAT is parsed as an aggregate function.
                    boolean aggregate = hash_lower == FnvHash.Constants.WMSYS && name_hash_lower == FnvHash.Constants.WM_CONCAT;
                    expr = methodRest(expr, name, aggregate);
                    token = lexer.token;
                } else {
                    if (name_hash_lower == FnvHash.Constants.NEXTVAL) {
                        expr = new SQLSequenceExpr((SQLIdentifierExpr) expr, SQLSequenceExpr.Function.NextVal);
                    } else if (name_hash_lower == FnvHash.Constants.CURRVAL) {
                        expr = new SQLSequenceExpr((SQLIdentifierExpr) expr, SQLSequenceExpr.Function.CurrVal);
                    } else if (name_hash_lower == FnvHash.Constants.PREVVAL) {
                        expr = new SQLSequenceExpr((SQLIdentifierExpr) expr, SQLSequenceExpr.Function.PrevVal);
                    } else {
                        if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
                            name = SQLUtils.normalize(name, dbType);
                        }
                        if (name.charAt(0) == '"') {
                            name_hash_lower = FnvHash.hashCode64(name);
                        }
                        expr = new SQLPropertyExpr(expr, name, name_hash_lower);
                    }
                }
            }
        }
        // Fast-path exits: the item is complete at a comma or alias token.
        if (token == Token.COMMA) {
            return new SQLSelectItem(expr, (String) null, connectByRoot);
        }
        if (token == Token.AS) {
            lexer.nextTokenAlias();
            String as = null;
            if (lexer.token != Token.COMMA && lexer.token != Token.FROM) {
                as = lexer.stringVal();
                if (isEnabled(SQLParserFeature.IgnoreNameQuotes) && as.length() > 1) {
                    as = StringUtils.removeNameQuotes(as);
                }
                lexer.nextTokenComma();
                // Dotted alias, e.g. "AS a.b".
                if (lexer.token == Token.DOT) {
                    lexer.nextToken();
                    as += '.' + lexer.stringVal();
                    lexer.nextToken();
                }
            }
            return new SQLSelectItem(expr, as, connectByRoot);
        }
        if (token == Token.LITERAL_ALIAS) {
            String as = lexer.stringVal();
            if (isEnabled(SQLParserFeature.IgnoreNameQuotes) && as.length() > 1) {
                as = StringUtils.removeNameQuotes(as);
            }
            lexer.nextTokenComma();
            return new SQLSelectItem(expr, as, connectByRoot);
        }
        if (token == Token.IDENTIFIER
                && hash_lower != FnvHash.Constants.CURRENT) {
            Pair<String, SQLExpr> pair = parseSelectItemIdentifier(expr);
            String as = pair.getKey();
            expr = pair.getValue();
            return new SQLSelectItem(expr, as, connectByRoot);
        }
        if (token == Token.LPAREN) {
            expr = parseSelectItemMethod(expr);
        } else {
            expr = this.primaryRest(expr);
        }
        expr = this.exprRest(expr);
        if (sourceLine != -1) {
            expr.setSource(sourceLine, sourceColumn);
        }
    } else if (lexer.nextIf(STAR)) {
        // "*" with optional EXCEPT(...) / REPLACE(...) projections.
        SQLAllColumnExpr star = new SQLAllColumnExpr();
        if (lexer.nextIf(Token.EXCEPT)) {
            accept(Token.LPAREN);
            List<SQLExpr> except = new ArrayList<>();
            this.exprList(except, star);
            star.setExcept(except);
            accept(Token.RPAREN);
        }
        if (lexer.nextIf(REPLACE)) {
            accept(Token.LPAREN);
            this.aliasedItems(star.getReplace(), star);
            accept(Token.RPAREN);
        }
        expr = star;
        return new SQLSelectItem(expr, (String) null, connectByRoot);
    } else if (token == Token.DO || token == Token.JOIN || token == Token.TABLESPACE) {
        // Keywords that are valid identifiers in select-item position.
        expr = this.name();
        expr = this.exprRest(expr);
    } else {
        if (lexer.token == Token.DISTINCT && dbType == DbType.elastic_search) {
            lexer.nextToken();
        }
        while (lexer.token == Token.HINT) {
            lexer.nextToken();
        }
        expr = expr();
    }
    String alias;
    List<String> aliasList = null;
    if (expr instanceof SQLVariantRefExpr && ((SQLVariantRefExpr) expr).isTemplateParameter() && lexer.token != AS) {
        alias = null;
    } else {
        switch (lexer.token) {
            case FULL:
            case TABLESPACE:
                // These keywords act as aliases here.
                alias = lexer.stringVal();
                lexer.nextToken();
                break;
            case AS:
                lexer.nextTokenAlias();
                if (lexer.token == Token.LITERAL_INT) {
                    alias = '"' + lexer.stringVal() + '"';
                    lexer.nextToken();
                } else if (lexer.token == Token.LPAREN) {
                    // Column-alias list form: "expr AS (a, b, c)".
                    lexer.nextToken();
                    aliasList = new ArrayList<String>();
                    for (; ; ) {
                        String stringVal = lexer.stringVal();
                        lexer.nextToken();
                        aliasList.add(stringVal);
                        if (lexer.token() == Token.COMMA) {
                            lexer.nextToken();
                            continue;
                        }
                        break;
                    }
                    accept(Token.RPAREN);
                    alias = null;
                } else {
                    alias = alias();
                }
                break;
            case EOF:
                alias = null;
                break;
            default:
                alias = as();
                break;
        }
    }
    // SelectItemGenerateAlias: synthesize an alias from the raw source text
    // for computed expressions that have no explicit alias.
    if (alias == null && isEnabled(SQLParserFeature.SelectItemGenerateAlias)
            && (!(expr instanceof SQLName))
            && !(expr instanceof SQLNumericLiteralExpr)
            && !(expr instanceof SQLCharExpr)
            && !(expr instanceof SQLNullExpr)
            && !(expr instanceof SQLBooleanExpr)) {
        alias = lexer.text.substring(startPos, lexer.startPos);
        if (lexer.comments != null) {
            // Strip trailing comments captured inside the sliced source text.
            for (int i = lexer.comments.size() - 1; i >= 0; i--) {
                String comment = lexer.comments.get(i);
                int p = alias.lastIndexOf(comment);
                if (p >= 0) {
                    alias = alias.substring(0, p - 1);
                }
            }
        }
        alias = CharTypes.trim(alias);
        if (alias.length() > 0) {
            alias = parseSelectItemAlias(alias);
        }
    }
    SQLSelectItem selectItem;
    if (aliasList != null) {
        selectItem = new SQLSelectItem(expr, aliasList, connectByRoot);
    } else {
        selectItem = new SQLSelectItem(expr, alias, connectByRoot);
    }
    // A trailing hint is preserved as an after-comment on the item.
    if (lexer.token == Token.HINT && !lexer.isEnabled(SQLParserFeature.StrictForWall)) {
        String comment = "/*" + lexer.stringVal() + "*/";
        selectItem.addAfterComment(comment);
        lexer.nextToken();
    }
    return selectItem;
}
/**
 * Parses one partition definition: either "PARTITION name [VALUES ...]" or a
 * "START (expr) END (expr) EVERY (expr)" batch-range form.
 *
 * @return the parsed partition, or null when the current token starts neither
 *         form
 */
public SQLPartition parsePartition() {
    if (lexer.nextIf(Token.PARTITION)) {
        SQLPartitionSingle partitionDef = new SQLPartitionSingle();
        SQLName name = this.name();
        partitionDef.setName(name);
        partitionDef.setValues(this.parsePartitionValues());
        return partitionDef;
    } else if (lexer.nextIf(START) || lexer.nextIfIdentifier(FnvHash.Constants.START)) {
        SQLPartitionBatch partitionDef = new SQLPartitionBatch();
        accept(Token.LPAREN);
        partitionDef.setStart(this.expr());
        accept(Token.RPAREN);
        // END is mandatory once START was seen.
        if (lexer.nextIf(Token.END) || lexer.nextIfIdentifier(FnvHash.Constants.END)) {
            accept(Token.LPAREN);
            partitionDef.setEnd(this.expr());
            accept(Token.RPAREN);
        } else {
            throw new ParserException("syntax error, expect END, " + lexer.info());
        }
        acceptIdentifier(FnvHash.Constants.EVERY);
        accept(Token.LPAREN);
        partitionDef.setEvery(this.expr());
        accept(Token.RPAREN);
        return partitionDef;
    }
    return null;
}
/**
 * Parses a partition specification of the form
 * "PARTITION (col = expr [, col = expr]...)".
 *
 * @return the populated spec; never null
 */
public SQLPartitionSpec parsePartitionSpec() {
    SQLPartitionSpec spec = new SQLPartitionSpec();
    accept(Token.PARTITION);
    accept(Token.LPAREN);
    // At least one "column = value" pair, comma-separated.
    do {
        SQLPartitionSpec.Item item = new SQLPartitionSpec.Item();
        item.setColumn(this.name());
        accept(Token.EQ);
        item.setValue(this.expr());
        spec.addItem(item);
    } while (lexer.nextIf(Token.COMMA));
    accept(Token.RPAREN);
    return spec;
}
/**
 * Parses "PARTITION BY ...". The base implementation only understands
 * "PARTITION BY VALUE [(expr)] [LIFECYCLE n]" and returns null for every other
 * form; dialect subclasses override for HASH/RANGE/LIST etc.
 * The caller must be positioned on PARTITION; BY is consumed here.
 */
public SQLPartitionBy parsePartitionBy() {
    lexer.nextToken();
    accept(Token.BY);
    SQLPartitionBy partitionClause = null;
    if (lexer.identifierEquals("VALUE")) {
        partitionClause = new SQLPartitionByValue();
        // NOTE(review): this inner check re-tests the outer "VALUE" condition
        // via its hash, so it is effectively always true here.
        if (lexer.identifierEquals(FnvHash.Constants.VALUE)) {
            lexer.nextToken();
            if (lexer.token() == Token.LPAREN) {
                lexer.nextToken();
                partitionClause.addColumn(expr());
                accept(Token.RPAREN);
            }
        }
        if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
            lexer.nextToken();
            partitionClause.setLifeCycle((SQLIntegerExpr) expr());
        }
    }
    return partitionClause;
}
/**
 * Parses "GROUPING SET (...)" in a GROUP BY clause. When GROUPING is not
 * followed by SET, it is treated as an ordinary identifier.
 */
public SQLExpr parseGroupingSet() {
    String keyword = lexer.stringVal();
    acceptIdentifier("GROUPING");
    if (lexer.token != Token.SET && !lexer.identifierEquals(FnvHash.Constants.SET)) {
        // Plain identifier named "GROUPING" — no SET follows.
        return new SQLIdentifierExpr(keyword);
    }
    lexer.nextToken();
    SQLGroupingSetExpr groupingSet = new SQLGroupingSetExpr();
    accept(Token.LPAREN);
    this.exprList(groupingSet.getParameters(), groupingSet);
    accept(Token.RPAREN);
    return groupingSet;
}
/**
 * Parses the VALUES clause of a partition definition:
 * "VALUES IN (...)", "VALUES LESS THAN (... | MAXVALUE)",
 * the bare list form "VALUES (...)", or the fixed half-open range
 * form "VALUES [lo, hi)".
 *
 * @return the parsed values, or null when no VALUES keyword is present
 */
public SQLPartitionValue parsePartitionValues() {
    SQLPartitionValue values = null;
    if (lexer.nextIf(Token.VALUES)) {
        if (lexer.nextIf(Token.IN)) {
            values = new SQLPartitionValue(SQLPartitionValue.Operator.In);
            accept(Token.LPAREN);
            this.exprList(values.getItems(), values);
            accept(Token.RPAREN);
        } else if (lexer.nextIfIdentifier(FnvHash.Constants.LESS)) {
            acceptIdentifier(FnvHash.Constants.THAN);
            values = new SQLPartitionValue(SQLPartitionValue.Operator.LessThan);
            if (lexer.nextIfIdentifier(FnvHash.Constants.MAXVALUE)) {
                // NOTE(review): nextIfIdentifier appears to consume the
                // MAXVALUE token, which would make stringVal() here yield the
                // *following* token's text — confirm that the intended
                // "MAXVALUE" literal is what ends up in the AST.
                SQLIdentifierExpr maxValue = new SQLIdentifierExpr(lexer.stringVal());
                maxValue.setParent(values);
                values.addItem(maxValue);
            } else {
                accept(Token.LPAREN);
                this.exprList(values.getItems(), values);
                accept(Token.RPAREN);
            }
        } else if (lexer.nextIf(Token.LPAREN)) {
            values = new SQLPartitionValue(SQLPartitionValue.Operator.List);
            this.exprList(values.getItems(), values);
            accept(Token.RPAREN);
        } else if (lexer.nextIf(Token.LBRACKET)) {
            // Fixed range uses half-open interval notation "[lo, hi)", hence
            // the closing RPAREN after an opening bracket.
            values = new SQLPartitionValue(SQLPartitionValue.Operator.FixedRange);
            this.exprList(values.getItems(), values);
            accept(Token.RPAREN);
        }
    }
    return values;
}
/**
 * Returns true when the expression is a plain identifier whose name equals
 * the given name, ignoring case.
 */
protected static boolean isIdent(SQLExpr expr, String name) {
    return expr instanceof SQLIdentifierExpr
            && ((SQLIdentifierExpr) expr).getName().equalsIgnoreCase(name);
}
/**
 * Parses a LIMIT clause in its common forms: "LIMIT n", "LIMIT offset, n",
 * "LIMIT n OFFSET m", and (when the ParseLimitBy dialect feature is enabled)
 * a trailing "BY expr, ..." list.
 *
 * @return the parsed limit, or null when the current token is not LIMIT
 */
public SQLLimit parseLimit() {
    if (lexer.token != Token.LIMIT) {
        return null;
    }
    SQLLimit limit = new SQLLimit();
    lexer.nextTokenValue();
    SQLExpr temp;
    if (lexer.token == Token.LITERAL_INT) {
        // Fast path for the common integer literal; fall back to full
        // expression parsing when more syntax follows the literal.
        temp = new SQLIntegerExpr(lexer.integerValue());
        lexer.nextTokenComma();
        if (lexer.token != Token.COMMA && lexer.token != Token.EOF && lexer.token != Token.IDENTIFIER) {
            temp = this.primaryRest(temp);
            temp = this.exprRest(temp);
        }
    } else {
        temp = this.expr();
    }
    if (lexer.token == (Token.COMMA)) {
        // "LIMIT offset, rowCount" form.
        limit.setOffset(temp);
        lexer.nextTokenValue();
        SQLExpr rowCount;
        if (lexer.token == Token.LITERAL_INT) {
            rowCount = new SQLIntegerExpr(lexer.integerValue());
            lexer.nextToken();
            if (lexer.token != Token.EOF && lexer.token != Token.IDENTIFIER) {
                rowCount = this.primaryRest(rowCount);
                rowCount = this.exprRest(rowCount);
            }
        } else {
            rowCount = this.expr();
        }
        limit.setRowCount(rowCount);
    } else if (lexer.identifierEquals(FnvHash.Constants.OFFSET)) {
        // "LIMIT rowCount OFFSET offset" form.
        limit.setRowCount(temp);
        lexer.nextToken();
        limit.setOffset(this.expr());
        limit.setOffsetClause(true);
    } else {
        limit.setRowCount(temp);
    }
    // Dialect-gated "LIMIT n BY expr, ..." list.
    if (lexer.token == Token.BY && dialectFeatureEnabled(ParseLimitBy)) {
        lexer.nextToken();
        for (; ; ) {
            SQLExpr item = this.expr();
            limit.addBy(item);
            if (lexer.token == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
    }
    return limit;
}
/**
 * Convenience overload: parses the column list (and optional COVERING clause)
 * of an index, using the index itself as parent of the parsed items.
 */
public void parseIndexRest(SQLIndex idx) {
    parseIndexRest(idx, idx);
}
/**
 * Parses the parenthesized ordered-column list of an index definition,
 * followed by an optional "COVERING (col, ...)" clause.
 *
 * @param idx    index node receiving the columns / covering names
 * @param parent parent assigned to each parsed item
 */
public void parseIndexRest(SQLIndex idx, SQLObject parent) {
    accept(Token.LPAREN);
    for (; ; ) {
        SQLSelectOrderByItem selectOrderByItem = this.parseSelectOrderByItem();
        selectOrderByItem.setParent(parent);
        idx.getColumns().add(selectOrderByItem);
        if (!(lexer.token() == (Token.COMMA))) {
            break;
        } else {
            lexer.nextToken();
        }
    }
    accept(Token.RPAREN);
    if (lexer.identifierEquals(FnvHash.Constants.COVERING)) {
        // COVERING is only consumed when followed by '('; otherwise it may be
        // an ordinary identifier, so rewind and bail out.
        Lexer.SavePoint mark = lexer.mark();
        lexer.nextToken();
        if (lexer.token == Token.LPAREN) {
            lexer.nextToken();
        } else {
            lexer.reset(mark);
            return;
        }
        for (; ; ) {
            SQLName name = this.name();
            name.setParent(parent);
            idx.getCovering().add(name);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
            } else {
                break;
            }
        }
        accept(Token.RPAREN);
    }
}
/**
 * Parses a Hive-style "ROW FORMAT [DELIMITED]" clause with its optional
 * NULL DEFINED AS, LINES/FIELDS TERMINATED BY, ESCAPED BY, COLLECTION ITEMS,
 * MAP KEYS and SERDE sub-clauses. The caller must be positioned on ROW.
 */
public SQLExternalRecordFormat parseRowFormat() {
    lexer.nextToken();
    acceptIdentifier("FORMAT");
    if (lexer.identifierEquals(FnvHash.Constants.DELIMITED)) {
        lexer.nextToken();
    }
    SQLExternalRecordFormat format = new SQLExternalRecordFormat();
    Lexer.SavePoint mark = lexer.mark();
    String strVal = lexer.stringVal();
    if (NULL.equals(lexer.token())) {
        lexer.nextToken();
        acceptIdentifier("DEFINED");
        accept(Token.AS);
        strVal = lexer.stringVal();
        // Strip the surrounding quotes from the literal.
        String value = strVal.substring(1, strVal.length() - 1);
        SQLCharExpr emptyExpr = new SQLCharExpr(value);
        format.setNullDefinedAs(emptyExpr);
        lexer.nextToken();
    }
    // LINES TERMINATED BY may appear either before or after FIELDS, which is
    // why it is checked twice below.
    if (lexer.identifierEquals(FnvHash.Constants.LINES)) {
        lexer.nextToken();
        acceptIdentifier("TERMINATED");
        accept(Token.BY);
        format.setLinesTerminatedBy(this.expr());
    }
    if (lexer.identifierEquals(FnvHash.Constants.FIELDS)) {
        lexer.nextToken();
        acceptIdentifier("TERMINATED");
        accept(Token.BY);
        format.setTerminatedBy(this.expr());
    } else if (lexer.identifierEquals("FIELD")) {
        throw new ParserException("syntax error, expect FIELDS, " + lexer.info());
    }
    if (lexer.token() == Token.ESCAPE || lexer.identifierEquals(FnvHash.Constants.ESCAPED)) {
        lexer.nextToken();
        accept(Token.BY);
        format.setEscapedBy(this.expr());
    }
    if (lexer.identifierEquals(FnvHash.Constants.LINES)) {
        lexer.nextToken();
        acceptIdentifier("TERMINATED");
        accept(Token.BY);
        format.setLinesTerminatedBy(this.expr());
    }
    if (lexer.identifierEquals(FnvHash.Constants.COLLECTION)) {
        lexer.nextToken();
        acceptIdentifier("ITEMS");
        acceptIdentifier("TERMINATED");
        accept(Token.BY);
        format.setCollectionItemsTerminatedBy(this.expr());
    }
    if (lexer.identifierEquals(FnvHash.Constants.MAP)) {
        lexer.nextToken();
        acceptIdentifier("KEYS");
        acceptIdentifier("TERMINATED");
        accept(Token.BY);
        format.setMapKeysTerminatedBy(this.expr());
    }
    if (lexer.identifierEquals(FnvHash.Constants.SERDE)) {
        lexer.nextToken();
        format.setSerde(this.expr());
    }
    return format;
}
/**
 * Parses a SQL Server style "TOP [(]n[)] [PERCENT]" clause.
 *
 * @return the parsed TOP clause, or null when the current token is not TOP
 */
public SQLTop parseTop() {
    if (lexer.token() == Token.TOP) {
        SQLTop top = new SQLTop();
        lexer.computeRowAndColumn(top);
        lexer.nextToken();
        boolean paren = false;
        if (lexer.token() == Token.LPAREN) {
            top.setParentheses(true);
            paren = true;
            lexer.nextToken();
        }
        if (lexer.token() == Token.LITERAL_INT) {
            top.setExpr(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            // Non-literal TOP argument, e.g. a parameter or expression.
            top.setExpr(primary());
        }
        if (paren) {
            accept(Token.RPAREN);
        }
        if (lexer.token() == Token.PERCENT) {
            lexer.nextToken();
            top.setPercent(true);
        }
        return top;
    }
    return null;
}
/**
 * Parses a STRUCT(...) body: "(item [AS alias], ...)".
 * When the closing token is GTGT (">>"), the lexer token is rewritten to
 * RPAREN — presumably to split a fused ">>" so the enclosing construct still
 * sees its own closing token; TODO confirm against the STRUCT type grammar.
 */
protected SQLStructExpr struct() {
    SQLStructExpr structExpr = new SQLStructExpr();
    accept(Token.LPAREN);
    aliasedItems(structExpr.getItems(), structExpr);
    if (lexer.token == Token.GTGT) {
        lexer.token = Token.RPAREN;
    } else {
        accept(Token.RPAREN);
    }
    return structExpr;
}
}
| SQLExprParser |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameActionTests.java | {
"start": 716,
"end": 1501
} | class ____ extends ESTestCase {
private RestUpdateConnectorNameAction action;
/** Creates a fresh action instance for each test. */
@Override
public void setUp() throws Exception {
    super.setUp();
    action = new RestUpdateConnectorNameAction();
}
/**
 * A PUT to the _name endpoint without a request body must be rejected with an
 * ElasticsearchParseException telling the caller the body is required.
 */
public void testPrepareRequest_emptyPayload_badRequestError() {
    RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT)
        .withPath("/_connector/123/_name")
        .build();
    final ElasticsearchParseException e = expectThrows(
        ElasticsearchParseException.class,
        () -> action.prepareRequest(request, mock(NodeClient.class))
    );
    assertThat(e, hasToString(containsString("request body is required")));
}
}
| RestUpdateConnectorNameActionTests |
java | elastic__elasticsearch | x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolverTests.java | {
"start": 2402,
"end": 15239
} | class ____ extends ESTestCase {
private Client client;
private SecurityContext securityContext;
private UserPrivilegeResolver resolver;
private String app;
/**
 * Wires a resolver against mocked collaborators:
 * - the actions resolver always reports four cluster-role actions;
 * - the client's GetUserPrivileges call returns fixed application privileges
 *   for the randomly named app (plus one privilege for an unrelated app that
 *   must be discarded by the resolver).
 */
@Before
@SuppressWarnings("unchecked")
public void setupTest() {
    client = mock(Client.class);
    securityContext = new SecurityContext(Settings.EMPTY, new ThreadContext(Settings.EMPTY));
    app = randomAlphaOfLengthBetween(3, 8);
    final ApplicationActionsResolver actionsResolver = mock(ApplicationActionsResolver.class);
    // Stub: getActions(...) answers with a fixed set of role actions.
    doAnswer(inv -> {
        final Object[] args = inv.getArguments();
        assertThat(args, arrayWithSize(2));
        ActionListener<Set<String>> listener = (ActionListener<Set<String>>) args[args.length - 1];
        listener.onResponse(Set.of("role:cluster:view", "role:cluster:admin", "role:cluster:operator", "role:cluster:monitor"));
        return null;
    }).when(actionsResolver).getActions(anyString(), any(ActionListener.class));
    // Stub: GetUserPrivileges answers with application privileges for "app"
    // and a privilege for a different application that should be ignored.
    doAnswer(inv -> {
        final Object[] args = inv.getArguments();
        assertThat(args, arrayWithSize(3));
        ActionListener<GetUserPrivilegesResponse> listener = (ActionListener<GetUserPrivilegesResponse>) args[args.length - 1];
        RoleDescriptor.ApplicationResourcePrivileges appPriv1 = RoleDescriptor.ApplicationResourcePrivileges.builder()
            .application(app)
            .resources("resource1")
            .privileges("role:extra1")
            .build();
        RoleDescriptor.ApplicationResourcePrivileges appPriv2 = RoleDescriptor.ApplicationResourcePrivileges.builder()
            .application(app)
            .resources("resource1")
            .privileges("role:extra2", "role:extra3")
            .build();
        RoleDescriptor.ApplicationResourcePrivileges discardedAppPriv = RoleDescriptor.ApplicationResourcePrivileges.builder()
            .application(randomAlphaOfLengthBetween(3, 8))
            .resources("resource1")
            .privileges("role:discarded")
            .build();
        GetUserPrivilegesResponse response = new GetUserPrivilegesResponse(
            Set.of(),
            Set.of(),
            Set.of(),
            Set.of(appPriv1, appPriv2, discardedAppPriv),
            Set.of(),
            Set.of(),
            RemoteClusterPermissions.NONE
        );
        listener.onResponse(response);
        return null;
    }).when(client).execute(same(GetUserPrivilegesAction.INSTANCE), any(GetUserPrivilegesRequest.class), any(ActionListener.class));
    resolver = new UserPrivilegeResolver(client, securityContext, actionsResolver);
}
/**
 * A user with no matching application privileges resolves to hasAccess=false
 * and an empty role set.
 */
public void testResolveZeroAccess() throws Exception {
    final String username = randomAlphaOfLengthBetween(4, 12);
    setupUser(username, () -> {
        // No privileges granted at all.
        setupHasPrivileges(username, app);
        final PlainActionFuture<UserPrivilegeResolver.UserPrivileges> future = new PlainActionFuture<>();
        final Function<String, Set<String>> roleMapping = Map.of(
            "role:cluster:view",
            Set.of("viewer"),
            "role:cluster:admin",
            Set.of("admin")
        )::get;
        resolver.resolve(service(app, "cluster:" + randomLong(), roleMapping), future);
        final UserPrivilegeResolver.UserPrivileges privileges;
        try {
            privileges = future.get();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        assertThat(privileges.principal, equalTo(username));
        assertThat(privileges.hasAccess, equalTo(false));
        assertThat(privileges.roles, emptyIterable());
    });
}
/**
 * When every checked privilege evaluates to false for the resource, the user
 * has no access and no roles.
 */
public void testResolveSsoWithNoRoleAccess() throws Exception {
    final String username = randomAlphaOfLengthBetween(4, 12);
    final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16));
    final String viewerAction = "role:cluster:view";
    final String adminAction = "role:cluster:admin";
    setupUser(username, () -> {
        // Both role actions are denied.
        setupHasPrivileges(username, app, access(resource, viewerAction, false), access(resource, adminAction, false));
        final PlainActionFuture<UserPrivilegeResolver.UserPrivileges> future = new PlainActionFuture<>();
        final Function<String, Set<String>> roleMapping = Map.of(viewerAction, Set.of("viewer"), adminAction, Set.of("admin"))::get;
        resolver.resolve(service(app, resource, roleMapping), future);
        final UserPrivilegeResolver.UserPrivileges privileges;
        try {
            privileges = future.get();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        assertThat(privileges.principal, equalTo(username));
        assertThat(privileges.hasAccess, equalTo(false));
        assertThat(privileges.roles, emptyIterable());
    });
}
/**
 * A single granted privilege maps to exactly one role, and access is granted.
 */
public void testResolveSsoWithSingleRole() throws Exception {
    final String username = randomAlphaOfLengthBetween(4, 12);
    final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16));
    final String viewerAction = "role:cluster:view";
    final String adminAction = "role:cluster:admin";
    setupUser(username, () -> {
        // Only the viewer action is granted.
        setupHasPrivileges(username, app, access(resource, viewerAction, true), access(resource, adminAction, false));
        final PlainActionFuture<UserPrivilegeResolver.UserPrivileges> future = new PlainActionFuture<>();
        final Function<String, Set<String>> roleMapping = Map.of(viewerAction, Set.of("viewer"), adminAction, Set.of("admin"))::get;
        resolver.resolve(service(app, resource, roleMapping), future);
        final UserPrivilegeResolver.UserPrivileges privileges;
        try {
            privileges = future.get();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        assertThat(privileges.principal, equalTo(username));
        assertThat(privileges.hasAccess, equalTo(true));
        assertThat(privileges.roles, containsInAnyOrder("viewer"));
    });
}
public void testResolveSsoWithMultipleRoles() throws Exception {
final String username = randomAlphaOfLengthBetween(4, 12);
final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16));
final String viewerAction = "role:cluster:view";
final String adminAction = "role:cluster:admin";
final String operatorAction = "role:cluster:operator";
final String monitorAction = "role:cluster:monitor";
setupUser(username, () -> {
setupHasPrivileges(
username,
app,
access(resource, viewerAction, false),
access(resource, adminAction, false),
access(resource, operatorAction, true),
access(resource, monitorAction, true)
);
final PlainActionFuture<UserPrivilegeResolver.UserPrivileges> future = new PlainActionFuture<>();
Function<String, Set<String>> roleMapping = action -> {
return switch (action) {
case viewerAction -> Set.of("viewer");
case adminAction -> Set.of("admin");
case operatorAction -> Set.of("operator");
case monitorAction -> Set.of("monitor");
default -> Set.of();
};
};
resolver.resolve(service(app, resource, roleMapping), future);
final UserPrivilegeResolver.UserPrivileges privileges;
try {
privileges = future.get();
} catch (Exception e) {
throw new RuntimeException(e);
}
assertThat(privileges.principal, equalTo(username));
assertThat(privileges.hasAccess, equalTo(true));
assertThat(privileges.roles, containsInAnyOrder("operator", "monitor"));
});
}
public void testResolveSsoWithActionDefinedInUserPrivileges() throws Exception {
final String username = randomAlphaOfLengthBetween(4, 12);
final String resource = "cluster:" + MessageDigests.toHexString(randomByteArrayOfLength(16));
final String actionInUserPrivs = "role:extra2";
final String adminAction = "role:cluster:admin";
setupUser(username, () -> {
setupHasPrivileges(username, app, access(resource, actionInUserPrivs, true), access(resource, adminAction, false));
final PlainActionFuture<UserPrivilegeResolver.UserPrivileges> future = new PlainActionFuture<>();
final Function<String, Set<String>> roleMapping = Map.of(
actionInUserPrivs,
Set.of("extra2"),
adminAction,
Set.of("admin")
)::get;
resolver.resolve(service(app, resource, roleMapping), future);
final UserPrivilegeResolver.UserPrivileges privileges;
try {
privileges = future.get();
} catch (Exception e) {
throw new RuntimeException(e);
}
assertThat(privileges.principal, equalTo(username));
assertThat(privileges.hasAccess, equalTo(true));
assertThat(privileges.roles, containsInAnyOrder("extra2"));
});
}
private ServiceProviderPrivileges service(String appName, String resource, Function<String, Set<String>> roleMapping) {
return new ServiceProviderPrivileges(appName, resource, roleMapping);
}
@SafeVarargs
@SuppressWarnings("unchecked")
private HasPrivilegesResponse setupHasPrivileges(
String username,
String appName,
Tuple<String, Tuple<String, Boolean>>... resourceActionAccess
) {
final boolean isCompleteMatch = randomBoolean();
final Map<String, Map<String, Boolean>> resourcePrivilegeMap = Maps.newMapWithExpectedSize(resourceActionAccess.length);
for (Tuple<String, Tuple<String, Boolean>> t : resourceActionAccess) {
final String resource = t.v1();
final String action = t.v2().v1();
final Boolean access = t.v2().v2();
resourcePrivilegeMap.computeIfAbsent(resource, ignore -> new HashMap<>()).put(action, access);
}
final Collection<ResourcePrivileges> privileges = resourcePrivilegeMap.entrySet()
.stream()
.map(e -> ResourcePrivileges.builder(e.getKey()).addPrivileges(e.getValue()).build())
.collect(Collectors.toList());
final Map<String, Collection<ResourcePrivileges>> appPrivs = Map.of(appName, privileges);
final HasPrivilegesResponse response = new HasPrivilegesResponse(username, isCompleteMatch, Map.of(), Set.of(), appPrivs);
doAnswer(inv -> {
final Object[] args = inv.getArguments();
assertThat(args.length, equalTo(3));
ActionListener<HasPrivilegesResponse> listener = (ActionListener<HasPrivilegesResponse>) args[args.length - 1];
HasPrivilegesRequest request = (HasPrivilegesRequest) args[1];
Set<String> gotPriviliges = Arrays.stream(request.applicationPrivileges())
.flatMap(appPriv -> Arrays.stream(appPriv.getPrivileges()))
.collect(Collectors.toUnmodifiableSet());
Set<String> expectedPrivileges = Set.of(
"role:cluster:view",
"role:cluster:admin",
"role:cluster:operator",
"role:cluster:monitor",
"role:extra1",
"role:extra2",
"role:extra3"
);
assertEquals(expectedPrivileges, gotPriviliges);
listener.onResponse(response);
return null;
}).when(client).execute(same(HasPrivilegesAction.INSTANCE), any(HasPrivilegesRequest.class), any(ActionListener.class));
return response;
}
private Tuple<String, Tuple<String, Boolean>> access(String resource, String action, boolean access) {
return new Tuple<>(resource, new Tuple<>(action, access));
}
private void setupUser(String principal, Runnable runnable) {
final Authentication authentication = AuthenticationTestHelper.builder()
.user(new User(principal, randomAlphaOfLengthBetween(6, 12)))
.build();
securityContext.executeWithAuthentication(authentication, ignored -> {
runnable.run();
return null;
});
}
}
| UserPrivilegeResolverTests |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/provider/EnumArgumentsProviderTests.java | {
"start": 6733,
"end": 7880
} | enum ____ {
}
private <E extends Enum<E>> Stream<Object[]> provideArguments(Class<E> enumClass, String... names) {
return provideArguments(enumClass, Mode.INCLUDE, names);
}
private <E extends Enum<E>> Stream<Object[]> provideArguments(Class<E> enumClass, Mode mode, String... names) {
return provideArguments(enumClass, "", "", mode, names);
}
private <E extends Enum<E>> Stream<Object[]> provideArguments(Class<E> enumClass, String from, String to, Mode mode,
String... names) {
var annotation = mock(EnumSource.class);
when(annotation.value()).thenAnswer(__ -> enumClass);
when(annotation.from()).thenReturn(from);
when(annotation.to()).thenReturn(to);
when(annotation.mode()).thenReturn(mode);
when(annotation.names()).thenReturn(names);
when(annotation.toString()).thenReturn(
"@EnumSource(value=%s.class, from=%s, to=%s, mode=%s, names=%s)".formatted(enumClass.getSimpleName(), from,
to, mode, Arrays.toString(names)));
var provider = new EnumArgumentsProvider();
provider.accept(annotation);
return provider.provideArguments(parameters, extensionContext).map(Arguments::get);
}
}
| EnumWithNoConstant |
java | apache__camel | components/camel-olingo2/camel-olingo2-api/src/main/java/org/apache/camel/component/olingo2/api/impl/UriInfoWithType.java | {
"start": 2077,
"end": 9600
} | class ____ implements UriInfo {
private final UriInfo uriInfo;
private final UriType uriType;
public UriInfoWithType(UriInfo uriInfo, String resourcePath) throws ODataApplicationException, EdmException {
this.uriInfo = uriInfo;
// determine Uri Type
UriType uriType;
final List<NavigationSegment> segments = uriInfo.getNavigationSegments();
final boolean isLinks = uriInfo.isLinks();
if (segments.isEmpty() && uriInfo.getTargetType() == null) {
uriType = UriType.URI0;
if (resourcePath.endsWith("$metadata")) {
uriType = UriType.URI8;
} else if (resourcePath.endsWith("$batch")) {
uriType = UriType.URI9;
}
} else {
final EdmEntitySet targetEntitySet = uriInfo.getTargetEntitySet();
if (targetEntitySet != null) {
final boolean isCount = uriInfo.isCount();
final List<KeyPredicate> keyPredicates = uriInfo.getKeyPredicates();
if (keyPredicates.isEmpty()) {
if (!isCount) {
uriType = UriType.URI1;
} else {
uriType = UriType.URI15;
}
} else {
uriType = UriType.URI2;
if (isCount) {
uriType = UriType.URI16;
} else if (uriInfo.isValue()) {
uriType = UriType.URI17;
}
final EdmTypeKind targetKind = uriInfo.getTargetType().getKind();
switch (targetKind) {
case SIMPLE:
if (segments.isEmpty()) {
uriType = UriType.URI5;
} else {
uriType = UriType.URI4;
}
break;
case COMPLEX:
uriType = UriType.URI3;
break;
case ENTITY:
final List<EdmProperty> propertyPath = uriInfo.getPropertyPath();
if (!segments.isEmpty() || !propertyPath.isEmpty()) {
boolean many = false;
if (!propertyPath.isEmpty()) {
final EdmProperty lastProperty = propertyPath.get(propertyPath.size() - 1);
many = lastProperty.getMultiplicity() == EdmMultiplicity.MANY;
} else {
final NavigationSegment lastSegment = segments.get(segments.size() - 1);
many = lastSegment.getKeyPredicates().isEmpty()
&& lastSegment.getNavigationProperty().getMultiplicity() == EdmMultiplicity.MANY;
}
if (isCount) {
if (many) {
uriType = isLinks ? UriType.URI50B : UriType.URI15;
} else {
uriType = UriType.URI50A;
}
} else {
if (many) {
uriType = isLinks ? UriType.URI7B : UriType.URI6B;
} else {
uriType = isLinks ? UriType.URI7A : UriType.URI6A;
}
}
}
break;
default:
throw new ODataApplicationException("Unexpected property type " + targetKind, Locale.ENGLISH);
}
}
} else {
final EdmFunctionImport functionImport = uriInfo.getFunctionImport();
final EdmType targetType = uriInfo.getTargetType();
final boolean isCollection = functionImport.getReturnType().getMultiplicity() == EdmMultiplicity.MANY;
switch (targetType.getKind()) {
case SIMPLE:
uriType = isCollection ? UriType.URI13 : UriType.URI14;
break;
case COMPLEX:
uriType = isCollection ? UriType.URI11 : UriType.URI12;
break;
case ENTITY:
uriType = UriType.URI10;
break;
default:
throw new ODataApplicationException("Invalid function return type " + targetType, Locale.ENGLISH);
}
}
}
this.uriType = uriType;
}
public UriType getUriType() {
return uriType;
}
@Override
public EdmEntityContainer getEntityContainer() {
return uriInfo.getEntityContainer();
}
@Override
public EdmEntitySet getStartEntitySet() {
return uriInfo.getStartEntitySet();
}
@Override
public EdmEntitySet getTargetEntitySet() {
return uriInfo.getTargetEntitySet();
}
@Override
public EdmFunctionImport getFunctionImport() {
return uriInfo.getFunctionImport();
}
@Override
public EdmType getTargetType() {
return uriInfo.getTargetType();
}
@Override
public List<KeyPredicate> getKeyPredicates() {
return uriInfo.getKeyPredicates();
}
@Override
public List<KeyPredicate> getTargetKeyPredicates() {
return uriInfo.getTargetKeyPredicates();
}
@Override
public List<NavigationSegment> getNavigationSegments() {
return uriInfo.getNavigationSegments();
}
@Override
public List<EdmProperty> getPropertyPath() {
return uriInfo.getPropertyPath();
}
@Override
public boolean isCount() {
return uriInfo.isCount();
}
@Override
public boolean isValue() {
return uriInfo.isValue();
}
@Override
public boolean isLinks() {
return uriInfo.isLinks();
}
@Override
public String getFormat() {
return uriInfo.getFormat();
}
@Override
public FilterExpression getFilter() {
return uriInfo.getFilter();
}
@Override
public InlineCount getInlineCount() {
return uriInfo.getInlineCount();
}
@Override
public OrderByExpression getOrderBy() {
return uriInfo.getOrderBy();
}
@Override
public String getSkipToken() {
return uriInfo.getSkipToken();
}
@Override
public Integer getSkip() {
return uriInfo.getSkip();
}
@Override
public Integer getTop() {
return uriInfo.getTop();
}
@Override
public List<ArrayList<NavigationPropertySegment>> getExpand() {
return uriInfo.getExpand();
}
@Override
public List<SelectItem> getSelect() {
return uriInfo.getSelect();
}
@Override
public Map<String, EdmLiteral> getFunctionImportParameters() {
return uriInfo.getFunctionImportParameters();
}
@Override
public Map<String, String> getCustomQueryOptions() {
return uriInfo.getCustomQueryOptions();
}
}
| UriInfoWithType |
java | spring-projects__spring-boot | module/spring-boot-reactor-netty/src/main/java/org/springframework/boot/reactor/netty/NettyWebServer.java | {
"start": 2167,
"end": 2365
} | class ____ be created using the {@link NettyReactiveWebServerFactory} and not
* directly.
*
* @author Brian Clozel
* @author Madhura Bhave
* @author Andy Wilkinson
* @since 4.0.0
*/
public | should |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/runtime/src/main/java/io/quarkus/resteasy/reactive/server/runtime/ResteasyReactiveSecurityContext.java | {
"start": 360,
"end": 1576
} | class ____ implements SecurityContext {
private final RoutingContext routingContext;
public ResteasyReactiveSecurityContext(RoutingContext routingContext) {
this.routingContext = routingContext;
}
@Override
public Principal getUserPrincipal() {
QuarkusHttpUser user = (QuarkusHttpUser) routingContext.user();
if (user == null || user.getSecurityIdentity().isAnonymous()) {
return null;
}
return user.getSecurityIdentity().getPrincipal();
}
@Override
public boolean isUserInRole(String role) {
SecurityIdentity user = CurrentIdentityAssociation.current();
if (role.equals("**")) {
return !user.isAnonymous();
}
return user.hasRole(role);
}
@Override
public boolean isSecure() {
return routingContext.request().isSSL();
}
@Override
public String getAuthenticationScheme() {
String authorizationValue = routingContext.request().getHeader("Authorization");
if (authorizationValue == null) {
return null;
} else {
return authorizationValue.split(" ")[0].trim();
}
}
}
| ResteasyReactiveSecurityContext |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/context/properties/ConfigurationPropertiesReportEndpointTests.java | {
"start": 24786,
"end": 24882
} | class ____ {
}
@ConfigurationProperties("boolean")
public static | BooleanPropertiesConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/EnumSetTest.java | {
"start": 2196,
"end": 7679
} | class ____ {
private BasicType<Set<MyEnum>> enumSetType;
@BeforeEach
public void startUp(SessionFactoryScope scope) {
scope.inTransaction( em -> {
enumSetType = em.getTypeConfiguration().getBasicTypeForGenericJavaType( Set.class, MyEnum.class );
em.persist( new TableWithEnumSet( 1L, new HashSet<>() ) );
em.persist( new TableWithEnumSet( 2L, EnumSet.of( MyEnum.VALUE1, MyEnum.VALUE2 ) ) );
em.persist( new TableWithEnumSet( 3L, null ) );
//noinspection deprecation,unchecked
em.createNamedQuery( "TableWithEnumSet.Native.insert" )
.setParameter( "id", 4L )
.setParameter( "data", EnumSet.of( MyEnum.VALUE2, MyEnum.VALUE1, MyEnum.VALUE3 ), enumSetType )
.executeUpdate();
//noinspection deprecation,unchecked
em.createNativeQuery( "INSERT INTO table_with_enum_set(id, the_set) VALUES ( :id , :data )" )
.setParameter( "id", 5L )
.setParameter( "data", EnumSet.of( MyEnum.VALUE2, MyEnum.VALUE1, MyEnum.VALUE3 ), enumSetType )
.executeUpdate();
} );
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.dropData();
}
@Test
public void testById(SessionFactoryScope scope) {
scope.inSession( em -> {
TableWithEnumSet tableRecord;
tableRecord = em.find( TableWithEnumSet.class, 1L );
assertThat( tableRecord.getTheSet() ).isEmpty();
tableRecord = em.find( TableWithEnumSet.class, 2L );
assertThat( tableRecord.getTheSet() ).isEqualTo( EnumSet.of( MyEnum.VALUE1, MyEnum.VALUE2 ) );
tableRecord = em.find( TableWithEnumSet.class, 3L );
assertNull( tableRecord.getTheSet() );
} );
}
@Test
public void testQueryById(SessionFactoryScope scope) {
scope.inSession( em -> {
TypedQuery<TableWithEnumSet> tq = em.createNamedQuery( "TableWithEnumSet.JPQL.getById", TableWithEnumSet.class );
tq.setParameter( "id", 2L );
TableWithEnumSet tableRecord = tq.getSingleResult();
assertThat( tableRecord.getTheSet() ).isEqualTo( EnumSet.of( MyEnum.VALUE1, MyEnum.VALUE2 ) );
} );
}
@Test
@SkipForDialect(dialectClass = InformixDialect.class,
reason = "The statement failed because binary large objects are not allowed in the Union, Intersect, or Minus ")
@SkipForDialect(dialectClass = MariaDBDialect.class, majorVersion = 10, minorVersion = 6,
reason = "Bug in MariaDB https://jira.mariadb.org/browse/MDEV-21530")
public void testQuery(SessionFactoryScope scope) {
scope.inSession( em -> {
TypedQuery<TableWithEnumSet> tq = em.createNamedQuery( "TableWithEnumSet.JPQL.getByData", TableWithEnumSet.class );
tq.setParameter( "data", new HashSet<>() );
TableWithEnumSet tableRecord = tq.getSingleResult();
assertEquals( 1L, tableRecord.getId() );
} );
}
@Test
public void testNativeQueryById(SessionFactoryScope scope) {
scope.inSession( em -> {
TypedQuery<TableWithEnumSet> tq = em.createNamedQuery( "TableWithEnumSet.Native.getById", TableWithEnumSet.class );
tq.setParameter( "id", 2L );
TableWithEnumSet tableRecord = tq.getSingleResult();
assertThat( tableRecord.getTheSet() ).isEqualTo( EnumSet.of( MyEnum.VALUE1, MyEnum.VALUE2 ) );
} );
}
@Test
@SkipForDialect(dialectClass = HSQLDialect.class, reason = "HSQL does not like plain parameters in the distinct from predicate")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle requires a special function to compare XML")
@SkipForDialect(dialectClass = DB2Dialect.class, reason = "DB2 requires a special function to compare XML")
@SkipForDialect(dialectClass = SQLServerDialect.class, reason = "SQL Server requires a special function to compare XML")
@SkipForDialect(dialectClass = SybaseASEDialect.class, reason = "Sybase ASE requires a special function to compare XML")
@SkipForDialect(dialectClass = HANADialect.class, reason = "HANA requires a special function to compare LOBs")
@SkipForDialect(dialectClass = MySQLDialect.class, matchSubTypes = true, reason = "MySQL supports distinct from through a special operator")
@SkipForDialect(dialectClass = InformixDialect.class, reason = "Informix can't compare LOBs")
public void testNativeQuery(SessionFactoryScope scope) {
scope.inSession( em -> {
final Dialect dialect = em.getDialect();
final String op = dialect.supportsDistinctFromPredicate() ? "IS NOT DISTINCT FROM" : "=";
final String param = enumSetType.getJdbcType().wrapWriteExpression( ":data", null, dialect );
Query<TableWithEnumSet> tq = em.createNativeQuery(
"SELECT * FROM table_with_enum_set t WHERE the_set " + op + " " + param,
TableWithEnumSet.class
);
tq.setParameter( "data", EnumSet.of( MyEnum.VALUE1, MyEnum.VALUE2 ), enumSetType );
TableWithEnumSet tableRecord = tq.getSingleResult();
assertEquals( 2L, tableRecord.getId() );
} );
}
@Entity( name = "TableWithEnumSet" )
@Table( name = "table_with_enum_set" )
@NamedQueries( {
@NamedQuery( name = "TableWithEnumSet.JPQL.getById",
query = "SELECT t FROM TableWithEnumSet t WHERE id = :id" ),
@NamedQuery( name = "TableWithEnumSet.JPQL.getByData",
query = "SELECT t FROM TableWithEnumSet t WHERE theSet IS NOT DISTINCT FROM :data" ), } )
@NamedNativeQueries( {
@NamedNativeQuery( name = "TableWithEnumSet.Native.getById",
query = "SELECT * FROM table_with_enum_set t WHERE id = :id",
resultClass = TableWithEnumSet.class ),
@NamedNativeQuery( name = "TableWithEnumSet.Native.insert",
query = "INSERT INTO table_with_enum_set(id, the_set) VALUES ( :id , :data )" )
} )
public static | EnumSetTest |
java | apache__flink | flink-python/src/test/java/org/apache/flink/table/utils/TestingDescriptors.java | {
"start": 2187,
"end": 4151
} | class ____ extends TimestampExtractor {
private final String field;
public CustomExtractor(String field) {
this.field = field;
}
public CustomExtractor() {
this("ts");
}
@Override
public String[] getArgumentFields() {
return new String[] {field};
}
@Override
public void validateArgumentFields(TypeInformation<?>[] argumentFieldTypes) {
if (argumentFieldTypes[0] != Types.SQL_TIMESTAMP) {
throw new ValidationException(
String.format(
"Field 'ts' must be of type Timestamp " + "but is of type %s.",
argumentFieldTypes[0]));
}
}
@Override
public Expression getExpression(ResolvedFieldReference[] fieldAccesses) {
ResolvedFieldReference fieldAccess = fieldAccesses[0];
checkState(fieldAccess.resultType() == Types.SQL_TIMESTAMP);
FieldReferenceExpression fieldReferenceExpr =
new FieldReferenceExpression(
fieldAccess.name(),
TypeConversions.fromLegacyInfoToDataType(fieldAccess.resultType()),
0,
fieldAccess.fieldIndex());
return ApiExpressionUtils.unresolvedCall(
BuiltInFunctionDefinitions.CAST,
fieldReferenceExpr,
ApiExpressionUtils.typeLiteral(DataTypes.BIGINT()));
}
@Override
public boolean equals(Object o) {
if (o instanceof CustomExtractor) {
return field.equals(((CustomExtractor) o).field);
} else {
return false;
}
}
@Override
public int hashCode() {
return Objects.hash(field);
}
}
}
| CustomExtractor |
java | apache__camel | components/camel-platform-http-vertx/src/test/java/org/apache/camel/component/platform/http/vertx/model/Pet.java | {
"start": 1301,
"end": 2361
} | class ____ {
@XmlElement
private Long id;
private String name;
private Category category;
private List<String> photoUrls;
private List<Tag> tags;
private Status status;
public Long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Category getCategory() {
return category;
}
public void setCategory(Category category) {
this.category = category;
}
public List<Tag> getTags() {
return tags;
}
public void setTags(List<Tag> tags) {
this.tags = tags;
}
public List<String> getPhotoUrls() {
return photoUrls;
}
public void setPhotoUrls(List<String> photoUrls) {
this.photoUrls = photoUrls;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
public | Pet |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/ImageHeaderParser.java | {
"start": 684,
"end": 2760
} | enum ____ {
GIF(true),
JPEG(false),
RAW(false),
/** PNG type with alpha. */
PNG_A(true),
/** PNG type without alpha. */
PNG(false),
/** WebP type with alpha. */
WEBP_A(true),
/** WebP type without alpha. */
WEBP(false),
/** All animated webps. */
ANIMATED_WEBP(true),
/** Avif type (may contain alpha). */
AVIF(true),
/** Animated Avif type (may contain alpha). */
ANIMATED_AVIF(true),
/** Unrecognized type. */
UNKNOWN(false);
private final boolean hasAlpha;
ImageType(boolean hasAlpha) {
this.hasAlpha = hasAlpha;
}
public boolean hasAlpha() {
return hasAlpha;
}
public boolean isWebp() {
switch (this) {
case WEBP:
case WEBP_A:
case ANIMATED_WEBP:
return true;
default:
return false;
}
}
}
@NonNull
ImageType getType(@NonNull InputStream is) throws IOException;
@NonNull
ImageType getType(@NonNull ByteBuffer byteBuffer) throws IOException;
/**
* Parse the orientation from the image header. If it doesn't handle this image type (or this is
* not an image) it will return a default value rather than throwing an exception.
*
* @return The exif orientation if present or -1 if the header couldn't be parsed or doesn't
* contain an orientation
*/
int getOrientation(@NonNull InputStream is, @NonNull ArrayPool byteArrayPool) throws IOException;
int getOrientation(@NonNull ByteBuffer byteBuffer, @NonNull ArrayPool byteArrayPool)
throws IOException;
/**
* Returns whether the {@link InputStream} has associated multi-picture-format (MPF) data. Only
* JPEGs have MPF data.
*/
boolean hasJpegMpf(@NonNull InputStream is, @NonNull ArrayPool byteArrayPool) throws IOException;
/**
* Returns whether the {@link ByteBuffer} has associated multi-picture-format (MPF) data. Only
* JPEGs have MPF data.
*/
boolean hasJpegMpf(@NonNull ByteBuffer byteBuffer, @NonNull ArrayPool byteArrayPool)
throws IOException;
}
| ImageType |
java | redisson__redisson | redisson/src/main/java/org/redisson/connection/ClusterConnectionManager.java | {
"start": 1754,
"end": 49211
} | class ____ extends MasterSlaveConnectionManager {
private final Logger log = LoggerFactory.getLogger(getClass());
private final Map<Integer, ClusterPartition> lastPartitions = new ConcurrentHashMap<>();
private final Map<RedisURI, ClusterPartition> lastUri2Partition = new ConcurrentHashMap<>();
private volatile Timeout monitorFuture;
private volatile RedisURI lastClusterNode;
private RedisStrictCommand<List<ClusterNodeInfo>> clusterNodesCommand;
private String configEndpointHostName;
private String configEndpointPassword;
private final AtomicReferenceArray<MasterSlaveEntry> slot2entry = new AtomicReferenceArray<>(MAX_SLOT);
private final Map<RedisClient, MasterSlaveEntry> client2entry = new ConcurrentHashMap<>();
private ClusterServersConfig cfg;
ClusterConnectionManager(ClusterServersConfig cfg, Config configCopy) {
super(cfg, configCopy);
this.serviceManager.setNatMapper(cfg.getNatMapper());
}
@Override
protected MasterSlaveServersConfig create(BaseMasterSlaveServersConfig<?> cfg) {
this.cfg = (ClusterServersConfig) cfg;
MasterSlaveServersConfig res = super.create(cfg);
res.setDatabase(((ClusterServersConfig) cfg).getDatabase());
return res;
}
@Override
public void doConnect(Function<RedisURI, String> hostnameMapper) {
if (cfg.getScanInterval() <= 0) {
throw new IllegalArgumentException("scanInterval setting can't be 0 or less");
}
if (cfg.getNodeAddresses().isEmpty()) {
throw new IllegalArgumentException("At least one cluster node should be defined!");
}
Throwable lastException = null;
List<String> failedMasters = new ArrayList<>();
boolean skipCommandsDetection = false;
for (String address : cfg.getNodeAddresses()) {
RedisURI addr = new RedisURI(address);
CompletionStage<RedisConnection> connectionFuture = connectToNode(cfg, addr, addr.getHost());
try {
RedisConnection connection = connectionFuture.toCompletableFuture()
.get(config.getConnectTimeout(), TimeUnit.MILLISECONDS);
if (cfg.getNodeAddresses().size() == 1 && !addr.isIP()) {
configEndpointHostName = addr.getHost();
configEndpointPassword = addr.getPassword();
}
clusterNodesCommand = new RedisStrictCommand<List<ClusterNodeInfo>>("CLUSTER", "NODES",
new ObjectDecoder(new ClusterNodesDecoder(addr.getScheme())));
if (!skipCommandsDetection) {
subscribeService.checkShardingSupport(cfg.getShardedSubscriptionMode(), connection);
subscribeService.checkPatternSupport(connection);
skipCommandsDetection = true;
}
List<ClusterNodeInfo> nodes = connection.sync(clusterNodesCommand);
StringBuilder nodesValue = new StringBuilder();
for (ClusterNodeInfo clusterNodeInfo : nodes) {
nodesValue.append(clusterNodeInfo.getNodeInfo()).append("\n");
}
log.info("Redis cluster nodes configuration got from {}:\n{}", connection.getRedisClient().getAddr(), nodesValue);
lastClusterNode = addr;
CompletableFuture<Collection<ClusterPartition>> partitionsFuture = parsePartitions(nodes);
Collection<ClusterPartition> partitions;
try {
partitions = partitionsFuture.join();
} catch (CompletionException e) {
lastException = e.getCause();
break;
}
List<CompletableFuture<Void>> masterFutures = new ArrayList<>();
for (ClusterPartition partition : partitions) {
if (partition.isMasterFail()) {
failedMasters.add(partition.getMasterAddress().toString());
continue;
}
if (partition.getMasterAddress() == null) {
throw new IllegalStateException("Master node: " + partition.getNodeId() + " doesn't have an address.");
}
CompletionStage<Void> masterFuture = addMasterEntry(partition, cfg);
masterFutures.add(masterFuture.toCompletableFuture());
}
CompletableFuture<Void> masterFuture = CompletableFuture.allOf(masterFutures.toArray(new CompletableFuture[0]));
try {
masterFuture.join();
} catch (CompletionException e) {
lastException = e.getCause();
}
break;
} catch (Exception e) {
if (e instanceof CompletionException) {
e = (Exception) e.getCause();
}
lastException = e;
if (e instanceof TimeoutException) {
log.warn("Connection timeout to {}", address);
}
if (e.getMessage() != null) {
log.warn(e.getMessage());
}
}
}
if (lastPartitions.isEmpty()) {
internalShutdown();
if (failedMasters.isEmpty()) {
throw new RedisConnectionException("Can't connect to servers!", lastException);
} else {
throw new RedisConnectionException("Can't connect to servers! Failed masters according to cluster status: " + failedMasters, lastException);
}
}
if (cfg.isCheckSlotsCoverage() && lastPartitions.size() != MAX_SLOT) {
internalShutdown();
if (failedMasters.isEmpty()) {
throw new RedisConnectionException("Not all slots covered! Only " + lastPartitions.size() + " slots are available. Set checkSlotsCoverage = false to avoid this check.", lastException);
} else {
throw new RedisConnectionException("Not all slots covered! Only " + lastPartitions.size() + " slots are available. Set checkSlotsCoverage = false to avoid this check. Failed masters according to cluster status: " + failedMasters, lastException);
}
}
scheduleClusterChangeCheck(cfg);
}
@Override
public Collection<MasterSlaveEntry> getEntrySet() {
lazyConnect();
return client2entry.values();
}
@Override
public MasterSlaveEntry getEntry(RedisURI addr) {
lazyConnect();
for (MasterSlaveEntry entry : client2entry.values()) {
if (addr.equals(entry.getClient().getAddr())) {
return entry;
}
if (entry.hasSlave(addr)) {
return entry;
}
}
return null;
}
@Override
public MasterSlaveEntry getEntry(RedisClient redisClient) {
lazyConnect();
MasterSlaveEntry entry = client2entry.get(redisClient);
if (entry != null) {
return entry;
}
for (MasterSlaveEntry mentry : client2entry.values()) {
if (mentry.hasSlave(redisClient)) {
return mentry;
}
}
return null;
}
@Override
public MasterSlaveEntry getEntry(InetSocketAddress address) {
lazyConnect();
for (MasterSlaveEntry entry : client2entry.values()) {
InetSocketAddress addr = entry.getClient().getAddr();
if (addr.getAddress().equals(address.getAddress()) && addr.getPort() == address.getPort()) {
return entry;
}
if (entry.hasSlave(address)) {
return entry;
}
}
return null;
}
@Override
protected CompletableFuture<RedisClient> changeMaster(int slot, RedisURI address) {
MasterSlaveEntry entry = getEntry(slot);
RedisClient oldClient = entry.getClient();
CompletableFuture<RedisClient> future = super.changeMaster(slot, address);
return future.thenApply(res -> {
client2entry.remove(oldClient);
client2entry.put(entry.getClient(), entry);
return res;
});
}
@Override
public MasterSlaveEntry getEntry(int slot) {
lazyConnect();
return slot2entry.get(slot);
}
private void addEntry(Integer slot, MasterSlaveEntry entry) {
MasterSlaveEntry oldEntry = slot2entry.getAndSet(slot, entry);
if (oldEntry != entry) {
entry.incReference();
shutdownEntry(oldEntry, entry);
}
client2entry.put(entry.getClient(), entry);
}
private void removeEntry(Integer slot) {
MasterSlaveEntry entry = slot2entry.getAndSet(slot, null);
shutdownEntry(entry, null);
}
private void removeEntry(Integer slot, MasterSlaveEntry entry) {
if (slot2entry.compareAndSet(slot, entry, null)) {
shutdownEntry(entry, null);
}
}
private void shutdownEntry(MasterSlaveEntry entry, MasterSlaveEntry newEntry) {
if (entry != null && entry.decReference() == 0) {
entry.getAllEntries().forEach(e -> {
RedisURI uri = new RedisURI(e.getClient().getConfig().getAddress().getScheme(),
e.getClient().getAddr().getAddress().getHostAddress(),
e.getClient().getAddr().getPort());
disconnectNode(uri);
e.nodeDown();
});
entry.masterDown();
entry.shutdownAsync();
entry.setReplacedBy(newEntry);
subscribeService.remove(entry);
RedisURI uri = new RedisURI(entry.getClient().getConfig().getAddress().getScheme(),
entry.getClient().getAddr().getAddress().getHostAddress(),
entry.getClient().getAddr().getPort());
disconnectNode(uri);
client2entry.remove(entry.getClient());
String slaves = entry.getAllEntries().stream()
.filter(e -> !e.getClient().getAddr().equals(entry.getClient().getAddr()))
.map(e -> e.getClient().toString())
.collect(Collectors.joining(","));
log.info("{} master and related slaves: {} removed", entry.getClient().getAddr(), slaves);
}
}
@Override
protected RedisClientConfig createRedisConfig(NodeType type, RedisURI address, int timeout, int commandTimeout, String sslHostname) {
RedisClientConfig result = super.createRedisConfig(type, address, timeout, commandTimeout, sslHostname);
result.setReadOnly(type == NodeType.SLAVE && config.getReadMode() != ReadMode.MASTER);
return result;
}
    // Creates and registers a MasterSlaveEntry for the given master partition:
    // rejects masters flagged FAIL, connects to the master, configures slaves
    // (excluding ones flagged failed) unless slaves are disabled, then maps all
    // of the partition's slots to the new entry and initializes the slave balancer.
    private CompletionStage<Void> addMasterEntry(ClusterPartition partition, ClusterServersConfig cfg) {
        if (partition.isMasterFail()) {
            RedisException e = new RedisException("Failed to add master: " +
                    partition.getMasterAddress() + " for slot ranges: " +
                    partition.getSlotRanges() + ". Reason - server has FAIL flag");
            // a master without slots gets a shorter message (no ranges to report)
            if (partition.getSlotsAmount() == 0) {
                e = new RedisException("Failed to add master: " +
                        partition.getMasterAddress() + ". Reason - server has FAIL flag");
            }
            CompletableFuture<Void> result = new CompletableFuture<>();
            result.completeExceptionally(e);
            return result;
        }
        CompletionStage<RedisConnection> connectionFuture = connectToNode(cfg, partition.getMasterAddress(), configEndpointHostName);
        return connectionFuture.thenCompose(connection -> {
            MasterSlaveServersConfig config = create(cfg);
            config.setMasterAddress(partition.getMasterAddress().toString());
            MasterSlaveEntry entry;
            if (config.isSlaveNotUsed()) {
                // no slave balancing required in this mode
                entry = new SingleEntry(this, config);
            } else {
                // register only slaves not currently flagged as failed
                Set<String> slaveAddresses = partition.getSlaveAddresses().stream()
                        .filter(r -> !partition.getFailedSlaveAddresses().contains(r))
                        .map(r -> r.toString())
                        .collect(Collectors.toSet());
                config.setSlaveAddresses(slaveAddresses);
                entry = new MasterSlaveEntry(ClusterConnectionManager.this, config);
            }
            CompletableFuture<RedisClient> f = entry.setupMasterEntry(new RedisURI(config.getMasterAddress()), configEndpointHostName);
            return f.thenCompose(masterClient -> {
                // publish the slot -> entry mappings for every slot this master serves
                for (Integer slot : partition.getSlots()) {
                    addEntry(slot, entry);
                    addPartition(slot, partition);
                }
                if (partition.getSlotsAmount() > 0) {
                    lastUri2Partition.put(partition.getMasterAddress(), partition);
                }
                if (!config.isSlaveNotUsed()) {
                    CompletableFuture<Void> fs = entry.initSlaveBalancer(r -> configEndpointHostName);
                    return fs.thenAccept(r -> {
                        if (!partition.getSlaveAddresses().isEmpty()) {
                            log.info("slaves: {} added for master: {} slot ranges: {}",
                                    partition.getSlaveAddresses(), partition.getMasterAddress(), partition.getSlotRanges());
                            if (!partition.getFailedSlaveAddresses().isEmpty()) {
                                log.warn("slaves: {} down for master: {} slot ranges: {}",
                                        partition.getFailedSlaveAddresses(), partition.getMasterAddress(), partition.getSlotRanges());
                            }
                        }
                        log.info("master: {} added for slot ranges: {}", partition.getMasterAddress(), partition.getSlotRanges());
                    });
                }
                log.info("master: {} added for slot ranges: {}", partition.getMasterAddress(), partition.getSlotRanges());
                return CompletableFuture.completedFuture(null);
            });
        });
    }
private void addPartition(Integer slot, ClusterPartition partition) {
partition.incReference();
ClusterPartition prevPartiton = lastPartitions.put(slot, partition);
if (prevPartiton != null
&& prevPartiton.decReference() == 0) {
lastUri2Partition.remove(prevPartiton.getMasterAddress());
}
}
    // Schedules the next topology poll after cfg.getScanInterval() milliseconds.
    // With a config endpoint hostname, its DNS entry is re-resolved so newly appearing
    // nodes are discovered; otherwise the currently known non-failed masters are probed
    // first, followed by live slaves, both in randomized order.
    private void scheduleClusterChangeCheck(ClusterServersConfig cfg) {
        monitorFuture = serviceManager.newTimeout(t -> {
            if (configEndpointHostName != null) {
                String address = cfg.getNodeAddresses().iterator().next();
                RedisURI uri = new RedisURI(address);
                CompletableFuture<List<RedisURI>> allNodes = serviceManager.resolveAll(uri);
                allNodes.whenComplete((nodes, ex) -> {
                    log.debug("{} resolved to {}", uri, nodes);
                    AtomicReference<Throwable> lastException = new AtomicReference<>(ex);
                    if (ex != null) {
                        // resolution failed: report and reschedule via the regular path
                        checkClusterState(cfg, Collections.emptyIterator(), lastException, nodes);
                        return;
                    }
                    Iterator<RedisURI> nodesIterator = nodes.iterator();
                    checkClusterState(cfg, nodesIterator, lastException, nodes);
                });
            } else {
                AtomicReference<Throwable> lastException = new AtomicReference<>();
                List<RedisURI> nodes = new ArrayList<>();
                List<RedisURI> slaves = new ArrayList<>();
                for (ClusterPartition partition : getLastPartitions()) {
                    if (!partition.isMasterFail()) {
                        nodes.add(partition.getMasterAddress());
                    }
                    // probe only slaves not currently flagged as failed
                    Set<RedisURI> partitionSlaves = new HashSet<>(partition.getSlaveAddresses());
                    partitionSlaves.removeAll(partition.getFailedSlaveAddresses());
                    slaves.addAll(partitionSlaves);
                }
                // randomize to spread the probing load across the cluster
                Collections.shuffle(nodes);
                Collections.shuffle(slaves);
                // master nodes first
                nodes.addAll(slaves);
                Iterator<RedisURI> nodesIterator = nodes.iterator();
                checkClusterState(cfg, nodesIterator, lastException, nodes);
            }
        }, cfg.getScanInterval(), TimeUnit.MILLISECONDS);
    }
    // Tries each candidate node in turn until one yields a usable connection for a
    // topology refresh. When the iterator is exhausted, logs the accumulated failure
    // (if any) and schedules the next scan.
    private void checkClusterState(ClusterServersConfig cfg, Iterator<RedisURI> iterator, AtomicReference<Throwable> lastException, List<RedisURI> allNodes) {
        if (!iterator.hasNext()) {
            if (lastException.get() != null) {
                log.error("Can't update cluster state using nodes: {}. A new attempt will be made.", allNodes, lastException.getAndSet(null));
            }
            scheduleClusterChangeCheck(cfg);
            return;
        }
        if (serviceManager.isShuttingDown()) {
            return;
        }
        RedisURI uri = iterator.next();
        CompletionStage<RedisConnection> connectionFuture = connectToNode(cfg, uri, configEndpointHostName);
        connectionFuture.whenComplete((connection, e) -> {
            if (e != null) {
                // keep the first failure, attach subsequent ones as suppressed
                if (!lastException.compareAndSet(null, e)) {
                    lastException.get().addSuppressed(e);
                }
                checkClusterState(cfg, iterator, lastException, allNodes);
                return;
            }
            updateClusterState(cfg, connection, iterator, uri, lastException, allNodes);
        });
    }
    // Fetches CLUSTER NODES from the given connection and applies the parsed topology
    // in stages: master changes, then slave changes, then slot migrations/additions/
    // removals. The next scan is always rescheduled at the end, even on failure.
    private void updateClusterState(ClusterServersConfig cfg, RedisConnection connection,
            Iterator<RedisURI> iterator, RedisURI uri, AtomicReference<Throwable> lastException, List<RedisURI> allNodes) {
        RFuture<List<ClusterNodeInfo>> future = connection.async(StringCodec.INSTANCE, clusterNodesCommand);
        future.whenComplete((nodes, e) -> {
            if (e != null) {
                // accumulate failure and fall back to the next candidate node
                if (!lastException.compareAndSet(null, e)) {
                    lastException.get().addSuppressed(e);
                }
                checkClusterState(cfg, iterator, lastException, allNodes);
                return;
            }
            if (nodes.isEmpty()) {
                log.debug("cluster nodes state got from {}: doesn't contain any nodes", connection.getRedisClient().getAddr());
                checkClusterState(cfg, iterator, lastException, allNodes);
                return;
            }
            lastClusterNode = uri;
            if (log.isDebugEnabled()) {
                StringBuilder nodesValue = new StringBuilder();
                for (ClusterNodeInfo clusterNodeInfo : nodes) {
                    nodesValue.append(clusterNodeInfo.getNodeInfo()).append("\n");
                }
                log.debug("Cluster nodes state got from {}:\n{}", connection.getRedisClient().getAddr(), nodesValue);
                serviceManager.setLastClusterNodes(nodesValue.toString());
            }
            CompletableFuture<Collection<ClusterPartition>> newPartitionsFuture = parsePartitions(nodes);
            newPartitionsFuture
                .whenComplete((r, ex) -> {
                    if (ex != null) {
                        StringBuilder nodesValue = new StringBuilder();
                        for (ClusterNodeInfo clusterNodeInfo : nodes) {
                            nodesValue.append(clusterNodeInfo.getNodeInfo()).append("\n");
                        }
                        log.error("Unable to parse cluster nodes state got from: {}:\n{}", connection.getRedisClient().getAddr(), nodesValue, ex);
                        if (!lastException.compareAndSet(null, ex)) {
                            lastException.get().addSuppressed(ex);
                        }
                        checkClusterState(cfg, iterator, lastException, allNodes);
                    }
                })
                // each stage re-reads the parsed partitions from the original future
                .thenCompose(newPartitions -> checkMasterNodesChange(cfg, newPartitions))
                .thenCompose(r -> newPartitionsFuture)
                .thenCompose(newPartitions -> checkSlaveNodesChange(newPartitions))
                .thenCompose(r -> newPartitionsFuture)
                .whenComplete((newPartitions, ex) -> {
                    if (newPartitions != null
                            && !newPartitions.isEmpty()) {
                        try {
                            checkSlotsMigration(newPartitions);
                            checkSlotsChange(newPartitions);
                        } catch (Exception exc) {
                            log.error(exc.getMessage(), exc);
                        }
                    }
                    if (ex != null) {
                        log.error(ex.getMessage(), ex);
                    }
                    // always keep the periodic scan alive
                    scheduleClusterChangeCheck(cfg);
                });
        });
    }
    // For every partition whose master we already track, reconciles its slave set with
    // the fresh topology (add/remove), then re-enables slaves that recovered from a
    // failed state.
    private CompletableFuture<Void> checkSlaveNodesChange(Collection<ClusterPartition> newPartitions) {
        List<CompletableFuture<?>> futures = new ArrayList<>();
        for (ClusterPartition newPart : newPartitions) {
            ClusterPartition currentPart = lastUri2Partition.get(newPart.getMasterAddress());
            if (currentPart == null) {
                // unknown master: handled by checkMasterNodesChange instead
                continue;
            }
            MasterSlaveEntry entry = getEntry(currentPart.getSlotRanges().iterator().next().getStartSlot());
            // should be invoked first in order to remove stale failedSlaveAddresses
            CompletableFuture<Set<RedisURI>> addedSlavesFuture = addRemoveSlaves(entry, currentPart, newPart);
            CompletableFuture<Void> f = addedSlavesFuture.thenCompose(addedSlaves -> {
                // Have some slaves changed state from failed to alive?
                return upDownSlaves(entry, currentPart, newPart, addedSlaves);
            });
            futures.add(f);
        }
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                .exceptionally(e -> {
                    if (e != null) {
                        log.error("Unable to add/remove slave nodes", e);
                    }
                    return null;
                });
    }
    // Applies slave state transitions found in the fresh topology:
    //  - slaves previously marked failed that are no longer failed (and were not just
    //    re-added by addRemoveSlaves) are brought back up;
    //  - slaves newly flagged as failed are taken down and disconnected.
    private CompletableFuture<Void> upDownSlaves(MasterSlaveEntry entry, ClusterPartition currentPart, ClusterPartition newPart, Set<RedisURI> addedSlaves) {
        List<CompletableFuture<?>> futures = new ArrayList<>();
        List<RedisURI> nonFailedSlaves = currentPart.getFailedSlaveAddresses().stream()
                .filter(uri -> !addedSlaves.contains(uri) && !newPart.getFailedSlaveAddresses().contains(uri))
                .collect(Collectors.toList());
        nonFailedSlaves.forEach(uri -> {
            if (entry.hasSlave(uri)) {
                CompletableFuture<Boolean> f = entry.slaveUpNoMasterExclusionAsync(uri);
                f = f.thenApply(v -> {
                    // v == true only when the slave actually transitioned to up
                    if (v) {
                        log.info("slave: {} is up for slot ranges: {}", uri, currentPart.getSlotRanges());
                        currentPart.removeFailedSlaveAddress(uri);
                        entry.excludeMasterFromSlaves(uri);
                    }
                    return v;
                });
                futures.add(f);
            }
        });
        // slaves that just became failed in the new topology
        newPart.getFailedSlaveAddresses().stream()
                .filter(uri -> !currentPart.getFailedSlaveAddresses().contains(uri))
                .forEach(uri -> {
                    currentPart.addFailedSlaveAddress(uri);
                    boolean slaveDown = entry.slaveDown(uri);
                    if (config.isSlaveNotUsed() || slaveDown) {
                        disconnectNode(uri);
                        log.warn("slave: {} has down for slot ranges: {}", uri, currentPart.getSlotRanges());
                    }
                });
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
    }
private CompletableFuture<Set<RedisURI>> addRemoveSlaves(MasterSlaveEntry entry, ClusterPartition currentPart, ClusterPartition newPart) {
Set<RedisURI> removedSlaves = new HashSet<>(currentPart.getSlaveAddresses());
removedSlaves.removeAll(newPart.getSlaveAddresses());
if (!removedSlaves.isEmpty()) {
log.info("removed slaves detected for master {}. current slaves {} last slaves {}",
currentPart.getMasterAddress(), currentPart.getSlaveAddresses(), newPart.getSlaveAddresses());
}
for (RedisURI uri : removedSlaves) {
currentPart.removeSlaveAddress(uri);
boolean slaveDown = entry.slaveDown(uri);
if (config.isSlaveNotUsed() || slaveDown) {
disconnectNode(uri);
log.info("slave {} removed for master {} and slot ranges: {}",
currentPart.getMasterAddress(), uri, currentPart.getSlotRanges());
}
}
Set<RedisURI> addedSlaves = newPart.getSlaveAddresses().stream()
.filter(uri -> (!currentPart.getSlaveAddresses().contains(uri)
&& !newPart.getFailedSlaveAddresses().contains(uri))
|| (currentPart.getSlaveAddresses().contains(uri)
&& currentPart.getFailedSlaveAddresses().contains(uri)
&& !newPart.getFailedSlaveAddresses().contains(uri)
&& !entry.hasSlave(uri))
)
.collect(Collectors.toSet());
if (!addedSlaves.isEmpty()) {
log.info("added slaves detected for master {}. current slaves {} last slaves {} last failed slaves {}",
currentPart.getMasterAddress(), currentPart.getSlaveAddresses(),
newPart.getSlaveAddresses(), newPart.getFailedSlaveAddresses());
}
List<CompletableFuture<?>> futures = new ArrayList<>();
for (RedisURI uri : addedSlaves) {
ClientConnectionsEntry slaveEntry = entry.getEntry(uri);
if (slaveEntry != null) {
CompletableFuture<Boolean> slaveUpFuture = entry.slaveUpNoMasterExclusionAsync(uri);
slaveUpFuture = slaveUpFuture.thenApply(v -> {
if (v) {
currentPart.addSlaveAddress(uri);
currentPart.removeFailedSlaveAddress(uri);
log.info("slave: {} unfreezed for master {} and slot ranges: {}",
currentPart.getMasterAddress(), uri, currentPart.getSlotRanges());
entry.excludeMasterFromSlaves(uri);
}
return v;
});
futures.add(slaveUpFuture);
continue;
}
if (config.isSlaveNotUsed()) {
continue;
}
CompletableFuture<Void> slaveUpFuture = entry.addSlave(uri, configEndpointHostName);
CompletableFuture<Void> f = slaveUpFuture.thenAccept(res -> {
currentPart.addSlaveAddress(uri);
currentPart.removeFailedSlaveAddress(uri);
log.info("slave: {} added for master {} and slot ranges: {}",
currentPart.getMasterAddress(), uri, currentPart.getSlotRanges());
entry.excludeMasterFromSlaves(uri);
});
futures.add(f);
}
CompletableFuture<Void> f = CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
return f.thenApply(r -> addedSlaves);
}
private ClusterPartition find(Collection<ClusterPartition> partitions, Integer slot) {
return partitions.stream().filter(p -> p.hasSlot(slot)).findFirst().orElseThrow(() -> {
return new IllegalStateException("Unable to find partition with slot " + slot);
});
}
    // Handles master-level topology changes: for tracked masters now flagged FAIL whose
    // slots moved to a different master, switches the affected slots over (rolling back
    // the recorded address on failure); brand-new masters that were not elected as
    // replacements get fresh entries via addMasterEntry.
    private CompletableFuture<Void> checkMasterNodesChange(ClusterServersConfig cfg, Collection<ClusterPartition> newPartitions) {
        Map<RedisURI, ClusterPartition> addedPartitions = new HashMap<>();
        Set<RedisURI> mastersElected = new HashSet<>();
        List<CompletableFuture<?>> futures = new ArrayList<>();
        for (ClusterPartition newPart : newPartitions) {
            if (newPart.getSlotsAmount() == 0) {
                continue;
            }
            ClusterPartition currentPart = lastUri2Partition.get(newPart.getMasterAddress());
            boolean masterFound = currentPart != null;
            if (masterFound && newPart.isMasterFail()) {
                for (Integer slot : currentPart.getSlots()) {
                    ClusterPartition newMasterPart = find(newPartitions, slot);
                    // does partition have a new master?
                    if (!Objects.equals(newMasterPart.getMasterAddress(), currentPart.getMasterAddress())) {
                        RedisURI newUri = newMasterPart.getMasterAddress();
                        RedisURI oldUri = currentPart.getMasterAddress();
                        mastersElected.add(newUri);
                        CompletableFuture<RedisClient> future = changeMaster(slot, newUri);
                        // optimistically record the new master; rolled back below on failure
                        currentPart.setMasterAddress(newUri);
                        CompletableFuture<RedisClient> f = future.whenComplete((res, e) -> {
                            if (e != null) {
                                currentPart.setMasterAddress(oldUri);
                            } else {
                                disconnectNode(oldUri);
                            }
                        });
                        futures.add(f);
                    }
                }
            }
            if (!masterFound && !newPart.isMasterFail()) {
                addedPartitions.put(newPart.getMasterAddress(), newPart);
            }
        }
        // masters already wired in as replacements above must not be added twice
        addedPartitions.keySet().removeAll(mastersElected);
        for (ClusterPartition newPart : addedPartitions.values()) {
            CompletionStage<Void> future = addMasterEntry(newPart, cfg);
            futures.add(future.toCompletableFuture());
        }
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                .exceptionally(e -> {
                    if (e != null) {
                        log.error("Unable to add/change master node", e);
                    }
                    return null;
                });
    }
private void checkSlotsChange(Collection<ClusterPartition> newPartitions) {
int newSlotsAmount = newPartitions.stream()
.mapToInt(ClusterPartition::getSlotsAmount)
.sum();
if (newSlotsAmount == lastPartitions.size() && lastPartitions.size() == MAX_SLOT) {
return;
}
Set<Integer> removedSlots = lastPartitions.keySet().stream()
.filter(s -> newPartitions.stream().noneMatch(p -> p.hasSlot(s)))
.collect(Collectors.toSet());
for (Integer slot : removedSlots) {
ClusterPartition p = lastPartitions.remove(slot);
if (p != null
&& p.decReference() == 0
&& lastUri2Partition.size() > 1) {
lastUri2Partition.remove(p.getMasterAddress());
}
removeEntry(slot);
}
if (!removedSlots.isEmpty()) {
log.info("{} slots removed", removedSlots.size());
}
Integer addedSlots = 0;
for (ClusterPartition clusterPartition : newPartitions) {
MasterSlaveEntry entry = getEntry(clusterPartition.getMasterAddress());
for (Integer slot : clusterPartition.getSlots()) {
if (lastPartitions.containsKey(slot)) {
continue;
}
if (entry != null) {
addEntry(slot, entry);
addPartition(slot, clusterPartition);
lastUri2Partition.put(clusterPartition.getMasterAddress(), clusterPartition);
addedSlots++;
}
}
}
if (addedSlots > 0) {
log.info("{} slots added", addedSlots);
}
}
    // Detects slots migrated between nodes we already track (same node id, changed
    // slot ranges), updates slot mappings in both directions and re-attaches pub/sub
    // listeners for every slot whose owner changed.
    private void checkSlotsMigration(Collection<ClusterPartition> newPartitions) {
        Collection<ClusterPartition> clusterLastPartitions = getLastPartitions();
        // https://github.com/redisson/redisson/issues/3635
        Map<String, MasterSlaveEntry> nodeEntries = clusterLastPartitions.stream()
                .collect(Collectors.toMap(p -> p.getNodeId(),
                        p -> getEntry(p.getSlotRanges().iterator().next().getStartSlot())));
        Set<Integer> changedSlots = new HashSet<>();
        for (ClusterPartition currentPartition : clusterLastPartitions) {
            String nodeId = currentPartition.getNodeId();
            // match by node id; skip partitions whose slot ranges are unchanged
            for (ClusterPartition newPartition : newPartitions) {
                if (!Objects.equals(nodeId, newPartition.getNodeId())
                        || newPartition.getSlotRanges().equals(currentPartition.getSlotRanges())) {
                    continue;
                }
                MasterSlaveEntry entry = nodeEntries.get(nodeId);
                // slots present in the new state but not the current one migrated IN
                BitSet addedSlots = newPartition.copySlots();
                addedSlots.andNot(currentPartition.slots());
                addedSlots.stream().forEach(slot -> {
                    addEntry(slot, entry);
                    addPartition(slot, currentPartition);
                    changedSlots.add(slot);
                });
                if (!addedSlots.isEmpty()) {
                    lastUri2Partition.put(currentPartition.getMasterAddress(), currentPartition);
                    log.info("{} slots added to {}", addedSlots.cardinality(), currentPartition.getMasterAddress());
                }
                // slots present in the current state but not the new one migrated OUT
                BitSet removedSlots = currentPartition.copySlots();
                removedSlots.andNot(newPartition.slots());
                removedSlots.stream().forEach(slot -> {
                    if (lastPartitions.remove(slot, currentPartition)) {
                        if (currentPartition.decReference() == 0
                                && lastUri2Partition.size() > 1) {
                            lastUri2Partition.remove(currentPartition.getMasterAddress());
                        }
                        removeEntry(slot, entry);
                        changedSlots.add(slot);
                    }
                });
                if (!removedSlots.isEmpty()) {
                    log.info("{} slots removed from {}", removedSlots.cardinality(), currentPartition.getMasterAddress());
                }
                if (!addedSlots.isEmpty() || !removedSlots.isEmpty()) {
                    // https://github.com/redisson/redisson/issues/3695, slotRanges not update when slots of node changed.
                    currentPartition.updateSlotRanges(newPartition.getSlotRanges(), newPartition.slots());
                }
                break;
            }
        }
        // re-subscribe pub/sub listeners on slots whose owner changed
        changedSlots.forEach(subscribeService::reattachPubSub);
    }
private int indexOf(byte[] array, byte element) {
for (int i = 0; i < array.length; ++i) {
if (array[i] == element) {
return i;
}
}
return -1;
}
    /**
     * Resolves the Redis Cluster slot for a binary key. Honours hash tags: when the
     * key contains a non-empty "{...}" section, only the bytes between the braces
     * are hashed (CRC16 modulo MAX_SLOT). A null key maps to slot 0.
     */
    @Override
    public int calcSlot(byte[] key) {
        if (key == null) {
            return 0;
        }
        int start = indexOf(key, (byte) '{');
        if (start != -1) {
            int end = indexOf(key, (byte) '}');
            // at least one byte must sit between '{' and '}' for the tag to count
            if (end != -1 && start + 1 < end) {
                key = Arrays.copyOfRange(key, start + 1, end);
            }
        }
        int result = CRC16.crc16(key) % MAX_SLOT;
        return result;
    }
    /**
     * Resolves the Redis Cluster slot for a ByteBuf key. Honours hash tags: when a
     * non-empty "{...}" section exists within the readable bytes, only that slice
     * is hashed. A null key maps to slot 0.
     */
    @Override
    public int calcSlot(ByteBuf key) {
        if (key == null) {
            return 0;
        }
        // search only within the readable region of the buffer
        int start = key.indexOf(key.readerIndex(), key.readerIndex() + key.readableBytes(), (byte) '{');
        if (start != -1) {
            int end = key.indexOf(start + 1, key.readerIndex() + key.readableBytes(), (byte) '}');
            // at least one byte must sit between the braces for the tag to count
            if (end != -1 && start + 1 < end) {
                // slice shares the underlying buffer; no copy is made
                key = key.slice(start + 1, end-start - 1);
            }
        }
        int result = CRC16.crc16(key) % MAX_SLOT;
        log.debug("slot {} for {}", result, key);
        return result;
    }
@Override
public int calcSlot(String key) {
if (key == null) {
return 0;
}
int start = key.indexOf('{');
if (start != -1) {
int end = key.indexOf('}');
if (end != -1 && start + 1 < end) {
key = key.substring(start + 1, end);
}
}
int result = CRC16.crc16(key.getBytes()) % MAX_SLOT;
log.debug("slot {} for {}", result, key);
return result;
}
    // Converts raw CLUSTER NODES entries into ClusterPartition objects: resolves node
    // hostnames, links slaves to their masters, optionally verifies each slave's
    // master_link_status, and finally returns only usable MASTER partitions.
    private CompletableFuture<Collection<ClusterPartition>> parsePartitions(List<ClusterNodeInfo> nodes) {
        Map<String, ClusterPartition> partitions = new ConcurrentHashMap<>();
        List<CompletableFuture<Void>> futures = new ArrayList<>();
        for (ClusterNodeInfo clusterNodeInfo : nodes) {
            // ignore address-less, handshaking, and slot-less master nodes
            if (clusterNodeInfo.containsFlag(Flag.NOADDR)
                    || clusterNodeInfo.containsFlag(Flag.HANDSHAKE)
                        || clusterNodeInfo.getAddress() == null
                            || (clusterNodeInfo.getSlotRanges().isEmpty() && clusterNodeInfo.containsFlag(Flag.MASTER))) {
                // skip it
                continue;
            }
            // a slave is grouped under its master's node id
            String masterId;
            if (clusterNodeInfo.containsFlag(Flag.SLAVE)) {
                masterId = clusterNodeInfo.getSlaveOf();
            } else {
                masterId = clusterNodeInfo.getNodeId();
            }
            if (masterId == null) {
                // skip it
                continue;
            }
            // prefer the announced hostname over the raw address when present
            RedisURI uri;
            if (clusterNodeInfo.getHostName() != null) {
                uri = new RedisURI(clusterNodeInfo.getAddress().getScheme() + "://" + clusterNodeInfo.getHostName() +
                        ":" + clusterNodeInfo.getAddress().getPort());
            } else {
                uri = clusterNodeInfo.getAddress();
            }
            CompletableFuture<List<RedisURI>> ipsFuture = serviceManager.resolveAll(uri);
            CompletableFuture<Void> f = ipsFuture.handle((r, ex) -> {
                if (ex != null) {
                    // DNS resolution failed: fall back to the address the node reported
                    RedisURI mappedUri = serviceManager.toURI(clusterNodeInfo.getAddress().getScheme(), clusterNodeInfo.getAddress().getHost(), "" + clusterNodeInfo.getAddress().getPort());
                    return Collections.singletonList(mappedUri);
                }
                return r;
            })
            .thenCompose(addresses -> {
                int index = 0;
                if (addresses.size() > 1) {
                    addresses.sort(Comparator.comparing(RedisURI::getHost));
                }
                RedisURI address = addresses.get(index);
                if (configEndpointPassword != null) {
                    address = new RedisURI(address.getScheme() + "://" + configEndpointPassword + "@" + address.getHost() + ":" + address.getPort());
                }
                if (addresses.size() > 1) {
                    // prefer the resolved address already associated with this node id
                    for (RedisURI addr : addresses) {
                        for (ClusterPartition value : lastUri2Partition.values()) {
                            if (value.getNodeId().equals(clusterNodeInfo.getNodeId())
                                    && value.getMasterAddress().equals(addr)) {
                                address = addr;
                                break;
                            }
                        }
                    }
                }
                if (addresses.size() == 1) {
                    if (!uri.equals(address)) {
                        log.debug("{} resolved to {}", uri, address);
                    }
                } else {
                    log.debug("{} resolved to {} and {} selected", uri, addresses, address);
                }
                if (clusterNodeInfo.containsFlag(Flag.SLAVE)) {
                    // link the slave partition to its master's partition
                    ClusterPartition masterPartition = partitions.computeIfAbsent(masterId, k -> new ClusterPartition(masterId));
                    ClusterPartition slavePartition = partitions.computeIfAbsent(clusterNodeInfo.getNodeId(),
                            k -> new ClusterPartition(clusterNodeInfo.getNodeId()));
                    slavePartition.setType(Type.SLAVE);
                    slavePartition.setParent(masterPartition);
                    masterPartition.addSlaveAddress(address);
                    if (clusterNodeInfo.containsFlag(Flag.FAIL)) {
                        masterPartition.addFailedSlaveAddress(address);
                    }
                    if (cfg.isCheckMasterLinkStatus()) {
                        // probe INFO REPLICATION to catch slaves disconnected from their master
                        CompletionStage<RedisConnection> connectionFuture = connectToNode(cfg, address, configEndpointHostName);
                        RedisURI finalAddress = address;
                        return connectionFuture.thenCompose(con -> {
                            RFuture<Map<String, String>> future = con.async(StringCodec.INSTANCE, RedisCommands.INFO_REPLICATION);
                            return future.handle((info, ex) -> {
                                if (ex != null) {
                                    if (ex instanceof RedisTimeoutException) {
                                        // a timeout is inconclusive; don't mark the slave failed
                                        return null;
                                    }
                                    throw new CompletionException(ex);
                                }
                                String masterLinkStatus = info.getOrDefault("master_link_status", "");
                                if ("down".equals(masterLinkStatus)) {
                                    masterPartition.addFailedSlaveAddress(finalAddress);
                                }
                                return null;
                            });
                        });
                    }
                    return CompletableFuture.<Void>completedFuture(null);
                } else if (clusterNodeInfo.containsFlag(Flag.MASTER)) {
                    ClusterPartition masterPartition = partitions.computeIfAbsent(masterId, k -> new ClusterPartition(masterId));
                    masterPartition.setSlotRanges(clusterNodeInfo.getSlotRanges());
                    masterPartition.setMasterAddress(address);
                    masterPartition.setType(Type.MASTER);
                    if (clusterNodeInfo.containsFlag(Flag.FAIL)) {
                        masterPartition.setMasterFail(true);
                    }
                }
                return CompletableFuture.<Void>completedFuture(null);
            }).exceptionally(ex -> {
                if (clusterNodeInfo.containsFlag(Flag.FAIL)
                        || clusterNodeInfo.containsFlag(Flag.EVENTUAL_FAIL)) {
                    // failures against already-failed nodes are expected; stay quiet
                    return null;
                }
                log.error(ex.getMessage(), ex);
                return null;
            });
            futures.add(f);
        }
        CompletableFuture<Void> future = CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
        return future.thenApply(r -> {
            // fold slaves-of-slaves into their master partitions and drop slave entries
            addCascadeSlaves(partitions.values());
            List<ClusterPartition> ps = partitions.values()
                    .stream()
                    .filter(cp -> cp.getType() == Type.MASTER
                            && cp.getMasterAddress() != null
                                && ((!cp.slots().isEmpty() && partitions.size() == 1) || partitions.size() > 1))
                    .collect(Collectors.toList());
            return ps;
        });
    }
private void addCascadeSlaves(Collection<ClusterPartition> partitions) {
Iterator<ClusterPartition> iter = partitions.iterator();
while (iter.hasNext()) {
ClusterPartition cp = iter.next();
if (cp.getType() != Type.SLAVE) {
continue;
}
if (cp.getParent() != null && cp.getParent().getType() == Type.MASTER) {
ClusterPartition parent = cp.getParent();
for (RedisURI addr : cp.getSlaveAddresses()) {
parent.addSlaveAddress(addr);
}
for (RedisURI addr : cp.getFailedSlaveAddresses()) {
parent.addFailedSlaveAddress(addr);
}
}
iter.remove();
}
}
    /**
     * Stops the periodic topology scan (if scheduled), closes all node connections
     * and delegates the remaining shutdown work to the base class.
     */
    @Override
    public void shutdown(long quietPeriod, long timeout, TimeUnit unit) {
        if (monitorFuture != null) {
            monitorFuture.cancel();
        }
        closeNodeConnections();
        super.shutdown(quietPeriod, timeout, unit);
    }
private Collection<ClusterPartition> getLastPartitions() {
return lastUri2Partition.values().stream().collect(Collectors.toMap(e -> e.getNodeId(), Function.identity(),
BinaryOperator.maxBy(Comparator.comparing(e -> e.getTime())))).values();
}
public int getSlot(MasterSlaveEntry entry) {
return lastPartitions.entrySet().stream()
.filter(e -> e.getValue().getMasterAddress().equals(entry.getClient().getConfig().getAddress()))
.findAny()
.map(m -> m.getKey())
.orElse(-1);
}
    /** Returns the node from which the latest successful CLUSTER NODES snapshot was taken. */
    @Override
    public RedisURI getLastClusterNode() {
        return lastClusterNode;
    }
}
| ClusterConnectionManager |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/internal/Lists.java | {
"start": 2182,
"end": 5583
} | class ____ {
private static final Lists INSTANCE = new Lists();
/**
* Returns the singleton instance of this class.
* @return the singleton instance of this class.
*/
  public static Lists instance() {
    // singleton configured with the standard (equals-based) comparison strategy
    return INSTANCE;
  }
private final ComparisonStrategy comparisonStrategy;
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
Failures failures = Failures.instance();
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
  // Default construction: uses the standard equals()-based comparison strategy.
  Lists() {
    this(StandardComparisonStrategy.instance());
  }
  // Allows callers to supply a custom comparison strategy (e.g. comparator-based).
  public Lists(ComparisonStrategy comparisonStrategy) {
    this.comparisonStrategy = comparisonStrategy;
  }
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
public Comparator<?> getComparator() {
return comparisonStrategy instanceof ComparatorBasedComparisonStrategy strategy ? strategy.getComparator() : null;
}
/**
* Verifies that the given {@code List} contains the given object at the given index.
* @param info contains information about the assertion.
* @param actual the given {@code List}.
* @param value the object to look for.
* @param index the index where the object should be stored in the given {@code List}.
* @throws AssertionError if the given {@code List} is {@code null} or empty.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws IndexOutOfBoundsException if the value of the given {@code Index} is equal to or greater than the size of the given
* {@code List}.
* @throws AssertionError if the given {@code List} does not contain the given object at the given index.
*/
  public void assertContains(AssertionInfo info, List<?> actual, Object value, Index index) {
    assertNotNull(info, actual);
    Iterables.instance().assertNotEmpty(info, actual);
    // index must lie within [0, size - 1]
    checkIndexValueIsValid(index, actual.size() - 1);
    Object actualElement = actual.get(index.value);
    // equality is evaluated through the configured comparison strategy
    if (areEqual(actualElement, value)) return;
    throw failures.failure(info, shouldContainAtIndex(actual, value, index, actual.get(index.value), comparisonStrategy));
  }
/**
* Verifies that the given {@code List} does not contain the given object at the given index.
* @param info contains information about the assertion.
* @param actual the given {@code List}.
* @param value the object to look for.
* @param index the index where the object should be stored in the given {@code List}.
* @throws AssertionError if the given {@code List} is {@code null}.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws AssertionError if the given {@code List} contains the given object at the given index.
*/
  public void assertDoesNotContain(AssertionInfo info, List<?> actual, Object value, Index index) {
    assertNotNull(info, actual);
    // any non-negative index is acceptable here; out-of-range simply cannot match
    checkIndexValueIsValid(index, Integer.MAX_VALUE);
    int indexValue = index.value;
    // an index beyond the list size trivially satisfies "does not contain"
    if (indexValue >= actual.size()) return;
    Object actualElement = actual.get(index.value);
    // equality is evaluated through the configured comparison strategy
    if (!areEqual(actualElement, value)) return;
    throw failures.failure(info, shouldNotContainAtIndex(actual, value, index, comparisonStrategy));
  }
/**
* Verifies that the actual list is sorted in ascending order according to the natural ordering of its elements.
* <p>
* All list elements must implement the {@link Comparable} | Lists |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/AllowJNDIBuildItem.java | {
"start": 333,
"end": 385
} | class ____ extends MultiBuildItem {
}
| AllowJNDIBuildItem |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/message/ParameterFormatter.java | {
"start": 1388,
"end": 5905
} | class ____ {
/**
* Prefix for recursion.
*/
static final String RECURSION_PREFIX = "[...";
/**
* Suffix for recursion.
*/
static final String RECURSION_SUFFIX = "...]";
/**
* Prefix for errors.
*/
static final String ERROR_PREFIX = "[!!!";
/**
* Separator for errors.
*/
static final String ERROR_SEPARATOR = "=>";
/**
* Separator for error messages.
*/
static final String ERROR_MSG_SEPARATOR = ":";
/**
* Suffix for errors.
*/
static final String ERROR_SUFFIX = "!!!]";
    // Argument placeholder delimiters: a placeholder is the two-character sequence "{}".
    private static final char DELIM_START = '{';
    private static final char DELIM_STOP = '}';
    // A backslash escapes a following delimiter character.
    private static final char ESCAPE_CHAR = '\\';
    // Shared timestamp formatter; DateTimeFormatter is immutable and thread-safe.
    // NOTE(review): its usage is outside this chunk — presumably for date-typed arguments.
    private static final DateTimeFormatter DATE_FORMATTER =
            DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ").withZone(ZoneId.systemDefault());
    private static final Logger STATUS_LOGGER = StatusLogger.getLogger();
    // Static utility holder: not meant to be instantiated.
    private ParameterFormatter() {}
/**
* Analyzes – finds argument placeholder (i.e., {@literal "{}"}) occurrences, etc. – the given message pattern.
* <p>
* Only {@literal "{}"} strings are treated as argument placeholders.
* Escaped or incomplete argument placeholders will be ignored.
* Some invalid argument placeholder examples:
* </p>
* <pre>
* { }
* foo\{}
* {bar
* {buzz}
* </pre>
*
* @param pattern a message pattern to be analyzed
* @param argCount
* The number of arguments to be formatted.
* For instance, for a parametrized message containing 7 placeholders in the pattern and 4 arguments for formatting, analysis will only need to store the index of the first 4 placeholder characters.
* A negative value indicates no limit.
* @return the analysis result
*/
static MessagePatternAnalysis analyzePattern(final String pattern, final int argCount) {
MessagePatternAnalysis analysis = new MessagePatternAnalysis();
analyzePattern(pattern, argCount, analysis);
return analysis;
}
/**
* Analyzes – finds argument placeholder (i.e., {@literal "{}"}) occurrences, etc. – the given message pattern.
* <p>
* Only {@literal "{}"} strings are treated as argument placeholders.
* Escaped or incomplete argument placeholders will be ignored.
* Some invalid argument placeholder examples:
* </p>
* <pre>
* { }
* foo\{}
* {bar
* {buzz}
* </pre>
*
* @param pattern a message pattern to be analyzed
* @param argCount
* The number of arguments to be formatted.
* For instance, for a parametrized message containing 7 placeholders in the pattern and 4 arguments for formatting, analysis will only need to store the index of the first 4 placeholder characters.
* A negative value indicates no limit.
* @param analysis an object to store the results
*/
    static void analyzePattern(final String pattern, final int argCount, final MessagePatternAnalysis analysis) {
        // Short-circuit if there is nothing interesting
        final int l;
        if (pattern == null || (l = pattern.length()) < 2) {
            analysis.placeholderCount = 0;
            return;
        }
        // Count `{}` occurrences that is not escaped, i.e., not `\`-prefixed
        boolean escaped = false;
        analysis.placeholderCount = 0;
        analysis.escapedCharFound = false;
        // iterate to l-1 since a placeholder needs two characters (peeks at i+1)
        for (int i = 0; i < (l - 1); i++) {
            final char c = pattern.charAt(i);
            if (c == ESCAPE_CHAR) {
                analysis.escapedCharFound = true;
                // toggling handles runs of backslashes: "\\\\{}" is NOT escaped
                escaped = !escaped;
            } else {
                if (escaped) {
                    // current char consumed the pending escape
                    escaped = false;
                } else if (c == DELIM_START && pattern.charAt(i + 1) == DELIM_STOP) {
                    if (argCount < 0 || analysis.placeholderCount < argCount) {
                        // record the '{' index and skip past the '}'
                        analysis.ensurePlaceholderCharIndicesCapacity(argCount);
                        analysis.placeholderCharIndices[analysis.placeholderCount++] = i++;
                    }
                    // `argCount` is exceeded, skip storing the index
                    else {
                        analysis.placeholderCount++;
                        i++;
                    }
                }
            }
        }
    }
/**
*See {@link #analyzePattern(String, int, MessagePatternAnalysis)}.
*
*/
static final | ParameterFormatter |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnyBinder.java | {
"start": 1376,
"end": 6278
} | class ____ {
	/**
	 * Binds a property annotated {@code @Any} to the given {@link PropertyHolder}:
	 * validates the mapping annotations, resolves cascade and on-delete settings,
	 * registers a secondary-table join when present, and delegates to the private
	 * overload for the actual value/property construction.
	 */
	static void bindAny(
			PropertyHolder propertyHolder,
			Nullability nullability,
			PropertyData inferredData,
			EntityBinder entityBinder,
			boolean isIdentifierMapper,
			MetadataBuildingContext context,
			AnnotatedJoinColumns joinColumns) {
		final var memberDetails = inferredData.getAttributeMember();
		//check validity
		// @Columns is disallowed on @Any: the discriminator must be a single
		// @Column/@Formula, and the foreign key mapped with @JoinColumn(s)
		if ( memberDetails.hasDirectAnnotationUsage( Columns.class ) ) {
			throw new AnnotationException(
					String.format(
							Locale.ROOT,
							"Property '%s' is annotated '@Any' and may not have a '@Columns' annotation "
									+ "(a single '@Column' or '@Formula' must be used to map the discriminator, and '@JoinColumn's must be used to map the foreign key) ",
							getPath( propertyHolder, inferredData )
					)
			);
		}
		final var hibernateCascade = memberDetails.getDirectAnnotationUsage( Cascade.class );
		final var onDeleteAnn = memberDetails.getDirectAnnotationUsage( OnDelete.class );
		final var assocTable = propertyHolder.getJoinTable( memberDetails );
		if ( assocTable != null ) {
			// The association lives in a secondary table: register the join and
			// point each foreign-key column at that table explicitly.
			final Join join = propertyHolder.addJoin( assocTable, false );
			for ( AnnotatedJoinColumn joinColumn : joinColumns.getJoinColumns() ) {
				joinColumn.setExplicitTableName( join.getTable().getName() );
			}
		}
		bindAny(
				aggregateCascadeTypes( null, hibernateCascade, false, context ),
				//@Any has no cascade attribute
				joinColumns,
				onDeleteAnn == null ? null : onDeleteAnn.action(),
				nullability,
				propertyHolder,
				inferredData,
				entityBinder,
				isIdentifierMapper,
				context
		);
	}
	/**
	 * Builds the {@link Any} value and its {@link Property} for an {@code @Any}
	 * association and registers the property with the holder.
	 *
	 * @throws AssertionFailure if the member lacks the {@code @Any} annotation
	 *         (the public overload should only be reached for annotated members)
	 */
	private static void bindAny(
			EnumSet<CascadeType> cascadeStrategy,
			AnnotatedJoinColumns columns,
			OnDeleteAction onDeleteAction,
			Nullability nullability,
			PropertyHolder propertyHolder,
			PropertyData inferredData,
			EntityBinder entityBinder,
			boolean isIdentifierMapper,
			MetadataBuildingContext context) {
		final var memberDetails = inferredData.getAttributeMember();
		final var any = memberDetails.getDirectAnnotationUsage( org.hibernate.annotations.Any.class );
		if ( any == null ) {
			throw new AssertionFailure( "Missing @Any annotation: " + getPath( propertyHolder, inferredData ) );
		}
		final boolean lazy = any.fetch() == FetchType.LAZY;
		final boolean optional = any.optional();
		// Construct the polymorphic Any value (discriminator column/formula + FK columns)
		final Any value = BinderHelper.buildAnyValue(
				memberDetails.getDirectAnnotationUsage( Column.class ),
				getOverridableAnnotation( memberDetails, Formula.class, context ),
				columns,
				inferredData,
				onDeleteAction,
				lazy,
				nullability,
				propertyHolder,
				entityBinder,
				optional,
				context
		);
		// Optional strategy for mapping entity types to discriminator values
		// when no explicit @AnyDiscriminatorValue is given
		final var anyDiscriminatorImplicitValues =
				memberDetails.getDirectAnnotationUsage( AnyDiscriminatorImplicitValues.class );
		if ( anyDiscriminatorImplicitValues != null ) {
			value.setImplicitDiscriminatorValueStrategy(
					resolveImplicitDiscriminatorStrategy( anyDiscriminatorImplicitValues, context ) );
		}
		final var binder = new PropertyBinder();
		binder.setName( inferredData.getPropertyName() );
		binder.setValue( value );
		binder.setLazy( lazy );
		//binder.setCascade(cascadeStrategy);
		if ( isIdentifierMapper ) {
			// identifier-mapper properties are read-only projections of the id
			binder.setInsertable( false );
			binder.setUpdatable( false );
		}
		binder.setAccessType( inferredData.getDefaultAccess() );
		binder.setCascade( cascadeStrategy );
		binder.setBuildingContext( context );
		binder.setHolder( propertyHolder );
		binder.setMemberDetails( memberDetails );
		binder.setEntityBinder( entityBinder );
		Property prop = binder.makeProperty();
		prop.setOptional( optional && value.isNullable() );
		//composite FK columns are in the same table, so it's OK
		propertyHolder.addProperty( prop, inferredData.getAttributeMember(), columns, inferredData.getDeclaringClass() );
		binder.callAttributeBindersInSecondPass( prop );
	}
public static ImplicitDiscriminatorStrategy resolveImplicitDiscriminatorStrategy(
AnyDiscriminatorImplicitValues anyDiscriminatorImplicitValues,
MetadataBuildingContext context) {
return switch ( anyDiscriminatorImplicitValues.value() ) {
case FULL_NAME -> FullNameImplicitDiscriminatorStrategy.FULL_NAME_STRATEGY;
case SHORT_NAME -> ShortNameImplicitDiscriminatorStrategy.SHORT_NAME_STRATEGY;
case CUSTOM -> {
final var customStrategy = anyDiscriminatorImplicitValues.implementation();
if ( ImplicitDiscriminatorStrategy.class.equals( customStrategy ) ) {
yield null;
}
else if ( FullNameImplicitDiscriminatorStrategy.class.equals( customStrategy ) ) {
yield FullNameImplicitDiscriminatorStrategy.FULL_NAME_STRATEGY;
}
else if ( ShortNameImplicitDiscriminatorStrategy.class.equals( customStrategy ) ) {
yield ShortNameImplicitDiscriminatorStrategy.SHORT_NAME_STRATEGY;
}
else {
yield context.getBootstrapContext().getCustomTypeProducer()
.produceBeanInstance( customStrategy );
}
}
};
}
}
| AnyBinder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassCanBeStaticTest.java | {
"start": 2000,
"end": 2179
} | class ____ an outer method in a method
int localMethod() {
return outerMethod();
}
}
// outer | references |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AbstractJobVertexHandler.java | {
"start": 2310,
"end": 4573
} | class ____<
R extends ResponseBody, M extends JobVertexMessageParameters>
extends AbstractAccessExecutionGraphHandler<R, M> {
    /**
     * Instantiates a new Abstract job vertex handler.
     *
     * @param leaderRetriever the leader retriever used to look up the current {@link RestfulGateway}
     * @param timeout the timeout for gateway operations
     * @param responseHeaders the HTTP headers attached to each response
     * @param messageHeaders the message headers describing this handler's REST endpoint
     * @param executionGraphCache the cache used to avoid repeated execution graph lookups
     * @param executor the executor used for asynchronous request handling
     */
    protected AbstractJobVertexHandler(
            GatewayRetriever<? extends RestfulGateway> leaderRetriever,
            Duration timeout,
            Map<String, String> responseHeaders,
            MessageHeaders<EmptyRequestBody, R, M> messageHeaders,
            ExecutionGraphCache executionGraphCache,
            Executor executor) {
        // All state is managed by the superclass; this class only adds
        // job-vertex resolution in handleRequest.
        super(
                leaderRetriever,
                timeout,
                responseHeaders,
                messageHeaders,
                executionGraphCache,
                executor);
    }
@Override
protected R handleRequest(
HandlerRequest<EmptyRequestBody> request, AccessExecutionGraph executionGraph)
throws RestHandlerException {
final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);
final AccessExecutionJobVertex jobVertex = executionGraph.getJobVertex(jobVertexID);
if (jobVertex == null) {
throw new RestHandlerException(
"No vertex with ID '" + jobVertexID + "' exists.",
HttpResponseStatus.NOT_FOUND);
}
return handleRequest(request, jobVertex);
}
    /**
     * Called for each request after the corresponding {@link AccessExecutionJobVertex} has been
     * retrieved from the {@link AccessExecutionGraph}.
     *
     * @param request the request
     * @param jobVertex the execution job vertex resolved from the {@link JobVertexIdPathParameter}
     * @return the response
     * @throws RestHandlerException if the handler could not process the request
     */
    protected abstract R handleRequest(
            HandlerRequest<EmptyRequestBody> request, AccessExecutionJobVertex jobVertex)
            throws RestHandlerException;
}
| AbstractJobVertexHandler |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/utils/DiffRepository.java | {
"start": 32349,
"end": 32658
} | class ____ the same repository instance. This
* ensures that, if two or more test cases fail, the log file will contains the actual results
* of both test cases.
*
* <p>The <code>baseRepository</code> parameter is useful if the test is an extension to a
* previous test. If the test | share |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/internal/tools/ConcurrentReferenceHashMap.java | {
"start": 14837,
"end": 46685
} | class ____<K, V> extends ReentrantLock implements Serializable {
/*
* Segments maintain a table of entry lists that are ALWAYS
* kept in a consistent state, so can be read without locking.
* Next fields of nodes are immutable (final). All list
* additions are performed at the front of each bin. This
* makes it easy to check changes, and also fast to traverse.
* When nodes would otherwise be changed, new nodes are
* created to replace them. This works well for hash tables
* since the bin lists tend to be short. (The average length
* is less than two for the default load factor threshold.)
*
* Read operations can thus proceed without locking, but rely
* on selected uses of volatiles to ensure that completed
* write operations performed by other threads are
* noticed. For most purposes, the "count" field, tracking the
* number of elements, serves as that volatile variable
* ensuring visibility. This is convenient because this field
* needs to be read in many read operations anyway:
*
* - All (unsynchronized) read operations must first read the
* "count" field, and should not look at table entries if
* it is 0.
*
* - All (synchronized) write operations should write to
* the "count" field after structurally changing any bin.
* The operations must not take any action that could even
* momentarily cause a concurrent read operation to see
* inconsistent data. This is made easier by the nature of
* the read operations in Map. For example, no operation
* can reveal that the table has grown but the threshold
* has not yet been updated, so there are no atomicity
* requirements for this with respect to reads.
*
* As a guide, all critical volatile reads and writes to the
* count field are marked in code comments.
*/
		@Serial
		private static final long serialVersionUID = 2249069246763182397L;

		/**
		 * The number of elements in this segment's region.
		 * Volatile: unlocked readers rely on reading this field to observe
		 * completed writes (see the consistency notes in the class comment above).
		 */
		transient volatile int count;

		/**
		 * Number of updates that alter the size of the table. This is
		 * used during bulk-read methods to make sure they see a
		 * consistent snapshot: If modCounts change during a traversal
		 * of segments computing size or checking containsValue, then
		 * we might have an inconsistent view of state so (usually)
		 * must retry.
		 */
		transient int modCount;

		/**
		 * The table is rehashed when its size exceeds this threshold.
		 * (The value of this field is always {@code (int)(capacity *
		 * loadFactor)}.)
		 */
		transient int threshold;

		/**
		 * The per-segment table.
		 */
		transient volatile HashEntry<K, V>[] table;

		/**
		 * The load factor for the hash table. Even though this value
		 * is same for all segments, it is replicated to avoid needing
		 * links to outer object.
		 *
		 * @serial
		 */
		final float loadFactor;

		/**
		 * The collected weak-key reference queue for this segment.
		 * This should be (re)initialized whenever table is assigned,
		 */
		transient volatile ReferenceQueue<Object> refQueue;

		// Reference strength (e.g. strong/weak) used for keys and values in this segment.
		final ReferenceType keyType;

		final ReferenceType valueType;

		// When true, keys are compared by identity (==) instead of equals().
		final boolean identityComparisons;
		/**
		 * Creates a segment with the given capacity, load factor, reference
		 * strengths and comparison mode, and installs its initial table.
		 */
		Segment(
				int initialCapacity, float lf, ReferenceType keyType,
				ReferenceType valueType, boolean identityComparisons) {
			loadFactor = lf;
			this.keyType = keyType;
			this.valueType = valueType;
			this.identityComparisons = identityComparisons;
			setTable( HashEntry.newArray( initialCapacity ) );
		}

		// Generic array creation is impossible in Java, hence the unchecked raw array.
		@SuppressWarnings("unchecked")
		static <K, V> Segment<K, V>[] newArray(int i) {
			return new Segment[i];
		}

		// Key equality honoring the Option.IDENTITY_COMPARISONS setting.
		private boolean keyEq(Object src, Object dest) {
			return identityComparisons ? src == dest : src.equals( dest );
		}

		/**
		 * Sets table to new HashEntry array.
		 * Call only while holding lock or in constructor.
		 * Also replaces the reference queue, so stale references collected for
		 * the old table are never processed against the new one.
		 */
		void setTable(HashEntry<K, V>[] newTable) {
			threshold = (int) ( newTable.length * loadFactor );
			table = newTable;
			refQueue = new ReferenceQueue<>();
		}

		/**
		 * Returns properly casted first entry of bin for given hash.
		 * The table length is always a power of two, so {@code length - 1}
		 * acts as a bit mask selecting the bin index.
		 */
		HashEntry<K, V> getFirst(int hash) {
			HashEntry<K, V>[] tab = table;
			return tab[hash & ( tab.length - 1 )];
		}

		// Entry factory: wraps key/value according to this segment's reference types.
		HashEntry<K, V> newHashEntry(K key, int hash, HashEntry<K, V> next, V value) {
			return new HashEntry<>( key, hash, next, value, keyType, valueType, refQueue );
		}
		/**
		 * Reads value field of an entry under lock. Called if value
		 * field ever appears to be null. This is possible only if a
		 * compiler happens to reorder a HashEntry initialization with
		 * its table assignment, which is legal under memory model
		 * but is not known to ever occur.
		 */
		V readValueUnderLock(HashEntry<K, V> e) {
			lock();
			try {
				removeStale();
				return e.value();
			}
			finally {
				unlock();
			}
		}

		/* Specialized implementations of map methods */

		// Lock-free read: the volatile read of count establishes visibility of
		// writes completed before the count update (see class comment).
		V get(Object key, int hash) {
			if ( count != 0 ) { // read-volatile
				HashEntry<K, V> e = getFirst( hash );
				while ( e != null ) {
					if ( e.hash == hash && keyEq( key, e.key() ) ) {
						Object opaque = e.valueRef;
						if ( opaque != null ) {
							return e.dereferenceValue( opaque );
						}
						// value ref unexpectedly null: fall back to a locked re-read
						return readValueUnderLock( e ); // recheck
					}
					e = e.next;
				}
			}
			return null;
		}
		// Lock-free key lookup; same traversal as get() without touching the value.
		boolean containsKey(Object key, int hash) {
			if ( count != 0 ) { // read-volatile
				HashEntry<K, V> e = getFirst( hash );
				while ( e != null ) {
					if ( e.hash == hash && keyEq( key, e.key() ) ) {
						return true;
					}
					e = e.next;
				}
			}
			return false;
		}

		// Full scan of this segment's table; values are always compared with equals().
		boolean containsValue(Object value) {
			if ( count != 0 ) { // read-volatile
				HashEntry<K, V>[] tab = table;
				int len = tab.length;
				for ( int i = 0; i < len; i++ ) {
					for ( HashEntry<K, V> e = tab[i]; e != null; e = e.next ) {
						Object opaque = e.valueRef;
						V v;
						if ( opaque == null ) {
							// value ref unexpectedly null: locked re-read (see readValueUnderLock)
							v = readValueUnderLock( e ); // recheck
						}
						else {
							v = e.dereferenceValue( opaque );
						}
						if ( value.equals( v ) ) {
							return true;
						}
					}
				}
			}
			return false;
		}
		// Replaces the mapping only if the current value equals oldValue;
		// returns whether a replacement happened.
		boolean replace(K key, int hash, V oldValue, V newValue) {
			lock();
			try {
				removeStale();
				HashEntry<K, V> e = getFirst( hash );
				while ( e != null && ( e.hash != hash || !keyEq( key, e.key() ) ) ) {
					e = e.next;
				}
				boolean replaced = false;
				if ( e != null && oldValue.equals( e.value() ) ) {
					replaced = true;
					e.setValue( newValue, valueType, refQueue );
				}
				return replaced;
			}
			finally {
				unlock();
			}
		}

		// Unconditionally replaces the value when the key is present;
		// returns the previous value or null when absent.
		V replace(K key, int hash, V newValue) {
			lock();
			try {
				removeStale();
				HashEntry<K, V> e = getFirst( hash );
				while ( e != null && ( e.hash != hash || !keyEq( key, e.key() ) ) ) {
					e = e.next;
				}
				V oldValue = null;
				if ( e != null ) {
					oldValue = e.value();
					e.setValue( newValue, valueType, refQueue );
				}
				return oldValue;
			}
			finally {
				unlock();
			}
		}
		// Insert or update under the segment lock; returns the previous value (or null).
		V put(K key, int hash, V value, boolean onlyIfAbsent) {
			lock();
			try {
				removeStale();
				int c = count;
				if ( c++ > threshold ) {// ensure capacity
					int reduced = rehash();
					if ( reduced > 0 ) {
						// adjust from possible weak cleanups
						count = ( c -= reduced ) - 1; // write-volatile
					}
				}
				HashEntry<K, V>[] tab = table;
				int index = hash & ( tab.length - 1 );
				HashEntry<K, V> first = tab[index];
				HashEntry<K, V> e = first;
				while ( e != null && ( e.hash != hash || !keyEq( key, e.key() ) ) ) {
					e = e.next;
				}
				V oldValue;
				if ( e != null ) {
					// existing mapping: overwrite unless caller asked for put-if-absent
					oldValue = e.value();
					if ( !onlyIfAbsent ) {
						e.setValue( value, valueType, refQueue );
					}
				}
				else {
					// new mapping: link at the head of the bin, then publish via count
					oldValue = null;
					++modCount;
					tab[index] = newHashEntry( key, hash, first, value );
					count = c; // write-volatile
				}
				return oldValue;
			}
			finally {
				unlock();
			}
		}
		// Doubles the table size. Returns the number of entries dropped because
		// their (weak/soft) keys were already garbage-collected.
		int rehash() {
			HashEntry<K, V>[] oldTable = table;
			int oldCapacity = oldTable.length;
			if ( oldCapacity >= MAXIMUM_CAPACITY ) {
				return 0;
			}

			/*
			 * Reclassify nodes in each list to new Map. Because we are
			 * using power-of-two expansion, the elements from each bin
			 * must either stay at same index, or move with a power of two
			 * offset. We eliminate unnecessary node creation by catching
			 * cases where old nodes can be reused because their next
			 * fields won't change. Statistically, at the default
			 * threshold, only about one-sixth of them need cloning when
			 * a table doubles. The nodes they replace will be garbage
			 * collectable as soon as they are no longer referenced by any
			 * reader thread that may be in the midst of traversing table
			 * right now.
			 */

			HashEntry<K, V>[] newTable = HashEntry.newArray( oldCapacity << 1 );
			threshold = (int) ( newTable.length * loadFactor );
			int sizeMask = newTable.length - 1;
			int reduce = 0;
			for ( int i = 0; i < oldCapacity; i++ ) {
				// We need to guarantee that any existing reads of old Map can
				// proceed. So we cannot yet null out each bin.
				HashEntry<K, V> e = oldTable[i];

				if ( e != null ) {
					HashEntry<K, V> next = e.next;
					int idx = e.hash & sizeMask;

					// Single node on list
					if ( next == null ) {
						newTable[idx] = e;
					}
					else {
						// Reuse trailing consecutive sequence at same slot
						HashEntry<K, V> lastRun = e;
						int lastIdx = idx;
						for ( HashEntry<K, V> last = next; last != null; last = last.next ) {
							int k = last.hash & sizeMask;
							if ( k != lastIdx ) {
								lastIdx = k;
								lastRun = last;
							}
						}
						newTable[lastIdx] = lastRun;

						// Clone all remaining nodes
						for ( HashEntry<K, V> p = e; p != lastRun; p = p.next ) {
							// Skip GC'd weak refs
							K key = p.key();
							if ( key == null ) {
								reduce++;
								continue;
							}
							int k = p.hash & sizeMask;
							HashEntry<K, V> n = newTable[k];
							newTable[k] = newHashEntry( key, p.hash, n, p.value() );
						}
					}
				}
			}
			table = newTable;
			return reduce;
		}
		/**
		 * Remove; match on key only if value null, else match both.
		 * When {@code refRemove} is true, {@code key} is the Reference instance
		 * itself (used while draining the reference queue of GC'd keys).
		 */
		V remove(Object key, int hash, Object value, boolean refRemove) {
			lock();
			try {
				if ( !refRemove ) {
					removeStale();
				}
				int c = count - 1;
				HashEntry<K, V>[] tab = table;
				int index = hash & ( tab.length - 1 );
				HashEntry<K, V> first = tab[index];
				HashEntry<K, V> e = first;
				// a ref remove operation compares the Reference instance
				while ( e != null && key != e.keyRef
						&& ( refRemove || hash != e.hash || !keyEq( key, e.key() ) ) ) {
					e = e.next;
				}

				V oldValue = null;
				if ( e != null ) {
					V v = e.value();
					if ( value == null || value.equals( v ) ) {
						oldValue = v;
						// All entries following removed node can stay
						// in list, but all preceding ones need to be
						// cloned.
						++modCount;
						HashEntry<K, V> newFirst = e.next;
						for ( HashEntry<K, V> p = first; p != e; p = p.next ) {
							K pKey = p.key();
							if ( pKey == null ) { // Skip GC'd keys
								c--;
								continue;
							}

							newFirst = newHashEntry( pKey, p.hash, newFirst, p.value() );
						}
						tab[index] = newFirst;
						count = c; // write-volatile
					}
				}
				return oldValue;
			}
			finally {
				unlock();
			}
		}

		// Drain GC'd key references from the queue and evict their stale entries.
		void removeStale() {
			KeyReference ref;
			while ( ( ref = (KeyReference) refQueue.poll() ) != null ) {
				remove( ref.keyRef(), ref.keyHash(), null, true );
			}
		}
		// Wipes the segment under lock. The reference queue is swapped out so
		// stale refs of the discarded entries are never processed.
		void clear() {
			if ( count != 0 ) {
				lock();
				try {
					HashEntry<K, V>[] tab = table;
					Arrays.fill( tab, null );
					++modCount;
					// replace the reference queue to avoid unnecessary stale cleanups
					refQueue = new ReferenceQueue<>();
					count = 0; // write-volatile
				}
				finally {
					unlock();
				}
			}
		}
}
/* ---------------- Public operations -------------- */
	/**
	 * Creates a new, empty map with the specified initial
	 * capacity, reference types, load factor and concurrency level.
	 * <p>
	 * Behavioral changing options such as {@link Option#IDENTITY_COMPARISONS}
	 * can also be specified.
	 *
	 * @param initialCapacity the initial capacity. The implementation
	 * performs internal sizing to accommodate this many elements.
	 * @param loadFactor the load factor threshold, used to control resizing.
	 * Resizing may be performed when the average number of elements per
	 * bin exceeds this threshold.
	 * @param concurrencyLevel the estimated number of concurrently
	 * updating threads. The implementation performs internal sizing
	 * to try to accommodate this many threads.
	 * @param keyType the reference type to use for keys
	 * @param valueType the reference type to use for values
	 * @param options the behavioral options
	 *
	 * @throws IllegalArgumentException if the initial capacity is
	 * negative or the load factor or concurrencyLevel are
	 * nonpositive.
	 */
	public ConcurrentReferenceHashMap(
			int initialCapacity,
			float loadFactor, int concurrencyLevel,
			ReferenceType keyType, ReferenceType valueType,
			EnumSet<Option> options) {
		// !(loadFactor > 0) rather than (loadFactor <= 0) also rejects NaN
		if ( !( loadFactor > 0 ) || initialCapacity < 0 || concurrencyLevel <= 0 ) {
			throw new IllegalArgumentException();
		}

		if ( concurrencyLevel > MAX_SEGMENTS ) {
			concurrencyLevel = MAX_SEGMENTS;
		}

		// Find power-of-two sizes best matching arguments
		int sshift = 0;
		int ssize = 1;
		while ( ssize < concurrencyLevel ) {
			++sshift;
			ssize <<= 1;
		}
		// High-order hash bits select the segment; segmentMask extracts them.
		segmentShift = 32 - sshift;
		segmentMask = ssize - 1;
		this.segments = Segment.newArray( ssize );

		if ( initialCapacity > MAXIMUM_CAPACITY ) {
			initialCapacity = MAXIMUM_CAPACITY;
		}
		// Split the requested capacity across segments, rounding up...
		int c = initialCapacity / ssize;
		if ( c * ssize < initialCapacity ) {
			++c;
		}
		// ...then round each segment's capacity up to a power of two.
		int cap = 1;
		while ( cap < c ) {
			cap <<= 1;
		}

		identityComparisons = options != null && options.contains( Option.IDENTITY_COMPARISONS );

		for ( int i = 0; i < this.segments.length; ++i ) {
			this.segments[i] = new Segment<>(
					cap, loadFactor,
					keyType, valueType, identityComparisons
			);
		}
	}
	/**
	 * Creates a new, empty map with the specified initial
	 * capacity, load factor and concurrency level, using the default
	 * reference types for keys and values.
	 *
	 * @param initialCapacity the initial capacity. The implementation
	 * performs internal sizing to accommodate this many elements.
	 * @param loadFactor the load factor threshold, used to control resizing.
	 * Resizing may be performed when the average number of elements per
	 * bin exceeds this threshold.
	 * @param concurrencyLevel the estimated number of concurrently
	 * updating threads. The implementation performs internal sizing
	 * to try to accommodate this many threads.
	 *
	 * @throws IllegalArgumentException if the initial capacity is
	 * negative or the load factor or concurrencyLevel are
	 * nonpositive.
	 */
	public ConcurrentReferenceHashMap(
			int initialCapacity,
			float loadFactor, int concurrencyLevel) {
		// delegates to the primary constructor with default reference types
		this(
				initialCapacity, loadFactor, concurrencyLevel,
				DEFAULT_KEY_TYPE, DEFAULT_VALUE_TYPE, null
		);
	}

	/**
	 * Creates a new, empty map with the specified initial capacity
	 * and load factor and with the default reference types (weak keys,
	 * strong values), and concurrencyLevel (16).
	 *
	 * @param initialCapacity The implementation performs internal
	 * sizing to accommodate this many elements.
	 * @param loadFactor the load factor threshold, used to control resizing.
	 * Resizing may be performed when the average number of elements per
	 * bin exceeds this threshold.
	 *
	 * @throws IllegalArgumentException if the initial capacity of
	 * elements is negative or the load factor is nonpositive
	 * @since 1.6
	 */
	public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor) {
		this( initialCapacity, loadFactor, DEFAULT_CONCURRENCY_LEVEL );
	}

	/**
	 * Creates a new, empty map with the specified initial capacity,
	 * reference types and with default load factor (0.75) and concurrencyLevel (16).
	 *
	 * @param initialCapacity the initial capacity. The implementation
	 * performs internal sizing to accommodate this many elements.
	 * @param keyType the reference type to use for keys
	 * @param valueType the reference type to use for values
	 *
	 * @throws IllegalArgumentException if the initial capacity of
	 * elements is negative.
	 */
	public ConcurrentReferenceHashMap(
			int initialCapacity,
			ReferenceType keyType, ReferenceType valueType) {
		this(
				initialCapacity, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
				keyType, valueType, null
		);
	}

	/**
	 * Creates a new, empty map with the specified initial capacity,
	 * and with default reference types (weak keys, strong values),
	 * load factor (0.75) and concurrencyLevel (16).
	 *
	 * @param initialCapacity the initial capacity. The implementation
	 * performs internal sizing to accommodate this many elements.
	 *
	 * @throws IllegalArgumentException if the initial capacity of
	 * elements is negative.
	 */
	public ConcurrentReferenceHashMap(int initialCapacity) {
		this( initialCapacity, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL );
	}

	/**
	 * Creates a new, empty map with a default initial capacity (16),
	 * reference types (weak keys, strong values), default
	 * load factor (0.75) and concurrencyLevel (16).
	 */
	public ConcurrentReferenceHashMap() {
		this( DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL );
	}

	/**
	 * Creates a new map with the same mappings as the given map.
	 * The map is created with a capacity of 1.5 times the number
	 * of mappings in the given map or 16 (whichever is greater),
	 * and a default load factor (0.75) and concurrencyLevel (16).
	 *
	 * @param m the map
	 */
	public ConcurrentReferenceHashMap(Map<? extends K, ? extends V> m) {
		// size/DEFAULT_LOAD_FACTOR + 1 pre-sizes the map so no resize is needed
		// while copying the source mappings
		this(
				Math.max(
						(int) ( m.size() / DEFAULT_LOAD_FACTOR ) + 1,
						DEFAULT_INITIAL_CAPACITY
				),
				DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL
		);
		putAll( m );
	}
	/**
	 * Returns {@code true} if this map contains no key-value mappings.
	 *
	 * @return {@code true} if this map contains no key-value mappings
	 */
	@Override
	public boolean isEmpty() {
		final Segment<K, V>[] segments = this.segments;
		/*
		 * We keep track of per-segment modCounts to avoid ABA
		 * problems in which an element in one segment was added and
		 * in another removed during traversal, in which case the
		 * table was never actually empty at any point. Note the
		 * similar use of modCounts in the size() and containsValue()
		 * methods, which are the only other methods also susceptible
		 * to ABA problems.
		 */
		int[] mc = new int[segments.length];
		int mcsum = 0;
		for ( int i = 0; i < segments.length; ++i ) {
			if ( segments[i].count != 0 ) {
				return false;
			}
			else {
				mcsum += mc[i] = segments[i].modCount;
			}
		}
		// If mcsum happens to be zero, then we know we got a snapshot
		// before any modifications at all were made.  This is
		// probably common enough to bother tracking.
		if ( mcsum != 0 ) {
			// re-check that no segment was modified (or populated) while we scanned
			for ( int i = 0; i < segments.length; ++i ) {
				if ( segments[i].count != 0 ||
						mc[i] != segments[i].modCount ) {
					return false;
				}
			}
		}
		return true;
	}

	/**
	 * Returns the number of key-value mappings in this map.  If the
	 * map contains more than {@code Integer.MAX_VALUE} elements, returns
	 * {@code Integer.MAX_VALUE}.
	 *
	 * @return the number of key-value mappings in this map
	 */
	@Override
	public int size() {
		final Segment<K, V>[] segments = this.segments;
		long sum = 0;
		long check = 0;
		int[] mc = new int[segments.length];
		// Try a few times to get accurate count. On failure due to
		// continuous async changes in table, resort to locking.
		for ( int k = 0; k < RETRIES_BEFORE_LOCK; ++k ) {
			check = 0;
			sum = 0;
			int mcsum = 0;
			for ( int i = 0; i < segments.length; ++i ) {
				sum += segments[i].count;
				mcsum += mc[i] = segments[i].modCount;
			}
			if ( mcsum != 0 ) {
				// second pass: totals only count if no segment changed in between
				for ( int i = 0; i < segments.length; ++i ) {
					check += segments[i].count;
					if ( mc[i] != segments[i].modCount ) {
						check = -1; // force retry
						break;
					}
				}
			}
			if ( check == sum ) {
				break;
			}
		}
		if ( check != sum ) { // Resort to locking all segments
			sum = 0;
			for ( int i = 0; i < segments.length; ++i ) {
				segments[i].lock();
			}
			for ( int i = 0; i < segments.length; ++i ) {
				sum += segments[i].count;
			}
			for ( int i = 0; i < segments.length; ++i ) {
				segments[i].unlock();
			}
		}
		if ( sum > Integer.MAX_VALUE ) {
			return Integer.MAX_VALUE;
		}
		else {
			return (int) sum;
		}
	}
	/**
	 * Returns the value to which the specified key is mapped,
	 * or {@code null} if this map contains no mapping for the key.
	 * <p>
	 * <p>More formally, if this map contains a mapping from a key
	 * {@code k} to a value {@code v} such that {@code key.equals(k)},
	 * then this method returns {@code v}; otherwise it returns
	 * {@code null}.  (There can be at most one such mapping.)
	 * <p>
	 * Unlike {@code java.util.concurrent.ConcurrentHashMap}, this implementation
	 * tolerates a {@code null} key: it simply returns {@code null} instead of
	 * throwing {@code NullPointerException}.
	 */
	@Override
	public V get(Object key) {
		if ( key == null ) {
			return null;
		}
		int hash = hashOf( key );
		return segmentFor( hash ).get( key, hash );
	}

	/**
	 * Tests if the specified object is a key in this table.
	 *
	 * @param key possible key
	 *
	 * @return {@code true} if and only if the specified object
	 * is a key in this table, as determined by the
	 * {@code equals} method; {@code false} otherwise.
	 * A {@code null} key is never contained, so {@code false} is
	 * returned for it (no {@code NullPointerException} is thrown).
	 */
	@Override
	public boolean containsKey(Object key) {
		if ( key == null ) {
			return false;
		}
		int hash = hashOf( key );
		return segmentFor( hash ).containsKey( key, hash );
	}
	/**
	 * Returns {@code true} if this map maps one or more keys to the
	 * specified value. Note: This method requires a full internal
	 * traversal of the hash table, and so is much slower than
	 * method {@code containsKey}.
	 *
	 * @param value value whose presence in this map is to be tested
	 *
	 * @return {@code true} if this map maps one or more keys to the
	 * specified value; {@code false} is returned for a {@code null} value
	 */
	@Override
	public boolean containsValue(Object value) {
		if ( value == null ) {
			return false;
		}

		// See explanation of modCount use above

		final Segment<K, V>[] segments = this.segments;
		int[] mc = new int[segments.length];

		// Try a few times without locking
		for ( int k = 0; k < RETRIES_BEFORE_LOCK; ++k ) {
			int sum = 0;
			int mcsum = 0;
			for ( int i = 0; i < segments.length; ++i ) {
				// read-volatile: value unused, but the read of count makes the
				// segment's recent updates visible before scanning it
				int c = segments[i].count;
				mcsum += mc[i] = segments[i].modCount;
				if ( segments[i].containsValue( value ) ) {
					return true;
				}
			}
			boolean cleanSweep = true;
			if ( mcsum != 0 ) {
				// retry unless no segment was modified during the scan
				for ( int i = 0; i < segments.length; ++i ) {
					// read-volatile (value unused, see above)
					int c = segments[i].count;
					if ( mc[i] != segments[i].modCount ) {
						cleanSweep = false;
						break;
					}
				}
			}
			if ( cleanSweep ) {
				return false;
			}
		}
		// Resort to locking all segments
		for ( int i = 0; i < segments.length; ++i ) {
			segments[i].lock();
		}
		boolean found = false;
		try {
			for ( int i = 0; i < segments.length; ++i ) {
				if ( segments[i].containsValue( value ) ) {
					found = true;
					break;
				}
			}
		}
		finally {
			for ( int i = 0; i < segments.length; ++i ) {
				segments[i].unlock();
			}
		}
		return found;
	}

	/**
	 * Legacy method testing if some key maps into the specified value
	 * in this table. This method is identical in functionality to
	 * {@link #containsValue}, and exists solely to ensure
	 * full compatibility with class {@link java.util.Hashtable},
	 * which supported this method prior to introduction of the
	 * Java Collections framework.
	 *
	 * @param value a value to search for
	 *
	 * @return {@code true} if and only if some key maps to the
	 * {@code value} argument in this table as
	 * determined by the {@code equals} method;
	 * {@code false} otherwise (including for a {@code null} value —
	 * unlike {@code Hashtable}, no {@code NullPointerException} is thrown)
	 */
	public boolean contains(Object value) {
		return containsValue( value );
	}
	/**
	 * Maps the specified key to the specified value in this table.
	 * <p>
	 * <p> The value can be retrieved by calling the {@code get} method
	 * with a key that is equal to the original key.
	 *
	 * @param key key with which the specified value is to be associated
	 * @param value value to be associated with the specified key
	 *
	 * @return the previous value associated with {@code key}, or
	 * {@code null} if there was no mapping for {@code key}.
	 * If either argument is {@code null}, no mapping is made and
	 * {@code null} is returned (note this result is indistinguishable
	 * from "no previous mapping").
	 */
	@Override
	public V put(K key, V value) {
		if ( key == null || value == null ) {
			// tolerate nulls instead of throwing, unlike ConcurrentHashMap
			return null;
		}
		int hash = hashOf( key );
		return segmentFor( hash ).put( key, hash, value, false );
	}

	/**
	 * {@inheritDoc}
	 *
	 * @return the previous value associated with the specified key,
	 * or {@code null} if there was no mapping for the key.
	 * If either argument is {@code null}, the call is a no-op and
	 * {@code null} is returned.
	 */
	@Override
	public V putIfAbsent(K key, V value) {
		if ( key == null || value == null ) {
			// tolerate nulls instead of throwing, unlike ConcurrentHashMap
			return null;
		}
		int hash = hashOf( key );
		return segmentFor( hash ).put( key, hash, value, true );
	}
/**
* Copies all of the mappings from the specified map to this one.
* These mappings replace any mappings that this map had for any of the
* keys currently in the specified map.
*
* @param m mappings to be stored in this map
*/
@Override
public void putAll(Map<? extends K, ? extends V> m) {
for ( Entry<? extends K, ? extends V> e : m.entrySet() ) {
put( e.getKey(), e.getValue() );
}
}
	/**
	 * Removes the key (and its corresponding value) from this map.
	 * This method does nothing if the key is not in the map.
	 *
	 * @param key the key that needs to be removed
	 *
	 * @return the previous value associated with {@code key}, or
	 * {@code null} if there was no mapping for {@code key}.
	 * A {@code null} key is a no-op returning {@code null}
	 * (no {@code NullPointerException} is thrown).
	 */
	@Override
	public V remove(Object key) {
		if ( key == null ) {
			return null;
		}
		int hash = hashOf( key );
		return segmentFor( hash ).remove( key, hash, null, false );
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * If either argument is {@code null}, nothing is removed and
	 * {@code false} is returned (no {@code NullPointerException} is thrown).
	 */
	@Override
	public boolean remove(Object key, Object value) {
		if ( key == null || value == null ) {
			return false;
		}
		int hash = hashOf( key );
		return segmentFor( hash ).remove( key, hash, value, false ) != null;
	}
	/**
	 * {@inheritDoc}
	 *
	 * @throws NullPointerException if any of the arguments are null
	 */
	@Override
	public boolean replace(K key, V oldValue, V newValue) {
		// unlike the other mutators, this method really does reject nulls
		if ( key == null || oldValue == null || newValue == null ) {
			throw new NullPointerException();
		}
		int hash = hashOf( key );
		return segmentFor( hash ).replace( key, hash, oldValue, newValue );
	}

	/**
	 * {@inheritDoc}
	 *
	 * @return the previous value associated with the specified key,
	 * or {@code null} if there was no mapping for the key.
	 * If either argument is {@code null}, the call is a no-op and
	 * {@code null} is returned (no {@code NullPointerException} is thrown).
	 */
	@Override
	public V replace(K key, V value) {
		if ( key == null || value == null ) {
			return null;
		}
		int hash = hashOf( key );
		return segmentFor( hash ).replace( key, hash, value );
	}
/**
 * Removes all of the mappings from this map by clearing each segment
 * in turn. No global lock is taken, so concurrent writers may repopulate
 * already-cleared segments before this method returns.
 */
@Override
public void clear() {
    int index = 0;
    while ( index < segments.length ) {
        segments[index].clear();
        ++index;
    }
}
/**
 * Eagerly removes any stale entries whose keys have been finalized.
 * Normally unnecessary, since stale entries are reclaimed lazily whenever
 * a blocking operation runs; useful when prompt cleanup matters, such as
 * dropping old references to a ClassLoader in a multi-classloader
 * environment.
 * <p>
 * Note: this method will acquire locks, one at a time, across all
 * segments of this table, so it should be used sparingly.
 */
public void purgeStaleEntries() {
    int index = 0;
    while ( index < segments.length ) {
        segments[index].removeStale();
        ++index;
    }
}
/**
 * Returns a {@link Set} view of the keys contained in this map. The view
 * is live: changes to the map are reflected in the set and vice-versa.
 * The set supports element removal (via {@code Iterator.remove},
 * {@code Set.remove}, {@code removeAll}, {@code retainAll} and
 * {@code clear}), which removes the corresponding mapping from the map;
 * it does not support {@code add} or {@code addAll}.
 * <p>
 * The view's {@code iterator} is "weakly consistent": it will never throw
 * {@link java.util.ConcurrentModificationException}, is guaranteed to
 * traverse elements as they existed when it was constructed, and may (but
 * is not guaranteed to) reflect subsequent modifications.
 */
@Override
public Set<K> keySet() {
    // Lazily create and cache the view. The benign race on the
    // non-volatile field at worst creates a throwaway duplicate view.
    Set<K> view = keySet;
    if ( view == null ) {
        view = new KeySet();
        keySet = view;
    }
    return view;
}
/**
 * Returns a {@link Collection} view of the values contained in this map.
 * The view is live: changes to the map are reflected in the collection
 * and vice-versa. The collection supports element removal (via
 * {@code Iterator.remove}, {@code Collection.remove}, {@code removeAll},
 * {@code retainAll} and {@code clear}), which removes the corresponding
 * mapping from the map; it does not support {@code add} or
 * {@code addAll}.
 * <p>
 * The view's {@code iterator} is "weakly consistent": it will never throw
 * {@link java.util.ConcurrentModificationException}, is guaranteed to
 * traverse elements as they existed when it was constructed, and may (but
 * is not guaranteed to) reflect subsequent modifications.
 */
@Override
public Collection<V> values() {
    // Lazily create and cache the view; duplicate creation under a race
    // is harmless because every view delegates to the same map.
    Collection<V> view = values;
    if ( view == null ) {
        view = new Values();
        values = view;
    }
    return view;
}
/**
 * Returns a {@link Set} view of the mappings contained in this map. The
 * view is live: changes to the map are reflected in the set and
 * vice-versa. The set supports element removal (via
 * {@code Iterator.remove}, {@code Set.remove}, {@code removeAll},
 * {@code retainAll} and {@code clear}), which removes the corresponding
 * mapping from the map; it does not support {@code add} or
 * {@code addAll}.
 * <p>
 * The view's {@code iterator} is "weakly consistent": it will never throw
 * {@link java.util.ConcurrentModificationException}, is guaranteed to
 * traverse elements as they existed when it was constructed, and may (but
 * is not guaranteed to) reflect subsequent modifications.
 */
@Override
public Set<Entry<K, V>> entrySet() {
    // Lazily create and cache the view, tolerating the benign race on
    // the non-volatile cache field.
    Set<Entry<K, V>> view = entrySet;
    if ( view == null ) {
        view = new EntrySet();
        entrySet = view;
    }
    return view;
}
/**
 * Returns an enumeration of the keys in this table, supporting the
 * legacy {@link java.util.Hashtable}-style traversal API.
 *
 * @return an enumeration of the keys in this table
 *
 * @see #keySet()
 */
public Enumeration<K> keys() {
    return new KeyIterator();
}
/**
 * Returns an enumeration of the values in this table, supporting the
 * legacy {@link java.util.Hashtable}-style traversal API.
 *
 * @return an enumeration of the values in this table
 *
 * @see #values()
 */
public Enumeration<V> elements() {
    return new ValueIterator();
}
/* ---------------- Iterator Support -------------- */
abstract | Segment |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java | {
"start": 79015,
"end": 80077
} | class ____
for (int i = typeHierarchy.size() - 1; i >= 0; i--) {
Type curT = typeHierarchy.get(i);
// parameterized type
if (curT instanceof ParameterizedType) {
Class<?> rawType = ((Class<?>) ((ParameterizedType) curT).getRawType());
for (int paramIndex = 0;
paramIndex < rawType.getTypeParameters().length;
paramIndex++) {
TypeVariable<?> curVarOfCurT = rawType.getTypeParameters()[paramIndex];
// check if variable names match
if (sameTypeVars(curVarOfCurT, inTypeTypeVar)) {
Type curVarType =
((ParameterizedType) curT).getActualTypeArguments()[paramIndex];
// another type variable level
if (curVarType instanceof TypeVariable<?>) {
inTypeTypeVar = (TypeVariable<?>) curVarType;
}
// | assigned |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/condition/AbstractExecutionConditionTests.java | {
"start": 1703,
"end": 1837
} | class ____ unit testing a concrete {@link ExecutionCondition}
* implementation.
*
* <p><strong>WARNING</strong>: this abstract base | for |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/SinksTest.java | {
"start": 2471,
"end": 3665
} | class ____ {
//TODO Multicast has slightly different behavior with early onNext + onError : doesn't buffer elements for benefit of 1st subscriber
//(this is a behavioral difference in EmitterProcessor)
final Supplier<Sinks.Many<Integer>> supplier = () -> Sinks.many().multicast().onBackpressureBuffer(10);
@TestFactory
Stream<DynamicContainer> checkSemantics() {
return Stream.of(
expectMulticast(supplier, 10),
expectReplay(supplier, NONE),
dynamicContainer("buffers all before 1st subscriber, except for errors",
expectBufferingBeforeFirstSubscriber(supplier, ALL).getChildren().filter(dn -> !dn.getDisplayName().equals("replayAndErrorFirstSubscriber")))
);
}
@Test
void noReplayBeforeFirstSubscriberIfEarlyError() {
Sinks.Many<Integer> sink = supplier.get();
Flux<Integer> flux = sink.asFlux();
AssertSubscriber<Integer> first = AssertSubscriber.create();
sink.emitNext(1, FAIL_FAST);
sink.emitNext(2, FAIL_FAST);
sink.emitNext(3, FAIL_FAST);
sink.emitError(new IllegalStateException("boom"), FAIL_FAST);
flux.subscribe(first);
first.assertNoValues()
.assertErrorMessage("boom");
}
}
@Nested
| Multicast |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.