language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationTestPrograms.java | {
"start": 3859,
"end": 80946
} | class ____ {
static final TableTestProgram SOURCE_QUERY_OPERATION =
TableTestProgram.of("source-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(Row.of(1L, "abc"), Row.of(2L, "cde"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string")
.consumedValues(Row.of(1L, "abc"), Row.of(2L, "cde"))
.build())
.runTableApi(t -> t.from("s"), "sink")
.runSql(
"SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM `default_catalog`"
+ ".`default_database`.`s` "
+ "$$T_SOURCE")
.build();
static final TableTestProgram VALUES_QUERY_OPERATION =
TableTestProgram.of("values-query-operation", "verifies sql serialization")
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string", "c time", "d timestamp")
.consumedValues(
Row.of(
1L,
"abc",
LocalTime.of(12, 30, 0),
LocalDateTime.of(1970, 1, 1, 12, 30, 0)),
Row.of(
2L,
"cde",
LocalTime.of(18, 0, 0),
LocalDateTime.of(1970, 1, 1, 18, 0, 0)))
.build())
.runTableApi(
t ->
t.fromValues(
row(
1L,
"abc",
LocalTime.of(12, 30, 0),
LocalDateTime.of(1970, 1, 1, 12, 30, 0)),
row(
2L,
"cde",
LocalTime.of(18, 0, 0),
LocalDateTime.of(1970, 1, 1, 18, 0, 0))),
"sink")
.runSql(
"SELECT `$$T_VAL`.`f0`, `$$T_VAL`.`f1`, `$$T_VAL`.`f2`, `$$T_VAL`.`f3` FROM (VALUES \n"
+ " (CAST(1 AS BIGINT), 'abc', TIME '12:30:00', TIMESTAMP '1970-01-01 12:30:00'),\n"
+ " (CAST(2 AS BIGINT), 'cde', TIME '18:00:00', TIMESTAMP '1970-01-01 18:00:00')\n"
+ ") $$T_VAL(`f0`, `f1`, `f2`, `f3`)")
.build();
static final TableTestProgram FILTER_QUERY_OPERATION =
TableTestProgram.of("filter-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(Row.of(10L, "abc"), Row.of(20L, "cde"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string")
.consumedValues(Row.of(20L, "cde"))
.build())
.runTableApi(t -> t.from("s").where($("a").isGreaterOrEqual(15)), "sink")
.runSql(
"SELECT `$$T_FILTER`.`a`, `$$T_FILTER`.`b` FROM (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM `default_catalog`"
+ ".`default_database`.`s` $$T_SOURCE\n"
+ ") $$T_FILTER WHERE `$$T_FILTER`.`a` >= 15")
.build();
static final TableTestProgram DISTINCT_QUERY_OPERATION =
TableTestProgram.of("distinct-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(
Row.of(20L, "apple"),
Row.of(20L, "apple"),
Row.of(5L, "pear"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string")
.consumedValues(Row.of(20L, "apple"))
.build())
.runTableApi(
t -> t.from("s").where($("a").isGreaterOrEqual(15)).distinct(), "sink")
.runSql(
"SELECT DISTINCT `$$T_DISTINCT`.`a`, `$$T_DISTINCT`.`b` FROM (\n"
+ " SELECT `$$T_FILTER`.`a`, `$$T_FILTER`.`b` FROM (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM `default_catalog`"
+ ".`default_database`.`s` $$T_SOURCE\n"
+ " ) $$T_FILTER WHERE `$$T_FILTER`.`a` >= 15\n"
+ ") $$T_DISTINCT")
.build();
static final TableTestProgram AGGREGATE_QUERY_OPERATION =
TableTestProgram.of("aggregate-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(
Row.of(10L, "apple"),
Row.of(20L, "apple"),
Row.of(5L, "pear"),
Row.of(15L, "pear"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a string", "b bigint")
.consumedValues(
Row.ofKind(RowKind.INSERT, "apple", 10L),
Row.ofKind(RowKind.UPDATE_BEFORE, "apple", 10L),
Row.ofKind(RowKind.UPDATE_AFTER, "apple", 30L),
Row.ofKind(RowKind.INSERT, "pear", 5L),
Row.ofKind(RowKind.UPDATE_BEFORE, "pear", 5L),
Row.ofKind(RowKind.UPDATE_AFTER, "pear", 20L))
.build())
.runTableApi(
t -> t.from("s").groupBy($("b")).select($("b"), $("a").sum()), "sink")
.runSql(
"SELECT `$$T_PROJECT`.`b`, `$$T_PROJECT`.`EXPR$0` FROM (\n"
+ " SELECT `$$T_AGG`.`b`, (SUM(`$$T_AGG`.`a`)) AS `EXPR$0`"
+ " FROM (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM "
+ "`default_catalog`.`default_database`.`s` $$T_SOURCE\n"
+ " ) $$T_AGG\n"
+ " GROUP BY `$$T_AGG`.`b`\n"
+ ") $$T_PROJECT")
.build();
static final TableTestProgram AGGREGATE_NO_GROUP_BY_QUERY_OPERATION =
TableTestProgram.of(
"aggregate-query-no-group-by-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(
Row.of(10L, "apple"),
Row.of(20L, "apple"),
Row.of(5L, "pear"),
Row.of(15L, "pear"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("b bigint")
.consumedValues(
Row.ofKind(RowKind.INSERT, 10L),
Row.ofKind(RowKind.UPDATE_BEFORE, 10L),
Row.ofKind(RowKind.UPDATE_AFTER, 30L),
Row.ofKind(RowKind.UPDATE_BEFORE, 30L),
Row.ofKind(RowKind.UPDATE_AFTER, 35L),
Row.ofKind(RowKind.UPDATE_BEFORE, 35L),
Row.ofKind(RowKind.UPDATE_AFTER, 50L))
.build())
.runTableApi(t -> t.from("s").select($("a").sum()), "sink")
.runSql(
"SELECT `$$T_PROJECT`.`EXPR$0` FROM (\n"
+ " SELECT (SUM(`$$T_AGG`.`a`)) AS `EXPR$0` FROM (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM "
+ "`default_catalog`.`default_database`.`s` $$T_SOURCE\n"
+ " ) $$T_AGG\n"
+ " GROUP BY 1\n"
+ ") $$T_PROJECT")
.build();
static final TableTestProgram WINDOW_AGGREGATE_QUERY_OPERATION =
TableTestProgram.of("window-aggregate-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema(
"a bigint",
"b string",
"ts TIMESTAMP_LTZ(3)",
"WATERMARK FOR ts AS ts - INTERVAL '1' SECOND")
.producedValues(
Row.of(2L, "apple", dayOfSeconds(0)),
Row.of(3L, "apple", dayOfSeconds(4)),
Row.of(1L, "apple", dayOfSeconds(7)))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a string", "ts TIMESTAMP_LTZ(3)", "b bigint")
.consumedValues(
Row.of("apple", dayOfSeconds(0), 5L),
Row.of("apple", dayOfSeconds(5), 1L))
.build())
.runTableApi(
t ->
t.from("s")
.window(
Tumble.over(lit(5).seconds())
.on($("ts"))
.as("w"))
.groupBy($("w"), $("b"))
.select($("b"), $("w").start(), $("a").sum()),
"sink")
.runSql(
"SELECT `$$T_PROJECT`.`b`, `$$T_PROJECT`.`EXPR$0`, `$$T_PROJECT`.`EXPR$1` FROM (\n"
+ " SELECT `$$T_WIN_AGG`.`b`, (SUM(`$$T_WIN_AGG`.`a`)) AS `EXPR$1`, (window_start) AS `EXPR$0` FROM TABLE(\n"
+ " TUMBLE((\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b`, "
+ "`$$T_SOURCE`.`ts` FROM `default_catalog`.`default_database`.`s` $$T_SOURCE\n"
+ " ), DESCRIPTOR(`ts`), INTERVAL '0 00:00:05.0' DAY TO SECOND(3))\n"
+ " ) $$T_WIN_AGG GROUP BY window_start, window_end, `$$T_WIN_AGG`.`b`\n"
+ ") $$T_PROJECT")
.build();
private static Instant dayOfSeconds(int second) {
return LocalDateTime.of(2024, 1, 1, 0, 0, second).atZone(ZoneId.of("UTC")).toInstant();
}
static final TableTestProgram JOIN_QUERY_OPERATION =
TableTestProgram.of("join-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("d")
.addSchema("dept_id bigint", "d_name string")
.producedValues(
Row.of(1L, "Research"), Row.of(2L, "Accounting"))
.build())
.setupTableSource(
SourceTestStep.newBuilder("e")
.addSchema(
"emp_id bigint",
"e_dept_id bigint",
"name string",
"age int")
.producedValues(
Row.of(1L, 2L, "Steve", 18),
Row.of(2L, 1L, "Helena", 22),
Row.of(3L, 2L, "Charlie", 25),
Row.of(4L, 1L, "Anna", 18))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("name string", "dept_name string", "age bigint")
.consumedValues(
Row.of("Helena", "Research", 22L),
Row.of("Charlie", "Accounting", 25L))
.build())
.runTableApi(
t ->
t.from("e")
.join(
t.from("d"),
$("e_dept_id")
.isEqual($("dept_id"))
.and($("age").isGreaterOrEqual(21)))
.select($("name"), $("d_name"), $("age")),
"sink")
.runSql(
"SELECT `$$T_PROJECT`.`name`, `$$T_PROJECT`.`d_name`, `$$T_PROJECT`.`age` FROM (\n"
+ " SELECT `$$T1_JOIN`.`emp_id`, `$$T1_JOIN`.`e_dept_id`, "
+ "`$$T1_JOIN`.`name`, "
+ "`$$T1_JOIN`.`age`, `$$T2_JOIN`.`dept_id`, `$$T2_JOIN`"
+ ".`d_name` FROM (\n"
+ " SELECT `$$T_SOURCE`.`emp_id`, `$$T_SOURCE`"
+ ".`e_dept_id`, `$$T_SOURCE`.`name`, `$$T_SOURCE`.`age` FROM `default_catalog`.`default_database`.`e` $$T_SOURCE\n"
+ " ) $$T1_JOIN INNER JOIN (\n"
+ " SELECT `$$T_SOURCE`.`dept_id`, `$$T_SOURCE`"
+ ".`d_name` FROM `default_catalog`.`default_database`.`d` $$T_SOURCE\n"
+ " ) $$T2_JOIN ON (`$$T1_JOIN`.`e_dept_id` = `$$T2_JOIN`"
+ ".`dept_id`)"
+ " AND "
+ "(`$$T1_JOIN`.`age` "
+ ">= 21)\n"
+ ") $$T_PROJECT")
.build();
static final TableTestProgram LATERAL_JOIN_QUERY_OPERATION =
TableTestProgram.of("lateral-join-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("e")
.addSchema("a bigint", "b string")
.producedValues(Row.of(1L, "abc"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string", "f0 int")
.consumedValues(Row.of(1L, "abc", 3))
.build())
.setupCatalogFunction(
"udtf", JavaUserDefinedTableFunctions.JavaTableFunc1.class)
.runTableApi(
t -> t.from("e").joinLateral(call("udtf", $("b")).as("f0")), "sink")
.runSql(
"SELECT `$$T1_JOIN`.`a`, `$$T1_JOIN`.`b`, `$$T_LAT`.`f0` FROM (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM "
+ "`default_catalog`.`default_database`.`e` $$T_SOURCE\n"
+ ") $$T1_JOIN INNER JOIN \n"
+ " LATERAL TABLE(`default_catalog`.`default_database`.`udtf`(`b`)) $$T_LAT(`f0`) ON TRUE")
.build();
static final TableTestProgram UNION_ALL_QUERY_OPERATION =
TableTestProgram.of("union-all-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(Row.of(1L, "abc"))
.build())
.setupTableSource(
SourceTestStep.newBuilder("t")
.addSchema("a bigint", "b string")
.producedValues(Row.of(2L, "cde"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string")
.consumedValues(Row.of(1L, "abc"), Row.of(2L, "cde"))
.build())
.runTableApi(t -> t.from("s").unionAll(t.from("t")), "sink")
.runSql(
"SELECT `a`, `b` FROM (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM "
+ "`default_catalog`.`default_database`.`s` $$T_SOURCE\n"
+ ") UNION ALL (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM "
+ "`default_catalog`.`default_database`.`t` $$T_SOURCE\n"
+ ")")
.build();
static final TableTestProgram ORDER_BY_QUERY_OPERATION =
TableTestProgram.of("order-by-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(
Row.of(1L, "a"),
Row.of(2L, "b"),
Row.of(3L, "c"),
Row.of(4L, "d"),
Row.of(5L, "e"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string")
.consumedValues(Row.of(2L, "b"), Row.of(3L, "c"))
.build())
.runTableApi(
t -> t.from("s").orderBy($("a"), $("b").desc()).offset(1).fetch(2),
"sink")
.runSql(
"SELECT `$$T_SORT`.`a`, `$$T_SORT`.`b` FROM (\n"
+ " SELECT `$$T_SOURCE`.`a`, `$$T_SOURCE`.`b` FROM `default_catalog`"
+ ".`default_database`.`s` $$T_SOURCE\n"
+ ") $$T_SORT ORDER BY `$$T_SORT`.`a` ASC, `$$T_SORT`.`b` DESC"
+ " OFFSET 1 ROWS FETCH NEXT 2 ROWS ONLY")
.build();
static final TableTestProgram SQL_QUERY_OPERATION =
TableTestProgram.of("sql-query-operation", "verifies sql serialization")
.setupTableSource(
SourceTestStep.newBuilder("s")
.addSchema("a bigint", "b string")
.producedValues(Row.of(1L, "abc"), Row.of(2L, "cde"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("a bigint", "b string")
.consumedValues(Row.of(3L, "bc"), Row.of(4L, "de"))
.build())
.runTableApi(
t ->
t.sqlQuery("SELECT a, b FROM s")
.select($("a").plus(2), $("b").substr(2, 3)),
"sink")
.runSql(
"SELECT (`$$T_PROJECT`.`a` + 2) AS `_c0`, (SUBSTR(`$$T_PROJECT`.`b`, "
+ "2, 3)) AS "
+ "`_c1` FROM (\n"
+ " SELECT `s`.`a`, `s`.`b`\n"
+ " FROM `default_catalog`.`default_database`.`s` AS `s`\n"
+ ") $$T_PROJECT")
.build();
static final TableTestProgram GROUP_HOP_WINDOW_EVENT_TIME =
TableTestProgram.of(
"group-window-aggregate-hop-event-time",
"validates group by using hopping window with event time")
.setupTableSource(
SourceTestStep.newBuilder("source_t")
.addSchema(
"ts STRING",
"a_int INT",
"b_double DOUBLE",
"c_float FLOAT",
"d_bigdec DECIMAL(10, 2)",
"`comment` STRING",
"name STRING",
"`rowtime` AS TO_TIMESTAMP(`ts`)",
"`proctime` AS PROCTIME()",
"WATERMARK for `rowtime` AS `rowtime` - INTERVAL '1' SECOND")
.producedValues(
Row.of(
"2020-10-10 00:00:01",
1,
1d,
1f,
new BigDecimal("1.11"),
"Hi",
"a"),
Row.of(
"2020-10-10 00:00:02",
2,
2d,
2f,
new BigDecimal("2.22"),
"Comment#1",
"a"),
Row.of(
"2020-10-10 00:00:03",
2,
2d,
2f,
new BigDecimal("2.22"),
"Comment#1",
"a"),
Row.of(
"2020-10-10 00:00:04",
5,
5d,
5f,
new BigDecimal("5.55"),
null,
"a"),
Row.of(
"2020-10-10 00:00:07",
3,
3d,
3f,
null,
"Hello",
"b"),
// out of order
Row.of(
"2020-10-10 00:00:06",
6,
6d,
6f,
new BigDecimal("6.66"),
"Hi",
"b"),
Row.of(
"2020-10-10 00:00:08",
3,
null,
3f,
new BigDecimal("3.33"),
"Comment#2",
"a"),
// late event
Row.of(
"2020-10-10 00:00:04",
5,
5d,
null,
new BigDecimal("5.55"),
"Hi",
"a"),
Row.of(
"2020-10-10 00:00:16",
4,
4d,
4f,
new BigDecimal("4.44"),
"Hi",
"b"),
Row.of(
"2020-10-10 00:00:32",
7,
7d,
7f,
new BigDecimal("7.77"),
null,
null),
Row.of(
"2020-10-10 00:00:34",
1,
3d,
3f,
new BigDecimal("3.33"),
"Comment#3",
"b"),
Row.of(
"2020-10-10 00:00:41",
10,
3d,
3f,
new BigDecimal("4.44"),
"Comment#4",
"a"),
Row.of(
"2020-10-10 00:00:42",
11,
4d,
4f,
new BigDecimal("5.44"),
"Comment#5",
"d"),
Row.of(
"2020-10-10 00:00:43",
12,
5d,
5f,
new BigDecimal("6.44"),
"Comment#6",
"c"),
Row.of(
"2020-10-10 00:00:44",
13,
6d,
6f,
new BigDecimal("7.44"),
"Comment#7",
"d"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema("name STRING", "cnt BIGINT")
.consumedValues(
"+I[a, 4]",
"+I[b, 2]",
"+I[a, 6]",
"+I[a, 1]",
"+I[b, 2]",
"+I[b, 1]",
"+I[b, 1]",
"+I[b, 1]",
"+I[null, 1]",
"+I[b, 1]",
"+I[null, 1]",
"+I[a, 1]",
"+I[d, 2]",
"+I[c, 1]",
"+I[a, 1]",
"+I[c, 1]",
"+I[d, 2]")
.build())
.runTableApi(
env ->
env.from("source_t")
.window(
Slide.over(lit(10).seconds())
.every(lit(5).seconds())
.on($("rowtime"))
.as("w"))
.groupBy($("name"), $("w"))
.select($("name"), lit(1).count()),
"sink_t")
.build();
static final TableTestProgram SORT_LIMIT_DESC =
TableTestProgram.of(
"sort-limit-desc",
"validates sort limit node by sorting integers in desc mode")
.setupTableSource(
SourceTestStep.newBuilder("source_t")
.addSchema("a INT", "b VARCHAR", "c INT")
.producedValues(
Row.of(2, "a", 6),
Row.of(4, "b", 8),
Row.of(6, "c", 10),
Row.of(1, "a", 5),
Row.of(3, "b", 7),
Row.of(5, "c", 9),
// ignored since smaller than the least max (4, b, 8)
Row.of(2, "a", 6),
// replaces (4, b, 8) from beforeRestore
Row.of(6, "c", 10),
// ignored since not larger than the least max (5, c, 9)
Row.of(5, "c", 9))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema("a INT", "b VARCHAR", "c BIGINT")
.consumedValues(
"+I[2, a, 6]",
"+I[4, b, 8]",
"+I[6, c, 10]",
"-D[2, a, 6]",
"+I[3, b, 7]",
"-D[3, b, 7]",
"+I[5, c, 9]",
"-D[4, b, 8]",
"+I[6, c, 10]")
.build())
.runTableApi(
env -> env.from("source_t").orderBy($("a").desc()).limit(3), "sink_t")
.build();
static final TableTestProgram GROUP_BY_UDF_WITH_MERGE =
TableTestProgram.of(
"group-aggregate-udf-with-merge",
"validates udfs with merging using group by")
.setupCatalogFunction(
"my_avg", JavaUserDefinedAggFunctions.WeightedAvgWithMerge.class)
.setupTemporarySystemFunction(
"my_concat",
JavaUserDefinedAggFunctions.ConcatDistinctAggFunction.class)
.setupTableSource(
SourceTestStep.newBuilder("source_t")
.addSchema(
"a INT", "b BIGINT", "c INT", "d VARCHAR", "e BIGINT")
.producedValues(
Row.of(2, 3L, 2, "Hello World Like", 1L),
Row.of(3, 4L, 3, "Hello World Its nice", 2L),
Row.of(2, 2L, 1, "Hello World", 2L),
Row.of(1, 1L, 0, "Hello", 1L),
Row.of(5, 11L, 10, "GHI", 1L),
Row.of(3, 5L, 4, "ABC", 2L),
Row.of(4, 10L, 9, "FGH", 2L),
Row.of(4, 7L, 6, "CDE", 2L),
Row.of(5, 14L, 13, "JKL", 2L),
Row.of(4, 9L, 8, "EFG", 1L),
Row.of(5, 15L, 14, "KLM", 2L),
Row.of(5, 12L, 11, "HIJ", 3L),
Row.of(4, 8L, 7, "DEF", 1L),
Row.of(5, 13L, 12, "IJK", 3L),
Row.of(3, 6L, 5, "BCD", 3L),
Row.of(1, 1L, 0, "Hello", 1L),
Row.of(3, 5L, 4, "ABC", 2L),
Row.of(4, 10L, 9, "FGH", 2L),
Row.of(4, 7L, 6, "CDE", 2L),
Row.of(7, 7L, 7, "MNO", 7L),
Row.of(3, 6L, 5, "BCD", 3L),
Row.of(7, 7L, 7, "XYZ", 7L))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema(
"d BIGINT",
"s1 BIGINT",
"c1 VARCHAR",
"PRIMARY KEY (d) NOT ENFORCED")
.consumedValues(
"+I[1, 1, Hello World Like]",
"+I[2, 2, Hello World Its nice]",
"+U[2, 2, Hello World Its nice|Hello World]",
"+U[1, 1, Hello World Like|Hello]",
"+U[1, 1, Hello World Like|Hello|GHI]",
"+U[2, 2, Hello World Its nice|Hello World|ABC]",
"+U[2, 2, Hello World Its nice|Hello World|ABC|FGH]",
"+U[2, 2, Hello World Its nice|Hello World|ABC|FGH|CDE]",
"+U[2, 2, Hello World Its nice|Hello World|ABC|FGH|CDE|JKL]",
"+U[1, 1, Hello World Like|Hello|GHI|EFG]",
"+U[2, 2, Hello World Its nice|Hello World|ABC|FGH|CDE|JKL|KLM]",
"+I[3, 3, HIJ]",
"+U[1, 1, Hello World Like|Hello|GHI|EFG|DEF]",
"+U[3, 3, HIJ|IJK]",
"+U[3, 3, HIJ|IJK|BCD]",
"+I[7, 7, MNO]",
"+U[7, 7, MNO|XYZ]")
.build())
.runTableApi(
env ->
env.from("source_t")
.groupBy($("e"))
.select(
$("e"),
call("my_avg", $("e"), $("a")).as("s1"),
call("my_concat", $("d")).as("c1")),
"sink_t")
.build();
static final TableTestProgram NON_WINDOW_INNER_JOIN =
TableTestProgram.of("join-non-window-inner-join", "test non-window inner join")
.setupTableSource(
SourceTestStep.newBuilder("T1")
.addSchema("a int", "b bigint", "c varchar")
.producedValues(
Row.of(1, 1L, "Baker1"),
Row.of(1, 2L, "Baker2"),
Row.of(1, 2L, "Baker2"),
Row.of(1, 5L, "Baker3"),
Row.of(2, 7L, "Baker5"),
Row.of(1, 9L, "Baker6"),
Row.of(1, 8L, "Baker8"),
Row.of(3, 8L, "Baker9"),
Row.of(1, 1L, "PostRestore"))
.build())
.setupTableSource(
SourceTestStep.newBuilder("T2")
.addSchema("a int", "b bigint", "c varchar")
.producedValues(
Row.of(1, 1L, "BakerBaker"),
Row.of(2, 2L, "HeHe"),
Row.of(3, 2L, "HeHe"),
Row.of(2, 1L, "PostRestoreRight"))
.build())
.setupTableSink(
SinkTestStep.newBuilder("MySink")
.addSchema("a int", "c1 varchar", "c2 varchar")
.consumedValues(
Row.of(1, "BakerBaker", "Baker2"),
Row.of(1, "BakerBaker", "Baker2"),
Row.of(1, "BakerBaker", "Baker3"),
Row.of(2, "HeHe", "Baker5"),
Row.of(1, "BakerBaker", "Baker6"),
Row.of(1, "BakerBaker", "Baker8"),
Row.of(2, "PostRestoreRight", "Baker5"))
.build())
.runSql(
"insert into MySink "
+ "SELECT t2.a, t2.c, t1.c\n"
+ "FROM (\n"
+ " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1\n"
+ ") as t1\n"
+ "JOIN (\n"
+ " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2\n"
+ ") as t2\n"
+ "ON t1.a = t2.a AND t1.b > t2.b")
.runTableApi(
env -> {
final Table t1 =
env.from("T1")
.select(
ifThenElse(
$("a").isEqual(3),
nullOf(DataTypes.INT()),
$("a"))
.as("a1"),
$("b").as("b1"),
$("c").as("c1"));
final Table t2 =
env.from("T2")
.select(
ifThenElse(
$("a").isEqual(3),
nullOf(DataTypes.INT()),
$("a"))
.as("a2"),
$("b").as("b2"),
$("c").as("c2"));
return t1.join(
t2,
$("a1").isEqual($("a2"))
.and($("b1").isGreater($("b2"))))
.select($("a2"), $("c2"), $("c1"));
},
"MySink")
.build();
static final TableTestProgram OVER_WINDOW_RANGE =
TableTestProgram.of("over-window-range", "test over window with time range")
.setupTableSource(
SourceTestStep.newBuilder("data")
.addSchema(
"k string",
"v bigint",
"ts TIMESTAMP_LTZ(3)",
"WATERMARK for `ts` AS `ts`")
.producedValues(
Row.of("Apple", 5L, dayOfSeconds(0)),
Row.of("Apple", 4L, dayOfSeconds(1)),
Row.of("Apple", 3L, dayOfSeconds(2)))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("k string", "v bigint", "ts TIMESTAMP_LTZ(3)")
.consumedValues(
Row.of("Apple", 5L, dayOfSeconds(0)),
Row.of("Apple", 4L, dayOfSeconds(1)),
Row.of("Apple", 3L, dayOfSeconds(2)))
.build())
.runSql(
"SELECT `$$T_PROJECT`.`k`, (LAST_VALUE(`$$T_PROJECT`.`v`) "
+ "OVER(PARTITION BY `$$T_PROJECT`.`k` "
+ "ORDER BY `$$T_PROJECT`.`ts` RANGE BETWEEN INTERVAL '0 "
+ "00:00:02.0' DAY TO SECOND(3) PRECEDING AND CURRENT ROW)) AS `_c1`, `$$T_PROJECT`.`ts` FROM (\n"
+ " SELECT `$$T_SOURCE`.`k`, `$$T_SOURCE`.`v`, "
+ "`$$T_SOURCE`.`ts` FROM `default_catalog`.`default_database`.`data` $$T_SOURCE\n"
+ ") $$T_PROJECT")
.runTableApi(
tableEnvAccessor ->
tableEnvAccessor
.from("data")
.window(
Over.partitionBy($("k"))
.orderBy($("ts"))
.preceding(lit(2).second())
.as("w"))
.select(
$("k"),
$("v").lastValue().over($("w")),
$("ts")),
"sink")
.build();
static final TableTestProgram OVER_WINDOW_ROWS =
TableTestProgram.of("over-window-rows", "test over window with rows range")
.setupTableSource(
SourceTestStep.newBuilder("data")
.addSchema(
"k string",
"v bigint",
"ts TIMESTAMP_LTZ(3)",
"WATERMARK for `ts` AS `ts`")
.producedValues(
Row.of("Apple", 5L, dayOfSeconds(0)),
Row.of("Apple", 4L, dayOfSeconds(1)),
Row.of("Apple", 3L, dayOfSeconds(2)))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("k string", "v bigint", "ts TIMESTAMP_LTZ(3)")
.consumedValues(
Row.of("Apple", 5L, dayOfSeconds(0)),
Row.of("Apple", 4L, dayOfSeconds(1)),
Row.of("Apple", 3L, dayOfSeconds(2)))
.build())
.runSql(
"SELECT `$$T_PROJECT`.`k`, (LAST_VALUE(`$$T_PROJECT`.`v`) OVER"
+ "(PARTITION BY `$$T_PROJECT`.`k` "
+ "ORDER BY `$$T_PROJECT`.`ts` "
+ "ROWS BETWEEN CAST(2 AS BIGINT) PRECEDING AND CURRENT ROW))"
+ " AS `_c1`, `$$T_PROJECT`.`ts` FROM (\n"
+ " SELECT `$$T_SOURCE`.`k`, `$$T_SOURCE`.`v`, "
+ "`$$T_SOURCE`.`ts` FROM `default_catalog`.`default_database`.`data` $$T_SOURCE\n"
+ ") $$T_PROJECT")
.runTableApi(
tableEnvAccessor ->
tableEnvAccessor
.from("data")
.window(
Over.partitionBy($("k"))
.orderBy($("ts"))
.preceding(lit(2L))
.as("w"))
.select(
$("k"),
$("v").lastValue().over($("w")),
$("ts")),
"sink")
.build();
static final TableTestProgram OVER_WINDOW_ROWS_UNBOUNDED_NO_PARTITION =
TableTestProgram.of(
"over-window-rows-unbounded-no-partition",
"test over window with " + "rows range")
.setupTableSource(
SourceTestStep.newBuilder("data")
.addSchema(
"k string",
"v bigint",
"ts TIMESTAMP_LTZ(3)",
"WATERMARK for `ts` AS `ts`")
.producedValues(
Row.of("Apple", 5L, dayOfSeconds(0)),
Row.of("Apple", 4L, dayOfSeconds(1)),
Row.of("Apple", 3L, dayOfSeconds(2)))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("v bigint", "ts TIMESTAMP_LTZ(3)")
.consumedValues(
Row.of(5L, dayOfSeconds(0)),
Row.of(4L, dayOfSeconds(1)),
Row.of(3L, dayOfSeconds(2)))
.build())
.runSql(
"SELECT (LAST_VALUE(`$$T_PROJECT`.`v`) OVER(ORDER BY `$$T_PROJECT`"
+ ".`ts` "
+ "ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)) AS "
+ "`_c0`, `$$T_PROJECT`.`ts` FROM (\n"
+ " SELECT `$$T_SOURCE`.`k`, `$$T_SOURCE`.`v`, "
+ "`$$T_SOURCE`.`ts` FROM `default_catalog`.`default_database`.`data` $$T_SOURCE\n"
+ ") $$T_PROJECT")
.runTableApi(
tableEnvAccessor ->
tableEnvAccessor
.from("data")
.window(
Over.orderBy($("ts"))
.preceding(UNBOUNDED_ROW)
.as("w"))
.select($("v").lastValue().over($("w")), $("ts")),
"sink")
.build();
static final TableTestProgram OVER_WINDOW_LAG =
TableTestProgram.of("over-window-lag", "validates over window with lag function")
.setupTableSource(
SourceTestStep.newBuilder("t")
.addSchema(
"ts STRING",
"b MAP<DOUBLE, DOUBLE>",
"`r_time` AS TO_TIMESTAMP(`ts`)",
"WATERMARK for `r_time` AS `r_time`")
.producedValues(
Row.of(
"2020-04-15 08:00:05",
Collections.singletonMap(42.0, 42.0)),
Row.of(
"2020-04-15 08:00:06",
Collections.singletonMap(42.1, 42.1)))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink_t")
.addSchema("ts STRING", "b MAP<DOUBLE, DOUBLE>")
.consumedValues(
Row.of("2020-04-15 08:00:05", null),
Row.of(
"2020-04-15 08:00:06",
Collections.singletonMap(42.0, 42.0)))
.build())
.runTableApi(
env ->
env.from("t")
.window(Over.orderBy($("r_time")).as("bLag"))
.select($("ts"), lag($("b"), 1).over($("bLag"))),
"sink_t")
.runSql(
"SELECT `$$T_PROJECT`.`ts`, (LAG(`$$T_PROJECT`.`b`, 1) OVER(ORDER BY `$$T_PROJECT`.`r_time`)) AS `_c1` FROM (\n"
+ " SELECT `$$T_SOURCE`.`ts`, `$$T_SOURCE`.`b`, `$$T_SOURCE`.`r_time` FROM `default_catalog`.`default_database`.`t` $$T_SOURCE\n"
+ ") $$T_PROJECT")
.build();
static final TableTestProgram ACCESSING_NESTED_COLUMN =
TableTestProgram.of(
"project-nested-columnd",
"test projection with nested columns of an inline type")
.setupTableSource(
SourceTestStep.newBuilder("data")
.addSchema("f0 bigint")
.producedValues(Row.of(1L), Row.of(2L), Row.of(3L))
.build())
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema("v bigint")
.consumedValues(Row.of(1L), Row.of(2L), Row.of(3L))
.build())
.runSql(
"SELECT (`$$T_PROJECT`.`composite_column`.`f0_nested`) AS `composite_column$f0_nested` FROM (\n"
+ " SELECT (CAST(ROW(`$$T_PROJECT`.`f0`, 'a') AS ROW<`f0_nested` BIGINT, `f1_nested` VARCHAR(2147483647)>)) AS `composite_column` FROM (\n"
+ " SELECT `$$T_SOURCE`.`f0` FROM `default_catalog`.`default_database`.`data` $$T_SOURCE\n"
+ " ) $$T_PROJECT\n"
+ ") $$T_PROJECT")
.runTableApi(
tableEnvAccessor ->
tableEnvAccessor
.from("data")
.select(
row($("f0"), lit("a"))
.cast(
DataTypes.ROW(
DataTypes.FIELD(
"f0_nested",
DataTypes
.BIGINT()),
DataTypes.FIELD(
"f1_nested",
DataTypes
.STRING())))
.as("composite_column"))
.select($("composite_column").get("f0_nested")),
"sink")
.build();
public static final TableTestProgram ROW_SEMANTIC_TABLE_PTF =
TableTestProgram.of("process-row-table-api", "table with row semantics")
// TODO [FLINK-38233]: Remove this config when PTF support in
// StreamNonDeterministicUpdatePlanVisitor is added.
.setupConfig(
OptimizerConfigOptions.TABLE_OPTIMIZER_NONDETERMINISTIC_UPDATE_STRATEGY,
OptimizerConfigOptions.NonDeterministicUpdateStrategy.IGNORE)
.setupTemporarySystemFunction("f", RowSemanticTableFunction.class)
.setupSql(BASIC_VALUES)
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema(BASE_SINK_SCHEMA)
.consumedValues(
"+I[{+I[Bob, 12], 1}]", "+I[{+I[Alice, 42], 1}]")
.build())
.runSql(
"SELECT `$$T_FUNC`.`out` FROM TABLE(\n"
+ " `f`((\n"
+ " SELECT `$$T_SOURCE`.`name`, `$$T_SOURCE`.`score` FROM `default_catalog`.`default_database`.`t` $$T_SOURCE\n"
+ " ), 1, DEFAULT, 'f')\n"
+ ") $$T_FUNC")
.runTableApi(
env ->
env.fromCall(
"f",
env.from("t").asArgument("r"),
lit(1).asArgument("i")),
"sink")
.build();
static final TableTestProgram SET_SEMANTIC_TABLE_PTF =
TableTestProgram.of("partitioned-ptf", "verifies SQL serialization")
// TODO [FLINK-38233]: Remove this config when PTF support in
// StreamNonDeterministicUpdatePlanVisitor is added.
.setupConfig(
OptimizerConfigOptions.TABLE_OPTIMIZER_NONDETERMINISTIC_UPDATE_STRATEGY,
OptimizerConfigOptions.NonDeterministicUpdateStrategy.IGNORE)
.setupTemporarySystemFunction("f1", ChainedSendingFunction.class)
.setupTemporarySystemFunction("f2", ChainedReceivingFunction.class)
.setupTableSource(TIMED_SOURCE)
.setupTableSink(
SinkTestStep.newBuilder("sink")
.addSchema(KEYED_TIMED_BASE_SINK_SCHEMA)
.consumedValues(
"+I[Bob, {Processing input row +I[Bob, {Processing input row +I[Bob, 1, 1970-01-01T00:00:00Z] at time 0 watermark null}, 1970-01-01T00:00:00Z] at time 0 watermark null}, 1970-01-01T00:00:00Z]",
"+I[Bob, {Processing input row +I[Bob, {Registering timer t for 1 at time 0 watermark null}, 1970-01-01T00:00:00Z] at time 0 watermark null}, 1970-01-01T00:00:00Z]",
"+I[Alice, {Processing input row +I[Alice, {Processing input row +I[Alice, 1, 1970-01-01T00:00:00.001Z] at time 1 watermark -1}, 1970-01-01T00:00:00.001Z] at time 1 watermark -1}, 1970-01-01T00:00:00.001Z]",
"+I[Alice, {Processing input row +I[Alice, {Registering timer t for 2 at time 1 watermark -1}, 1970-01-01T00:00:00.001Z] at time 1 watermark -1}, 1970-01-01T00:00:00.001Z]",
"+I[Bob, {Processing input row +I[Bob, {Processing input row +I[Bob, 2, 1970-01-01T00:00:00.002Z] at time 2 watermark 0}, 1970-01-01T00:00:00.002Z] at time 2 watermark 0}, 1970-01-01T00:00:00.002Z]",
"+I[Bob, {Processing input row +I[Bob, {Registering timer t for 3 at time 2 watermark 0}, 1970-01-01T00:00:00.002Z] at time 2 watermark 0}, 1970-01-01T00:00:00.002Z]",
"+I[Bob, {Processing input row +I[Bob, {Processing input row +I[Bob, 3, 1970-01-01T00:00:00.003Z] at time 3 watermark 1}, 1970-01-01T00:00:00.003Z] at time 3 watermark 1}, 1970-01-01T00:00:00.003Z]",
"+I[Bob, {Processing input row +I[Bob, {Registering timer t for 4 at time 3 watermark 1}, 1970-01-01T00:00:00.003Z] at time 3 watermark 1}, 1970-01-01T00:00:00.003Z]",
"+I[Alice, {Processing input row +I[Alice, {Timer t fired at time 2 watermark 2}, 1970-01-01T00:00:00.002Z] at time 2 watermark 1}, 1970-01-01T00:00:00.002Z]",
"+I[Alice, {Processing input row +I[Alice, {2}, 1970-01-01T00:00:00.002Z] at time 2 watermark 1}, 1970-01-01T00:00:00.002Z]",
"+I[Bob, {Processing input row +I[Bob, {Processing input row +I[Bob, 4, 1970-01-01T00:00:00.004Z] at time 4 watermark 2}, 1970-01-01T00:00:00.004Z] at time 4 watermark 2}, 1970-01-01T00:00:00.004Z]",
"+I[Bob, {Processing input row +I[Bob, {Registering timer t for 5 at time 4 watermark 2}, 1970-01-01T00:00:00.004Z] at time 4 watermark 2}, 1970-01-01T00:00:00.004Z]",
"+I[Bob, {Processing input row +I[Bob, {Processing input row +I[Bob, 5, 1970-01-01T00:00:00.005Z] at time 5 watermark 3}, 1970-01-01T00:00:00.005Z] at time 5 watermark 3}, 1970-01-01T00:00:00.005Z]",
"+I[Bob, {Processing input row +I[Bob, {Registering timer t for 6 at time 5 watermark 3}, 1970-01-01T00:00:00.005Z] at time 5 watermark 3}, 1970-01-01T00:00:00.005Z]",
"+I[Bob, {Processing input row +I[Bob, {Processing input row +I[Bob, 6, 1970-01-01T00:00:00.006Z] at time 6 watermark 4}, 1970-01-01T00:00:00.006Z] at time 6 watermark 4}, 1970-01-01T00:00:00.006Z]",
"+I[Bob, {Processing input row +I[Bob, {Registering timer t for 7 at time 6 watermark 4}, 1970-01-01T00:00:00.006Z] at time 6 watermark 4}, 1970-01-01T00:00:00.006Z]",
"+I[Bob, {Processing input row +I[Bob, {Timer t fired at time 7 watermark 9223372036854775807}, 1970-01-01T00:00:00.007Z] at time 7 watermark 5}, 1970-01-01T00:00:00.007Z]",
"+I[Bob, {Processing input row +I[Bob, {7}, 1970-01-01T00:00:00.007Z] at time 7 watermark 5}, 1970-01-01T00:00:00.007Z]")
.build())
.runSql(
"SELECT `$$T_FUNC`.`name`, `$$T_FUNC`.`out`, `$$T_FUNC`.`rowtime` FROM TABLE(\n"
+ " `f2`(\n"
+ " (\n"
+ " SELECT `$$T_FUNC`.`name`, `$$T_FUNC`.`out`, `$$T_FUNC`.`rowtime` FROM TABLE(\n"
+ " `f1`(\n"
+ " (\n"
+ " SELECT `$$T_SOURCE`.`name`, `$$T_SOURCE`.`score`, `$$T_SOURCE`.`ts` FROM `default_catalog`.`default_database`.`t` $$T_SOURCE\n"
+ " ) PARTITION BY (`name`), DESCRIPTOR(`ts`), 'f1')\n"
+ " ) $$T_FUNC\n"
+ " ) PARTITION BY (`name`), DESCRIPTOR(`rowtime`), 'f2')\n"
+ ") $$T_FUNC")
.runTableApi(
env -> {
final Table ptf1 =
env.fromCall(
"f1",
env.from("t")
.partitionBy($("name"))
.asArgument("r"),
descriptor("ts").asArgument("on_time"));
return env.fromCall(
"f2",
ptf1.partitionBy($("name")).asArgument("r"),
descriptor("rowtime").asArgument("on_time"));
},
"sink")
.build();
public static final TableTestProgram ML_PREDICT_MODEL_API =
TableTestProgram.of("ml-predict-model-api", "ml-predict using model API")
.setupTableSource(SIMPLE_FEATURES_SOURCE)
.setupModel(SYNC_MODEL)
.setupTableSink(SIMPLE_SINK)
.runSql(
"SELECT `$$T_FUNC`.`id`, `$$T_FUNC`.`feature`, `$$T_FUNC`.`category` FROM TABLE(\n"
+ " ML_PREDICT((\n"
+ " SELECT `$$T_SOURCE`.`id`, `$$T_SOURCE`.`feature` FROM `default_catalog`.`default_database`.`features` $$T_SOURCE\n"
+ " ), MODEL `default_catalog`.`default_database`.`chatgpt`, DESCRIPTOR(`feature`), DEFAULT)\n"
+ ") $$T_FUNC")
.runTableApi(
env ->
env.fromModel("chatgpt")
.predict(
env.from("features"), ColumnList.of("feature")),
"sink")
.build();
    /**
     * Asynchronous {@code ML_PREDICT} issued through the model Table API with a runtime config map
     * ({@code Map.of("async", "true")}), executed with unordered async output mode.
     *
     * <p>The {@code runSql} string is the expected SQL serialization: the config map is rendered as
     * {@code MAP['async', 'true']} in the {@code ML_PREDICT} call.
     */
    public static final TableTestProgram ASYNC_ML_PREDICT_MODEL_API =
            TableTestProgram.of("async-ml-predict-model-api", "async ml-predict using model API")
                    .setupTableSource(SIMPLE_FEATURES_SOURCE)
                    .setupModel(ASYNC_MODEL)
                    .setupTableSink(SIMPLE_SINK)
                    .setupConfig(
                            ExecutionConfigOptions.TABLE_EXEC_ASYNC_ML_PREDICT_OUTPUT_MODE,
                            AsyncOutputMode.ALLOW_UNORDERED)
                    .runSql(
                            "SELECT `$$T_FUNC`.`id`, `$$T_FUNC`.`feature`, `$$T_FUNC`.`category` FROM TABLE(\n"
                                    + "  ML_PREDICT((\n"
                                    + "    SELECT `$$T_SOURCE`.`id`, `$$T_SOURCE`.`feature` FROM `default_catalog`.`default_database`.`features` $$T_SOURCE\n"
                                    + "  ), MODEL `default_catalog`.`default_database`.`chatgpt`, DESCRIPTOR(`feature`), MAP['async', 'true'])\n"
                                    + ") $$T_FUNC")
                    .runTableApi(
                            env ->
                                    env.fromModel("chatgpt")
                                            .predict(
                                                    env.from("features"),
                                                    ColumnList.of("feature"),
                                                    Map.of("async", "true")),
                            "sink")
                    .build();
    /**
     * {@code ML_PREDICT} on an anonymous (inline) model created via {@link ModelDescriptor}
     * instead of a catalog-registered model.
     *
     * <p>Expected to fail: anonymous models have no catalog identifier, so the query operation
     * cannot be serialized to SQL. The program asserts a {@link ValidationException} with the
     * message "Anonymous models cannot be serialized.".
     */
    public static final TableTestProgram ML_PREDICT_ANON_MODEL_API =
            TableTestProgram.of(
                            "ml-predict-anonymous-model-api",
                            "ml-predict using anonymous model API")
                    .setupTableSource(SIMPLE_FEATURES_SOURCE)
                    .runFailingTableApi(
                            env ->
                                    env.from(
                                                    // Inline descriptor: same provider/data as
                                                    // SYNC_MODEL, but never registered in the
                                                    // catalog.
                                                    ModelDescriptor.forProvider("values")
                                                            .inputSchema(
                                                                    Schema.newBuilder()
                                                                            .column(
                                                                                    "feature",
                                                                                    "STRING")
                                                                            .build())
                                                            .outputSchema(
                                                                    Schema.newBuilder()
                                                                            .column(
                                                                                    "category",
                                                                                    "STRING")
                                                                            .build())
                                                            .option(
                                                                    "data-id",
                                                                    TestValuesModelFactory
                                                                            .registerData(
                                                                                    SYNC_MODEL
                                                                                            .data))
                                                            .build())
                                            .predict(
                                                    env.from("features"), ColumnList.of("feature")),
                            "sink",
                            ValidationException.class,
                            "Anonymous models cannot be serialized.")
                    .build();
public static final TableTestProgram ASYNC_ML_PREDICT_TABLE_API_MAP_EXPRESSION_CONFIG =
TableTestProgram.of(
"async-ml-predict-table-api-map-expression-config",
"ml-predict in async mode using Table API and map expression.")
.setupTableSource(SIMPLE_FEATURES_SOURCE)
.setupModel(ASYNC_MODEL)
.setupTableSink(SIMPLE_SINK)
.setupConfig(
ExecutionConfigOptions.TABLE_EXEC_ASYNC_ML_PREDICT_OUTPUT_MODE,
ExecutionConfigOptions.AsyncOutputMode.ALLOW_UNORDERED)
.runSql(
"SELECT `$$T_FUNC`.`id`, `$$T_FUNC`.`feature`, `$$T_FUNC`.`category` FROM TABLE(\n"
+ " ML_PREDICT((\n"
+ " SELECT `$$T_SOURCE`.`id`, `$$T_SOURCE`.`feature` FROM `default_catalog`.`default_database`.`features` $$T_SOURCE\n"
+ " ), MODEL `default_catalog`.`default_database`.`chatgpt`, DESCRIPTOR(`feature`), MAP['async', 'true'])\n"
+ ") $$T_FUNC")
.runTableApi(
env ->
env.fromCall(
"ML_PREDICT",
env.from("features").asArgument("INPUT"),
env.fromModel("chatgpt").asArgument("MODEL"),
descriptor("feature").asArgument("ARGS"),
Expressions.map("async", "true").asArgument("CONFIG")),
"sink")
.build();
/**
* A function that will be used as an inline function in {@link #INLINE_FUNCTION_SERIALIZATION}.
*/
public static | QueryOperationTestPrograms |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/mixins/TestMixinDeserForCreators.java | {
"start": 1984,
"end": 2130
} | class ____ {
@JsonCreator static StringWrapper create(String str) { return null; }
}
// [databind#2020]
static | StringWrapperMixIn |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/io/support/SpringFactoriesLoaderTests.java | {
"start": 12414,
"end": 14297
} | class ____ {
private final ArgumentResolver resolver = ArgumentResolver.of(String.class, "test");
@Test
void defaultConstructorCreatesInstance() throws Exception {
Object instance = FactoryInstantiator.forClass(
DefaultConstructor.class).instantiate(this.resolver);
assertThat(instance).isNotNull();
}
@Test
void singleConstructorWithArgumentsCreatesInstance() throws Exception {
Object instance = FactoryInstantiator.forClass(
SingleConstructor.class).instantiate(this.resolver);
assertThat(instance).isNotNull();
}
@Test
void multiplePrivateAndSinglePublicConstructorCreatesInstance() throws Exception {
Object instance = FactoryInstantiator.forClass(
MultiplePrivateAndSinglePublicConstructor.class).instantiate(this.resolver);
assertThat(instance).isNotNull();
}
@Test
void multiplePackagePrivateAndSinglePublicConstructorCreatesInstance() throws Exception {
Object instance = FactoryInstantiator.forClass(
MultiplePackagePrivateAndSinglePublicConstructor.class).instantiate(this.resolver);
assertThat(instance).isNotNull();
}
@Test
void singlePackagePrivateConstructorCreatesInstance() throws Exception {
Object instance = FactoryInstantiator.forClass(
SinglePackagePrivateConstructor.class).instantiate(this.resolver);
assertThat(instance).isNotNull();
}
@Test
void singlePrivateConstructorCreatesInstance() throws Exception {
Object instance = FactoryInstantiator.forClass(
SinglePrivateConstructor.class).instantiate(this.resolver);
assertThat(instance).isNotNull();
}
@Test
void multiplePackagePrivateConstructorsThrowsException() {
assertThatIllegalStateException().isThrownBy(
() -> FactoryInstantiator.forClass(MultiplePackagePrivateConstructors.class))
.withMessageContaining("has no suitable constructor");
}
static | FactoryInstantiatorTests |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/HttpResponseFactory.java | {
"start": 762,
"end": 2455
} | interface ____ {
/**
* The default {@link HttpResponseFactory} instance.
*/
HttpResponseFactory INSTANCE = DefaultHttpFactories.resolveDefaultResponseFactory();
/**
* Creates an {@link io.micronaut.http.HttpStatus#OK} response with a body.
*
* @param body The body
* @param <T> The body type
* @return The ok response with the given body
*/
<T> MutableHttpResponse<T> ok(T body);
/**
* Return a response for the given status.
*
* @param status The status
* @param reason An alternative reason message
* @param <T> The response type
* @return The response
*/
<T> MutableHttpResponse<T> status(HttpStatus status, String reason);
/**
* Return a response for the given status.
*
* @param status The status
* @param reason An alternative reason message
* @param <T> The response type
* @return The response
*/
<T> MutableHttpResponse<T> status(int status, String reason);
/**
* Return a response for the given status.
*
* @param status The status
* @param body The body
* @param <T> The body type
* @return The response
*/
<T> MutableHttpResponse<T> status(HttpStatus status, T body);
/**
* @param <T> The response type
* @return The ok response
*/
default <T> MutableHttpResponse<T> ok() {
return ok(null);
}
/**
* @param status The status
* @param <T> The response type
* @return The restus response
*/
default <T> MutableHttpResponse<T> status(HttpStatus status) {
return status(status, null);
}
}
| HttpResponseFactory |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/FeatureMismatchException.java | {
"start": 717,
"end": 2208
} | enum ____ { DIRTY_CHECK, ASSOCIATION_MANAGEMENT }
private final String className;
private final Feature mismatchedFeature;
private final boolean previousValue;
public FeatureMismatchException(
String className,
Feature mismatchedFeature,
boolean previousValue) {
super( String.format(
Locale.ROOT,
"Support for %s was enabled during enhancement, but `%s` was previously enhanced with that support %s.",
featureText( mismatchedFeature ),
className,
decode( previousValue )
) );
this.className = className;
this.mismatchedFeature = mismatchedFeature;
this.previousValue = previousValue;
}
public String getClassName() {
return className;
}
public Feature getMismatchedFeature() {
return mismatchedFeature;
}
public boolean wasPreviouslyEnabled() {
return previousValue;
}
public static void checkFeatureEnablement(
TypeDescription managedCtClass,
Feature feature,
boolean currentlyEnabled,
boolean previouslyEnabled) {
if ( currentlyEnabled != previouslyEnabled ) {
throw new FeatureMismatchException( managedCtClass.getName(), feature, previouslyEnabled );
}
}
private static String featureText(Feature mismatchedFeature) {
return switch ( mismatchedFeature ) {
case DIRTY_CHECK -> "inline dirty checking";
case ASSOCIATION_MANAGEMENT -> "bidirectional association management";
};
}
private static String decode(boolean previousValue) {
return previousValue ? "enabled" : "disabled";
}
}
| Feature |
java | processing__processing4 | core/src/processing/core/PApplet.java | {
"start": 350181,
"end": 373465
} | class ____(PApplet):
* pass
*
*MySketch().runSketch();</pre>
*/
protected void runSketch(final String[] args) {
final String[] argsWithSketchName = new String[args.length + 1];
System.arraycopy(args, 0, argsWithSketchName, 0, args.length);
final String className = this.getClass().getSimpleName();
final String cleanedClass =
className.replaceAll("__[^_]+__\\$", "").replaceAll("\\$\\d+", "");
argsWithSketchName[args.length] = cleanedClass;
runSketch(argsWithSketchName, this);
}
/** Convenience method for Python Mode */
protected void runSketch() {
runSketch(new String[0]);
}
//////////////////////////////////////////////////////////////
/**
*
* Opens a new file and all subsequent drawing functions are echoed to this
* file as well as the display window. The <b>beginRecord()</b> function
* requires two parameters, the first is the renderer and the second is the
* file name. This function is always used with <b>endRecord()</b> to stop the
* recording process and close the file. <br />
* <br />
* Note that <b>beginRecord()</b> will only pick up any settings that happen
* after it has been called. For instance, if you call <b>textFont()</b>
* before <b>beginRecord()</b>, then that font will not be set for the file
* that you're recording to. <br />
* <br />
* <b>beginRecord()</b> works only with the PDF and SVG renderers.
*
* @webref output:files
* @webBrief Opens a new file and all subsequent drawing functions are echoed
* to this file as well as the display window
* @param renderer
* PDF or SVG
* @param filename
* filename for output
* @see PApplet#endRecord()
*/
public PGraphics beginRecord(String renderer, String filename) {
filename = insertFrame(filename);
PGraphics rec = createGraphics(width, height, renderer, filename);
beginRecord(rec);
return rec;
}
/**
* @nowebref
* Begin recording (echoing) commands to the specified PGraphics object.
*/
public void beginRecord(PGraphics recorder) {
this.recorder = recorder;
recorder.beginDraw();
}
/**
*
* Stops the recording process started by <b>beginRecord()</b> and closes
* the file.
*
* @webref output:files
* @webBrief Stops the recording process started by <b>beginRecord()</b> and closes
* the file
* @see PApplet#beginRecord(String, String)
*/
public void endRecord() {
if (recorder != null) {
recorder.endDraw();
recorder.dispose();
recorder = null;
}
}
/**
*
* To create vectors from 3D data, use the <b>beginRaw()</b> and
* <b>endRaw()</b> commands. These commands will grab the shape data just
* before it is rendered to the screen. At this stage, your entire scene is
* nothing but a long list of individual lines and triangles. This means
* that a shape created with <b>sphere()</b> function will be made up of
* hundreds of triangles, rather than a single object. Or that a
* multi-segment line shape (such as a curve) will be rendered as
* individual segments.
* <br /><br />
* When using <b>beginRaw()</b> and <b>endRaw()</b>, it's possible to write
* to either a 2D or 3D renderer. For instance, <b>beginRaw()</b> with the
* PDF library will write the geometry as flattened triangles and lines,
* even if recording from the <b>P3D</b> renderer.
* <br /><br />
* If you want a background to show up in your files, use <b>rect(0, 0,
* width, height)</b> after setting the <b>fill()</b> to the background
* color. Otherwise, the background will not be rendered to the file because
* the background is not shape.
* <br /><br />
* Using <b>hint(ENABLE_DEPTH_SORT)</b> can improve the appearance of 3D
* geometry drawn to 2D file formats. See the <b>hint()</b> reference for
* more details.
* <br /><br />
* See examples in the reference for the <b>PDF</b> and <b>DXF</b>
* libraries for more information.
*
* @webref output:files
* @webBrief To create vectors from 3D data, use the <b>beginRaw()</b> and
* <b>endRaw()</b> commands
* @param renderer for example, PDF or DXF
* @param filename filename for output
* @see PApplet#endRaw()
* @see PApplet#hint(int)
*/
public PGraphics beginRaw(String renderer, String filename) {
filename = insertFrame(filename);
PGraphics rec = createGraphics(width, height, renderer, filename);
g.beginRaw(rec);
return rec;
}
/**
* @nowebref
* Begin recording raw shape data to the specified renderer.
* <p/>
* This simply echoes to g.beginRaw(), but since is placed here (rather than
* generated by preproc.pl) for clarity and so that it doesn't echo the
* command should beginRecord() be in use.
*
* @param rawGraphics PGraphics context that raw shapes will be written to
*/
public void beginRaw(PGraphics rawGraphics) {
g.beginRaw(rawGraphics);
}
/**
*
* Complement to <b>beginRaw()</b>; they must always be used together. See
* the <b>beginRaw()</b> reference for details.
*
* @webref output:files
* @webBrief Complement to <b>beginRaw()</b>; they must always be used together
* @see PApplet#beginRaw(String, String)
*/
public void endRaw() {
g.endRaw();
}
//////////////////////////////////////////////////////////////
/**
*
* Loads the pixel data of the current display window into the <b>pixels[]</b>
* array. This function must always be called before reading from or writing
* to <b>pixels[]</b>. Subsequent changes to the display window will not be
* reflected in <b>pixels</b> until <b>loadPixels()</b> is called again.
*
* <h3>Advanced</h3> Override the g.pixels[] function to set the pixels[]
* array that's part of the PApplet object. Allows the use of pixels[] in the
* code, rather than g.pixels[].
*
* @webref image:pixels
* @webBrief Loads the pixel data for the display window into the
* <b>pixels[]</b> array
* @see PApplet#pixels
* @see PApplet#updatePixels()
*/
public void loadPixels() {
g.loadPixels();
pixels = g.pixels;
}
/**
*
* Updates the display window with the data in the <b>pixels[]</b> array. Use
* in conjunction with <b>loadPixels()</b>. If you're only reading pixels from
* the array, there's no need to call <b>updatePixels()</b> — updating is
* only necessary to apply changes.
*
* @webref image:pixels
* @webBrief Updates the display window with the data in the <b>pixels[]</b>
* array
* @see PApplet#loadPixels()
* @see PApplet#pixels
*/
public void updatePixels() {
g.updatePixels();
}
/**
* @nowebref
* @param x1 x-coordinate of the upper-left corner
* @param y1 y-coordinate of the upper-left corner
* @param x2 width of the region
* @param y2 height of the region
*/
public void updatePixels(int x1, int y1, int x2, int y2) {
g.updatePixels(x1, y1, x2, y2);
}
//////////////////////////////////////////////////////////////
// EVERYTHING BELOW THIS LINE IS AUTOMATICALLY GENERATED. DO NOT TOUCH!
// This includes the Javadoc comments, which are automatically copied from
// the PImage and PGraphics source code files.
// public functions for processing.core
public PGL beginPGL() {
return g.beginPGL();
}
public void endPGL() {
if (recorder != null) recorder.endPGL();
g.endPGL();
}
public void flush() {
if (recorder != null) recorder.flush();
g.flush();
}
public void hint(int which) {
if (recorder != null) recorder.hint(which);
g.hint(which);
}
/**
* Start a new shape of type POLYGON
*/
public void beginShape() {
if (recorder != null) recorder.beginShape();
g.beginShape();
}
/**
*
* Using the <b>beginShape()</b> and <b>endShape()</b> functions allow creating
* more complex forms. <b>beginShape()</b> begins recording vertices for a shape
* and <b>endShape()</b> stops recording. The value of the <b>kind</b> parameter
* tells it which types of shapes to create from the provided vertices. With no
* mode specified, the shape can be any irregular polygon. The parameters
* available for beginShape() are POINTS, LINES, TRIANGLES, TRIANGLE_FAN,
* TRIANGLE_STRIP, QUADS, and QUAD_STRIP. After calling the <b>beginShape()</b>
* function, a series of <b>vertex()</b> commands must follow. To stop drawing
* the shape, call <b>endShape()</b>. The <b>vertex()</b> function with two
* parameters specifies a position in 2D and the <b>vertex()</b> function with
* three parameters specifies a position in 3D. Each shape will be outlined with
* the current stroke color and filled with the fill color. <br />
* <br />
* Transformations such as <b>translate()</b>, <b>rotate()</b>, and
* <b>scale()</b> do not work within <b>beginShape()</b>. It is also not
* possible to use other shapes, such as <b>ellipse()</b> or <b>rect()</b>
* within <b>beginShape()</b>. <br />
* <br />
* The P2D and P3D renderers allow <b>stroke()</b> and <b>fill()</b> to be
* altered on a per-vertex basis, but the default renderer does not. Settings
* such as <b>strokeWeight()</b>, <b>strokeCap()</b>, and <b>strokeJoin()</b>
* cannot be changed while inside a <b>beginShape()</b>/<b>endShape()</b> block
* with any renderer.
*
* @webref shape:vertex
* @webBrief Using the <b>beginShape()</b> and <b>endShape()</b> functions allow
* creating more complex forms
* @param kind Either POINTS, LINES, TRIANGLES, TRIANGLE_FAN, TRIANGLE_STRIP,
* QUADS, or QUAD_STRIP
* @see PShape
* @see PGraphics#endShape()
* @see PGraphics#vertex(float, float, float, float, float)
* @see PGraphics#curveVertex(float, float, float)
* @see PGraphics#bezierVertex(float, float, float, float, float, float, float,
* float, float)
*/
public void beginShape(int kind) {
if (recorder != null) recorder.beginShape(kind);
g.beginShape(kind);
}
/**
* Sets whether the upcoming vertex is part of an edge.
* Equivalent to glEdgeFlag(), for people familiar with OpenGL.
*/
public void edge(boolean edge) {
if (recorder != null) recorder.edge(edge);
g.edge(edge);
}
/**
*
* Sets the current normal vector. Used for drawing three-dimensional
* shapes and surfaces, <b>normal()</b> specifies a vector perpendicular
* to a shape's surface which, in turn, determines how lighting affects it.
* Processing attempts to automatically assign normals to shapes, but since
* that's imperfect, this is a better option when you want more control.
* This function is identical to <b>glNormal3f()</b> in OpenGL.
*
* @webref lights_camera:lights
* @webBrief Sets the current normal vector
* @param nx x direction
* @param ny y direction
* @param nz z direction
* @see PGraphics#beginShape(int)
* @see PGraphics#endShape(int)
* @see PGraphics#lights()
*/
public void normal(float nx, float ny, float nz) {
if (recorder != null) recorder.normal(nx, ny, nz);
g.normal(nx, ny, nz);
}
public void attribPosition(String name, float x, float y, float z) {
if (recorder != null) recorder.attribPosition(name, x, y, z);
g.attribPosition(name, x, y, z);
}
public void attribNormal(String name, float nx, float ny, float nz) {
if (recorder != null) recorder.attribNormal(name, nx, ny, nz);
g.attribNormal(name, nx, ny, nz);
}
public void attribColor(String name, int color) {
if (recorder != null) recorder.attribColor(name, color);
g.attribColor(name, color);
}
public void attrib(String name, float... values) {
if (recorder != null) recorder.attrib(name, values);
g.attrib(name, values);
}
public void attrib(String name, int... values) {
if (recorder != null) recorder.attrib(name, values);
g.attrib(name, values);
}
public void attrib(String name, boolean... values) {
if (recorder != null) recorder.attrib(name, values);
g.attrib(name, values);
}
/**
*
* Sets the coordinate space for texture mapping. The default mode is
* <b>IMAGE</b>, which refers to the actual coordinates of the image.
* <b>NORMAL</b> refers to a normalized space of values ranging from 0 to 1.
* This function only works with the P2D and P3D renderers.<br />
* <br />
* With <b>IMAGE</b>, if an image is 100 x 200 pixels, mapping the image onto
* the entire size of a quad would require the points (0,0) (100, 0) (100,200)
* (0,200). The same mapping in <b>NORMAL</b> is (0,0) (1,0) (1,1) (0,1).
*
* @webref image:textures
* @webBrief Sets the coordinate space for texture mapping
* @param mode either IMAGE or NORMAL
* @see PGraphics#texture(PImage)
* @see PGraphics#textureWrap(int)
*/
public void textureMode(int mode) {
if (recorder != null) recorder.textureMode(mode);
g.textureMode(mode);
}
/**
* Defines if textures repeat or draw once within a texture map.
* The two parameters are CLAMP (the default behavior) and REPEAT.
* This function only works with the P2D and P3D renderers.
*
* @webref image:textures
* @webBrief Defines if textures repeat or draw once within a texture map
* @param wrap Either CLAMP (default) or REPEAT
* @see PGraphics#texture(PImage)
* @see PGraphics#textureMode(int)
*/
public void textureWrap(int wrap) {
if (recorder != null) recorder.textureWrap(wrap);
g.textureWrap(wrap);
}
/**
* Sets a texture to be applied to vertex points. The <b>texture()</b> function
* must be called between <b>beginShape()</b> and <b>endShape()</b> and before
* any calls to <b>vertex()</b>. This function only works with the P2D and P3D
* renderers.
* <p/>
* When textures are in use, the fill color is ignored. Instead, use
* <b>tint()</b> to specify the color of the texture as it is applied to the
* shape.
*
* @webref image:textures
* @webBrief Sets a texture to be applied to vertex points
* @param image reference to a PImage object
* @see PGraphics#textureMode(int)
* @see PGraphics#textureWrap(int)
* @see PGraphics#beginShape(int)
* @see PGraphics#endShape(int)
* @see PGraphics#vertex(float, float, float, float, float)
*/
public void texture(PImage image) {
if (recorder != null) recorder.texture(image);
g.texture(image);
}
/**
* Removes texture image for current shape.
* Needs to be called between beginShape and endShape
*
*/
public void noTexture() {
if (recorder != null) recorder.noTexture();
g.noTexture();
}
public void vertex(float x, float y) {
if (recorder != null) recorder.vertex(x, y);
g.vertex(x, y);
}
public void vertex(float x, float y, float z) {
if (recorder != null) recorder.vertex(x, y, z);
g.vertex(x, y, z);
}
/**
* Used by renderer subclasses or PShape to efficiently pass in already
* formatted vertex information.
* @param v vertex parameters, as a float array of length VERTEX_FIELD_COUNT
*/
public void vertex(float[] v) {
if (recorder != null) recorder.vertex(v);
g.vertex(v);
}
public void vertex(float x, float y, float u, float v) {
if (recorder != null) recorder.vertex(x, y, u, v);
g.vertex(x, y, u, v);
}
/**
*
* All shapes are constructed by connecting a series of vertices.
* <b>vertex()</b> is used to specify the vertex coordinates for points, lines,
* triangles, quads, and polygons. It is used exclusively within the
* <b>beginShape()</b> and <b>endShape()</b> functions. <br />
* <br />
* Drawing a vertex in 3D using the <b>z</b> parameter requires the P3D
* parameter in combination with size, as shown in the above example. <br />
* <br />
* This function is also used to map a texture onto geometry. The
* <b>texture()</b> function declares the texture to apply to the geometry and
* the <b>u</b> and <b>v</b> coordinates set define the mapping of this texture
* to the form. By default, the coordinates used for <b>u</b> and <b>v</b> are
* specified in relation to the image's size in pixels, but this relation can be
* changed with <b>textureMode()</b>.
*
* @webref shape:vertex
* @webBrief All shapes are constructed by connecting a series of vertices
* @param x x-coordinate of the vertex
* @param y y-coordinate of the vertex
* @param z z-coordinate of the vertex
* @param u horizontal coordinate for the texture mapping
* @param v vertical coordinate for the texture mapping
* @see PGraphics#beginShape(int)
* @see PGraphics#endShape(int)
* @see PGraphics#bezierVertex(float, float, float, float, float, float, float,
* float, float)
* @see PGraphics#quadraticVertex(float, float, float, float, float, float)
* @see PGraphics#curveVertex(float, float, float)
* @see PGraphics#texture(PImage)
*/
public void vertex(float x, float y, float z, float u, float v) {
if (recorder != null) recorder.vertex(x, y, z, u, v);
g.vertex(x, y, z, u, v);
}
/**
* Use the <b>beginContour()</b> and <b>endContour()</b> function to
* create negative shapes within shapes such as the center of the
* letter "O". <b>beginContour()</b> begins recording vertices for the
* shape and <b>endContour()</b> stops recording. The vertices that
* define a negative shape must "wind" in the opposite direction from
* the exterior shape. First draw vertices for the exterior shape in
* clockwise order, then for internal shapes, draw vertices counterclockwise.<br />
* <br />
* These functions can only be used within a <b>beginShape()</b>/<b>endShape()</b>
* pair and transformations such as <b>translate()</b>, <b>rotate()</b>, and
* <b>scale()</b> do not work within a <b>beginContour()</b>/<b>endContour()</b>
* pair. It is also not possible to use other shapes, such as <b>ellipse()</b>
* or <b>rect()</b> within.
*
* @webref shape:vertex
* @webBrief Begins recording vertices for the shape
*/
public void beginContour() {
if (recorder != null) recorder.beginContour();
g.beginContour();
}
/**
* Use the <b>beginContour()</b> and <b>endContour()</b> function to
* create negative shapes within shapes such as the center of the
* letter "O". <b>beginContour()</b> begins recording vertices for
* the shape and <b>endContour()</b> stops recording. The vertices
* that define a negative shape must "wind" in the opposite direction
* from the exterior shape. First draw vertices for the exterior shape
* in clockwise order, then for internal shapes, draw vertices counterclockwise.<br />
* <br />
* These functions can only be used within a <b>beginShape()</b>/<b>endShape()</b>
* pair and transformations such as <b>translate()</b>, <b>rotate()</b>, and
* <b>scale()</b> do not work within a <b>beginContour()</b>/<b>endContour()</b>
* pair. It is also not possible to use other shapes, such as <b>ellipse()</b>
* or <b>rect()</b> within.
*
* @webref shape:vertex
* @webBrief Stops recording vertices for the shape
*/
public void endContour() {
if (recorder != null) recorder.endContour();
g.endContour();
}
public void endShape() {
if (recorder != null) recorder.endShape();
g.endShape();
}
/**
*
* The <b>endShape()</b> function is the companion to <b>beginShape()</b>
* and may only be called after <b>beginShape()</b>. When <b>endshape()</b>
* is called, all the image data defined since the previous call to
* <b>beginShape()</b> is written into the image buffer. The constant CLOSE
* as the value for the MODE parameter to close the shape (to connect the
* beginning and the end).
*
* @webref shape:vertex
* @webBrief the companion to <b>beginShape()</b> and may only be called after <b>beginShape()</b>
* @param mode use CLOSE to close the shape
* @see PShape
* @see PGraphics#beginShape(int)
*/
public void endShape(int mode) {
if (recorder != null) recorder.endShape(mode);
g.endShape(mode);
}
/**
* Loads geometry into a variable of type <b>PShape</b>. SVG and OBJ
* files may be loaded. To load correctly, the file must be located
* in the data directory of the current sketch. In most cases,
* <b>loadShape()</b> should be used inside <b>setup()</b> because
* loading shapes inside <b>draw()</b> will reduce the speed of a sketch.<br />
* <br />
* Alternatively, the file maybe be loaded from anywhere on the local
* computer using an absolute path (something that starts with / on
* Unix and Linux, or a drive letter on Windows), or the filename
* parameter can be a URL for a file found on a network.<br />
* <br />
* If the file is not available or an error occurs, <b>null</b> will
* be returned and an error message will be printed to the console.
* The error message does not halt the program, however the null value
* may cause a NullPointerException if your code does not check whether
* the value returned is null.<br />
*
* @webref shape
* @webBrief Loads geometry into a variable of type <b>PShape</b>
* @param filename name of file to load, can be .svg or .obj
* @see PShape
* @see PApplet#createShape()
*/
public PShape loadShape(String filename) {
return g.loadShape(filename);
}
/**
* @nowebref
*/
public PShape loadShape(String filename, String options) {
return g.loadShape(filename, options);
}
/**
* The <b>createShape()</b> function is used to define a new shape.
* Once created, this shape can be drawn with the <b>shape()</b>
* function. The basic way to use the function defines new primitive
* shapes. One of the following parameters are used as the first
* parameter: <b>ELLIPSE</b>, <b>RECT</b>, <b>ARC</b>, <b>TRIANGLE</b>,
* <b>SPHERE</b>, <b>BOX</b>, <b>QUAD</b>, or <b>LINE</b>. The
* parameters for each of these different shapes are the same as their
* corresponding functions: <b>ellipse()</b>, <b>rect()</b>, <b>arc()</b>,
* <b>triangle()</b>, <b>sphere()</b>, <b>box()</b>, <b>quad()</b>, and
* <b>line()</b>. The first example above clarifies how this works.<br />
* <br />
* Custom, unique shapes can be made by using <b>createShape()</b> without
* a parameter. After the shape is started, the drawing attributes and
* geometry can be set directly to the shape within the <b>beginShape()</b>
* and <b>endShape()</b> methods. See the second example above for specifics,
* and the reference for <b>beginShape()</b> for all of its options.<br />
* <br />
* The <b>createShape()</b> function can also be used to make a complex
* shape made of other shapes. This is called a "group" and it's created by
* using the parameter <b>GROUP</b> as the first parameter. See the fourth
* example above to see how it works.<br />
* <br />
* After using <b>createShape()</b>, stroke and fill color can be set by
* calling methods like <b>setFill()</b> and <b>setStroke()</b>, as seen
* in the examples above. The complete list of methods and fields for the
* PShape | MySketch |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformTests.java | {
"start": 1933,
"end": 8115
} | class ____ extends ESTestCase {
public void testExecute() throws Exception {
ChainTransform transform = new ChainTransform(
new NamedExecutableTransform.Transform("name1"),
new NamedExecutableTransform.Transform("name2"),
new NamedExecutableTransform.Transform("name3")
);
ExecutableChainTransform executable = new ExecutableChainTransform(
transform,
logger,
new NamedExecutableTransform("name1"),
new NamedExecutableTransform("name2"),
new NamedExecutableTransform("name3")
);
WatchExecutionContext ctx = mock(WatchExecutionContext.class);
Payload payload = new Payload.Simple(new HashMap<>());
ChainTransform.Result result = executable.execute(ctx, payload);
assertThat(result.status(), is(Transform.Result.Status.SUCCESS));
assertThat(result.results(), hasSize(3));
assertThat(result.results().get(0), instanceOf(NamedExecutableTransform.Result.class));
assertThat(result.results().get(0).status(), is(Transform.Result.Status.SUCCESS));
assertThat(getNames(result.results().get(0).payload()), hasSize(1));
assertThat(getNames(result.results().get(0).payload()), contains("name1"));
assertThat(result.results().get(1), instanceOf(NamedExecutableTransform.Result.class));
assertThat(result.results().get(1).status(), is(Transform.Result.Status.SUCCESS));
assertThat(getNames(result.results().get(1).payload()), hasSize(2));
assertThat(getNames(result.results().get(1).payload()), contains("name1", "name2"));
assertThat(result.results().get(2), instanceOf(NamedExecutableTransform.Result.class));
assertThat(result.results().get(2).status(), is(Transform.Result.Status.SUCCESS));
assertThat(getNames(result.results().get(2).payload()), hasSize(3));
assertThat(getNames(result.results().get(2).payload()), contains("name1", "name2", "name3"));
Map<String, Object> data = result.payload().data();
assertThat(data, notNullValue());
assertThat(data, hasKey("names"));
assertThat(data.get("names"), instanceOf(List.class));
List<String> names = getNames(result.payload());
assertThat(names, hasSize(3));
assertThat(names, contains("name1", "name2", "name3"));
}
public void testExecuteFailure() throws Exception {
ChainTransform transform = new ChainTransform(
new NamedExecutableTransform.Transform("name1"),
new NamedExecutableTransform.Transform("name2"),
new FailingExecutableTransform.Transform()
);
ExecutableChainTransform executable = new ExecutableChainTransform(
transform,
logger,
new NamedExecutableTransform("name1"),
new NamedExecutableTransform("name2"),
new FailingExecutableTransform(logger)
);
WatchExecutionContext ctx = mock(WatchExecutionContext.class);
Payload payload = new Payload.Simple(new HashMap<>());
ChainTransform.Result result = executable.execute(ctx, payload);
assertThat(result.status(), is(Transform.Result.Status.FAILURE));
assertThat(result.reason(), notNullValue());
assertThat(result.results(), hasSize(3));
assertThat(result.results().get(0), instanceOf(NamedExecutableTransform.Result.class));
assertThat(result.results().get(0).status(), is(Transform.Result.Status.SUCCESS));
assertThat(getNames(result.results().get(0).payload()), hasSize(1));
assertThat(getNames(result.results().get(0).payload()), contains("name1"));
assertThat(result.results().get(1), instanceOf(NamedExecutableTransform.Result.class));
assertThat(result.results().get(1).status(), is(Transform.Result.Status.SUCCESS));
assertThat(getNames(result.results().get(1).payload()), hasSize(2));
assertThat(getNames(result.results().get(1).payload()), contains("name1", "name2"));
assertThat(result.results().get(2), instanceOf(FailingExecutableTransform.Result.class));
assertThat(result.results().get(2).status(), is(Transform.Result.Status.FAILURE));
assertThat(result.results().get(2).reason(), containsString("_error"));
}
public void testParser() throws Exception {
TransformRegistry registry = new TransformRegistry(singletonMap("named", new NamedExecutableTransform.Factory(logger)));
ChainTransformFactory transformParser = new ChainTransformFactory(registry);
XContentBuilder builder = jsonBuilder().startArray()
.startObject()
.startObject("named")
.field("name", "name1")
.endObject()
.endObject()
.startObject()
.startObject("named")
.field("name", "name2")
.endObject()
.endObject()
.startObject()
.startObject("named")
.field("name", "name3")
.endObject()
.endObject()
.startObject()
.field("named", "name4")
.endObject()
.endArray();
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
parser.nextToken();
ExecutableChainTransform executable = transformParser.parseExecutable("_id", parser);
assertThat(executable, notNullValue());
assertThat(executable.transform().getTransforms(), notNullValue());
assertThat(executable.transform().getTransforms(), hasSize(4));
for (int i = 0; i < executable.transform().getTransforms().size(); i++) {
assertThat(executable.executableTransforms().get(i), instanceOf(NamedExecutableTransform.class));
assertThat(((NamedExecutableTransform) executable.executableTransforms().get(i)).transform().name, is("name" + (i + 1)));
}
}
@SuppressWarnings("unchecked")
private static List<String> getNames(Payload payload) {
return (List<String>) payload.data().get("names");
}
private static | ChainTransformTests |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/Types.java | {
"start": 5514,
"end": 9010
} | class ____ returned.
*
* @return
*/
static Type getCanonicalType(Class<?> clazz) {
if (clazz.isArray()) {
Class<?> componentType = clazz.getComponentType();
Type resolvedComponentType = getCanonicalType(componentType);
if (componentType != resolvedComponentType) {
// identity check intentional
// a different identity means that we actually replaced the component Class with a ParameterizedType
return new GenericArrayTypeImpl(resolvedComponentType);
}
}
if (clazz.getTypeParameters().length > 0) {
Type[] actualTypeParameters = clazz.getTypeParameters();
return new ParameterizedTypeImpl(clazz, actualTypeParameters, clazz.getDeclaringClass());
}
return clazz;
}
public static Type getCanonicalType(Type type) {
if (type instanceof Class<?>) {
Class<?> clazz = (Class<?>) type;
return getCanonicalType(clazz);
}
return type;
}
static boolean isRawGenericType(Type type) {
if (!(type instanceof Class<?>)) {
return false;
}
Class<?> clazz = (Class<?>) type;
if (clazz.isArray()) {
Class<?> componentType = clazz.getComponentType();
return isRawGenericType(componentType);
}
return clazz.getTypeParameters().length > 0;
}
static boolean containsTypeVariable(Type type) {
type = getCanonicalType(type);
if (type instanceof TypeVariable<?>) {
return true;
}
if (isParameterizedType(type)) {
ParameterizedType parameterizedType = asParameterizedType(type);
for (Type t : parameterizedType.getActualTypeArguments()) {
if (containsTypeVariable(t)) {
return true;
}
}
}
if (type instanceof GenericArrayType) {
GenericArrayType genericArrayType = (GenericArrayType) type;
return containsTypeVariable(genericArrayType.getGenericComponentType());
}
return false;
}
static boolean isIllegalBeanType(Type type) {
if (type instanceof TypeVariable<?>) {
return true;
} else if (isParameterizedType(type)) {
ParameterizedType parameterizedType = asParameterizedType(type);
for (Type typeArgument : parameterizedType.getActualTypeArguments()) {
if (typeArgument instanceof TypeVariable<?>) {
// Parameterized type with type variable is legal
continue;
} else if (typeArgument instanceof WildcardType || isIllegalBeanType(typeArgument)) {
// the 2nd condition is a bit weird, because the spec doesn't say
// anything about illegal type arguments, but Weld has it...
return true;
}
}
} else if (type instanceof GenericArrayType) {
GenericArrayType arrayType = (GenericArrayType) type;
return isIllegalBeanType(arrayType.getGenericComponentType());
}
return false;
}
@SuppressWarnings("unchecked")
private static <T> Class<T> getBound(Type[] bounds) {
if (bounds.length == 0) {
return (Class<T>) Object.class;
} else {
return getRawType(bounds[0]);
}
}
}
| is |
java | redisson__redisson | redisson/src/main/java/org/redisson/reactive/RedissonMapCacheReactive.java | {
"start": 759,
"end": 974
} | class ____<K, V> extends RedissonMapReactive<K, V> {
public RedissonMapCacheReactive(RMap<K, V> map, CommandReactiveExecutor commandExecutor) {
super(map, commandExecutor);
}
}
| RedissonMapCacheReactive |
java | apache__spark | sql/api/src/main/java/org/apache/spark/sql/streaming/GroupStateTimeout.java | {
"start": 1192,
"end": 2603
} | class ____ {
// NOTE: if you're adding new type of timeout, you should also fix the places below:
// - Scala:
// org.apache.spark.sql.execution.streaming.GroupStateImpl.getGroupStateTimeoutFromString
// - Python: pyspark.sql.streaming.state.GroupStateTimeout
/**
* Timeout based on processing time.
* <p>
* The duration of timeout can be set for each group in
* {@code map/flatMapGroupsWithState} by calling {@code GroupState.setTimeoutDuration()}.
* <p>
* See documentation on {@code GroupState} for more details.
*/
public static GroupStateTimeout ProcessingTimeTimeout() {
return ProcessingTimeTimeout$.MODULE$;
}
/**
* Timeout based on event-time.
* <p>
* The event-time timestamp for timeout can be set for each
* group in {@code map/flatMapGroupsWithState} by calling
* {@code GroupState.setTimeoutTimestamp()}.
* In addition, you have to define the watermark in the query using
* {@code Dataset.withWatermark}.
* When the watermark advances beyond the set timestamp of a group and the group has not
* received any data, then the group times out.
* <p>
* See documentation on {@code GroupState} for more details.
*/
public static GroupStateTimeout EventTimeTimeout() { return EventTimeTimeout$.MODULE$; }
/** No timeout. */
public static GroupStateTimeout NoTimeout() { return NoTimeout$.MODULE$; }
}
| GroupStateTimeout |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/InjectableBean.java | {
"start": 5669,
"end": 6085
} | enum ____ {
CLASS,
PRODUCER_FIELD,
PRODUCER_METHOD,
SYNTHETIC,
INTERCEPTOR,
DECORATOR,
BUILTIN,
;
public static Kind from(String value) {
for (Kind kind : values()) {
if (kind.toString().equals(value)) {
return kind;
}
}
return null;
}
}
}
| Kind |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/SourceLocationTest.java | {
"start": 344,
"end": 1102
} | class ____ {
@Test
public void test_0() throws Exception {
String sql = "\nselect getdate()";
DbType[] dbTypes = new DbType[]{DbType.mysql, DbType.oracle, DbType.db2, DbType.odps};
for (DbType dbType : dbTypes) {
SQLSelectStatement stmt = (SQLSelectStatement) SQLUtils
.parseSingleStatement(
sql,
dbType,
SQLParserFeature.KeepSourceLocation
);
SQLExpr expr = stmt.getSelect().getQueryBlock().getSelectList().get(0).getExpr();
assertEquals(2, expr.getSourceLine());
assertEquals(dbType.name(), 8, expr.getSourceColumn());
}
}
}
| SourceLocationTest |
java | netty__netty | codec-base/src/main/java/io/netty/handler/codec/DefaultHeadersImpl.java | {
"start": 904,
"end": 1543
} | class ____<K, V> extends DefaultHeaders<K, V, DefaultHeadersImpl<K, V>> {
public DefaultHeadersImpl(HashingStrategy<K> nameHashingStrategy,
ValueConverter<V> valueConverter, NameValidator<K> nameValidator) {
super(nameHashingStrategy, valueConverter, nameValidator);
}
public DefaultHeadersImpl(HashingStrategy<K> nameHashingStrategy, ValueConverter<V> valueConverter,
NameValidator<K> nameValidator, int arraySizeHint, ValueValidator<V> valueValidator) {
super(nameHashingStrategy, valueConverter, nameValidator, arraySizeHint, valueValidator);
}
}
| DefaultHeadersImpl |
java | apache__rocketmq | proxy/src/test/java/org/apache/rocketmq/proxy/grpc/v2/BaseActivityTest.java | {
"start": 1888,
"end": 4376
} | class ____ extends InitConfigTest {
protected static final Random RANDOM = new Random();
protected MessagingProcessor messagingProcessor;
protected GrpcClientSettingsManager grpcClientSettingsManager;
protected GrpcChannelManager grpcChannelManager;
protected ProxyRelayService proxyRelayService;
protected ReceiptHandleProcessor receiptHandleProcessor;
protected MetadataService metadataService;
protected static final String REMOTE_ADDR = "192.168.0.1:8080";
protected static final String LOCAL_ADDR = "127.0.0.1:8080";
protected Metadata metadata = new Metadata();
protected static final String CLIENT_ID = "client-id" + UUID.randomUUID();
protected static final String JAVA = "JAVA";
public void before() throws Throwable {
super.before();
messagingProcessor = mock(MessagingProcessor.class);
grpcClientSettingsManager = mock(GrpcClientSettingsManager.class);
proxyRelayService = mock(ProxyRelayService.class);
receiptHandleProcessor = mock(ReceiptHandleProcessor.class);
metadataService = mock(MetadataService.class);
metadata.put(GrpcConstants.CLIENT_ID, CLIENT_ID);
metadata.put(GrpcConstants.LANGUAGE, JAVA);
metadata.put(GrpcConstants.REMOTE_ADDRESS, REMOTE_ADDR);
metadata.put(GrpcConstants.LOCAL_ADDRESS, LOCAL_ADDR);
when(messagingProcessor.getProxyRelayService()).thenReturn(proxyRelayService);
when(messagingProcessor.getMetadataService()).thenReturn(metadataService);
grpcChannelManager = new GrpcChannelManager(messagingProcessor.getProxyRelayService(), grpcClientSettingsManager);
}
protected ProxyContext createContext() {
return ProxyContext.create()
.withVal(ContextVariable.CLIENT_ID, CLIENT_ID)
.withVal(ContextVariable.LANGUAGE, JAVA)
.withVal(ContextVariable.REMOTE_ADDRESS, REMOTE_ADDR)
.withVal(ContextVariable.LOCAL_ADDRESS, LOCAL_ADDR)
.withVal(ContextVariable.REMAINING_MS, Duration.ofSeconds(10).toMillis());
}
protected static String buildReceiptHandle(String topic, long popTime, long invisibleTime) {
return ExtraInfoUtil.buildExtraInfo(
RANDOM.nextInt(Integer.MAX_VALUE),
popTime,
invisibleTime,
0,
topic,
"brokerName",
RANDOM.nextInt(8),
RANDOM.nextInt(Integer.MAX_VALUE)
);
}
}
| BaseActivityTest |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/net/VertxConnectionTest.java | {
"start": 21819,
"end": 21921
} | class ____ {
final String id;
Message(String id) {
this.id = id;
}
}
static | Message |
java | google__guice | core/test/com/google/inject/BindingTest.java | {
"start": 21394,
"end": 21436
} | interface ____ {};
private static | Sandwitch |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/cache/annotation/AnnotationCacheOperationSourceTests.java | {
"start": 14209,
"end": 14332
} | class ____ {
@Cacheable
public void multipleCacheConfig() {
}
}
@CacheConfig("myCache")
private | MultipleCacheConfig |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/primitives/MyLong.java | {
"start": 244,
"end": 420
} | class ____ {
private final Long value;
public MyLong( Long value ) {
this.value = value;
}
public Long getValue() {
return value;
}
}
| MyLong |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionInternalTimeServiceWithAsyncState.java | {
"start": 1592,
"end": 3927
} | class ____<K, N>
extends BatchExecutionInternalTimeService<K, N> {
private AsyncExecutionController<K, ?> asyncExecutionController;
BatchExecutionInternalTimeServiceWithAsyncState(
ProcessingTimeService processingTimeService, Triggerable<K, N> triggerTarget) {
super(processingTimeService, triggerTarget);
}
/** Set up the async execution controller. */
public void setup(AsyncExecutionController<K, ?> asyncExecutionController) {
if (asyncExecutionController != null) {
this.asyncExecutionController = asyncExecutionController;
}
}
/**
* Sets the current key. Timers that are due to be fired are collected and will be triggered.
*/
@Override
public void setCurrentKey(K currentKey) throws Exception {
if (currentKey != null && currentKey.equals(this.currentKey)) {
return;
}
currentWatermark = Long.MAX_VALUE;
InternalTimer<K, N> timer;
while ((timer = eventTimeTimersQueue.poll()) != null) {
final InternalTimer<K, N> timerToTrigger = timer;
maintainContextAndProcess(
timerToTrigger, () -> triggerTarget.onEventTime(timerToTrigger));
}
while ((timer = processingTimeTimersQueue.poll()) != null) {
final InternalTimer<K, N> timerToTrigger = timer;
maintainContextAndProcess(
timerToTrigger, () -> triggerTarget.onProcessingTime(timerToTrigger));
}
currentWatermark = Long.MIN_VALUE;
this.currentKey = currentKey;
}
private void maintainContextAndProcess(
InternalTimer<K, N> timer, ThrowingRunnable<Exception> runnable) {
// Since we are in middle of processing a record, we need to maintain the context.
final RecordContext<K> previousContext = asyncExecutionController.getCurrentContext();
RecordContext<K> recordCtx = asyncExecutionController.buildContext(timer, timer.getKey());
recordCtx.retain();
asyncExecutionController.setCurrentContext(recordCtx);
asyncExecutionController.syncPointRequestWithCallback(runnable, true);
recordCtx.release();
asyncExecutionController.setCurrentContext(previousContext);
}
}
| BatchExecutionInternalTimeServiceWithAsyncState |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/ResponseEntity.java | {
"start": 12318,
"end": 16406
} | interface ____<B extends HeadersBuilder<B>> {
/**
* Add the given, single header value under the given name.
* @param headerName the header name
* @param headerValues the header value(s)
* @return this builder
* @see HttpHeaders#add(String, String)
*/
B header(String headerName, String... headerValues);
/**
* Copy the given headers into the entity's headers map.
* @param headers the existing HttpHeaders to copy from
* @return this builder
* @since 4.1.2
* @see HttpHeaders#add(String, String)
*/
B headers(@Nullable HttpHeaders headers);
/**
* Manipulate this entity's headers with the given consumer. The
* headers provided to the consumer are "live", so that the consumer can be used to
* {@linkplain HttpHeaders#set(String, String) overwrite} existing header values,
* {@linkplain HttpHeaders#remove(String) remove} values, or use any of the other
* {@link HttpHeaders} methods.
* @param headersConsumer a function that consumes the {@code HttpHeaders}
* @return this builder
* @since 5.2
*/
B headers(Consumer<HttpHeaders> headersConsumer);
/**
* Set the set of allowed {@link HttpMethod HTTP methods}, as specified
* by the {@code Allow} header.
* @param allowedMethods the allowed methods
* @return this builder
* @see HttpHeaders#setAllow(Set)
*/
B allow(HttpMethod... allowedMethods);
/**
* Set the entity tag of the body, as specified by the {@code ETag} header.
* @param etag the new entity tag
* @return this builder
* @see HttpHeaders#setETag(String)
*/
B eTag(@Nullable String etag);
/**
* Set the time the resource was last changed, as specified by the
* {@code Last-Modified} header.
* @param lastModified the last modified date
* @return this builder
* @since 5.1.4
* @see HttpHeaders#setLastModified(ZonedDateTime)
*/
B lastModified(ZonedDateTime lastModified);
/**
* Set the time the resource was last changed, as specified by the
* {@code Last-Modified} header.
* @param lastModified the last modified date
* @return this builder
* @since 5.1.4
* @see HttpHeaders#setLastModified(Instant)
*/
B lastModified(Instant lastModified);
/**
* Set the time the resource was last changed, as specified by the
* {@code Last-Modified} header.
* <p>The date should be specified as the number of milliseconds since
* January 1, 1970 GMT.
* @param lastModified the last modified date
* @return this builder
* @see HttpHeaders#setLastModified(long)
*/
B lastModified(long lastModified);
/**
* Set the location of a resource, as specified by the {@code Location} header.
* @param location the location
* @return this builder
* @see HttpHeaders#setLocation(URI)
*/
B location(URI location);
/**
* Set the caching directives for the resource, as specified by the HTTP 1.1
* {@code Cache-Control} header.
* <p>A {@code CacheControl} instance can be built like
* {@code CacheControl.maxAge(3600).cachePublic().noTransform()}.
* @param cacheControl a builder for cache-related HTTP response headers
* @return this builder
* @since 4.2
* @see <a href="https://tools.ietf.org/html/rfc7234#section-5.2">RFC-7234 Section 5.2</a>
*/
B cacheControl(CacheControl cacheControl);
/**
* Configure one or more request header names (for example, "Accept-Language") to
* add to the "Vary" response header to inform clients that the response is
* subject to content negotiation and variances based on the value of the
* given request headers. The configured request header names are added only
* if not already present in the response "Vary" header.
* @param requestHeaders request header names
* @since 4.3
*/
B varyBy(String... requestHeaders);
/**
* Build the response entity with no body.
* @return the response entity
* @see BodyBuilder#body(Object)
*/
<T> ResponseEntity<T> build();
}
/**
* Defines a builder that adds a body to the response entity.
* @since 4.1
*/
public | HeadersBuilder |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/OperationCost.java | {
"start": 1254,
"end": 6833
} | class ____ {
/** Head costs for getFileStatus() directory probe: {@value}. */
public static final int FILESTATUS_DIR_PROBE_H = 0;
/** List costs for getFileStatus() directory probe: {@value}. */
public static final int FILESTATUS_DIR_PROBE_L = 1;
/** Head cost getFileStatus() file probe only. */
public static final int FILESTATUS_FILE_PROBE_H = 1;
/** Liast cost getFileStatus() file probe only. */
public static final int FILESTATUS_FILE_PROBE_L = 0;
/**
* Delete cost when deleting an object.
*/
public static final int DELETE_OBJECT_REQUEST = 1;
/**
* Delete cost when deleting a marker.
* Note: if bulk delete is disabled, this changes to being
* the number of directories deleted.
*/
public static final int DELETE_MARKER_REQUEST = DELETE_OBJECT_REQUEST;
/**
* No Head or List IO takes place; other operations
* may still take place.
*/
public static final OperationCost NO_IO =
new OperationCost(0, 0);
/**
* More detailed description of the NO_IO cost.
*/
public static final OperationCost NO_HEAD_OR_LIST =
NO_IO;
/** A HEAD operation. */
public static final OperationCost HEAD_OPERATION = new OperationCost(1, 0);
/** A LIST operation. */
public static final OperationCost LIST_OPERATION = new OperationCost(0, 1);
/**
* Cost of {@link org.apache.hadoop.fs.s3a.impl.StatusProbeEnum#DIRECTORIES}.
*/
public static final OperationCost FILE_STATUS_DIR_PROBE = LIST_OPERATION;
/**
* Cost of {@link org.apache.hadoop.fs.s3a.impl.StatusProbeEnum#FILE}.
*/
public static final OperationCost FILE_STATUS_FILE_PROBE = HEAD_OPERATION;
/**
* Cost of getFileStatus on root directory.
*/
public static final OperationCost ROOT_FILE_STATUS_PROBE = NO_HEAD_OR_LIST;
/**
* Cost of {@link org.apache.hadoop.fs.s3a.impl.StatusProbeEnum#ALL}.
*/
public static final OperationCost FILE_STATUS_ALL_PROBES =
FILE_STATUS_FILE_PROBE.plus(FILE_STATUS_DIR_PROBE);
/** getFileStatus() on a file which exists. */
public static final OperationCost GET_FILE_STATUS_ON_FILE =
FILE_STATUS_FILE_PROBE;
/** List costs for getFileStatus() on a non-empty directory: {@value}. */
public static final OperationCost GET_FILE_STATUS_ON_DIR =
FILE_STATUS_FILE_PROBE.plus(FILE_STATUS_DIR_PROBE);
/** Costs for getFileStatus() on an empty directory: {@value}. */
public static final OperationCost GET_FILE_STATUS_ON_EMPTY_DIR =
GET_FILE_STATUS_ON_DIR;
/** getFileStatus() directory marker which exists. */
public static final OperationCost GET_FILE_STATUS_ON_DIR_MARKER =
GET_FILE_STATUS_ON_EMPTY_DIR;
/** getFileStatus() call which fails to find any entry. */
public static final OperationCost GET_FILE_STATUS_FNFE =
FILE_STATUS_ALL_PROBES;
/** listLocatedStatus always does a LIST. */
public static final OperationCost LIST_LOCATED_STATUS_LIST_OP =
new OperationCost(0, 1);
/** listFiles always does a LIST. */
public static final OperationCost LIST_FILES_LIST_OP = LIST_OPERATION;
/** listStatus always does a LIST. */
public static final OperationCost LIST_STATUS_LIST_OP = LIST_OPERATION;
/**
* Metadata cost of a copy operation, as used during rename.
*/
public static final OperationCost COPY_OP =
new OperationCost(1, 0);
/**
* Cost of renaming a file to a different directory.
* <p></p>
* LIST on dest not found, look for dest dir, and then, at
* end of rename, whether a parent dir needs to be created.
*/
public static final OperationCost RENAME_SINGLE_FILE_DIFFERENT_DIR =
FILE_STATUS_FILE_PROBE // source file probe
.plus(GET_FILE_STATUS_FNFE) // dest does not exist
.plus(FILE_STATUS_FILE_PROBE) // parent dir of dest is not file
.plus(FILE_STATUS_DIR_PROBE) // recreate source parent dir?
.plus(COPY_OP); // metadata read on copy
/**
* Cost of renaming a file to the same directory
* <p></p>
* No need to look for parent directories, so only file
* existence checks and the copy.
*/
public static final OperationCost RENAME_SINGLE_FILE_SAME_DIR =
FILE_STATUS_FILE_PROBE // source file probe
.plus(GET_FILE_STATUS_FNFE) // dest must not exist
.plus(COPY_OP); // metadata read on copy
/**
* create(overwrite = true) does not look for the file existing.
*/
public static final OperationCost CREATE_FILE_OVERWRITE =
FILE_STATUS_DIR_PROBE;
/**
* create(overwrite = false) runs all the checks.
*/
public static final OperationCost CREATE_FILE_NO_OVERWRITE =
FILE_STATUS_ALL_PROBES;
/** Expected HEAD count. */
private final int head;
/** Expected LIST count. */
private final int list;
/**
* Constructor.
* @param head head requests.
* @param list list requests.
*/
public OperationCost(final int head,
final int list) {
this.head = head;
this.list = list;
}
/** Expected HEAD count. */
int head() {
return head;
}
/** Expected LIST count. */
int list() {
return list;
}
/**
* Add to create a new cost.
* @param that the other entry
* @return cost of the combined operation.
*/
public OperationCost plus(OperationCost that) {
return new OperationCost(
head + that.head,
list + that.list);
}
@Override
public String toString() {
return "OperationCost{" +
"head=" + head +
", list=" + list +
'}';
}
}
| OperationCost |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/transport/LocalHttpClientTransport.java | {
"start": 4195,
"end": 4922
} | class ____ implements DetachedSocketFactory {
private static final String NPIPE_PREFIX = "npipe://";
private final ResolvedDockerHost dockerHost;
LocalDetachedSocketFactory(ResolvedDockerHost dockerHost) {
this.dockerHost = dockerHost;
}
@Override
public Socket create(Proxy proxy) throws IOException {
String address = this.dockerHost.getAddress();
if (address.startsWith(NPIPE_PREFIX)) {
return NamedPipeSocket.get(address.substring(NPIPE_PREFIX.length()));
}
return (!Platform.isWindows()) ? UnixDomainSocket.get(address) : NamedPipeSocket.get(address);
}
}
/**
* {@link DnsResolver} that ensures only the loopback address is used.
*/
private static final | LocalDetachedSocketFactory |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/bootstrap/SpringBootTestContextBootstrapperTests.java | {
"start": 6551,
"end": 6656
} | class ____ {
}
@SpringBootTest(args = "--app.test=different")
static | SpringBootTestSameArgsConfiguration |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/DefaultObjectDeserializerTest6.java | {
"start": 874,
"end": 1213
} | class ____ {
private final Map<Object, Map<Object, Object>> value;
@JSONCreator
public Entity(@JSONField(name = "value") Map<Object, Map<Object, Object>> value){
this.value = value;
}
public Map<Object, Map<Object, Object>> getValue() {
return value;
}
}
}
| Entity |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithKeyedOperator.java | {
"start": 9927,
"end": 12674
} | class ____ extends ReadOnlyContext {
private final ExecutionConfig config;
private final Map<MapStateDescriptor<?, ?>, BroadcastState<?, ?>> states;
private final TimerService timerService;
private StreamRecord<IN1> element;
ReadOnlyContextImpl(
final ExecutionConfig executionConfig,
final KeyedBroadcastProcessFunction<KS, IN1, IN2, OUT> function,
final Map<MapStateDescriptor<?, ?>, BroadcastState<?, ?>> broadcastStates,
final TimerService timerService) {
function.super();
this.config = Preconditions.checkNotNull(executionConfig);
this.states = Preconditions.checkNotNull(broadcastStates);
this.timerService = Preconditions.checkNotNull(timerService);
}
void setElement(StreamRecord<IN1> e) {
this.element = e;
}
@Override
public Long timestamp() {
checkState(element != null);
return element.hasTimestamp() ? element.getTimestamp() : null;
}
@Override
public TimerService timerService() {
return timerService;
}
@Override
public long currentProcessingTime() {
return timerService.currentProcessingTime();
}
@Override
public long currentWatermark() {
return timerService.currentWatermark();
}
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
checkArgument(outputTag != null, "OutputTag must not be null.");
output.collect(outputTag, new StreamRecord<>(value, element.getTimestamp()));
}
@Override
public <K, V> ReadOnlyBroadcastState<K, V> getBroadcastState(
MapStateDescriptor<K, V> stateDescriptor) {
Preconditions.checkNotNull(stateDescriptor);
stateDescriptor.initializeSerializerUnlessSet(config);
ReadOnlyBroadcastState<K, V> state =
(ReadOnlyBroadcastState<K, V>) states.get(stateDescriptor);
if (state == null) {
throw new IllegalArgumentException(
"The requested state does not exist. "
+ "Check for typos in your state descriptor, or specify the state descriptor "
+ "in the datastream.broadcast(...) call if you forgot to register it.");
}
return state;
}
@Override
@SuppressWarnings("unchecked")
public KS getCurrentKey() {
return (KS) CoBroadcastWithKeyedOperator.this.getCurrentKey();
}
}
private | ReadOnlyContextImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/loader/ast/spi/MultiNaturalIdLoader.java | {
"start": 324,
"end": 996
} | interface ____<E> extends EntityMultiLoader<E> {
/**
* Load multiple entities by natural-id. The exact result depends on the passed options.
*
* @param naturalIds The natural-ids to load. The values of this array will depend on whether the
* natural-id is simple or complex.
*
* @param <K> The basic form for a natural-id is a Map of its attribute values, or an array of the
* values positioned according to "attribute ordering". Simple natural-ids can also be expressed
* by their simple (basic/embedded) type.
*/
<K> List<E> multiLoad(K[] naturalIds, MultiNaturalIdLoadOptions options, SharedSessionContractImplementor session);
}
| MultiNaturalIdLoader |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/CollectingErrorsTest.java | {
"start": 4620,
"end": 5922
} | class ____ {
@Test
@DisplayName("should fail fast by default when error encountered")
void failFastDefault() {
// setup
String json = "{\"name\":\"John\",\"age\":\"not-a-number\"}";
// when/then
assertThatThrownBy(() -> MAPPER.readValue(json, Person.class))
.isInstanceOf(DatabindException.class)
.hasMessageContaining("not-a-number");
}
@Test
@DisplayName("should fail fast when using regular readValue even after problemCollectingReader")
void failFastAfterCollectErrors() {
// setup
String json = "{\"name\":\"John\",\"age\":\"invalid\"}";
ObjectReader reader = MAPPER.readerFor(Person.class).problemCollectingReader();
// when/then - using regular readValue, not readValueCollectingProblems
assertThatThrownBy(() -> reader.readValue(json))
.isInstanceOf(DatabindException.class);
}
}
/*
/**********************************************************************
/* Test: Per-call bucket isolation
/**********************************************************************
*/
@Nested
@DisplayName("Per-call bucket isolation")
| DefaultBehaviorTests |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/JobGraphGeneratorTestBase.java | {
"start": 119306,
"end": 119639
} | class ____ extends AbstractStreamOperator<Integer>
implements OneInputStreamOperator<Integer, Integer> {
@Override
public void processElement(StreamRecord<Integer> element) throws Exception {
throw new UnsupportedOperationException();
}
}
private static | SerializationTestOperator |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/CharSerializerTest.java | {
"start": 1102,
"end": 1912
} | class ____ extends SerializerTestBase<Character> {
@Override
protected TypeSerializer<Character> createSerializer() {
return new CharSerializer();
}
@Override
protected int getLength() {
return 2;
}
@Override
protected Class<Character> getTypeClass() {
return Character.class;
}
@Override
protected Character[] getTestData() {
Random rnd = new Random(874597969123412341L);
int rndInt = rnd.nextInt(Character.MAX_VALUE);
return new Character[] {
new Character('a'),
new Character('@'),
new Character('ä'),
new Character('1'),
new Character((char) rndInt),
Character.MAX_VALUE,
Character.MIN_VALUE
};
}
}
| CharSerializerTest |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ModuleUtils.java | {
"start": 3136,
"end": 4029
} | class ____ to apply; never {@code null}
* @return an immutable list of all such classes found; never {@code null}
* but potentially empty
*/
public static List<Class<?>> findAllClassesInModule(String moduleName, ClassFilter filter) {
Preconditions.notBlank(moduleName, "Module name must not be null or empty");
Preconditions.notNull(filter, "Class filter must not be null");
logger.debug(() -> "Looking for classes in module: " + moduleName);
// @formatter:off
Set<ModuleReference> moduleReferences = streamResolvedModules(isEqual(moduleName))
.map(ResolvedModule::reference)
.collect(toSet());
// @formatter:on
return scan(moduleReferences, filter, ModuleUtils.class.getClassLoader());
}
/**
* Find all {@linkplain Class classes} for the given module.
*
* @param module the module to scan; never {@code null} or <em>unnamed</em>
* @param filter the | filter |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/benchmark/e2e/HandleGlobalFailureAndRestartAllTasksBenchmark.java | {
"start": 2478,
"end": 5886
} | class ____ extends SchedulerEndToEndBenchmarkBase {
private static final int SLOTS_PER_TASK_EXECUTOR = 4;
private DefaultScheduler scheduler;
private ManuallyTriggeredScheduledExecutor taskRestartExecutor;
public void setup(JobConfiguration jobConfiguration) throws Exception {
taskRestartExecutor = new ManuallyTriggeredScheduledExecutor();
// Use DirectScheduledExecutorService to ensure that we can run
// DefaultScheduler#restartTasks in the current thread synchronously when tasks restart is
// triggered.
scheduledExecutorService = new DirectScheduledExecutorService();
super.setup(jobConfiguration);
scheduler =
createScheduler(
jobGraph,
physicalSlotProvider,
mainThreadExecutor,
scheduledExecutorService,
taskRestartExecutor,
new FixedDelayRestartBackoffTimeStrategy
.FixedDelayRestartBackoffTimeStrategyFactory(1, 1)
.create());
scheduler.startScheduling();
offerSlots();
}
public void handleGlobalFailureAndRestartAllTasks() throws Exception {
// trigger failover, force reset state to canceled.
scheduler.handleGlobalFailure(new RuntimeException("For test."));
completeCancellingForAllVertices(scheduler.getExecutionGraph());
taskRestartExecutor.triggerScheduledTasks();
}
private DefaultScheduler createScheduler(
JobGraph jobGraph,
PhysicalSlotProvider physicalSlotProvider,
ComponentMainThreadExecutor mainThreadExecutor,
ScheduledExecutorService executorService,
ScheduledExecutor taskRestartExecutor,
RestartBackoffTimeStrategy restartBackoffTimeStrategy)
throws Exception {
DefaultSchedulerBuilder schedulerBuilder =
new DefaultSchedulerBuilder(
jobGraph,
mainThreadExecutor,
executorService,
executorService,
taskRestartExecutor)
.setExecutionSlotAllocatorFactory(
SchedulerTestingUtils.newSlotSharingExecutionSlotAllocatorFactory(
physicalSlotProvider))
.setRestartBackoffTimeStrategy(restartBackoffTimeStrategy);
if (jobGraph.getJobType() == JobType.BATCH) {
return createAdaptiveBatchScheduler(schedulerBuilder, jobConfiguration);
} else {
return schedulerBuilder.build();
}
}
private void offerSlots() {
final int numberSlots =
StreamSupport.stream(jobGraph.getVertices().spliterator(), false)
.mapToInt(JobVertex::getParallelism)
.sum();
for (int i = 0; i < Math.ceil((double) numberSlots / SLOTS_PER_TASK_EXECUTOR); i++) {
SlotPoolUtils.tryOfferSlots(
slotPool,
mainThreadExecutor,
Collections.nCopies(SLOTS_PER_TASK_EXECUTOR, ResourceProfile.ANY));
}
}
}
| HandleGlobalFailureAndRestartAllTasksBenchmark |
java | netty__netty | transport-udt/src/test/java/io/netty/test/udt/util/EchoMessageHandler.java | {
"start": 1226,
"end": 2408
} | class ____ extends ChannelInboundHandlerAdapter {
private static final InternalLogger log = InternalLoggerFactory.getInstance(EchoMessageHandler.class);
private final UdtMessage message;
private volatile int counter;
public EchoMessageHandler(final int messageSize) {
final ByteBuf byteBuf = Unpooled.buffer(messageSize);
for (int i = 0; i < byteBuf.capacity(); i++) {
byteBuf.writeByte((byte) i);
}
message = new UdtMessage(byteBuf);
}
@Override
public void channelActive(final ChannelHandlerContext ctx) throws Exception {
log.info("ECHO active {}", NioUdtProvider.socketUDT(ctx.channel()).toStringOptions());
ctx.writeAndFlush(message);
}
@Override
public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable e) {
log.error("exception", e);
ctx.close();
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
counter += ((UdtMessage) msg).content().readableBytes();
ctx.writeAndFlush(msg);
}
public int counter() {
return counter;
}
}
| EchoMessageHandler |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/sps/ExternalSPSBlockMoveTaskHandler.java | {
"start": 3026,
"end": 3369
} | class ____ the external SPS block movements. This will move the
* given block to a target datanode by directly establishing socket connection
* to it and invokes function
* {@link Sender#replaceBlock(ExtendedBlock, StorageType, Token, String,
* DatanodeInfo, String)}.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public | handles |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isEqualToNormalizingPunctuationAndWhitespace_Test.java | {
"start": 1176,
"end": 2281
} | class ____ extends CharSequenceAssertBaseTest {
@Override
protected CharSequenceAssert invoke_api_method() {
return assertions.isEqualToNormalizingPunctuationAndWhitespace("Game of Thrones");
}
@Override
protected void verify_internal_effects() {
verify(strings).assertEqualsNormalizingPunctuationAndWhitespace(getInfo(assertions), getActual(assertions),
"Game of Thrones");
}
@ParameterizedTest
@MethodSource("notEqualToNormalizingWhiteSpaceGenerator")
void should_pass_if_actual_is_equal_normalizing_breaking_spaces(String actual, String expected) {
assertThat(actual).isEqualToNormalizingPunctuationAndWhitespace(expected);
}
public static Stream<Arguments> notEqualToNormalizingWhiteSpaceGenerator() {
return NON_BREAKING_SPACES.stream()
.map(nonBreakingSpace -> arguments("my" + nonBreakingSpace
+ "foo bar", "my foo bar"));
}
}
| CharSequenceAssert_isEqualToNormalizingPunctuationAndWhitespace_Test |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/codec/cbor/Jackson2CborEncoderTests.java | {
"start": 1766,
"end": 3821
} | class ____ extends AbstractLeakCheckingTests {
private static final MimeType CBOR_MIME_TYPE = new MimeType("application", "cbor");
private final ObjectMapper mapper = Jackson2ObjectMapperBuilder.cbor().build();
private final Jackson2CborEncoder encoder = new Jackson2CborEncoder();
private Consumer<DataBuffer> pojoConsumer(Pojo expected) {
return dataBuffer -> {
try {
Pojo actual = this.mapper.reader().forType(Pojo.class)
.readValue(DataBufferTestUtils.dumpBytes(dataBuffer));
assertThat(actual).isEqualTo(expected);
release(dataBuffer);
}
catch (IOException ex) {
throw new UncheckedIOException(ex);
}
};
}
@Test
void canEncode() {
ResolvableType pojoType = ResolvableType.forClass(Pojo.class);
assertThat(this.encoder.canEncode(pojoType, CBOR_MIME_TYPE)).isTrue();
assertThat(this.encoder.canEncode(pojoType, null)).isTrue();
// SPR-15464
assertThat(this.encoder.canEncode(ResolvableType.NONE, null)).isTrue();
}
@Test
void canNotEncode() {
assertThat(this.encoder.canEncode(ResolvableType.forClass(String.class), null)).isFalse();
assertThat(this.encoder.canEncode(ResolvableType.forClass(Pojo.class), APPLICATION_XML)).isFalse();
ResolvableType sseType = ResolvableType.forClass(ServerSentEvent.class);
assertThat(this.encoder.canEncode(sseType, CBOR_MIME_TYPE)).isFalse();
}
@Test
void encode() {
Pojo value = new Pojo("foo", "bar");
DataBuffer result = encoder.encodeValue(value, this.bufferFactory, ResolvableType.forClass(Pojo.class), CBOR_MIME_TYPE, null);
pojoConsumer(value).accept(result);
}
@Test
void encodeStream() {
Pojo pojo1 = new Pojo("foo", "bar");
Pojo pojo2 = new Pojo("foofoo", "barbar");
Pojo pojo3 = new Pojo("foofoofoo", "barbarbar");
Flux<Pojo> input = Flux.just(pojo1, pojo2, pojo3);
ResolvableType type = ResolvableType.forClass(Pojo.class);
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
encoder.encode(input, this.bufferFactory, type, CBOR_MIME_TYPE, null));
}
}
| Jackson2CborEncoderTests |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/client/reactive/HttpComponentsClientHttpConnector.java | {
"start": 5736,
"end": 6236
} | class ____ implements FutureCallback<Void> {
private final MonoSink<?> sink;
public ResultCallback(MonoSink<?> sink) {
this.sink = sink;
}
@Override
public void completed(Void result) {
this.sink.success();
}
@Override
public void failed(Exception ex) {
this.sink.error(ex instanceof HttpStreamResetException && ex.getCause() != null ? ex.getCause() : ex);
}
@Override
public void cancelled() {
this.sink.error(new CancellationException());
}
}
}
| ResultCallback |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/support/DynamicPropertyRegistrarBeanInitializer.java | {
"start": 1529,
"end": 3164
} | class ____ implements BeanFactoryInitializer<ListableBeanFactory>, EnvironmentAware {
private static final Log logger = LogFactory.getLog(DynamicPropertyRegistrarBeanInitializer.class);
/**
* The bean name of the internally managed {@code DynamicPropertyRegistrarBeanInitializer}.
*/
static final String BEAN_NAME =
"org.springframework.test.context.support.internalDynamicPropertyRegistrarBeanInitializer";
private @Nullable ConfigurableEnvironment environment;
@Override
public void setEnvironment(Environment environment) {
if (!(environment instanceof ConfigurableEnvironment configurableEnvironment)) {
throw new IllegalArgumentException("Environment must be a ConfigurableEnvironment");
}
this.environment = configurableEnvironment;
}
@Override
public void initialize(ListableBeanFactory beanFactory) {
if (this.environment == null) {
throw new IllegalStateException("Environment is required");
}
String[] beanNames = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(
beanFactory, DynamicPropertyRegistrar.class);
if (beanNames.length > 0) {
DynamicValuesPropertySource propertySource = DynamicValuesPropertySource.getOrCreate(this.environment);
DynamicPropertyRegistry registry = propertySource.dynamicPropertyRegistry;
for (String name : beanNames) {
if (logger.isDebugEnabled()) {
logger.debug("Eagerly initializing DynamicPropertyRegistrar bean '%s'".formatted(name));
}
DynamicPropertyRegistrar registrar = beanFactory.getBean(name, DynamicPropertyRegistrar.class);
registrar.accept(registry);
}
}
}
}
| DynamicPropertyRegistrarBeanInitializer |
java | apache__camel | components/camel-nitrite/src/main/java/org/apache/camel/component/nitrite/operation/collection/RemoveCollectionOperation.java | {
"start": 1329,
"end": 2260
} | class ____ extends AbstractNitriteOperation implements CollectionOperation {
private Filter filter;
private RemoveOptions removeOptions;
public RemoveCollectionOperation(Filter filter) {
this.filter = filter;
}
public RemoveCollectionOperation(Filter filter, RemoveOptions removeOptions) {
this.filter = filter;
this.removeOptions = removeOptions;
}
@Override
protected void execute(Exchange exchange, NitriteEndpoint endpoint) throws Exception {
NitriteCollection collection = (NitriteCollection) endpoint.getNitriteCollection();
if (filter != null && removeOptions != null) {
exchange.getMessage().setHeader(NitriteConstants.WRITE_RESULT, collection.remove(filter, removeOptions));
} else {
exchange.getMessage().setHeader(NitriteConstants.WRITE_RESULT, collection.remove(filter));
}
}
}
| RemoveCollectionOperation |
java | apache__camel | components/camel-ldap/src/generated/java/org/apache/camel/component/ldap/LdapEndpointConfigurer.java | {
"start": 731,
"end": 3028
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
LdapEndpoint target = (LdapEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "base": target.setBase(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "pagesize":
case "pageSize": target.setPageSize(property(camelContext, java.lang.Integer.class, value)); return true;
case "returnedattributes":
case "returnedAttributes": target.setReturnedAttributes(property(camelContext, java.lang.String.class, value)); return true;
case "scope": target.setScope(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "base": return java.lang.String.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "pagesize":
case "pageSize": return java.lang.Integer.class;
case "returnedattributes":
case "returnedAttributes": return java.lang.String.class;
case "scope": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
LdapEndpoint target = (LdapEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "base": return target.getBase();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "pagesize":
case "pageSize": return target.getPageSize();
case "returnedattributes":
case "returnedAttributes": return target.getReturnedAttributes();
case "scope": return target.getScope();
default: return null;
}
}
}
| LdapEndpointConfigurer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/Hamlet.java | {
"start": 73719,
"end": 78180
} | class ____<T extends __> extends EImp<T> implements HamletSpec.TABLE {
public TABLE(String name, T parent, EnumSet<EOpt> opts) {
super(name, parent, opts);
}
@Override
public TABLE<T> $id(String value) {
addAttr("id", value);
return this;
}
@Override
public TABLE<T> $class(String value) {
addAttr("class", value);
return this;
}
@Override
public TABLE<T> $title(String value) {
addAttr("title", value);
return this;
}
@Override
public TABLE<T> $style(String value) {
addAttr("style", value);
return this;
}
@Override
public TABLE<T> $lang(String value) {
addAttr("lang", value);
return this;
}
@Override
public TABLE<T> $dir(Dir value) {
addAttr("dir", value);
return this;
}
@Override
public TABLE<T> $onclick(String value) {
addAttr("onclick", value);
return this;
}
@Override
public TABLE<T> $ondblclick(String value) {
addAttr("ondblclick", value);
return this;
}
@Override
public TABLE<T> $onmousedown(String value) {
addAttr("onmousedown", value);
return this;
}
@Override
public TABLE<T> $onmouseup(String value) {
addAttr("onmouseup", value);
return this;
}
@Override
public TABLE<T> $onmouseover(String value) {
addAttr("onmouseover", value);
return this;
}
@Override
public TABLE<T> $onmousemove(String value) {
addAttr("onmousemove", value);
return this;
}
@Override
public TABLE<T> $onmouseout(String value) {
addAttr("onmouseout", value);
return this;
}
@Override
public TABLE<T> $onkeypress(String value) {
addAttr("onkeypress", value);
return this;
}
@Override
public TABLE<T> $onkeydown(String value) {
addAttr("onkeydown", value);
return this;
}
@Override
public TABLE<T> $onkeyup(String value) {
addAttr("onkeyup", value);
return this;
}
@Override
public TABLE<T> caption(String cdata) {
return caption().__(cdata).__();
}
@Override
public CAPTION<TABLE<T>> caption() {
closeAttrs();
return caption_(this, false);
}
@Override
public COLGROUP<TABLE<T>> colgroup() {
closeAttrs();
return colgroup_(this, false);
}
@Override
public THEAD<TABLE<T>> thead(String selector) {
return setSelector(thead(), selector);
}
@Override
public THEAD<TABLE<T>> thead() {
closeAttrs();
return thead_(this, false);
}
@Override
public TFOOT<TABLE<T>> tfoot() {
closeAttrs();
return tfoot_(this, false);
}
@Override
public TFOOT<TABLE<T>> tfoot(String selector) {
return setSelector(tfoot(), selector);
}
@Override
public TBODY<TABLE<T>> tbody() {
closeAttrs();
return tbody_(this, false);
}
@Override
public TBODY<TABLE<T>> tbody(String selector) {
return setSelector(tbody(), selector);
}
@Override
public TR<TABLE<T>> tr() {
closeAttrs();
return tr_(this, false);
}
@Override
public TR<TABLE<T>> tr(String selector) {
return setSelector(tr(), selector);
}
@Override
public COL<TABLE<T>> col() {
closeAttrs();
return col_(this, false);
}
@Override
public TABLE<T> col(String selector) {
return setSelector(col(), selector).__();
}
}
private <T extends __> CAPTION<T> caption_(T e, boolean inline) {
return new CAPTION<T>("caption", e, opt(true, inline, false)); }
private <T extends __> COLGROUP<T> colgroup_(T e, boolean inline) {
return new COLGROUP<T>("colgroup", e, opt(false, inline, false)); }
private <T extends __> THEAD<T> thead_(T e, boolean inline) {
return new THEAD<T>("thead", e, opt(true, inline, false)); }
private <T extends __> TFOOT<T> tfoot_(T e, boolean inline) {
return new TFOOT<T>("tfoot", e, opt(true, inline, false)); }
private <T extends __> TBODY<T> tbody_(T e, boolean inline) {
return new TBODY<T>("tbody", e, opt(true, inline, false)); }
private <T extends __> COL<T> col_(T e, boolean inline) {
return new COL<T>("col", e, opt(false, inline, false)); }
private <T extends __> TR<T> tr_(T e, boolean inline) {
return new TR<T>("tr", e, opt(true, inline, false)); }
public | TABLE |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportDeleteQueryRuleAction.java | {
"start": 936,
"end": 2009
} | class ____ extends HandledTransportAction<DeleteQueryRuleAction.Request, AcknowledgedResponse> {
protected final QueryRulesIndexService systemIndexService;
@Inject
public TransportDeleteQueryRuleAction(
TransportService transportService,
ClusterService clusterService,
ActionFilters actionFilters,
Client client
) {
super(
DeleteQueryRuleAction.NAME,
transportService,
actionFilters,
DeleteQueryRuleAction.Request::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.systemIndexService = new QueryRulesIndexService(client, clusterService.getClusterSettings());
}
@Override
protected void doExecute(Task task, DeleteQueryRuleAction.Request request, ActionListener<AcknowledgedResponse> listener) {
String rulesetId = request.rulesetId();
String ruleId = request.ruleId();
systemIndexService.deleteQueryRule(rulesetId, ruleId, listener.map(v -> AcknowledgedResponse.TRUE));
}
}
| TransportDeleteQueryRuleAction |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java | {
"start": 72779,
"end": 72965
} | class ____
implements EventHandler<JobHistoryEvent> {
@Override
public void handle(JobHistoryEvent event) {
handleTimelineEvent(event);
}
}
}
| ForwardingEventHandler |
java | spring-projects__spring-boot | module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/autoconfigure/ElasticsearchClientConfigurations.java | {
"start": 3940,
"end": 4153
} | class ____ {
@Bean
@ConditionalOnMissingBean
ElasticsearchClient elasticsearchClient(ElasticsearchTransport transport) {
return new ElasticsearchClient(transport);
}
}
}
| ElasticsearchClientConfiguration |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/connection/CachedMessageProducer.java | {
"start": 1210,
"end": 7403
} | class ____ implements MessageProducer, QueueSender, TopicPublisher {
private final MessageProducer target;
private @Nullable Boolean originalDisableMessageID;
private @Nullable Boolean originalDisableMessageTimestamp;
private @Nullable Long originalDeliveryDelay;
private int deliveryMode;
private int priority;
private long timeToLive;
public CachedMessageProducer(MessageProducer target) throws JMSException {
this.target = target;
this.deliveryMode = target.getDeliveryMode();
this.priority = target.getPriority();
this.timeToLive = target.getTimeToLive();
}
@Override
public void setDisableMessageID(boolean disableMessageID) throws JMSException {
if (this.originalDisableMessageID == null) {
this.originalDisableMessageID = this.target.getDisableMessageID();
}
this.target.setDisableMessageID(disableMessageID);
}
@Override
public boolean getDisableMessageID() throws JMSException {
return this.target.getDisableMessageID();
}
@Override
public void setDisableMessageTimestamp(boolean disableMessageTimestamp) throws JMSException {
if (this.originalDisableMessageTimestamp == null) {
this.originalDisableMessageTimestamp = this.target.getDisableMessageTimestamp();
}
this.target.setDisableMessageTimestamp(disableMessageTimestamp);
}
@Override
public boolean getDisableMessageTimestamp() throws JMSException {
return this.target.getDisableMessageTimestamp();
}
@Override
public void setDeliveryDelay(long deliveryDelay) throws JMSException {
if (this.originalDeliveryDelay == null) {
this.originalDeliveryDelay = this.target.getDeliveryDelay();
}
this.target.setDeliveryDelay(deliveryDelay);
}
@Override
public long getDeliveryDelay() throws JMSException {
return this.target.getDeliveryDelay();
}
@Override
public void setDeliveryMode(int deliveryMode) {
this.deliveryMode = deliveryMode;
}
@Override
public int getDeliveryMode() {
return this.deliveryMode;
}
@Override
public void setPriority(int priority) {
this.priority = priority;
}
@Override
public int getPriority() {
return this.priority;
}
@Override
public void setTimeToLive(long timeToLive) {
this.timeToLive = timeToLive;
}
@Override
public long getTimeToLive() {
return this.timeToLive;
}
@Override
public Destination getDestination() throws JMSException {
return this.target.getDestination();
}
@Override
public Queue getQueue() throws JMSException {
return (Queue) this.target.getDestination();
}
@Override
public Topic getTopic() throws JMSException {
return (Topic) this.target.getDestination();
}
@Override
public void send(Message message) throws JMSException {
this.target.send(message, this.deliveryMode, this.priority, this.timeToLive);
}
@Override
public void send(Message message, int deliveryMode, int priority, long timeToLive) throws JMSException {
this.target.send(message, deliveryMode, priority, timeToLive);
}
@Override
public void send(Destination destination, Message message) throws JMSException {
this.target.send(destination, message, this.deliveryMode, this.priority, this.timeToLive);
}
@Override
public void send(Destination destination, Message message, int deliveryMode, int priority, long timeToLive) throws JMSException {
this.target.send(destination, message, deliveryMode, priority, timeToLive);
}
@Override
public void send(Message message, CompletionListener completionListener) throws JMSException {
this.target.send(message, this.deliveryMode, this.priority, this.timeToLive, completionListener);
}
@Override
public void send(Message message, int deliveryMode, int priority, long timeToLive,
CompletionListener completionListener) throws JMSException {
this.target.send(message, deliveryMode, priority, timeToLive, completionListener);
}
@Override
public void send(Destination destination, Message message, CompletionListener completionListener) throws JMSException {
this.target.send(destination, message, this.deliveryMode, this.priority, this.timeToLive, completionListener);
}
@Override
public void send(Destination destination, Message message, int deliveryMode, int priority,
long timeToLive, CompletionListener completionListener) throws JMSException {
this.target.send(destination, message, deliveryMode, priority, timeToLive, completionListener);
}
@Override
public void send(Queue queue, Message message) throws JMSException {
this.target.send(queue, message, this.deliveryMode, this.priority, this.timeToLive);
}
@Override
public void send(Queue queue, Message message, int deliveryMode, int priority, long timeToLive) throws JMSException {
this.target.send(queue, message, deliveryMode, priority, timeToLive);
}
@Override
public void publish(Message message) throws JMSException {
this.target.send(message, this.deliveryMode, this.priority, this.timeToLive);
}
@Override
public void publish(Message message, int deliveryMode, int priority, long timeToLive) throws JMSException {
this.target.send(message, deliveryMode, priority, timeToLive);
}
@Override
public void publish(Topic topic, Message message) throws JMSException {
this.target.send(topic, message, this.deliveryMode, this.priority, this.timeToLive);
}
@Override
public void publish(Topic topic, Message message, int deliveryMode, int priority, long timeToLive) throws JMSException {
this.target.send(topic, message, deliveryMode, priority, timeToLive);
}
@Override
public void close() throws JMSException {
// It's a cached MessageProducer... reset properties only.
if (this.originalDisableMessageID != null) {
this.target.setDisableMessageID(this.originalDisableMessageID);
this.originalDisableMessageID = null;
}
if (this.originalDisableMessageTimestamp != null) {
this.target.setDisableMessageTimestamp(this.originalDisableMessageTimestamp);
this.originalDisableMessageTimestamp = null;
}
if (this.originalDeliveryDelay != null) {
this.target.setDeliveryDelay(this.originalDeliveryDelay);
this.originalDeliveryDelay = null;
}
}
@Override
public String toString() {
return "Cached JMS MessageProducer: " + this.target;
}
}
| CachedMessageProducer |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/jackson2/WebServletJackson2Module.java | {
"start": 2244,
"end": 2996
} | class ____ extends SimpleModule {
public WebServletJackson2Module() {
super(WebServletJackson2Module.class.getName(), new Version(1, 0, 0, null, null, null));
}
@Override
public void setupModule(SetupContext context) {
SecurityJackson2Modules.enableDefaultTyping(context.getOwner());
context.setMixInAnnotations(Cookie.class, CookieMixin.class);
context.setMixInAnnotations(SavedCookie.class, SavedCookieMixin.class);
context.setMixInAnnotations(DefaultSavedRequest.class, DefaultSavedRequestMixin.class);
context.setMixInAnnotations(WebAuthenticationDetails.class, WebAuthenticationDetailsMixin.class);
context.setMixInAnnotations(SwitchUserGrantedAuthority.class, SwitchUserGrantedAuthorityMixIn.class);
}
}
| WebServletJackson2Module |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/mapping/Map.java | {
"start": 704,
"end": 2275
} | class ____ extends IndexedCollection {
private String mapKeyPropertyName;
private boolean hasMapKeyProperty;
public Map(MetadataBuildingContext buildingContext, PersistentClass owner) {
super( buildingContext, owner );
}
public Map(Supplier<ManagedBean<? extends UserCollectionType>> customTypeBeanResolver, PersistentClass owner, MetadataBuildingContext buildingContext) {
super( customTypeBeanResolver, owner, buildingContext );
}
private Map(Map original) {
super( original );
}
@Override
public Map copy() {
return new Map( this );
}
public boolean isMap() {
return true;
}
public String getMapKeyPropertyName() {
return mapKeyPropertyName;
}
public void setMapKeyPropertyName(String mapKeyPropertyName) {
this.mapKeyPropertyName = mapKeyPropertyName;
}
public CollectionType getDefaultCollectionType() {
if ( isSorted() ) {
return new SortedMapType( getRole(), getReferencedPropertyName(), getComparator() );
}
else if ( hasOrder() ) {
return new OrderedMapType( getRole(), getReferencedPropertyName() );
}
else {
return new MapType( getRole(), getReferencedPropertyName() );
}
}
public void createAllKeys() throws MappingException {
super.createAllKeys();
if ( !isInverse() ) {
getIndex().createForeignKey();
}
}
public Object accept(ValueVisitor visitor) {
return visitor.accept(this);
}
@Override
public boolean hasMapKeyProperty() {
return hasMapKeyProperty;
}
public void setHasMapKeyProperty(boolean hasMapKeyProperty) {
this.hasMapKeyProperty = hasMapKeyProperty;
}
}
| Map |
java | apache__flink | flink-annotations/src/main/java/org/apache/flink/annotation/docs/FlinkJsonSchema.java | {
"start": 1148,
"end": 1248
} | class ____ {
private FlinkJsonSchema() {}
/**
* This allows documenting a | FlinkJsonSchema |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/ClassResolver.java | {
"start": 1533,
"end": 1664
} | class ____ or <tt>null</tt> if not found
*/
ClassLoader getClassLoader(String name);
/**
* Resolves the given | loader |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/ai/model/mcp/registry/Repository.java | {
"start": 863,
"end": 1536
} | class ____ {
private String url;
private String source;
private String id;
private String subfolder;
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getSource() {
return source;
}
public void setSource(String source) {
this.source = source;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getSubfolder() {
return subfolder;
}
public void setSubfolder(String subfolder) {
this.subfolder = subfolder;
}
}
| Repository |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/indexcoll/Generation.java | {
"start": 769,
"end": 1097
} | class ____ {
private String description;
public SubGeneration() {
}
public SubGeneration(String description) {
this.description = description;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
}
| SubGeneration |
java | apache__hadoop | hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingOutputOnlyKeys.java | {
"start": 918,
"end": 1506
} | class ____ extends TestStreaming {
public TestStreamingOutputOnlyKeys() throws IOException {
super();
}
@Test
public void testOutputOnlyKeys() throws Exception {
args.add("-jobconf"); args.add("stream.reduce.input" +
"=keyonlytext");
args.add("-jobconf"); args.add("stream.reduce.output" +
"=keyonlytext");
super.testCommandLine();
}
@Override
public String getExpectedOutput() {
return outputExpect.replaceAll("\t", "");
}
@Override
@Test
public void testCommandLine() {
// Do nothing
}
}
| TestStreamingOutputOnlyKeys |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/guide/GuideDebuggingExtraTests.java | {
"start": 1092,
"end": 2432
} | class ____ {
@Test
public void debuggingActivatedWithDeepTraceback() {
Hooks.onOperatorDebug();
StringWriter sw = new StringWriter();
FakeRepository.findAllUserByName(Flux.just("pedro", "simon", "stephane"))
.transform(FakeUtils1.applyFilters)
.transform(FakeUtils2.enrichUser)
.subscribe(System.out::println,
t -> t.printStackTrace(new PrintWriter(sw))
);
String debugStack = sw.toString();
assertThat(debugStack.substring(0, debugStack.indexOf("Original Stack Trace:")))
.contains("Error has been observed at the following site(s):")
.contains("\t*________Flux.map ⇢ at reactor.guide.FakeRepository.findAllUserByName(FakeRepository.java")
.contains("\t|_ Flux.map ⇢ at reactor.guide.FakeRepository.findAllUserByName(FakeRepository.java")
.contains("\t|_ Flux.filter ⇢ at reactor.guide.FakeUtils1.lambda$static$")
.contains("\t|_ Flux.transform ⇢ at reactor.guide.GuideDebuggingExtraTests.debuggingActivatedWithDeepTraceback(GuideDebuggingExtraTests.java")
.contains("\t|_ Flux.elapsed ⇢ at reactor.guide.FakeUtils2.lambda$static$")
.contains("\t|_ Flux.transform ⇢ at reactor.guide.GuideDebuggingExtraTests.debuggingActivatedWithDeepTraceback(GuideDebuggingExtraTests.java");
}
}
| GuideDebuggingExtraTests |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLUpdateSetItem.java | {
"start": 1015,
"end": 4138
} | class ____ extends SQLObjectImpl implements SQLReplaceable {
private SQLExpr column;
private SQLExpr value;
public SQLUpdateSetItem() {
}
public SQLUpdateSetItem(SQLExpr column, SQLExpr value) {
this.setColumn(column);
this.setValue(value);
}
public SQLExpr getColumn() {
return column;
}
public void cloneTo(SQLUpdateSetItem x) {
if (column != null) {
x.column = column.clone();
x.column.setParent(x);
}
if (value != null) {
x.value = value.clone();
x.value.setParent(x);
}
}
@Override
public SQLUpdateSetItem clone() {
SQLUpdateSetItem x = new SQLUpdateSetItem();
cloneTo(x);
return x;
}
public void setColumn(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.column = x;
}
public SQLExpr getValue() {
return value;
}
public void setValue(SQLExpr value) {
if (value != null) {
value.setParent(this);
}
this.value = value;
}
public void output(StringBuilder buf) {
column.output(buf);
buf.append(" = ");
value.output(buf);
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
if (column != null) {
column.accept(visitor);
}
if (value != null) {
value.accept(visitor);
}
}
visitor.endVisit(this);
}
public boolean columnMatch(String column) {
if (this.column instanceof SQLIdentifierExpr) {
return ((SQLIdentifierExpr) this.column).nameEquals(column);
} else if (this.column instanceof SQLPropertyExpr) {
((SQLPropertyExpr) this.column).nameEquals(column);
}
return false;
}
public boolean columnMatch(long columnHash) {
if (this.column instanceof SQLName) {
return ((SQLName) this.column).nameHashCode64() == columnHash;
}
return false;
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
if (expr == this.column) {
this.setColumn(target);
return true;
}
if (expr == this.value) {
setValue(target);
return true;
}
return false;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SQLUpdateSetItem that = (SQLUpdateSetItem) o;
if (column != null ? !column.equals(that.column) : that.column != null) {
return false;
}
return value != null ? value.equals(that.value) : that.value == null;
}
@Override
public int hashCode() {
int result = column != null ? column.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
return result;
}
}
| SQLUpdateSetItem |
java | google__guava | android/guava/src/com/google/common/net/MediaType.java | {
"start": 45855,
"end": 49194
} | class ____ {
final String input;
int position = 0;
Tokenizer(String input) {
this.input = input;
}
@CanIgnoreReturnValue
String consumeTokenIfPresent(CharMatcher matcher) {
checkState(hasMore());
int startPosition = position;
position = matcher.negate().indexIn(input, startPosition);
return hasMore() ? input.substring(startPosition, position) : input.substring(startPosition);
}
String consumeToken(CharMatcher matcher) {
int startPosition = position;
String token = consumeTokenIfPresent(matcher);
checkState(position != startPosition);
return token;
}
char consumeCharacter(CharMatcher matcher) {
checkState(hasMore());
char c = previewChar();
checkState(matcher.matches(c));
position++;
return c;
}
@CanIgnoreReturnValue
char consumeCharacter(char c) {
checkState(hasMore());
checkState(previewChar() == c);
position++;
return c;
}
char previewChar() {
checkState(hasMore());
return input.charAt(position);
}
boolean hasMore() {
return (position >= 0) && (position < input.length());
}
}
@Override
public boolean equals(@Nullable Object obj) {
if (obj == this) {
return true;
} else if (obj instanceof MediaType) {
MediaType that = (MediaType) obj;
return this.type.equals(that.type)
&& this.subtype.equals(that.subtype)
// compare parameters regardless of order
&& this.parametersAsMap().equals(that.parametersAsMap());
} else {
return false;
}
}
@Override
public int hashCode() {
// racy single-check idiom
int h = hashCode;
if (h == 0) {
h = hash(type, subtype, parametersAsMap());
hashCode = h;
}
return h;
}
private static final MapJoiner PARAMETER_JOINER = Joiner.on("; ").withKeyValueSeparator("=");
/**
* Returns the string representation of this media type in the format described in <a
* href="http://www.ietf.org/rfc/rfc2045.txt">RFC 2045</a>.
*/
@Override
public String toString() {
// racy single-check idiom, safe because String is immutable
String result = toString;
if (result == null) {
result = computeToString();
toString = result;
}
return result;
}
private String computeToString() {
StringBuilder builder = new StringBuilder().append(type).append('/').append(subtype);
if (!parameters.isEmpty()) {
builder.append("; ");
Multimap<String, String> quotedParameters =
Multimaps.transformValues(
parameters,
(String value) ->
(TOKEN_MATCHER.matchesAllOf(value) && !value.isEmpty())
? value
: escapeAndQuote(value));
PARAMETER_JOINER.appendTo(builder, quotedParameters.entries());
}
return builder.toString();
}
private static String escapeAndQuote(String value) {
StringBuilder escaped = new StringBuilder(value.length() + 16).append('"');
for (int i = 0; i < value.length(); i++) {
char ch = value.charAt(i);
if (ch == '\r' || ch == '\\' || ch == '"') {
escaped.append('\\');
}
escaped.append(ch);
}
return escaped.append('"').toString();
}
}
| Tokenizer |
java | apache__flink | flink-formats/flink-csv/src/main/java/org/apache/flink/formats/csv/CsvReaderFormat.java | {
"start": 2430,
"end": 3284
} | class ____ with the fields order exactly matching those
* of the CSV file columns).</i>
*
* <p>If you need more fine-grained control over the CSV schema or the parsing options, use the more
* low-level {@code forSchema} static factory method based on the {@code Jackson} library utilities:
*
* <pre>{@code
* Function<CsvMapper, CsvSchema> schemaGenerator =
* mapper -> mapper.schemaFor(SomePojo.class)
* .withColumnSeparator('|');
* CsvReaderFormat<SomePojo> csvFormat =
* CsvReaderFormat.forSchema(() -> new CsvMapper(), schemaGenerator, TypeInformation.of(SomePojo.class));
* FileSource<SomePojo> source =
* FileSource.forRecordStreamFormat(csvFormat, Path.fromLocalFile(filesPath)).build();
* }</pre>
*
* @param <T> The type of the returned elements.
*/
@PublicEvolving
public | definition |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvtVO/wuqi/InstanceSchema.java | {
"start": 84,
"end": 3188
} | class ____ {
public InstanceSchema() {
this.created = System.currentTimeMillis() / 1000;
this.updated = System.currentTimeMillis() / 1000;
this.isDeleted = 0;
this.isTagField = 0;
}
private int id;
private String instanceName;
private String fieldName;
private String fieldType;
private String fieldBaseType;
private String fieldComment;
private int fieldIndexed;
private int fieldStored;
private Integer fieldTag;
private int isDeleted;
private long created;
private long updated;
private Integer cycleType;
private Integer isTagField;
private String defaultValue;
public String getDefaultValue() {
return defaultValue;
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
public Integer getIsTagField() {
return isTagField;
}
public void setIsTagField(Integer isTagField) {
this.isTagField = isTagField;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getInstanceName() {
return instanceName;
}
public void setInstanceName(String instanceName) {
this.instanceName = instanceName;
}
public String getFieldName() {
return fieldName;
}
public void setFieldName(String fieldName) {
this.fieldName = fieldName;
}
public String getFieldType() {
return fieldType;
}
public void setFieldType(String fieldType) {
this.fieldType = fieldType;
}
public String getFieldBaseType() {
return fieldBaseType;
}
public void setFieldBaseType(String fieldBaseType) {
this.fieldBaseType = fieldBaseType;
}
public String getFieldComment() {
return fieldComment;
}
public void setFieldComment(String fieldComment) {
this.fieldComment = fieldComment;
}
public int getFieldIndexed() {
return fieldIndexed;
}
public void setFieldIndexed(int fieldIndexed) {
this.fieldIndexed = fieldIndexed;
}
public int getFieldStored() {
return fieldStored;
}
public void setFieldStored(int fieldStored) {
this.fieldStored = fieldStored;
}
public Integer getFieldTag() {
return fieldTag;
}
public void setFieldTag(Integer fieldTag) {
this.fieldTag = fieldTag;
}
public int getIsDeleted() {
return isDeleted;
}
public void setIsDeleted(int isDeleted) {
this.isDeleted = isDeleted;
}
public Integer getCycleType() {
return cycleType;
}
public void setCycleType(Integer cycleType) {
this.cycleType = cycleType;
}
public long getCreated() {
return created;
}
public void setCreated(long created) {
this.created = created;
}
public long getUpdated() {
return updated;
}
public void setUpdated(long updated) {
this.updated = updated;
}
}
| InstanceSchema |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/authentication/configuration/EnableGlobalAuthentication.java | {
"start": 2237,
"end": 2428
} | class ____ be used to configure a global instance of
* {@link AuthenticationManagerBuilder}. For example:
*
* <pre class="code">
* @Configuration
* @EnableWebSecurity
* public | can |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/testutils/UniformIntTupleGenerator.java | {
"start": 969,
"end": 2203
} | class ____ implements MutableObjectIterator<Tuple2<Integer, Integer>> {
private final int numKeys;
private final int numVals;
private int keyCnt = 0;
private int valCnt = 0;
private boolean repeatKey;
public UniformIntTupleGenerator(int numKeys, int numVals, boolean repeatKey) {
this.numKeys = numKeys;
this.numVals = numVals;
this.repeatKey = repeatKey;
}
@Override
public Tuple2<Integer, Integer> next(Tuple2<Integer, Integer> target) {
if (!repeatKey) {
if (valCnt >= numVals) {
return null;
}
target.f0 = keyCnt++;
target.f1 = valCnt;
if (keyCnt == numKeys) {
keyCnt = 0;
valCnt++;
}
} else {
if (keyCnt >= numKeys) {
return null;
}
target.f0 = keyCnt;
target.f1 = valCnt++;
if (valCnt == numVals) {
valCnt = 0;
keyCnt++;
}
}
return target;
}
@Override
public Tuple2<Integer, Integer> next() {
return next(new Tuple2<Integer, Integer>());
}
}
| UniformIntTupleGenerator |
java | elastic__elasticsearch | build-tools/src/main/java/org/elasticsearch/gradle/testclusters/MockApmServer.java | {
"start": 1975,
"end": 4269
} | class ____ {
private static final Logger logger = Logging.getLogger(MockApmServer.class);
private static final org.slf4j.Logger log = LoggerFactory.getLogger(MockApmServer.class);
private static final LookupCache<String, String> transactionCache = new LRUMap(16, 16);
private final Pattern metricFilter;
private final Pattern transactionFilter;
private final Pattern transactionExcludesFilter;
private HttpServer instance;
public MockApmServer(String metricFilter, String transactionFilter, String transactionExcludesFilter) {
this.metricFilter = createWildcardPattern(metricFilter);
this.transactionFilter = createWildcardPattern(transactionFilter);
this.transactionExcludesFilter = createWildcardPattern(transactionExcludesFilter);
}
private Pattern createWildcardPattern(String filter) {
if (filter == null || filter.isEmpty()) {
return null;
}
var pattern = Arrays.stream(filter.split(",\\s*"))
.map(Pattern::quote)
.map(s -> s.replace("*", "\\E.*\\Q"))
.collect(Collectors.joining(")|(", "(", ")"));
return Pattern.compile(pattern);
}
/**
* Start the Mock APM server. Just returns empty JSON structures for every incoming message
*
* @throws IOException
*/
public void start() throws IOException {
if (instance != null) {
throw new IllegalStateException("MockApmServer already started");
}
InetSocketAddress addr = new InetSocketAddress("0.0.0.0", 0);
HttpServer server = HttpServer.create(addr, 10);
server.createContext("/", new RootHandler());
server.start();
instance = server;
logger.lifecycle("MockApmServer started on port " + server.getAddress().getPort());
}
public int getPort() {
if (instance == null) {
throw new IllegalStateException("MockApmServer not started");
}
return instance.getAddress().getPort();
}
/**
* Stop the server gracefully if possible
*/
public void stop() {
if (instance != null) {
logger.lifecycle("stopping apm server");
instance.stop(1);
instance = null;
}
}
| MockApmServer |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/intercept/DefaultMethodInvokingInterceptor.java | {
"start": 547,
"end": 1684
} | class ____ implements MethodInterceptor {
private final Map<Method, MethodHandle> methodHandleCache = new ConcurrentHashMap<>();
@Override
public Object invoke(MethodInvocation invocation) throws Throwable {
Method method = invocation.getMethod();
if (!method.isDefault()) {
return invocation.proceed();
}
LettuceAssert.isTrue(invocation instanceof InvocationTargetProvider,
"Invocation must provide a target object via InvocationTargetProvider");
InvocationTargetProvider targetProvider = (InvocationTargetProvider) invocation;
return methodHandleCache.computeIfAbsent(method, DefaultMethodInvokingInterceptor::lookupMethodHandle)
.bindTo(targetProvider.getInvocationTarget()).invokeWithArguments(invocation.getArguments());
}
private static MethodHandle lookupMethodHandle(Method method) {
try {
return DefaultMethods.lookupMethodHandle(method);
} catch (ReflectiveOperationException e) {
throw new IllegalArgumentException(e);
}
}
}
| DefaultMethodInvokingInterceptor |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/AbstractHeapMergingState.java | {
"start": 1478,
"end": 3561
} | class ____<K, N, IN, SV, OUT>
extends AbstractHeapAppendingState<K, N, IN, SV, OUT>
implements InternalMergingState<K, N, IN, SV, OUT> {
/** The merge transformation function that implements the merge logic. */
private final MergeTransformation mergeTransformation;
/**
* Creates a new key/value state for the given hash map of key/value pairs.
*
* @param stateTable The state table for which this state is associated to.
* @param keySerializer The serializer for the keys.
* @param valueSerializer The serializer for the state.
* @param namespaceSerializer The serializer for the namespace.
* @param defaultValue The default value for the state.
*/
protected AbstractHeapMergingState(
StateTable<K, N, SV> stateTable,
TypeSerializer<K> keySerializer,
TypeSerializer<SV> valueSerializer,
TypeSerializer<N> namespaceSerializer,
SV defaultValue) {
super(stateTable, keySerializer, valueSerializer, namespaceSerializer, defaultValue);
this.mergeTransformation = new MergeTransformation();
}
@Override
public void mergeNamespaces(N target, Collection<N> sources) throws Exception {
if (sources == null || sources.isEmpty()) {
return; // nothing to do
}
final StateTable<K, N, SV> map = stateTable;
SV merged = null;
// merge the sources
for (N source : sources) {
// get and remove the next source per namespace/key
SV sourceState = map.removeAndGetOld(source);
if (merged != null && sourceState != null) {
merged = mergeState(merged, sourceState);
} else if (merged == null) {
merged = sourceState;
}
}
// merge into the target, if needed
if (merged != null) {
map.transform(target, merged, mergeTransformation);
}
}
protected abstract SV mergeState(SV a, SV b) throws Exception;
final | AbstractHeapMergingState |
java | apache__dubbo | dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize/fastjson2/FastJson2ObjectOutput.java | {
"start": 1104,
"end": 5008
} | class ____ implements ObjectOutput {
private final Fastjson2CreatorManager fastjson2CreatorManager;
private final Fastjson2SecurityManager fastjson2SecurityManager;
private volatile ClassLoader classLoader;
private final OutputStream os;
public FastJson2ObjectOutput(
Fastjson2CreatorManager fastjson2CreatorManager,
Fastjson2SecurityManager fastjson2SecurityManager,
OutputStream out) {
this.fastjson2CreatorManager = fastjson2CreatorManager;
this.fastjson2SecurityManager = fastjson2SecurityManager;
this.classLoader = Thread.currentThread().getContextClassLoader();
this.os = out;
fastjson2CreatorManager.setCreator(classLoader);
}
@Override
public void writeBool(boolean v) throws IOException {
writeObject(v);
}
@Override
public void writeByte(byte v) throws IOException {
writeObject(v);
}
@Override
public void writeShort(short v) throws IOException {
writeObject(v);
}
@Override
public void writeInt(int v) throws IOException {
writeObject(v);
}
@Override
public void writeLong(long v) throws IOException {
writeObject(v);
}
@Override
public void writeFloat(float v) throws IOException {
writeObject(v);
}
@Override
public void writeDouble(double v) throws IOException {
writeObject(v);
}
@Override
public void writeUTF(String v) throws IOException {
writeObject(v);
}
@Override
public void writeBytes(byte[] b) throws IOException {
writeLength(b.length);
os.write(b);
}
@Override
public void writeBytes(byte[] b, int off, int len) throws IOException {
writeLength(len);
os.write(b, off, len);
}
@Override
public void writeObject(Object obj) throws IOException {
updateClassLoaderIfNeed();
byte[] bytes;
if (fastjson2SecurityManager.getSecurityFilter().isCheckSerializable()) {
bytes = JSONB.toBytes(
obj,
JSONWriter.Feature.WriteClassName,
JSONWriter.Feature.FieldBased,
JSONWriter.Feature.ErrorOnNoneSerializable,
JSONWriter.Feature.ReferenceDetection,
JSONWriter.Feature.WriteNulls,
JSONWriter.Feature.NotWriteDefaultValue,
JSONWriter.Feature.NotWriteHashMapArrayListClassName,
JSONWriter.Feature.WriteNameAsSymbol);
} else {
bytes = JSONB.toBytes(
obj,
JSONWriter.Feature.WriteClassName,
JSONWriter.Feature.FieldBased,
JSONWriter.Feature.ReferenceDetection,
JSONWriter.Feature.WriteNulls,
JSONWriter.Feature.NotWriteDefaultValue,
JSONWriter.Feature.NotWriteHashMapArrayListClassName,
JSONWriter.Feature.WriteNameAsSymbol);
}
writeLength(bytes.length);
os.write(bytes);
os.flush();
}
private void updateClassLoaderIfNeed() {
ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
if (currentClassLoader != classLoader) {
fastjson2CreatorManager.setCreator(currentClassLoader);
classLoader = currentClassLoader;
}
}
private void writeLength(int value) throws IOException {
byte[] bytes = new byte[Integer.BYTES];
int length = bytes.length;
for (int i = 0; i < length; i++) {
bytes[length - i - 1] = (byte) (value & 0xFF);
value >>= 8;
}
os.write(bytes);
}
@Override
public void flushBuffer() throws IOException {
os.flush();
}
}
| FastJson2ObjectOutput |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RequestIndexFilteringIT.java | {
"start": 670,
"end": 939
} | class ____ extends RequestIndexFilteringTestCase {
@ClassRule
public static ElasticsearchCluster cluster = Clusters.testCluster();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
| RequestIndexFilteringIT |
java | micronaut-projects__micronaut-core | http-client-core/src/main/java/io/micronaut/http/client/multipart/StringPart.java | {
"start": 698,
"end": 844
} | class ____ a String {@link Part} in {@link MultipartBody} to build a Netty multipart request.
*
* @author Puneet Behl
* @since 1.0
*/
| representing |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/annotation/EndpointDiscovererTests.java | {
"start": 17153,
"end": 17517
} | class ____ {
@Bean
SpecializedExtension specializedExtension() {
Enhancer enhancer = new Enhancer();
enhancer.setSuperclass(SpecializedExtension.class);
enhancer.setCallback((FixedValue) () -> null);
return (SpecializedExtension) enhancer.create();
}
}
@Configuration(proxyBeanMethods = false)
static | ProxiedSpecializedTestEndpointConfiguration |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmnode/RMNodeEventType.java | {
"start": 878,
"end": 1446
} | enum ____ {
STARTED,
// Source: AdminService
DECOMMISSION,
GRACEFUL_DECOMMISSION,
RECOMMISSION,
// Source: AdminService, ResourceTrackerService
RESOURCE_UPDATE,
// ResourceTrackerService
STATUS_UPDATE,
REBOOTING,
RECONNECTED,
SHUTDOWN,
// Source: Application
CLEANUP_APP,
// Source: Container
CONTAINER_ALLOCATED,
CLEANUP_CONTAINER,
UPDATE_CONTAINER,
// Source: ClientRMService
SIGNAL_CONTAINER,
// Source: RMAppAttempt
FINISHED_CONTAINERS_PULLED_BY_AM,
// Source: NMLivelinessMonitor
EXPIRE
}
| RMNodeEventType |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java | {
"start": 1204,
"end": 4521
} | class ____ extends ScriptTestCase {
private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(
PAINLESS_BASE_WHITELIST,
new HashMap<>(),
new HashMap<>()
);
public void testExplain() {
// Debug.explain can explain an object
Object dummy = new Object();
var wrapper = expectScriptThrows(ErrorCauseWrapper.class, () -> exec("Debug.explain(params.a)", singletonMap("a", dummy), true));
assertThat(wrapper.realCause.getClass(), equalTo(PainlessExplainError.class));
var e = (PainlessExplainError) wrapper.realCause;
assertSame(dummy, e.getObjectToExplain());
assertThat(e.getHeaders(painlessLookup), hasEntry("es.to_string", singletonList(dummy.toString())));
assertThat(e.getHeaders(painlessLookup), hasEntry("es.java_class", singletonList("java.lang.Object")));
assertThat(e.getHeaders(painlessLookup), hasEntry("es.painless_class", singletonList("java.lang.Object")));
// Null should be ok
wrapper = expectScriptThrows(ErrorCauseWrapper.class, () -> exec("Debug.explain(null)"));
assertThat(wrapper.realCause.getClass(), equalTo(PainlessExplainError.class));
e = (PainlessExplainError) wrapper.realCause;
assertNull(e.getObjectToExplain());
assertThat(e.getHeaders(painlessLookup), hasEntry("es.to_string", singletonList("null")));
assertThat(e.getHeaders(painlessLookup), not(hasKey("es.java_class")));
assertThat(e.getHeaders(painlessLookup), not(hasKey("es.painless_class")));
// You can't catch the explain exception
wrapper = expectScriptThrows(ErrorCauseWrapper.class, () -> exec("""
try {
Debug.explain(params.a)
} catch (Exception e) {
return 1
}""", singletonMap("a", dummy), true));
assertThat(wrapper.realCause.getClass(), equalTo(PainlessExplainError.class));
e = (PainlessExplainError) wrapper.realCause;
assertSame(dummy, e.getObjectToExplain());
}
/**
* {@link PainlessExplainError} doesn't serialize but the headers still make it.
*/
public void testPainlessExplainErrorSerialization() throws IOException {
Map<String, Object> params = singletonMap("a", "jumped over the moon");
ScriptException e = expectThrows(ScriptException.class, () -> exec("Debug.explain(params.a)", params, true));
assertEquals(singletonList("jumped over the moon"), e.getMetadata("es.to_string"));
assertEquals(singletonList("java.lang.String"), e.getMetadata("es.java_class"));
assertEquals(singletonList("java.lang.String"), e.getMetadata("es.painless_class"));
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeException(e);
try (StreamInput in = out.bytes().streamInput()) {
ElasticsearchException read = (ScriptException) in.readException();
assertEquals(singletonList("jumped over the moon"), read.getMetadata("es.to_string"));
assertEquals(singletonList("java.lang.String"), read.getMetadata("es.java_class"));
assertEquals(singletonList("java.lang.String"), read.getMetadata("es.painless_class"));
}
}
}
}
| DebugTests |
java | google__dagger | javatests/dagger/spi/SpiPluginTest.java | {
"start": 7423,
"end": 8269
} | interface ____");
Compilation inFooDepCompilation =
compilationFactory.compilationWithErrorOnDependency("inFooDep");
assertThat(inFooDepCompilation)
.hadErrorContaining(
message(
"[FailingPlugin] Bad Dependency: test.Foo(inFooDep)",
" test.Duplicated is injected at",
" [test.TestComponent] test.Foo(inFooDep)",
" test.Foo is injected at",
" [test.TestComponent] test.EntryPoint(foo, …)",
" test.EntryPoint is requested at",
" [test.TestComponent] test.TestComponent.entryPoint()",
"The following other entry points also depend on it:",
" test.TestComponent.chain()"))
.inFile(component)
.onLineContaining(" | TestComponent |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 8278,
"end": 8395
} | class ____, this method converts them into classes.
*
* <p>
* A new {@link List} is returned. If the | names |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/SnapshotAccessControlException.java | {
"start": 957,
"end": 1257
} | class ____ extends AccessControlException {
private static final long serialVersionUID = 1L;
public SnapshotAccessControlException(final String message) {
super(message);
}
public SnapshotAccessControlException(final Throwable cause) {
super(cause);
}
}
| SnapshotAccessControlException |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LuceneEndpointBuilderFactory.java | {
"start": 12463,
"end": 12780
} | class ____ extends AbstractEndpointBuilder implements LuceneEndpointBuilder, AdvancedLuceneEndpointBuilder {
public LuceneEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new LuceneEndpointBuilderImpl(path);
}
} | LuceneEndpointBuilderImpl |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RScheduledFuture.java | {
"start": 752,
"end": 834
} | interface ____<V> extends RExecutorFuture<V>, ScheduledFuture<V> {
}
| RScheduledFuture |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/resource/DnsResolvers.java | {
"start": 187,
"end": 631
} | enum ____ implements DnsResolver {
/**
* Non-resolving {@link DnsResolver}. Returns an empty {@link InetAddress} to indicate an unresolved address.
*
* @see java.net.InetSocketAddress#createUnresolved(String, int)
* @since 4.4
*/
UNRESOLVED {
@Override
public InetAddress[] resolve(String host) throws UnknownHostException {
return new InetAddress[0];
}
};
}
| DnsResolvers |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/ReadOnlyHttp2Headers.java | {
"start": 29217,
"end": 30727
} | class ____ implements Map.Entry<CharSequence, CharSequence>,
Iterator<Map.Entry<CharSequence, CharSequence>> {
private int i;
private AsciiString[] current = pseudoHeaders.length != 0 ? pseudoHeaders : otherHeaders;
private AsciiString key;
private AsciiString value;
@Override
public boolean hasNext() {
return i != current.length;
}
@Override
public Map.Entry<CharSequence, CharSequence> next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
key = current[i];
value = current[i + 1];
i += 2;
if (i == current.length && current == pseudoHeaders) {
current = otherHeaders;
i = 0;
}
return this;
}
@Override
public CharSequence getKey() {
return key;
}
@Override
public CharSequence getValue() {
return value;
}
@Override
public CharSequence setValue(CharSequence value) {
throw new UnsupportedOperationException("read only");
}
@Override
public void remove() {
throw new UnsupportedOperationException("read only");
}
@Override
public String toString() {
return key.toString() + '=' + value.toString();
}
}
}
| ReadOnlyIterator |
java | apache__avro | lang/java/mapred/src/main/java/org/apache/avro/mapreduce/CombineAvroKeyValueFileInputFormat.java | {
"start": 2395,
"end": 2831
} | class ____<K, V>
extends CombineFileRecordReaderWrapper<AvroKey<K>, AvroValue<V>> {
// this constructor signature is required by CombineFileRecordReader
public AvroKeyValueFileRecordReaderWrapper(CombineFileSplit split, TaskAttemptContext context, Integer idx)
throws IOException, InterruptedException {
super(new AvroKeyValueInputFormat<>(), split, context, idx);
}
}
}
| AvroKeyValueFileRecordReaderWrapper |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/changelog/SequenceNumber.java | {
"start": 1487,
"end": 2809
} | class ____ implements SequenceNumber {
private static final long serialVersionUID = 1L;
public final long number;
GenericSequenceNumber(long number) {
Preconditions.checkArgument(number >= 0);
this.number = number;
}
@Override
public int compareTo(SequenceNumber o) {
Preconditions.checkArgument(o instanceof GenericSequenceNumber);
return Long.compare(this.number, ((GenericSequenceNumber) o).number);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof GenericSequenceNumber)) {
return false;
}
return number == ((GenericSequenceNumber) o).number;
}
@Override
public int hashCode() {
return Objects.hash(number);
}
@Override
public SequenceNumber next() {
checkState(number < Long.MAX_VALUE);
return SequenceNumber.of(number + 1);
}
@Override
public String toString() {
return Long.toString(number);
}
}
static SequenceNumber of(long number) {
return new GenericSequenceNumber(number);
}
}
| GenericSequenceNumber |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/internal/util/ReflectHelperTest.java | {
"start": 1291,
"end": 1340
} | interface ____ extends A {
String getName();
}
| B |
java | apache__flink | flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcServiceUtils.java | {
"start": 14015,
"end": 15676
} | class ____
try (TemporaryClassLoaderContext ignored =
TemporaryClassLoaderContext.of(getClass().getClassLoader())) {
if (externalAddress == null) {
// create local actor system
actorSystem =
ActorSystemBootstrapTools.startLocalActorSystem(
configuration,
actorSystemName,
logger,
actorSystemExecutorConfiguration,
customConfig);
} else {
// create remote actor system
actorSystem =
ActorSystemBootstrapTools.startRemoteActorSystem(
configuration,
actorSystemName,
externalAddress,
externalPortRange,
bindAddress,
Optional.ofNullable(bindPort),
logger,
actorSystemExecutorConfiguration,
customConfig);
}
}
return constructor.apply(
actorSystem,
PekkoRpcServiceConfiguration.fromConfiguration(configuration),
RpcService.class.getClassLoader());
}
}
// ------------------------------------------------------------------------
/** This | loader |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ObjectUtils.java | {
"start": 7163,
"end": 8032
} | enum ____ to check, typically obtained via {@code MyEnum.values()}
* @param constant the constant name to find (must not be null or empty string)
* @param caseSensitive whether case is significant in determining a match
* @return whether the constant has been found in the given array
*/
public static boolean containsConstant(Enum<?>[] enumValues, String constant, boolean caseSensitive) {
for (Enum<?> candidate : enumValues) {
if (caseSensitive ? candidate.toString().equals(constant) :
candidate.toString().equalsIgnoreCase(constant)) {
return true;
}
}
return false;
}
/**
* Case insensitive alternative to {@link Enum#valueOf(Class, String)}.
* @param <E> the concrete Enum type
* @param enumValues the array of all Enum constants in question, usually per {@code Enum.values()}
* @param constant the constant to get the | values |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-32/src/test/java/org/redisson/spring/data/connection/RedissonReactiveKeyCommandsTest.java | {
"start": 589,
"end": 1863
} | class ____ extends BaseConnectionTest {
@Test
public void testExpiration() {
RedissonConnectionFactory factory = new RedissonConnectionFactory(redisson);
ReactiveStringRedisTemplate t = new ReactiveStringRedisTemplate(factory);
t.opsForValue().set("123", "4343").block();
t.expire("123", Duration.ofMillis(1001)).block();
assertThat(t.getExpire("123").block().toMillis()).isBetween(900L, 1000L);
}
@Test
public void testPending() {
RedissonConnectionFactory factory = new RedissonConnectionFactory(redisson);
ReactiveStringRedisTemplate t = new ReactiveStringRedisTemplate(factory);
t.opsForStream().createGroup("test", ReadOffset.latest(), "testGroup").block();
t.opsForStream().add("test", Collections.singletonMap("1", "1")).block();
assertThat(t.opsForStream().pending("test", "testGroup").block().getTotalPendingMessages()).isEqualTo(0);
t.opsForStream().read(Consumer.from("testGroup", "test1"), StreamOffset.create("test", ReadOffset.from(">"))).single().block();
PendingMessages msg = t.opsForStream().pending("test", "testGroup", Range.unbounded(), 10).block();
assertThat(msg.size()).isEqualTo(1);
}
}
| RedissonReactiveKeyCommandsTest |
java | netty__netty | codec-socks/src/main/java/io/netty/handler/codec/socksx/v4/Socks4CommandStatus.java | {
"start": 785,
"end": 2778
} | class ____ implements Comparable<Socks4CommandStatus> {
public static final Socks4CommandStatus SUCCESS = new Socks4CommandStatus(0x5a, "SUCCESS");
public static final Socks4CommandStatus REJECTED_OR_FAILED = new Socks4CommandStatus(0x5b, "REJECTED_OR_FAILED");
public static final Socks4CommandStatus IDENTD_UNREACHABLE = new Socks4CommandStatus(0x5c, "IDENTD_UNREACHABLE");
public static final Socks4CommandStatus IDENTD_AUTH_FAILURE = new Socks4CommandStatus(0x5d, "IDENTD_AUTH_FAILURE");
public static Socks4CommandStatus valueOf(byte b) {
switch (b) {
case 0x5a:
return SUCCESS;
case 0x5b:
return REJECTED_OR_FAILED;
case 0x5c:
return IDENTD_UNREACHABLE;
case 0x5d:
return IDENTD_AUTH_FAILURE;
}
return new Socks4CommandStatus(b);
}
private final byte byteValue;
private final String name;
private String text;
public Socks4CommandStatus(int byteValue) {
this(byteValue, "UNKNOWN");
}
public Socks4CommandStatus(int byteValue, String name) {
this.name = ObjectUtil.checkNotNull(name, "name");
this.byteValue = (byte) byteValue;
}
public byte byteValue() {
return byteValue;
}
public boolean isSuccess() {
return byteValue == 0x5a;
}
@Override
public int hashCode() {
return byteValue;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof Socks4CommandStatus)) {
return false;
}
return byteValue == ((Socks4CommandStatus) obj).byteValue;
}
@Override
public int compareTo(Socks4CommandStatus o) {
return byteValue - o.byteValue;
}
@Override
public String toString() {
String text = this.text;
if (text == null) {
this.text = text = name + '(' + (byteValue & 0xFF) + ')';
}
return text;
}
}
| Socks4CommandStatus |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/model/ServerResourceMethod.java | {
"start": 3700,
"end": 3956
} | class ____ if different to {@link #getActualDeclaringClassName()} or null
*/
public String getClassDeclMethodThatHasJaxRsEndpointDefiningAnn() {
return classDeclMethodThatHasJaxRsEndpointDefiningAnn;
}
/**
* Sets a declaring | name |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scripting/ScriptSource.java | {
"start": 928,
"end": 1449
} | interface ____ {
/**
* Retrieve the current script source text as String.
* @return the script text
* @throws IOException if script retrieval failed
*/
String getScriptAsString() throws IOException;
/**
* Indicate whether the underlying script data has been modified since
* the last time {@link #getScriptAsString()} was called.
* Returns {@code true} if the script has not been read yet.
* @return whether the script data has been modified
*/
boolean isModified();
/**
* Determine a | ScriptSource |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/NewProcessorAndServiceTest.java | {
"start": 1858,
"end": 2334
} | class ____ implements Processor, Service {
private boolean started;
@Override
public void process(Exchange exchange) {
exchange.getMessage().setBody("Bye World");
}
@Override
public void start() {
started = true;
}
@Override
public void stop() {
started = false;
}
public boolean isStarted() {
return started;
}
}
}
| MyProcessor |
java | quarkusio__quarkus | extensions/panache/hibernate-reactive-panache-common/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/common/runtime/WithSessionOnDemandInterceptor.java | {
"start": 396,
"end": 984
} | class ____ extends AbstractUniInterceptor {
@AroundInvoke
public Object intercept(InvocationContext context) throws Exception {
// Bindings are validated at build time - method-level binding declared on a method that does not return Uni results in a build failure
// However, a class-level binding implies that methods that do not return Uni are just a no-op
if (isUniReturnType(context)) {
return SessionOperations.withSessionOnDemand(() -> proceedUni(context));
}
return context.proceed();
}
}
| WithSessionOnDemandInterceptor |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java | {
"start": 1826,
"end": 12412
} | class ____ extends SearchBasedChangesSnapshot {
private final long maxMemorySizeInBytes;
private final StoredFieldLoader storedFieldLoader;
private final SourceLoader sourceLoader;
private int skippedOperations;
private long lastSeenSeqNo;
private record SearchRecord(FieldDoc doc, boolean isTombstone, long seqNo, long primaryTerm, long version, long size) {
int index() {
return doc.shardIndex;
}
int docID() {
return doc.doc;
}
boolean hasRecoverySourceSize() {
return size != -1;
}
}
private final Deque<SearchRecord> pendingDocs = new LinkedList<>();
private final Deque<Translog.Operation> operationQueue = new LinkedList<>();
public LuceneSyntheticSourceChangesSnapshot(
MapperService mapperService,
Engine.Searcher engineSearcher,
int searchBatchSize,
long maxMemorySizeInBytes,
long fromSeqNo,
long toSeqNo,
boolean requiredFullRange,
boolean accessStats,
IndexVersion indexVersionCreated
) throws IOException {
super(mapperService, engineSearcher, searchBatchSize, fromSeqNo, toSeqNo, requiredFullRange, accessStats, indexVersionCreated);
// a MapperService#updateMapping(...) of empty index may not have been invoked and then mappingLookup is empty
assert engineSearcher.getDirectoryReader().maxDoc() == 0 || mapperService.mappingLookup().isSourceSynthetic()
: "either an empty index or synthetic source must be enabled for proper functionality.";
// ensure we can buffer at least one document
this.maxMemorySizeInBytes = maxMemorySizeInBytes > 0 ? maxMemorySizeInBytes : 1;
this.sourceLoader = mapperService.mappingLookup().newSourceLoader(null, SourceFieldMetrics.NOOP);
Set<String> storedFields = sourceLoader.requiredStoredFields();
String defaultCodec = EngineConfig.INDEX_CODEC_SETTING.get(mapperService.getIndexSettings().getSettings());
// zstd best compression stores upto 2048 docs in a block, so it is likely that in this case docs are co-located in same block:
boolean forceSequentialReader = CodecService.BEST_COMPRESSION_CODEC.equals(defaultCodec);
this.storedFieldLoader = StoredFieldLoader.create(false, storedFields, forceSequentialReader);
this.lastSeenSeqNo = fromSeqNo - 1;
}
@Override
public int skippedOperations() {
return skippedOperations;
}
@Override
protected Translog.Operation nextOperation() throws IOException {
while (true) {
if (operationQueue.isEmpty()) {
loadNextBatch();
}
if (operationQueue.isEmpty()) {
return null;
}
var op = operationQueue.pollFirst();
if (op.seqNo() == lastSeenSeqNo) {
skippedOperations++;
continue;
}
lastSeenSeqNo = op.seqNo();
return op;
}
}
private void loadNextBatch() throws IOException {
List<SearchRecord> documentsToLoad = new ArrayList<>();
long accumulatedSize = 0;
while (accumulatedSize < maxMemorySizeInBytes) {
if (pendingDocs.isEmpty()) {
ScoreDoc[] topDocs = nextTopDocs().scoreDocs;
if (topDocs.length == 0) {
break;
}
pendingDocs.addAll(Arrays.asList(transformScoreDocsToRecords(topDocs)));
}
SearchRecord document = pendingDocs.pollFirst();
document.doc().shardIndex = documentsToLoad.size();
documentsToLoad.add(document);
accumulatedSize += document.size();
}
for (var op : loadDocuments(documentsToLoad)) {
if (op == null) {
skippedOperations++;
continue;
}
operationQueue.add(op);
}
}
private SearchRecord[] transformScoreDocsToRecords(ScoreDoc[] scoreDocs) throws IOException {
ArrayUtil.introSort(scoreDocs, Comparator.comparingInt(doc -> doc.doc));
SearchRecord[] documentRecords = new SearchRecord[scoreDocs.length];
CombinedDocValues combinedDocValues = null;
int docBase = -1;
int maxDoc = 0;
int readerIndex = 0;
LeafReaderContext leafReaderContext;
for (int i = 0; i < scoreDocs.length; i++) {
ScoreDoc scoreDoc = scoreDocs[i];
if (scoreDoc.doc >= docBase + maxDoc) {
do {
leafReaderContext = leaves().get(readerIndex++);
docBase = leafReaderContext.docBase;
maxDoc = leafReaderContext.reader().maxDoc();
} while (scoreDoc.doc >= docBase + maxDoc);
combinedDocValues = new CombinedDocValues(leafReaderContext.reader());
}
int segmentDocID = scoreDoc.doc - docBase;
int index = scoreDoc.shardIndex;
var primaryTerm = combinedDocValues.docPrimaryTerm(segmentDocID);
assert primaryTerm > 0 : "nested child document must be excluded";
documentRecords[index] = new SearchRecord(
(FieldDoc) scoreDoc,
combinedDocValues.isTombstone(segmentDocID),
combinedDocValues.docSeqNo(segmentDocID),
primaryTerm,
combinedDocValues.docVersion(segmentDocID),
combinedDocValues.recoverySourceSize(segmentDocID)
);
}
return documentRecords;
}
private Translog.Operation[] loadDocuments(List<SearchRecord> documentRecords) throws IOException {
documentRecords.sort(Comparator.comparingInt(doc -> doc.docID()));
Translog.Operation[] operations = new Translog.Operation[documentRecords.size()];
int docBase = -1;
int maxDoc = 0;
int readerIndex = 0;
LeafReaderContext leafReaderContext = null;
LeafStoredFieldLoader leafFieldLoader = null;
SourceLoader.Leaf leafSourceLoader = null;
for (int i = 0; i < documentRecords.size(); i++) {
SearchRecord docRecord = documentRecords.get(i);
if (docRecord.docID() >= docBase + maxDoc) {
do {
leafReaderContext = leaves().get(readerIndex++);
docBase = leafReaderContext.docBase;
maxDoc = leafReaderContext.reader().maxDoc();
} while (docRecord.docID() >= docBase + maxDoc);
// TODO: instead of building an array, consider just checking whether doc ids are dense.
// Note, field loaders then would lose the ability to optionally eagerly loading values.
IntArrayList nextDocIds = new IntArrayList();
for (int j = i; j < documentRecords.size(); j++) {
var record = documentRecords.get(j);
if (record.isTombstone()) {
continue;
}
int docID = record.docID();
if (docID >= docBase + maxDoc) {
break;
}
int segmentDocID = docID - docBase;
nextDocIds.add(segmentDocID);
}
// This computed doc ids arrays us used by stored field loader as a heuristic to determine whether to use a sequential
// stored field reader (which bulk loads stored fields and avoids decompressing the same blocks multiple times). For
// source loader, it is also used as a heuristic for bulk reading doc values (E.g. SingletonDocValuesLoader).
int[] nextDocIdArray = nextDocIds.toArray();
leafFieldLoader = storedFieldLoader.getLoader(leafReaderContext, nextDocIdArray);
leafSourceLoader = sourceLoader.leaf(leafReaderContext.reader(), nextDocIdArray);
setNextSyntheticFieldsReader(leafReaderContext);
}
int segmentDocID = docRecord.docID() - docBase;
leafFieldLoader.advanceTo(segmentDocID);
operations[docRecord.index()] = createOperation(docRecord, leafFieldLoader, leafSourceLoader, segmentDocID, leafReaderContext);
}
return operations;
}
private Translog.Operation createOperation(
SearchRecord docRecord,
LeafStoredFieldLoader fieldLoader,
SourceLoader.Leaf sourceLoader,
int segmentDocID,
LeafReaderContext context
) throws IOException {
if (docRecord.isTombstone() && fieldLoader.id() == null) {
assert docRecord.version() == 1L : "Noop tombstone should have version 1L; actual version [" + docRecord.version() + "]";
assert assertDocSoftDeleted(context.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + docRecord + "]";
return new Translog.NoOp(docRecord.seqNo(), docRecord.primaryTerm(), "null");
} else if (docRecord.isTombstone()) {
assert assertDocSoftDeleted(context.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + docRecord + "]";
return new Translog.Delete(fieldLoader.id(), docRecord.seqNo(), docRecord.primaryTerm(), docRecord.version());
} else {
if (docRecord.hasRecoverySourceSize() == false) {
// TODO: Callers should ask for the range that source should be retained. Thus we should always
// check for the existence source once we make peer-recovery to send ops after the local checkpoint.
if (requiredFullRange) {
throw new MissingHistoryOperationsException(
"source not found for seqno=" + docRecord.seqNo() + " from_seqno=" + fromSeqNo + " to_seqno=" + toSeqNo
);
} else {
skippedOperations++;
return null;
}
}
var source = addSyntheticFields(sourceLoader.source(fieldLoader, segmentDocID), segmentDocID);
return new Translog.Index(
fieldLoader.id(),
docRecord.seqNo(),
docRecord.primaryTerm(),
docRecord.version(),
source.internalSourceRef(),
fieldLoader.routing(),
-1 // autogenerated timestamp
);
}
}
}
| LuceneSyntheticSourceChangesSnapshot |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/protocolPB/RouterAdminProtocolPB.java | {
"start": 1880,
"end": 1972
} | interface ____ extends
RouterAdminProtocolService.BlockingInterface {
} | RouterAdminProtocolPB |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/KeysetScrollDelegate.java | {
"start": 5128,
"end": 5877
} | class ____ extends KeysetScrollDelegate {
@Override
protected Sort getSortOrders(Sort sort) {
List<Order> orders = new ArrayList<>();
for (Order order : sort) {
orders.add(new Order(order.isAscending() ? Sort.Direction.DESC : Sort.Direction.ASC, order.getProperty()));
}
return Sort.by(orders);
}
@Override
protected <T> List<T> postProcessResults(List<T> result) {
Collections.reverse(result);
return result;
}
@Override
protected <T> List<T> getResultWindow(List<T> list, int limit) {
return CollectionUtils.getLast(limit, list);
}
}
/**
* Adapter to construct scroll queries.
*
* @param <E> property path expression type.
* @param <P> predicate type.
*/
public | ReverseKeysetScrollDelegate |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/consumer/InputChannelBuilder.java | {
"start": 2072,
"end": 9020
} | class ____ {
public static final ConnectionID STUB_CONNECTION_ID =
new ConnectionID(ResourceID.generate(), new InetSocketAddress("localhost", 5000), 0);
private int channelIndex = 0;
private ResultPartitionID partitionId = new ResultPartitionID();
private ResultSubpartitionIndexSet subpartitionIndexSet = new ResultSubpartitionIndexSet(0);
private ConnectionID connectionID = STUB_CONNECTION_ID;
private ResultPartitionManager partitionManager =
new TestingResultPartitionManager(new NoOpResultSubpartitionView());
private TaskEventPublisher taskEventPublisher = new TaskEventDispatcher();
private ChannelStateWriter stateWriter = ChannelStateWriter.NO_OP;
private ConnectionManager connectionManager = new TestingConnectionManager();
private int initialBackoff = 0;
private int maxBackoff = 0;
private int partitionRequestListenerTimeout = 0;
private int networkBuffersPerChannel = 2;
private InputChannelMetrics metrics =
InputChannelTestUtils.newUnregisteredInputChannelMetrics();
public static InputChannelBuilder newBuilder() {
return new InputChannelBuilder();
}
public InputChannelBuilder setChannelIndex(int channelIndex) {
this.channelIndex = channelIndex;
return this;
}
public InputChannelBuilder setPartitionId(ResultPartitionID partitionId) {
this.partitionId = partitionId;
return this;
}
public InputChannelBuilder setSubpartitionIndexSet(
ResultSubpartitionIndexSet subpartitionIndexSet) {
this.subpartitionIndexSet = subpartitionIndexSet;
return this;
}
public InputChannelBuilder setPartitionManager(ResultPartitionManager partitionManager) {
this.partitionManager = partitionManager;
return this;
}
InputChannelBuilder setTaskEventPublisher(TaskEventPublisher taskEventPublisher) {
this.taskEventPublisher = taskEventPublisher;
return this;
}
public InputChannelBuilder setConnectionManager(ConnectionManager connectionManager) {
this.connectionManager = connectionManager;
return this;
}
public InputChannelBuilder setInitialBackoff(int initialBackoff) {
this.initialBackoff = initialBackoff;
return this;
}
public InputChannelBuilder setMaxBackoff(int maxBackoff) {
this.maxBackoff = maxBackoff;
return this;
}
public InputChannelBuilder setPartitionRequestListenerTimeout(
int partitionRequestListenerTimeout) {
this.partitionRequestListenerTimeout = partitionRequestListenerTimeout;
return this;
}
public InputChannelBuilder setNetworkBuffersPerChannel(int networkBuffersPerChannel) {
this.networkBuffersPerChannel = networkBuffersPerChannel;
return this;
}
public InputChannelBuilder setMetrics(InputChannelMetrics metrics) {
this.metrics = metrics;
return this;
}
public InputChannelBuilder setStateWriter(ChannelStateWriter stateWriter) {
this.stateWriter = stateWriter;
return this;
}
public InputChannelBuilder setupFromNettyShuffleEnvironment(NettyShuffleEnvironment network) {
this.partitionManager = network.getResultPartitionManager();
this.connectionManager = network.getConnectionManager();
this.initialBackoff = network.getConfiguration().partitionRequestInitialBackoff();
this.maxBackoff = network.getConfiguration().partitionRequestMaxBackoff();
this.networkBuffersPerChannel = network.getConfiguration().networkBuffersPerChannel();
return this;
}
UnknownInputChannel buildUnknownChannel(SingleInputGate inputGate) {
UnknownInputChannel channel =
new UnknownInputChannel(
inputGate,
channelIndex,
partitionId,
subpartitionIndexSet,
partitionManager,
taskEventPublisher,
connectionManager,
initialBackoff,
maxBackoff,
partitionRequestListenerTimeout,
networkBuffersPerChannel,
metrics);
channel.setChannelStateWriter(stateWriter);
return channel;
}
public LocalInputChannel buildLocalChannel(SingleInputGate inputGate) {
return new LocalInputChannel(
inputGate,
channelIndex,
partitionId,
subpartitionIndexSet,
partitionManager,
taskEventPublisher,
initialBackoff,
maxBackoff,
metrics.getNumBytesInLocalCounter(),
metrics.getNumBuffersInLocalCounter(),
stateWriter);
}
public RemoteInputChannel buildRemoteChannel(SingleInputGate inputGate) {
return new RemoteInputChannel(
inputGate,
channelIndex,
partitionId,
subpartitionIndexSet,
connectionID,
connectionManager,
initialBackoff,
maxBackoff,
partitionRequestListenerTimeout,
networkBuffersPerChannel,
metrics.getNumBytesInRemoteCounter(),
metrics.getNumBuffersInRemoteCounter(),
stateWriter);
}
public LocalRecoveredInputChannel buildLocalRecoveredChannel(SingleInputGate inputGate) {
LocalRecoveredInputChannel channel =
new LocalRecoveredInputChannel(
inputGate,
channelIndex,
partitionId,
subpartitionIndexSet,
partitionManager,
taskEventPublisher,
initialBackoff,
maxBackoff,
networkBuffersPerChannel,
metrics);
channel.setChannelStateWriter(stateWriter);
return channel;
}
public RemoteRecoveredInputChannel buildRemoteRecoveredChannel(SingleInputGate inputGate) {
RemoteRecoveredInputChannel channel =
new RemoteRecoveredInputChannel(
inputGate,
channelIndex,
partitionId,
subpartitionIndexSet,
connectionID,
connectionManager,
initialBackoff,
maxBackoff,
partitionRequestListenerTimeout,
networkBuffersPerChannel,
metrics);
channel.setChannelStateWriter(stateWriter);
return channel;
}
}
| InputChannelBuilder |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/web/websocket/stomp/websocketstomphandlebrokerrelay/WebSocketConfiguration.java | {
"start": 1172,
"end": 1598
} | class ____ implements WebSocketMessageBrokerConfigurer {
@Override
public void registerStompEndpoints(StompEndpointRegistry registry) {
registry.addEndpoint("/portfolio").withSockJS();
}
@Override
public void configureMessageBroker(MessageBrokerRegistry registry) {
registry.enableStompBrokerRelay("/topic", "/queue");
registry.setApplicationDestinationPrefixes("/app");
}
}
// end::snippet[]
| WebSocketConfiguration |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/layout/ConcurrentLoggingWithJsonLayoutTest.java | {
"start": 1466,
"end": 3244
} | class ____ {
@ClassRule
public static LoggerContextRule context = new LoggerContextRule("log4j2-json-layout.xml");
private static final String PATH = "target/test-json-layout.log";
@AfterClass
public static void after() {
new File(PATH).delete();
}
@Test
public void testConcurrentLogging() throws Throwable {
final Logger log = context.getLogger(ConcurrentLoggingWithJsonLayoutTest.class);
final Set<Thread> threads = Collections.synchronizedSet(new HashSet<Thread>());
final List<Throwable> thrown = Collections.synchronizedList(new ArrayList<Throwable>());
for (int x = 0; x < Runtime.getRuntime().availableProcessors() * 2; x++) {
final Thread t = new LoggingThread(threads, log);
threads.add(t);
// Appender is configured with ignoreExceptions="false";
// any exceptions are propagated to the caller, so we can catch them here.
t.setUncaughtExceptionHandler((t1, e) -> thrown.add(e));
t.start();
}
while (!threads.isEmpty()) {
log.info("not done going to sleep...");
Thread.sleep(10);
}
// if any error occurred, fail this test
if (!thrown.isEmpty()) {
throw thrown.get(0);
}
// simple test to ensure content is not corrupted
if (new File(PATH).exists()) {
final List<String> lines = Files.readAllLines(new File(PATH).toPath(), Charset.defaultCharset());
for (final String line : lines) {
assertThat(line, containsString("\"thread\":"));
assertThat(line, endsWith("\"threadPriority\":5}"));
}
}
}
private | ConcurrentLoggingWithJsonLayoutTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java | {
"start": 1098,
"end": 2970
} | class ____ {
public static final Setting<Boolean> ENABLE_CONFIG_MIGRATION = Setting.boolSetting(
"xpack.ml.enable_config_migration",
true,
Setting.Property.OperatorDynamic,
Setting.Property.NodeScope
);
private volatile boolean isConfigMigrationEnabled;
public MlConfigMigrationEligibilityCheck(Settings settings, ClusterService clusterService) {
isConfigMigrationEnabled = ENABLE_CONFIG_MIGRATION.get(settings);
clusterService.getClusterSettings().addSettingsUpdateConsumer(ENABLE_CONFIG_MIGRATION, this::setConfigMigrationEnabled);
}
private void setConfigMigrationEnabled(boolean configMigrationEnabled) {
this.isConfigMigrationEnabled = configMigrationEnabled;
}
/**
* Can migration start? Returns:
* False if config migration is disabled via the setting {@link #ENABLE_CONFIG_MIGRATION}
* False if the .ml-config index shards are not active
* True otherwise
* @param clusterState The cluster state
* @return A boolean that dictates if config migration can start
*/
public boolean canStartMigration(ClusterState clusterState) {
if (isConfigMigrationEnabled == false) {
return false;
}
return mlConfigIndexIsAllocated(clusterState);
}
static boolean mlConfigIndexIsAllocated(ClusterState clusterState) {
IndexAbstraction configIndexOrAlias = clusterState.metadata().getProject().getIndicesLookup().get(MlConfigIndex.indexName());
if (configIndexOrAlias == null) {
return false;
}
IndexRoutingTable routingTable = clusterState.getRoutingTable().index(configIndexOrAlias.getWriteIndex());
return routingTable != null && routingTable.allPrimaryShardsActive() && routingTable.readyForSearch();
}
}
| MlConfigMigrationEligibilityCheck |
java | micronaut-projects__micronaut-core | inject-java/src/test/java/io/micronaut/aop/around/proxytarget/ByteBuddyProxyTargetTest.java | {
"start": 405,
"end": 3760
} | class ____ {
@Test
void test() {
try (ApplicationContext context = ApplicationContext.run(Map.of("spec.name", "RuntimeProxyTest"))) {
ButeBuddyProxyTargetProxyingClass<String> proxyingClass = context.getBean(ButeBuddyProxyTargetProxyingClass.class);
ButeBuddyProxyTargetProxyingClass<String> target = null;
if (proxyingClass instanceof InterceptedProxy<?> interceptedProxy) {
assertEquals(true, interceptedProxy.hasCachedInterceptedTarget());
target = (ButeBuddyProxyTargetProxyingClass<String>) interceptedProxy.interceptedTarget();
}
// Assert @PostConstruct init() was invoked exactly once
assertEquals(1, target.lifeCycleCount, "PostConstruct init() should increment lifeCycleCount once");
// Invocation count should be zero before any method calls
assertEquals(0, target.invocationCount, "invocationCount should be zero before invoking test(String)");
// Assert parameter was mutated by runtime proxy and result is correct
assertEquals("Name is changed", proxyingClass.test("test"));
// Assert invocation count is incremented by test(String)
assertEquals(1, target.invocationCount, "invocationCount should be incremented after test(String)");
// Assert primitive overloads and return types are correctly proxied
assertEquals("Age is 10", proxyingClass.test(5));
assertEquals("Name is changed and age is 5", proxyingClass.test("test", 5));
assertEquals("noargs", proxyingClass.test());
proxyingClass.testVoid("test");
proxyingClass.testVoid("test", 10);
assertTrue(proxyingClass.testBoolean("test"));
assertTrue(proxyingClass.testBoolean("test", 10));
assertEquals(1, proxyingClass.testInt("test"));
assertEquals(10, proxyingClass.testInt("test", 5));
assertEquals(1L, proxyingClass.testLong("test"));
assertEquals(10L, proxyingClass.testLong("test", 5));
assertEquals((short) 1, proxyingClass.testShort("test"));
assertEquals((short) 10, proxyingClass.testShort("test", 5));
assertEquals((byte) 1, proxyingClass.testByte("test"));
assertEquals((byte) 10, proxyingClass.testByte("test", 5));
assertEquals(1D, proxyingClass.testDouble("test"));
assertEquals(10D, proxyingClass.testDouble("test", 5));
assertEquals(1F, proxyingClass.testFloat("test"));
assertEquals(10F, proxyingClass.testFloat("test", 5));
assertEquals((char) 1, proxyingClass.testChar("test"));
assertEquals((char) 10, proxyingClass.testChar("test", 5));
byte[] data = new byte[]{1, 2, 3};
assertArrayEquals(data, proxyingClass.testByteArray("test", data));
assertEquals("Name is changed", proxyingClass.testGenericsWithExtends("test", 5));
assertEquals(List.of("changed"), proxyingClass.testListWithWildCardSuper("test", List.of("a")));
assertEquals(List.of("changed"), proxyingClass.testListWithWildCardExtends("test", List.of("a")));
assertEquals("Name is changed", proxyingClass.testGenericsFromType("test", 5));
}
}
}
| ByteBuddyProxyTargetTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/select/OracleSelectTest86_comment.java | {
"start": 1094,
"end": 3929
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"/*sqlId=9f0szhacj63ag*/SELECT /*+rule*/ SYS_XMLGEN(VALUE(KU$), XMLFORMAT.createFormat2('TABLE_T', '7')), "
+ "KU$.OBJ_NUM FROM SYS.KU$_HTABLE_VIEW KU$ WHERE"
+ " NOT (BITAND (KU$.PROPERTY,8192)=8192) AND "
+ " NOT BITAND(KU$.SCHEMA_OBJ.FLAGS,128)!=0 "
+ "AND KU$.SCHEMA_OBJ.NAME=:NAME1 "
+ "AND KU$.SCHEMA_OBJ.OWNER_NAME=:SCHEMA2";
System.out.println(sql);
OracleLexer lexer = new OracleLexer(sql);
lexer.config(SQLParserFeature.SkipComments, false);
lexer.nextToken();
String comment = lexer.stringVal();
assertEquals("/*sqlId=9f0szhacj63ag*/", comment);
OracleStatementParser parser = new OracleStatementParser(sql, SQLParserFeature.KeepComments);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
{
String text = SQLUtils.toOracleString(stmt);
assertEquals("/*sqlId=9f0szhacj63ag*/\n" +
"SELECT /*+rule*/ SYS_XMLGEN(VALUE(KU$), XMLFORMAT.createFormat2('TABLE_T', '7'))\n" +
"\t, KU$.OBJ_NUM\n" +
"FROM SYS.KU$_HTABLE_VIEW KU$\n" +
"WHERE NOT (BITAND(KU$.PROPERTY, 8192) = 8192)\n" +
"\tAND NOT BITAND(KU$.SCHEMA_OBJ.FLAGS, 128) != 0\n" +
"\tAND KU$.SCHEMA_OBJ.NAME = :NAME1\n" +
"\tAND KU$.SCHEMA_OBJ.OWNER_NAME = :SCHEMA2", text);
}
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(8, visitor.getColumns().size());
assertEquals(2, visitor.getConditions().size());
assertEquals(0, visitor.getRelationships().size());
assertEquals(0, visitor.getOrderByColumns().size());
// assertTrue(visitor.containsTable("sup_registration"));
// assertTrue(visitor.containsTable("sup_task"));
// assertTrue(visitor.containsTable("sys_org"));
//
// assertTrue(visitor.containsColumn("sup_task", "orgid"));
// assertTrue(visitor.containsColumn("sup_task", "orgid"));
//
}
}
| OracleSelectTest86_comment |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotatedDiscriminatorColumn.java | {
"start": 528,
"end": 4103
} | class ____ extends AnnotatedColumn {
public static final String DEFAULT_DISCRIMINATOR_COLUMN_NAME = "DTYPE";
public static final String DEFAULT_DISCRIMINATOR_TYPE = "string";
private static final long DEFAULT_DISCRIMINATOR_LENGTH = 31;
private String discriminatorTypeName;
public AnnotatedDiscriminatorColumn(String defaultColumnName) {
//discriminator default value
super();
setLogicalColumnName( defaultColumnName );
setNullable( false );
setDiscriminatorTypeName( DEFAULT_DISCRIMINATOR_TYPE );
setLength( DEFAULT_DISCRIMINATOR_LENGTH );
}
public String getDiscriminatorTypeName() {
return discriminatorTypeName;
}
public void setDiscriminatorTypeName(String discriminatorTypeName) {
this.discriminatorTypeName = discriminatorTypeName;
}
public static AnnotatedDiscriminatorColumn buildDiscriminatorColumn(
DiscriminatorColumn discriminatorColumn,
DiscriminatorFormula discriminatorFormula,
Column columnOverride,
String defaultColumnName,
MetadataBuildingContext context) {
final var parent = new AnnotatedColumns();
parent.setBuildingContext( context );
final var column = new AnnotatedDiscriminatorColumn( defaultColumnName );
final DiscriminatorType discriminatorType;
if ( discriminatorFormula != null ) {
final var type = discriminatorFormula.discriminatorType();
if ( type == DiscriminatorType.STRING ) {
discriminatorType = discriminatorColumn == null ? type : discriminatorColumn.discriminatorType();
}
else {
discriminatorType = type;
}
column.setImplicit( false );
column.setFormula( discriminatorFormula.value() );
}
else if ( discriminatorColumn != null ) {
discriminatorType = discriminatorColumn.discriminatorType();
column.setImplicit( false );
if ( !discriminatorColumn.columnDefinition().isBlank() ) {
column.setSqlType( discriminatorColumn.columnDefinition() );
}
if ( !discriminatorColumn.name().isBlank() ) {
column.setLogicalColumnName( discriminatorColumn.name() );
}
column.setNullable( false );
column.setOptions( discriminatorColumn.options() );
}
else {
discriminatorType = DiscriminatorType.STRING;
column.setImplicit( true );
}
if ( columnOverride != null ) {
column.setLogicalColumnName( columnOverride.name() );
final String columnDefinition = columnOverride.columnDefinition();
if ( !columnDefinition.isBlank() ) {
column.setSqlType( columnDefinition );
}
}
setDiscriminatorType( discriminatorType, discriminatorColumn, columnOverride, column );
column.setParent( parent );
column.bind();
return column;
}
private static void setDiscriminatorType(
DiscriminatorType type,
DiscriminatorColumn discriminatorColumn,
Column columnOverride,
AnnotatedDiscriminatorColumn column) {
if ( type == null ) {
column.setDiscriminatorTypeName( "string" );
}
else {
switch ( type ) {
case CHAR:
column.setDiscriminatorTypeName( "character" );
column.setImplicit( false );
column.setLength( 1L );
break;
case INTEGER:
column.setDiscriminatorTypeName( "integer" );
column.setImplicit( false );
break;
case STRING:
column.setDiscriminatorTypeName( "string" );
if ( columnOverride != null ) {
column.setLength( (long) columnOverride.length() );
}
else if ( discriminatorColumn != null ) {
column.setLength( (long) discriminatorColumn.length() );
}
break;
default:
throw new AssertionFailure( "Unknown discriminator type: " + type );
}
}
}
}
| AnnotatedDiscriminatorColumn |
java | quarkusio__quarkus | integration-tests/locales/all/src/main/java/io/quarkus/locales/it/AllLocalesResource.java | {
"start": 320,
"end": 883
} | class ____ extends LocalesResource {
private static final Logger LOG = Logger.getLogger(AllLocalesResource.class);
// @Pattern validation does nothing when placed in LocalesResource.
/**
 * Echoes the {@code id} path parameter back as plain text. The {@code @Pattern}
 * constraint rejects ids that do not start with 'A'; the violation message is
 * resolved from the {@code pattern.message} bundle key (presumably per request
 * locale — confirm against the validation configuration).
 */
@GET
@Path("/hibernate-validator-test-validation-message-locale/{id}/")
@Produces(MediaType.TEXT_PLAIN)
public Response validationMessageLocale(
        @Pattern(regexp = "A.*", message = "{pattern.message}") @PathParam("id") String id) {
    LOG.infof("Triggering test: id: %s", id);
    return Response.ok(id).build();
}
}
| AllLocalesResource |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/HamletSpec.java | {
"start": 47651,
"end": 49228
} | interface ____ extends Attrs, _Child, /* (%block;|SCRIPT)+ -(FORM) */
_Script, _Block, _FieldSet {
/** Server-side form handler.
 * @param uri the URI of the agent that processes the submitted form.
 * @return the current element builder
 */
FORM $action(String uri);
/** HTTP method used to submit the form.
 * @param method the HTTP method.
 * @return the current element builder
 */
FORM $method(Method method);
/**
 * Content type for the "POST" method.
 * The default is "application/x-www-form-urlencoded".
 * Use "multipart/form-data" for input type=file.
 * @param enctype the encoding type.
 * @return the current element builder
 */
FORM $enctype(String enctype);
/** List of MIME types accepted for file upload.
 * @param cdata the content of the attribute.
 * @return the current element builder
 */
FORM $accept(String cdata);
/** Name of the form, for scripting.
 * @param cdata the content of the attribute.
 * @return the current element builder
 */
FORM $name(String cdata);
/** Script invoked when the form is submitted.
 * @param script the script to invoke.
 * @return the current element builder
 */
FORM $onsubmit(String script);
/** Script invoked when the form is reset.
 * @param script the script to invoke.
 * @return the current element builder
 */
FORM $onreset(String script);
/** (Space and/or comma separated) list of supported charsets.
 * @param cdata the content of the attribute.
 * @return the current element builder
 */
FORM $accept_charset(String cdata);
}
/**
*
*/
public | FORM |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/data/FileDescriptorLocalUriFetcher.java | {
"start": 396,
"end": 1555
} | class ____ extends LocalUriFetcher<ParcelFileDescriptor> {
/** Creates a fetcher that opens {@code uri} through {@code contentResolver}. */
public FileDescriptorLocalUriFetcher(ContentResolver contentResolver, Uri uri) {
  super(contentResolver, uri);
}
/**
 * useMediaStoreApisIfAvailable is part of an experiment and the constructor can be removed in a
 * future version.
 */
public FileDescriptorLocalUriFetcher(
    ContentResolver contentResolver, Uri uri, boolean useMediaStoreApisIfAvailable) {
  super(contentResolver, uri, useMediaStoreApisIfAvailable);
}
/**
 * Opens an {@link AssetFileDescriptor} for {@code uri} and unwraps its
 * {@link ParcelFileDescriptor}.
 *
 * @throws FileNotFoundException if no descriptor could be opened for the URI.
 */
@Override
protected ParcelFileDescriptor loadResource(Uri uri, ContentResolver contentResolver)
    throws FileNotFoundException {
  AssetFileDescriptor descriptor = openAssetFileDescriptor(uri);
  if (descriptor != null) {
    return descriptor.getParcelFileDescriptor();
  }
  throw new FileNotFoundException("FileDescriptor is null for: " + uri);
}
/** Closes the descriptor returned by {@code loadResource}. */
@Override
protected void close(ParcelFileDescriptor data) throws IOException {
  data.close();
}
/** The concrete data type this fetcher produces. */
@NonNull
@Override
public Class<ParcelFileDescriptor> getDataClass() {
  return ParcelFileDescriptor.class;
}
}
| FileDescriptorLocalUriFetcher |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java | {
"start": 3238,
"end": 3573
} | class ____ extends FieldMapper {
private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(IpFieldMapper.class);
// Mapping type name under which this mapper is registered.
public static final String CONTENT_TYPE = "ip";
// Narrowing cast helper; callers must guarantee `in` is an IpFieldMapper.
private static IpFieldMapper toType(FieldMapper in) {
    return (IpFieldMapper) in;
}
public static final | IpFieldMapper |
java | apache__spark | common/sketch/src/main/java/org/apache/spark/util/sketch/BitArray.java | {
"start": 966,
"end": 3575
} | class ____ {
// Backing words: bit i lives in data[i >>> 6] under mask (1L << i).
private final long[] data;
// Cached number of set bits; kept in sync by set/putAll/and.
private long bitCount;
/**
 * Returns the number of 64-bit words needed to hold {@code numBits} bits,
 * i.e. ceil(numBits / 64).
 *
 * @throws IllegalArgumentException if {@code numBits} is non-positive, or if the
 *     required word count would exceed {@code Integer.MAX_VALUE}.
 */
static int numWords(long numBits) {
  if (numBits <= 0) {
    throw new IllegalArgumentException("numBits must be positive, but got " + numBits);
  }
  // Exact integer ceiling division; avoids routing an integral computation through
  // Math.ceil(numBits / 64.0), whose double rounding is imprecise above 2^53.
  // Safe for all positive numBits: no overflow, since numBits - 1 >= 0.
  long numWords = ((numBits - 1) >>> 6) + 1;
  if (numWords > Integer.MAX_VALUE) {
    throw new IllegalArgumentException("Can't allocate enough space for " + numBits + " bits");
  }
  return (int) numWords;
}
/** Creates an all-zero bit array with capacity for at least {@code numBits} bits. */
BitArray(long numBits) {
  this(new long[numWords(numBits)]);
}
/** Wraps {@code data} directly (no defensive copy) and computes the initial set-bit count. */
private BitArray(long[] data) {
  this.data = data;
  long bitCount = 0;
  for (long word : data) {
    bitCount += Long.bitCount(word);
  }
  this.bitCount = bitCount;
}
/** Sets the bit at {@code index}; returns true if the bit changed value. */
boolean set(long index) {
  int wordIndex = (int) (index >>> 6);
  // Java's long shift uses only the low 6 bits of the count, i.e. index mod 64.
  long mask = 1L << index;
  if ((data[wordIndex] & mask) != 0) {
    return false; // already set
  }
  data[wordIndex] |= mask;
  bitCount++;
  return true;
}
/** Returns whether the bit at {@code index} is set. */
boolean get(long index) {
  long word = data[(int) (index >>> 6)];
  return ((word >>> index) & 1L) != 0;
}
/** Total capacity in bits (always a multiple of 64). */
long bitSize() {
  return ((long) data.length) << 6; // data.length * 64
}
/** Number of set bits (1s); maintained incrementally by the mutators, so this is O(1). */
long cardinality() {
  return bitCount;
}
/** Combines the two BitArrays using bitwise OR, rebuilding the cached bit count. */
void putAll(BitArray array) {
  assert data.length == array.data.length : "BitArrays must be of equal length when merging";
  long newCount = 0;
  for (int i = 0; i < data.length; i++) {
    long merged = data[i] | array.data[i];
    data[i] = merged;
    newCount += Long.bitCount(merged);
  }
  bitCount = newCount;
}
/** Combines the two BitArrays using bitwise AND, rebuilding the cached bit count. */
void and(BitArray array) {
  assert data.length == array.data.length : "BitArrays must be of equal length when merging";
  long newCount = 0;
  for (int i = 0; i < data.length; i++) {
    long merged = data[i] & array.data[i];
    data[i] = merged;
    newCount += Long.bitCount(merged);
  }
  bitCount = newCount;
}
/** Serializes the word count followed by each word, in order. */
void writeTo(DataOutputStream out) throws IOException {
  out.writeInt(data.length);
  for (int i = 0; i < data.length; i++) {
    out.writeLong(data[i]);
  }
}
/** Deserializes a BitArray written by {@code writeTo}: word count, then each word. */
static BitArray readFrom(DataInputStream in) throws IOException {
  final int numWords = in.readInt();
  final long[] words = new long[numWords];
  for (int i = 0; i < words.length; i++) {
    words[i] = in.readLong();
  }
  return new BitArray(words);
}
/** Two BitArrays are equal iff their backing word arrays are element-wise equal. */
@Override
public boolean equals(Object other) {
  if (other == this) {
    return true;
  }
  if (other instanceof BitArray that) {
    return Arrays.equals(this.data, that.data);
  }
  return false;
}
// Consistent with equals: derived solely from the word array.
@Override
public int hashCode() {
  return Arrays.hashCode(data);
}
}
| BitArray |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.