language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/AggregationTestCase.java | {
"start": 724,
"end": 812
} | class ____ aggregator unit tests that reside in aggregations module.
*/
public abstract | for |
java | spring-projects__spring-boot | module/spring-boot-jackson/src/main/java/org/springframework/boot/jackson/autoconfigure/JacksonAutoConfiguration.java | {
"start": 5303,
"end": 5641
} | class ____ {
@Bean
StandardJsonMapperBuilderCustomizer standardJsonMapperBuilderCustomizer(JacksonProperties jacksonProperties,
ObjectProvider<JacksonModule> modules) {
return new StandardJsonMapperBuilderCustomizer(jacksonProperties, modules.stream().toList());
}
static final | JacksonJsonMapperBuilderCustomizerConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/components/ComponentInWhereClauseTest.java | {
"start": 8349,
"end": 8958
} | class ____ {
@OneToMany(cascade = CascadeType.PERSIST)
private Set<Project> previousProjects = new HashSet<>();
@ManyToOne(cascade = CascadeType.PERSIST)
private Project currentProject;
public void addPreviousProject(Project project) {
this.previousProjects.add( project );
}
public Set<Project> getPreviousProjects() {
return previousProjects;
}
public Project getCurrentProject() {
return currentProject;
}
public void setCurrentProject(Project project) {
this.currentProject = project;
}
}
@Entity(name = "Project")
@Table(name = "PROJECT")
public static | Projects |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ExpressionDeserializers.java | {
"start": 977,
"end": 9748
} | class ____ extends YamlDeserializerSupport {
private ExpressionDeserializers() {
}
public static ExpressionDefinition constructExpressionType(Node node) {
MappingNode mn = asMappingNode(node);
if (mn.getValue().size() != 1) {
return null;
}
NodeTuple nt = mn.getValue().get(0);
YamlDeserializationContext dc = getDeserializationContext(node);
String key = asText(nt.getKeyNode());
Node val = setDeserializationContext(nt.getValueNode(), dc);
ExpressionDefinition answer = constructExpressionType(key, val);
if (answer == null) {
throw new org.apache.camel.dsl.yaml.common.exception.InvalidExpressionException(node, "Unknown expression with id: " + key);
}
return answer;
}
public static ExpressionDefinition constructExpressionType(String id, Node node) {
switch(id) {
case "constant": {
return asType(node, org.apache.camel.model.language.ConstantExpression.class);
}
case "csimple": {
return asType(node, org.apache.camel.model.language.CSimpleExpression.class);
}
case "datasonnet": {
return asType(node, org.apache.camel.model.language.DatasonnetExpression.class);
}
case "exchangeProperty": {
return asType(node, org.apache.camel.model.language.ExchangePropertyExpression.class);
}
case "groovy": {
return asType(node, org.apache.camel.model.language.GroovyExpression.class);
}
case "header": {
return asType(node, org.apache.camel.model.language.HeaderExpression.class);
}
case "hl7terser": {
return asType(node, org.apache.camel.model.language.Hl7TerserExpression.class);
}
case "java": {
return asType(node, org.apache.camel.model.language.JavaExpression.class);
}
case "joor": {
return asType(node, org.apache.camel.model.language.JoorExpression.class);
}
case "jq": {
return asType(node, org.apache.camel.model.language.JqExpression.class);
}
case "js": {
return asType(node, org.apache.camel.model.language.JavaScriptExpression.class);
}
case "jsonpath": {
return asType(node, org.apache.camel.model.language.JsonPathExpression.class);
}
case "language": {
return asType(node, org.apache.camel.model.language.LanguageExpression.class);
}
case "method": {
return asType(node, org.apache.camel.model.language.MethodCallExpression.class);
}
case "mvel": {
return asType(node, org.apache.camel.model.language.MvelExpression.class);
}
case "ognl": {
return asType(node, org.apache.camel.model.language.OgnlExpression.class);
}
case "python": {
return asType(node, org.apache.camel.model.language.PythonExpression.class);
}
case "ref": {
return asType(node, org.apache.camel.model.language.RefExpression.class);
}
case "simple": {
return asType(node, org.apache.camel.model.language.SimpleExpression.class);
}
case "spel": {
return asType(node, org.apache.camel.model.language.SpELExpression.class);
}
case "tokenize": {
return asType(node, org.apache.camel.model.language.TokenizerExpression.class);
}
case "variable": {
return asType(node, org.apache.camel.model.language.VariableExpression.class);
}
case "wasm": {
return asType(node, org.apache.camel.model.language.WasmExpression.class);
}
case "xpath": {
return asType(node, org.apache.camel.model.language.XPathExpression.class);
}
case "xquery": {
return asType(node, org.apache.camel.model.language.XQueryExpression.class);
}
case "xtokenize": {
return asType(node, org.apache.camel.model.language.XMLTokenizerExpression.class);
}
case "expression": {
return constructExpressionType(node);
}
case "expression-type": {
return constructExpressionType(node);
}
case "expressionType": {
return constructExpressionType(node);
}
}
return null;
}
@YamlType(
types = org.apache.camel.model.language.ExpressionDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
properties = {
@YamlProperty(name = "constant", type = "object:org.apache.camel.model.language.ConstantExpression", oneOf = "expression"),
@YamlProperty(name = "csimple", type = "object:org.apache.camel.model.language.CSimpleExpression", oneOf = "expression"),
@YamlProperty(name = "datasonnet", type = "object:org.apache.camel.model.language.DatasonnetExpression", oneOf = "expression"),
@YamlProperty(name = "exchangeProperty", type = "object:org.apache.camel.model.language.ExchangePropertyExpression", oneOf = "expression"),
@YamlProperty(name = "groovy", type = "object:org.apache.camel.model.language.GroovyExpression", oneOf = "expression"),
@YamlProperty(name = "header", type = "object:org.apache.camel.model.language.HeaderExpression", oneOf = "expression"),
@YamlProperty(name = "hl7terser", type = "object:org.apache.camel.model.language.Hl7TerserExpression", oneOf = "expression"),
@YamlProperty(name = "java", type = "object:org.apache.camel.model.language.JavaExpression", oneOf = "expression"),
@YamlProperty(name = "joor", type = "object:org.apache.camel.model.language.JoorExpression", oneOf = "expression"),
@YamlProperty(name = "jq", type = "object:org.apache.camel.model.language.JqExpression", oneOf = "expression"),
@YamlProperty(name = "js", type = "object:org.apache.camel.model.language.JavaScriptExpression", oneOf = "expression"),
@YamlProperty(name = "jsonpath", type = "object:org.apache.camel.model.language.JsonPathExpression", oneOf = "expression"),
@YamlProperty(name = "language", type = "object:org.apache.camel.model.language.LanguageExpression", oneOf = "expression"),
@YamlProperty(name = "method", type = "object:org.apache.camel.model.language.MethodCallExpression", oneOf = "expression"),
@YamlProperty(name = "mvel", type = "object:org.apache.camel.model.language.MvelExpression", oneOf = "expression"),
@YamlProperty(name = "ognl", type = "object:org.apache.camel.model.language.OgnlExpression", oneOf = "expression"),
@YamlProperty(name = "python", type = "object:org.apache.camel.model.language.PythonExpression", oneOf = "expression"),
@YamlProperty(name = "ref", type = "object:org.apache.camel.model.language.RefExpression", oneOf = "expression"),
@YamlProperty(name = "simple", type = "object:org.apache.camel.model.language.SimpleExpression", oneOf = "expression"),
@YamlProperty(name = "spel", type = "object:org.apache.camel.model.language.SpELExpression", oneOf = "expression"),
@YamlProperty(name = "tokenize", type = "object:org.apache.camel.model.language.TokenizerExpression", oneOf = "expression"),
@YamlProperty(name = "variable", type = "object:org.apache.camel.model.language.VariableExpression", oneOf = "expression"),
@YamlProperty(name = "wasm", type = "object:org.apache.camel.model.language.WasmExpression", oneOf = "expression"),
@YamlProperty(name = "xpath", type = "object:org.apache.camel.model.language.XPathExpression", oneOf = "expression"),
@YamlProperty(name = "xquery", type = "object:org.apache.camel.model.language.XQueryExpression", oneOf = "expression"),
@YamlProperty(name = "xtokenize", type = "object:org.apache.camel.model.language.XMLTokenizerExpression", oneOf = "expression")
}
)
public static | ExpressionDeserializers |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/operators/sink/SinkV2CommitterOperatorTest.java | {
"start": 18808,
"end": 19151
} | class ____ {
SupportsCommitter<String> sink;
IntSupplier commitCounter;
@SuppressWarnings("unchecked")
public SinkAndCounters(TestSinkV2<?> sink, IntSupplier commitCounter) {
this.sink = (SupportsCommitter<String>) sink;
this.commitCounter = commitCounter;
}
}
}
| SinkAndCounters |
java | google__dagger | javatests/dagger/functional/componentdependency/ComponentDependenciesTest.java | {
"start": 1686,
"end": 1841
} | interface ____ extends OneOverride, TwoOverride {
@Override
String getString();
}
@Component(dependencies = MergedOverride.class)
| MergedOverride |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/execution/ReactorManager.java | {
"start": 1343,
"end": 6141
} | class ____ {
public static final String FAIL_FAST = "fail-fast";
public static final String FAIL_AT_END = "fail-at-end";
public static final String FAIL_NEVER = "fail-never";
public static final String MAKE_MODE = "make";
public static final String MAKE_DEPENDENTS_MODE = "make-dependents";
// make projects that depend on me, and projects that I depend on
public static final String MAKE_BOTH_MODE = "make-both";
private List<String> blackList = new ArrayList<>();
private Map<String, BuildFailure> buildFailuresByProject = new HashMap<>();
private Map<String, Map<String, Map>> pluginContextsByProjectAndPluginKey = new HashMap<>();
private String failureBehavior = FAIL_FAST;
private final ProjectSorter sorter;
private Map<String, BuildSuccess> buildSuccessesByProject = new HashMap<>();
public ReactorManager(List<MavenProject> projects) throws CycleDetectedException, DuplicateProjectException {
this.sorter = new ProjectSorter(projects);
}
public Map getPluginContext(PluginDescriptor plugin, MavenProject project) {
Map<String, Map> pluginContextsByKey =
pluginContextsByProjectAndPluginKey.computeIfAbsent(project.getId(), k -> new HashMap<>());
return pluginContextsByKey.computeIfAbsent(plugin.getPluginLookupKey(), k -> new HashMap<>());
}
public void setFailureBehavior(String failureBehavior) {
if (failureBehavior == null) {
this.failureBehavior = FAIL_FAST; // default
return;
}
if (FAIL_FAST.equals(failureBehavior)
|| FAIL_AT_END.equals(failureBehavior)
|| FAIL_NEVER.equals(failureBehavior)) {
this.failureBehavior = failureBehavior;
} else {
throw new IllegalArgumentException("Invalid failure behavior (must be one of: '" + FAIL_FAST + "', '"
+ FAIL_AT_END + "', '" + FAIL_NEVER + "').");
}
}
public String getFailureBehavior() {
return failureBehavior;
}
public void blackList(MavenProject project) {
blackList(getProjectKey(project));
}
private void blackList(String id) {
if (!blackList.contains(id)) {
blackList.add(id);
List<String> dependents = sorter.getDependents(id);
if (dependents != null && !dependents.isEmpty()) {
for (String dependentId : dependents) {
if (!buildSuccessesByProject.containsKey(dependentId)
&& !buildFailuresByProject.containsKey(dependentId)) {
blackList(dependentId);
}
}
}
}
}
public boolean isBlackListed(MavenProject project) {
return blackList.contains(getProjectKey(project));
}
private static String getProjectKey(MavenProject project) {
return ArtifactUtils.versionlessKey(project.getGroupId(), project.getArtifactId());
}
public void registerBuildFailure(MavenProject project, Exception error, String task, long time) {
buildFailuresByProject.put(getProjectKey(project), new BuildFailure(project, time, error));
}
public void registerBuildFailure(MavenProject project, Exception error, String task, Duration time) {
buildFailuresByProject.put(getProjectKey(project), new BuildFailure(project, time, error));
}
public boolean hasBuildFailures() {
return !buildFailuresByProject.isEmpty();
}
public boolean hasBuildFailure(MavenProject project) {
return buildFailuresByProject.containsKey(getProjectKey(project));
}
public boolean hasMultipleProjects() {
return sorter.hasMultipleProjects();
}
public List<MavenProject> getSortedProjects() {
return sorter.getSortedProjects();
}
public boolean hasBuildSuccess(MavenProject project) {
return buildSuccessesByProject.containsKey(getProjectKey(project));
}
public void registerBuildSuccess(MavenProject project, long time) {
buildSuccessesByProject.put(getProjectKey(project), new BuildSuccess(project, time));
}
public void registerBuildSuccess(MavenProject project, Duration time) {
buildSuccessesByProject.put(getProjectKey(project), new BuildSuccess(project, time));
}
public BuildFailure getBuildFailure(MavenProject project) {
return buildFailuresByProject.get(getProjectKey(project));
}
public BuildSuccess getBuildSuccess(MavenProject project) {
return buildSuccessesByProject.get(getProjectKey(project));
}
public boolean executedMultipleProjects() {
return buildFailuresByProject.size() + buildSuccessesByProject.size() > 1;
}
}
| ReactorManager |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/util/JsonBufferRecyclersTest.java | {
"start": 732,
"end": 5647
} | class ____ extends JacksonCoreTestBase
{
// // Parsers with RecyclerPools:
@Test
void parserWithThreadLocalPool() throws Exception {
_testParser(JsonRecyclerPools.threadLocalPool(), -1, -1);
}
@Test
void parserWithNopLocalPool() throws Exception {
_testParser(JsonRecyclerPools.nonRecyclingPool(), 0, 0);
}
@Test
void parserWithDequeuPool() throws Exception {
_testParser(JsonRecyclerPools.newConcurrentDequePool(), 0, 1);
_testParser(JsonRecyclerPools.sharedConcurrentDequePool(), null, null);
}
@Test
void parserWithBoundedPool() throws Exception {
_testParser(JsonRecyclerPools.newBoundedPool(5), 0, 1);
_testParser(JsonRecyclerPools.sharedBoundedPool(), null, null);
}
private void _testParser(RecyclerPool<BufferRecycler> pool,
Integer expSizeBefore, Integer expSizeAfter) throws Exception
{
JsonFactory jsonF = JsonFactory.builder()
.recyclerPool(pool)
.build();
if (expSizeBefore != null) {
assertEquals(expSizeBefore, pool.pooledCount());
}
JsonParser p = jsonF.createParser(ObjectReadContext.empty(),
a2q("{'a':123,'b':'foobar'}"));
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken());
assertEquals("a", p.currentName());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals(123, p.getIntValue());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken());
assertEquals("b", p.currentName());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("foobar", p.getString());
assertToken(JsonToken.END_OBJECT, p.nextToken());
p.close();
if (expSizeAfter != null) {
assertEquals(expSizeAfter, pool.pooledCount());
}
}
// // Generators with RecyclerPools:
@Test
void generatorWithThreadLocalPool() throws Exception {
_testGenerator(JsonRecyclerPools.threadLocalPool(), -1, -1);
}
@Test
void generatorWithNopLocalPool() throws Exception {
_testGenerator(JsonRecyclerPools.nonRecyclingPool(), 0, 0);
}
@Test
void generatorWithDequeuPool() throws Exception {
_testGenerator(JsonRecyclerPools.newConcurrentDequePool(), 0, 1);
_testGenerator(JsonRecyclerPools.sharedConcurrentDequePool(), null, null);
}
@Test
void generatorWithBoundedPool() throws Exception {
_testGenerator(JsonRecyclerPools.newBoundedPool(5), 0, 1);
_testGenerator(JsonRecyclerPools.sharedBoundedPool(), null, null);
}
private void _testGenerator(RecyclerPool<BufferRecycler> pool,
Integer expSizeBefore, Integer expSizeAfter) throws Exception
{
JsonFactory jsonF = JsonFactory.builder()
.recyclerPool(pool)
.build();
if (expSizeBefore != null) {
assertEquals(expSizeBefore, pool.pooledCount());
}
StringWriter w = new StringWriter();
try (JsonGenerator g = jsonF.createGenerator(ObjectWriteContext.empty(), w)) {
g.writeStartObject();
g.writeNumberProperty("a", -42);
g.writeStringProperty("b", "barfoo");
g.writeEndObject();
}
if (expSizeAfter != null) {
assertEquals(expSizeAfter, pool.pooledCount());
}
assertEquals(a2q("{'a':-42,'b':'barfoo'}"), w.toString());
}
// // Read-and-Write: Parser and Generator, overlapping usage
@Test
void copyWithThreadLocalPool() throws Exception {
_testCopy(JsonRecyclerPools.threadLocalPool());
}
@Test
void copyWithNopLocalPool() throws Exception {
_testCopy(JsonRecyclerPools.nonRecyclingPool());
}
@Test
void copyWithDequeuPool() throws Exception {
_testCopy(JsonRecyclerPools.newConcurrentDequePool());
_testCopy(JsonRecyclerPools.sharedConcurrentDequePool());
}
@Test
void copyWithBoundedPool() throws Exception {
_testCopy(JsonRecyclerPools.newBoundedPool(5));
_testCopy(JsonRecyclerPools.sharedBoundedPool());
}
private void _testCopy(RecyclerPool<BufferRecycler> pool) throws Exception
{
JsonFactory jsonF = JsonFactory.builder()
.recyclerPool(pool)
.build();
final String DOC = a2q("{'a':123,'b':'foobar'}");
JsonParser p = jsonF.createParser(ObjectReadContext.empty(), DOC);
StringWriter w = new StringWriter();
JsonGenerator g = jsonF.createGenerator(ObjectWriteContext.empty(), w);
while (p.nextToken() != null) {
g.copyCurrentEvent(p);
}
p.close();
g.close();
assertEquals(DOC, w.toString());
}
}
| JsonBufferRecyclersTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/preemption/CheckpointAMPreemptionPolicy.java | {
"start": 3092,
"end": 11704
} | class ____ implements AMPreemptionPolicy {
// task attempts flagged for preemption
private final Set<TaskAttemptId> toBePreempted;
private final Set<TaskAttemptId> countedPreemptions;
private final Map<TaskId,TaskCheckpointID> checkpoints;
private final Map<TaskAttemptId,Resource> pendingFlexiblePreemptions;
@SuppressWarnings("rawtypes")
private EventHandler eventHandler;
static final Logger LOG = LoggerFactory
.getLogger(CheckpointAMPreemptionPolicy.class);
public CheckpointAMPreemptionPolicy() {
this(Collections.synchronizedSet(new HashSet<TaskAttemptId>()),
Collections.synchronizedSet(new HashSet<TaskAttemptId>()),
Collections.synchronizedMap(new HashMap<TaskId,TaskCheckpointID>()),
Collections.synchronizedMap(new HashMap<TaskAttemptId,Resource>()));
}
CheckpointAMPreemptionPolicy(Set<TaskAttemptId> toBePreempted,
Set<TaskAttemptId> countedPreemptions,
Map<TaskId,TaskCheckpointID> checkpoints,
Map<TaskAttemptId,Resource> pendingFlexiblePreemptions) {
this.toBePreempted = toBePreempted;
this.countedPreemptions = countedPreemptions;
this.checkpoints = checkpoints;
this.pendingFlexiblePreemptions = pendingFlexiblePreemptions;
}
@Override
public void init(AppContext context) {
this.eventHandler = context.getEventHandler();
}
@Override
public void preempt(Context ctxt, PreemptionMessage preemptionRequests) {
if (preemptionRequests != null) {
// handling non-negotiable preemption
StrictPreemptionContract cStrict = preemptionRequests.getStrictContract();
if (cStrict != null
&& cStrict.getContainers() != null
&& cStrict.getContainers().size() > 0) {
LOG.info("strict preemption :" +
preemptionRequests.getStrictContract().getContainers().size() +
" containers to kill");
// handle strict preemptions. These containers are non-negotiable
for (PreemptionContainer c :
preemptionRequests.getStrictContract().getContainers()) {
ContainerId reqCont = c.getId();
TaskAttemptId reqTask = ctxt.getTaskAttempt(reqCont);
if (reqTask != null) {
// ignore requests for preempting containers running maps
if (org.apache.hadoop.mapreduce.v2.api.records.TaskType.REDUCE
.equals(reqTask.getTaskId().getTaskType())) {
toBePreempted.add(reqTask);
LOG.info("preempting " + reqCont + " running task:" + reqTask);
} else {
LOG.info("NOT preempting " + reqCont + " running task:" + reqTask);
}
}
}
}
// handling negotiable preemption
PreemptionContract cNegot = preemptionRequests.getContract();
if (cNegot != null
&& cNegot.getResourceRequest() != null
&& cNegot.getResourceRequest().size() > 0
&& cNegot.getContainers() != null
&& cNegot.getContainers().size() > 0) {
LOG.info("negotiable preemption :" +
preemptionRequests.getContract().getResourceRequest().size() +
" resourceReq, " +
preemptionRequests.getContract().getContainers().size() +
" containers");
// handle fungible preemption. Here we only look at the total amount of
// resources to be preempted and pick enough of our containers to
// satisfy that. We only support checkpointing for reducers for now.
List<PreemptionResourceRequest> reqResources =
preemptionRequests.getContract().getResourceRequest();
// compute the total amount of pending preemptions (to be discounted
// from current request)
int pendingPreemptionRam = 0;
int pendingPreemptionCores = 0;
for (Resource r : pendingFlexiblePreemptions.values()) {
pendingPreemptionRam += r.getMemorySize();
pendingPreemptionCores += r.getVirtualCores();
}
// discount preemption request based on currently pending preemption
for (PreemptionResourceRequest rr : reqResources) {
ResourceRequest reqRsrc = rr.getResourceRequest();
if (!ResourceRequest.ANY.equals(reqRsrc.getResourceName())) {
// For now, only respond to aggregate requests and ignore locality
continue;
}
LOG.info("ResourceRequest:" + reqRsrc);
int reqCont = reqRsrc.getNumContainers();
long reqMem = reqRsrc.getCapability().getMemorySize();
long totalMemoryToRelease = reqCont * reqMem;
int reqCores = reqRsrc.getCapability().getVirtualCores();
int totalCoresToRelease = reqCont * reqCores;
// remove
if (pendingPreemptionRam > 0) {
// if goes negative we simply exit
totalMemoryToRelease -= pendingPreemptionRam;
// decrement pending resources if zero or negatve we will
// ignore it while processing next PreemptionResourceRequest
pendingPreemptionRam -= totalMemoryToRelease;
}
if (pendingPreemptionCores > 0) {
totalCoresToRelease -= pendingPreemptionCores;
pendingPreemptionCores -= totalCoresToRelease;
}
// reverse order of allocation (for now)
List<Container> listOfCont = ctxt.getContainers(TaskType.REDUCE);
Collections.sort(listOfCont, new Comparator<Container>() {
@Override
public int compare(final Container o1, final Container o2) {
return o2.getId().compareTo(o1.getId());
}
});
// preempt reducers first
for (Container cont : listOfCont) {
if (totalMemoryToRelease <= 0 && totalCoresToRelease<=0) {
break;
}
TaskAttemptId reduceId = ctxt.getTaskAttempt(cont.getId());
int cMem = (int) cont.getResource().getMemorySize();
int cCores = cont.getResource().getVirtualCores();
if (!toBePreempted.contains(reduceId)) {
totalMemoryToRelease -= cMem;
totalCoresToRelease -= cCores;
toBePreempted.add(reduceId);
pendingFlexiblePreemptions.put(reduceId, cont.getResource());
}
LOG.info("ResourceRequest:" + reqRsrc + " satisfied preempting "
+ reduceId);
}
// if map was preemptable we would do add them to toBePreempted here
}
}
}
}
@Override
public void handleFailedContainer(TaskAttemptId attemptID) {
toBePreempted.remove(attemptID);
checkpoints.remove(attemptID.getTaskId());
}
@Override
public void handleCompletedContainer(TaskAttemptId attemptID){
LOG.info(" task completed:" + attemptID);
toBePreempted.remove(attemptID);
pendingFlexiblePreemptions.remove(attemptID);
}
@Override
public boolean isPreempted(TaskAttemptId yarnAttemptID) {
if (toBePreempted.contains(yarnAttemptID)) {
updatePreemptionCounters(yarnAttemptID);
return true;
}
return false;
}
@Override
public void reportSuccessfulPreemption(TaskAttemptId taskAttemptID) {
// ignore
}
@Override
public TaskCheckpointID getCheckpointID(TaskId taskId) {
return checkpoints.get(taskId);
}
@Override
public void setCheckpointID(TaskId taskId, TaskCheckpointID cid) {
checkpoints.put(taskId, cid);
if (cid != null) {
updateCheckpointCounters(taskId, cid);
}
}
@SuppressWarnings({ "unchecked" })
private void updateCheckpointCounters(TaskId taskId, TaskCheckpointID cid) {
JobCounterUpdateEvent jce = new JobCounterUpdateEvent(taskId.getJobId());
jce.addCounterUpdate(JobCounter.CHECKPOINTS, 1);
eventHandler.handle(jce);
jce = new JobCounterUpdateEvent(taskId.getJobId());
jce.addCounterUpdate(JobCounter.CHECKPOINT_BYTES, cid.getCheckpointBytes());
eventHandler.handle(jce);
jce = new JobCounterUpdateEvent(taskId.getJobId());
jce.addCounterUpdate(JobCounter.CHECKPOINT_TIME, cid.getCheckpointTime());
eventHandler.handle(jce);
}
@SuppressWarnings({ "unchecked" })
private void updatePreemptionCounters(TaskAttemptId yarnAttemptID) {
if (!countedPreemptions.contains(yarnAttemptID)) {
countedPreemptions.add(yarnAttemptID);
JobCounterUpdateEvent jce = new JobCounterUpdateEvent(yarnAttemptID
.getTaskId().getJobId());
jce.addCounterUpdate(JobCounter.TASKS_REQ_PREEMPT, 1);
eventHandler.handle(jce);
}
}
}
| CheckpointAMPreemptionPolicy |
java | apache__camel | components/camel-weather/src/main/java/org/apache/camel/component/weather/geolocation/GeoLocationProvider.java | {
"start": 867,
"end": 961
} | interface ____ {
GeoLocation getCurrentGeoLocation() throws Exception;
}
| GeoLocationProvider |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SealedTypesWithSubtypesTest.java | {
"start": 2448,
"end": 2615
} | class ____ {
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.WRAPPER_ARRAY)
public java.util.Date value;
}
static | DateWrapper |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ClientFactory.java | {
"start": 1910,
"end": 2134
} | interface ____ ensure that a client
* implementing only the deprecated method will work.
* See https://github.com/apache/hbase-filesystem
*
*/
@InterfaceAudience.LimitedPrivate("HBoss")
@InterfaceStability.Evolving
public | to |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java | {
"start": 1777,
"end": 5227
} | class ____ extends SpanMultiTermQueryWrapper.SpanRewriteMethod {
private final int maxExpansions;
private final boolean hardLimit;
public SpanBooleanQueryRewriteWithMaxClause() {
this(IndexSearcher.getMaxClauseCount(), true);
}
public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) {
this.maxExpansions = maxExpansions;
this.hardLimit = hardLimit;
}
public int getMaxExpansions() {
return maxExpansions;
}
public boolean isHardLimit() {
return hardLimit;
}
@Override
public SpanQuery rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException {
final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() {
@Override
public Query rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException {
IndexReader reader = indexSearcher.getIndexReader();
Collection<SpanQuery> queries = collectTerms(reader, query);
if (queries.size() == 0) {
return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString());
} else if (queries.size() == 1) {
return queries.iterator().next();
} else {
return new SpanOrQuery(queries.toArray(new SpanQuery[0]));
}
}
private Collection<SpanQuery> collectTerms(IndexReader reader, MultiTermQuery query) throws IOException {
Set<SpanQuery> queries = new HashSet<>();
IndexReaderContext topReaderContext = reader.getContext();
for (LeafReaderContext context : topReaderContext.leaves()) {
final Terms terms = context.reader().terms(query.getField());
if (terms == null) {
// field does not exist
continue;
}
final TermsEnum termsEnum = getTermsEnum(query, terms, new AttributeSource());
assert termsEnum != null;
if (termsEnum == TermsEnum.EMPTY) {
continue;
}
BytesRef bytes;
while ((bytes = termsEnum.next()) != null) {
if (queries.size() >= maxExpansions) {
if (hardLimit) {
throw new ElasticsearchStatusException(
"["
+ query.toString()
+ " ] "
+ "exceeds maxClauseCount [ Boolean maxClauseCount is set to "
+ IndexSearcher.getMaxClauseCount()
+ "]",
RestStatus.BAD_REQUEST
);
} else {
return queries;
}
}
queries.add(new SpanTermQuery(new Term(query.getField(), bytes)));
}
}
return queries;
}
};
return (SpanQuery) delegate.rewrite(indexSearcher, query);
}
}
| SpanBooleanQueryRewriteWithMaxClause |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/TestInnerClass1.java | {
"start": 143,
"end": 377
} | class ____ extends TestCase {
public void test_inner() throws Exception {
VO vo = new VO();
String text = JSON.toJSONString(vo);
Assert.assertEquals("{\"value\":234}", text);
}
private | TestInnerClass1 |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java | {
"start": 1177,
"end": 2128
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}/_update"));
}
@Override
public String getName() {
return "ml_update_model_snapshot_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
XContentParser parser = restRequest.contentParser();
UpdateModelSnapshotAction.Request updateModelSnapshot = UpdateModelSnapshotAction.Request.parseRequest(
restRequest.param(Job.ID.getPreferredName()),
restRequest.param(SNAPSHOT_ID.getPreferredName()),
parser
);
return channel -> client.execute(UpdateModelSnapshotAction.INSTANCE, updateModelSnapshot, new RestToXContentListener<>(channel));
}
}
| RestUpdateModelSnapshotAction |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/collection/map/EntityMapKeyWithUniqueKeyEqualsHashCodeTest.java | {
"start": 2250,
"end": 2625
} | class ____ {
@Id
public Long id;
@Column(nullable = false, unique = true)
public String handle;
@Override
public boolean equals(Object o) {
return o instanceof KeyEntity oke
&& Objects.equals( handle, oke.handle );
}
@Override
public int hashCode() {
return Objects.hashCode( handle );
}
}
@Entity(name = "ValueEntity")
public static | KeyEntity |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/ModelVersionUtilsTest.java | {
"start": 1916,
"end": 2007
} | class ____ {
@Nested
@DisplayName("Model Version Detection")
| ModelVersionUtilsTest |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/jackson/UsernamePasswordAuthenticationTokenMixin.java | {
"start": 1001,
"end": 1534
} | class ____ a custom deserializer
* {@link UsernamePasswordAuthenticationTokenDeserializer}.
*
* @author Sebastien Deleuze
* @author Jitendra Singh
* @since 7.0
* @see CoreJacksonModule
* @see SecurityJacksonModules
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY, getterVisibility = JsonAutoDetect.Visibility.NONE,
isGetterVisibility = JsonAutoDetect.Visibility.NONE)
@JsonDeserialize(using = UsernamePasswordAuthenticationTokenDeserializer.class)
abstract | register |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/main/java/org/springframework/boot/docker/compose/core/DefaultDockerCompose.java | {
"start": 1213,
"end": 4660
} | class ____ implements DockerCompose {
private final DockerCli cli;
private final DockerHost hostname;
DefaultDockerCompose(DockerCli cli, @Nullable String host) {
this.cli = cli;
this.hostname = DockerHost.get(host, () -> cli.run(new DockerCliCommand.Context()));
}
@Override
public void up(LogLevel logLevel) {
up(logLevel, Collections.emptyList());
}
@Override
public void up(LogLevel logLevel, List<String> arguments) {
this.cli.run(new DockerCliCommand.ComposeUp(logLevel, arguments));
}
@Override
public void down(Duration timeout) {
down(timeout, Collections.emptyList());
}
@Override
public void down(Duration timeout, List<String> arguments) {
this.cli.run(new DockerCliCommand.ComposeDown(timeout, arguments));
}
@Override
public void start(LogLevel logLevel) {
start(logLevel, Collections.emptyList());
}
@Override
public void start(LogLevel logLevel, List<String> arguments) {
this.cli.run(new DockerCliCommand.ComposeStart(logLevel, arguments));
}
@Override
public void stop(Duration timeout) {
stop(timeout, Collections.emptyList());
}
@Override
public void stop(Duration timeout, List<String> arguments) {
this.cli.run(new DockerCliCommand.ComposeStop(timeout, arguments));
}
@Override
public boolean hasDefinedServices() {
return !this.cli.run(new DockerCliCommand.ComposeConfig()).services().isEmpty();
}
@Override
public List<RunningService> getRunningServices() {
List<DockerCliComposePsResponse> runningPsResponses = runComposePs().stream().filter(this::isRunning).toList();
if (runningPsResponses.isEmpty()) {
return Collections.emptyList();
}
DockerComposeFile dockerComposeFile = this.cli.getDockerComposeFile();
List<RunningService> result = new ArrayList<>();
Map<String, DockerCliInspectResponse> inspected = inspect(runningPsResponses);
for (DockerCliComposePsResponse psResponse : runningPsResponses) {
DockerCliInspectResponse inspectResponse = inspectContainer(psResponse.id(), inspected);
Assert.state(inspectResponse != null, () -> "Failed to inspect container '%s'".formatted(psResponse.id()));
result.add(new DefaultRunningService(this.hostname, dockerComposeFile, psResponse, inspectResponse));
}
return Collections.unmodifiableList(result);
}
private Map<String, DockerCliInspectResponse> inspect(List<DockerCliComposePsResponse> runningPsResponses) {
List<String> ids = runningPsResponses.stream().map(DockerCliComposePsResponse::id).toList();
List<DockerCliInspectResponse> inspectResponses = this.cli.run(new DockerCliCommand.Inspect(ids));
return inspectResponses.stream().collect(Collectors.toMap(DockerCliInspectResponse::id, Function.identity()));
}
private @Nullable DockerCliInspectResponse inspectContainer(String id,
Map<String, DockerCliInspectResponse> inspected) {
DockerCliInspectResponse inspect = inspected.get(id);
if (inspect != null) {
return inspect;
}
// Docker Compose v2.23.0 returns truncated ids, so we have to do a prefix match
for (Entry<String, DockerCliInspectResponse> entry : inspected.entrySet()) {
if (entry.getKey().startsWith(id)) {
return entry.getValue();
}
}
return null;
}
private List<DockerCliComposePsResponse> runComposePs() {
return this.cli.run(new DockerCliCommand.ComposePs());
}
private boolean isRunning(DockerCliComposePsResponse psResponse) {
return !"exited".equals(psResponse.state());
}
}
| DefaultDockerCompose |
java | elastic__elasticsearch | modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java | {
"start": 1653,
"end": 2654
} | class ____ for the owner of an augmented method. If the method is not augmented
* this should be {@code null}.
*/
public final String augmentedCanonicalClassName;
/** The method name used to look up the method reflection object. */
public final String methodName;
/**
* The canonical type name for the return type.
*/
public final String returnCanonicalTypeName;
/**
* A {@link List} of {@link String}s that are the canonical type names for the parameters of the
* method used to look up the method reflection object.
*/
public final List<String> canonicalTypeNameParameters;
/** The {@link Map} of annotations for this method. */
public final Map<Class<?>, Object> painlessAnnotations;
/**
* Standard constructor. All values must be not {@code null} with the exception of
* augmentedCanonicalClassName; augmentedCanonicalClassName will be {@code null} unless the method
* is augmented as described in the | name |
java | spring-projects__spring-boot | module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/actuate/web/servlet/AbstractEndpointRequestIntegrationTests.java | {
"start": 3039,
"end": 3143
} | class ____ {@link EndpointRequest} tests.
*
* @author Madhura Bhave
* @author Chris Bono
*/
abstract | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idgen/userdefined/ExportableValueGeneratorTest.java | {
"start": 2663,
"end": 2739
} | interface ____ {
String sequenceName();
}
public static | OnExecutionSequence |
java | quarkusio__quarkus | devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/dsl/CompilerOption.java | {
"start": 91,
"end": 516
} | class ____ {
private final String name;
private final List<String> opts = new ArrayList<>(0);
public CompilerOption(String name) {
this.name = name;
}
public CompilerOption args(List<String> options) {
opts.addAll(options);
return this;
}
public String getName() {
return name;
}
public List<String> getArgs() {
return opts;
}
}
| CompilerOption |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/verification/MockAwareVerificationMode.java | {
"start": 410,
"end": 1408
} | class ____ implements VerificationMode {
private final Object mock;
private final VerificationMode mode;
private final Set<VerificationListener> listeners;
public MockAwareVerificationMode(
Object mock, VerificationMode mode, Set<VerificationListener> listeners) {
this.mock = mock;
this.mode = mode;
this.listeners = listeners;
}
@Override
public void verify(VerificationData data) {
try {
mode.verify(data);
notifyListeners(new VerificationEventImpl(mock, mode, data, null));
} catch (RuntimeException | Error e) {
notifyListeners(new VerificationEventImpl(mock, mode, data, e));
throw e;
}
}
private void notifyListeners(VerificationEvent event) {
for (VerificationListener listener : listeners) {
listener.onVerification(event);
}
}
public Object getMock() {
return mock;
}
}
| MockAwareVerificationMode |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/creation/bytebuddy/InlineDelegateByteBuddyMockMaker.java | {
"start": 24032,
"end": 24621
} | enum ____ from a stubbed method call."),
generationFailed);
}
if (TypeSupport.INSTANCE.isSealed(typeToMock)
&& Modifier.isAbstract(typeToMock.getModifiers())) {
throw new MockitoException(
join(
"Mockito cannot mock this class: " + typeToMock + ".",
"Sealed interfaces or abstract classes can't be mocked. Interfaces cannot be instantiated and cannot be subclassed for mocking purposes.",
"Instead of mocking a sealed | literal |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/PropertyNamingStrategy.java | {
"start": 1499,
"end": 5158
} | class ____
implements java.io.Serializable
{
private static final long serialVersionUID = 2L;
/*
/**********************************************************
/* API
/**********************************************************
*/
/**
* Method called to find external name (name used in JSON) for given logical
* POJO property,
* as defined by given field.
*
* @param config Configuration in used: either <code>SerializationConfig</code>
* or <code>DeserializationConfig</code>, depending on whether method is called
* during serialization or deserialization
* @param field Field used to access property
* @param defaultName Default name that would be used for property in absence of custom strategy
*
* @return Logical name to use for property that the field represents
*/
public String nameForField(MapperConfig<?> config, AnnotatedField field,
String defaultName)
{
return defaultName;
}
/**
* Method called to find external name (name used in JSON) for given logical
* POJO property,
* as defined by given getter method; typically called when building a serializer.
* (but not always -- when using "getter-as-setter", may be called during
* deserialization)
*
* @param config Configuration in used: either <code>SerializationConfig</code>
* or <code>DeserializationConfig</code>, depending on whether method is called
* during serialization or deserialization
* @param method Method used to access property.
* @param defaultName Default name that would be used for property in absence of custom strategy
*
* @return Logical name to use for property that the method represents
*/
public String nameForGetterMethod(MapperConfig<?> config, AnnotatedMethod method,
String defaultName)
{
return defaultName;
}
/**
* Method called to find external name (name used in JSON) for given logical
* POJO property,
* as defined by given setter method; typically called when building a deserializer
* (but not necessarily only then).
*
* @param config Configuration in used: either <code>SerializationConfig</code>
* or <code>DeserializationConfig</code>, depending on whether method is called
* during serialization or deserialization
* @param method Method used to access property.
* @param defaultName Default name that would be used for property in absence of custom strategy
*
* @return Logical name to use for property that the method represents
*/
public String nameForSetterMethod(MapperConfig<?> config, AnnotatedMethod method,
String defaultName)
{
return defaultName;
}
/**
* Method called to find external name (name used in JSON) for given logical
* POJO property,
* as defined by given constructor parameter; typically called when building a deserializer
* (but not necessarily only then).
*
* @param config Configuration in used: either <code>SerializationConfig</code>
* or <code>DeserializationConfig</code>, depending on whether method is called
* during serialization or deserialization
* @param ctorParam Constructor parameter used to pass property.
* @param defaultName Default name that would be used for property in absence of custom strategy
*/
public String nameForConstructorParameter(MapperConfig<?> config, AnnotatedParameter ctorParam,
String defaultName)
{
return defaultName;
}
}
| PropertyNamingStrategy |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/jmx/export/MBeanExporter.java | {
"start": 37317,
"end": 43061
} | class ____ not resolvable
}
}
}
}
/**
* Indicates whether a particular bean name is present in the excluded beans list.
*/
private boolean isExcluded(String beanName) {
return (this.excludedBeans.contains(beanName) ||
(!beanName.isEmpty() && (beanName.charAt(0) == BeanFactory.FACTORY_BEAN_PREFIX_CHAR) &&
this.excludedBeans.contains(beanName.substring(1)))); // length of '&'
}
/**
* Return whether the specified bean definition should be considered as abstract.
*/
private boolean isBeanDefinitionAbstract(ListableBeanFactory beanFactory, String beanName) {
return (beanFactory instanceof ConfigurableListableBeanFactory clbf && beanFactory.containsBeanDefinition(beanName) &&
clbf.getBeanDefinition(beanName).isAbstract());
}
//---------------------------------------------------------------------
// Notification and listener management
//---------------------------------------------------------------------
/**
* If the supplied managed resource implements the {@link NotificationPublisherAware} an instance of
* {@link org.springframework.jmx.export.notification.NotificationPublisher} is injected.
*/
private void injectNotificationPublisherIfNecessary(
Object managedResource, @Nullable ModelMBean modelMBean, @Nullable ObjectName objectName) {
if (managedResource instanceof NotificationPublisherAware npa && modelMBean != null && objectName != null) {
npa.setNotificationPublisher(new ModelMBeanNotificationPublisher(modelMBean, objectName, managedResource));
}
}
/**
* Register the configured {@link NotificationListener NotificationListeners}
* with the {@link MBeanServer}.
*/
private void registerNotificationListeners() throws MBeanExportException {
if (this.notificationListeners != null) {
Assert.state(this.server != null, "No MBeanServer available");
for (NotificationListenerBean bean : this.notificationListeners) {
try {
ObjectName[] mappedObjectNames = bean.getResolvedObjectNames();
if (mappedObjectNames == null) {
// Mapped to all MBeans registered by the MBeanExporter.
mappedObjectNames = getRegisteredObjectNames();
}
if (this.registeredNotificationListeners.put(bean, mappedObjectNames) == null) {
for (ObjectName mappedObjectName : mappedObjectNames) {
this.server.addNotificationListener(mappedObjectName, bean.getNotificationListener(),
bean.getNotificationFilter(), bean.getHandback());
}
}
}
catch (Throwable ex) {
throw new MBeanExportException("Unable to register NotificationListener", ex);
}
}
}
}
/**
* Unregister the configured {@link NotificationListener NotificationListeners}
* from the {@link MBeanServer}.
*/
private void unregisterNotificationListeners() {
if (this.server != null) {
this.registeredNotificationListeners.forEach((bean, mappedObjectNames) -> {
for (ObjectName mappedObjectName : mappedObjectNames) {
try {
this.server.removeNotificationListener(mappedObjectName, bean.getNotificationListener(),
bean.getNotificationFilter(), bean.getHandback());
}
catch (Throwable ex) {
if (logger.isDebugEnabled()) {
logger.debug("Unable to unregister NotificationListener", ex);
}
}
}
});
}
this.registeredNotificationListeners.clear();
}
/**
* Called when an MBean is registered. Notifies all registered
* {@link MBeanExporterListener MBeanExporterListeners} of the registration event.
* <p>Please note that if an {@link MBeanExporterListener} throws a (runtime)
* exception when notified, this will essentially interrupt the notification process
* and any remaining listeners that have yet to be notified will not (obviously)
* receive the {@link MBeanExporterListener#mbeanRegistered(javax.management.ObjectName)}
* callback.
* @param objectName the {@code ObjectName} of the registered MBean
*/
@Override
protected void onRegister(ObjectName objectName) {
notifyListenersOfRegistration(objectName);
}
/**
* Called when an MBean is unregistered. Notifies all registered
* {@link MBeanExporterListener MBeanExporterListeners} of the unregistration event.
* <p>Please note that if an {@link MBeanExporterListener} throws a (runtime)
* exception when notified, this will essentially interrupt the notification process
* and any remaining listeners that have yet to be notified will not (obviously)
* receive the {@link MBeanExporterListener#mbeanUnregistered(javax.management.ObjectName)}
* callback.
* @param objectName the {@code ObjectName} of the unregistered MBean
*/
@Override
protected void onUnregister(ObjectName objectName) {
notifyListenersOfUnregistration(objectName);
}
/**
* Notifies all registered {@link MBeanExporterListener MBeanExporterListeners} of the
* registration of the MBean identified by the supplied {@link ObjectName}.
*/
private void notifyListenersOfRegistration(ObjectName objectName) {
if (this.listeners != null) {
for (MBeanExporterListener listener : this.listeners) {
listener.mbeanRegistered(objectName);
}
}
}
/**
* Notifies all registered {@link MBeanExporterListener MBeanExporterListeners} of the
* unregistration of the MBean identified by the supplied {@link ObjectName}.
*/
private void notifyListenersOfUnregistration(ObjectName objectName) {
if (this.listeners != null) {
for (MBeanExporterListener listener : this.listeners) {
listener.mbeanUnregistered(objectName);
}
}
}
//---------------------------------------------------------------------
// Inner classes for internal use
//---------------------------------------------------------------------
/**
* Internal callback | is |
java | netty__netty | transport-udt/src/test/java/io/netty/test/udt/nio/AbstractUdtTest.java | {
"start": 856,
"end": 1074
} | class ____ {
/**
* UDT test assumptions.
*/
@BeforeAll
public static void assumeConditions() {
assumeTrue(UnitHelp.canLoadAndInitClass("com.barchart.udt.SocketUDT"));
}
}
| AbstractUdtTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/property/access/spi/GetterFieldImplTest.java | {
"start": 422,
"end": 1041
} | class ____ {
@Test
public void testGet() {
Target target = new Target();
assertThat( getter( "active" ).get( target ) ).isEqualTo( true );
assertThat( getter( "children" ).get( target ) ).isEqualTo( (byte) 2 );
assertThat( getter( "gender" ).get( target ) ).isEqualTo( 'M' );
assertThat( getter( "code" ).get( target ) ).isEqualTo( Integer.MAX_VALUE );
assertThat( getter( "id" ).get( target ) ).isEqualTo( Long.MAX_VALUE );
assertThat( getter( "age" ).get( target ) ).isEqualTo( (short) 34 );
assertThat( getter( "name" ).get( target ) ).isEqualTo( "John Doe" );
}
private static | GetterFieldImplTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/DoubleComparator.java | {
"start": 1060,
"end": 2158
} | class ____ extends BasicTypeComparator<Double> {
private static final long serialVersionUID = 1L;
public DoubleComparator(boolean ascending) {
super(ascending);
}
@Override
public int compareSerialized(DataInputView firstSource, DataInputView secondSource)
throws IOException {
double l1 = firstSource.readDouble();
double l2 = secondSource.readDouble();
int comp = (l1 < l2 ? -1 : (l1 > l2 ? 1 : 0));
return ascendingComparison ? comp : -comp;
}
@Override
public boolean supportsNormalizedKey() {
return false;
}
@Override
public int getNormalizeKeyLen() {
return 0;
}
@Override
public boolean isNormalizedKeyPrefixOnly(int keyBytes) {
return true;
}
@Override
public void putNormalizedKey(Double value, MemorySegment target, int offset, int numBytes) {
throw new UnsupportedOperationException();
}
@Override
public DoubleComparator duplicate() {
return new DoubleComparator(ascendingComparison);
}
}
| DoubleComparator |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/EnrollmentTokenTests.java | {
"start": 675,
"end": 2769
} | class ____ extends ESTestCase {
public void testEnrollmentToken() throws Exception {
final EnrollmentToken enrollmentToken = createEnrollmentToken();
final String apiKey = enrollmentToken.getApiKey();
final String fingerprint = enrollmentToken.getFingerprint();
final String version = enrollmentToken.getVersion();
final List<String> boundAddresses = enrollmentToken.getBoundAddress();
final String jsonString = enrollmentToken.getRaw();
final String encoded = enrollmentToken.getEncoded();
final Map<String, String> enrollmentMap;
try (XContentParser parser = createParser(JsonXContent.jsonXContent, jsonString)) {
final Map<String, Object> info = parser.map();
assertNotEquals(info, null);
enrollmentMap = info.entrySet().stream().collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().toString()));
}
assertEquals(enrollmentMap.get("key"), apiKey);
assertEquals(enrollmentMap.get("fgr"), fingerprint);
assertEquals(enrollmentMap.get("ver"), version);
assertEquals(enrollmentMap.get("adr"), "[" + boundAddresses.stream().collect(Collectors.joining(", ")) + "]");
assertEquals(new String(Base64.getDecoder().decode(encoded), StandardCharsets.UTF_8), jsonString);
}
public void testDeserialization() throws Exception {
final EnrollmentToken enrollmentToken = createEnrollmentToken();
final EnrollmentToken deserialized = EnrollmentToken.decodeFromString(enrollmentToken.getEncoded());
assertThat(enrollmentToken, equalTo(deserialized));
}
private EnrollmentToken createEnrollmentToken() {
final String apiKey = randomAlphaOfLength(16);
final String fingerprint = randomAlphaOfLength(64);
final String version = randomAlphaOfLength(5);
final List<String> boundAddresses = Arrays.asList(generateRandomStringArray(4, randomIntBetween(2, 32), false));
return new EnrollmentToken(apiKey, fingerprint, version, boundAddresses);
}
}
| EnrollmentTokenTests |
java | junit-team__junit5 | platform-tooling-support-tests/src/main/java/platform/tooling/support/MavenRepo.java | {
"start": 568,
"end": 2155
} | class ____ {
private MavenRepo() {
}
public static Path dir() {
var candidates = Stream.of(Path.of("../build/repo"));
var candidate = candidates.filter(Files::isDirectory).findFirst().orElse(Path.of("build/repo"));
return Path.of(System.getProperty("maven.repo", candidate.toString()));
}
public static Path jar(String artifactId) {
return artifact(artifactId, fileName -> fileName.endsWith(".jar") //
&& !fileName.endsWith("-sources.jar") //
&& !fileName.endsWith("-javadoc.jar"));
}
public static Path gradleModuleMetadata(String artifactId) {
return artifact(artifactId, fileName -> fileName.endsWith(".module"));
}
public static Path pom(String artifactId) {
return artifact(artifactId, fileName -> fileName.endsWith(".pom"));
}
private static Path artifact(String artifactId, Predicate<String> fileNamePredicate) {
var parentDir = dir() //
.resolve(groupId(artifactId).replace('.', File.separatorChar)) //
.resolve(artifactId) //
.resolve(Helper.version());
try (var files = Files.list(parentDir)) {
return files.filter(
file -> fileNamePredicate.test(file.getFileName().toString())).findFirst().orElseThrow();
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static String groupId(String artifactId) {
if (artifactId.startsWith("junit-jupiter")) {
return "org.junit.jupiter";
}
if (artifactId.startsWith("junit-platform")) {
return "org.junit.platform";
}
if (artifactId.startsWith("junit-vintage")) {
return "org.junit.vintage";
}
return "org.junit";
}
}
| MavenRepo |
java | google__dagger | javatests/dagger/internal/codegen/MapBindingComponentProcessorTest.java | {
"start": 20870,
"end": 21296
} | interface ____ {",
" Map<String, String> dispatcher();",
"}");
CompilerTests.daggerCompiler(mapModuleFile, componentFile)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerTestComponent"));
});
}
}
| TestComponent |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/netty/NettyPartitionRequestClient.java | {
"start": 12859,
"end": 13328
} | class ____ extends ClientOutboundMessage {
private final int bufferSize;
private NewBufferSizeMessage(RemoteInputChannel inputChannel, int bufferSize) {
super(checkNotNull(inputChannel));
this.bufferSize = bufferSize;
}
@Override
Object buildMessage() {
return new NettyMessage.NewBufferSize(bufferSize, inputChannel.getInputChannelId());
}
}
private static | NewBufferSizeMessage |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/query/KeyQuery.java | {
"start": 1096,
"end": 2407
} | class ____<K, V> implements Query<V> {
private final K key;
private final boolean skipCache;
private KeyQuery(final K key, final boolean skipCache) {
this.key = key;
this.skipCache = skipCache;
}
/**
* Creates a query that will retrieve the record identified by {@code key} if it exists
* (or {@code null} otherwise).
* @param key The key to retrieve
* @param <K> The type of the key
* @param <V> The type of the value that will be retrieved
*/
public static <K, V> KeyQuery<K, V> withKey(final K key) {
Objects.requireNonNull(key, "the key should not be null");
return new KeyQuery<>(key, false);
}
/**
* Specifies that the cache should be skipped during query evaluation. This means, that the query will always
* get forwarded to the underlying store.
*/
public KeyQuery<K, V> skipCache() {
return new KeyQuery<>(key, true);
}
/**
* Return the key that was specified for this query.
*
* @return The key that was specified for this query.
*/
public K getKey() {
return key;
}
/**
* The flag whether to skip the cache or not during query evaluation.
*/
public boolean isSkipCache() {
return skipCache;
}
}
| KeyQuery |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/Preconditions.java | {
"start": 1724,
"end": 13126
} | class ____ {
// ------------------------------------------------------------------------
// Null checks
// ------------------------------------------------------------------------
/**
* Ensures that the given object reference is not null. Upon violation, a {@code
* NullPointerException} with no message is thrown.
*
* @param reference The object reference
* @return The object reference itself (generically typed).
* @throws NullPointerException Thrown, if the passed reference was null.
*/
public static <T> T checkNotNull(@Nullable T reference) {
if (reference == null) {
throw new NullPointerException();
}
return reference;
}
/**
* Ensures that the given object reference is not null. Upon violation, a {@code
* NullPointerException} with the given message is thrown.
*
* @param reference The object reference
* @param errorMessage The message for the {@code NullPointerException} that is thrown if the
* check fails.
* @return The object reference itself (generically typed).
* @throws NullPointerException Thrown, if the passed reference was null.
*/
public static <T> T checkNotNull(@Nullable T reference, @Nullable String errorMessage) {
if (reference == null) {
throw new NullPointerException(String.valueOf(errorMessage));
}
return reference;
}
/**
* Ensures that the given object reference is not null. Upon violation, a {@code
* NullPointerException} with the given message is thrown.
*
* <p>The error message is constructed from a template and an arguments array, after a similar
* fashion as {@link String#format(String, Object...)}, but supporting only {@code %s} as a
* placeholder.
*
* @param reference The object reference
* @param errorMessageTemplate The message template for the {@code NullPointerException} that is
* thrown if the check fails. The template substitutes its {@code %s} placeholders with the
* error message arguments.
* @param errorMessageArgs The arguments for the error message, to be inserted into the message
* template for the {@code %s} placeholders.
* @return The object reference itself (generically typed).
* @throws NullPointerException Thrown, if the passed reference was null.
*/
public static <T> T checkNotNull(
T reference,
@Nullable String errorMessageTemplate,
@Nullable Object... errorMessageArgs) {
if (reference == null) {
throw new NullPointerException(format(errorMessageTemplate, errorMessageArgs));
}
return reference;
}
// ------------------------------------------------------------------------
// Boolean Condition Checking (Argument)
// ------------------------------------------------------------------------
/**
* Checks the given boolean condition, and throws an {@code IllegalArgumentException} if the
* condition is not met (evaluates to {@code false}).
*
* @param condition The condition to check
* @throws IllegalArgumentException Thrown, if the condition is violated.
*/
public static void checkArgument(boolean condition) {
if (!condition) {
throw new IllegalArgumentException();
}
}
/**
* Checks the given boolean condition, and throws an {@code IllegalArgumentException} if the
* condition is not met (evaluates to {@code false}). The exception will have the given error
* message.
*
* @param condition The condition to check
* @param errorMessage The message for the {@code IllegalArgumentException} that is thrown if
* the check fails.
* @throws IllegalArgumentException Thrown, if the condition is violated.
*/
public static void checkArgument(boolean condition, @Nullable Object errorMessage) {
if (!condition) {
throw new IllegalArgumentException(String.valueOf(errorMessage));
}
}
/**
* Checks the given boolean condition, and throws an {@code IllegalArgumentException} if the
* condition is not met (evaluates to {@code false}).
*
* @param condition The condition to check
* @param errorMessageTemplate The message template for the {@code IllegalArgumentException}
* that is thrown if the check fails. The template substitutes its {@code %s} placeholders
* with the error message arguments.
* @param errorMessageArgs The arguments for the error message, to be inserted into the message
* template for the {@code %s} placeholders.
* @throws IllegalArgumentException Thrown, if the condition is violated.
*/
public static void checkArgument(
boolean condition,
@Nullable String errorMessageTemplate,
@Nullable Object... errorMessageArgs) {
if (!condition) {
throw new IllegalArgumentException(format(errorMessageTemplate, errorMessageArgs));
}
}
// ------------------------------------------------------------------------
// Boolean Condition Checking (State)
// ------------------------------------------------------------------------
/**
* Checks the given boolean condition, and throws an {@code IllegalStateException} if the
* condition is not met (evaluates to {@code false}).
*
* @param condition The condition to check
* @throws IllegalStateException Thrown, if the condition is violated.
*/
public static void checkState(boolean condition) {
if (!condition) {
throw new IllegalStateException();
}
}
/**
* Checks the given boolean condition, and throws an {@code IllegalStateException} if the
* condition is not met (evaluates to {@code false}). The exception will have the given error
* message.
*
* @param condition The condition to check
* @param errorMessage The message for the {@code IllegalStateException} that is thrown if the
* check fails.
* @throws IllegalStateException Thrown, if the condition is violated.
*/
public static void checkState(boolean condition, @Nullable Object errorMessage) {
if (!condition) {
throw new IllegalStateException(String.valueOf(errorMessage));
}
}
/**
* Checks the given boolean condition, and throws an {@code IllegalStateException} if the
* condition is not met (evaluates to {@code false}).
*
* @param condition The condition to check
* @param errorMessageTemplate The message template for the {@code IllegalStateException} that
* is thrown if the check fails. The template substitutes its {@code %s} placeholders with
* the error message arguments.
* @param errorMessageArgs The arguments for the error message, to be inserted into the message
* template for the {@code %s} placeholders.
* @throws IllegalStateException Thrown, if the condition is violated.
*/
public static void checkState(
boolean condition,
@Nullable String errorMessageTemplate,
@Nullable Object... errorMessageArgs) {
if (!condition) {
throw new IllegalStateException(format(errorMessageTemplate, errorMessageArgs));
}
}
/**
* Ensures that the given index is valid for an array, list or string of the given size.
*
* @param index index to check
* @param size size of the array, list or string
* @throws IllegalArgumentException Thrown, if size is negative.
* @throws IndexOutOfBoundsException Thrown, if the index negative or greater than or equal to
* size
*/
public static void checkElementIndex(int index, int size) {
checkArgument(size >= 0, "Size was negative.");
if (index < 0 || index >= size) {
throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size);
}
}
/**
* Ensures that the given index is valid for an array, list or string of the given size.
*
* @param index index to check
* @param size size of the array, list or string
* @param errorMessage The message for the {@code IndexOutOfBoundsException} that is thrown if
* the check fails.
* @throws IllegalArgumentException Thrown, if size is negative.
* @throws IndexOutOfBoundsException Thrown, if the index negative or greater than or equal to
* size
*/
public static void checkElementIndex(int index, int size, @Nullable String errorMessage) {
checkArgument(size >= 0, "Size was negative.");
if (index < 0 || index >= size) {
throw new IndexOutOfBoundsException(
String.valueOf(errorMessage) + " Index: " + index + ", Size: " + size);
}
}
/**
* Ensures that future has completed normally.
*
* @throws IllegalStateException Thrown, if future has not completed or it has completed
* exceptionally.
*/
public static void checkCompletedNormally(CompletableFuture<?> future) {
checkState(future.isDone());
if (future.isCompletedExceptionally()) {
try {
future.get();
} catch (InterruptedException | ExecutionException e) {
throw new IllegalStateException(e);
}
}
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
/**
* A simplified formatting method. Similar to {@link String#format(String, Object...)}, but with
* lower overhead (only String parameters, no locale, no format validation).
*
* <p>This method is taken quasi verbatim from the Guava Preconditions class.
*/
private static String format(@Nullable String template, @Nullable Object... args) {
final int numArgs = args == null ? 0 : args.length;
template = String.valueOf(template); // null -> "null"
// start substituting the arguments into the '%s' placeholders
StringBuilder builder = new StringBuilder(template.length() + 16 * numArgs);
int templateStart = 0;
int i = 0;
while (i < numArgs) {
int placeholderStart = template.indexOf("%s", templateStart);
if (placeholderStart == -1) {
break;
}
builder.append(template.substring(templateStart, placeholderStart));
builder.append(args[i++]);
templateStart = placeholderStart + 2;
}
builder.append(template.substring(templateStart));
// if we run out of placeholders, append the extra args in square braces
if (i < numArgs) {
builder.append(" [");
builder.append(args[i++]);
while (i < numArgs) {
builder.append(", ");
builder.append(args[i++]);
}
builder.append(']');
}
return builder.toString();
}
// ------------------------------------------------------------------------
    /** Private constructor to prevent instantiation of this static utility class. */
    private Preconditions() {}
}
| Preconditions |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/TriggerCheckpointFunction.java | {
"start": 1041,
"end": 1169
} | interface ____ functions triggering checkpoints. Currently only serves to shorten
* signatures.
*/
@FunctionalInterface
public | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schematools/PrimaryKeyColumnOrderTest.java | {
"start": 2844,
"end": 6666
} | class ____ extends BaseSessionFactoryFunctionalTest {
@BeforeEach
public void setUp(SessionFactoryScope scope) {
//noinspection deprecation
scope.inTransaction(session ->
session.createNativeQuery(
"create table TEST_ENTITY ( Z int , A int NOT NULL , B int NOT NULL , CONSTRAINT PK_TEST_ENTITY PRIMARY KEY ( B, A ))" )
.executeUpdate()
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.dropData();
}
@Test
public void getPrimaryKey() throws Exception {
StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting(
AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY,
JdbcMetadataAccessStrategy.GROUPED
)
.build();
DdlTransactionIsolator ddlTransactionIsolator = null;
ExtractionContextImpl extractionContext = null;
try {
ddlTransactionIsolator = buildDdlTransactionIsolator( ssr );
extractionContext = buildContext( ssr, ddlTransactionIsolator );
TableInformation table = buildInformationExtractor( extractionContext ).getTable(
null,
null,
new Identifier( "TEST_ENTITY", false )
);
PrimaryKeyInformation primaryKey = table.getPrimaryKey();
assertThat( primaryKey ).isNotNull();
List<String> pkColumnNames = new ArrayList<>();
primaryKey.getColumns().forEach( columnInformation -> {
pkColumnNames.add( columnInformation.getColumnIdentifier()
.getCanonicalName()
.toLowerCase( Locale.ROOT ) );
} );
assertThat( pkColumnNames.size() ).isEqualTo( 2 );
assertTrue( pkColumnNames.contains( "a" ) );
assertTrue( pkColumnNames.contains( "b" ) );
}
finally {
if ( extractionContext != null ) {
extractionContext.cleanup();
}
if ( ddlTransactionIsolator != null ) {
ddlTransactionIsolator.release();
}
StandardServiceRegistryBuilder.destroy( ssr );
}
}
private InformationExtractor buildInformationExtractor(ExtractionContextImpl extractionContext) throws Exception {
ExtractionTool extractionTool = new HibernateSchemaManagementTool().getExtractionTool();
return extractionTool.createInformationExtractor( extractionContext );
}
private static ExtractionContextImpl buildContext(
StandardServiceRegistry ssr,
DdlTransactionIsolator ddlTransactionIsolator) throws Exception {
Database database = new MetadataSources( ssr ).buildMetadata().getDatabase();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.forTests( database.getJdbcEnvironment() );
DatabaseInformation dbInfo = buildDatabaseInformation(
ssr,
database,
sqlStringGenerationContext,
ddlTransactionIsolator
);
return new ExtractionContextImpl(
ssr,
database.getJdbcEnvironment(),
sqlStringGenerationContext,
ssr.getService( JdbcServices.class ).getBootstrapJdbcConnectionAccess(),
(ExtractionContext.DatabaseObjectAccess) dbInfo
);
}
private static DatabaseInformationImpl buildDatabaseInformation(
StandardServiceRegistry ssr,
Database database,
SqlStringGenerationContext sqlStringGenerationContext,
DdlTransactionIsolator ddlTransactionIsolator) throws Exception {
return new DatabaseInformationImpl(
ssr,
database.getJdbcEnvironment(),
sqlStringGenerationContext,
ddlTransactionIsolator,
database.getServiceRegistry().getService( SchemaManagementTool.class )
);
}
private static DdlTransactionIsolator buildDdlTransactionIsolator(StandardServiceRegistry ssr) {
final ConnectionProvider connectionProvider = ssr.getService( ConnectionProvider.class );
return new DdlTransactionIsolatorTestingImpl(
ssr,
new JdbcEnvironmentInitiator.ConnectionProviderJdbcConnectionAccess( connectionProvider )
);
}
@Entity
@Table(name = "TEST_ENTITY")
public static | PrimaryKeyColumnOrderTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/webmonitor/threadinfo/VertexThreadInfoTracker.java | {
"start": 15925,
"end": 16923
} | class ____ {
private final JobID jobId;
private final JobVertexID jobVertexId;
private JobVertexKey(JobID jobId, JobVertexID jobVertexId) {
this.jobId = jobId;
this.jobVertexId = jobVertexId;
}
private ExecutionVertexKey toExecutionVertexKey(int subtaskIndex) {
return new ExecutionVertexKey(jobId, jobVertexId, subtaskIndex);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
JobVertexKey jobVertexKey = (JobVertexKey) o;
return Objects.equals(jobId, jobVertexKey.jobId)
&& Objects.equals(jobVertexId, jobVertexKey.jobVertexId);
}
@Override
public int hashCode() {
return Objects.hash(jobId, jobVertexId);
}
}
static | JobVertexKey |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/inheritance/joined/relation/PolymorphicCollection.java | {
"start": 795,
"end": 2772
} | class ____ {
private Integer ed_id1;
private Integer c_id;
private Integer p_id;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
ed_id1 = 1;
p_id = 10;
c_id = 100;
// Rev 1
scope.inTransaction( em -> {
ReferencedEntity re = new ReferencedEntity( ed_id1 );
em.persist( re );
} );
// Rev 2
scope.inTransaction( em -> {
ReferencedEntity re = em.find( ReferencedEntity.class, ed_id1 );
ParentIngEntity pie = new ParentIngEntity( p_id, "x" );
pie.setReferenced( re );
em.persist( pie );
p_id = pie.getId();
} );
// Rev 3
scope.inTransaction( em -> {
ReferencedEntity re = em.find( ReferencedEntity.class, ed_id1 );
ChildIngEntity cie = new ChildIngEntity( c_id, "y", 1l );
cie.setReferenced( re );
em.persist( cie );
c_id = cie.getId();
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2, 3 ), auditReader.getRevisions( ReferencedEntity.class, ed_id1 ) );
assertEquals( Arrays.asList( 2 ), auditReader.getRevisions( ParentIngEntity.class, p_id ) );
assertEquals( Arrays.asList( 3 ), auditReader.getRevisions( ChildIngEntity.class, c_id ) );
} );
}
@Test
public void testHistoryOfReferencedCollection(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( 0, auditReader.find( ReferencedEntity.class, ed_id1, 1 ).getReferencing().size() );
assertEquals(
TestTools.makeSet( new ParentIngEntity( p_id, "x" ) ),
auditReader.find( ReferencedEntity.class, ed_id1, 2 ).getReferencing()
);
assertEquals(
TestTools.makeSet( new ParentIngEntity( p_id, "x" ), new ChildIngEntity( c_id, "y", 1l ) ),
auditReader.find( ReferencedEntity.class, ed_id1, 3 ).getReferencing()
);
} );
}
}
| PolymorphicCollection |
java | apache__camel | components/camel-spring-parent/camel-spring-ai/camel-spring-ai-tools/src/main/java/org/apache/camel/component/springai/tools/SpringAiToolsEndpoint.java | {
"start": 3403,
"end": 14261
} | class ____ the tool")
private Class<?> inputType;
@Metadata(label = "consumer")
@UriParam(description = "Whether the tool result should be returned directly or passed back to the model. "
+ "Default is false, meaning the result is passed back to the model for further processing.",
defaultValue = "false")
private boolean returnDirect;
public SpringAiToolsEndpoint(String uri, SpringAiToolsComponent component, String toolId, String tags,
SpringAiToolsConfiguration configuration) {
super(uri, component);
this.toolId = toolId;
this.tags = tags;
this.configuration = configuration;
}
@Override
public Producer createProducer() throws Exception {
throw new UnsupportedOperationException(
"The spring-ai-tools component does not support producer mode. "
+ "Use the spring-ai-chat component with tags parameter to invoke tools. "
+ "Example: spring-ai-chat:chat?tags=weather&chatClient=#chatClient");
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
if (description == null) {
throw new IllegalArgumentException(
"In order to use the spring-ai-tools component as a consumer, you need to specify at least description");
}
final String toolName;
if (name != null) {
toolName = name;
} else {
toolName = toolId;
}
final SpringAiToolsConsumer springAiToolsConsumer = new SpringAiToolsConsumer(this, processor);
configureConsumer(springAiToolsConsumer);
// Create a function that executes the Camel route
java.util.function.Function<java.util.Map<String, Object>, String> function = args -> {
try {
org.apache.camel.Exchange exchange = createExchange();
// Set arguments as headers
for (java.util.Map.Entry<String, Object> entry : args.entrySet()) {
exchange.getMessage().setHeader(entry.getKey(), entry.getValue());
}
// Execute the consumer route
springAiToolsConsumer.getProcessor().process(exchange);
// Return the result
return exchange.getIn().getBody(String.class);
} catch (Exception e) {
throw new RuntimeException("Error executing tool", e);
}
};
// Build the tool callback using FunctionToolCallback
FunctionToolCallback.Builder builder = FunctionToolCallback.builder(toolName, function)
.description(description);
if (inputType != null) {
builder.inputType(inputType);
} else if (parameters != null && !parameters.isEmpty()) {
// Build JSON schema from parameters map
String inputSchema = buildJsonSchemaFromParameters(parameters);
builder.inputSchema(inputSchema);
builder.inputType(java.util.Map.class);
} else {
builder.inputType(java.util.Map.class);
}
// Configure tool metadata
if (returnDirect) {
builder.toolMetadata(ToolMetadata.builder().returnDirect(true).build());
}
ToolCallback toolCallback = builder.build();
CamelToolSpecification camelToolSpecification
= new CamelToolSpecification(toolCallback, springAiToolsConsumer);
final CamelToolExecutorCache executorCache = CamelToolExecutorCache.getInstance();
String[] splitTags = TagsHelper.splitTags(tags);
for (String tag : splitTags) {
executorCache.put(tag, camelToolSpecification);
}
return camelToolSpecification.getConsumer();
}
/**
* A freely named tool ID (prefer to use something unique)
*
* @return
*/
public String getToolId() {
return toolId;
}
/**
* The tool configuration
*
* @return
*/
public SpringAiToolsConfiguration getConfiguration() {
return configuration;
}
/**
* A description of the tool. This is passed to the LLM, so it should be descriptive of the tool capabilities
*
* @return
*/
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
/**
* The tool name. This is passed to the LLM, so it should conform to any LLM restrictions.
*
* @return
*/
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
/**
* The input parameters for the tool
*
* @return
*/
public Map<String, String> getParameters() {
return parameters;
}
public void setParameters(Map<String, String> parameters) {
this.parameters = parameters;
}
public void setTags(String tags) {
this.tags = tags;
}
/**
* The tags associated with the tool
*
* @return
*/
public String getTags() {
return tags;
}
public Class<?> getInputType() {
return inputType;
}
public void setInputType(Class<?> inputType) {
this.inputType = inputType;
}
/**
* Whether the tool result should be returned directly or passed back to the model
*
* @return
*/
public boolean isReturnDirect() {
return returnDirect;
}
public void setReturnDirect(boolean returnDirect) {
this.returnDirect = returnDirect;
}
@Override
protected void doStop() throws Exception {
super.doStop();
CamelToolExecutorCache.getInstance().getTools().clear();
}
/**
* Builds a JSON schema from the parameters map in the format expected by openai function call API
* https://platform.openai.com/docs/guides/function-calling.
* <p>
* Supports the following parameter formats:
* <ul>
* <li>parameter.<name>=<type> - defines the parameter type (e.g., parameter.location=string)</li>
* <li>parameter.<name>.description=<text> - defines the parameter description</li>
* <li>parameter.<name>.required=<true|false> - defines if parameter is required (default: false)</li>
* <li>parameter.<name>.enum=<value1,value2,...> - defines allowed values (comma-separated)</li>
* </ul>
*
* @param parameters the map of parameter configurations
* @return JSON schema string conforming to JSON Schema Draft 2020-12
*/
private String buildJsonSchemaFromParameters(Map<String, String> parameters) {
try {
// Parse parameters into structured format
Map<String, ParameterMetadata> paramMetadata = parseParameterMetadata(parameters);
// Build JSON schema
ObjectNode schema = OBJECT_MAPPER.createObjectNode();
schema.put("type", "object");
ObjectNode properties = schema.putObject("properties");
List<String> requiredParams = new ArrayList<>();
for (Map.Entry<String, ParameterMetadata> entry : paramMetadata.entrySet()) {
String paramName = entry.getKey();
ParameterMetadata metadata = entry.getValue();
ObjectNode property = properties.putObject(paramName);
property.put("type", mapTypeToJsonSchemaType(metadata.type));
if (metadata.description != null) {
property.put("description", metadata.description);
}
if (metadata.enumValues != null && !metadata.enumValues.isEmpty()) {
ArrayNode enumArray = property.putArray("enum");
for (String enumValue : metadata.enumValues) {
enumArray.add(enumValue.trim());
}
}
if (metadata.required) {
requiredParams.add(paramName);
}
}
// Add required array if there are required parameters
if (!requiredParams.isEmpty()) {
ArrayNode requiredArray = schema.putArray("required");
for (String requiredParam : requiredParams) {
requiredArray.add(requiredParam);
}
}
return OBJECT_MAPPER.writeValueAsString(schema);
} catch (Exception e) {
throw new RuntimeException("Error building JSON schema from parameters", e);
}
}
/**
* Parses the flat parameter map into structured metadata.
* <p>
* Handles parameter configurations like:
* <ul>
* <li>parameter.location=string</li>
* <li>parameter.location.description=The city and state</li>
* <li>parameter.location.required=true</li>
* <li>parameter.location.enum=C,F</li>
* </ul>
*
* @param parameters the flat parameter map
* @return map of parameter names to their metadata
*/
private Map<String, ParameterMetadata> parseParameterMetadata(Map<String, String> parameters) {
Map<String, ParameterMetadata> metadata = new HashMap<>();
for (Map.Entry<String, String> entry : parameters.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
if (key.contains(".")) {
// Handle nested properties like "location.description"
String[] parts = key.split("\\.", 2);
String paramName = parts[0];
String propertyName = parts[1];
ParameterMetadata meta = metadata.computeIfAbsent(paramName, k -> new ParameterMetadata());
switch (propertyName) {
case "description":
meta.description = value;
break;
case "required":
meta.required = Boolean.parseBoolean(value);
break;
case "enum":
meta.enumValues = List.of(value.split(","));
break;
default:
// Ignore unknown properties
break;
}
} else {
// Handle direct parameter type like "location=string"
ParameterMetadata meta = metadata.computeIfAbsent(key, k -> new ParameterMetadata());
meta.type = value;
}
}
return metadata;
}
/**
* Internal | for |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/headers/UserAgentProgrammaticTest.java | {
"start": 532,
"end": 1503
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(Resource.class, Client.class))
.overrideRuntimeConfigKey("quarkus.rest-client.user-agent", "from-config");
@TestHTTPResource
URI baseUri;
@Test
void test() {
Client client = QuarkusRestClientBuilder.newBuilder().baseUri(baseUri).userAgent("programmatic").build(Client.class);
assertThat(client.call()).isEqualTo("programmatic");
Client client2 = QuarkusRestClientBuilder.newBuilder().baseUri(baseUri).userAgent("programmatic2").build(Client.class);
assertThat(client2.call()).isEqualTo("programmatic2");
Client client3 = QuarkusRestClientBuilder.newBuilder().baseUri(baseUri).build(Client.class);
assertThat(client3.call()).isEqualTo("from-config");
}
@Path("/")
@ApplicationScoped
public static | UserAgentProgrammaticTest |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/console/command/ThemeTests.java | {
"start": 607,
"end": 1114
} | class ____ {
@Test
void givenUtf8ShouldReturnUnicode() {
assertEquals(Theme.UNICODE, Theme.valueOf(StandardCharsets.UTF_8));
}
@Test
void givenAnythingElseShouldReturnAscii() {
assertAll("All character sets that are not UTF-8 should return Theme.ASCII", () -> {
assertEquals(Theme.ASCII, Theme.valueOf(StandardCharsets.ISO_8859_1));
assertEquals(Theme.ASCII, Theme.valueOf(StandardCharsets.US_ASCII));
assertEquals(Theme.ASCII, Theme.valueOf(StandardCharsets.UTF_16));
});
}
}
| ThemeTests |
java | apache__camel | components/camel-tika/src/generated/java/org/apache/camel/component/tika/TikaEndpointConfigurer.java | {
"start": 731,
"end": 3703
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
TikaEndpoint target = (TikaEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "tikaconfig":
case "tikaConfig": target.getTikaConfiguration().setTikaConfig(property(camelContext, org.apache.tika.config.TikaConfig.class, value)); return true;
case "tikaconfiguri":
case "tikaConfigUri": target.getTikaConfiguration().setTikaConfigUri(property(camelContext, java.lang.String.class, value)); return true;
case "tikaparseoutputencoding":
case "tikaParseOutputEncoding": target.getTikaConfiguration().setTikaParseOutputEncoding(property(camelContext, java.lang.String.class, value)); return true;
case "tikaparseoutputformat":
case "tikaParseOutputFormat": target.getTikaConfiguration().setTikaParseOutputFormat(property(camelContext, org.apache.camel.component.tika.TikaParseOutputFormat.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "tikaconfig":
case "tikaConfig": return org.apache.tika.config.TikaConfig.class;
case "tikaconfiguri":
case "tikaConfigUri": return java.lang.String.class;
case "tikaparseoutputencoding":
case "tikaParseOutputEncoding": return java.lang.String.class;
case "tikaparseoutputformat":
case "tikaParseOutputFormat": return org.apache.camel.component.tika.TikaParseOutputFormat.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
TikaEndpoint target = (TikaEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "tikaconfig":
case "tikaConfig": return target.getTikaConfiguration().getTikaConfig();
case "tikaconfiguri":
case "tikaConfigUri": return target.getTikaConfiguration().getTikaConfigUri();
case "tikaparseoutputencoding":
case "tikaParseOutputEncoding": return target.getTikaConfiguration().getTikaParseOutputEncoding();
case "tikaparseoutputformat":
case "tikaParseOutputFormat": return target.getTikaConfiguration().getTikaParseOutputFormat();
default: return null;
}
}
}
| TikaEndpointConfigurer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassNamedLikeTypeParameterTest.java | {
"start": 878,
"end": 1213
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(ClassNamedLikeTypeParameter.class, getClass());
@Test
public void positiveCases() {
compilationHelper
.addSourceLines(
"Test.java",
"""
public | ClassNamedLikeTypeParameterTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java | {
"start": 15413,
"end": 18459
} | class ____ implements AllReader {
@Override
public Block read(BlockFactory factory, Docs docs, int offset, boolean nullsFiltered) throws IOException {
return factory.constantNulls(docs.count() - offset);
}
@Override
public void read(int docId, StoredFields storedFields, Builder builder) throws IOException {
builder.appendNull();
}
@Override
public boolean canReuse(int startingDocID) {
return true;
}
@Override
public String toString() {
return "constant_nulls";
}
}
/**
* Load blocks with only {@code value}.
*/
static BlockLoader constantBytes(BytesRef value) {
return new BlockLoader() {
@Override
public Builder builder(BlockFactory factory, int expectedCount) {
return factory.bytesRefs(expectedCount);
}
@Override
public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) {
return new ColumnAtATimeReader() {
@Override
public Block read(BlockFactory factory, Docs docs, int offset, boolean nullsFiltered) {
return factory.constantBytes(value, docs.count() - offset);
}
@Override
public boolean canReuse(int startingDocID) {
return true;
}
@Override
public String toString() {
return "constant[" + value + "]";
}
};
}
@Override
public RowStrideReader rowStrideReader(LeafReaderContext context) {
return new RowStrideReader() {
@Override
public void read(int docId, StoredFields storedFields, Builder builder) {
((BlockLoader.BytesRefBuilder) builder).appendBytesRef(value);
}
@Override
public boolean canReuse(int startingDocID) {
return true;
}
@Override
public String toString() {
return "constant[" + value + "]";
}
};
}
@Override
public StoredFieldsSpec rowStrideStoredFieldSpec() {
return StoredFieldsSpec.NO_REQUIREMENTS;
}
@Override
public boolean supportsOrdinals() {
return false;
}
@Override
public SortedSetDocValues ordinals(LeafReaderContext context) {
throw new UnsupportedOperationException();
}
@Override
public String toString() {
return "ConstantBytes[" + value + "]";
}
};
}
abstract | ConstantNullsReader |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/validation/MessageSourceMessageInterpolator.java | {
"start": 1166,
"end": 4299
} | class ____ implements MessageInterpolator {
private static final String DEFAULT_MESSAGE = MessageSourceMessageInterpolator.class.getName();
private static final char PREFIX = '{';
private static final char SUFFIX = '}';
private static final char ESCAPE = '\\';
private final MessageSource messageSource;
private final MessageInterpolator messageInterpolator;
MessageSourceMessageInterpolator(MessageSource messageSource, MessageInterpolator messageInterpolator) {
this.messageSource = messageSource;
this.messageInterpolator = messageInterpolator;
}
@Override
public String interpolate(String messageTemplate, Context context) {
return interpolate(messageTemplate, context, LocaleContextHolder.getLocale());
}
@Override
public String interpolate(String messageTemplate, Context context, Locale locale) {
String message = replaceParameters(messageTemplate, locale);
return this.messageInterpolator.interpolate(message, context, locale);
}
/**
* Recursively replaces all message parameters.
* <p>
* The message parameter prefix <code>{</code> and suffix <code>}</code> can
* be escaped using {@code \}, e.g. <code>\{escaped\}</code>.
* @param message the message containing the parameters to be replaced
* @param locale the locale to use when resolving replacements
* @return the message with parameters replaced
*/
private String replaceParameters(String message, Locale locale) {
return replaceParameters(message, locale, new LinkedHashSet<>(4));
}
private String replaceParameters(String message, Locale locale, Set<String> visitedParameters) {
StringBuilder buf = new StringBuilder(message);
int parentheses = 0;
int startIndex = -1;
int endIndex = -1;
for (int i = 0; i < buf.length(); i++) {
if (buf.charAt(i) == ESCAPE) {
i++;
}
else if (buf.charAt(i) == PREFIX) {
if (startIndex == -1) {
startIndex = i;
}
parentheses++;
}
else if (buf.charAt(i) == SUFFIX) {
if (parentheses > 0) {
parentheses--;
}
endIndex = i;
}
if (parentheses == 0 && startIndex < endIndex) {
String parameter = buf.substring(startIndex + 1, endIndex);
if (!visitedParameters.add(parameter)) {
throw new IllegalArgumentException("Circular reference '{" + String.join(" -> ", visitedParameters)
+ " -> " + parameter + "}'");
}
String value = replaceParameter(parameter, locale, visitedParameters);
if (value != null) {
buf.replace(startIndex, endIndex + 1, value);
i = startIndex + value.length() - 1;
}
visitedParameters.remove(parameter);
startIndex = -1;
endIndex = -1;
}
}
return buf.toString();
}
private @Nullable String replaceParameter(String parameter, Locale locale, Set<String> visitedParameters) {
parameter = replaceParameters(parameter, locale, visitedParameters);
String value = this.messageSource.getMessage(parameter, null, DEFAULT_MESSAGE, locale);
if (value == null || value.equals(DEFAULT_MESSAGE)) {
return null;
}
return replaceParameters(value, locale, visitedParameters);
}
}
| MessageSourceMessageInterpolator |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/reader/TimelinePutResponseReader.java | {
"start": 1582,
"end": 2269
} | class ____ implements MessageBodyReader<TimelinePutResponse> {
private ObjectMapper objectMapper = new ObjectMapper();
@Override
public boolean isReadable(Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType) {
return type == TimelinePutResponse.class;
}
@Override
public TimelinePutResponse readFrom(Class<TimelinePutResponse> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, String> httpHeaders,
InputStream entityStream) throws IOException, WebApplicationException {
return objectMapper.readValue(entityStream, TimelinePutResponse.class);
}
}
| TimelinePutResponseReader |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 92789,
"end": 92999
} | class ____ extends ScalarFunction {
public Long eval(int[] i) {
return null;
}
}
@FunctionHint(accumulator = @DataTypeHint("INT"))
private static | InvalidMethodScalarFunction |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/test/java/org/apache/dubbo/remoting/transport/CodecSupportTest.java | {
"start": 1220,
"end": 2038
} | class ____ {
@Test
void testHeartbeat() throws Exception {
Byte proto = CodecSupport.getIDByName(DefaultSerializationSelector.getDefaultRemotingSerialization());
Serialization serialization = CodecSupport.getSerializationById(proto);
byte[] nullBytes = CodecSupport.getNullBytesOf(serialization);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutput out = serialization.serialize(null, baos);
out.writeObject(null);
out.flushBuffer();
InputStream is = new ByteArrayInputStream(baos.toByteArray());
baos.close();
byte[] payload = CodecSupport.getPayload(is);
Assertions.assertArrayEquals(nullBytes, payload);
Assertions.assertTrue(CodecSupport.isHeartBeat(payload, proto));
}
}
| CodecSupportTest |
java | spring-projects__spring-boot | module/spring-boot-validation/src/test/java/org/springframework/boot/validation/autoconfigure/ValidationAutoConfigurationTests.java | {
"start": 16768,
"end": 17113
} | class ____ {
CustomValidatorConfiguration(SomeService someService) {
}
@Bean
Validator customValidator() {
return new CustomValidatorBean();
}
@Bean
static TestBeanPostProcessor testBeanPostProcessor() {
return new TestBeanPostProcessor();
}
@Configuration(proxyBeanMethods = false)
static | CustomValidatorConfiguration |
java | google__guava | android/guava/src/com/google/common/util/concurrent/Partially.java | {
"start": 907,
"end": 1486
} | class ____ exists solely to let us write {@code Partially.GwtIncompatible} instead of plain
* {@code GwtIncompatible}. This is more accurate for {@link Futures#catching}, which is available
* under GWT but with a slightly different signature.
*
* <p>We can't use {@code PartiallyGwtIncompatible} because then the GWT compiler wouldn't recognize
* it as a {@code GwtIncompatible} annotation. And for {@code Futures.catching}, we need the GWT
* compiler to autostrip the normal server method in order to expose the special, inherited GWT
* version.
*/
@GwtCompatible
final | that |
java | google__dagger | javatests/dagger/internal/codegen/MultipleRequestTest.java | {
"start": 2966,
"end": 3420
} | interface ____ {",
" FieldInjectsMultiple get();",
"}"))
.compile(subject -> subject.hasErrorCount(0));
}
@Test public void multipleRequests_providesMethod() {
CompilerTests.daggerCompiler(
CompilerTests.javaSource(
"test.Dep",
"package test;",
"",
"import javax.inject.Inject;",
"",
" | SimpleComponent |
java | google__dagger | javatests/dagger/internal/codegen/AssistedFactoryErrorsTest.java | {
"start": 11684,
"end": 12488
} | interface ____ {",
" Foo<String> create(String str1, String str2);",
"}");
CompilerTests.daggerCompiler(foo, fooFactory)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"@AssistedFactory method has duplicate @Assisted types: "
+ "@Assisted java.lang.String");
});
}
@Test
public void testAssistedInjectionRequest() {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"",
"import dagger.assisted.Assisted;",
"import dagger.assisted.AssistedInject;",
"",
" | FooFactory |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/EntityHierarchyBuilder.java | {
"start": 3188,
"end": 8666
} | class ____ at the hibernate-mapping level
for ( var jaxbRootEntity : mappingBinding.getClazz() ) {
// we can immediately handle <class/> elements in terms of creating the hierarchy entry
final var rootEntitySource = new RootEntitySourceImpl( mappingDocument, jaxbRootEntity );
entitySourceByNameMap.put( rootEntitySource.getEntityNamingSource().getEntityName(), rootEntitySource );
entityHierarchyList.add( new EntityHierarchySourceImpl( rootEntitySource, mappingDocument ) );
linkAnyWaiting( mappingDocument, rootEntitySource );
// process any of its nested sub-entity definitions
processRootEntitySubEntityElements( mappingDocument, jaxbRootEntity, rootEntitySource );
}
// iterate all discriminator-based subclass definitions at the hibernate-mapping level
for ( var discriminatorSubclassEntityBinding : mappingBinding.getSubclass() ) {
processTopLevelSubClassBinding( mappingDocument, discriminatorSubclassEntityBinding );
}
// iterate all joined-subclass definitions at the hibernate-mapping level
for ( var joinedSubclassEntityBinding : mappingBinding.getJoinedSubclass() ) {
processTopLevelSubClassBinding( mappingDocument, joinedSubclassEntityBinding );
}
// iterate all union-subclass definitions at the hibernate-mapping level
for ( var unionSubclassEntityBinding : mappingBinding.getUnionSubclass() ) {
processTopLevelSubClassBinding( mappingDocument, unionSubclassEntityBinding );
}
}
private void processRootEntitySubEntityElements(
MappingDocument mappingDocument,
JaxbHbmRootEntityType jaxbRootEntity,
RootEntitySourceImpl rootEntitySource) {
// todo : technically we should only allow one mutually-exclusive; should we enforce that here?
// I believe the DTD/XSD does enforce that, so maybe not a big deal
processElements( mappingDocument, jaxbRootEntity.getSubclass(), rootEntitySource );
processElements( mappingDocument, jaxbRootEntity.getJoinedSubclass(), rootEntitySource );
processElements( mappingDocument, jaxbRootEntity.getUnionSubclass(), rootEntitySource );
}
private void processSubEntityElements(
MappingDocument mappingDocument,
JaxbHbmEntityBaseDefinition entityBinding,
AbstractEntitySourceImpl container) {
if ( entityBinding instanceof JaxbHbmDiscriminatorSubclassEntityType jaxbSubclass ) {
processElements( mappingDocument, jaxbSubclass.getSubclass(), container );
}
else if ( entityBinding instanceof JaxbHbmJoinedSubclassEntityType jaxbJoinedSubclass ) {
processElements( mappingDocument, jaxbJoinedSubclass.getJoinedSubclass(), container );
}
else if ( entityBinding instanceof JaxbHbmUnionSubclassEntityType jaxbUnionSubclass ) {
processElements( mappingDocument, jaxbUnionSubclass.getUnionSubclass(), container );
}
}
private void processElements(
MappingDocument mappingDocument,
List<? extends JaxbHbmSubclassEntityBaseDefinition> nestedSubEntityList,
AbstractEntitySourceImpl container) {
for ( final var jaxbSubEntity : nestedSubEntityList ) {
final var subClassEntitySource = createSubClassEntitySource(
mappingDocument,
jaxbSubEntity,
container
);
entitySourceByNameMap.put(
subClassEntitySource.getEntityNamingSource().getEntityName(),
subClassEntitySource
);
container.add( subClassEntitySource );
linkAnyWaiting( mappingDocument, subClassEntitySource );
// Re-run the subelement to handle subclasses within the subclass.
processSubEntityElements( mappingDocument, jaxbSubEntity, subClassEntitySource );
}
}
private SubclassEntitySourceImpl createSubClassEntitySource(
MappingDocument mappingDocument,
JaxbHbmSubclassEntityBaseDefinition jaxbSubEntity,
EntitySource superEntity) {
return jaxbSubEntity instanceof JaxbHbmJoinedSubclassEntityType jaxbJoinedSubclass
? new JoinedSubclassEntitySourceImpl( mappingDocument, jaxbJoinedSubclass, superEntity )
: new SubclassEntitySourceImpl( mappingDocument, jaxbSubEntity, superEntity );
}
private void processTopLevelSubClassBinding(
MappingDocument mappingDocument,
JaxbHbmSubclassEntityBaseDefinition jaxbSubEntityMapping) {
final var entityItExtends = locateExtendedEntitySource( mappingDocument, jaxbSubEntityMapping );
if ( entityItExtends == null ) {
// we have not seen its declared super-type yet, add it to the queue to be linked up
// later when (if) we do
addToToBeLinkedQueue( mappingDocument, jaxbSubEntityMapping );
}
else {
// we have seen its super-type already
final var subEntitySource = createSubClassEntitySource(
mappingDocument,
jaxbSubEntityMapping,
entityItExtends
);
entitySourceByNameMap.put( subEntitySource.getEntityNamingSource().getEntityName(), subEntitySource );
entityItExtends.add( subEntitySource );
// this may have been a "middle type". So link any sub entities that may be waiting on it
linkAnyWaiting( mappingDocument, subEntitySource );
processSubEntityElements( mappingDocument, jaxbSubEntityMapping, subEntitySource );
}
}
private AbstractEntitySourceImpl locateExtendedEntitySource(
MappingDocument mappingDocument,
JaxbHbmSubclassEntityBaseDefinition jaxbSubEntityMapping) {
// NOTE: extends may refer to either an entity-name or a class-name, we need to check each
// first check using the entity name
var entityItExtends = entitySourceByNameMap.get( jaxbSubEntityMapping.getExtends() );
if ( entityItExtends == null ) {
// next, check using the | definitions |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java | {
"start": 2298,
"end": 13954
} | class ____ extends Query implements Accountable {
private final String id;
private final long sizeInCache;
DummyQuery(int id) {
this(Integer.toString(id), 10);
}
DummyQuery(String id, long sizeInCache) {
this.id = id;
this.sizeInCache = sizeInCache;
}
@Override
public boolean equals(Object obj) {
return sameClassAs(obj) && id.equals(((DummyQuery) obj).id);
}
@Override
public int hashCode() {
return 31 * classHash() + id.hashCode();
}
@Override
public void visit(QueryVisitor visitor) {
visitor.visitLeaf(this);
}
@Override
public String toString(String field) {
return "dummy";
}
@Override
public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) {
return new ConstantScoreWeight(this, boost) {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) {
Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
return new DefaultScorerSupplier(scorer);
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return true;
}
};
}
@Override
public long ramBytesUsed() {
return sizeInCache;
}
}
public void testBasics() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
w.addDocument(new Document());
DirectoryReader r = DirectoryReader.open(w);
w.close();
ShardId shard = new ShardId("index", "_na_", 0);
r = ElasticsearchDirectoryReader.wrap(r, shard);
IndexSearcher s = new IndexSearcher(r);
s.setQueryCachingPolicy(TrivialQueryCachingPolicy.ALWAYS);
Settings settings = Settings.builder()
.put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s.setQueryCache(cache);
QueryCacheStats stats = cache.getStats(shard, () -> 0L);
assertEquals(0L, stats.getCacheSize());
assertEquals(0L, stats.getCacheCount());
assertEquals(0L, stats.getHitCount());
assertEquals(0L, stats.getMissCount());
assertEquals(1, s.count(new DummyQuery(0)));
stats = cache.getStats(shard, () -> 0L);
assertEquals(1L, stats.getCacheSize());
assertEquals(1L, stats.getCacheCount());
assertEquals(0L, stats.getHitCount());
assertEquals(2L, stats.getMissCount());
for (int i = 1; i < 20; ++i) {
assertEquals(1, s.count(new DummyQuery(i)));
}
stats = cache.getStats(shard, () -> 0L);
assertEquals(10L, stats.getCacheSize());
assertEquals(20L, stats.getCacheCount());
assertEquals(0L, stats.getHitCount());
assertEquals(40L, stats.getMissCount());
s.count(new DummyQuery(10));
stats = cache.getStats(shard, () -> 0L);
assertEquals(10L, stats.getCacheSize());
assertEquals(20L, stats.getCacheCount());
assertEquals(1L, stats.getHitCount());
assertEquals(40L, stats.getMissCount());
IOUtils.close(r, dir);
// got emptied, but no changes to other metrics
stats = cache.getStats(shard, () -> 0L);
assertEquals(0L, stats.getCacheSize());
assertEquals(20L, stats.getCacheCount());
assertEquals(1L, stats.getHitCount());
assertEquals(40L, stats.getMissCount());
cache.onClose(shard);
// forgot everything
stats = cache.getStats(shard, () -> 0L);
assertEquals(0L, stats.getCacheSize());
assertEquals(0L, stats.getCacheCount());
assertEquals(0L, stats.getHitCount());
assertEquals(0L, stats.getMissCount());
cache.close(); // this triggers some assertions
}
public void testTwoShards() throws IOException {
Directory dir1 = newDirectory();
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig());
w1.addDocument(new Document());
DirectoryReader r1 = DirectoryReader.open(w1);
w1.close();
ShardId shard1 = new ShardId("index", "_na_", 0);
r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
IndexSearcher s1 = new IndexSearcher(r1);
s1.setQueryCachingPolicy(TrivialQueryCachingPolicy.ALWAYS);
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig());
w2.addDocument(new Document());
DirectoryReader r2 = DirectoryReader.open(w2);
w2.close();
ShardId shard2 = new ShardId("index", "_na_", 1);
r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
IndexSearcher s2 = new IndexSearcher(r2);
s2.setQueryCachingPolicy(TrivialQueryCachingPolicy.ALWAYS);
Settings settings = Settings.builder()
.put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s1.setQueryCache(cache);
s2.setQueryCache(cache);
assertEquals(1, s1.count(new DummyQuery(0)));
QueryCacheStats stats1 = cache.getStats(shard1, () -> 0L);
assertEquals(1L, stats1.getCacheSize());
assertEquals(1L, stats1.getCacheCount());
assertEquals(0L, stats1.getHitCount());
assertEquals(2L, stats1.getMissCount());
QueryCacheStats stats2 = cache.getStats(shard2, () -> 0L);
assertEquals(0L, stats2.getCacheSize());
assertEquals(0L, stats2.getCacheCount());
assertEquals(0L, stats2.getHitCount());
assertEquals(0L, stats2.getMissCount());
assertEquals(1, s2.count(new DummyQuery(0)));
stats1 = cache.getStats(shard1, () -> 0L);
assertEquals(1L, stats1.getCacheSize());
assertEquals(1L, stats1.getCacheCount());
assertEquals(0L, stats1.getHitCount());
assertEquals(2L, stats1.getMissCount());
stats2 = cache.getStats(shard2, () -> 0L);
assertEquals(1L, stats2.getCacheSize());
assertEquals(1L, stats2.getCacheCount());
assertEquals(0L, stats2.getHitCount());
assertEquals(2L, stats2.getMissCount());
for (int i = 0; i < 20; ++i) {
assertEquals(1, s2.count(new DummyQuery(i)));
}
stats1 = cache.getStats(shard1, () -> 0L);
assertEquals(0L, stats1.getCacheSize()); // evicted
assertEquals(1L, stats1.getCacheCount());
assertEquals(0L, stats1.getHitCount());
assertEquals(2L, stats1.getMissCount());
stats2 = cache.getStats(shard2, () -> 0L);
assertEquals(10L, stats2.getCacheSize());
assertEquals(20L, stats2.getCacheCount());
assertEquals(1L, stats2.getHitCount());
assertEquals(40L, stats2.getMissCount());
IOUtils.close(r1, dir1);
// no changes
stats1 = cache.getStats(shard1, () -> 0L);
assertEquals(0L, stats1.getCacheSize());
assertEquals(1L, stats1.getCacheCount());
assertEquals(0L, stats1.getHitCount());
assertEquals(2L, stats1.getMissCount());
stats2 = cache.getStats(shard2, () -> 0L);
assertEquals(10L, stats2.getCacheSize());
assertEquals(20L, stats2.getCacheCount());
assertEquals(1L, stats2.getHitCount());
assertEquals(40L, stats2.getMissCount());
cache.onClose(shard1);
// forgot everything about shard1
stats1 = cache.getStats(shard1, () -> 0L);
assertEquals(0L, stats1.getCacheSize());
assertEquals(0L, stats1.getCacheCount());
assertEquals(0L, stats1.getHitCount());
assertEquals(0L, stats1.getMissCount());
stats2 = cache.getStats(shard2, () -> 0L);
assertEquals(10L, stats2.getCacheSize());
assertEquals(20L, stats2.getCacheCount());
assertEquals(1L, stats2.getHitCount());
assertEquals(40L, stats2.getMissCount());
IOUtils.close(r2, dir2);
cache.onClose(shard2);
// forgot everything about shard2
stats1 = cache.getStats(shard1, () -> 0L);
assertEquals(0L, stats1.getCacheSize());
assertEquals(0L, stats1.getCacheCount());
assertEquals(0L, stats1.getHitCount());
assertEquals(0L, stats1.getMissCount());
assertEquals(0L, stats1.getMemorySizeInBytes());
stats2 = cache.getStats(shard2, () -> 0L);
assertEquals(0L, stats2.getCacheSize());
assertEquals(0L, stats2.getCacheCount());
assertEquals(0L, stats2.getHitCount());
assertEquals(0L, stats2.getMissCount());
assertEquals(0L, stats2.getMemorySizeInBytes());
cache.close(); // this triggers some assertions
}
// Make sure the cache behaves correctly when a segment that is associated
// with an empty cache gets closed. In that particular case, the eviction
// callback is called with a number of evicted entries equal to 0
// see https://github.com/elastic/elasticsearch/issues/15043
public void testStatsOnEviction() throws IOException {
Directory dir1 = newDirectory();
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig());
w1.addDocument(new Document());
DirectoryReader r1 = DirectoryReader.open(w1);
w1.close();
ShardId shard1 = new ShardId("index", "_na_", 0);
r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
IndexSearcher s1 = newSearcher(r1, false);
s1.setQueryCachingPolicy(TrivialQueryCachingPolicy.ALWAYS);
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig());
w2.addDocument(new Document());
DirectoryReader r2 = DirectoryReader.open(w2);
w2.close();
ShardId shard2 = new ShardId("index", "_na_", 1);
r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
IndexSearcher s2 = newSearcher(r2, false);
s2.setQueryCachingPolicy(TrivialQueryCachingPolicy.ALWAYS);
Settings settings = Settings.builder()
.put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s1.setQueryCache(cache);
s2.setQueryCache(cache);
assertEquals(1, s1.count(new DummyQuery(0)));
for (int i = 1; i <= 20; ++i) {
assertEquals(1, s2.count(new DummyQuery(i)));
}
QueryCacheStats stats1 = cache.getStats(shard1, () -> 0L);
assertEquals(0L, stats1.getCacheSize());
assertEquals(1L, stats1.getCacheCount());
// this used to fail because we were evicting an empty cache on
// the segment from r1
IOUtils.close(r1, dir1);
cache.onClose(shard1);
IOUtils.close(r2, dir2);
cache.onClose(shard2);
cache.close(); // this triggers some assertions
}
private static | DummyQuery |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/HelloResource.java | {
"start": 290,
"end": 577
} | class ____ {
@GET
public String hello() {
return "hello";
}
@ServerExceptionMapper
public Response unauthorizedExceptionMapper(UnauthorizedException unauthorizedException) {
return Response.ok("unauthorizedExceptionMapper").build();
}
}
| HelloResource |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/AdapterTest.java | {
"start": 1777,
"end": 2190
} | class ____ {
@Query
public Person getPerson() {
Address a = new Address();
a.code = "1234";
a.addLine("1 Street street");
a.addLine("City");
a.addLine("Province");
Person p = new Person();
p.setName("Phillip Kruger");
p.setAddress(a);
return p;
}
}
public static | AdapterApi |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/calcite/sql/type/SqlTypeName.java | {
"start": 36724,
"end": 40244
} | enum ____ {
ZERO,
UNDERFLOW,
OVERFLOW
}
private static @Nullable BigDecimal getNumericLimit(
int radix, int exponent, boolean sign, Limit limit, boolean beyond) {
switch (limit) {
case OVERFLOW:
// 2-based schemes run from -2^(N-1) to 2^(N-1)-1 e.g. -128 to +127
// 10-based schemas run from -(10^N-1) to 10^N-1 e.g. -99 to +99
final BigDecimal bigRadix = BigDecimal.valueOf(radix);
if (radix == 2) {
--exponent;
}
BigDecimal decimal = bigRadix.pow(exponent);
if (sign || (radix != 2)) {
decimal = decimal.subtract(BigDecimal.ONE);
}
if (beyond) {
decimal = decimal.add(BigDecimal.ONE);
}
if (!sign) {
decimal = decimal.negate();
}
return decimal;
case UNDERFLOW:
return beyond ? null : (sign ? BigDecimal.ONE : BigDecimal.ONE.negate());
case ZERO:
return BigDecimal.ZERO;
default:
throw Util.unexpected(limit);
}
}
public SqlLiteral createLiteral(Object o, SqlParserPos pos) {
switch (this) {
case BOOLEAN:
return SqlLiteral.createBoolean((Boolean) o, pos);
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case DECIMAL:
return SqlLiteral.createExactNumeric(o.toString(), pos);
case VARCHAR:
case CHAR:
return SqlLiteral.createCharString((String) o, pos);
case VARBINARY:
case BINARY:
return SqlLiteral.createBinaryString((byte[]) o, pos);
case DATE:
return SqlLiteral.createDate(
o instanceof Calendar
? DateString.fromCalendarFields((Calendar) o)
: (DateString) o,
pos);
case TIME:
return SqlLiteral.createTime(
o instanceof Calendar
? TimeString.fromCalendarFields((Calendar) o)
: (TimeString) o,
0 /* todo */,
pos);
case TIMESTAMP:
return SqlLiteral.createTimestamp(
this,
o instanceof Calendar
? TimestampString.fromCalendarFields((Calendar) o)
: (TimestampString) o,
0 /* todo */,
pos);
default:
throw Util.unexpected(this);
}
}
/** Returns the name of this type. */
public String getName() {
return name();
}
/**
* Returns the name of this type, with underscores converted to spaces, for example "TIMESTAMP
* WITH LOCAL TIME ZONE", "DATE".
*/
public String getSpaceName() {
return name().replace('_', ' ');
}
/**
* Flags indicating precision/scale combinations.
*
* <p>Note: for intervals:
*
* <ul>
* <li>precision = start (leading field) precision
* <li>scale = fractional second precision
* </ul>
*/
private | Limit |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/lifecycle/internal/GoalTask.java | {
"start": 922,
"end": 1044
} | class ____ not part of any public api and can be changed or deleted without prior notice.
*
* @since 3.0
*/
public final | is |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/entity/internal/EntityFetchSelectImpl.java | {
"start": 770,
"end": 2259
} | class ____ extends AbstractNonJoinedEntityFetch {
private final boolean isAffectedByFilter;
public EntityFetchSelectImpl(
FetchParent fetchParent,
ToOneAttributeMapping fetchedAttribute,
NavigablePath navigablePath,
DomainResult<?> keyResult,
boolean selectByUniqueKey,
boolean isAffectedByFilter,
DomainResultCreationState creationState) {
super( navigablePath, fetchedAttribute, fetchParent, keyResult, false, selectByUniqueKey, creationState );
this.isAffectedByFilter = isAffectedByFilter;
}
/**
* For Hibernate Reactive
*/
protected EntityFetchSelectImpl(EntityFetchSelectImpl original) {
super(
original.getNavigablePath(),
original.getFetchedMapping(),
original.getFetchParent(),
original.getKeyResult(),
original.getDiscriminatorFetch(),
original.isSelectByUniqueKey()
);
this.isAffectedByFilter = original.isAffectedByFilter();
}
@Override
public FetchTiming getTiming() {
return FetchTiming.IMMEDIATE;
}
public boolean isAffectedByFilter() {
return isAffectedByFilter;
}
@Override
public EntityInitializer<?> createInitializer(InitializerParent<?> parent, AssemblerCreationState creationState) {
return EntitySelectFetchInitializerBuilder.createInitializer(
parent,
getFetchedMapping(),
getReferencedMappingContainer().getEntityPersister(),
getKeyResult(),
getNavigablePath(),
isSelectByUniqueKey(),
isAffectedByFilter(),
creationState
);
}
}
| EntityFetchSelectImpl |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/web/AbstractGenericWebContextLoader.java | {
"start": 9137,
"end": 11003
} | class ____ @WebAppConfiguration."""
.formatted(mergedConfig));
}
if (logger.isTraceEnabled()) {
logger.trace("Loading WebApplicationContext for AOT runtime for " + mergedConfig);
}
else if (logger.isDebugEnabled()) {
logger.debug("Loading WebApplicationContext for AOT runtime for test class " +
mergedConfig.getTestClass().getName());
}
validateMergedContextConfiguration(webMergedConfig);
GenericWebApplicationContext context = createContext();
try {
configureWebResources(context, webMergedConfig);
prepareContext(context, webMergedConfig);
initializer.initialize(context);
customizeContext(context, webMergedConfig);
context.refresh();
return context;
}
catch (Exception ex) {
throw new ContextLoadException(context, ex);
}
}
/**
* Load a {@link GenericWebApplicationContext} for the supplied
* {@link MergedContextConfiguration}.
* @param mergedConfig the merged context configuration to use to load the
* application context
* @param forAotProcessing {@code true} if the context is being loaded for
* AOT processing, meaning not to refresh the {@code ApplicationContext} or
* register a JVM shutdown hook for it
* @return a new web application context
* @see org.springframework.test.context.SmartContextLoader#loadContext(MergedContextConfiguration)
* @see org.springframework.test.context.aot.AotContextLoader#loadContextForAotProcessing(MergedContextConfiguration, RuntimeHints)
*/
private GenericWebApplicationContext loadContext(
MergedContextConfiguration mergedConfig, boolean forAotProcessing) throws Exception {
if (!(mergedConfig instanceof WebMergedContextConfiguration webMergedConfig)) {
throw new IllegalArgumentException("""
Cannot load WebApplicationContext from non-web merged context configuration %s. \
Consider annotating your test | with |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/wall/WallUtils.java | {
"start": 694,
"end": 2510
} | class ____ {
public static boolean isValidateDB2(String sql) {
DB2WallProvider provider = new DB2WallProvider();
return provider.checkValid(sql);
}
public static boolean isValidateDB2(String sql, WallConfig config) {
DB2WallProvider provider = new DB2WallProvider(config);
return provider.checkValid(sql);
}
public static boolean isValidatePostgres(String sql) {
PGWallProvider provider = new PGWallProvider();
return provider.checkValid(sql);
}
public static boolean isValidatePostgres(String sql, WallConfig config) {
PGWallProvider provider = new PGWallProvider(config);
return provider.checkValid(sql);
}
public static boolean isValidateMySql(String sql) {
MySqlWallProvider provider = new MySqlWallProvider();
return provider.checkValid(sql);
}
public static boolean isValidateMySql(String sql, WallConfig config) {
MySqlWallProvider provider = new MySqlWallProvider(config);
return provider.checkValid(sql);
}
public static boolean isValidateOracle(String sql) {
OracleWallProvider provider = new OracleWallProvider();
return provider.checkValid(sql);
}
public static boolean isValidateOracle(String sql, WallConfig config) {
OracleWallProvider provider = new OracleWallProvider(config);
return provider.checkValid(sql);
}
public static boolean isValidateSqlServer(String sql) {
SQLServerWallProvider provider = new SQLServerWallProvider();
return provider.checkValid(sql);
}
public static boolean isValidateSqlServer(String sql, WallConfig config) {
SQLServerWallProvider provider = new SQLServerWallProvider(config);
return provider.checkValid(sql);
}
}
| WallUtils |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/OffsetDateTimeJavaType.java | {
"start": 1415,
"end": 8387
} | class ____ extends AbstractTemporalJavaType<OffsetDateTime>
implements VersionJavaType<OffsetDateTime> {
/**
* Singleton access
*/
public static final OffsetDateTimeJavaType INSTANCE = new OffsetDateTimeJavaType();
private static final DateTimeFormatter PARSE_FORMATTER;
static {
PARSE_FORMATTER = new DateTimeFormatterBuilder()
.parseCaseInsensitive()
.append(ISO_LOCAL_DATE_TIME)
.optionalStart()
.parseLenient()
.appendOffset( "+HH:MM:ss", "Z" )
.parseStrict()
.toFormatter();
}
public OffsetDateTimeJavaType() {
super( OffsetDateTime.class, ImmutableMutabilityPlan.instance(), OffsetDateTime.timeLineOrder() );
}
@Override
public boolean isInstance(Object value) {
return value instanceof OffsetDateTime;
}
@Override
public TemporalType getPrecision() {
return TemporalType.TIMESTAMP;
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators stdIndicators) {
if ( stdIndicators.isPreferJavaTimeJdbcTypesEnabled() ) {
return stdIndicators.getJdbcType( SqlTypes.OFFSET_DATE_TIME );
}
return stdIndicators.getJdbcType( stdIndicators.getDefaultZonedTimestampSqlType() );
}
@Override @SuppressWarnings("unchecked")
protected <X> TemporalJavaType<X> forTimestampPrecision(TypeConfiguration typeConfiguration) {
return (TemporalJavaType<X>) this;
}
@Override
public boolean useObjectEqualsHashCode() {
return true;
}
@Override
public String toString(OffsetDateTime value) {
return ISO_OFFSET_DATE_TIME.format( value );
}
@Override
public OffsetDateTime fromString(CharSequence string) {
return OffsetDateTime.from( ISO_OFFSET_DATE_TIME.parse( string ) );
}
@Override
public OffsetDateTime fromEncodedString(CharSequence charSequence, int start, int end) {
try {
final TemporalAccessor temporalAccessor = PARSE_FORMATTER.parse( subSequence( charSequence, start, end ) );
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
return OffsetDateTime.from( temporalAccessor );
}
else {
// For databases that don't have timezone support, we encode timestamps at UTC, so allow parsing that as well
return LocalDateTime.from( temporalAccessor ).atOffset( ZoneOffset.UTC );
}
}
catch ( DateTimeParseException pe) {
throw new HibernateException( "could not parse timestamp string " + subSequence( charSequence, start, end ), pe );
}
}
@Override
@SuppressWarnings("unchecked")
public <X> X unwrap(OffsetDateTime offsetDateTime, Class<X> type, WrapperOptions options) {
if ( offsetDateTime == null ) {
return null;
}
if ( OffsetDateTime.class.isAssignableFrom( type ) ) {
return (X) offsetDateTime;
}
if ( ZonedDateTime.class.isAssignableFrom( type ) ) {
return (X) offsetDateTime.toZonedDateTime();
}
if ( Instant.class.isAssignableFrom( type ) ) {
return (X) offsetDateTime.toInstant();
}
if ( Calendar.class.isAssignableFrom( type ) ) {
return (X) GregorianCalendar.from( offsetDateTime.toZonedDateTime() );
}
if ( Timestamp.class.isAssignableFrom( type ) ) {
/*
* This works around two bugs:
* - HHH-13266 (JDK-8061577): around and before 1900,
* the number of milliseconds since the epoch does not mean the same thing
* for java.util and java.time, so conversion must be done using the year, month, day, hour, etc.
* - HHH-13379 (JDK-4312621): after 1908 (approximately),
* Daylight Saving Time introduces ambiguity in the year/month/day/hour/etc representation once a year
* (on DST end), so conversion must be done using the number of milliseconds since the epoch.
* - around 1905, both methods are equally valid, so we don't really care which one is used.
*/
if ( offsetDateTime.getYear() < 1905 ) {
return (X) Timestamp.valueOf(
offsetDateTime.atZoneSameInstant( ZoneId.systemDefault() ).toLocalDateTime()
);
}
else {
return (X) Timestamp.from( offsetDateTime.toInstant() );
}
}
if ( java.sql.Date.class.isAssignableFrom( type ) ) {
return (X) java.sql.Date.from( offsetDateTime.toInstant() );
}
if ( java.sql.Time.class.isAssignableFrom( type ) ) {
return (X) java.sql.Time.from( offsetDateTime.toInstant() );
}
if ( Date.class.isAssignableFrom( type ) ) {
return (X) Date.from( offsetDateTime.toInstant() );
}
if ( Long.class.isAssignableFrom( type ) ) {
return (X) Long.valueOf( offsetDateTime.toInstant().toEpochMilli() );
}
throw unknownUnwrap( type );
}
@Override
public <X> OffsetDateTime wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if (value instanceof OffsetDateTime offsetDateTime) {
return offsetDateTime;
}
if (value instanceof ZonedDateTime zonedDateTime) {
return OffsetDateTime.of( zonedDateTime.toLocalDateTime(), zonedDateTime.getOffset() );
}
if (value instanceof Instant instant) {
return instant.atOffset( ZoneOffset.UTC );
}
if (value instanceof Timestamp timestamp) {
/*
* This works around two bugs:
* - HHH-13266 (JDK-8061577): around and before 1900,
* the number of milliseconds since the epoch does not mean the same thing
* for java.util and java.time, so conversion must be done using the year, month, day, hour, etc.
* - HHH-13379 (JDK-4312621): after 1908 (approximately),
* Daylight Saving Time introduces ambiguity in the year/month/day/hour/etc representation once a year
* (on DST end), so conversion must be done using the number of milliseconds since the epoch.
* - around 1905, both methods are equally valid, so we don't really care which one is used.
*/
if ( timestamp.getYear() < 5 ) { // Timestamp year 0 is 1900
return timestamp.toLocalDateTime().atZone( ZoneId.systemDefault() ).toOffsetDateTime();
}
else {
return OffsetDateTime.ofInstant( timestamp.toInstant(), ZoneId.systemDefault() );
}
}
if (value instanceof Date date) {
return OffsetDateTime.ofInstant( date.toInstant(), ZoneId.systemDefault() );
}
if (value instanceof Long longValue) {
return OffsetDateTime.ofInstant( Instant.ofEpochMilli( longValue ), ZoneId.systemDefault() );
}
if (value instanceof Calendar calendar) {
return OffsetDateTime.ofInstant( calendar.toInstant(), calendar.getTimeZone().toZoneId() );
}
throw unknownWrap( value.getClass() );
}
@Override
public int getDefaultSqlPrecision(Dialect dialect, JdbcType jdbcType) {
return dialect.getDefaultTimestampPrecision();
}
@Override
public OffsetDateTime seed(
Long length,
Integer precision,
Integer scale,
SharedSessionContractImplementor session) {
return OffsetDateTime.now( ClockHelper.forPrecision( precision, session ) );
}
@Override
public OffsetDateTime next(
OffsetDateTime current,
Long length,
Integer precision,
Integer scale,
SharedSessionContractImplementor session) {
return OffsetDateTime.now( ClockHelper.forPrecision( precision, session ) );
}
}
| OffsetDateTimeJavaType |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/RecursiveComparisonAssert.java | {
"start": 49121,
"end": 51551
} | class ____ {
* int number;
* String street;
*
* // only compares number, ouch!
* {@literal @}Override
* public boolean equals(final Object other) {
* if (!(other instanceof Address)) return false;
* Address castOther = (Address) other;
* return Objects.equals(number, castOther.number);
* }
* }
*
* Person sherlock = new Person("Sherlock", 1.80);
* sherlock.home.address.street = "Baker Street";
* sherlock.home.address.number = 221;
*
* Person sherlock2 = new Person("Sherlock", 1.80);
* sherlock2.home.address.street = "Butcher Street";
* sherlock2.home.address.number = 221;
*
* // Assertion succeeds because:
* // - overridden equals are used
* // - Address has overridden equals and does not compare street fields.
* assertThat(sherlock).usingRecursiveComparison()
* .usingOverriddenEquals()
* .isEqualTo(sherlock2);
*
* // ignoringOverriddenEqualsForTypes force a recursive comparison on the given types.
* // Assertion fails because:
* // - Address equals is not used.
* // - street fields are compared and differ.
* assertThat(sherlock).usingRecursiveComparison()
* .usingOverriddenEquals()
* .ignoringOverriddenEqualsForTypes(Address.class)
* .isEqualTo(sherlock2);</code></pre>
*
* @param types the types we want to force a recursive comparison on.
* @return this {@link RecursiveComparisonAssert} to chain other methods.
*/
@CheckReturnValue
public SELF ignoringOverriddenEqualsForTypes(Class<?>... types) {
recursiveComparisonConfiguration.ignoreOverriddenEqualsForTypes(types);
return myself;
}
/**
* In case you have instructed the recursive comparison to use overridden {@code equals} with {@link #usingOverriddenEquals()},
* this method allows forcing a recursive comparison for the fields matching the given regexes (it adds them to the already registered ones).
* <p>
* Since 3.17.0 all overridden {@code equals} so this method is only relevant if you have called {@link #usingOverriddenEquals()} before.
* <p>
* Nested fields can be specified by using dots like: {@code home\.address\.street} ({@code \} is used to escape
* dots since they have a special meaning in regexes).
* <p>
* Example:
* <pre><code class='java'> | Address |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/FetchNonRootRelativeElementCollectionAndAssociationTest.java | {
"start": 2003,
"end": 2429
} | class ____ {
@Id
private Long id;
@OneToOne(mappedBy = "product", cascade = ALL, fetch = FetchType.LAZY)
private ProductDetail productDetail;
@OneToOne(mappedBy = "product", cascade = ALL, fetch = FetchType.LAZY)
private ProductNaturalId naturalId;
@ElementCollection(fetch = FetchType.LAZY)
private Map<String, String> normalizedPricesByUnit = new HashMap<>();
}
@Entity(name = "ProductDetail")
public | Product |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java | {
"start": 805,
"end": 1769
} | class ____ extends NonDynamicFieldMapperTests {
@Before
public void setup() throws Exception {
ModelRegistry modelRegistry = node().injector().getInstance(ModelRegistry.class);
Utils.storeSparseModel("sparse-endpoint", modelRegistry);
}
@Override
protected Settings nodeSettings() {
return Settings.builder().put(LicenseSettings.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial").build();
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return List.of(LocalStateInferencePlugin.class);
}
@Override
protected String getTypeName() {
return SemanticTextFieldMapper.CONTENT_TYPE;
}
@Override
protected String getMapping() {
return String.format(Locale.ROOT, """
"type": "%s",
"inference_id": "%s"
""", SemanticTextFieldMapper.CONTENT_TYPE, "sparse-endpoint");
}
}
| SemanticTextNonDynamicFieldMapperTests |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2FlowController.java | {
"start": 784,
"end": 3615
} | interface ____ {
/**
* Set the {@link ChannelHandlerContext} for which to apply flow control on.
* <p>
* This <strong>must</strong> be called to properly initialize the {@link Http2FlowController}.
* Not calling this is considered a programming error.
* @param ctx The {@link ChannelHandlerContext} for which to apply flow control on.
* @throws Http2Exception if any protocol-related error occurred.
*/
void channelHandlerContext(ChannelHandlerContext ctx) throws Http2Exception;
/**
* Sets the connection-wide initial flow control window and updates all stream windows (but not the connection
* stream window) by the delta.
* <p>
* Represents the value for
* <a href="https://tools.ietf.org/html/rfc7540#section-6.5.2">SETTINGS_INITIAL_WINDOW_SIZE</a>. This method should
* only be called by Netty (not users) as a result of a receiving a {@code SETTINGS} frame.
*
* @param newWindowSize the new initial window size.
* @throws Http2Exception thrown if any protocol-related error occurred.
*/
void initialWindowSize(int newWindowSize) throws Http2Exception;
/**
* Gets the connection-wide initial flow control window size that is used as the basis for new stream flow
* control windows.
* <p>
* Represents the value for
* <a href="https://tools.ietf.org/html/rfc7540#section-6.5.2">SETTINGS_INITIAL_WINDOW_SIZE</a>. The initial value
* returned by this method must be {@link Http2CodecUtil#DEFAULT_WINDOW_SIZE}.
*/
int initialWindowSize();
/**
* Get the portion of the flow control window for the given stream that is currently available for sending/receiving
* frames which are subject to flow control. This quantity is measured in number of bytes.
*/
int windowSize(Http2Stream stream);
/**
* Increments the size of the stream's flow control window by the given delta.
* <p>
* In the case of a {@link Http2RemoteFlowController} this is called upon receipt of a
* {@code WINDOW_UPDATE} frame from the remote endpoint to mirror the changes to the window
* size.
* <p>
* For a {@link Http2LocalFlowController} this can be called to request the expansion of the
* window size published by this endpoint. It is up to the implementation, however, as to when a
* {@code WINDOW_UPDATE} is actually sent.
*
* @param stream The subject stream. Use {@link Http2Connection#connectionStream()} for
* requesting the size of the connection window.
* @param delta the change in size of the flow control window.
* @throws Http2Exception thrown if a protocol-related error occurred.
*/
void incrementWindowSize(Http2Stream stream, int delta) throws Http2Exception;
}
| Http2FlowController |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/api/reactive/RedisJsonReactiveCommands.java | {
"start": 904,
"end": 30164
} | interface ____<K, V> {
/**
* Append the JSON values into the array at a given {@link JsonPath} after the last element in a said array.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param values one or more {@link JsonValue} to be appended.
* @return Long the resulting size of the arrays after the new data was appended, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonArrappend(K key, JsonPath jsonPath, JsonValue... values);
/**
* Append the JSON values into the array at the {@link JsonPath#ROOT_PATH} after the last element in a said array.
*
* @param key the key holding the JSON document.
* @param values one or more {@link JsonValue} to be appended.
* @return Long the resulting size of the arrays after the new data was appended, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonArrappend(K key, JsonValue... values);
/**
* Append the JSON string values into the array at the {@link JsonPath#ROOT_PATH} after the last element in a said array.
*
* @param key the key holding the JSON document.
* @param jsonStrings one or more JSON strings to be appended.
* @return Long the resulting size of the arrays after the new data was appended, or null if the path does not exist.
* @since 6.8
*/
Flux<Long> jsonArrappend(K key, String... jsonStrings);
/**
* Append the JSON string values into the array at a given {@link JsonPath} after the last element in a said array.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param jsonStrings one or more JSON strings to be appended.
* @return Long the resulting size of the arrays after the new data was appended, or null if the path does not exist.
* @since 6.8
*/
Flux<Long> jsonArrappend(K key, JsonPath jsonPath, String... jsonStrings);
/**
* Search for the first occurrence of a {@link JsonValue} in an array at a given {@link JsonPath} and return its index.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param value the {@link JsonValue} to search for.
* @param range the {@link JsonRangeArgs} to search within.
* @return Long the index hosting the searched element, -1 if not found or null if the specified path is not an array.
* @since 6.5
*/
Flux<Long> jsonArrindex(K key, JsonPath jsonPath, JsonValue value, JsonRangeArgs range);
/**
* Search for the first occurrence of a {@link JsonValue} in an array at a given {@link JsonPath} and return its index. This
* method uses defaults for the start and end indexes, see {@link JsonRangeArgs#DEFAULT_START_INDEX} and
* {@link JsonRangeArgs#DEFAULT_END_INDEX}.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param value the {@link JsonValue} to search for.
* @return Long the index hosting the searched element, -1 if not found or null if the specified path is not an array.
* @since 6.5
*/
Flux<Long> jsonArrindex(K key, JsonPath jsonPath, JsonValue value);
/**
* Search for the first occurrence of a JSON string in an array at a given {@link JsonPath} and return its index. This
* method uses defaults for the start and end indexes, see {@link JsonRangeArgs#DEFAULT_START_INDEX} and
* {@link JsonRangeArgs#DEFAULT_END_INDEX}.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param jsonString the JSON string to search for.
* @return Long the index hosting the searched element, -1 if not found or null if the specified path is not an array.
* @since 6.8
*/
Flux<Long> jsonArrindex(K key, JsonPath jsonPath, String jsonString);
/**
* Search for the first occurrence of a JSON string in an array at a given {@link JsonPath} and return its index.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param jsonString the JSON string to search for.
* @param range the {@link JsonRangeArgs} to search within.
* @return Long the index hosting the searched element, -1 if not found or null if the specified path is not an array.
* @since 6.8
*/
Flux<Long> jsonArrindex(K key, JsonPath jsonPath, String jsonString, JsonRangeArgs range);
/**
* Insert the {@link JsonValue}s into the array at a given {@link JsonPath} before the provided index, shifting the existing
* elements to the right
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param index the index before which the new elements will be inserted.
* @param values one or more {@link JsonValue}s to be inserted.
* @return Long the resulting size of the arrays after the new data was inserted, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonArrinsert(K key, JsonPath jsonPath, int index, JsonValue... values);
/**
* Insert the JSON string values into the array at a given {@link JsonPath} before the provided index.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param index the index before which the new elements will be inserted.
* @param jsonStrings one or more JSON strings to be inserted.
* @return Long the resulting size of the arrays after the new data was inserted, or null if the path does not exist.
* @since 6.8
*/
Flux<Long> jsonArrinsert(K key, JsonPath jsonPath, int index, String... jsonStrings);
/**
* Report the length of the JSON array at a given {@link JsonPath}
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @return the size of the arrays, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonArrlen(K key, JsonPath jsonPath);
/**
* Report the length of the JSON array at a the {@link JsonPath#ROOT_PATH}
*
* @param key the key holding the JSON document.
* @return the size of the arrays, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonArrlen(K key);
/**
* Remove and return {@link JsonValue} at a given index in the array at a given {@link JsonPath}
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param index the index of the element to be removed. Default is -1, meaning the last element. Out-of-range indexes round
* to their respective array ends. Popping an empty array returns null.
* @return List<JsonValue> the removed element, or null if the specified path is not an array.
* @since 6.5
*/
Flux<JsonValue> jsonArrpop(K key, JsonPath jsonPath, int index);
/**
* Remove and return the JSON value at a given index in the array at a given {@link JsonPath} as raw JSON strings.
* <p>
* Behaves like {@link #jsonArrpop(Object, JsonPath, int)} but returns {@code List<String>} with raw JSON instead of
* {@link JsonValue} wrappers.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param index the index of the element to be removed. Default is -1, meaning the last element. Out-of-range indexes round
* to their respective array ends. Popping an empty array returns null.
* @return List<String> the removed element, or null if the specified path is not an array.
* @since 7.0
*/
Flux<String> jsonArrpopRaw(K key, JsonPath jsonPath, int index);
/**
* Remove and return {@link JsonValue} at index -1 (last element) in the array at a given {@link JsonPath}
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @return List<JsonValue> the removed element, or null if the specified path is not an array.
* @since 6.5
*/
Flux<JsonValue> jsonArrpop(K key, JsonPath jsonPath);
/**
* Remove and return the JSON value at index -1 (last element) in the array at a given {@link JsonPath} as raw JSON strings.
* <p>
* Behaves like {@link #jsonArrpop(Object, JsonPath)} but returns {@code List<String>} with raw JSON instead of
* {@link JsonValue} wrappers.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @return List<String> the removed element, or null if the specified path is not an array.
* @since 7.0
*/
Flux<String> jsonArrpopRaw(K key, JsonPath jsonPath);
/**
* Remove and return {@link JsonValue} at index -1 (last element) in the array at the {@link JsonPath#ROOT_PATH}
*
* @param key the key holding the JSON document.
* @return List<JsonValue> the removed element, or null if the specified path is not an array.
* @since 6.5
*/
Flux<JsonValue> jsonArrpop(K key);
/**
* Remove and return the JSON value at index -1 (last element) in the array at the {@link JsonPath#ROOT_PATH} as raw JSON
* strings.
* <p>
* Behaves like {@link #jsonArrpop(Object)} but returns {@code List<String>} with raw JSON instead of {@link JsonValue}
* wrappers.
*
* @param key the key holding the JSON document.
* @return List<String> the removed element, or null if the specified path is not an array.
* @since 7.0
*/
Flux<String> jsonArrpopRaw(K key);
/**
* Trim an array at a given {@link JsonPath} so that it contains only the specified inclusive range of elements. All
* elements with indexes smaller than the start range and all elements with indexes bigger than the end range are trimmed.
* <p>
* Behavior as of RedisJSON v2.0:
* <ul>
* <li>If start is larger than the array's size or start > stop, returns 0 and an empty array.</li>
* <li>If start is < 0, then start from the end of the array.</li>
* <li>If stop is larger than the end of the array, it is treated like the last element.</li>
* </ul>
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the array inside the document.
* @param range the {@link JsonRangeArgs} to trim by.
* @return Long the resulting size of the arrays after the trimming, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonArrtrim(K key, JsonPath jsonPath, JsonRangeArgs range);
/**
* Clear container values (arrays/objects) and set numeric values to 0
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value to clear.
* @return Long the number of values removed plus all the matching JSON numerical values that are zeroed.
* @since 6.5
*/
Mono<Long> jsonClear(K key, JsonPath jsonPath);
/**
* Clear container values (arrays/objects) and set numeric values to 0 at the {@link JsonPath#ROOT_PATH}
*
* @param key the key holding the JSON document.
* @return Long the number of values removed plus all the matching JSON numerical values that are zeroed.
* @since 6.5
*/
Mono<Long> jsonClear(K key);
/**
* Deletes a value inside the JSON document at a given {@link JsonPath}
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value to clear.
* @return Long the number of values removed (0 or more).
* @since 6.5
*/
Mono<Long> jsonDel(K key, JsonPath jsonPath);
/**
* Deletes a value inside the JSON document at the {@link JsonPath#ROOT_PATH}
*
* @param key the key holding the JSON document.
* @return Long the number of values removed (0 or more).
* @since 6.5
*/
Mono<Long> jsonDel(K key);
/**
* Return the value at the specified path in JSON serialized form.
* <p>
* When using a single JSONPath, the root of the matching values is a JSON string with a top-level array of serialized JSON
* value. In contrast, a legacy path returns a single value.
* <p>
* When using multiple JSONPath arguments, the root of the matching values is a JSON string with a top-level object, with
* each object value being a top-level array of serialized JSON value. In contrast, if all paths are legacy paths, each
* object value is a single serialized JSON value. If there are multiple paths that include both legacy path and JSONPath,
* the returned value conforms to the JSONPath version (an array of values).
*
* @param key the key holding the JSON document.
* @param options the {@link JsonGetArgs} to use.
* @param jsonPaths the {@link JsonPath}s to use to identify the values to get.
* @return JsonValue the value at path in JSON serialized form, or null if the path does not exist.
* @since 6.5
*/
Flux<JsonValue> jsonGet(K key, JsonGetArgs options, JsonPath... jsonPaths);
/**
* Return the value at the specified path in JSON serialized form as raw strings.
* <p>
* Behaves like {@link #jsonGet(Object, JsonGetArgs, JsonPath...)} but returns {@code List<String>} with raw JSON instead of
* {@link JsonValue} wrappers.
*
* @param key the key holding the JSON document.
* @param options the {@link JsonGetArgs} to use.
* @param jsonPaths the {@link JsonPath}s to use to identify the values to get.
* @return List<String> the value at path in JSON serialized form, or null if the path does not exist.
* @since 7.0
*/
Flux<String> jsonGetRaw(K key, JsonGetArgs options, JsonPath... jsonPaths);
/**
* Return the value at the specified path in JSON serialized form. Uses defaults for the {@link JsonGetArgs}.
* <p>
* When using a single JSONPath, the root of the matching values is a JSON string with a top-level array of serialized JSON
* value. In contrast, a legacy path returns a single value.
* <p>
* When using multiple JSONPath arguments, the root of the matching values is a JSON string with a top-level object, with
* each object value being a top-level array of serialized JSON value. In contrast, if all paths are legacy paths, each
* object value is a single serialized JSON value. If there are multiple paths that include both legacy path and JSONPath,
* the returned value conforms to the JSONPath version (an array of values).
*
* @param key the key holding the JSON document.
* @param jsonPaths the {@link JsonPath}s to use to identify the values to get.
* @return JsonValue the value at path in JSON serialized form, or null if the path does not exist.
* @since 6.5
*/
Flux<JsonValue> jsonGet(K key, JsonPath... jsonPaths);
/**
* Return the value at the specified path in JSON serialized form as raw strings. Uses defaults for the {@link JsonGetArgs}.
* <p>
* Behaves like {@link #jsonGet(Object, JsonPath...)} but returns {@code List<String>} with raw JSON instead of
* {@link JsonValue} wrappers.
*
* @param key the key holding the JSON document.
* @param jsonPaths the {@link JsonPath}s to use to identify the values to get.
* @return List<String> the value at path in JSON serialized form, or null if the path does not exist.
* @since 7.0
*/
Flux<String> jsonGetRaw(K key, JsonPath... jsonPaths);
/**
* Merge a given {@link JsonValue} with the value matching {@link JsonPath}. Consequently, JSON values at matching paths are
* updated, deleted, or expanded with new children.
* <p>
* Merging is done according to the following rules per JSON value in the value argument while considering the corresponding
* original value if it exists:
* <ul>
* <li>merging an existing object key with a null value deletes the key</li>
* <li>merging an existing object key with non-null value updates the value</li>
* <li>merging a non-existing object key adds the key and value</li>
* <li>merging an existing array with any merged value, replaces the entire array with the value</li>
* </ul>
* <p>
* This command complies with RFC7396 "Json Merge Patch"
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value to merge.
* @param value the {@link JsonValue} to merge.
* @return String "OK" if the set was successful, error if the operation failed.
* @since 6.5
* @see <A href="https://tools.ietf.org/html/rfc7396">RFC7396</a>
*/
Mono<String> jsonMerge(K key, JsonPath jsonPath, JsonValue value);
/**
* Merge a given JSON string with the value matching {@link JsonPath}.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value to merge.
* @param jsonString the JSON string to merge.
* @return String "OK" if the merge was successful, error if the operation failed.
* @since 6.8
*/
Mono<String> jsonMerge(K key, JsonPath jsonPath, String jsonString);
/**
* Return the values at the specified path from multiple key arguments.
*
* @param jsonPath the {@link JsonPath} pointing to the value to fetch.
* @param keys the keys holding the {@link JsonValue}s to fetch.
* @return List<JsonValue> the values at path, or null if the path does not exist.
* @since 6.5
*/
Flux<JsonValue> jsonMGet(JsonPath jsonPath, K... keys);
/**
* Return the values at the specified path from multiple key arguments as raw JSON strings.
* <p>
* Behaves like {@link #jsonMGet(JsonPath, Object[])} but returns {@code List<String>} with raw JSON instead of
* {@link JsonValue} wrappers.
*
* @param jsonPath the {@link JsonPath} pointing to the value to fetch.
* @param keys the keys holding the values to fetch.
* @return List<String> the values at path, or null if the path does not exist.
* @since 7.0
*/
Flux<String> jsonMGetRaw(JsonPath jsonPath, K... keys);
/**
* Set or update one or more JSON values according to the specified {@link JsonMsetArgs}
* <p>
* JSON.MSET is atomic, hence, all given additions or updates are either applied or not. It is not possible for clients to
* see that some keys were updated while others are unchanged.
* <p>
* A JSON value is a hierarchical structure. If you change a value in a specific path - nested values are affected.
*
* @param arguments the {@link JsonMsetArgs} specifying the values to change.
* @return "OK" if the operation was successful, error otherwise
* @since 6.5
*/
Mono<String> jsonMSet(List<JsonMsetArgs<K, V>> arguments);
/**
* Increment the number value stored at the specified {@link JsonPath} in the JSON document by the provided increment.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value to increment.
* @param number the increment value.
* @return a {@link List} of the new values after the increment.
* @since 6.5
*/
Flux<Number> jsonNumincrby(K key, JsonPath jsonPath, Number number);
/**
* Return the keys in the JSON document that are referenced by the given {@link JsonPath}
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) whose key(s) we want.
* @return List<V> the keys in the JSON document that are referenced by the given {@link JsonPath}.
* @since 6.5
*/
Flux<V> jsonObjkeys(K key, JsonPath jsonPath);
/**
* Return the keys in the JSON document that are referenced by the {@link JsonPath#ROOT_PATH}
*
* @param key the key holding the JSON document.
* @return List<V> the keys in the JSON document that are referenced by the given {@link JsonPath}.
* @since 6.5
*/
Flux<V> jsonObjkeys(K key);
/**
* Report the number of keys in the JSON object at the specified {@link JsonPath} and for the provided key
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) whose key(s) we want to count
* @return Long the number of keys in the JSON object at the specified path, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonObjlen(K key, JsonPath jsonPath);
/**
* Report the number of keys in the JSON object at the {@link JsonPath#ROOT_PATH} and for the provided key
*
* @param key the key holding the JSON document.
* @return Long the number of keys in the JSON object at the specified path, or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonObjlen(K key);
/**
* Sets the JSON value at a given {@link JsonPath} in the JSON document.
* <p>
* For new Redis keys, the path must be the root. For existing keys, when the entire path exists, the value that it contains
* is replaced with the JSON value. For existing keys, when the path exists, except for the last element, a new child is
* added with the JSON value.
* <p>
* Adds a key (with its respective value) to a JSON Object (in a RedisJSON data type key) only if it is the last child in
* the path, or it is the parent of a new child being added in the path. Optional arguments NX and XX modify this behavior
* for both new RedisJSON data type keys and the JSON Object keys in them.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) where we want to set the value.
* @param value the {@link JsonValue} to set.
* @param options the {@link JsonSetArgs} the options for setting the value.
* @return String "OK" if the set was successful, null if the {@link JsonSetArgs} conditions are not met.
* @since 6.5
*/
Mono<String> jsonSet(K key, JsonPath jsonPath, JsonValue value, JsonSetArgs options);
/**
* Sets the JSON value at a given {@link JsonPath} in the JSON document using defaults for the {@link JsonSetArgs}.
* <p>
* For new Redis keys the path must be the root. For existing keys, when the entire path exists, the value that it contains
* is replaced with the JSON value. For existing keys, when the path exists, except for the last element, a new child is
* added with the JSON value.
* <p>
* Adds a key (with its respective value) to a JSON Object (in a RedisJSON data type key) only if it is the last child in
* the path, or it is the parent of a new child being added in the path. Optional arguments NX and XX modify this behavior
* for both new RedisJSON data type keys and the JSON Object keys in them.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) where we want to set the value.
* @param value the {@link JsonValue} to set.
* @return String "OK" if the set was successful, null if the {@link JsonSetArgs} conditions are not met.
* @since 6.5
*/
Mono<String> jsonSet(K key, JsonPath jsonPath, JsonValue value);
/**
* Sets the JSON value at a given {@link JsonPath} in the JSON document using defaults for the {@link JsonSetArgs}.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) where we want to set the value.
* @param jsonString the JSON string to set.
* @return String "OK" if the set was successful, null if the {@link JsonSetArgs} conditions are not met.
* @since 6.8
*/
Mono<String> jsonSet(K key, JsonPath jsonPath, String jsonString);
/**
* Sets the JSON value at a given {@link JsonPath} in the JSON document.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) where we want to set the value.
* @param jsonString the JSON string to set.
* @param options the {@link JsonSetArgs} the options for setting the value.
* @return String "OK" if the set was successful, null if the {@link JsonSetArgs} conditions are not met.
* @since 6.8
*/
Mono<String> jsonSet(K key, JsonPath jsonPath, String jsonString, JsonSetArgs options);
/**
* Append the json-string values to the string at the provided {@link JsonPath} in the JSON document.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) where we want to append the value.
* @param value the {@link JsonValue} to append.
* @return Long the new length of the string, or null if the matching JSON value is not a string.
* @since 6.5
*/
Flux<Long> jsonStrappend(K key, JsonPath jsonPath, JsonValue value);
/**
* Append the json-string values to the string at the {@link JsonPath#ROOT_PATH} in the JSON document.
*
* @param key the key holding the JSON document.
* @param value the {@link JsonValue} to append.
* @return Long the new length of the string, or null if the matching JSON value is not a string.
* @since 6.5
*/
Flux<Long> jsonStrappend(K key, JsonValue value);
/**
* Append the JSON string to the string at the {@link JsonPath#ROOT_PATH} in the JSON document.
*
* @param key the key holding the JSON document.
* @param jsonString the JSON string to append.
* @return Long the new length of the string, or null if the matching JSON value is not a string.
* @since 6.8
*/
Flux<Long> jsonStrappend(K key, String jsonString);
/**
* Append the JSON string to the string at the provided {@link JsonPath} in the JSON document.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s) where we want to append the value.
* @param jsonString the JSON string to append.
* @return Long the new length of the string, or null if the matching JSON value is not a string.
* @since 6.8
*/
Flux<Long> jsonStrappend(K key, JsonPath jsonPath, String jsonString);
/**
* Report the length of the JSON String at the provided {@link JsonPath} in the JSON document.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s).
* @return Long (in recursive descent) the length of the JSON String at the provided {@link JsonPath}, or null if the value
* ath the desired path is not a string.
* @since 6.5
*/
Flux<Long> jsonStrlen(K key, JsonPath jsonPath);
/**
* Report the length of the JSON String at the {@link JsonPath#ROOT_PATH} in the JSON document.
*
* @param key the key holding the JSON document.
* @return Long (in recursive descent) the length of the JSON String at the provided {@link JsonPath}, or null if the value
* ath the desired path is not a string.
* @since 6.5
*/
Flux<Long> jsonStrlen(K key);
/**
* Toggle a Boolean value stored at the provided {@link JsonPath} in the JSON document.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s).
* @return List<Long> the new value after the toggle, 0 for false, 1 for true or null if the path does not exist.
* @since 6.5
*/
Flux<Long> jsonToggle(K key, JsonPath jsonPath);
/**
* Report the type of JSON value at the provided {@link JsonPath} in the JSON document.
*
* @param key the key holding the JSON document.
* @param jsonPath the {@link JsonPath} pointing to the value(s).
* @return List<JsonType> the type of JSON value at the provided {@link JsonPath}
* @since 6.5
*/
Flux<JsonType> jsonType(K key, JsonPath jsonPath);
/**
* Report the type of JSON value at the {@link JsonPath#ROOT_PATH} in the JSON document.
*
* @param key the key holding the JSON document.
* @return List<JsonType> the type of JSON value at the provided {@link JsonPath}
* @since 6.5
*/
Flux<JsonType> jsonType(K key);
}
| RedisJsonReactiveCommands |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/any/annotations/AnyMergeTest.java | {
"start": 1175,
"end": 2170
} | class ____ {
@Test
public void testMerge(SessionFactoryScope scope) {
Amount amount = new Amount( new BigDecimal( 10 ) );
Bonus bonus = new Bonus( "that's a bonus", amount );
scope.inTransaction(
session -> {
session.persist( amount );
session.persist( bonus );
}
);
scope.inTransaction(
session -> {
InvoicePosition invoicePosition = new InvoicePosition();
invoicePosition.setReference( bonus );
InvoicePosition merged = session.merge( invoicePosition );
Reference mergedReference = merged.getReference();
assertThat( mergedReference ).isExactlyInstanceOf( Bonus.class );
Bonus mergedBonus = (Bonus) mergedReference;
// check the merged values are copies of the original ones
assertThat( mergedBonus ).isNotEqualTo( bonus );
assertThat( mergedBonus.amount ).isNotEqualTo( amount );
assertThat( mergedBonus.amount.quantity.compareTo( new BigDecimal( 10 ) ) ).isEqualTo( 0 );
}
);
}
public | AnyMergeTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetoone/OptionalOneToOneMapsIdQueryTest.java | {
"start": 8138,
"end": 8337
} | class ____ {
@Id
private long barId;
private long longValue;
}
@Entity(name = "FooHasBarWithNonIdPropNamedId")
@Table(name = "FooHasBarNonIdPropNamedId")
public static | BarWithNoIdOrPropNamedId |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/volume/csi/VolumeManagerImpl.java | {
"start": 2290,
"end": 7148
} | class ____ extends AbstractService
implements VolumeManager {
private static final Logger LOG =
LoggerFactory.getLogger(VolumeManagerImpl.class);
private final VolumeStates volumeStates;
private ScheduledExecutorService provisioningExecutor;
private Map<String, CsiAdaptorProtocol> csiAdaptorMap;
private final static int PROVISIONING_TASK_THREAD_POOL_SIZE = 10;
public VolumeManagerImpl() {
super(VolumeManagerImpl.class.getName());
this.volumeStates = new VolumeStates();
this.csiAdaptorMap = new ConcurrentHashMap<>();
this.provisioningExecutor = Executors
.newScheduledThreadPool(PROVISIONING_TASK_THREAD_POOL_SIZE);
}
// Init the CSI adaptor cache according to the configuration.
// user only needs to configure a list of adaptor addresses,
// this method extracts each address and init an adaptor client,
// then proceed with a hand-shake by calling adaptor's getPluginInfo
// method to retrieve the driver info. If the driver can be resolved,
// it is then added to the cache. Note, we don't allow two drivers
// specified with same driver-name even version is different.
private void initCsiAdaptorCache(
final Map<String, CsiAdaptorProtocol> adaptorMap, Configuration conf)
throws IOException, YarnException {
LOG.info("Initializing cache for csi-driver-adaptors");
String[] addresses =
conf.getStrings(YarnConfiguration.NM_CSI_ADAPTOR_ADDRESSES);
if (addresses != null && addresses.length > 0) {
for (String addr : addresses) {
LOG.info("Found csi-driver-adaptor socket address: " + addr);
InetSocketAddress address = NetUtils.createSocketAddr(addr);
YarnRPC rpc = YarnRPC.create(conf);
UserGroupInformation currentUser =
UserGroupInformation.getCurrentUser();
CsiAdaptorProtocol adaptorClient = NMProxy
.createNMProxy(conf, CsiAdaptorProtocol.class, currentUser, rpc,
address);
// Attempt to resolve the driver by contacting to
// the diver's identity service on the given address.
// If the call failed, the initialization is also failed
// in order running into inconsistent state.
LOG.info("Retrieving info from csi-driver-adaptor on address " + addr);
GetPluginInfoResponse response =
adaptorClient.getPluginInfo(GetPluginInfoRequest.newInstance());
if (!Strings.isNullOrEmpty(response.getDriverName())) {
String driverName = response.getDriverName();
if (adaptorMap.containsKey(driverName)) {
throw new YarnException(
"Duplicate driver adaptor found," + " driver name: "
+ driverName);
}
adaptorMap.put(driverName, adaptorClient);
LOG.info("CSI Adaptor added to the cache, adaptor name: " + driverName
+ ", driver version: " + response.getVersion());
}
}
}
}
/**
* Returns a CsiAdaptorProtocol client by the given driver name,
* returns null if no adaptor is found for the driver, that means
* the driver has not registered to the volume manager yet enhance not valid.
* @param driverName the name of the driver
* @return CsiAdaptorProtocol client or null if driver not registered
*/
public CsiAdaptorProtocol getAdaptorByDriverName(String driverName) {
return csiAdaptorMap.get(driverName);
}
@VisibleForTesting
@Override
public void registerCsiDriverAdaptor(String driverName,
CsiAdaptorProtocol client) {
this.csiAdaptorMap.put(driverName, client);
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
initCsiAdaptorCache(csiAdaptorMap, conf);
super.serviceInit(conf);
}
@Override
protected void serviceStart() throws Exception {
super.serviceStart();
}
@Override
protected void serviceStop() throws Exception {
provisioningExecutor.shutdown();
super.serviceStop();
}
@Override
public VolumeStates getVolumeStates() {
return this.volumeStates;
}
@Override
public Volume addOrGetVolume(Volume volume) {
if (volumeStates.getVolume(volume.getVolumeId()) != null) {
// volume already exists
return volumeStates.getVolume(volume.getVolumeId());
} else {
this.volumeStates.addVolumeIfAbsent(volume);
return volume;
}
}
@Override
public ScheduledFuture<VolumeProvisioningResults> schedule(
VolumeProvisioningTask volumeProvisioningTask,
int delaySecond) {
LOG.info("Scheduling provision volume task (with delay "
+ delaySecond + "s)," + " handling "
+ volumeProvisioningTask.getVolumes().size()
+ " volume provisioning");
return provisioningExecutor.schedule(volumeProvisioningTask,
delaySecond, TimeUnit.SECONDS);
}
}
| VolumeManagerImpl |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/transport/LocalHttpClientTransport.java | {
"start": 5356,
"end": 5552
} | class ____ implements HttpRoutePlanner {
@Override
public HttpRoute determineRoute(HttpHost target, HttpContext context) {
return new HttpRoute(LOCAL_DOCKER_HOST);
}
}
}
| LocalRoutePlanner |
java | quarkusio__quarkus | extensions/smallrye-jwt/deployment/src/test/java/io/quarkus/jwt/test/JwtCallerPrincipalUnitTest.java | {
"start": 533,
"end": 5072
} | class ____ {
@Test
public void testAllClaims() throws InvalidJwtException {
InputStream is = getClass().getResourceAsStream("/Token1.json");
JsonObject content = Json.createReader(is).readObject();
JwtClaims jwtClaims = JwtClaims.parse(content.toString());
DefaultJWTCallerPrincipal principal = new DefaultJWTCallerPrincipal(jwtClaims);
String iss = principal.getIssuer();
Assertions.assertEquals("https://server.example.com", iss);
String jti = principal.getTokenID();
Assertions.assertEquals("a-123", jti);
String name = principal.getName();
Assertions.assertEquals("jdoe@example.com", name);
String upn = principal.getClaim(Claims.upn.name());
Assertions.assertEquals("jdoe@example.com", upn);
Set<String> aud = principal.getAudience();
Assertions.assertEquals(new HashSet<>(Arrays.asList("s6BhdRkqt3")), aud);
Long exp = principal.getExpirationTime();
Assertions.assertEquals(1311281970l, exp.longValue());
Long iat = principal.getIssuedAtTime();
Assertions.assertEquals(1311280970l, iat.longValue());
String sub = principal.getSubject();
Assertions.assertEquals("24400320", sub);
Set<String> groups = principal.getGroups();
String[] expectedGroups = { "Echoer",
"Tester",
"group1",
"group2" };
Assertions.assertEquals(new HashSet<String>(Arrays.asList(expectedGroups)), groups);
/*
* "customDoubleArray": [0.1, 1.1, 2.2, 3.3, 4.4],
*/
JsonArray customDoubleArray = principal.getClaim("customDoubleArray");
Assertions.assertEquals(5, customDoubleArray.size());
Assertions.assertEquals(Json.createValue(0.1), customDoubleArray.getJsonNumber(0));
Assertions.assertEquals(Json.createValue(1.1), customDoubleArray.getJsonNumber(1));
Assertions.assertEquals(Json.createValue(2.2), customDoubleArray.getJsonNumber(2));
Assertions.assertEquals(Json.createValue(3.3), customDoubleArray.getJsonNumber(3));
Assertions.assertEquals(Json.createValue(4.4), customDoubleArray.getJsonNumber(4));
// "customString": "customStringValue",
Assertions.assertEquals("customStringValue", principal.getClaim("customString"));
// "customInteger": 123456789,
JsonNumber customInteger = principal.getClaim("customInteger");
Assertions.assertEquals(Json.createValue(123456789), customInteger);
// "customDouble": 3.141592653589793,
JsonNumber customDouble = principal.getClaim("customDouble");
Assertions.assertEquals(Json.createValue(3.141592653589793), customDouble);
/*
* "customStringArray": ["value0", "value1", "value2" ],
*/
JsonArray customStringArray = principal.getClaim("customStringArray");
Assertions.assertEquals(3, customStringArray.size());
Assertions.assertEquals(Json.createValue("value0"), customStringArray.getJsonString(0));
Assertions.assertEquals(Json.createValue("value1"), customStringArray.getJsonString(1));
Assertions.assertEquals(Json.createValue("value2"), customStringArray.getJsonString(2));
/* "customIntegerArray": [0,1,2,3] */
JsonArray customIntegerArray = principal.getClaim("customIntegerArray");
Assertions.assertEquals(4, customIntegerArray.size());
Assertions.assertEquals(Json.createValue(0), customIntegerArray.getJsonNumber(0));
Assertions.assertEquals(Json.createValue(1), customIntegerArray.getJsonNumber(1));
Assertions.assertEquals(Json.createValue(2), customIntegerArray.getJsonNumber(2));
Assertions.assertEquals(Json.createValue(3), customIntegerArray.getJsonNumber(3));
/*
* "customObject": {
* "my-service": {
* "groups": [
* "group1",
* "group2"
* ],
* "roles": [
* "role-in-my-service"
* ]
* },
* "service-B": {
* "roles": [
* "role-in-B"
* ]
* },
* "service-C": {
* "groups": [
* "groupC",
* "web-tier"
* ]
* }
* }
*/
JsonObject customObject = principal.getClaim("customObject");
String[] keys = { "my-service", "service-B", "service-C" };
Assertions.assertEquals(new HashSet<>(Arrays.asList(keys)), customObject.keySet());
}
}
| JwtCallerPrincipalUnitTest |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractOpen.java | {
"start": 1619,
"end": 3569
} | class ____ extends AbstractContractOpenTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new S3AContract(conf);
}
/**
* S3A always declares zero byte files as encrypted.
* @return true, always.
*/
@Override
protected boolean areZeroByteFilesEncrypted() {
return true;
}
@Test
public void testOpenFileApplyReadBadName() throws Throwable {
describe("use the apply sequence to read a whole file");
Path path = methodPath();
FileSystem fs = getFileSystem();
touch(fs, path);
FileStatus st = fs.getFileStatus(path);
// The final element of the path is different, so
// openFile must fail
FileStatus st2 = new FileStatus(
0, false,
st.getReplication(),
st.getBlockSize(),
st.getModificationTime(),
st.getAccessTime(),
st.getPermission(),
st.getOwner(),
st.getGroup(),
new Path("gopher:///localhost/something.txt"));
intercept(IllegalArgumentException.class, () ->
fs.openFile(path)
.withFileStatus(st2)
.build());
}
/**
* Pass in a directory reference and expect the openFile call
* to fail.
*/
@Test
public void testOpenFileDirectory() throws Throwable {
describe("Change the status to a directory");
Path path = methodPath();
FileSystem fs = getFileSystem();
int len = 4096;
createFile(fs, path, true,
dataset(len, 0x40, 0x80));
FileStatus st = fs.getFileStatus(path);
FileStatus st2 = new FileStatus(
len, true,
st.getReplication(),
st.getBlockSize(),
st.getModificationTime(),
st.getAccessTime(),
st.getPermission(),
st.getOwner(),
st.getGroup(),
path);
intercept(FileNotFoundException.class, () ->
fs.openFile(path)
.withFileStatus(st2)
.build());
}
}
| ITestS3AContractOpen |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/buffer/NioManagedBuffer.java | {
"start": 1078,
"end": 2001
} | class ____ extends ManagedBuffer {
private final ByteBuffer buf;
public NioManagedBuffer(ByteBuffer buf) {
this.buf = buf;
}
@Override
public long size() {
return buf.remaining();
}
@Override
public ByteBuffer nioByteBuffer() throws IOException {
return buf.duplicate();
}
@Override
public InputStream createInputStream() throws IOException {
return new ByteBufInputStream(Unpooled.wrappedBuffer(buf));
}
@Override
public ManagedBuffer retain() {
return this;
}
@Override
public ManagedBuffer release() {
return this;
}
@Override
public Object convertToNetty() throws IOException {
return Unpooled.wrappedBuffer(buf);
}
@Override
public Object convertToNettyForSsl() throws IOException {
return Unpooled.wrappedBuffer(buf);
}
@Override
public String toString() {
return "NioManagedBuffer[buf=" + buf + "]";
}
}
| NioManagedBuffer |
java | micronaut-projects__micronaut-core | inject-groovy/src/main/groovy/io/micronaut/ast/groovy/visitor/GroovyGenericPlaceholderElement.java | {
"start": 1592,
"end": 7251
} | class ____ extends GroovyClassElement implements GenericPlaceholderElement {
private final GroovyNativeElement placeholderNativeElement;
private final Element declaringElement;
private final String variableName;
private final GroovyClassElement resolved;
private final List<GroovyClassElement> bounds;
private final boolean rawType;
private final ElementAnnotationMetadata typeAnnotationMetadata;
@Nullable
private ElementAnnotationMetadata genericTypeAnnotationMetadata;
GroovyGenericPlaceholderElement(GroovyVisitorContext visitorContext,
Element declaringElement,
GroovyNativeElement placeholderNativeElement,
@Nullable
GroovyClassElement resolved,
List<GroovyClassElement> bounds,
int arrayDimensions,
boolean rawType,
String variableName) {
this(visitorContext, declaringElement, placeholderNativeElement, variableName, resolved, bounds, selectClassElementRepresentingThisPlaceholder(resolved, bounds), arrayDimensions, rawType);
}
GroovyGenericPlaceholderElement(GroovyVisitorContext visitorContext,
Element declaringElement,
GroovyNativeElement placeholderNativeElement,
String variableName,
@Nullable
GroovyClassElement resolved,
List<GroovyClassElement> bounds,
GroovyClassElement classElementRepresentingThisPlaceholder,
int arrayDimensions,
boolean rawType) {
super(visitorContext,
classElementRepresentingThisPlaceholder.getNativeType(),
classElementRepresentingThisPlaceholder.getElementAnnotationMetadataFactory(),
classElementRepresentingThisPlaceholder.resolvedTypeArguments,
arrayDimensions);
this.declaringElement = declaringElement;
this.placeholderNativeElement = placeholderNativeElement;
this.variableName = variableName;
this.resolved = resolved;
this.bounds = bounds;
this.rawType = rawType;
typeAnnotationMetadata = new GenericPlaceholderElementAnnotationMetadata(this, classElementRepresentingThisPlaceholder);
}
private static GroovyClassElement selectClassElementRepresentingThisPlaceholder(@Nullable GroovyClassElement resolved,
@NonNull List<GroovyClassElement> bounds) {
if (resolved != null) {
return resolved;
}
return WildcardElement.findUpperType(bounds, bounds);
}
@Override
public boolean isTypeVariable() {
return true;
}
@Override
protected MutableAnnotationMetadataDelegate<?> getAnnotationMetadataToWrite() {
return getGenericTypeAnnotationMetadata();
}
@NonNull
@Override
public MutableAnnotationMetadataDelegate<AnnotationMetadata> getGenericTypeAnnotationMetadata() {
if (genericTypeAnnotationMetadata == null) {
genericTypeAnnotationMetadata = elementAnnotationMetadataFactory.buildGenericTypeAnnotations(this);
}
return genericTypeAnnotationMetadata;
}
@NonNull
@Override
public MutableAnnotationMetadataDelegate<AnnotationMetadata> getTypeAnnotationMetadata() {
return typeAnnotationMetadata;
}
@NonNull
@Override
public AnnotationMetadata getAnnotationMetadata() {
return new AnnotationMetadataHierarchy(true, super.getAnnotationMetadata(), getGenericTypeAnnotationMetadata());
}
@NonNull
@Override
public GroovyNativeElement getGenericNativeType() {
return placeholderNativeElement;
}
@Override
public boolean isRawType() {
return rawType;
}
@NonNull
@Override
protected GroovyClassElement copyConstructor() {
return new GroovyGenericPlaceholderElement(visitorContext, declaringElement, placeholderNativeElement, variableName, resolved, bounds, selectClassElementRepresentingThisPlaceholder(resolved, bounds), getArrayDimensions(), rawType);
}
@NonNull
@Override
public List<GroovyClassElement> getBounds() {
return bounds;
}
@NonNull
@Override
public String getVariableName() {
return variableName;
}
@Override
public Optional<Element> getDeclaringElement() {
return Optional.ofNullable(declaringElement);
}
@Override
public ClassElement withArrayDimensions(int arrayDimensions) {
return new GroovyGenericPlaceholderElement(visitorContext, declaringElement, placeholderNativeElement, variableName, resolved, bounds, selectClassElementRepresentingThisPlaceholder(resolved, bounds), arrayDimensions, rawType);
}
@Override
public ClassElement foldBoundGenericTypes(@NonNull Function<ClassElement, ClassElement> fold) {
Objects.requireNonNull(fold, "Function argument cannot be null");
return fold.apply(this);
}
@Override
public Optional<ClassElement> getResolved() {
return Optional.ofNullable(resolved);
}
@Nullable
public GroovyClassElement getResolvedInternal() {
return resolved;
}
}
| GroovyGenericPlaceholderElement |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/annotation/DoNotMockTest.java | {
"start": 7057,
"end": 7156
} | class ____ {}
@DoNotMock(reason = "Special reason")
private | NotMockableWithDifferentAnnotation |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/utils/LogUtil.java | {
"start": 1045,
"end": 4146
} | class ____ {
private static final Logger Log = LoggerFactory.getLogger(LogUtil.class);
public static void start() {
DubboAppender.doStart();
}
public static void stop() {
DubboAppender.doStop();
}
public static boolean checkNoError() {
if (findLevel(Level.ERROR) == 0) {
return true;
} else {
return false;
}
}
public static int findName(String expectedLogName) {
int count = 0;
List<Log> logList = DubboAppender.logList;
for (int i = 0; i < logList.size(); i++) {
String logName = logList.get(i).getLogName();
if (logName.contains(expectedLogName)) {
count++;
}
}
return count;
}
public static int findLevel(Level expectedLevel) {
int count = 0;
List<Log> logList = DubboAppender.logList;
for (int i = 0; i < logList.size(); i++) {
Level logLevel = logList.get(i).getLogLevel();
if (logLevel.equals(expectedLevel)) {
count++;
}
}
return count;
}
public static int findLevelWithThreadName(Level expectedLevel, String threadName) {
int count = 0;
List<Log> logList = DubboAppender.logList;
for (int i = 0; i < logList.size(); i++) {
Log log = logList.get(i);
if (log.getLogLevel().equals(expectedLevel) && log.getLogThread().equals(threadName)) {
count++;
}
}
return count;
}
public static int findThread(String expectedThread) {
int count = 0;
List<Log> logList = DubboAppender.logList;
for (int i = 0; i < logList.size(); i++) {
String logThread = logList.get(i).getLogThread();
if (logThread.contains(expectedThread)) {
count++;
}
}
return count;
}
public static int findMessage(String expectedMessage) {
int count = 0;
List<Log> logList = DubboAppender.logList;
for (int i = 0; i < logList.size(); i++) {
String logMessage = logList.get(i).getLogMessage();
if (logMessage.contains(expectedMessage)) {
count++;
}
}
return count;
}
public static int findMessage(Level expectedLevel, String expectedMessage) {
int count = 0;
List<Log> logList = DubboAppender.logList;
for (int i = 0; i < logList.size(); i++) {
Level logLevel = logList.get(i).getLogLevel();
if (logLevel.equals(expectedLevel)) {
String logMessage = logList.get(i).getLogMessage();
if (logMessage.contains(expectedMessage)) {
count++;
}
}
}
return count;
}
public static <T> void printList(List<T> list) {
Log.info("PrintList:");
Iterator<T> it = list.iterator();
while (it.hasNext()) {
Log.info(it.next().toString());
}
}
}
| LogUtil |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/ClusterTopologyRefreshScheduler.java | {
"start": 10681,
"end": 11968
} | class ____ extends AtomicBoolean implements Runnable {
private static final long serialVersionUID = -1337731371220365694L;
private final Supplier<CompletionStage<?>> reloadTopologyAsync;
ClusterTopologyRefreshTask(Supplier<CompletionStage<?>> reloadTopologyAsync) {
this.reloadTopologyAsync = reloadTopologyAsync;
}
public void run() {
if (compareAndSet(false, true)) {
doRun();
return;
}
if (logger.isDebugEnabled()) {
logger.debug("ClusterTopologyRefreshTask already in progress");
}
}
void doRun() {
if (logger.isDebugEnabled()) {
logger.debug("ClusterTopologyRefreshTask requesting partitions");
}
try {
reloadTopologyAsync.get().whenComplete((ignore, throwable) -> {
if (throwable != null) {
logger.warn("Cannot refresh Redis Cluster topology", throwable);
}
set(false);
});
} catch (Exception e) {
logger.warn("Cannot refresh Redis Cluster topology", e);
}
}
}
}
| ClusterTopologyRefreshTask |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/client/JdbcRegisteredClientRepositoryTests.java | {
"start": 14846,
"end": 19356
} | class ____ implements RowMapper<RegisteredClient> {
private final JsonMapper jsonMapper;
private CustomRegisteredClientRowMapper() {
List<JacksonModule> modules = SecurityJacksonModules
.getModules(CustomRegisteredClientRowMapper.class.getClassLoader());
this.jsonMapper = JsonMapper.builder().addModules(modules).build();
}
@Override
public RegisteredClient mapRow(ResultSet rs, int rowNum) throws SQLException {
Timestamp clientIdIssuedAt = rs.getTimestamp("clientIdIssuedAt");
Timestamp clientSecretExpiresAt = rs.getTimestamp("clientSecretExpiresAt");
Set<String> clientAuthenticationMethods = StringUtils
.commaDelimitedListToSet(rs.getString("clientAuthenticationMethods"));
Set<String> authorizationGrantTypes = StringUtils
.commaDelimitedListToSet(rs.getString("authorizationGrantTypes"));
Set<String> redirectUris = StringUtils.commaDelimitedListToSet(rs.getString("redirectUris"));
Set<String> postLogoutRedirectUris = StringUtils
.commaDelimitedListToSet(rs.getString("postLogoutRedirectUris"));
Set<String> clientScopes = StringUtils.commaDelimitedListToSet(rs.getString("scopes"));
// @formatter:off
RegisteredClient.Builder builder = RegisteredClient.withId(rs.getString("id"))
.clientId(rs.getString("clientId"))
.clientIdIssuedAt((clientIdIssuedAt != null) ? clientIdIssuedAt.toInstant() : null)
.clientSecret(rs.getString("clientSecret"))
.clientSecretExpiresAt((clientSecretExpiresAt != null) ? clientSecretExpiresAt.toInstant() : null)
.clientName(rs.getString("clientName"))
.clientAuthenticationMethods((authenticationMethods) ->
clientAuthenticationMethods.forEach((authenticationMethod) ->
authenticationMethods.add(resolveClientAuthenticationMethod(authenticationMethod))))
.authorizationGrantTypes((grantTypes) ->
authorizationGrantTypes.forEach((grantType) ->
grantTypes.add(resolveAuthorizationGrantType(grantType))))
.redirectUris((uris) -> uris.addAll(redirectUris))
.postLogoutRedirectUris((uris) -> uris.addAll(postLogoutRedirectUris))
.scopes((scopes) -> scopes.addAll(clientScopes));
// @formatter:on
Map<String, Object> clientSettingsMap = parseMap(rs.getString("clientSettings"));
builder.clientSettings(ClientSettings.withSettings(clientSettingsMap).build());
Map<String, Object> tokenSettingsMap = parseMap(rs.getString("tokenSettings"));
builder.tokenSettings(TokenSettings.withSettings(tokenSettingsMap).build());
return builder.build();
}
private Map<String, Object> parseMap(String data) {
final ParameterizedTypeReference<Map<String, Object>> typeReference = new ParameterizedTypeReference<>() {
};
try {
tools.jackson.databind.JavaType javaType = this.jsonMapper.getTypeFactory()
.constructType(typeReference.getType());
return this.jsonMapper.readValue(data, javaType);
}
catch (Exception ex) {
throw new IllegalArgumentException(ex.getMessage(), ex);
}
}
private static AuthorizationGrantType resolveAuthorizationGrantType(String authorizationGrantType) {
if (AuthorizationGrantType.AUTHORIZATION_CODE.getValue().equals(authorizationGrantType)) {
return AuthorizationGrantType.AUTHORIZATION_CODE;
}
else if (AuthorizationGrantType.CLIENT_CREDENTIALS.getValue().equals(authorizationGrantType)) {
return AuthorizationGrantType.CLIENT_CREDENTIALS;
}
else if (AuthorizationGrantType.REFRESH_TOKEN.getValue().equals(authorizationGrantType)) {
return AuthorizationGrantType.REFRESH_TOKEN;
}
// Custom authorization grant type
return new AuthorizationGrantType(authorizationGrantType);
}
private static ClientAuthenticationMethod resolveClientAuthenticationMethod(
String clientAuthenticationMethod) {
if (ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue().equals(clientAuthenticationMethod)) {
return ClientAuthenticationMethod.CLIENT_SECRET_BASIC;
}
else if (ClientAuthenticationMethod.CLIENT_SECRET_POST.getValue().equals(clientAuthenticationMethod)) {
return ClientAuthenticationMethod.CLIENT_SECRET_POST;
}
else if (ClientAuthenticationMethod.NONE.getValue().equals(clientAuthenticationMethod)) {
return ClientAuthenticationMethod.NONE;
}
// Custom client authentication method
return new ClientAuthenticationMethod(clientAuthenticationMethod);
}
}
}
}
| CustomRegisteredClientRowMapper |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/JdbcTypeJavaClassMappings.java | {
"start": 1322,
"end": 7901
} | class ____ {
private static final Logger LOG = Logger.getLogger( JdbcTypeJavaClassMappings.class );
public static final JdbcTypeJavaClassMappings INSTANCE = new JdbcTypeJavaClassMappings();
private final ConcurrentHashMap<Class<?>, Integer> javaClassToJdbcTypeCodeMap;
private final ConcurrentHashMap<Integer, Class<?>> jdbcTypeCodeToJavaClassMap;
private JdbcTypeJavaClassMappings() {
javaClassToJdbcTypeCodeMap = buildJavaClassToJdbcTypeCodeMappings();
jdbcTypeCodeToJavaClassMap = buildJdbcTypeCodeToJavaClassMappings();
}
/**
* For the given Java type, determine the JDBC recommended JDBC type.
* <p>
* This includes the mappings defined in <em>TABLE B-2: Java Types Mapped to JDBC Types</em>
* and <em>TABLE B-4: Java Object Types Mapped to JDBC Types</em>, as well as some additional
* "common sense" mappings.
*/
public int determineJdbcTypeCodeForJavaClass(Class<?> cls) {
Integer typeCode = javaClassToJdbcTypeCodeMap.get( cls );
if ( typeCode != null ) {
return typeCode;
}
int specialCode = cls.hashCode();
LOG.debug(
"JDBC type code mapping not known for class [" + cls.getName() + "]; using custom code [" + specialCode + "]"
);
return specialCode;
}
/**
* For the given JDBC type, determine the JDBC recommended Java type.
* <p>
* These mappings are defined by <em>TABLE B-1: JDBC Types Mapped to Java Types</em>.
*/
public Class<?> determineJavaClassForJdbcTypeCode(Integer typeCode) {
Class<?> cls = jdbcTypeCodeToJavaClassMap.get( typeCode );
if ( cls != null ) {
return cls;
}
LOG.debugf(
"Java Class mapping not known for JDBC type code [%s]; using java.lang.Object",
typeCode
);
return Object.class;
}
/**
* @see #determineJavaClassForJdbcTypeCode(Integer)
*/
public Class<?> determineJavaClassForJdbcTypeCode(int typeCode) {
return determineJavaClassForJdbcTypeCode( Integer.valueOf( typeCode ) );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private static ConcurrentHashMap<Class<?>, Integer> buildJavaClassToJdbcTypeCodeMappings() {
final ConcurrentHashMap<Class<?>, Integer> workMap = new ConcurrentHashMap<>();
// these mappings are the ones outlined specifically in the spec
workMap.put( String.class, SqlTypes.VARCHAR );
workMap.put( BigDecimal.class, SqlTypes.NUMERIC );
workMap.put( BigInteger.class, SqlTypes.NUMERIC );
workMap.put( Boolean.class, SqlTypes.BIT );
workMap.put( Byte.class, SqlTypes.TINYINT );
workMap.put( Short.class, SqlTypes.SMALLINT );
workMap.put( Integer.class, SqlTypes.INTEGER );
workMap.put( Long.class, SqlTypes.BIGINT );
workMap.put( Float.class, SqlTypes.REAL );
workMap.put( Double.class, SqlTypes.DOUBLE );
workMap.put( byte[].class, SqlTypes.VARBINARY );
workMap.put( java.sql.Date.class, SqlTypes.DATE );
workMap.put( Time.class, SqlTypes.TIME );
workMap.put( Timestamp.class, SqlTypes.TIMESTAMP );
workMap.put( LocalTime.class, SqlTypes.TIME );
workMap.put( OffsetTime.class, SqlTypes.TIME_WITH_TIMEZONE );
workMap.put( LocalDate.class, SqlTypes.DATE );
workMap.put( LocalDateTime.class, SqlTypes.TIMESTAMP );
workMap.put( OffsetDateTime.class, SqlTypes.TIMESTAMP_WITH_TIMEZONE );
workMap.put( ZonedDateTime.class, SqlTypes.TIMESTAMP_WITH_TIMEZONE );
workMap.put( Instant.class, SqlTypes.TIMESTAMP_UTC );
workMap.put( Blob.class, SqlTypes.BLOB );
workMap.put( Clob.class, SqlTypes.CLOB );
workMap.put( Array.class, SqlTypes.ARRAY );
workMap.put( Struct.class, SqlTypes.STRUCT );
workMap.put( Ref.class, SqlTypes.REF );
workMap.put( Class.class, SqlTypes.JAVA_OBJECT );
workMap.put( RowId.class, SqlTypes.ROWID );
workMap.put( SQLXML.class, SqlTypes.SQLXML );
workMap.put( UUID.class, SqlTypes.UUID );
workMap.put( InetAddress.class, SqlTypes.INET );
workMap.put( Inet4Address.class, SqlTypes.INET );
workMap.put( Inet6Address.class, SqlTypes.INET );
workMap.put( Duration.class, SqlTypes.INTERVAL_SECOND );
// additional "common sense" registrations
workMap.put( Character.class, SqlTypes.CHAR );
workMap.put( char[].class, SqlTypes.VARCHAR );
// workMap.put( Character[].class, SqlTypes.VARCHAR );
// workMap.put( Byte[].class, SqlTypes.VARBINARY );
workMap.put( java.util.Date.class, SqlTypes.TIMESTAMP );
workMap.put( Calendar.class, SqlTypes.TIMESTAMP );
return workMap;
}
private static ConcurrentHashMap<Integer, Class<?>> buildJdbcTypeCodeToJavaClassMappings() {
final ConcurrentHashMap<Integer, Class<?>> workMap = new ConcurrentHashMap<>();
workMap.put( SqlTypes.CHAR, String.class );
workMap.put( SqlTypes.VARCHAR, String.class );
workMap.put( SqlTypes.LONGVARCHAR, String.class );
workMap.put( SqlTypes.NCHAR, String.class );
workMap.put( SqlTypes.NVARCHAR, String.class );
workMap.put( SqlTypes.LONGNVARCHAR, String.class );
workMap.put( SqlTypes.NUMERIC, BigDecimal.class );
workMap.put( SqlTypes.DECIMAL, BigDecimal.class );
workMap.put( SqlTypes.BIT, Boolean.class );
workMap.put( SqlTypes.BOOLEAN, Boolean.class );
workMap.put( SqlTypes.TINYINT, Byte.class );
workMap.put( SqlTypes.SMALLINT, Short.class );
workMap.put( SqlTypes.INTEGER, Integer.class );
workMap.put( SqlTypes.BIGINT, Long.class );
workMap.put( SqlTypes.REAL, Float.class );
workMap.put( SqlTypes.DOUBLE, Double.class );
workMap.put( SqlTypes.FLOAT, Double.class );
workMap.put( SqlTypes.BINARY, byte[].class );
workMap.put( SqlTypes.VARBINARY, byte[].class );
workMap.put( SqlTypes.LONGVARBINARY, byte[].class );
workMap.put( SqlTypes.DATE, java.sql.Date.class );
workMap.put( SqlTypes.TIME, Time.class );
workMap.put( SqlTypes.TIMESTAMP, Timestamp.class );
workMap.put( SqlTypes.TIME_WITH_TIMEZONE, OffsetTime.class );
workMap.put( SqlTypes.TIMESTAMP_WITH_TIMEZONE, OffsetDateTime.class );
workMap.put( SqlTypes.BLOB, Blob.class );
workMap.put( SqlTypes.CLOB, Clob.class );
workMap.put( SqlTypes.NCLOB, NClob.class );
workMap.put( SqlTypes.ARRAY, Array.class );
workMap.put( SqlTypes.STRUCT, Struct.class );
workMap.put( SqlTypes.REF, Ref.class );
workMap.put( SqlTypes.JAVA_OBJECT, Object.class );
workMap.put( SqlTypes.ROWID, RowId.class );
workMap.put( SqlTypes.SQLXML, SQLXML.class );
workMap.put( SqlTypes.UUID, UUID.class );
workMap.put( SqlTypes.JSON, String.class );
workMap.put( SqlTypes.INET, InetAddress.class );
workMap.put( SqlTypes.TIMESTAMP_UTC, Instant.class );
workMap.put( SqlTypes.INTERVAL_SECOND, Duration.class );
return workMap;
}
}
| JdbcTypeJavaClassMappings |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/Initializer.java | {
"start": 9295,
"end": 9383
} | enum ____ {
UNINITIALIZED,
MISSING,
KEY_RESOLVED,
RESOLVED,
INITIALIZED
}
}
| State |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ParameterMissingNullableTest.java | {
"start": 8138,
"end": 8478
} | class ____ {
void assertNot(boolean b) {}
void foo(Integer i) {
assertNot(i == null);
}
}
""")
.doTest();
}
@Test
public void negativeAssert() {
aggressiveHelper
.addSourceLines(
"Foo.java",
"""
| Foo |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/BufferTests.java | {
"start": 792,
"end": 9654
} | class ____ extends ScriptTestCase {
public void testByteBufferMethods() {
ByteBuffer bb = ByteBuffer.wrap(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 });
Map<String, Object> params = Collections.singletonMap("bb", ByteBuffer.wrap(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }));
assertEquals(bb.limit(), exec("def bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.limit(), exec("ByteBuffer bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.order(), exec("def bb = params['bb']; bb.order()", params, true));
assertEquals(bb.order(), exec("ByteBuffer bb = params['bb']; bb.order()", params, true));
assertEquals(
bb.order(ByteOrder.LITTLE_ENDIAN).order(),
exec("def bb = params['bb']; bb.order(ByteOrder.LITTLE_ENDIAN).order()", params, true)
);
assertEquals(
bb.order(ByteOrder.LITTLE_ENDIAN).order(),
exec("ByteBuffer bb = params['bb']; bb.order(ByteOrder.LITTLE_ENDIAN).order()", params, true)
);
assertEquals(bb.get(0), exec("def bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(0), exec("ByteBuffer bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(1), exec("def bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.get(1), exec("ByteBuffer bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.getChar(0), exec("def bb = params['bb']; bb.getChar(0)", params, true));
assertEquals(bb.getChar(0), exec("ByteBuffer bb = params['bb']; bb.getChar(0)", params, true));
assertEquals(bb.getDouble(0), (double) exec("def bb = params['bb']; bb.getDouble(0)", params, true), 0.1);
assertEquals(bb.getDouble(0), (double) exec("ByteBuffer bb = params['bb']; bb.getDouble(0)", params, true), 0.1);
assertEquals(bb.getFloat(0), (float) exec("def bb = params['bb']; bb.getFloat(0)", params, true), 0.1);
assertEquals(bb.getFloat(0), (float) exec("ByteBuffer bb = params['bb']; bb.getFloat(0)", params, true), 0.1);
assertEquals(bb.getInt(0), exec("def bb = params['bb']; bb.getInt(0)", params, true));
assertEquals(bb.getInt(0), exec("ByteBuffer bb = params['bb']; bb.getInt(0)", params, true));
assertEquals(bb.getLong(0), exec("def bb = params['bb']; bb.getLong(0)", params, true));
assertEquals(bb.getLong(0), exec("ByteBuffer bb = params['bb']; bb.getLong(0)", params, true));
assertEquals(bb.getShort(0), exec("def bb = params['bb']; bb.getShort(0)", params, true));
assertEquals(bb.getShort(0), exec("ByteBuffer bb = params['bb']; bb.getShort(0)", params, true));
assertEquals(bb.asCharBuffer(), exec("def bb = params['bb']; bb.asCharBuffer()", params, true));
assertEquals(bb.asCharBuffer(), exec("ByteBuffer bb = params['bb']; bb.asCharBuffer()", params, true));
assertEquals(bb.asDoubleBuffer(), exec("def bb = params['bb']; bb.asDoubleBuffer()", params, true));
assertEquals(bb.asDoubleBuffer(), exec("ByteBuffer bb = params['bb']; bb.asDoubleBuffer()", params, true));
assertEquals(bb.asFloatBuffer(), exec("def bb = params['bb']; bb.asFloatBuffer()", params, true));
assertEquals(bb.asFloatBuffer(), exec("ByteBuffer bb = params['bb']; bb.asFloatBuffer()", params, true));
assertEquals(bb.asIntBuffer(), exec("def bb = params['bb']; bb.asIntBuffer()", params, true));
assertEquals(bb.asIntBuffer(), exec("ByteBuffer bb = params['bb']; bb.asIntBuffer()", params, true));
assertEquals(bb.asLongBuffer(), exec("def bb = params['bb']; bb.asLongBuffer()", params, true));
assertEquals(bb.asLongBuffer(), exec("ByteBuffer bb = params['bb']; bb.asLongBuffer()", params, true));
assertEquals(bb.asShortBuffer(), exec("def bb = params['bb']; bb.asShortBuffer()", params, true));
assertEquals(bb.asShortBuffer(), exec("ByteBuffer bb = params['bb']; bb.asShortBuffer()", params, true));
assertEquals(ByteBuffer.wrap(new byte[] { 1, 2, 3 }), exec("ByteBuffer.wrap(new byte[] {1, 2, 3})"));
assertEquals(ByteBuffer.wrap(new byte[] { 1, 2, 3 }, 1, 2), exec("ByteBuffer.wrap(new byte[] {1, 2, 3}, 1, 2)"));
}
public void testCharBufferMethods() {
CharBuffer bb = CharBuffer.wrap(new char[] { 0, 1, 2, 3, 4, 5, 6, 7 });
Map<String, Object> params = Collections.singletonMap("bb", bb);
assertEquals(bb.limit(), exec("def bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.limit(), exec("CharBuffer bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.get(0), exec("def bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(0), exec("CharBuffer bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(1), exec("def bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.get(1), exec("CharBuffer bb = params['bb']; bb.get(1)", params, true));
}
public void testDoubleBufferMethods() {
DoubleBuffer bb = DoubleBuffer.wrap(new double[] { 0, 1, 2, 3, 4, 5, 6, 7 });
Map<String, Object> params = Collections.singletonMap("bb", bb);
assertEquals(bb.limit(), exec("def bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.limit(), exec("DoubleBuffer bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.get(0), exec("def bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(0), exec("DoubleBuffer bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(1), exec("def bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.get(1), exec("DoubleBuffer bb = params['bb']; bb.get(1)", params, true));
}
public void testFloatBufferMethods() {
FloatBuffer bb = FloatBuffer.wrap(new float[] { 0, 1, 2, 3, 4, 5, 6, 7 });
Map<String, Object> params = Collections.singletonMap("bb", bb);
assertEquals(bb.limit(), exec("def bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.limit(), exec("FloatBuffer bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.get(0), exec("def bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(0), exec("FloatBuffer bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(1), exec("def bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.get(1), exec("FloatBuffer bb = params['bb']; bb.get(1)", params, true));
}
public void testIntBufferMethods() {
IntBuffer bb = IntBuffer.wrap(new int[] { 0, 1, 2, 3, 4, 5, 6, 7 });
Map<String, Object> params = Collections.singletonMap("bb", bb);
assertEquals(bb.limit(), exec("def bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.limit(), exec("IntBuffer bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.get(0), exec("def bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(0), exec("IntBuffer bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(1), exec("def bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.get(1), exec("IntBuffer bb = params['bb']; bb.get(1)", params, true));
}
public void testLongBufferMethods() {
LongBuffer bb = LongBuffer.wrap(new long[] { 0, 1, 2, 3, 4, 5, 6, 7 });
Map<String, Object> params = Collections.singletonMap("bb", bb);
assertEquals(bb.limit(), exec("def bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.limit(), exec("LongBuffer bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.get(0), exec("def bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(0), exec("LongBuffer bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(1), exec("def bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.get(1), exec("LongBuffer bb = params['bb']; bb.get(1)", params, true));
}
public void testShortBufferMethods() {
ShortBuffer bb = ShortBuffer.wrap(new short[] { 0, 1, 2, 3, 4, 5, 6, 7 });
Map<String, Object> params = Collections.singletonMap("bb", bb);
assertEquals(bb.limit(), exec("def bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.limit(), exec("ShortBuffer bb = params['bb']; bb.limit()", params, true));
assertEquals(bb.get(0), exec("def bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(0), exec("ShortBuffer bb = params['bb']; bb.get(0)", params, true));
assertEquals(bb.get(1), exec("def bb = params['bb']; bb.get(1)", params, true));
assertEquals(bb.get(1), exec("ShortBuffer bb = params['bb']; bb.get(1)", params, true));
}
}
| BufferTests |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/config/AnnotationDrivenBeanDefinitionParser.java | {
"start": 5138,
"end": 7647
} | class ____ {
public static void configureAutoProxyCreator(Element element, ParserContext parserContext) {
AopNamespaceUtils.registerAutoProxyCreatorIfNecessary(parserContext, element);
String txAdvisorBeanName = TransactionManagementConfigUtils.TRANSACTION_ADVISOR_BEAN_NAME;
if (!parserContext.getRegistry().containsBeanDefinition(txAdvisorBeanName)) {
Object eleSource = parserContext.extractSource(element);
// Create the TransactionAttributeSource definition.
RootBeanDefinition sourceDef = new RootBeanDefinition(
"org.springframework.transaction.annotation.AnnotationTransactionAttributeSource");
sourceDef.setSource(eleSource);
sourceDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
String sourceName = parserContext.getReaderContext().registerWithGeneratedName(sourceDef);
// Create the TransactionInterceptor definition.
RootBeanDefinition interceptorDef = new RootBeanDefinition(TransactionInterceptor.class);
interceptorDef.setSource(eleSource);
interceptorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
registerTransactionManager(element, interceptorDef);
interceptorDef.getPropertyValues().add("transactionAttributeSource", new RuntimeBeanReference(sourceName));
String interceptorName = parserContext.getReaderContext().registerWithGeneratedName(interceptorDef);
// Create the TransactionAttributeSourceAdvisor definition.
RootBeanDefinition advisorDef = new RootBeanDefinition(BeanFactoryTransactionAttributeSourceAdvisor.class);
advisorDef.setSource(eleSource);
advisorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
advisorDef.getPropertyValues().add("transactionAttributeSource", new RuntimeBeanReference(sourceName));
advisorDef.getPropertyValues().add("adviceBeanName", interceptorName);
if (element.hasAttribute("order")) {
advisorDef.getPropertyValues().add("order", element.getAttribute("order"));
}
parserContext.getRegistry().registerBeanDefinition(txAdvisorBeanName, advisorDef);
CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(element.getTagName(), eleSource);
compositeDef.addNestedComponent(new BeanComponentDefinition(sourceDef, sourceName));
compositeDef.addNestedComponent(new BeanComponentDefinition(interceptorDef, interceptorName));
compositeDef.addNestedComponent(new BeanComponentDefinition(advisorDef, txAdvisorBeanName));
parserContext.registerComponent(compositeDef);
}
}
}
}
| AopAutoProxyConfigurer |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/SourceModeLicenseUpgradeIT.java | {
"start": 412,
"end": 16543
} | class ____ extends SourceModeLicenseChangeTestCase {
@Override
protected void applyInitialLicense() {}
@Override
protected void licenseChange() throws IOException {
startTrial();
}
@Override
protected List<TestCase> cases() {
return List.of(new TestCase() {
@Override
public String dataStreamName() {
return "logs-test-regular";
}
@Override
public String indexMode() {
return "logsdb";
}
@Override
public void prepareDataStream() throws IOException {
assertOK(createDataStream(client(), dataStreamName()));
}
@Override
public void rollover() throws IOException {
rolloverDataStream(client(), dataStreamName());
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.SYNTHETIC;
}
}, new TestCase() {
private static final String sourceModeOverride = """
{
"template": {
"settings": {
"index": {
"mapping.source.mode": "SYNTHETIC"
}
}
}
}""";
@Override
public String dataStreamName() {
return "logs-test-explicit-synthetic";
}
@Override
public String indexMode() {
return "logsdb";
}
@Override
public void prepareDataStream() throws IOException {
assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
assertOK(createDataStream(client(), dataStreamName()));
assertOK(removeComponentTemplate(client(), "logs@custom"));
}
@Override
public void rollover() throws IOException {
assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
rolloverDataStream(client(), dataStreamName());
assertOK(removeComponentTemplate(client(), "logs@custom"));
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.SYNTHETIC;
}
}, new TestCase() {
private static final String sourceModeOverride = """
{
"template": {
"settings": {
"index": {
"mapping.source.mode": "STORED"
}
}
}
}""";
@Override
public String dataStreamName() {
return "logs-test-explicit-stored";
}
@Override
public String indexMode() {
return "logsdb";
}
@Override
public void prepareDataStream() throws IOException {
assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
assertOK(createDataStream(client(), dataStreamName()));
assertOK(removeComponentTemplate(client(), "logs@custom"));
}
@Override
public void rollover() throws IOException {
assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
rolloverDataStream(client(), dataStreamName());
assertOK(removeComponentTemplate(client(), "logs@custom"));
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.STORED;
}
}, new TestCase() {
@Override
public String dataStreamName() {
return "tsdb-test-regular";
}
@Override
public String indexMode() {
return "time_series";
}
@Override
public void prepareDataStream() throws IOException {
var componentTemplate = """
{
"template": {
"settings": {
"index": {
"mode": "time_series",
"routing_path": ["dim"]
}
},
"mappings": {
"properties": {
"dim": {
"type": "keyword",
"time_series_dimension": true
}
}
}
}
}
""";
assertOK(putComponentTemplate(client(), "tsdb-test-regular-component", componentTemplate));
var template = """
{
"index_patterns": ["tsdb-test-regular"],
"priority": 100,
"data_stream": {},
"composed_of": ["tsdb-test-regular-component"]
}
""";
putTemplate(client(), "tsdb-test-regular-template", template);
assertOK(createDataStream(client(), dataStreamName()));
}
@Override
public void rollover() throws IOException {
rolloverDataStream(client(), dataStreamName());
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.SYNTHETIC;
}
}, new TestCase() {
@Override
public String dataStreamName() {
return "tsdb-test-synthetic";
}
@Override
public String indexMode() {
return "time_series";
}
@Override
public void prepareDataStream() throws IOException {
var componentTemplate = """
{
"template": {
"settings": {
"index": {
"mode": "time_series",
"routing_path": ["dim"],
"mapping.source.mode": "SYNTHETIC"
}
},
"mappings": {
"properties": {
"dim": {
"type": "keyword",
"time_series_dimension": true
}
}
}
}
}
""";
assertOK(putComponentTemplate(client(), "tsdb-test-synthetic-component", componentTemplate));
var template = """
{
"index_patterns": ["tsdb-test-synthetic"],
"priority": 100,
"data_stream": {},
"composed_of": ["tsdb-test-synthetic-component"]
}
""";
putTemplate(client(), "tsdb-test-synthetic-template", template);
assertOK(createDataStream(client(), dataStreamName()));
}
@Override
public void rollover() throws IOException {
rolloverDataStream(client(), dataStreamName());
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.SYNTHETIC;
}
}, new TestCase() {
@Override
public String dataStreamName() {
return "tsdb-test-stored";
}
@Override
public String indexMode() {
return "time_series";
}
@Override
public void prepareDataStream() throws IOException {
var componentTemplate = """
{
"template": {
"settings": {
"index": {
"mode": "time_series",
"routing_path": ["dim"],
"mapping.source.mode": "STORED"
}
},
"mappings": {
"properties": {
"dim": {
"type": "keyword",
"time_series_dimension": true
}
}
}
}
}
""";
assertOK(putComponentTemplate(client(), "tsdb-test-stored-component", componentTemplate));
var template = """
{
"index_patterns": ["tsdb-test-stored"],
"priority": 100,
"data_stream": {},
"composed_of": ["tsdb-test-stored-component"]
}
""";
putTemplate(client(), "tsdb-test-stored-template", template);
assertOK(createDataStream(client(), dataStreamName()));
}
@Override
public void rollover() throws IOException {
rolloverDataStream(client(), dataStreamName());
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.STORED;
}
},
new TestCase() {
@Override
public String dataStreamName() {
return "standard";
}
@Override
public String indexMode() {
return "standard";
}
@Override
public void prepareDataStream() throws IOException {
var template = """
{
"index_patterns": ["standard"],
"priority": 100,
"data_stream": {},
"composed_of": []
}
""";
putTemplate(client(), "standard-template", template);
assertOK(createDataStream(client(), dataStreamName()));
}
@Override
public void rollover() throws IOException {
rolloverDataStream(client(), dataStreamName());
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.STORED;
}
},
new TestCase() {
@Override
public String dataStreamName() {
return "standard-synthetic";
}
@Override
public String indexMode() {
return "standard";
}
@Override
public void prepareDataStream() throws IOException {
var componentTemplate = """
{
"template": {
"settings": {
"index": {
"mapping.source.mode": "SYNTHETIC"
}
}
}
}
""";
assertOK(putComponentTemplate(client(), "standard-synthetic-component", componentTemplate));
var template = """
{
"index_patterns": ["standard-synthetic"],
"priority": 100,
"data_stream": {},
"composed_of": ["standard-synthetic-component"]
}
""";
putTemplate(client(), "standard-synthetic-template", template);
assertOK(createDataStream(client(), dataStreamName()));
}
@Override
public void rollover() throws IOException {
rolloverDataStream(client(), dataStreamName());
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.SYNTHETIC;
}
},
new TestCase() {
@Override
public String dataStreamName() {
return "standard-stored";
}
@Override
public String indexMode() {
return "standard";
}
@Override
public void prepareDataStream() throws IOException {
var componentTemplate = """
{
"template": {
"settings": {
"index": {
"mapping.source.mode": "STORED"
}
}
}
}
""";
assertOK(putComponentTemplate(client(), "standard-stored-component", componentTemplate));
var template = """
{
"index_patterns": ["standard-stored"],
"priority": 100,
"data_stream": {},
"composed_of": ["standard-stored-component"]
}
""";
putTemplate(client(), "standard-stored-template", template);
assertOK(createDataStream(client(), dataStreamName()));
}
@Override
public void rollover() throws IOException {
rolloverDataStream(client(), dataStreamName());
}
@Override
public SourceFieldMapper.Mode initialMode() {
return SourceFieldMapper.Mode.STORED;
}
@Override
public SourceFieldMapper.Mode finalMode() {
return SourceFieldMapper.Mode.STORED;
}
}
);
}
}
| SourceModeLicenseUpgradeIT |
java | quarkusio__quarkus | extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesConfigBuildItem.java | {
"start": 178,
"end": 468
} | class ____ extends SimpleBuildItem {
private final Map<String, String> config;
OidcDevServicesConfigBuildItem(Map<String, String> config) {
this.config = config;
}
public Map<String, String> getConfig() {
return config;
}
}
| OidcDevServicesConfigBuildItem |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/runtime/src/main/java/io/quarkus/resteasy/reactive/server/runtime/observability/ObservabilityCustomizer.java | {
"start": 400,
"end": 1360
} | class ____ implements HandlerChainCustomizer {
@Override
public List<ServerRestHandler> handlers(Phase phase, ResourceClass resourceClass,
ServerResourceMethod serverResourceMethod) {
if (phase.equals(Phase.AFTER_MATCH)) {
String basePath = resourceClass.getPath();
boolean isSubResource = basePath == null;
ObservabilityHandler observabilityHandler = new ObservabilityHandler();
if (isSubResource) {
observabilityHandler.setTemplatePath(serverResourceMethod.getPath());
observabilityHandler.setSubResource(true);
} else {
observabilityHandler.setTemplatePath(basePath + serverResourceMethod.getPath());
observabilityHandler.setSubResource(false);
}
return Collections.singletonList(observabilityHandler);
}
return Collections.emptyList();
}
}
| ObservabilityCustomizer |
java | hibernate__hibernate-orm | hibernate-graalvm/src/test/java/org/hibernate/graalvm/internal/StaticClassListsTest.java | {
"start": 2124,
"end": 2438
} | enum ____ {
UUID_STRATGY_HOLDERS_USING_SECURE_RANDOM {
@Override
Stream<Class<?>> classes() {
return Stream.of(
UuidVersion6Strategy.Holder.class,
UuidVersion7Strategy.Holder.class
);
}
};
abstract Stream<Class<?>> classes();
}
@Nested
| TypesNeedingRuntimeInitialization_Category |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoLogTest.java | {
"start": 843,
"end": 1579
} | class ____ {
@Test
public void scanOperator(){
Mono<Integer> source = Mono.just(1);
MonoLog<Integer> test = new MonoLog<>(source,
new SignalLogger<>(source, "category", Level.INFO, false, SignalType.ON_COMPLETE));
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanFuseableOperator(){
Mono<Integer> source = Mono.just(1);
MonoLogFuseable<Integer> test = new MonoLogFuseable<>(source,
new SignalLogger<>(source, "category", Level.INFO, false, SignalType.ON_COMPLETE));
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
} | MonoLogTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/PreemptionMessagePBImpl.java | {
"start": 1573,
"end": 4946
} | class ____ extends PreemptionMessage {
PreemptionMessageProto proto = PreemptionMessageProto.getDefaultInstance();
PreemptionMessageProto.Builder builder = null;
boolean viaProto = false;
private StrictPreemptionContract strict;
private PreemptionContract contract;
public PreemptionMessagePBImpl() {
builder = PreemptionMessageProto.newBuilder();
}
public PreemptionMessagePBImpl(PreemptionMessageProto proto) {
this.proto = proto;
viaProto = true;
}
public synchronized PreemptionMessageProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void mergeLocalToBuilder() {
if (strict != null) {
builder.setStrictContract(convertToProtoFormat(strict));
}
if (contract != null) {
builder.setContract(convertToProtoFormat(contract));
}
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = PreemptionMessageProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public synchronized StrictPreemptionContract getStrictContract() {
PreemptionMessageProtoOrBuilder p = viaProto ? proto : builder;
if (strict != null) {
return strict;
}
if (!p.hasStrictContract()) {
return null;
}
strict = convertFromProtoFormat(p.getStrictContract());
return strict;
}
@Override
public synchronized void setStrictContract(StrictPreemptionContract strict) {
maybeInitBuilder();
if (null == strict) {
builder.clearStrictContract();
}
this.strict = strict;
}
@Override
public synchronized PreemptionContract getContract() {
PreemptionMessageProtoOrBuilder p = viaProto ? proto : builder;
if (contract != null) {
return contract;
}
if (!p.hasContract()) {
return null;
}
contract = convertFromProtoFormat(p.getContract());
return contract;
}
@Override
public synchronized void setContract(final PreemptionContract c) {
maybeInitBuilder();
if (null == c) {
builder.clearContract();
}
this.contract = c;
}
private StrictPreemptionContractPBImpl convertFromProtoFormat(
StrictPreemptionContractProto p) {
return new StrictPreemptionContractPBImpl(p);
}
private StrictPreemptionContractProto convertToProtoFormat(
StrictPreemptionContract t) {
return ((StrictPreemptionContractPBImpl)t).getProto();
}
private PreemptionContractPBImpl convertFromProtoFormat(
PreemptionContractProto p) {
return new PreemptionContractPBImpl(p);
}
private PreemptionContractProto convertToProtoFormat(
PreemptionContract t) {
return ((PreemptionContractPBImpl)t).getProto();
}
}
| PreemptionMessagePBImpl |
java | greenrobot__greendao | tests/DaoTestBase/src/main/java/org/greenrobot/greendao/daotest/SpecialNamesEntityDao.java | {
"start": 791,
"end": 8442
} | class ____ {
public final static Property Id = new Property(0, Long.class, "id", true, "_id");
public final static Property Count = new Property(1, String.class, "count", false, "COUNT");
public final static Property Select = new Property(2, String.class, "select", false, "SELECT");
public final static Property Sum = new Property(3, String.class, "sum", false, "SUM");
public final static Property Avg = new Property(4, String.class, "avg", false, "AVG");
public final static Property Join = new Property(5, String.class, "join", false, "JOIN");
public final static Property Distinct = new Property(6, String.class, "distinct", false, "DISTINCT");
public final static Property On = new Property(7, String.class, "on", false, "ON");
public final static Property Index = new Property(8, String.class, "index", false, "INDEX");
public final static Property Order = new Property(9, Integer.class, "order", false, "ORDER");
}
public SpecialNamesEntityDao(DaoConfig config) {
super(config);
}
public SpecialNamesEntityDao(DaoConfig config, DaoSession daoSession) {
super(config, daoSession);
}
/** Creates the underlying database table. */
public static void createTable(Database db, boolean ifNotExists) {
String constraint = ifNotExists? "IF NOT EXISTS ": "";
db.execSQL("CREATE TABLE " + constraint + "\"ORDER TRANSACTION GROUP BY\" (" + //
"\"_id\" INTEGER PRIMARY KEY ," + // 0: id
"\"COUNT\" TEXT," + // 1: count
"\"SELECT\" TEXT," + // 2: select
"\"SUM\" TEXT," + // 3: sum
"\"AVG\" TEXT," + // 4: avg
"\"JOIN\" TEXT," + // 5: join
"\"DISTINCT\" TEXT," + // 6: distinct
"\"ON\" TEXT," + // 7: on
"\"INDEX\" TEXT," + // 8: index
"\"ORDER\" INTEGER);"); // 9: order
}
/** Drops the underlying database table. */
public static void dropTable(Database db, boolean ifExists) {
String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"ORDER TRANSACTION GROUP BY\"";
db.execSQL(sql);
}
@Override
protected final void bindValues(DatabaseStatement stmt, SpecialNamesEntity entity) {
stmt.clearBindings();
Long id = entity.getId();
if (id != null) {
stmt.bindLong(1, id);
}
String count = entity.getCount();
if (count != null) {
stmt.bindString(2, count);
}
String select = entity.getSelect();
if (select != null) {
stmt.bindString(3, select);
}
String sum = entity.getSum();
if (sum != null) {
stmt.bindString(4, sum);
}
String avg = entity.getAvg();
if (avg != null) {
stmt.bindString(5, avg);
}
String join = entity.getJoin();
if (join != null) {
stmt.bindString(6, join);
}
String distinct = entity.getDistinct();
if (distinct != null) {
stmt.bindString(7, distinct);
}
String on = entity.getOn();
if (on != null) {
stmt.bindString(8, on);
}
String index = entity.getIndex();
if (index != null) {
stmt.bindString(9, index);
}
Integer order = entity.getOrder();
if (order != null) {
stmt.bindLong(10, order);
}
}
@Override
protected final void bindValues(SQLiteStatement stmt, SpecialNamesEntity entity) {
stmt.clearBindings();
Long id = entity.getId();
if (id != null) {
stmt.bindLong(1, id);
}
String count = entity.getCount();
if (count != null) {
stmt.bindString(2, count);
}
String select = entity.getSelect();
if (select != null) {
stmt.bindString(3, select);
}
String sum = entity.getSum();
if (sum != null) {
stmt.bindString(4, sum);
}
String avg = entity.getAvg();
if (avg != null) {
stmt.bindString(5, avg);
}
String join = entity.getJoin();
if (join != null) {
stmt.bindString(6, join);
}
String distinct = entity.getDistinct();
if (distinct != null) {
stmt.bindString(7, distinct);
}
String on = entity.getOn();
if (on != null) {
stmt.bindString(8, on);
}
String index = entity.getIndex();
if (index != null) {
stmt.bindString(9, index);
}
Integer order = entity.getOrder();
if (order != null) {
stmt.bindLong(10, order);
}
}
@Override
public Long readKey(Cursor cursor, int offset) {
return cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0);
}
@Override
public SpecialNamesEntity readEntity(Cursor cursor, int offset) {
SpecialNamesEntity entity = new SpecialNamesEntity( //
cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0), // id
cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1), // count
cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2), // select
cursor.isNull(offset + 3) ? null : cursor.getString(offset + 3), // sum
cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4), // avg
cursor.isNull(offset + 5) ? null : cursor.getString(offset + 5), // join
cursor.isNull(offset + 6) ? null : cursor.getString(offset + 6), // distinct
cursor.isNull(offset + 7) ? null : cursor.getString(offset + 7), // on
cursor.isNull(offset + 8) ? null : cursor.getString(offset + 8), // index
cursor.isNull(offset + 9) ? null : cursor.getInt(offset + 9) // order
);
return entity;
}
@Override
public void readEntity(Cursor cursor, SpecialNamesEntity entity, int offset) {
entity.setId(cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0));
entity.setCount(cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1));
entity.setSelect(cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2));
entity.setSum(cursor.isNull(offset + 3) ? null : cursor.getString(offset + 3));
entity.setAvg(cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4));
entity.setJoin(cursor.isNull(offset + 5) ? null : cursor.getString(offset + 5));
entity.setDistinct(cursor.isNull(offset + 6) ? null : cursor.getString(offset + 6));
entity.setOn(cursor.isNull(offset + 7) ? null : cursor.getString(offset + 7));
entity.setIndex(cursor.isNull(offset + 8) ? null : cursor.getString(offset + 8));
entity.setOrder(cursor.isNull(offset + 9) ? null : cursor.getInt(offset + 9));
}
@Override
protected final Long updateKeyAfterInsert(SpecialNamesEntity entity, long rowId) {
entity.setId(rowId);
return rowId;
}
@Override
public Long getKey(SpecialNamesEntity entity) {
if(entity != null) {
return entity.getId();
} else {
return null;
}
}
@Override
public boolean hasKey(SpecialNamesEntity entity) {
return entity.getId() != null;
}
@Override
protected final boolean isEntityUpdateable() {
return true;
}
}
| Properties |
java | spring-projects__spring-framework | spring-jms/src/test/java/org/springframework/jms/StubTopic.java | {
"start": 782,
"end": 1101
} | class ____ implements Topic {
public static final String DEFAULT_TOPIC_NAME = "banjo";
private String topicName = DEFAULT_TOPIC_NAME;
public StubTopic() {
}
public StubTopic(String topicName) {
this.topicName = topicName;
}
@Override
public String getTopicName() {
return this.topicName;
}
}
| StubTopic |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/fixes/Replacements.java | {
"start": 4511,
"end": 8127
} | enum ____ {
KEEP {
@Override
Replacement combineDuplicateInserts(Replacement insertion) {
return insertion.withDifferentText(insertion.replaceWith() + insertion.replaceWith());
}
},
DROP {
@Override
Replacement combineDuplicateInserts(Replacement insertion) {
return insertion;
}
};
abstract Replacement combineDuplicateInserts(Replacement insertion);
}
}
@CanIgnoreReturnValue
public Replacements add(Replacement replacement) {
return add(replacement, CoalescePolicy.REJECT);
}
@CanIgnoreReturnValue
public Replacements add(Replacement replacement, CoalescePolicy coalescePolicy) {
if (replacements.containsKey(replacement.range())) {
Replacement existing = replacements.get(replacement.range());
if (replacement.range().isEmpty()) {
// The replacement is an insertion, and there's an existing insertion at the same point.
// First check whether it's a duplicate insert.
if (existing.equals(replacement)) {
replacement = coalescePolicy.handleDuplicateInsertion(replacement);
} else {
// Coalesce overlapping non-duplicate insertions together.
replacement =
replacement.withDifferentText(
coalescePolicy.coalesce(replacement.replaceWith(), existing.replaceWith()));
}
} else if (existing.equals(replacement)) {
// Two copies of a non-insertion edit. Just ignore the new one since it's already done.
} else {
throw new IllegalArgumentException(
String.format("%s conflicts with existing replacement %s", replacement, existing));
}
} else {
checkOverlaps(replacement);
}
replacements.put(replacement.range(), replacement);
return this;
}
private void checkOverlaps(Replacement replacement) {
Range<Integer> replacementRange = replacement.range();
Collection<Replacement> overlap =
overlaps.subRangeMap(replacementRange).asMapOfRanges().values();
checkArgument(
overlap.isEmpty(),
"%s overlaps with existing replacements: %s",
replacement,
Joiner.on(", ").join(overlap));
Set<Integer> containedZeroLengthRangeStarts =
zeroLengthRanges.subSet(
replacementRange.lowerEndpoint(),
/* fromInclusive= */ false,
replacementRange.upperEndpoint(),
/* toInclusive= */ false);
checkArgument(
containedZeroLengthRangeStarts.isEmpty(),
"%s overlaps with existing zero-length replacements: %s",
replacement,
Joiner.on(", ").join(containedZeroLengthRangeStarts));
overlaps.put(replacementRange, replacement);
if (replacementRange.isEmpty()) {
zeroLengthRanges.add(replacementRange.lowerEndpoint());
}
}
/**
* Non-overlapping replacements, sorted in descending order by position. Prefer using {@link
* #ascending} when applying changes, because applying changes in reverse tends to result in
* quadratic-time copying of the underlying string.
*/
@Deprecated
public Set<Replacement> descending() {
// TODO(cushon): refactor SuggestedFix#getReplacements and just return a Collection,
return new LinkedHashSet<>(replacements.values());
}
/** Non-overlapping replacements, sorted in ascending order by position. */
public ImmutableSet<Replacement> ascending() {
return ImmutableSet.copyOf(replacements.descendingMap().values());
}
public boolean isEmpty() {
return replacements.isEmpty();
}
}
| DuplicateInsertPolicy |
java | google__gson | gson/src/test/java/com/google/gson/functional/ReusedTypeVariablesFullyResolveTest.java | {
"start": 2166,
"end": 2241
} | class ____<U, C extends Collection<U>> {
C collection;
}
}
| BaseCollection |
java | elastic__elasticsearch | modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestModifyDataStreamsAction.java | {
"start": 1120,
"end": 2353
} | class ____ extends BaseRestHandler {
@Override
public String getName() {
return "modify_data_stream_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(POST, "/_data_stream/_modify"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
ModifyDataStreamsAction.Request modifyDsRequest;
try (XContentParser parser = request.contentParser()) {
modifyDsRequest = ModifyDataStreamsAction.Request.PARSER.parse(
parser,
actions -> new ModifyDataStreamsAction.Request(
RestUtils.getMasterNodeTimeout(request),
RestUtils.getAckTimeout(request),
actions
)
);
}
if (modifyDsRequest.getActions() == null || modifyDsRequest.getActions().isEmpty()) {
throw new IllegalArgumentException("no data stream actions specified, at least one must be specified");
}
return channel -> client.execute(ModifyDataStreamsAction.INSTANCE, modifyDsRequest, new RestToXContentListener<>(channel));
}
}
| RestModifyDataStreamsAction |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/jdk/JDKFromStringDeserializer.java | {
"start": 698,
"end": 2212
} | class ____ simple deserializers that serialize values from String
* representation: this includes JSON Strings and other Scalar values that
* can be coerced into text, like Numbers and Booleans).
* Simple JSON String values are trimmed using {@link java.lang.String#trim}.
* Partial deserializer implementation will try to first access current token as
* a String, calls {@code _deserialize(String,DeserializationContext)} and
* returns return value.
* If this does not work (current token not a simple scalar type), attempts
* are made so that:
*<ul>
* <li>Embedded values ({@link JsonToken#VALUE_EMBEDDED_OBJECT}) are returned as-is
* if they are of compatible type
* </li>
* <li>Arrays may be "unwrapped" if (and only if) {@link DeserializationFeature#UNWRAP_SINGLE_VALUE_ARRAYS}
* is enabled, and array contains just a single scalar value that can be deserialized
* (for example, JSON Array with single JSON String element).
* </li>
* </ul>
*<p>
* Special handling includes:
* <ul>
* <li>Null values ({@link JsonToken#VALUE_NULL}) are handled by returning value
* returned by {@link ValueDeserializer#getNullValue(DeserializationContext)}: default
* implementation simply returns Java `null` but this may be overridden.
* </li>
* <li>Empty String (after trimming) will result in {@link #_deserializeFromEmptyString}
* getting called, and return value being returned as deserialization: default implementation
* simply returns `null`.
* </li>
* </ul>
*/
public | for |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/BindyCsvFactory.java | {
"start": 23793,
"end": 23925
} | class
____ csvRecord = cl.getAnnotation(CsvRecord.class);
// Get annotation @Section from the | CsvRecord |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/jdk/NumberSerializers.java | {
"start": 6425,
"end": 6934
} | class ____ extends Base<Object> {
final static IntLikeSerializer instance = new IntLikeSerializer();
public IntLikeSerializer() {
super(Number.class, JsonParser.NumberType.INT, "integer");
}
@Override
public void serialize(Object value, JsonGenerator gen,
SerializationContext provider) throws JacksonException {
gen.writeNumber(((Number) value).intValue());
}
}
@JacksonStdImpl
public static | IntLikeSerializer |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.