Dataset schema (each record below is flattened left to right in this field order):

| field | type / stats |
|---|---|
| repo | string, 11 distinct values |
| path | string, length 41–234 |
| func_name | string, length 5–78 |
| original_string | string, length 71–14.1k |
| language | string, 1 distinct value (java) |
| code | string, length 71–14.1k |
| code_tokens | list, length 22–2.65k |
| docstring | string, length 2–5.35k |
| docstring_tokens | list, length 1–369 |
| sha | string, 11 distinct values |
| url | string, length 129–339 |
| partition | string, 1 distinct value (train) |
| summary | string, length 7–175 |
| input_ids | list, length 502 |
| token_type_ids | list, length 502 |
| attention_mask | list, length 502 |
| labels | list, length 502 |
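As a reading aid, here is a minimal sketch (not part of the dataset) of what one record holds, mirroring the schema above. The class name and comments are our own inferences from the header and the rows below; everything except the dataset's field names is hypothetical.

```java
import java.util.List;

// Hypothetical container for one record of this dump.
public final class CodeSearchRecord {
    String repo;                    // e.g. "hankcs/HanLP" (11 distinct repos)
    String path;                    // source-file path inside the repo
    String funcName;                // e.g. "TransformMatrixDictionary.getFrequency"
    String originalString;          // raw function source
    String language;                // always "java" in this dump
    String code;                    // verbatim duplicate of originalString
    List<String> codeTokens;        // lexer tokens of the source
    String docstring;               // leading Javadoc text
    List<String> docstringTokens;   // tokenized docstring
    String sha;                     // commit hash of the snapshot
    String url;                     // GitHub permalink with line range
    String partition;               // always "train" in this dump
    String summary;                 // one-line natural-language summary
    int[] inputIds;                 // 502 subword-token IDs fed to a model
    int[] tokenTypeIds;             // 502 segment IDs (all 0 in the visible rows)
    int[] attentionMask;            // 502 mask values
    int[] labels;                   // 502 target IDs; -100 appears to mark ignored positions
}
```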
hankcs/HanLP | src/main/java/com/hankcs/hanlp/dictionary/TransformMatrixDictionary.java | TransformMatrixDictionary.getFrequency | public int getFrequency(String from, String to)
{
return getFrequency(convert(from), convert(to));
} | java | (code: verbatim duplicate of original_string) | [
"public",
"int",
"getFrequency",
"(",
"String",
"from",
",",
"String",
"to",
")",
"{",
"return",
"getFrequency",
"(",
"convert",
"(",
"from",
")",
",",
"convert",
"(",
"to",
")",
")",
";",
"}"
] | Gets the transition frequency.
@param from
@param to
@return | [
"Gets the transition frequency"
] | a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce | https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/dictionary/TransformMatrixDictionary.java#L43-L46 | train | Gets the frequency of a sequence from a sequence of tokens to a sequence of tokens. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
looly/hutool | hutool-extra/src/main/java/cn/hutool/extra/servlet/multipart/UploadFileHeader.java | UploadFileHeader.getDataFieldValue | private String getDataFieldValue(String dataHeader, String fieldName) {
String value = null;
String token = StrUtil.format("{}=\"", fieldName);
int pos = dataHeader.indexOf(token);
if (pos > 0) {
int start = pos + token.length();
int end = dataHeader.indexOf('"', start);
if ((start > 0) && (end > 0)) {
value = dataHeader.substring(start, end);
}
}
return value;
} | java | (code: verbatim duplicate of original_string) | [
"private",
"String",
"getDataFieldValue",
"(",
"String",
"dataHeader",
",",
"String",
"fieldName",
")",
"{",
"String",
"value",
"=",
"null",
";",
"String",
"token",
"=",
"StrUtil",
".",
"format",
"(",
"\"{}=\\\"\"",
",",
"fieldName",
")",
";",
"int",
"pos",
... | Gets the specified value from the header string.
@param dataHeader the header string
@param fieldName the field name
@return the field value | [
"Gets the specified value from the header string"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-extra/src/main/java/cn/hutool/extra/servlet/multipart/UploadFileHeader.java#L102-L114 | train | Returns the value of a single field in the data header. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
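As a reading aid for the record above: `getDataFieldValue` searches `dataHeader` for the pattern `fieldName="` and returns the quoted value that follows. A tiny worked example (the header string is invented):

```java
// Invented multipart header line illustrating the extraction above.
String dataHeader = "Content-Disposition: form-data; name=\"file\"; filename=\"a.txt\"";
// getDataFieldValue(dataHeader, "filename") builds the token filename=",
// locates it, and returns the text up to the closing quote: "a.txt".
// getDataFieldValue(dataHeader, "missing") finds no such token and returns null.
```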
apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutor.java | TaskExecutor.submitTask | @Override
public CompletableFuture<Acknowledge> submitTask(
TaskDeploymentDescriptor tdd,
JobMasterId jobMasterId,
Time timeout) {
try {
final JobID jobId = tdd.getJobId();
final JobManagerConnection jobManagerConnection = jobManagerTable.get(jobId);
if (jobManagerConnection == null) {
final String message = "Could not submit task because there is no JobManager " +
"associated for the job " + jobId + '.';
log.debug(message);
throw new TaskSubmissionException(message);
}
if (!Objects.equals(jobManagerConnection.getJobMasterId(), jobMasterId)) {
final String message = "Rejecting the task submission because the job manager leader id " +
jobMasterId + " does not match the expected job manager leader id " +
jobManagerConnection.getJobMasterId() + '.';
log.debug(message);
throw new TaskSubmissionException(message);
}
if (!taskSlotTable.tryMarkSlotActive(jobId, tdd.getAllocationId())) {
final String message = "No task slot allocated for job ID " + jobId +
" and allocation ID " + tdd.getAllocationId() + '.';
log.debug(message);
throw new TaskSubmissionException(message);
}
// re-integrate offloaded data:
try {
tdd.loadBigData(blobCacheService.getPermanentBlobService());
} catch (IOException | ClassNotFoundException e) {
throw new TaskSubmissionException("Could not re-integrate offloaded TaskDeploymentDescriptor data.", e);
}
// deserialize the pre-serialized information
final JobInformation jobInformation;
final TaskInformation taskInformation;
try {
jobInformation = tdd.getSerializedJobInformation().deserializeValue(getClass().getClassLoader());
taskInformation = tdd.getSerializedTaskInformation().deserializeValue(getClass().getClassLoader());
} catch (IOException | ClassNotFoundException e) {
throw new TaskSubmissionException("Could not deserialize the job or task information.", e);
}
if (!jobId.equals(jobInformation.getJobId())) {
throw new TaskSubmissionException(
"Inconsistent job ID information inside TaskDeploymentDescriptor (" +
tdd.getJobId() + " vs. " + jobInformation.getJobId() + ")");
}
TaskMetricGroup taskMetricGroup = taskManagerMetricGroup.addTaskForJob(
jobInformation.getJobId(),
jobInformation.getJobName(),
taskInformation.getJobVertexId(),
tdd.getExecutionAttemptId(),
taskInformation.getTaskName(),
tdd.getSubtaskIndex(),
tdd.getAttemptNumber());
InputSplitProvider inputSplitProvider = new RpcInputSplitProvider(
jobManagerConnection.getJobManagerGateway(),
taskInformation.getJobVertexId(),
tdd.getExecutionAttemptId(),
taskManagerConfiguration.getTimeout());
TaskManagerActions taskManagerActions = jobManagerConnection.getTaskManagerActions();
CheckpointResponder checkpointResponder = jobManagerConnection.getCheckpointResponder();
GlobalAggregateManager aggregateManager = jobManagerConnection.getGlobalAggregateManager();
LibraryCacheManager libraryCache = jobManagerConnection.getLibraryCacheManager();
ResultPartitionConsumableNotifier resultPartitionConsumableNotifier = jobManagerConnection.getResultPartitionConsumableNotifier();
PartitionProducerStateChecker partitionStateChecker = jobManagerConnection.getPartitionStateChecker();
final TaskLocalStateStore localStateStore = localStateStoresManager.localStateStoreForSubtask(
jobId,
tdd.getAllocationId(),
taskInformation.getJobVertexId(),
tdd.getSubtaskIndex());
final JobManagerTaskRestore taskRestore = tdd.getTaskRestore();
final TaskStateManager taskStateManager = new TaskStateManagerImpl(
jobId,
tdd.getExecutionAttemptId(),
localStateStore,
taskRestore,
checkpointResponder);
Task task = new Task(
jobInformation,
taskInformation,
tdd.getExecutionAttemptId(),
tdd.getAllocationId(),
tdd.getSubtaskIndex(),
tdd.getAttemptNumber(),
tdd.getProducedPartitions(),
tdd.getInputGates(),
tdd.getTargetSlotNumber(),
taskExecutorServices.getMemoryManager(),
taskExecutorServices.getIOManager(),
taskExecutorServices.getNetworkEnvironment(),
taskExecutorServices.getKvStateService(),
taskExecutorServices.getBroadcastVariableManager(),
taskExecutorServices.getTaskEventDispatcher(),
taskStateManager,
taskManagerActions,
inputSplitProvider,
checkpointResponder,
aggregateManager,
blobCacheService,
libraryCache,
fileCache,
taskManagerConfiguration,
taskMetricGroup,
resultPartitionConsumableNotifier,
partitionStateChecker,
getRpcService().getExecutor());
log.info("Received task {}.", task.getTaskInfo().getTaskNameWithSubtasks());
boolean taskAdded;
try {
taskAdded = taskSlotTable.addTask(task);
} catch (SlotNotFoundException | SlotNotActiveException e) {
throw new TaskSubmissionException("Could not submit task.", e);
}
if (taskAdded) {
task.startTaskThread();
return CompletableFuture.completedFuture(Acknowledge.get());
} else {
final String message = "TaskManager already contains a task for id " +
task.getExecutionId() + '.';
log.debug(message);
throw new TaskSubmissionException(message);
}
} catch (TaskSubmissionException e) {
return FutureUtils.completedExceptionally(e);
}
} | java | (code: verbatim duplicate of original_string) | [
"@",
"Override",
"public",
"CompletableFuture",
"<",
"Acknowledge",
">",
"submitTask",
"(",
"TaskDeploymentDescriptor",
"tdd",
",",
"JobMasterId",
"jobMasterId",
",",
"Time",
"timeout",
")",
"{",
"try",
"{",
"final",
"JobID",
"jobId",
"=",
"tdd",
".",
"getJobId"... | ---------------------------------------------------------------------- | [
"----------------------------------------------------------------------"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutor.java#L433-L582 | train | Submits a task to the job manager. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
apache/flink | flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java | DataSink.translateToDataFlow | protected GenericDataSinkBase<T> translateToDataFlow(Operator<T> input) {
// select the name (or create a default one)
String name = this.name != null ? this.name : this.format.toString();
GenericDataSinkBase<T> sink = new GenericDataSinkBase<>(this.format, new UnaryOperatorInformation<>(this.type, new NothingTypeInfo()), name);
// set input
sink.setInput(input);
// set parameters
if (this.parameters != null) {
sink.getParameters().addAll(this.parameters);
}
// set parallelism
if (this.parallelism > 0) {
// use specified parallelism
sink.setParallelism(this.parallelism);
} else {
// if no parallelism has been specified, use parallelism of input operator to enable chaining
sink.setParallelism(input.getParallelism());
}
if (this.sortKeyPositions != null) {
// configure output sorting
Ordering ordering = new Ordering();
for (int i = 0; i < this.sortKeyPositions.length; i++) {
ordering.appendOrdering(this.sortKeyPositions[i], null, this.sortOrders[i]);
}
sink.setLocalOrder(ordering);
}
return sink;
} | java | (code: verbatim duplicate of original_string) | [
"protected",
"GenericDataSinkBase",
"<",
"T",
">",
"translateToDataFlow",
"(",
"Operator",
"<",
"T",
">",
"input",
")",
"{",
"// select the name (or create a default one)",
"String",
"name",
"=",
"this",
".",
"name",
"!=",
"null",
"?",
"this",
".",
"name",
":",
... | -------------------------------------------------------------------------------------------- | [
"--------------------------------------------------------------------------------------------"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java#L233-L262 | train | Translates the given input operator to data flow. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
netty/netty | handler/src/main/java/io/netty/handler/traffic/TrafficCounter.java | TrafficCounter.writeTimeToWait | @Deprecated
public long writeTimeToWait(final long size, final long limitTraffic, final long maxTime) {
return writeTimeToWait(size, limitTraffic, maxTime, milliSecondFromNano());
} | java | (code: verbatim duplicate of original_string) | [
"@",
"Deprecated",
"public",
"long",
"writeTimeToWait",
"(",
"final",
"long",
"size",
",",
"final",
"long",
"limitTraffic",
",",
"final",
"long",
"maxTime",
")",
"{",
"return",
"writeTimeToWait",
"(",
"size",
",",
"limitTraffic",
",",
"maxTime",
",",
"milliSec... | Returns the time to wait (if any) for the given length message, using the given limitTraffic and
the max wait time.
@param size
the write size
@param limitTraffic
the traffic limit in bytes per second.
@param maxTime
the max time in ms to wait in case of excess of traffic.
@return the current time to wait (in ms) if needed for Write operation. | [
"Returns",
"the",
"time",
"to",
"wait",
"(",
"if",
"any",
")",
"for",
"the",
"given",
"length",
"message",
"using",
"the",
"given",
"limitTraffic",
"and",
"the",
"max",
"wait",
"time",
"."
] | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/traffic/TrafficCounter.java#L555-L558 | train | Write time to wait. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
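The Javadoc in the record above describes a traffic-shaping computation: given a write of `size` bytes and a `limitTraffic` in bytes per second, the counter derives how long the caller should pause, capped at `maxTime`. The core idea is a rate calculation; the sketch below shows only that idea and is not netty's exact formula (which also accounts for bytes already written in the current check interval):

```java
// Idealized time-to-wait: not netty's implementation, just the rate idea.
static long idealizedTimeToWait(long sizeBytes, long limitBytesPerSecond, long maxTimeMs) {
    if (limitBytesPerSecond <= 0) {
        return 0; // no throttling configured
    }
    long waitMs = sizeBytes * 1000 / limitBytesPerSecond; // time the write "costs"
    return Math.min(waitMs, maxTimeMs);                   // never wait longer than maxTime
}
// e.g. a 1 MiB write at a 512 KiB/s limit costs ~2000 ms, capped at maxTime.
```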
apache/spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java | VariableLengthRowBasedKeyValueBatch.getKeyRow | @Override
public UnsafeRow getKeyRow(int rowId) {
assert(rowId >= 0);
assert(rowId < numRows);
if (keyRowId != rowId) { // if keyRowId == rowId, desired keyRow is already cached
long offset = keyOffsets[rowId];
int klen = Platform.getInt(base, offset - 4);
keyRow.pointTo(base, offset, klen);
// set keyRowId so we can check if desired row is cached
keyRowId = rowId;
}
return keyRow;
} | java | (code: verbatim duplicate of original_string) | [
"@",
"Override",
"public",
"UnsafeRow",
"getKeyRow",
"(",
"int",
"rowId",
")",
"{",
"assert",
"(",
"rowId",
">=",
"0",
")",
";",
"assert",
"(",
"rowId",
"<",
"numRows",
")",
";",
"if",
"(",
"keyRowId",
"!=",
"rowId",
")",
"{",
"// if keyRowId == rowId, d... | Returns the key row in this batch at `rowId`. Returned key row is reused across calls. | [
"Returns",
"the",
"key",
"row",
"in",
"this",
"batch",
"at",
"rowId",
".",
"Returned",
"key",
"row",
"is",
"reused",
"across",
"calls",
"."
] | 25ee0474f47d9c30d6f553a7892d9549f91071cf | https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java#L76-L88 | train | Get the key row. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
alibaba/canal | client/src/main/java/com/alibaba/otter/canal/client/CanalConnectors.java | CanalConnectors.newClusterConnector | public static CanalConnector newClusterConnector(String zkServers, String destination, String username,
String password) {
ClusterCanalConnector canalConnector = new ClusterCanalConnector(username,
password,
destination,
new ClusterNodeAccessStrategy(destination, ZkClientx.getZkClient(zkServers)));
canalConnector.setSoTimeout(60 * 1000);
canalConnector.setIdleTimeout(60 * 60 * 1000);
return canalConnector;
} | java | (code: verbatim duplicate of original_string) | [
"public",
"static",
"CanalConnector",
"newClusterConnector",
"(",
"String",
"zkServers",
",",
"String",
"destination",
",",
"String",
"username",
",",
"String",
"password",
")",
"{",
"ClusterCanalConnector",
"canalConnector",
"=",
"new",
"ClusterCanalConnector",
"(",
... | Creates a cluster-mode client connection, with automatic failover switching and automatic scanning of the server list.
@param zkServers
@param destination
@param username
@param password
@return | [
"Creates a cluster-mode client connection, with automatic failover switching and automatic scanning of the server list"
] | 8f088cddc0755f4350c5aaae95c6e4002d90a40f | https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/client/src/main/java/com/alibaba/otter/canal/client/CanalConnectors.java#L66-L75 | train | Create a new ClusterCanalConnector | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
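For context on the record above: the factory builds a ZooKeeper-backed, failover-aware connector. An abbreviated sketch of the usual canal client loop follows; the addresses and destination are examples, `running` and `process(...)` are hypothetical, and error handling is omitted:

```java
// Abbreviated canal client loop under the assumptions stated above.
CanalConnector connector = CanalConnectors.newClusterConnector(
        "127.0.0.1:2181", "example", "", "");
connector.connect();
connector.subscribe(".*\\..*");                      // all schemas and tables
while (running) {
    Message message = connector.getWithoutAck(1024); // batch of up to 1024 entries
    if (message.getId() != -1 && !message.getEntries().isEmpty()) {
        process(message.getEntries());               // hypothetical handler
    }
    connector.ack(message.getId());                  // confirm consumption
}
connector.disconnect();
```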
apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/BootstrapTools.java | BootstrapTools.getStartCommand | public static String getStartCommand(String template,
Map<String, String> startCommandValues) {
for (Map.Entry<String, String> variable : startCommandValues
.entrySet()) {
template = template
.replace("%" + variable.getKey() + "%", variable.getValue());
}
return template;
} | java | (code: verbatim duplicate of original_string) | [
"public",
"static",
"String",
"getStartCommand",
"(",
"String",
"template",
",",
"Map",
"<",
"String",
",",
"String",
">",
"startCommandValues",
")",
"{",
"for",
"(",
"Map",
".",
"Entry",
"<",
"String",
",",
"String",
">",
"variable",
":",
"startCommandValue... | Replaces placeholders in the template start command with values from startCommandValues.
<p>If the default template {@link ConfigConstants#DEFAULT_YARN_CONTAINER_START_COMMAND_TEMPLATE}
is used, the following keys must be present in the map or the resulting
command will still contain placeholders:
<ul>
<li><tt>java</tt> = path to the Java executable</li>
<li><tt>jvmmem</tt> = JVM memory limits and tweaks</li>
<li><tt>jvmopts</tt> = misc options for the Java VM</li>
<li><tt>logging</tt> = logging-related configuration settings</li>
<li><tt>class</tt> = main class to execute</li>
<li><tt>args</tt> = arguments for the main class</li>
<li><tt>redirects</tt> = output redirects</li>
</ul>
@param template
a template start command with placeholders
@param startCommandValues
a replacement map <tt>placeholder -> value</tt>
@return the start command with placeholders filled in | [
"Replaces",
"placeholders",
"in",
"the",
"template",
"start",
"command",
"with",
"values",
"from",
"startCommandValues",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/BootstrapTools.java#L507-L515 | train | Get start command. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
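The Javadoc in the record above lists the placeholder keys the default template expects. A minimal sketch of filling them in; the concrete values and the inline template string are invented for illustration:

```java
import java.util.HashMap;
import java.util.Map;

// Invented values for the placeholder keys the Javadoc documents.
Map<String, String> startCommandValues = new HashMap<>();
startCommandValues.put("java", "$JAVA_HOME/bin/java");
startCommandValues.put("jvmmem", "-Xmx424m");
startCommandValues.put("jvmopts", "");
startCommandValues.put("logging", "-Dlog.file=taskmanager.log");
startCommandValues.put("class", "org.apache.flink.runtime.taskexecutor.TaskManagerRunner");
startCommandValues.put("args", "--configDir .");
startCommandValues.put("redirects", "1> out.log 2> err.log");

// Each %key% in the template is replaced by the matching map value.
String cmd = BootstrapTools.getStartCommand(
        "%java% %jvmmem% %jvmopts% %logging% %class% %args% %redirects%",
        startCommandValues);
```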
looly/hutool | hutool-extra/src/main/java/cn/hutool/extra/servlet/ServletUtil.java | ServletUtil.write | public static void write(HttpServletResponse response, String text, String contentType) {
response.setContentType(contentType);
Writer writer = null;
try {
writer = response.getWriter();
writer.write(text);
writer.flush();
} catch (IOException e) {
throw new UtilException(e);
} finally {
IoUtil.close(writer);
}
} | java | (code: verbatim duplicate of original_string) | [
"public",
"static",
"void",
"write",
"(",
"HttpServletResponse",
"response",
",",
"String",
"text",
",",
"String",
"contentType",
")",
"{",
"response",
".",
"setContentType",
"(",
"contentType",
")",
";",
"Writer",
"writer",
"=",
"null",
";",
"try",
"{",
"wr... | 返回数据给客户端
@param response 响应对象{@link HttpServletResponse}
@param text 返回的内容
@param contentType 返回的类型 | [
"返回数据给客户端"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-extra/src/main/java/cn/hutool/extra/servlet/ServletUtil.java#L480-L492 | train | Write text to the response using the specified content type. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
hankcs/HanLP | src/main/java/com/hankcs/hanlp/seg/NShort/Path/NShortPath.java | NShortPath.calculate | private void calculate(Graph inGraph, int nValueKind)
{
initNShortPath(inGraph, nValueKind);
QueueElement tmpElement;
CQueue queWork = new CQueue();
double eWeight;
for (int nCurNode = 1; nCurNode < vertexCount; ++nCurNode)
{
            // Sort all edges that can reach the current node (nCurNode) by eWeight and push them onto the queue
enQueueCurNodeEdges(queWork, nCurNode);
            // Initialize the eWeight values of all edges of the current node
for (int i = 0; i < N; ++i)
weightArray[nCurNode - 1][i] = Double.MAX_VALUE;
            // Load the contents of queWork into fromArray
tmpElement = queWork.deQueue();
if (tmpElement != null)
{
for (int i = 0; i < N; ++i)
{
eWeight = tmpElement.weight;
weightArray[nCurNode - 1][i] = eWeight;
do
{
fromArray[nCurNode - 1][i].enQueue(new QueueElement(tmpElement.from, tmpElement.index, 0));
tmpElement = queWork.deQueue();
if (tmpElement == null)
{
i = N;
break;
}
} while (tmpElement.weight == eWeight);
}
}
}
} | java | (code: verbatim duplicate of original_string) | [
"private",
"void",
"calculate",
"(",
"Graph",
"inGraph",
",",
"int",
"nValueKind",
")",
"{",
"initNShortPath",
"(",
"inGraph",
",",
"nValueKind",
")",
";",
"QueueElement",
"tmpElement",
";",
"CQueue",
"queWork",
"=",
"new",
"CQueue",
"(",
")",
";",
"double",... | 计算出所有结点上可能的路径,为路径数据提供数据准备
@param inGraph 输入图
@param nValueKind 前N个结果 | [
"计算出所有结点上可能的路径,为路径数据提供数据准备"
] | a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce | https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/seg/NShort/Path/NShortPath.java#L90-L128 | train | Calculate the nValueKind of the nValueKind | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
apache/flink | flink-core/src/main/java/org/apache/flink/core/fs/LimitedConnectionsFileSystem.java | LimitedConnectionsFileSystem.create | @Override
public FSDataOutputStream create(Path f, WriteMode overwriteMode) throws IOException {
return createOutputStream(() -> originalFs.create(f, overwriteMode));
} | java | (code: verbatim duplicate of original_string) | [
"@",
"Override",
"public",
"FSDataOutputStream",
"create",
"(",
"Path",
"f",
",",
"WriteMode",
"overwriteMode",
")",
"throws",
"IOException",
"{",
"return",
"createOutputStream",
"(",
"(",
")",
"->",
"originalFs",
".",
"create",
"(",
"f",
",",
"overwriteMode",
... | ------------------------------------------------------------------------ | [
"------------------------------------------------------------------------"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/core/fs/LimitedConnectionsFileSystem.java#L270-L273 | train | Create a file in the FS. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
apache/spark | common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java | TransportRequestHandler.processStreamUpload | private void processStreamUpload(final UploadStream req) {
assert (req.body() == null);
try {
RpcResponseCallback callback = new RpcResponseCallback() {
@Override
public void onSuccess(ByteBuffer response) {
respond(new RpcResponse(req.requestId, new NioManagedBuffer(response)));
}
@Override
public void onFailure(Throwable e) {
respond(new RpcFailure(req.requestId, Throwables.getStackTraceAsString(e)));
}
};
TransportFrameDecoder frameDecoder = (TransportFrameDecoder)
channel.pipeline().get(TransportFrameDecoder.HANDLER_NAME);
ByteBuffer meta = req.meta.nioByteBuffer();
StreamCallbackWithID streamHandler = rpcHandler.receiveStream(reverseClient, meta, callback);
if (streamHandler == null) {
throw new NullPointerException("rpcHandler returned a null streamHandler");
}
StreamCallbackWithID wrappedCallback = new StreamCallbackWithID() {
@Override
public void onData(String streamId, ByteBuffer buf) throws IOException {
streamHandler.onData(streamId, buf);
}
@Override
public void onComplete(String streamId) throws IOException {
try {
streamHandler.onComplete(streamId);
callback.onSuccess(ByteBuffer.allocate(0));
} catch (Exception ex) {
IOException ioExc = new IOException("Failure post-processing complete stream;" +
" failing this rpc and leaving channel active", ex);
callback.onFailure(ioExc);
streamHandler.onFailure(streamId, ioExc);
}
}
@Override
public void onFailure(String streamId, Throwable cause) throws IOException {
callback.onFailure(new IOException("Destination failed while reading stream", cause));
streamHandler.onFailure(streamId, cause);
}
@Override
public String getID() {
return streamHandler.getID();
}
};
if (req.bodyByteCount > 0) {
StreamInterceptor<RequestMessage> interceptor = new StreamInterceptor<>(
this, wrappedCallback.getID(), req.bodyByteCount, wrappedCallback);
frameDecoder.setInterceptor(interceptor);
} else {
wrappedCallback.onComplete(wrappedCallback.getID());
}
} catch (Exception e) {
logger.error("Error while invoking RpcHandler#receive() on RPC id " + req.requestId, e);
respond(new RpcFailure(req.requestId, Throwables.getStackTraceAsString(e)));
// We choose to totally fail the channel, rather than trying to recover as we do in other
// cases. We don't know how many bytes of the stream the client has already sent for the
// stream, it's not worth trying to recover.
channel.pipeline().fireExceptionCaught(e);
} finally {
req.meta.release();
}
} | java | (code: verbatim duplicate of original_string) | [
"private",
"void",
"processStreamUpload",
"(",
"final",
"UploadStream",
"req",
")",
"{",
"assert",
"(",
"req",
".",
"body",
"(",
")",
"==",
"null",
")",
";",
"try",
"{",
"RpcResponseCallback",
"callback",
"=",
"new",
"RpcResponseCallback",
"(",
")",
"{",
"... | Handle a request from the client to upload a stream of data. | [
"Handle",
"a",
"request",
"from",
"the",
"client",
"to",
"upload",
"a",
"stream",
"of",
"data",
"."
] | 25ee0474f47d9c30d6f553a7892d9549f91071cf | https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java#L172-L240 | train | Process a stream upload request. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
redisson/redisson | redisson/src/main/java/org/redisson/cache/AbstractCacheMap.java | AbstractCacheMap.remove | @Override
public V remove(Object key) {
CachedValue<K, V> entry = map.remove(key);
if (entry != null) {
onValueRemove(entry);
if (!isValueExpired(entry)) {
return (V) entry.getValue();
}
}
return null;
} | java | (code: verbatim duplicate of original_string) | [
"@",
"Override",
"public",
"V",
"remove",
"(",
"Object",
"key",
")",
"{",
"CachedValue",
"<",
"K",
",",
"V",
">",
"entry",
"=",
"map",
".",
"remove",
"(",
"key",
")",
";",
"if",
"(",
"entry",
"!=",
"null",
")",
"{",
"onValueRemove",
"(",
"entry",
... | /*
(non-Javadoc)
@see java.util.Map#remove(java.lang.Object) | [
"/",
"*",
"(",
"non",
"-",
"Javadoc",
")"
] | d3acc0249b2d5d658d36d99e2c808ce49332ea44 | https://github.com/redisson/redisson/blob/d3acc0249b2d5d658d36d99e2c808ce49332ea44/redisson/src/main/java/org/redisson/cache/AbstractCacheMap.java#L237-L247 | train | Removes the value associated with the specified key from the cache. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
looly/hutool | hutool-db/src/main/java/cn/hutool/db/sql/SqlUtil.java | SqlUtil.blobToStr | public static String blobToStr(Blob blob, Charset charset) {
InputStream in = null;
try {
in = blob.getBinaryStream();
return IoUtil.read(in, charset);
} catch (SQLException e) {
throw new DbRuntimeException(e);
} finally {
IoUtil.close(in);
}
} | java | (code: verbatim duplicate of original_string) | [
"public",
"static",
"String",
"blobToStr",
"(",
"Blob",
"blob",
",",
"Charset",
"charset",
")",
"{",
"InputStream",
"in",
"=",
"null",
";",
"try",
"{",
"in",
"=",
"blob",
".",
"getBinaryStream",
"(",
")",
";",
"return",
"IoUtil",
".",
"read",
"(",
"in"... | Blob字段值转字符串
@param blob {@link Blob}
@param charset 编码
@return 字符串
@since 3.0.6 | [
"Blob字段值转字符串"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/sql/SqlUtil.java#L173-L183 | train | Converts a blob to a string. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
looly/hutool | hutool-core/src/main/java/cn/hutool/core/date/DateUtil.java | DateUtil.range | public static DateRange range(Date start, Date end, final DateField unit) {
return new DateRange(start, end, unit);
} | java | (code: verbatim duplicate of original_string) | [
"public",
"static",
"DateRange",
"range",
"(",
"Date",
"start",
",",
"Date",
"end",
",",
"final",
"DateField",
"unit",
")",
"{",
"return",
"new",
"DateRange",
"(",
"start",
",",
"end",
",",
"unit",
")",
";",
"}"
] | Creates a date-range generator.
@param start the start date-time
@param end the end date-time
@param unit the step unit
@return {@link DateRange} | [
"Creates a date-range generator"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/date/DateUtil.java#L1632-L1634 | train | Creates a range of dates. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
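A usage sketch for the record above; it assumes hutool's `DateRange` is iterable (as in recent hutool versions) and uses `DateField.DAY_OF_YEAR` as the daily step unit, with invented dates:

```java
// Iterate day by day between two dates (values invented).
Date start = DateUtil.parse("2019-01-01");
Date end = DateUtil.parse("2019-01-05");
for (DateTime day : DateUtil.range(start, end, DateField.DAY_OF_YEAR)) {
    System.out.println(day); // 2019-01-01 ... 2019-01-05, one line per day
}
```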
apache/flink | flink-core/src/main/java/org/apache/flink/api/common/typeinfo/Types.java | Types.EITHER | public static <L, R> TypeInformation<Either<L, R>> EITHER(TypeInformation<L> leftType, TypeInformation<R> rightType) {
return new EitherTypeInfo<>(leftType, rightType);
} | java | (code: verbatim duplicate of original_string) | [
"public",
"static",
"<",
"L",
",",
"R",
">",
"TypeInformation",
"<",
"Either",
"<",
"L",
",",
"R",
">",
">",
"EITHER",
"(",
"TypeInformation",
"<",
"L",
">",
"leftType",
",",
"TypeInformation",
"<",
"R",
">",
"rightType",
")",
"{",
"return",
"new",
"... | Returns type information for Flink's {@link org.apache.flink.types.Either} type. Null values
are not supported.
<p>Either type can be used for a value of two possible types.
<p>Example use: <code>Types.EITHER(Types.VOID, Types.INT)</code>
@param leftType type information of left side / {@link org.apache.flink.types.Either.Left}
@param rightType type information of right side / {@link org.apache.flink.types.Either.Right} | [
"Returns",
"type",
"information",
"for",
"Flink",
"s",
"{",
"@link",
"org",
".",
"apache",
".",
"flink",
".",
"types",
".",
"Either",
"}",
"type",
".",
"Null",
"values",
"are",
"not",
"supported",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/Types.java#L455-L457 | train | Returns the type information for the either operation. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
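Expanding the Javadoc's own example from the record above; the variable names are invented, the calls are exactly those documented:

```java
// From the Javadoc's example use: a value that is either Void or Integer.
TypeInformation<Either<Void, Integer>> voidOrInt = Types.EITHER(Types.VOID, Types.INT);

// Any other left/right pair works the same way.
TypeInformation<Either<String, Long>> stringOrLong = Types.EITHER(Types.STRING, Types.LONG);
```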
apache/flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/operations/AliasOperationUtils.java | AliasOperationUtils.createAliasList | public static List<Expression> createAliasList(List<Expression> aliases, TableOperation child) {
TableSchema childSchema = child.getTableSchema();
if (aliases.size() > childSchema.getFieldCount()) {
throw new ValidationException("Aliasing more fields than we actually have.");
}
List<ValueLiteralExpression> fieldAliases = aliases.stream()
.map(f -> f.accept(aliasLiteralValidator))
.collect(Collectors.toList());
String[] childNames = childSchema.getFieldNames();
return IntStream.range(0, childNames.length)
.mapToObj(idx -> {
UnresolvedReferenceExpression oldField = new UnresolvedReferenceExpression(childNames[idx]);
if (idx < fieldAliases.size()) {
ValueLiteralExpression alias = fieldAliases.get(idx);
return new CallExpression(BuiltInFunctionDefinitions.AS, Arrays.asList(oldField, alias));
} else {
return oldField;
}
}).collect(Collectors.toList());
} | java | (code: verbatim duplicate of original_string) | [
"public",
"static",
"List",
"<",
"Expression",
">",
"createAliasList",
"(",
"List",
"<",
"Expression",
">",
"aliases",
",",
"TableOperation",
"child",
")",
"{",
"TableSchema",
"childSchema",
"=",
"child",
".",
"getTableSchema",
"(",
")",
";",
"if",
"(",
"ali... | Creates a list of valid alias expressions. Resulting expression might still contain
{@link UnresolvedReferenceExpression}.
@param aliases aliases to validate
@param child relational operation on top of which to apply the aliases
@return validated list of aliases | [
"Creates",
"a",
"list",
"of",
"valid",
"alias",
"expressions",
".",
"Resulting",
"expression",
"might",
"still",
"contain",
"{",
"@link",
"UnresolvedReferenceExpression",
"}",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/operations/AliasOperationUtils.java#L55-L77 | train | Creates a list of aliases for a given child table operation. | [
input_ids: 502 token IDs, truncated] | [token_type_ids: 502 entries, truncated] | [attention_mask: 502 entries, truncated] | [labels: 502 entries, truncated]
apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/throwable/ThrowableClassifier.java | ThrowableClassifier.getThrowableType | public static ThrowableType getThrowableType(Throwable cause) {
final ThrowableAnnotation annotation = cause.getClass().getAnnotation(ThrowableAnnotation.class);
return annotation == null ? ThrowableType.RecoverableError : annotation.value();
} | java | public static ThrowableType getThrowableType(Throwable cause) {
final ThrowableAnnotation annotation = cause.getClass().getAnnotation(ThrowableAnnotation.class);
return annotation == null ? ThrowableType.RecoverableError : annotation.value();
} | [
"public",
"static",
"ThrowableType",
"getThrowableType",
"(",
"Throwable",
"cause",
")",
"{",
"final",
"ThrowableAnnotation",
"annotation",
"=",
"cause",
".",
"getClass",
"(",
")",
".",
"getAnnotation",
"(",
"ThrowableAnnotation",
".",
"class",
")",
";",
"return",... | Classify the exceptions by extracting the {@link ThrowableType} from a potential {@link ThrowableAnnotation}.
@param cause the {@link Throwable} to classify.
@return The extracted {@link ThrowableType} or ThrowableType.RecoverableError if there is no such annotation. | [
"Classify",
"the",
"exceptions",
"by",
"extracting",
"the",
"{",
"@link",
"ThrowableType",
"}",
"from",
"a",
"potential",
"{",
"@link",
"ThrowableAnnotation",
"}",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/throwable/ThrowableClassifier.java#L32-L35 | train | Get the ThrowableType of the given Throwable. | [
30522,
2270,
10763,
5466,
3085,
13874,
2131,
2705,
10524,
3085,
13874,
1006,
5466,
3085,
3426,
1007,
1063,
2345,
5466,
3085,
11639,
17287,
3508,
5754,
17287,
3508,
1027,
3426,
1012,
2131,
26266,
1006,
1007,
1012,
2131,
11639,
17287,
3508,
1... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
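
A short usage sketch for the classifier record above. The annotated exception class, its name, and the printed values are illustrative only; the annotation and enum constants are the ones declared in org.apache.flink.runtime.throwable, assumed unchanged at this commit.

    import org.apache.flink.runtime.throwable.ThrowableAnnotation;
    import org.apache.flink.runtime.throwable.ThrowableClassifier;
    import org.apache.flink.runtime.throwable.ThrowableType;

    // Hypothetical exception type marked as non-recoverable via the annotation.
    @ThrowableAnnotation(ThrowableType.NonRecoverableError)
    class CorruptConfigException extends RuntimeException {
        CorruptConfigException(String message) { super(message); }
    }

    class ClassifyDemo {
        public static void main(String[] args) {
            // Annotation present: its value is returned.
            System.out.println(ThrowableClassifier.getThrowableType(new CorruptConfigException("bad config")));
            // No annotation: the default RecoverableError applies.
            System.out.println(ThrowableClassifier.getThrowableType(new IllegalStateException("oops")));
        }
    }
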
alibaba/canal | parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java | MysqlConnection.updateSettings | private void updateSettings() throws IOException {
try {
update("set wait_timeout=9999999");
} catch (Exception e) {
logger.warn("update wait_timeout failed", e);
}
try {
update("set net_write_timeout=1800");
} catch (Exception e) {
logger.warn("update net_write_timeout failed", e);
}
try {
update("set net_read_timeout=1800");
} catch (Exception e) {
logger.warn("update net_read_timeout failed", e);
}
try {
            // Have the server skip charset conversion of results: data is sent in the database's raw binary encoding and the client converts as needed
update("set names 'binary'");
} catch (Exception e) {
logger.warn("update names failed", e);
}
try {
            // MySQL 5.6 checksum support requires setting a session variable,
            // otherwise this error occurs: Slave can not handle replication events with the
            // checksum that master is configured to log
            // It must not be set arbitrarily either: it has to match the mysql server's checksum configuration, or RotateLogEvent comes out garbled
            // The single quotes around '@@global.binlog_checksum' must be removed; keeping them makes the master exit on mysql 5.6.29
update("set @master_binlog_checksum= @@global.binlog_checksum");
} catch (Exception e) {
if (!StringUtils.contains(e.getMessage(), "Unknown system variable")) {
logger.warn("update master_binlog_checksum failed", e);
}
}
try {
            // See: https://github.com/alibaba/canal/issues/284
            // MySQL 5.6 needs @slave_uuid set to keep the server from killing the connection
update("set @slave_uuid=uuid()");
} catch (Exception e) {
if (!StringUtils.contains(e.getMessage(), "Unknown system variable")) {
logger.warn("update slave_uuid failed", e);
}
}
try {
            // MariaDB needs a session variable set for some of its special types
update("SET @mariadb_slave_capability='" + LogEvent.MARIA_SLAVE_CAPABILITY_MINE + "'");
} catch (Exception e) {
if (!StringUtils.contains(e.getMessage(), "Unknown system variable")) {
logger.warn("update mariadb_slave_capability failed", e);
}
}
/**
* MASTER_HEARTBEAT_PERIOD sets the interval in seconds between
* replication heartbeats. Whenever the master's binary log is updated
* with an event, the waiting period for the next heartbeat is reset.
* interval is a decimal value having the range 0 to 4294967 seconds and
* a resolution in milliseconds; the smallest nonzero value is 0.001.
* Heartbeats are sent by the master only if there are no unsent events
* in the binary log file for a period longer than interval.
*/
try {
long periodNano = TimeUnit.SECONDS.toNanos(MASTER_HEARTBEAT_PERIOD_SECONDS);
update("SET @master_heartbeat_period=" + periodNano);
} catch (Exception e) {
logger.warn("update master_heartbeat_period failed", e);
}
} | java | private void updateSettings() throws IOException {
try {
update("set wait_timeout=9999999");
} catch (Exception e) {
logger.warn("update wait_timeout failed", e);
}
try {
update("set net_write_timeout=1800");
} catch (Exception e) {
logger.warn("update net_write_timeout failed", e);
}
try {
update("set net_read_timeout=1800");
} catch (Exception e) {
logger.warn("update net_read_timeout failed", e);
}
try {
            // Have the server skip charset conversion of results: data is sent in the database's raw binary encoding and the client converts as needed
update("set names 'binary'");
} catch (Exception e) {
logger.warn("update names failed", e);
}
try {
            // MySQL 5.6 checksum support requires setting a session variable,
            // otherwise this error occurs: Slave can not handle replication events with the
            // checksum that master is configured to log
            // It must not be set arbitrarily either: it has to match the mysql server's checksum configuration, or RotateLogEvent comes out garbled
            // The single quotes around '@@global.binlog_checksum' must be removed; keeping them makes the master exit on mysql 5.6.29
update("set @master_binlog_checksum= @@global.binlog_checksum");
} catch (Exception e) {
if (!StringUtils.contains(e.getMessage(), "Unknown system variable")) {
logger.warn("update master_binlog_checksum failed", e);
}
}
try {
            // See: https://github.com/alibaba/canal/issues/284
            // MySQL 5.6 needs @slave_uuid set to keep the server from killing the connection
update("set @slave_uuid=uuid()");
} catch (Exception e) {
if (!StringUtils.contains(e.getMessage(), "Unknown system variable")) {
logger.warn("update slave_uuid failed", e);
}
}
try {
            // MariaDB needs a session variable set for some of its special types
update("SET @mariadb_slave_capability='" + LogEvent.MARIA_SLAVE_CAPABILITY_MINE + "'");
} catch (Exception e) {
if (!StringUtils.contains(e.getMessage(), "Unknown system variable")) {
logger.warn("update mariadb_slave_capability failed", e);
}
}
/**
* MASTER_HEARTBEAT_PERIOD sets the interval in seconds between
* replication heartbeats. Whenever the master's binary log is updated
* with an event, the waiting period for the next heartbeat is reset.
* interval is a decimal value having the range 0 to 4294967 seconds and
* a resolution in milliseconds; the smallest nonzero value is 0.001.
* Heartbeats are sent by the master only if there are no unsent events
* in the binary log file for a period longer than interval.
*/
try {
long periodNano = TimeUnit.SECONDS.toNanos(MASTER_HEARTBEAT_PERIOD_SECONDS);
update("SET @master_heartbeat_period=" + periodNano);
} catch (Exception e) {
logger.warn("update master_heartbeat_period failed", e);
}
} | [
"private",
"void",
"updateSettings",
"(",
")",
"throws",
"IOException",
"{",
"try",
"{",
"update",
"(",
"\"set wait_timeout=9999999\"",
")",
";",
"}",
"catch",
"(",
"Exception",
"e",
")",
"{",
"logger",
".",
"warn",
"(",
"\"update wait_timeout failed\"",
",",
... | the settings that will need to be checked or set:<br>
<ol>
<li>wait_timeout</li>
<li>net_write_timeout</li>
<li>net_read_timeout</li>
</ol>
@throws IOException | [
"the",
"settings",
"that",
"will",
"need",
"to",
"be",
"checked",
"or",
"set",
":",
"<br",
">",
"<ol",
">",
"<li",
">",
"wait_timeout<",
"/",
"li",
">",
"<li",
">",
"net_write_timeout<",
"/",
"li",
">",
"<li",
">",
"net_read_timeout<",
"/",
"li",
">",
... | 8f088cddc0755f4350c5aaae95c6e4002d90a40f | https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java#L389-L461 | train | Update the settings. | [
30522,
2797,
11675,
14409,
18319,
3070,
2015,
1006,
1007,
11618,
22834,
10288,
24422,
1063,
3046,
1063,
10651,
1006,
1000,
2275,
3524,
1035,
2051,
5833,
1027,
25897,
2683,
2683,
2683,
2683,
1000,
1007,
1025,
1065,
4608,
1006,
6453,
1041,
10... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
apache/flink | flink-core/src/main/java/org/apache/flink/util/ExceptionUtils.java | ExceptionUtils.tryRethrowIOException | public static void tryRethrowIOException(Throwable t) throws IOException {
if (t instanceof IOException) {
throw (IOException) t;
}
else if (t instanceof RuntimeException) {
throw (RuntimeException) t;
}
else if (t instanceof Error) {
throw (Error) t;
}
} | java | public static void tryRethrowIOException(Throwable t) throws IOException {
if (t instanceof IOException) {
throw (IOException) t;
}
else if (t instanceof RuntimeException) {
throw (RuntimeException) t;
}
else if (t instanceof Error) {
throw (Error) t;
}
} | [
"public",
"static",
"void",
"tryRethrowIOException",
"(",
"Throwable",
"t",
")",
"throws",
"IOException",
"{",
"if",
"(",
"t",
"instanceof",
"IOException",
")",
"{",
"throw",
"(",
"IOException",
")",
"t",
";",
"}",
"else",
"if",
"(",
"t",
"instanceof",
"Ru... | Tries to throw the given {@code Throwable} in scenarios where the signatures allows only IOExceptions
(and RuntimeException and Error). Throws this exception directly, if it is an IOException,
a RuntimeException, or an Error. Otherwise does nothing.
@param t The Throwable to be thrown. | [
"Tries",
"to",
"throw",
"the",
"given",
"{",
"@code",
"Throwable",
"}",
"in",
"scenarios",
"where",
"the",
"signatures",
"allows",
"only",
"IOExceptions",
"(",
"and",
"RuntimeException",
"and",
"Error",
")",
".",
"Throws",
"this",
"exception",
"directly",
"if"... | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/util/ExceptionUtils.java#L281-L291 | train | Try to throw an IOException. | [
30522,
2270,
10763,
11675,
3046,
13465,
8093,
5004,
3695,
10288,
24422,
1006,
5466,
3085,
1056,
1007,
11618,
22834,
10288,
24422,
1063,
2065,
1006,
1056,
6013,
11253,
22834,
10288,
24422,
1007,
1063,
5466,
1006,
22834,
10288,
24422,
1007,
105... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
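
A sketch of the intended call pattern: inside a catch-all block the helper rethrows IOException, RuntimeException, and Error directly, and control falls through only for other checked exceptions. The enclosing class and the doFlush() call are illustrative.

    import java.io.IOException;
    import org.apache.flink.util.ExceptionUtils;

    class FlushExample {
        public void flush() throws IOException {
            try {
                doFlush();
            } catch (Throwable t) {
                // Rethrows t when it is an IOException, RuntimeException, or Error ...
                ExceptionUtils.tryRethrowIOException(t);
                // ... otherwise wrap the remaining checked exception ourselves.
                throw new IOException("flush failed", t);
            }
        }

        private void doFlush() throws Exception { /* illustrative work */ }
    }
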
looly/hutool | hutool-core/src/main/java/cn/hutool/core/io/IoUtil.java | IoUtil.copy | public static long copy(ReadableByteChannel in, WritableByteChannel out) throws IORuntimeException {
return copy(in, out, DEFAULT_BUFFER_SIZE);
} | java | public static long copy(ReadableByteChannel in, WritableByteChannel out) throws IORuntimeException {
return copy(in, out, DEFAULT_BUFFER_SIZE);
} | [
"public",
"static",
"long",
"copy",
"(",
"ReadableByteChannel",
"in",
",",
"WritableByteChannel",
"out",
")",
"throws",
"IORuntimeException",
"{",
"return",
"copy",
"(",
"in",
",",
"out",
",",
"DEFAULT_BUFFER_SIZE",
")",
";",
"}"
] | Copies a stream using NIO; the channels are not closed
@param in {@link ReadableByteChannel}
@param out {@link WritableByteChannel}
@return the number of bytes copied
@throws IORuntimeException IO exception
@since 4.5.0 | [
"拷贝流,使用NIO,不会关闭流"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/IoUtil.java#L236-L238 | train | Copies data from the readable channel to the writable channel using NIO. | [
30522,
2270,
10763,
2146,
6100,
1006,
3191,
3085,
3762,
15007,
20147,
2140,
1999,
1010,
25697,
3085,
3762,
15007,
20147,
2140,
2041,
1007,
11618,
22834,
15532,
7292,
10288,
24422,
1063,
2709,
6100,
1006,
1999,
1010,
2041,
1010,
12398,
1035,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
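
Since FileChannel implements both channel interfaces, a file-to-file copy is a natural use of the overload above. The paths are placeholders; copy() itself leaves the channels open, so the try-with-resources block does the closing.

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import cn.hutool.core.io.IoUtil;

    class ChannelCopyDemo {
        static long copyFile() throws IOException {
            try (FileInputStream in = new FileInputStream("src.bin");
                 FileOutputStream out = new FileOutputStream("dst.bin")) {
                // Returns the number of bytes copied with the default buffer size.
                return IoUtil.copy(in.getChannel(), out.getChannel());
            }
        }
    }
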
looly/hutool | hutool-extra/src/main/java/cn/hutool/extra/mail/MailUtil.java | MailUtil.send | public static void send(MailAccount mailAccount, Collection<String> tos, Collection<String> ccs, Collection<String> bccs, String subject, String content, boolean isHtml, File... files) {
final Mail mail = Mail.create(mailAccount);
	//optional CC recipients
if(CollUtil.isNotEmpty(ccs)) {
mail.setCcs(ccs.toArray(new String[ccs.size()]));
}
	//optional BCC recipients
if(CollUtil.isNotEmpty(bccs)) {
mail.setBccs(bccs.toArray(new String[bccs.size()]));
}
mail.setTos(tos.toArray(new String[tos.size()]));
mail.setTitle(subject);
mail.setContent(content);
mail.setHtml(isHtml);
mail.setFiles(files);
mail.send();
} | java | public static void send(MailAccount mailAccount, Collection<String> tos, Collection<String> ccs, Collection<String> bccs, String subject, String content, boolean isHtml, File... files) {
final Mail mail = Mail.create(mailAccount);
	//optional CC recipients
if(CollUtil.isNotEmpty(ccs)) {
mail.setCcs(ccs.toArray(new String[ccs.size()]));
}
	//optional BCC recipients
if(CollUtil.isNotEmpty(bccs)) {
mail.setBccs(bccs.toArray(new String[bccs.size()]));
}
mail.setTos(tos.toArray(new String[tos.size()]));
mail.setTitle(subject);
mail.setContent(content);
mail.setHtml(isHtml);
mail.setFiles(files);
mail.send();
} | [
"public",
"static",
"void",
"send",
"(",
"MailAccount",
"mailAccount",
",",
"Collection",
"<",
"String",
">",
"tos",
",",
"Collection",
"<",
"String",
">",
"ccs",
",",
"Collection",
"<",
"String",
">",
"bccs",
",",
"String",
"subject",
",",
"String",
"cont... | 发送邮件给多人
@param mailAccount 邮件认证对象
@param tos 收件人列表
@param ccs 抄送人列表,可以为null或空
@param bccs 密送人列表,可以为null或空
@param subject 标题
@param content 正文
@param isHtml 是否为HTML格式
@param files 附件列表
@since 4.0.3 | [
"发送邮件给多人"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-extra/src/main/java/cn/hutool/extra/mail/MailUtil.java#L180-L199 | train | Builds and sends a mail to multiple recipients using the given MailAccount. | [
30522,
2270,
10763,
11675,
4604,
1006,
5653,
6305,
3597,
16671,
5653,
6305,
3597,
16671,
1010,
3074,
1026,
5164,
1028,
2000,
2015,
1010,
3074,
1026,
5164,
1028,
10507,
2015,
1010,
3074,
1026,
5164,
1028,
4647,
6169,
1010,
5164,
3395,
1010,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
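
An illustrative call of the overload above; host, credentials, and addresses are placeholders, and the setter names are assumed from Hutool's MailAccount. CC and BCC are passed as null, which the isNotEmpty checks in the method tolerate, and the trailing File varargs are simply omitted.

    MailAccount account = new MailAccount();
    account.setHost("smtp.example.com");
    account.setAuth(true);
    account.setUser("noreply@example.com");
    account.setPass("secret");
    account.setFrom("noreply@example.com");

    MailUtil.send(account,
            Collections.singletonList("to@example.com"), // tos
            null,                                        // ccs, optional
            null,                                        // bccs, optional
            "Weekly report",
            "<b>All green.</b>",
            true);                                       // isHtml; no attachment files
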
looly/hutool | hutool-db/src/main/java/cn/hutool/db/DaoTemplate.java | DaoTemplate.findBySql | public List<Entity> findBySql(String sql, Object... params) throws SQLException {
String selectKeyword = StrUtil.subPre(sql.trim(), 6).toLowerCase();
if(false == "select".equals(selectKeyword)){
sql = "SELECT * FROM " + this.tableName + " " + sql;
}
return db.query(sql, new EntityListHandler(), params);
} | java | public List<Entity> findBySql(String sql, Object... params) throws SQLException {
String selectKeyword = StrUtil.subPre(sql.trim(), 6).toLowerCase();
if(false == "select".equals(selectKeyword)){
sql = "SELECT * FROM " + this.tableName + " " + sql;
}
return db.query(sql, new EntityListHandler(), params);
} | [
"public",
"List",
"<",
"Entity",
">",
"findBySql",
"(",
"String",
"sql",
",",
"Object",
"...",
"params",
")",
"throws",
"SQLException",
"{",
"String",
"selectKeyword",
"=",
"StrUtil",
".",
"subPre",
"(",
"sql",
".",
"trim",
"(",
")",
",",
"6",
")",
"."... | 根据SQL语句查询结果<br>
SQL语句可以是非完整SQL语句,可以只提供查询的条件部分(例如WHERE部分)<br>
此方法会自动补全SELECT * FROM [tableName] 部分,这样就无需关心表名,直接提供条件即可
@param sql SQL语句
@param params SQL占位符中对应的参数
@return 记录
@throws SQLException SQL执行异常 | [
"根据SQL语句查询结果<br",
">",
"SQL语句可以是非完整SQL语句,可以只提供查询的条件部分(例如WHERE部分)<br",
">",
"此方法会自动补全SELECT",
"*",
"FROM",
"[",
"tableName",
"]",
"部分,这样就无需关心表名,直接提供条件即可"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/DaoTemplate.java#L292-L298 | train | Find by sql. | [
30522,
2270,
2862,
1026,
9178,
1028,
2424,
3762,
2015,
4160,
2140,
1006,
5164,
29296,
1010,
4874,
1012,
1012,
1012,
11498,
5244,
1007,
11618,
29296,
10288,
24422,
1063,
5164,
7276,
14839,
18351,
1027,
2358,
22134,
4014,
1012,
4942,
28139,
1... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
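
An illustrative pair of calls, assumed to run in a context that handles SQLException; the table name "user" is a placeholder. The select-keyword check above means only the first form gets the SELECT * FROM prefix.

    DaoTemplate dao = new DaoTemplate("user");
    // Condition-only input is auto-prefixed with "SELECT * FROM user ":
    List<Entity> adults = dao.findBySql("WHERE age > ? ORDER BY id", 18);
    // Input already starting with "select" is passed through unchanged:
    List<Entity> names = dao.findBySql("SELECT name FROM user WHERE age > ?", 18);
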
netty/netty | codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2RemoteFlowController.java | DefaultHttp2RemoteFlowController.channelHandlerContext | @Override
public void channelHandlerContext(ChannelHandlerContext ctx) throws Http2Exception {
this.ctx = checkNotNull(ctx, "ctx");
// Writing the pending bytes will not check writability change and instead a writability change notification
// to be provided by an explicit call.
channelWritabilityChanged();
// Don't worry about cleaning up queued frames here if ctx is null. It is expected that all streams will be
// closed and the queue cleanup will occur when the stream state transitions occur.
// If any frames have been queued up, we should send them now that we have a channel context.
if (isChannelWritable()) {
writePendingBytes();
}
} | java | @Override
public void channelHandlerContext(ChannelHandlerContext ctx) throws Http2Exception {
this.ctx = checkNotNull(ctx, "ctx");
// Writing the pending bytes will not check writability change and instead a writability change notification
// to be provided by an explicit call.
channelWritabilityChanged();
// Don't worry about cleaning up queued frames here if ctx is null. It is expected that all streams will be
// closed and the queue cleanup will occur when the stream state transitions occur.
// If any frames have been queued up, we should send them now that we have a channel context.
if (isChannelWritable()) {
writePendingBytes();
}
} | [
"@",
"Override",
"public",
"void",
"channelHandlerContext",
"(",
"ChannelHandlerContext",
"ctx",
")",
"throws",
"Http2Exception",
"{",
"this",
".",
"ctx",
"=",
"checkNotNull",
"(",
"ctx",
",",
"\"ctx\"",
")",
";",
"// Writing the pending bytes will not check writability... | {@inheritDoc}
<p>
Any queued {@link FlowControlled} objects will be sent. | [
"{"
] | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2RemoteFlowController.java#L133-L148 | train | Sets the channel handler context and, if the channel is writable, writes any queued pending bytes. | [
30522,
1030,
2058,
15637,
2270,
11675,
3149,
11774,
3917,
8663,
18209,
1006,
3149,
11774,
3917,
8663,
18209,
14931,
2595,
1007,
11618,
8299,
2475,
10288,
24422,
1063,
2023,
1012,
14931,
2595,
1027,
4638,
17048,
11231,
3363,
1006,
14931,
2595,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
spring-projects/spring-boot | spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/undertow/UndertowReactiveWebServerFactory.java | UndertowReactiveWebServerFactory.setBuilderCustomizers | public void setBuilderCustomizers(
Collection<? extends UndertowBuilderCustomizer> customizers) {
Assert.notNull(customizers, "Customizers must not be null");
this.builderCustomizers = new ArrayList<>(customizers);
} | java | public void setBuilderCustomizers(
Collection<? extends UndertowBuilderCustomizer> customizers) {
Assert.notNull(customizers, "Customizers must not be null");
this.builderCustomizers = new ArrayList<>(customizers);
} | [
"public",
"void",
"setBuilderCustomizers",
"(",
"Collection",
"<",
"?",
"extends",
"UndertowBuilderCustomizer",
">",
"customizers",
")",
"{",
"Assert",
".",
"notNull",
"(",
"customizers",
",",
"\"Customizers must not be null\"",
")",
";",
"this",
".",
"builderCustomiz... | Set {@link UndertowBuilderCustomizer}s that should be applied to the Undertow
{@link io.undertow.Undertow.Builder Builder}. Calling this method will replace any
existing customizers.
@param customizers the customizers to set | [
"Set",
"{"
] | 0b27f7c70e164b2b1a96477f1d9c1acba56790c1 | https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/undertow/UndertowReactiveWebServerFactory.java#L302-L306 | train | Sets the customizers to be used for building the underlying builder. | [
30522,
2270,
11675,
2275,
8569,
23891,
29006,
20389,
17629,
2015,
1006,
3074,
1026,
1029,
8908,
2104,
18790,
8569,
23891,
29006,
20389,
17629,
1028,
7661,
17629,
2015,
1007,
1063,
20865,
1012,
2025,
11231,
3363,
1006,
7661,
17629,
2015,
1010,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
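
A minimal configuration sketch; the io-thread count and the standalone factory instantiation are illustrative (in a real Spring Boot app the factory usually comes from a bean).

    UndertowReactiveWebServerFactory factory = new UndertowReactiveWebServerFactory();
    // Replaces any previously registered customizers with this single one.
    factory.setBuilderCustomizers(Collections.singletonList(
            builder -> builder.setIoThreads(4)));   // io.undertow.Undertow.Builder API
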
alibaba/canal | store/src/main/java/com/alibaba/otter/canal/store/helper/CanalEventUtils.java | CanalEventUtils.createPosition | public static LogPosition createPosition(Event event, boolean included) {
EntryPosition position = new EntryPosition();
position.setJournalName(event.getJournalName());
position.setPosition(event.getPosition());
position.setTimestamp(event.getExecuteTime());
position.setIncluded(included);
LogPosition logPosition = new LogPosition();
logPosition.setPostion(position);
logPosition.setIdentity(event.getLogIdentity());
return logPosition;
} | java | public static LogPosition createPosition(Event event, boolean included) {
EntryPosition position = new EntryPosition();
position.setJournalName(event.getJournalName());
position.setPosition(event.getPosition());
position.setTimestamp(event.getExecuteTime());
position.setIncluded(included);
LogPosition logPosition = new LogPosition();
logPosition.setPostion(position);
logPosition.setIdentity(event.getLogIdentity());
return logPosition;
} | [
"public",
"static",
"LogPosition",
"createPosition",
"(",
"Event",
"event",
",",
"boolean",
"included",
")",
"{",
"EntryPosition",
"position",
"=",
"new",
"EntryPosition",
"(",
")",
";",
"position",
".",
"setJournalName",
"(",
"event",
".",
"getJournalName",
"("... | 根据entry创建对应的Position对象 | [
"根据entry创建对应的Position对象"
] | 8f088cddc0755f4350c5aaae95c6e4002d90a40f | https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/store/src/main/java/com/alibaba/otter/canal/store/helper/CanalEventUtils.java#L67-L78 | train | Creates a log position object. | [
30522,
2270,
10763,
8833,
26994,
3443,
26994,
1006,
2724,
2724,
1010,
22017,
20898,
2443,
1007,
1063,
4443,
26994,
2597,
1027,
2047,
4443,
26994,
1006,
1007,
1025,
2597,
1012,
2275,
23099,
12789,
19666,
14074,
1006,
2724,
1012,
2131,
23099,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
networknt/light-4j | utility/src/main/java/com/networknt/utility/NioUtils.java | NioUtils.getTempDir | public static String getTempDir() {
// default is user home directory
String tempDir = System.getProperty("user.home");
try{
//create a temp file
File temp = File.createTempFile("A0393939", ".tmp");
            //Get temporary file path
String absolutePath = temp.getAbsolutePath();
tempDir = absolutePath.substring(0,absolutePath.lastIndexOf(File.separator));
}catch(IOException e){}
return tempDir;
} | java | public static String getTempDir() {
// default is user home directory
String tempDir = System.getProperty("user.home");
try{
//create a temp file
File temp = File.createTempFile("A0393939", ".tmp");
            //Get temporary file path
String absolutePath = temp.getAbsolutePath();
tempDir = absolutePath.substring(0,absolutePath.lastIndexOf(File.separator));
}catch(IOException e){}
return tempDir;
} | [
"public",
"static",
"String",
"getTempDir",
"(",
")",
"{",
"// default is user home directory",
"String",
"tempDir",
"=",
"System",
".",
"getProperty",
"(",
"\"user.home\"",
")",
";",
"try",
"{",
"//create a temp file",
"File",
"temp",
"=",
"File",
".",
"createTem... | get temp dir from OS.
@return String temp dir | [
"get",
"temp",
"dir",
"from",
"OS",
"."
] | 2a60257c60663684c8f6dc8b5ea3cf184e534db6 | https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/utility/src/main/java/com/networknt/utility/NioUtils.java#L279-L290 | train | Get the temp directory. | [
30522,
2270,
10763,
5164,
2131,
18532,
17299,
4313,
1006,
1007,
1063,
1013,
1013,
12398,
2003,
5310,
2188,
14176,
5164,
8915,
8737,
4305,
2099,
1027,
2291,
1012,
2131,
21572,
4842,
3723,
1006,
1000,
5310,
1012,
30524,
1013,
1013,
3443,
1037... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
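
The probe above infers the directory from a throwaway temp file. When relying on the standard JVM property is acceptable, the one-liner below yields the same information directly (a simplification, not a drop-in for every platform):

    String tempDir = System.getProperty("java.io.tmpdir");
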
networknt/light-4j | dump/src/main/java/com/networknt/dump/BodyDumper.java | BodyDumper.dumpResponse | @Override
public void dumpResponse(Map<String, Object> result) {
byte[] responseBodyAttachment = exchange.getAttachment(StoreResponseStreamSinkConduit.RESPONSE);
if(responseBodyAttachment != null) {
this.bodyContent = config.isMaskEnabled() ? Mask.maskJson(new ByteArrayInputStream(responseBodyAttachment), "responseBody") : new String(responseBodyAttachment, UTF_8);
}
this.putDumpInfoTo(result);
} | java | @Override
public void dumpResponse(Map<String, Object> result) {
byte[] responseBodyAttachment = exchange.getAttachment(StoreResponseStreamSinkConduit.RESPONSE);
if(responseBodyAttachment != null) {
this.bodyContent = config.isMaskEnabled() ? Mask.maskJson(new ByteArrayInputStream(responseBodyAttachment), "responseBody") : new String(responseBodyAttachment, UTF_8);
}
this.putDumpInfoTo(result);
} | [
"@",
"Override",
"public",
"void",
"dumpResponse",
"(",
"Map",
"<",
"String",
",",
"Object",
">",
"result",
")",
"{",
"byte",
"[",
"]",
"responseBodyAttachment",
"=",
"exchange",
".",
"getAttachment",
"(",
"StoreResponseStreamSinkConduit",
".",
"RESPONSE",
")",
... | impl of dumping response body to result
@param result A map you want to put dump information to | [
"impl",
"of",
"dumping",
"response",
"body",
"to",
"result"
] | 2a60257c60663684c8f6dc8b5ea3cf184e534db6 | https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/dump/src/main/java/com/networknt/dump/BodyDumper.java#L85-L92 | train | Dump response to result | [
30522,
1030,
2058,
15637,
2270,
11675,
15653,
6072,
26029,
3366,
1006,
4949,
1026,
5164,
1010,
4874,
1028,
2765,
1007,
1063,
24880,
1031,
1033,
3433,
23684,
19321,
6776,
3672,
1027,
3863,
1012,
2131,
19321,
6776,
3672,
1006,
3573,
6072,
260... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
looly/hutool | hutool-crypto/src/main/java/cn/hutool/crypto/SecureUtil.java | SecureUtil.generatePublicKey | public static PublicKey generatePublicKey(String algorithm, KeySpec keySpec) {
return KeyUtil.generatePublicKey(algorithm, keySpec);
} | java | public static PublicKey generatePublicKey(String algorithm, KeySpec keySpec) {
return KeyUtil.generatePublicKey(algorithm, keySpec);
} | [
"public",
"static",
"PublicKey",
"generatePublicKey",
"(",
"String",
"algorithm",
",",
"KeySpec",
"keySpec",
")",
"{",
"return",
"KeyUtil",
".",
"generatePublicKey",
"(",
"algorithm",
",",
"keySpec",
")",
";",
"}"
] | Generates a public key, for asymmetric encryption only<br>
For available algorithms see: https://docs.oracle.com/javase/7/docs/technotes/guides/security/StandardNames.html#KeyFactory
@param algorithm algorithm
@param keySpec {@link KeySpec}
@return public key {@link PublicKey}
@since 3.1.1 | [
"生成公钥,仅用于非对称加密<br",
">",
"算法见:https",
":",
"//",
"docs",
".",
"oracle",
".",
"com",
"/",
"javase",
"/",
"7",
"/",
"docs",
"/",
"technotes",
"/",
"guides",
"/",
"security",
"/",
"StandardNames",
".",
"html#KeyFactory"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/SecureUtil.java#L191-L193 | train | Generates a public key using the specified algorithm and key spec. | [
30522,
2270,
10763,
30524,
1010,
6309,
5051,
2278,
6309,
5051,
2278,
1007,
1063,
2709,
3145,
21823,
2140,
1012,
9699,
14289,
16558,
6799,
3240,
1006,
9896,
1010,
6309,
5051,
2278,
1007,
1025,
1065,
102,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
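
An illustrative round trip for an RSA key, shown as a fragment: the key pair generated here exists only to obtain encoded bytes, which in practice would come from a file or certificate.

    KeyPair pair = KeyUtil.generateKeyPair("RSA");
    byte[] encoded = pair.getPublic().getEncoded();   // X.509 SubjectPublicKeyInfo bytes
    PublicKey restored = SecureUtil.generatePublicKey("RSA", new X509EncodedKeySpec(encoded));
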
looly/hutool | hutool-core/src/main/java/cn/hutool/core/lang/Validator.java | Validator.validateCitizenIdNumber | public static <T extends CharSequence> T validateCitizenIdNumber(T value, String errorMsg) throws ValidateException {
if (false == isCitizenId(value)) {
throw new ValidateException(errorMsg);
}
return value;
} | java | public static <T extends CharSequence> T validateCitizenIdNumber(T value, String errorMsg) throws ValidateException {
if (false == isCitizenId(value)) {
throw new ValidateException(errorMsg);
}
return value;
} | [
"public",
"static",
"<",
"T",
"extends",
"CharSequence",
">",
"T",
"validateCitizenIdNumber",
"(",
"T",
"value",
",",
"String",
"errorMsg",
")",
"throws",
"ValidateException",
"{",
"if",
"(",
"false",
"==",
"isCitizenId",
"(",
"value",
")",
")",
"{",
"throw"... | 验证是否为身份证号码(18位中国)<br>
出生日期只支持到到2999年
@param <T> 字符串类型
@param value 值
@param errorMsg 验证错误的信息
@return 验证后的值
@throws ValidateException 验证异常 | [
"验证是否为身份证号码(18位中国)<br",
">",
"出生日期只支持到到2999年"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/lang/Validator.java#L705-L710 | train | Validate a Citizen Id number. | [
30522,
2270,
10763,
1026,
1056,
8908,
25869,
3366,
4226,
5897,
1028,
1056,
9398,
3686,
26243,
4697,
3490,
2094,
19172,
5677,
1006,
1056,
3643,
1010,
5164,
7561,
5244,
2290,
1007,
11618,
9398,
3686,
10288,
24422,
1063,
2065,
1006,
6270,
1027... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
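
A usage sketch; input is an arbitrary String from the caller, and ValidateException appears to be unchecked in Hutool, so catching it is optional.

    try {
        // Returns the input unchanged when it passes the 18-digit ID check.
        String id = Validator.validateCitizenIdNumber(input, "not a valid mainland-China ID number");
    } catch (ValidateException e) {
        // input was rejected; e.getMessage() carries the text above
    }
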
apache/incubator-shardingsphere | sharding-jdbc/sharding-jdbc-core/src/main/java/org/apache/shardingsphere/shardingjdbc/executor/StatementExecutor.java | StatementExecutor.executeUpdate | public int executeUpdate() throws SQLException {
return executeUpdate(new Updater() {
@Override
public int executeUpdate(final Statement statement, final String sql) throws SQLException {
return statement.executeUpdate(sql);
}
});
} | java | public int executeUpdate() throws SQLException {
return executeUpdate(new Updater() {
@Override
public int executeUpdate(final Statement statement, final String sql) throws SQLException {
return statement.executeUpdate(sql);
}
});
} | [
"public",
"int",
"executeUpdate",
"(",
")",
"throws",
"SQLException",
"{",
"return",
"executeUpdate",
"(",
"new",
"Updater",
"(",
")",
"{",
"@",
"Override",
"public",
"int",
"executeUpdate",
"(",
"final",
"Statement",
"statement",
",",
"final",
"String",
"sql"... | Execute update.
@return effected records count
@throws SQLException SQL exception | [
"Execute",
"update",
"."
] | f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d | https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-jdbc/sharding-jdbc-core/src/main/java/org/apache/shardingsphere/shardingjdbc/executor/StatementExecutor.java#L116-L124 | train | Execute an SQL update. | [
30522,
2270,
20014,
15389,
6279,
13701,
1006,
1007,
11618,
29296,
10288,
24422,
1063,
2709,
15389,
6279,
13701,
1006,
2047,
10651,
2099,
1006,
1007,
1063,
1030,
2058,
15637,
2270,
20014,
15389,
6279,
13701,
1006,
2345,
4861,
4861,
1010,
2345,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
netty/netty | transport/src/main/java/io/netty/bootstrap/Bootstrap.java | Bootstrap.resolver | @SuppressWarnings("unchecked")
public Bootstrap resolver(AddressResolverGroup<?> resolver) {
this.resolver = (AddressResolverGroup<SocketAddress>) (resolver == null ? DEFAULT_RESOLVER : resolver);
return this;
} | java | @SuppressWarnings("unchecked")
public Bootstrap resolver(AddressResolverGroup<?> resolver) {
this.resolver = (AddressResolverGroup<SocketAddress>) (resolver == null ? DEFAULT_RESOLVER : resolver);
return this;
} | [
"@",
"SuppressWarnings",
"(",
"\"unchecked\"",
")",
"public",
"Bootstrap",
"resolver",
"(",
"AddressResolverGroup",
"<",
"?",
">",
"resolver",
")",
"{",
"this",
".",
"resolver",
"=",
"(",
"AddressResolverGroup",
"<",
"SocketAddress",
">",
")",
"(",
"resolver",
... | Sets the {@link NameResolver} which will resolve the address of the unresolved named address.
@param resolver the {@link NameResolver} for this {@code Bootstrap}; may be {@code null}, in which case a default
resolver will be used
@see io.netty.resolver.DefaultAddressResolverGroup | [
"Sets",
"the",
"{",
"@link",
"NameResolver",
"}",
"which",
"will",
"resolve",
"the",
"address",
"of",
"the",
"unresolved",
"named",
"address",
"."
] | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/transport/src/main/java/io/netty/bootstrap/Bootstrap.java#L78-L82 | train | Sets the address resolver group. | [
30522,
1030,
16081,
9028,
5582,
2015,
1006,
1000,
4895,
5403,
18141,
1000,
1007,
2270,
6879,
6494,
2361,
10663,
2099,
1006,
4769,
6072,
4747,
6299,
17058,
1026,
1029,
1028,
10663,
2099,
1007,
1063,
2023,
1012,
10663,
2099,
1027,
1006,
4769,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
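
An illustrative client bootstrap that opts out of client-side name resolution, e.g. when a SOCKS proxy resolves hostnames; group, channel type, and handler are the usual NIO choices, and NoopAddressResolverGroup ships with netty's resolver package.

    Bootstrap b = new Bootstrap()
            .group(new NioEventLoopGroup())
            .channel(NioSocketChannel.class)
            .resolver(NoopAddressResolverGroup.INSTANCE)   // skip DNS lookups in the client
            .handler(new LoggingHandler());
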
apache/spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalog/v2/expressions/Expressions.java | Expressions.bucket | public Transform bucket(int numBuckets, String... columns) {
return LogicalExpressions.bucket(numBuckets,
JavaConverters.asScalaBuffer(Arrays.asList(columns)).toSeq());
} | java | public Transform bucket(int numBuckets, String... columns) {
return LogicalExpressions.bucket(numBuckets,
JavaConverters.asScalaBuffer(Arrays.asList(columns)).toSeq());
} | [
"public",
"Transform",
"bucket",
"(",
"int",
"numBuckets",
",",
"String",
"...",
"columns",
")",
"{",
"return",
"LogicalExpressions",
".",
"bucket",
"(",
"numBuckets",
",",
"JavaConverters",
".",
"asScalaBuffer",
"(",
"Arrays",
".",
"asList",
"(",
"columns",
"... | Create a bucket transform for one or more columns.
<p>
This transform represents a logical mapping from a value to a bucket id in [0, numBuckets)
based on a hash of the value.
<p>
The name reported by transforms created with this method is "bucket".
@param numBuckets the number of output buckets
@param columns input columns for the bucket transform
@return a logical bucket transform with name "bucket" | [
"Create",
"a",
"bucket",
"transform",
"for",
"one",
"or",
"more",
"columns",
".",
"<p",
">",
"This",
"transform",
"represents",
"a",
"logical",
"mapping",
"from",
"a",
"value",
"to",
"a",
"bucket",
"id",
"in",
"[",
"0",
"numBuckets",
")",
"based",
"on",
... | 25ee0474f47d9c30d6f553a7892d9549f91071cf | https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/catalyst/src/main/java/org/apache/spark/sql/catalog/v2/expressions/Expressions.java#L84-L87 | train | Transform a sequence of columns to a sequence of buckets. | [
30522,
2270,
10938,
13610,
1006,
20014,
15903,
12722,
8454,
1010,
5164,
1012,
1012,
1012,
7753,
1007,
1063,
2709,
11177,
10288,
20110,
8496,
1012,
13610,
1006,
15903,
12722,
8454,
1010,
9262,
8663,
16874,
2545,
1012,
4632,
25015,
8569,
12494,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
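
A short sketch of the API above; the column names and bucket count are placeholders, as they would appear in a table's partitioning specification.

    // Hash rows of (user_id, region) into 16 buckets.
    Transform byUserRegion = Expressions.bucket(16, "user_id", "region");
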
apache/incubator-shardingsphere | sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/old/parser/clause/expression/AliasExpressionParser.java | AliasExpressionParser.parseSelectItemAlias | public Optional<String> parseSelectItemAlias() {
if (lexerEngine.skipIfEqual(DefaultKeyword.AS)) {
return parseWithAs(null, false, null);
}
if (lexerEngine.equalAny(getDefaultAvailableKeywordsForSelectItemAlias()) || lexerEngine.equalAny(getCustomizedAvailableKeywordsForSelectItemAlias())) {
return parseAlias(null, false, null);
}
return Optional.absent();
} | java | public Optional<String> parseSelectItemAlias() {
if (lexerEngine.skipIfEqual(DefaultKeyword.AS)) {
return parseWithAs(null, false, null);
}
if (lexerEngine.equalAny(getDefaultAvailableKeywordsForSelectItemAlias()) || lexerEngine.equalAny(getCustomizedAvailableKeywordsForSelectItemAlias())) {
return parseAlias(null, false, null);
}
return Optional.absent();
} | [
"public",
"Optional",
"<",
"String",
">",
"parseSelectItemAlias",
"(",
")",
"{",
"if",
"(",
"lexerEngine",
".",
"skipIfEqual",
"(",
"DefaultKeyword",
".",
"AS",
")",
")",
"{",
"return",
"parseWithAs",
"(",
"null",
",",
"false",
",",
"null",
")",
";",
"}"... | Parse alias for select item.
@return alias for select item | [
"Parse",
"alias",
"for",
"select",
"item",
"."
] | f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d | https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/old/parser/clause/expression/AliasExpressionParser.java#L49-L57 | train | Parse select item alias. | [
30522,
2270,
11887,
1026,
5164,
1028,
11968,
8583,
12260,
6593,
4221,
9067,
7951,
1006,
1007,
1063,
2065,
1006,
17244,
7869,
3070,
3170,
1012,
13558,
29323,
26426,
1006,
12398,
14839,
18351,
1012,
2004,
1007,
1007,
1063,
2709,
11968,
3366,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
looly/hutool | hutool-json/src/main/java/cn/hutool/json/JSONObject.java | JSONObject.putOpt | public JSONObject putOpt(String key, Object value) throws JSONException {
if (key != null && value != null) {
this.put(key, value);
}
return this;
} | java | public JSONObject putOpt(String key, Object value) throws JSONException {
if (key != null && value != null) {
this.put(key, value);
}
return this;
} | [
"public",
"JSONObject",
"putOpt",
"(",
"String",
"key",
",",
"Object",
"value",
")",
"throws",
"JSONException",
"{",
"if",
"(",
"key",
"!=",
"null",
"&&",
"value",
"!=",
"null",
")",
"{",
"this",
".",
"put",
"(",
"key",
",",
"value",
")",
";",
"}",
... | 在键和值都为非空的情况下put到JSONObject中
@param key key
@param value the value object; may be one of: Boolean, Double, Integer, JSONArray, JSONObject, Long, String, or the JSONNull.NULL.
@return this.
@throws JSONException if the value is a non-finite number | [
"在键和值都为非空的情况下put到JSONObject中"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-json/src/main/java/cn/hutool/json/JSONObject.java#L422-L427 | train | Puts the key/value pair into this JSONObject only when both are non-null. | [
30522,
2270,
1046,
3385,
16429,
20614,
2404,
7361,
2102,
1006,
5164,
3145,
1010,
4874,
3643,
1007,
11618,
1046,
3385,
10288,
24422,
1063,
2065,
1006,
3145,
999,
1027,
19701,
1004,
1004,
3643,
999,
1027,
19701,
1007,
1063,
2023,
1012,
2404,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
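
Because null keys and null values are silently skipped, putOpt chains safely; a small sketch with illustrative keys:

    JSONObject obj = new JSONObject()
            .putOpt("name", "alice")
            .putOpt("nickname", null)    // ignored: value is null, no JSONException
            .putOpt(null, "orphan");     // ignored: key is null
    // obj now contains only {"name":"alice"}
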
apache/flink | flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/plan/rules/logical/FlinkAggregateExpandDistinctAggregatesRule.java | FlinkAggregateExpandDistinctAggregatesRule.convertSingletonDistinct | private RelBuilder convertSingletonDistinct(RelBuilder relBuilder,
Aggregate aggregate, Set<Pair<List<Integer>, Integer>> argLists) {
// In this case, we are assuming that there is a single distinct function.
// So make sure that argLists is of size one.
Preconditions.checkArgument(argLists.size() == 1);
// For example,
// SELECT deptno, COUNT(*), SUM(bonus), MIN(DISTINCT sal)
// FROM emp
// GROUP BY deptno
//
// becomes
//
// SELECT deptno, SUM(cnt), SUM(bonus), MIN(sal)
// FROM (
// SELECT deptno, COUNT(*) as cnt, SUM(bonus), sal
// FROM EMP
// GROUP BY deptno, sal) // Aggregate B
// GROUP BY deptno // Aggregate A
relBuilder.push(aggregate.getInput());
final List<AggregateCall> originalAggCalls = aggregate.getAggCallList();
final ImmutableBitSet originalGroupSet = aggregate.getGroupSet();
// Add the distinct aggregate column(s) to the group-by columns,
// if not already a part of the group-by
final SortedSet<Integer> bottomGroupSet = new TreeSet<>();
bottomGroupSet.addAll(aggregate.getGroupSet().asList());
for (AggregateCall aggCall : originalAggCalls) {
if (aggCall.isDistinct()) {
bottomGroupSet.addAll(aggCall.getArgList());
break; // since we only have single distinct call
}
}
// Generate the intermediate aggregate B, the one on the bottom that converts
// a distinct call to group by call.
// Bottom aggregate is the same as the original aggregate, except that
// the bottom aggregate has converted the DISTINCT aggregate to a group by clause.
final List<AggregateCall> bottomAggregateCalls = new ArrayList<>();
for (AggregateCall aggCall : originalAggCalls) {
// Project the column corresponding to the distinct aggregate. Project
// as-is all the non-distinct aggregates
if (!aggCall.isDistinct()) {
final AggregateCall newCall =
AggregateCall.create(aggCall.getAggregation(), false,
aggCall.isApproximate(), aggCall.getArgList(), -1,
ImmutableBitSet.of(bottomGroupSet).cardinality(),
relBuilder.peek(), null, aggCall.name);
bottomAggregateCalls.add(newCall);
}
}
// Generate the aggregate B (see the reference example above)
relBuilder.push(
aggregate.copy(
aggregate.getTraitSet(), relBuilder.build(),
false, ImmutableBitSet.of(bottomGroupSet), null, bottomAggregateCalls));
// Add aggregate A (see the reference example above), the top aggregate
// to handle the rest of the aggregation that the bottom aggregate hasn't handled
final List<AggregateCall> topAggregateCalls = com.google.common.collect.Lists.newArrayList();
// Use the remapped arguments for the (non)distinct aggregate calls
int nonDistinctAggCallProcessedSoFar = 0;
for (AggregateCall aggCall : originalAggCalls) {
final AggregateCall newCall;
if (aggCall.isDistinct()) {
List<Integer> newArgList = new ArrayList<>();
for (int arg : aggCall.getArgList()) {
newArgList.add(bottomGroupSet.headSet(arg).size());
}
newCall =
AggregateCall.create(aggCall.getAggregation(),
false,
aggCall.isApproximate(),
newArgList,
-1,
originalGroupSet.cardinality(),
relBuilder.peek(),
aggCall.getType(),
aggCall.name);
} else {
// If aggregate B had a COUNT aggregate call the corresponding aggregate at
// aggregate A must be SUM. For other aggregates, it remains the same.
final List<Integer> newArgs =
com.google.common.collect.Lists.newArrayList(
bottomGroupSet.size() + nonDistinctAggCallProcessedSoFar);
if (aggCall.getAggregation().getKind() == SqlKind.COUNT) {
newCall =
AggregateCall.create(new SqlSumEmptyIsZeroAggFunction(), false,
aggCall.isApproximate(), newArgs, -1,
originalGroupSet.cardinality(), relBuilder.peek(),
aggCall.getType(), aggCall.getName());
} else {
newCall =
AggregateCall.create(aggCall.getAggregation(), false,
aggCall.isApproximate(), newArgs, -1,
originalGroupSet.cardinality(),
relBuilder.peek(), aggCall.getType(), aggCall.name);
}
nonDistinctAggCallProcessedSoFar++;
}
topAggregateCalls.add(newCall);
}
// Populate the group-by keys with the remapped arguments for aggregate A
// The top groupset is basically an identity (first X fields of aggregate B's
// output), minus the distinct aggCall's input.
final Set<Integer> topGroupSet = new HashSet<>();
int groupSetToAdd = 0;
for (int bottomGroup : bottomGroupSet) {
if (originalGroupSet.get(bottomGroup)) {
topGroupSet.add(groupSetToAdd);
}
groupSetToAdd++;
}
relBuilder.push(
aggregate.copy(aggregate.getTraitSet(),
relBuilder.build(), aggregate.indicator,
ImmutableBitSet.of(topGroupSet), null, topAggregateCalls));
return relBuilder;
} | java | private RelBuilder convertSingletonDistinct(RelBuilder relBuilder,
Aggregate aggregate, Set<Pair<List<Integer>, Integer>> argLists) {
// In this case, we are assuming that there is a single distinct function.
// So make sure that argLists is of size one.
Preconditions.checkArgument(argLists.size() == 1);
// For example,
// SELECT deptno, COUNT(*), SUM(bonus), MIN(DISTINCT sal)
// FROM emp
// GROUP BY deptno
//
// becomes
//
// SELECT deptno, SUM(cnt), SUM(bonus), MIN(sal)
// FROM (
// SELECT deptno, COUNT(*) as cnt, SUM(bonus), sal
// FROM EMP
// GROUP BY deptno, sal) // Aggregate B
// GROUP BY deptno // Aggregate A
relBuilder.push(aggregate.getInput());
final List<AggregateCall> originalAggCalls = aggregate.getAggCallList();
final ImmutableBitSet originalGroupSet = aggregate.getGroupSet();
// Add the distinct aggregate column(s) to the group-by columns,
// if not already a part of the group-by
final SortedSet<Integer> bottomGroupSet = new TreeSet<>();
bottomGroupSet.addAll(aggregate.getGroupSet().asList());
for (AggregateCall aggCall : originalAggCalls) {
if (aggCall.isDistinct()) {
bottomGroupSet.addAll(aggCall.getArgList());
break; // since we only have single distinct call
}
}
// Generate the intermediate aggregate B, the one on the bottom that converts
// a distinct call to group by call.
// Bottom aggregate is the same as the original aggregate, except that
// the bottom aggregate has converted the DISTINCT aggregate to a group by clause.
final List<AggregateCall> bottomAggregateCalls = new ArrayList<>();
for (AggregateCall aggCall : originalAggCalls) {
// Project the column corresponding to the distinct aggregate. Project
// as-is all the non-distinct aggregates
if (!aggCall.isDistinct()) {
final AggregateCall newCall =
AggregateCall.create(aggCall.getAggregation(), false,
aggCall.isApproximate(), aggCall.getArgList(), -1,
ImmutableBitSet.of(bottomGroupSet).cardinality(),
relBuilder.peek(), null, aggCall.name);
bottomAggregateCalls.add(newCall);
}
}
// Generate the aggregate B (see the reference example above)
relBuilder.push(
aggregate.copy(
aggregate.getTraitSet(), relBuilder.build(),
false, ImmutableBitSet.of(bottomGroupSet), null, bottomAggregateCalls));
// Add aggregate A (see the reference example above), the top aggregate
// to handle the rest of the aggregation that the bottom aggregate hasn't handled
final List<AggregateCall> topAggregateCalls = com.google.common.collect.Lists.newArrayList();
// Use the remapped arguments for the (non)distinct aggregate calls
int nonDistinctAggCallProcessedSoFar = 0;
for (AggregateCall aggCall : originalAggCalls) {
final AggregateCall newCall;
if (aggCall.isDistinct()) {
List<Integer> newArgList = new ArrayList<>();
for (int arg : aggCall.getArgList()) {
newArgList.add(bottomGroupSet.headSet(arg).size());
}
newCall =
AggregateCall.create(aggCall.getAggregation(),
false,
aggCall.isApproximate(),
newArgList,
-1,
originalGroupSet.cardinality(),
relBuilder.peek(),
aggCall.getType(),
aggCall.name);
} else {
// If aggregate B had a COUNT aggregate call the corresponding aggregate at
// aggregate A must be SUM. For other aggregates, it remains the same.
final List<Integer> newArgs =
com.google.common.collect.Lists.newArrayList(
bottomGroupSet.size() + nonDistinctAggCallProcessedSoFar);
if (aggCall.getAggregation().getKind() == SqlKind.COUNT) {
newCall =
AggregateCall.create(new SqlSumEmptyIsZeroAggFunction(), false,
aggCall.isApproximate(), newArgs, -1,
originalGroupSet.cardinality(), relBuilder.peek(),
aggCall.getType(), aggCall.getName());
} else {
newCall =
AggregateCall.create(aggCall.getAggregation(), false,
aggCall.isApproximate(), newArgs, -1,
originalGroupSet.cardinality(),
relBuilder.peek(), aggCall.getType(), aggCall.name);
}
nonDistinctAggCallProcessedSoFar++;
}
topAggregateCalls.add(newCall);
}
// Populate the group-by keys with the remapped arguments for aggregate A
// The top groupset is basically an identity (first X fields of aggregate B's
// output), minus the distinct aggCall's input.
final Set<Integer> topGroupSet = new HashSet<>();
int groupSetToAdd = 0;
for (int bottomGroup : bottomGroupSet) {
if (originalGroupSet.get(bottomGroup)) {
topGroupSet.add(groupSetToAdd);
}
groupSetToAdd++;
}
relBuilder.push(
aggregate.copy(aggregate.getTraitSet(),
relBuilder.build(), aggregate.indicator,
ImmutableBitSet.of(topGroupSet), null, topAggregateCalls));
return relBuilder;
} | [
"private",
"RelBuilder",
"convertSingletonDistinct",
"(",
"RelBuilder",
"relBuilder",
",",
"Aggregate",
"aggregate",
",",
"Set",
"<",
"Pair",
"<",
"List",
"<",
"Integer",
">",
",",
"Integer",
">",
">",
"argLists",
")",
"{",
"// In this case, we are assuming that the... | Converts an aggregate with one distinct aggregate and one or more
non-distinct aggregates to multi-phase aggregates (see reference example
below).
@param relBuilder Contains the input relational expression
@param aggregate Original aggregate
@param argLists Arguments and filters to the distinct aggregate function | [
"Converts",
"an",
"aggregate",
"with",
"one",
"distinct",
"aggregate",
"and",
"one",
"or",
"more",
"non",
"-",
"distinct",
"aggregates",
"to",
"multi",
"-",
"phase",
"aggregates",
"(",
"see",
"reference",
"example",
"below",
")",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/plan/rules/logical/FlinkAggregateExpandDistinctAggregatesRule.java#L275-L397 | train | Convert a singleton DISTINCT aggregate to a single - distinct relationship. | [
30522,
2797,
2128,
20850,
19231,
4063,
19884,
2075,
19263,
10521,
7629,
6593,
1006,
2128,
20850,
19231,
4063,
2128,
20850,
19231,
4063,
1010,
9572,
9572,
1010,
2275,
1026,
3940,
1026,
2862,
1026,
16109,
1028,
1010,
16109,
1028,
1028,
12098,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
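
A minimal plain-Java sketch of the two-phase rewrite described in the comments of the record above: COUNT(DISTINCT sal) GROUP BY deptno becomes an inner GROUP BY (deptno, sal) ("Aggregate B") followed by an outer count per deptno ("Aggregate A"). This is an illustration of the idea, not the Calcite/Flink API; the Emp record and sample rows are hypothetical, and Java 16+ is assumed.

import java.util.*;
import java.util.stream.*;

public class TwoPhaseDistinctSketch {
    // Hypothetical input row: (deptno, sal).
    record Emp(int deptno, int sal) {}

    public static void main(String[] args) {
        List<Emp> emps = List.of(new Emp(10, 100), new Emp(10, 100), new Emp(10, 200), new Emp(20, 300));
        // "Aggregate B": group by (deptno, sal), folding the DISTINCT column into the group key.
        Set<List<Integer>> bottom = emps.stream()
                .map(e -> List.of(e.deptno(), e.sal()))
                .collect(Collectors.toSet());
        // "Aggregate A": group by deptno and count the surviving (already distinct) sal values.
        Map<Integer, Long> distinctSalPerDept = bottom.stream()
                .collect(Collectors.groupingBy(k -> k.get(0), TreeMap::new, Collectors.counting()));
        System.out.println(distinctSalPerDept); // {10=2, 20=1}
    }
}
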
apache/flink | flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/result/MaterializedCollectStreamResult.java | MaterializedCollectStreamResult.processInsert | private void processInsert(Row row) {
// limit the materialized table
if (materializedTable.size() - validRowPosition >= maxRowCount) {
cleanUp();
}
materializedTable.add(row);
rowPositionCache.put(row, materializedTable.size() - 1);
} | java | private void processInsert(Row row) {
// limit the materialized table
if (materializedTable.size() - validRowPosition >= maxRowCount) {
cleanUp();
}
materializedTable.add(row);
rowPositionCache.put(row, materializedTable.size() - 1);
} | [
"private",
"void",
"processInsert",
"(",
"Row",
"row",
")",
"{",
"// limit the materialized table",
"if",
"(",
"materializedTable",
".",
"size",
"(",
")",
"-",
"validRowPosition",
">=",
"maxRowCount",
")",
"{",
"cleanUp",
"(",
")",
";",
"}",
"materializedTable",... | -------------------------------------------------------------------------------------------- | [
"--------------------------------------------------------------------------------------------"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/result/MaterializedCollectStreamResult.java#L204-L211 | train | Insert a new row into the materialized table. | [
30522,
2797,
11675,
2832,
7076,
8743,
1006,
5216,
5216,
1007,
1063,
1013,
1013,
5787,
1996,
27075,
2795,
2065,
1006,
27075,
10880,
1012,
2946,
1006,
1007,
1011,
9398,
10524,
26994,
1028,
1027,
4098,
10524,
3597,
16671,
1007,
1063,
27686,
10... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
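
A self-contained sketch of the same insert path as the record above, with String rows and a simplified cleanUp(); the field names mirror the original, but the types are stand-ins rather than Flink's Row and result classes.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class MaterializedBufferSketch {
    private final List<String> materializedTable = new ArrayList<>();
    private final Map<String, Integer> rowPositionCache = new HashMap<>();
    private final int maxRowCount = 2; // tiny limit so the sketch actually triggers cleanUp()
    private int validRowPosition = 0;

    void processInsert(String row) {
        // limit the materialized table, as in the original
        if (materializedTable.size() - validRowPosition >= maxRowCount) {
            cleanUp();
        }
        materializedTable.add(row);
        rowPositionCache.put(row, materializedTable.size() - 1);
    }

    private void cleanUp() {
        // simplified stand-in: the real class evicts stale rows here
        validRowPosition = materializedTable.size();
    }

    public static void main(String[] args) {
        MaterializedBufferSketch sketch = new MaterializedBufferSketch();
        for (String row : new String[]{"r1", "r2", "r3"}) {
            sketch.processInsert(row);
        }
        System.out.println(sketch.rowPositionCache); // r3 was appended at position 2
    }
}
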
apache/flink | flink-java/src/main/java/org/apache/flink/api/java/operators/UnsortedGrouping.java | UnsortedGrouping.sortGroup | public <K> SortedGrouping<T> sortGroup(KeySelector<T, K> keySelector, Order order) {
if (!(this.getKeys() instanceof Keys.SelectorFunctionKeys)) {
throw new InvalidProgramException("KeySelector group-sorting keys can only be used with KeySelector grouping keys.");
}
TypeInformation<K> keyType = TypeExtractor.getKeySelectorTypes(keySelector, this.inputDataSet.getType());
SortedGrouping<T> sg = new SortedGrouping<T>(this.inputDataSet, this.keys, new Keys.SelectorFunctionKeys<T, K>(keySelector, this.inputDataSet.getType(), keyType), order);
sg.customPartitioner = getCustomPartitioner();
return sg;
} | java | public <K> SortedGrouping<T> sortGroup(KeySelector<T, K> keySelector, Order order) {
if (!(this.getKeys() instanceof Keys.SelectorFunctionKeys)) {
throw new InvalidProgramException("KeySelector group-sorting keys can only be used with KeySelector grouping keys.");
}
TypeInformation<K> keyType = TypeExtractor.getKeySelectorTypes(keySelector, this.inputDataSet.getType());
SortedGrouping<T> sg = new SortedGrouping<T>(this.inputDataSet, this.keys, new Keys.SelectorFunctionKeys<T, K>(keySelector, this.inputDataSet.getType(), keyType), order);
sg.customPartitioner = getCustomPartitioner();
return sg;
} | [
"public",
"<",
"K",
">",
"SortedGrouping",
"<",
"T",
">",
"sortGroup",
"(",
"KeySelector",
"<",
"T",
",",
"K",
">",
"keySelector",
",",
"Order",
"order",
")",
"{",
"if",
"(",
"!",
"(",
"this",
".",
"getKeys",
"(",
")",
"instanceof",
"Keys",
".",
"S... | Sorts elements within a group on a key extracted by the specified {@link org.apache.flink.api.java.functions.KeySelector}
in the specified {@link Order}.
<p>Chaining {@link #sortGroup(KeySelector, Order)} calls is not supported.
@param keySelector The KeySelector with which the group is sorted.
@param order The Order in which the extracted key is sorted.
@return A SortedGrouping with specified order of group element.
@see Order | [
"Sorts",
"elements",
"within",
"a",
"group",
"on",
"a",
"key",
"extracted",
"by",
"the",
"specified",
"{",
"@link",
"org",
".",
"apache",
".",
"flink",
".",
"api",
".",
"java",
".",
"functions",
".",
"KeySelector",
"}",
"in",
"the",
"specified",
"{",
"... | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/operators/UnsortedGrouping.java#L316-L325 | train | Sort the data set using the given key selector and order. | [
30522,
2270,
1026,
1047,
1028,
19616,
17058,
2075,
1026,
1056,
1028,
4066,
17058,
1006,
6309,
12260,
16761,
1026,
1056,
1010,
1047,
1028,
6309,
12260,
16761,
1010,
2344,
2344,
1007,
1063,
2065,
1006,
999,
1006,
2023,
1012,
2131,
14839,
2015... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
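
A hedged usage sketch of sortGroup with KeySelector keys, matching the docstring's constraint that KeySelector group-sorting requires KeySelector grouping. The imports reflect the classic Flink DataSet API as I recall it and should be treated as assumptions.

import org.apache.flink.api.common.operators.Order;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;

public class SortGroupSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<Tuple2<String, Integer>> input =
                env.fromElements(Tuple2.of("a", 3), Tuple2.of("a", 1), Tuple2.of("b", 2));
        input
            // grouping key comes from a KeySelector, as the docstring requires
            .groupBy(new KeySelector<Tuple2<String, Integer>, String>() {
                @Override public String getKey(Tuple2<String, Integer> t) { return t.f0; }
            })
            // group-sorting key is also a KeySelector
            .sortGroup(new KeySelector<Tuple2<String, Integer>, Integer>() {
                @Override public Integer getKey(Tuple2<String, Integer> t) { return t.f1; }
            }, Order.ASCENDING)
            .first(1) // smallest value per key: (a,1) and (b,2)
            .print();
    }
}
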
apache/flink | flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/dataformat/BinaryFormat.java | BinaryFormat.readBinaryFieldFromSegments | static byte[] readBinaryFieldFromSegments(
MemorySegment[] segments, int baseOffset, int fieldOffset,
long variablePartOffsetAndLen) {
long mark = variablePartOffsetAndLen & HIGHEST_FIRST_BIT;
if (mark == 0) {
final int subOffset = (int) (variablePartOffsetAndLen >> 32);
final int len = (int) variablePartOffsetAndLen;
return SegmentsUtil.copyToBytes(segments, baseOffset + subOffset, len);
} else {
int len = (int) ((variablePartOffsetAndLen & HIGHEST_SECOND_TO_EIGHTH_BIT) >>> 56);
if (SegmentsUtil.LITTLE_ENDIAN) {
return SegmentsUtil.copyToBytes(segments, fieldOffset, len);
} else {
// fieldOffset + 1 to skip header.
return SegmentsUtil.copyToBytes(segments, fieldOffset + 1, len);
}
}
} | java | static byte[] readBinaryFieldFromSegments(
MemorySegment[] segments, int baseOffset, int fieldOffset,
long variablePartOffsetAndLen) {
long mark = variablePartOffsetAndLen & HIGHEST_FIRST_BIT;
if (mark == 0) {
final int subOffset = (int) (variablePartOffsetAndLen >> 32);
final int len = (int) variablePartOffsetAndLen;
return SegmentsUtil.copyToBytes(segments, baseOffset + subOffset, len);
} else {
int len = (int) ((variablePartOffsetAndLen & HIGHEST_SECOND_TO_EIGHTH_BIT) >>> 56);
if (SegmentsUtil.LITTLE_ENDIAN) {
return SegmentsUtil.copyToBytes(segments, fieldOffset, len);
} else {
// fieldOffset + 1 to skip header.
return SegmentsUtil.copyToBytes(segments, fieldOffset + 1, len);
}
}
} | [
"static",
"byte",
"[",
"]",
"readBinaryFieldFromSegments",
"(",
"MemorySegment",
"[",
"]",
"segments",
",",
"int",
"baseOffset",
",",
"int",
"fieldOffset",
",",
"long",
"variablePartOffsetAndLen",
")",
"{",
"long",
"mark",
"=",
"variablePartOffsetAndLen",
"&",
"HI... | Get binary, if len less than 8, will be include in variablePartOffsetAndLen.
<p>Note: Need to consider the ByteOrder.
@param baseOffset base offset of composite binary format.
@param fieldOffset absolute start offset of 'variablePartOffsetAndLen'.
@param variablePartOffsetAndLen a long value, real data or offset and len. | [
"Get",
"binary",
"if",
"len",
"less",
"than",
"8",
"will",
"be",
"include",
"in",
"variablePartOffsetAndLen",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/dataformat/BinaryFormat.java#L119-L136 | train | Read binary field from segments. | [
30522,
10763,
24880,
1031,
1033,
3191,
21114,
2854,
3790,
19699,
22225,
13910,
8163,
1006,
3638,
3366,
21693,
4765,
1031,
1033,
9214,
1010,
20014,
2918,
27475,
3388,
1010,
20014,
2492,
27475,
3388,
1010,
2146,
8023,
19362,
3406,
21807,
12928,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
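
A plain-Java sketch of how the 8-byte "offset and length" word decoded in the record above can be unpacked. The bit masks are illustrative approximations of the constants named in the code, not the exact Flink values.

public class OffsetAndLenSketch {
    // Illustrative masks: the top bit marks the "short data stored inline" case,
    // and the next seven bits then carry the inline length.
    static final long HIGHEST_FIRST_BIT = 0x80L << 56;
    static final long HIGHEST_SECOND_TO_EIGHTH_BIT = 0x7FL << 56;

    public static void main(String[] args) {
        long word = (64L << 32) | 12L; // top bit clear: offset 64, length 12
        if ((word & HIGHEST_FIRST_BIT) == 0) {
            int offset = (int) (word >> 32);
            int len = (int) word;
            System.out.println("offset=" + offset + " len=" + len); // offset=64 len=12
        } else {
            int len = (int) ((word & HIGHEST_SECOND_TO_EIGHTH_BIT) >>> 56);
            System.out.println("inline len=" + len);
        }
    }
}
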
apache/flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/Schema.java | Schema.field | public Schema field(String fieldName, TypeInformation<?> fieldType) {
field(fieldName, TypeStringUtils.writeTypeInfo(fieldType));
return this;
} | java | public Schema field(String fieldName, TypeInformation<?> fieldType) {
field(fieldName, TypeStringUtils.writeTypeInfo(fieldType));
return this;
} | [
"public",
"Schema",
"field",
"(",
"String",
"fieldName",
",",
"TypeInformation",
"<",
"?",
">",
"fieldType",
")",
"{",
"field",
"(",
"fieldName",
",",
"TypeStringUtils",
".",
"writeTypeInfo",
"(",
"fieldType",
")",
")",
";",
"return",
"this",
";",
"}"
] | Adds a field with the field name and the type information. Required.
This method can be called multiple times. The call order of this method defines
also the order of the fields in a row.
@param fieldName the field name
@param fieldType the type information of the field | [
"Adds",
"a",
"field",
"with",
"the",
"field",
"name",
"and",
"the",
"type",
"information",
".",
"Required",
".",
"This",
"method",
"can",
"be",
"called",
"multiple",
"times",
".",
"The",
"call",
"order",
"of",
"this",
"method",
"defines",
"also",
"the",
... | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/Schema.java#L76-L79 | train | Create a new field with the given name and type information. | [
30522,
2270,
8040,
28433,
2492,
1006,
5164,
2492,
18442,
1010,
2828,
2378,
14192,
30524,
2492,
13874,
1007,
1007,
1025,
2709,
2023,
1025,
1065,
102,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
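
A hedged usage sketch of the builder in the record above; org.apache.flink.api.common.typeinfo.Types is assumed to provide the TypeInformation constants, so treat that import as an assumption.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.descriptors.Schema;

public class SchemaSketch {
    public static void main(String[] args) {
        // call order defines the order of the fields in a row
        Schema schema = new Schema()
                .field("word", Types.STRING)
                .field("count", Types.LONG);
        System.out.println(schema); // descriptor is now ready to hand to a connector
    }
}
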
apache/spark | sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java | Operation.validateFetchOrientation | protected void validateFetchOrientation(FetchOrientation orientation,
EnumSet<FetchOrientation> supportedOrientations) throws HiveSQLException {
if (!supportedOrientations.contains(orientation)) {
throw new HiveSQLException("The fetch type " + orientation.toString() +
" is not supported for this resultset", "HY106");
}
} | java | protected void validateFetchOrientation(FetchOrientation orientation,
EnumSet<FetchOrientation> supportedOrientations) throws HiveSQLException {
if (!supportedOrientations.contains(orientation)) {
throw new HiveSQLException("The fetch type " + orientation.toString() +
" is not supported for this resultset", "HY106");
}
} | [
"protected",
"void",
"validateFetchOrientation",
"(",
"FetchOrientation",
"orientation",
",",
"EnumSet",
"<",
"FetchOrientation",
">",
"supportedOrientations",
")",
"throws",
"HiveSQLException",
"{",
"if",
"(",
"!",
"supportedOrientations",
".",
"contains",
"(",
"orient... | Verify if the given fetch orientation is part of the supported orientation types.
@param orientation
@param supportedOrientations
@throws HiveSQLException | [
"Verify",
"if",
"the",
"given",
"fetch",
"orientation",
"is",
"part",
"of",
"the",
"supported",
"orientation",
"types",
"."
] | 25ee0474f47d9c30d6f553a7892d9549f91071cf | https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java#L306-L312 | train | Validate the fetch orientation | [
30522,
5123,
11675,
9398,
3686,
7959,
10649,
10050,
19304,
1006,
18584,
10050,
19304,
10296,
1010,
4372,
18163,
3388,
1026,
18584,
10050,
19304,
1028,
3569,
10050,
19304,
2015,
1007,
11618,
26736,
2015,
4160,
2571,
2595,
24422,
1063,
2065,
10... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
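
The same validation pattern as the record above in a dependency-free sketch; the enum here is a stand-in for Hive's FetchOrientation, and IllegalArgumentException stands in for HiveSQLException.

import java.util.EnumSet;

public class FetchValidationSketch {
    enum FetchOrientation { FETCH_NEXT, FETCH_PRIOR, FETCH_FIRST }

    static void validate(FetchOrientation o, EnumSet<FetchOrientation> supported) {
        if (!supported.contains(o)) {
            throw new IllegalArgumentException("The fetch type " + o + " is not supported for this resultset");
        }
    }

    public static void main(String[] args) {
        EnumSet<FetchOrientation> supported =
                EnumSet.of(FetchOrientation.FETCH_NEXT, FetchOrientation.FETCH_FIRST);
        validate(FetchOrientation.FETCH_NEXT, supported);  // passes
        validate(FetchOrientation.FETCH_PRIOR, supported); // throws
    }
}
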
apache/flink | flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/utils/DataOutputEncoder.java | DataOutputEncoder.writeFixed | @Override
public void writeFixed(byte[] bytes, int start, int len) throws IOException {
out.write(bytes, start, len);
} | java | @Override
public void writeFixed(byte[] bytes, int start, int len) throws IOException {
out.write(bytes, start, len);
} | [
"@",
"Override",
"public",
"void",
"writeFixed",
"(",
"byte",
"[",
"]",
"bytes",
",",
"int",
"start",
",",
"int",
"len",
")",
"throws",
"IOException",
"{",
"out",
".",
"write",
"(",
"bytes",
",",
"start",
",",
"len",
")",
";",
"}"
] | -------------------------------------------------------------------------------------------- | [
"--------------------------------------------------------------------------------------------"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/utils/DataOutputEncoder.java#L83-L86 | train | Write fixed bytes. | [
30522,
1030,
2058,
15637,
2270,
11675,
4339,
23901,
1006,
24880,
1031,
1033,
27507,
1010,
20014,
2707,
1010,
20014,
18798,
1007,
11618,
22834,
10288,
24422,
1063,
2041,
1012,
4339,
1006,
27507,
1010,
2707,
1010,
18798,
1007,
1025,
1065,
102,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
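
A self-contained sketch of the delegation in the record above: Avro "fixed" values have a statically known size, so the encoder forwards the bytes with no length prefix. The class is a stand-in, and only this one method is modeled.

import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

class FixedWriteSketch {
    private final DataOutput out;

    FixedWriteSketch(DataOutput out) {
        this.out = out;
    }

    // "fixed" values have a known size, so no length prefix is written
    void writeFixed(byte[] bytes, int start, int len) throws IOException {
        out.write(bytes, start, len);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        new FixedWriteSketch(new DataOutputStream(buf)).writeFixed(new byte[]{1, 2, 3, 4}, 1, 2);
        System.out.println(buf.size()); // 2
    }
}
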
apache/flink | flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java | Graph.joinWithEdges | public <T> Graph<K, VV, EV> joinWithEdges(DataSet<Tuple3<K, K, T>> inputDataSet,
final EdgeJoinFunction<EV, T> edgeJoinFunction) {
DataSet<Edge<K, EV>> resultedEdges = this.getEdges()
.coGroup(inputDataSet).where(0, 1).equalTo(0, 1)
.with(new ApplyCoGroupToEdgeValues<>(edgeJoinFunction))
.name("Join with edges");
return new Graph<>(this.vertices, resultedEdges, this.context);
} | java | public <T> Graph<K, VV, EV> joinWithEdges(DataSet<Tuple3<K, K, T>> inputDataSet,
final EdgeJoinFunction<EV, T> edgeJoinFunction) {
DataSet<Edge<K, EV>> resultedEdges = this.getEdges()
.coGroup(inputDataSet).where(0, 1).equalTo(0, 1)
.with(new ApplyCoGroupToEdgeValues<>(edgeJoinFunction))
.name("Join with edges");
return new Graph<>(this.vertices, resultedEdges, this.context);
} | [
"public",
"<",
"T",
">",
"Graph",
"<",
"K",
",",
"VV",
",",
"EV",
">",
"joinWithEdges",
"(",
"DataSet",
"<",
"Tuple3",
"<",
"K",
",",
"K",
",",
"T",
">",
">",
"inputDataSet",
",",
"final",
"EdgeJoinFunction",
"<",
"EV",
",",
"T",
">",
"edgeJoinFunc... | Joins the edge DataSet with an input DataSet on the composite key of both
source and target IDs and applies a user-defined transformation on the values
of the matched records. The first two fields of the input DataSet are used as join keys.
@param inputDataSet the DataSet to join with.
The first two fields of the Tuple3 are used as the composite join key
and the third field is passed as a parameter to the transformation function.
@param edgeJoinFunction the transformation function to apply.
The first parameter is the current edge value and the second parameter is the value
of the matched Tuple3 from the input DataSet.
@param <T> the type of the third field of the input Tuple3 DataSet.
@return a new Graph, where the edge values have been updated according to the
result of the edgeJoinFunction. | [
"Joins",
"the",
"edge",
"DataSet",
"with",
"an",
"input",
"DataSet",
"on",
"the",
"composite",
"key",
"of",
"both",
"source",
"and",
"target",
"IDs",
"and",
"applies",
"a",
"user",
"-",
"defined",
"transformation",
"on",
"the",
"values",
"of",
"the",
"matc... | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java#L728-L736 | train | Join the graph with the input data set using the edge join function. | [
30522,
2270,
1026,
1056,
1028,
10629,
1026,
1047,
1010,
1058,
2615,
1010,
23408,
1028,
3693,
24415,
24225,
2015,
1006,
2951,
13462,
1026,
10722,
10814,
2509,
1026,
1047,
1010,
1047,
1010,
1056,
1028,
1028,
7953,
2850,
18260,
2102,
1010,
234... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
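
A plain-Java sketch of the join semantics (not the Gelly API): input triples are matched against edges keyed on (source, target), and a user function rewrites the matched edge value. The data values are hypothetical.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class EdgeJoinSketch {
    public static void main(String[] args) {
        // edge values keyed by (source, target)
        Map<List<Long>, Integer> edgeValues = new HashMap<>();
        edgeValues.put(List.of(1L, 2L), 10);
        edgeValues.put(List.of(2L, 3L), 20);

        // input "Tuple3"s: (source, target, delta); the first two fields form the join key
        List<long[]> input = List.of(new long[]{1, 2, 5});
        for (long[] t : input) {
            List<Long> key = List.of(t[0], t[1]);
            edgeValues.computeIfPresent(key, (k, v) -> v + (int) t[2]); // user-defined transform
        }
        System.out.println(edgeValues.get(List.of(1L, 2L))); // 15; unmatched edges keep their value
    }
}
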
apache/flink | flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/internals/DynamoDBStreamsDataFetcher.java | DynamoDBStreamsDataFetcher.createShardConsumer | @Override
protected ShardConsumer createShardConsumer(
Integer subscribedShardStateIndex,
StreamShardHandle handle,
SequenceNumber lastSeqNum,
ShardMetricsReporter shardMetricsReporter) {
return new ShardConsumer(
this,
subscribedShardStateIndex,
handle,
lastSeqNum,
DynamoDBStreamsProxy.create(getConsumerConfiguration()),
shardMetricsReporter);
} | java | @Override
protected ShardConsumer createShardConsumer(
Integer subscribedShardStateIndex,
StreamShardHandle handle,
SequenceNumber lastSeqNum,
ShardMetricsReporter shardMetricsReporter) {
return new ShardConsumer(
this,
subscribedShardStateIndex,
handle,
lastSeqNum,
DynamoDBStreamsProxy.create(getConsumerConfiguration()),
shardMetricsReporter);
} | [
"@",
"Override",
"protected",
"ShardConsumer",
"createShardConsumer",
"(",
"Integer",
"subscribedShardStateIndex",
",",
"StreamShardHandle",
"handle",
",",
"SequenceNumber",
"lastSeqNum",
",",
"ShardMetricsReporter",
"shardMetricsReporter",
")",
"{",
"return",
"new",
"Shard... | Create a new DynamoDB streams shard consumer.
@param subscribedShardStateIndex the state index of the shard this consumer is subscribed to
@param handle stream handle
@param lastSeqNum last sequence number
@param shardMetricsReporter the reporter to report metrics to
@return | [
"Create",
"a",
"new",
"DynamoDB",
"streams",
"shard",
"consumer",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/internals/DynamoDBStreamsDataFetcher.java#L94-L108 | train | Override to create a new instance of the ShardConsumer class. | [
30522,
1030,
2058,
15637,
5123,
21146,
4103,
8663,
23545,
2099,
9005,
11783,
8663,
23545,
2099,
1006,
16109,
4942,
29234,
5104,
11783,
9153,
9589,
3207,
2595,
1010,
9199,
11783,
11774,
2571,
5047,
1010,
5537,
19172,
5677,
16180,
2063,
4160,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
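
A minimal sketch of the factory-method pattern the override in the record above relies on; the class names are stand-ins for the Flink fetcher types, and Runnable stands in for ShardConsumer.

class FetcherSketch {
    protected Runnable createShardConsumer(String shard) {
        return () -> System.out.println("kinesis consumer for " + shard);
    }
}

class DynamoDbFetcherSketch extends FetcherSketch {
    @Override
    protected Runnable createShardConsumer(String shard) {
        // same consumer shape, different backing streams proxy
        return () -> System.out.println("dynamodb-streams consumer for " + shard);
    }

    public static void main(String[] args) {
        new DynamoDbFetcherSketch().createShardConsumer("shard-0001").run();
    }
}
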
looly/hutool | hutool-core/src/main/java/cn/hutool/core/collection/CollUtil.java | CollUtil.newArrayList | public static <T> ArrayList<T> newArrayList(Collection<T> collection) {
return (ArrayList<T>) list(false, collection);
} | java | public static <T> ArrayList<T> newArrayList(Collection<T> collection) {
return (ArrayList<T>) list(false, collection);
} | [
"public",
"static",
"<",
"T",
">",
"ArrayList",
"<",
"T",
">",
"newArrayList",
"(",
"Collection",
"<",
"T",
">",
"collection",
")",
"{",
"return",
"(",
"ArrayList",
"<",
"T",
">",
")",
"list",
"(",
"false",
",",
"collection",
")",
";",
"}"
] | Creates a new ArrayList
@param <T> the element type of the collection
@param collection the collection
@return an ArrayList object | [
"Creates a new ArrayList"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/collection/CollUtil.java#L642-L644 | train | Creates an ArrayList of the given collection. | [
30522,
2270,
10763,
1026,
1056,
1028,
9140,
9863,
1026,
1056,
1028,
2047,
2906,
9447,
9863,
1006,
3074,
1026,
1056,
1028,
3074,
1007,
1063,
2709,
1006,
9140,
9863,
1026,
1056,
1028,
1007,
2862,
1006,
6270,
1010,
3074,
1007,
1025,
1065,
10... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
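
A hedged usage sketch of the helper in the record above, assuming hutool-core is on the classpath.

import cn.hutool.core.collection.CollUtil;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.Set;

public class NewArrayListSketch {
    public static void main(String[] args) {
        Set<String> set = new LinkedHashSet<>();
        set.add("a");
        set.add("b");
        ArrayList<String> list = CollUtil.newArrayList(set); // copies the collection, does not wrap it
        System.out.println(list); // [a, b]
    }
}
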
spring-projects/spring-boot | spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/run/BootRun.java | BootRun.sourceResources | public void sourceResources(SourceSet sourceSet) {
setClasspath(getProject()
.files(sourceSet.getResources().getSrcDirs(), getClasspath())
.filter((file) -> !file.equals(sourceSet.getOutput().getResourcesDir())));
} | java | public void sourceResources(SourceSet sourceSet) {
setClasspath(getProject()
.files(sourceSet.getResources().getSrcDirs(), getClasspath())
.filter((file) -> !file.equals(sourceSet.getOutput().getResourcesDir())));
} | [
"public",
"void",
"sourceResources",
"(",
"SourceSet",
"sourceSet",
")",
"{",
"setClasspath",
"(",
"getProject",
"(",
")",
".",
"files",
"(",
"sourceSet",
".",
"getResources",
"(",
")",
".",
"getSrcDirs",
"(",
")",
",",
"getClasspath",
"(",
")",
")",
".",
... | Adds the {@link SourceDirectorySet#getSrcDirs() source directories} of the given
{@code sourceSet's} {@link SourceSet#getResources() resources} to the start of the
classpath in place of the {@link SourceSet#getOutput output's}
{@link SourceSetOutput#getResourcesDir() resources directory}.
@param sourceSet the source set | [
"Adds",
"the",
"{"
] | 0b27f7c70e164b2b1a96477f1d9c1acba56790c1 | https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/run/BootRun.java#L39-L43 | train | Source resources. | [
30522,
2270,
11675,
3120,
6072,
8162,
9623,
1006,
4216,
3388,
4216,
3388,
1007,
1063,
2275,
26266,
15069,
1006,
2131,
21572,
20614,
1006,
1007,
1012,
6764,
1006,
4216,
3388,
1012,
2131,
6072,
8162,
9623,
1006,
1007,
1012,
4152,
11890,
4305,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
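
A plain-Java sketch of the classpath rewrite the Gradle task above performs: resource source directories go first, and the processed-resources output directory is filtered out, so edited resources are visible without a rebuild. All paths are hypothetical.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class SourceResourcesSketch {
    public static void main(String[] args) {
        File resourcesOut = new File("build/resources/main");
        List<File> classpath = List.of(resourcesOut, new File("build/classes/java/main"));
        List<File> resourceSrcDirs = List.of(new File("src/main/resources"));

        List<File> adjusted = new ArrayList<>(resourceSrcDirs); // source dirs come first
        for (File f : classpath) {
            if (!f.equals(resourcesOut)) { // drop the processed-resources output dir
                adjusted.add(f);
            }
        }
        System.out.println(adjusted); // src/main/resources, then build/classes/java/main
    }
}
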
apache/spark | launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java | CommandBuilderUtils.parseOptionString | static List<String> parseOptionString(String s) {
List<String> opts = new ArrayList<>();
StringBuilder opt = new StringBuilder();
boolean inOpt = false;
boolean inSingleQuote = false;
boolean inDoubleQuote = false;
boolean escapeNext = false;
// This is needed to detect when a quoted empty string is used as an argument ("" or '').
boolean hasData = false;
for (int i = 0; i < s.length(); i++) {
int c = s.codePointAt(i);
if (escapeNext) {
opt.appendCodePoint(c);
escapeNext = false;
} else if (inOpt) {
switch (c) {
case '\\':
if (inSingleQuote) {
opt.appendCodePoint(c);
} else {
escapeNext = true;
}
break;
case '\'':
if (inDoubleQuote) {
opt.appendCodePoint(c);
} else {
inSingleQuote = !inSingleQuote;
}
break;
case '"':
if (inSingleQuote) {
opt.appendCodePoint(c);
} else {
inDoubleQuote = !inDoubleQuote;
}
break;
default:
if (!Character.isWhitespace(c) || inSingleQuote || inDoubleQuote) {
opt.appendCodePoint(c);
} else {
opts.add(opt.toString());
opt.setLength(0);
inOpt = false;
hasData = false;
}
}
} else {
switch (c) {
case '\'':
inSingleQuote = true;
inOpt = true;
hasData = true;
break;
case '"':
inDoubleQuote = true;
inOpt = true;
hasData = true;
break;
case '\\':
escapeNext = true;
inOpt = true;
hasData = true;
break;
default:
if (!Character.isWhitespace(c)) {
inOpt = true;
hasData = true;
opt.appendCodePoint(c);
}
}
}
}
checkArgument(!inSingleQuote && !inDoubleQuote && !escapeNext, "Invalid option string: %s", s);
if (hasData) {
opts.add(opt.toString());
}
return opts;
} | java | static List<String> parseOptionString(String s) {
List<String> opts = new ArrayList<>();
StringBuilder opt = new StringBuilder();
boolean inOpt = false;
boolean inSingleQuote = false;
boolean inDoubleQuote = false;
boolean escapeNext = false;
// This is needed to detect when a quoted empty string is used as an argument ("" or '').
boolean hasData = false;
for (int i = 0; i < s.length(); i++) {
int c = s.codePointAt(i);
if (escapeNext) {
opt.appendCodePoint(c);
escapeNext = false;
} else if (inOpt) {
switch (c) {
case '\\':
if (inSingleQuote) {
opt.appendCodePoint(c);
} else {
escapeNext = true;
}
break;
case '\'':
if (inDoubleQuote) {
opt.appendCodePoint(c);
} else {
inSingleQuote = !inSingleQuote;
}
break;
case '"':
if (inSingleQuote) {
opt.appendCodePoint(c);
} else {
inDoubleQuote = !inDoubleQuote;
}
break;
default:
if (!Character.isWhitespace(c) || inSingleQuote || inDoubleQuote) {
opt.appendCodePoint(c);
} else {
opts.add(opt.toString());
opt.setLength(0);
inOpt = false;
hasData = false;
}
}
} else {
switch (c) {
case '\'':
inSingleQuote = true;
inOpt = true;
hasData = true;
break;
case '"':
inDoubleQuote = true;
inOpt = true;
hasData = true;
break;
case '\\':
escapeNext = true;
inOpt = true;
hasData = true;
break;
default:
if (!Character.isWhitespace(c)) {
inOpt = true;
hasData = true;
opt.appendCodePoint(c);
}
}
}
}
checkArgument(!inSingleQuote && !inDoubleQuote && !escapeNext, "Invalid option string: %s", s);
if (hasData) {
opts.add(opt.toString());
}
return opts;
} | [
"static",
"List",
"<",
"String",
">",
"parseOptionString",
"(",
"String",
"s",
")",
"{",
"List",
"<",
"String",
">",
"opts",
"=",
"new",
"ArrayList",
"<>",
"(",
")",
";",
"StringBuilder",
"opt",
"=",
"new",
"StringBuilder",
"(",
")",
";",
"boolean",
"i... | Parse a string as if it were a list of arguments, following bash semantics.
For example:
Input: "\"ab cd\" efgh 'i \" j'"
Output: [ "ab cd", "efgh", "i \" j" ] | [
"Parse",
"a",
"string",
"as",
"if",
"it",
"were",
"a",
"list",
"of",
"arguments",
"following",
"bash",
"semantics",
".",
"For",
"example",
":"
] | 25ee0474f47d9c30d6f553a7892d9549f91071cf | https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java#L128-L209 | train | Parse a string containing a list of options. | [
30522,
10763,
2862,
1026,
5164,
1028,
11968,
3366,
7361,
9285,
18886,
3070,
1006,
5164,
1055,
1007,
1063,
2862,
1026,
5164,
1028,
23569,
2015,
1027,
2047,
9140,
9863,
1026,
1028,
1006,
1007,
1025,
5164,
8569,
23891,
2099,
23569,
1027,
2047,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
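
A behavior sketch built from the docstring's own example in the record above; parseOptionString itself appears to be package-private to org.apache.spark.launcher, so the call is shown as a comment rather than invoked.

import java.util.List;

public class ParseOptionSketch {
    public static void main(String[] args) {
        String s = "\"ab cd\" efgh 'i \" j'";
        // Expected bash-like split, straight from the docstring:
        List<String> expected = List.of("ab cd", "efgh", "i \" j");
        System.out.println(expected);
        // Inside org.apache.spark.launcher one could check:
        // assert CommandBuilderUtils.parseOptionString(s).equals(expected);
    }
}
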
netty/netty | codec-dns/src/main/java/io/netty/handler/codec/dns/DatagramDnsQueryEncoder.java | DatagramDnsQueryEncoder.encodeHeader | private static void encodeHeader(DnsQuery query, ByteBuf buf) {
buf.writeShort(query.id());
int flags = 0;
flags |= (query.opCode().byteValue() & 0xFF) << 14;
if (query.isRecursionDesired()) {
flags |= 1 << 8;
}
buf.writeShort(flags);
buf.writeShort(query.count(DnsSection.QUESTION));
buf.writeShort(0); // answerCount
buf.writeShort(0); // authorityResourceCount
buf.writeShort(query.count(DnsSection.ADDITIONAL));
} | java | private static void encodeHeader(DnsQuery query, ByteBuf buf) {
buf.writeShort(query.id());
int flags = 0;
flags |= (query.opCode().byteValue() & 0xFF) << 14;
if (query.isRecursionDesired()) {
flags |= 1 << 8;
}
buf.writeShort(flags);
buf.writeShort(query.count(DnsSection.QUESTION));
buf.writeShort(0); // answerCount
buf.writeShort(0); // authorityResourceCount
buf.writeShort(query.count(DnsSection.ADDITIONAL));
} | [
"private",
"static",
"void",
"encodeHeader",
"(",
"DnsQuery",
"query",
",",
"ByteBuf",
"buf",
")",
"{",
"buf",
".",
"writeShort",
"(",
"query",
".",
"id",
"(",
")",
")",
";",
"int",
"flags",
"=",
"0",
";",
"flags",
"|=",
"(",
"query",
".",
"opCode",
... | Encodes the header that is always 12 bytes long.
@param query the query header being encoded
@param buf the buffer the encoded data should be written to | [
"Encodes",
"the",
"header",
"that",
"is",
"always",
"12",
"bytes",
"long",
"."
] | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-dns/src/main/java/io/netty/handler/codec/dns/DatagramDnsQueryEncoder.java#L95-L107 | train | Encode the header of a DNS query. | [
30522,
2797,
10763,
11675,
4372,
16044,
4974,
2121,
1006,
1040,
3619,
4226,
2854,
23032,
1010,
24880,
8569,
2546,
20934,
2546,
1007,
1063,
20934,
2546,
1012,
7009,
27794,
1006,
23032,
1012,
8909,
1006,
1007,
1007,
1025,
20014,
9245,
1027,
1... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
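
The same 12-byte DNS header layout as the record above, sketched with java.nio.ByteBuffer instead of Netty's ByteBuf; the field values are hypothetical.

import java.nio.ByteBuffer;

public class DnsHeaderSketch {
    public static void main(String[] args) {
        int id = 0x1234, opCode = 0, questionCount = 1, additionalCount = 0;
        boolean recursionDesired = true;

        int flags = (opCode & 0xFF) << 14;
        if (recursionDesired) {
            flags |= 1 << 8; // RD bit, as in the original
        }
        ByteBuffer buf = ByteBuffer.allocate(12);
        buf.putShort((short) id);
        buf.putShort((short) flags);
        buf.putShort((short) questionCount);
        buf.putShort((short) 0); // answer count
        buf.putShort((short) 0); // authority resource count
        buf.putShort((short) additionalCount);
        System.out.println(buf.position()); // 12: the header is always 12 bytes
    }
}
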
apache/flink | flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/internals/ShardConsumer.java | ShardConsumer.getShardIterator | protected String getShardIterator(SequenceNumber sequenceNumber) throws Exception {
if (isSentinelSequenceNumber(sequenceNumber)) {
return getShardIteratorForSentinel(sequenceNumber);
} else {
// we will be starting from an actual sequence number (due to restore from failure).
return getShardIteratorForRealSequenceNumber(sequenceNumber);
}
} | java | protected String getShardIterator(SequenceNumber sequenceNumber) throws Exception {
if (isSentinelSequenceNumber(sequenceNumber)) {
return getShardIteratorForSentinel(sequenceNumber);
} else {
// we will be starting from an actual sequence number (due to restore from failure).
return getShardIteratorForRealSequenceNumber(sequenceNumber);
}
} | [
"protected",
"String",
"getShardIterator",
"(",
"SequenceNumber",
"sequenceNumber",
")",
"throws",
"Exception",
"{",
"if",
"(",
"isSentinelSequenceNumber",
"(",
"sequenceNumber",
")",
")",
"{",
"return",
"getShardIteratorForSentinel",
"(",
"sequenceNumber",
")",
";",
... | Returns a shard iterator for the given {@link SequenceNumber}.
@return shard iterator
@throws Exception | [
"Returns",
"a",
"shard",
"iterator",
"for",
"the",
"given",
"{",
"@link",
"SequenceNumber",
"}",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/internals/ShardConsumer.java#L148-L156 | train | Get the shard iterator for the given sequence number. | [
30522,
5123,
5164,
4152,
11783,
21646,
8844,
1006,
5537,
19172,
5677,
5537,
19172,
5677,
1007,
11618,
6453,
1063,
2065,
1006,
26354,
4765,
3170,
4877,
2063,
4226,
5897,
19172,
5677,
1006,
5537,
19172,
5677,
1007,
1007,
1063,
2709,
4152,
117... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
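
A dependency-free sketch of the dispatch in the record above: sentinel sequence numbers (initial positions) take one path, while real checkpointed sequence numbers take the other. The string encodings are stand-ins.

public class ShardIteratorSketch {
    static boolean isSentinel(String seq) {
        return seq.startsWith("SENTINEL_"); // stand-in encoding for initial positions
    }

    static String getShardIterator(String seq) {
        return isSentinel(seq)
                ? "iterator-for-" + seq    // fresh start position (e.g. LATEST)
                : "iterator-after-" + seq; // resume after a checkpointed number
    }

    public static void main(String[] args) {
        System.out.println(getShardIterator("SENTINEL_LATEST"));
        System.out.println(getShardIterator("49590338271490256"));
    }
}
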
apache/flink | flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/rank/AbstractTopNFunction.java | AbstractTopNFunction.checkSortKeyInBufferRange | protected boolean checkSortKeyInBufferRange(BaseRow sortKey, TopNBuffer buffer) {
Comparator<BaseRow> comparator = buffer.getSortKeyComparator();
Map.Entry<BaseRow, Collection<BaseRow>> worstEntry = buffer.lastEntry();
if (worstEntry == null) {
// return true if the buffer is empty.
return true;
} else {
BaseRow worstKey = worstEntry.getKey();
int compare = comparator.compare(sortKey, worstKey);
if (compare < 0) {
return true;
} else {
return buffer.getCurrentTopNum() < getDefaultTopNSize();
}
}
} | java | protected boolean checkSortKeyInBufferRange(BaseRow sortKey, TopNBuffer buffer) {
Comparator<BaseRow> comparator = buffer.getSortKeyComparator();
Map.Entry<BaseRow, Collection<BaseRow>> worstEntry = buffer.lastEntry();
if (worstEntry == null) {
// return true if the buffer is empty.
return true;
} else {
BaseRow worstKey = worstEntry.getKey();
int compare = comparator.compare(sortKey, worstKey);
if (compare < 0) {
return true;
} else {
return buffer.getCurrentTopNum() < getDefaultTopNSize();
}
}
} | [
"protected",
"boolean",
"checkSortKeyInBufferRange",
"(",
"BaseRow",
"sortKey",
",",
"TopNBuffer",
"buffer",
")",
"{",
"Comparator",
"<",
"BaseRow",
">",
"comparator",
"=",
"buffer",
".",
"getSortKeyComparator",
"(",
")",
";",
"Map",
".",
"Entry",
"<",
"BaseRow"... | Checks whether the record should be put into the buffer.
@param sortKey sortKey to test
@param buffer buffer to add
@return true if the record should be put into the buffer. | [
"Checks",
"whether",
"the",
"record",
"should",
"be",
"put",
"into",
"the",
"buffer",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/rank/AbstractTopNFunction.java#L217-L232 | train | Check if the sort key is in the buffer range. | [
30522,
5123,
22017,
20898,
14148,
11589,
14839,
2378,
8569,
12494,
24388,
2063,
1006,
2918,
10524,
4066,
14839,
1010,
2327,
27698,
16093,
7512,
17698,
1007,
1063,
4012,
28689,
4263,
1026,
2918,
10524,
1028,
4012,
28689,
4263,
1027,
17698,
101... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
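
The admission rule from the record above, sketched with a TreeMap standing in for TopNBuffer: a record is admitted when its sort key beats the current worst key, or when the buffer is not yet full.

import java.util.TreeMap;

public class TopNAdmitSketch {
    static boolean admit(TreeMap<Integer, String> buffer, int sortKey, int topN) {
        if (buffer.isEmpty()) {
            return true; // empty buffer always accepts
        }
        int worst = buffer.lastKey(); // ascending order: the last key is the worst
        return sortKey < worst || buffer.size() < topN;
    }

    public static void main(String[] args) {
        int topN = 2;
        TreeMap<Integer, String> buffer = new TreeMap<>();
        buffer.put(1, "a");
        buffer.put(3, "b");
        System.out.println(admit(buffer, 2, topN)); // true  (2 beats worst key 3)
        System.out.println(admit(buffer, 5, topN)); // false (worse, and buffer is full)
    }
}
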
spring-projects/spring-boot | spring-boot-samples/spring-boot-sample-jooq/gensrc/main/java/sample/jooq/domain/BookToBookStore.java | BookToBookStore.getReferences | @Override
public List<ForeignKey<Record, ?>> getReferences() {
return Arrays.<ForeignKey<Record, ?>>asList(Keys.FK_B2BS_BOOK_STORE,
Keys.FK_B2BS_BOOK);
} | java | @Override
public List<ForeignKey<Record, ?>> getReferences() {
return Arrays.<ForeignKey<Record, ?>>asList(Keys.FK_B2BS_BOOK_STORE,
Keys.FK_B2BS_BOOK);
} | [
"@",
"Override",
"public",
"List",
"<",
"ForeignKey",
"<",
"Record",
",",
"?",
">",
">",
"getReferences",
"(",
")",
"{",
"return",
"Arrays",
".",
"<",
"ForeignKey",
"<",
"Record",
",",
"?",
">",
">",
"asList",
"(",
"Keys",
".",
"FK_B2BS_BOOK_STORE",
",... | {@inheritDoc} | [
"{"
] | 0b27f7c70e164b2b1a96477f1d9c1acba56790c1 | https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-samples/spring-boot-sample-jooq/gensrc/main/java/sample/jooq/domain/BookToBookStore.java#L110-L114 | train | Returns the references of the B2BS book. | [
30522,
1030,
2058,
15637,
2270,
2862,
1026,
3097,
14839,
1026,
2501,
1010,
1029,
1028,
1028,
2131,
2890,
25523,
2015,
1006,
1007,
1063,
2709,
27448,
1012,
1026,
3097,
14839,
1026,
2501,
1010,
1029,
1028,
1028,
2004,
9863,
1006,
6309,
1012,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
hankcs/HanLP | src/main/java/com/hankcs/hanlp/mining/cluster/Cluster.java | Cluster.remove_document | void remove_document(int index)
{
ListIterator<Document<K>> listIterator = documents_.listIterator(index);
Document<K> document = listIterator.next();
listIterator.set(null);
composite_.sub_vector(document.feature());
} | java | void remove_document(int index)
{
ListIterator<Document<K>> listIterator = documents_.listIterator(index);
Document<K> document = listIterator.next();
listIterator.set(null);
composite_.sub_vector(document.feature());
} | [
"void",
"remove_document",
"(",
"int",
"index",
")",
"{",
"ListIterator",
"<",
"Document",
"<",
"K",
">>",
"listIterator",
"=",
"documents_",
".",
"listIterator",
"(",
"index",
")",
";",
"Document",
"<",
"K",
">",
"document",
"=",
"listIterator",
".",
"nex... | Remove a document from this cluster.
@param index the index of vector container of documents | [
"Remove",
"a",
"document",
"from",
"this",
"cluster",
"."
] | a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce | https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/mining/cluster/Cluster.java#L127-L133 | train | Removes a document from the composite. | [
30522,
11675,
6366,
1035,
6254,
1006,
20014,
5950,
1007,
1063,
2862,
21646,
8844,
1026,
6254,
1026,
1047,
1028,
1028,
2862,
21646,
8844,
1027,
5491,
1035,
1012,
2862,
21646,
8844,
1006,
5950,
1007,
1025,
6254,
1026,
1047,
1028,
6254,
1027,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
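
A plain-Java sketch of the removal in the record above: the list slot is nulled so later indices stay valid, and the document's feature vector is subtracted from the cluster's composite vector. double[] stands in for the sparse vector type.

import java.util.ArrayList;
import java.util.List;

public class ClusterRemoveSketch {
    public static void main(String[] args) {
        List<double[]> documents = new ArrayList<>(List.of(new double[]{1, 0}, new double[]{0, 2}));
        double[] composite = {1, 2}; // sum of all document feature vectors

        int index = 1;
        double[] doc = documents.set(index, null); // keep the slot, drop the document
        for (int i = 0; i < composite.length; i++) {
            composite[i] -= doc[i];                // composite_.sub_vector(feature) analogue
        }
        System.out.println(composite[0] + "," + composite[1]); // 1.0,0.0
    }
}
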
hankcs/HanLP | src/main/java/com/hankcs/hanlp/corpus/io/ByteArray.java | ByteArray.createByteArray | public static ByteArray createByteArray(String path)
{
byte[] bytes = IOUtil.readBytes(path);
if (bytes == null) return null;
return new ByteArray(bytes);
} | java | public static ByteArray createByteArray(String path)
{
byte[] bytes = IOUtil.readBytes(path);
if (bytes == null) return null;
return new ByteArray(bytes);
} | [
"public",
"static",
"ByteArray",
"createByteArray",
"(",
"String",
"path",
")",
"{",
"byte",
"[",
"]",
"bytes",
"=",
"IOUtil",
".",
"readBytes",
"(",
"path",
")",
";",
"if",
"(",
"bytes",
"==",
"null",
")",
"return",
"null",
";",
"return",
"new",
"Byte... | 从文件读取一个字节数组
@param path
@return | [
"从文件读取一个字节数组"
] | a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce | https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/io/ByteArray.java#L45-L50 | train | Create ByteArray from a file in the base directory. | [
30522,
2270,
10763,
24880,
2906,
9447,
3443,
3762,
27058,
11335,
2100,
1006,
5164,
4130,
1007,
1063,
24880,
1031,
1033,
27507,
1027,
22834,
21823,
2140,
1012,
3191,
3762,
4570,
1006,
4130,
1007,
1025,
2065,
1006,
27507,
1027,
1027,
19701,
1... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
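
A hedged usage sketch of the helper in the record above; it assumes HanLP is on the classpath, and the file path is hypothetical. Note the null return on read failure rather than an exception.

import com.hankcs.hanlp.corpus.io.ByteArray;

public class ByteArraySketch {
    public static void main(String[] args) {
        ByteArray bytes = ByteArray.createByteArray("data/model.bin");
        if (bytes == null) {
            // the helper signals a missing or unreadable file with null
            System.out.println("file missing or unreadable");
        }
    }
}
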
netty/netty | codec-http2/src/main/java/io/netty/handler/codec/http2/HpackDynamicTable.java | HpackDynamicTable.setCapacity | public void setCapacity(long capacity) {
if (capacity < MIN_HEADER_TABLE_SIZE || capacity > MAX_HEADER_TABLE_SIZE) {
throw new IllegalArgumentException("capacity is invalid: " + capacity);
}
// initially capacity will be -1 so init won't return here
if (this.capacity == capacity) {
return;
}
this.capacity = capacity;
if (capacity == 0) {
clear();
} else {
// initially size will be 0 so remove won't be called
while (size > capacity) {
remove();
}
}
int maxEntries = (int) (capacity / HpackHeaderField.HEADER_ENTRY_OVERHEAD);
if (capacity % HpackHeaderField.HEADER_ENTRY_OVERHEAD != 0) {
maxEntries++;
}
// check if capacity change requires us to reallocate the array
if (hpackHeaderFields != null && hpackHeaderFields.length == maxEntries) {
return;
}
HpackHeaderField[] tmp = new HpackHeaderField[maxEntries];
// initially length will be 0 so there will be no copy
int len = length();
int cursor = tail;
for (int i = 0; i < len; i++) {
HpackHeaderField entry = hpackHeaderFields[cursor++];
tmp[i] = entry;
if (cursor == hpackHeaderFields.length) {
cursor = 0;
}
}
tail = 0;
head = tail + len;
hpackHeaderFields = tmp;
} | java | public void setCapacity(long capacity) {
if (capacity < MIN_HEADER_TABLE_SIZE || capacity > MAX_HEADER_TABLE_SIZE) {
throw new IllegalArgumentException("capacity is invalid: " + capacity);
}
// initially capacity will be -1 so init won't return here
if (this.capacity == capacity) {
return;
}
this.capacity = capacity;
if (capacity == 0) {
clear();
} else {
// initially size will be 0 so remove won't be called
while (size > capacity) {
remove();
}
}
int maxEntries = (int) (capacity / HpackHeaderField.HEADER_ENTRY_OVERHEAD);
if (capacity % HpackHeaderField.HEADER_ENTRY_OVERHEAD != 0) {
maxEntries++;
}
// check if capacity change requires us to reallocate the array
if (hpackHeaderFields != null && hpackHeaderFields.length == maxEntries) {
return;
}
HpackHeaderField[] tmp = new HpackHeaderField[maxEntries];
// initially length will be 0 so there will be no copy
int len = length();
int cursor = tail;
for (int i = 0; i < len; i++) {
HpackHeaderField entry = hpackHeaderFields[cursor++];
tmp[i] = entry;
if (cursor == hpackHeaderFields.length) {
cursor = 0;
}
}
tail = 0;
head = tail + len;
hpackHeaderFields = tmp;
} | [
"public",
"void",
"setCapacity",
"(",
"long",
"capacity",
")",
"{",
"if",
"(",
"capacity",
"<",
"MIN_HEADER_TABLE_SIZE",
"||",
"capacity",
">",
"MAX_HEADER_TABLE_SIZE",
")",
"{",
"throw",
"new",
"IllegalArgumentException",
"(",
"\"capacity is invalid: \"",
"+",
"cap... | Set the maximum size of the dynamic table. Entries are evicted from the dynamic table until
the size of the table is less than or equal to the maximum size. | [
"Set",
"the",
"maximum",
"size",
"of",
"the",
"dynamic",
"table",
".",
"Entries",
"are",
"evicted",
"from",
"the",
"dynamic",
"table",
"until",
"the",
"size",
"of",
"the",
"table",
"is",
"less",
"than",
"or",
"equal",
"to",
"the",
"maximum",
"size",
"."
... | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/HpackDynamicTable.java#L153-L198 | train | Sets the capacity of the HpackHeaderTable. | [
30522,
2270,
11675,
2275,
17695,
6305,
3012,
1006,
2146,
3977,
1007,
1063,
2065,
1006,
3977,
1026,
8117,
1035,
20346,
1035,
2795,
1035,
2946,
1064,
1064,
3977,
1028,
4098,
1035,
20346,
1035,
2795,
1035,
2946,
1007,
1063,
5466,
2047,
6206,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
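
A sketch of just the ring-buffer copy at the end of setCapacity in the record above: entries are copied in tail order into a fresh array so that tail becomes 0 and head becomes length. Strings stand in for HPACK header fields.

public class RingResizeSketch {
    public static void main(String[] args) {
        String[] ring = {"c", null, "a", "b"}; // wrapped ring: tail=2, head=1, length=3
        int tail = 2, length = 3;

        String[] tmp = new String[8]; // new capacity
        int cursor = tail;
        for (int i = 0; i < length; i++) {
            tmp[i] = ring[cursor++];
            if (cursor == ring.length) {
                cursor = 0; // wrap around, as in the original loop
            }
        }
        tail = 0;
        int head = length; // entries are now contiguous at the front
        System.out.println(tmp[0] + tmp[1] + tmp[2] + " head=" + head); // abc head=3
    }
}
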
looly/hutool | hutool-core/src/main/java/cn/hutool/core/date/DateUtil.java | DateUtil.calendar | public static Calendar calendar(Date date) {
if(date instanceof DateTime) {
return ((DateTime)date).toCalendar();
}else {
return calendar(date.getTime());
}
} | java | public static Calendar calendar(Date date) {
if(date instanceof DateTime) {
return ((DateTime)date).toCalendar();
}else {
return calendar(date.getTime());
}
} | [
"public",
"static",
"Calendar",
"calendar",
"(",
"Date",
"date",
")",
"{",
"if",
"(",
"date",
"instanceof",
"DateTime",
")",
"{",
"return",
"(",
"(",
"DateTime",
")",
"date",
")",
".",
"toCalendar",
"(",
")",
";",
"}",
"else",
"{",
"return",
"calendar"... | 转换为Calendar对象
@param date 日期对象
@return Calendar对象 | [
"转换为Calendar对象"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/date/DateUtil.java#L90-L96 | train | Returns a calendar object from a java. util. Date object. | [
30522,
2270,
10763,
8094,
8094,
1006,
3058,
3058,
1007,
1063,
2065,
1006,
3058,
6013,
11253,
3058,
7292,
1007,
1063,
2709,
1006,
1006,
3058,
7292,
1007,
3058,
1007,
1012,
2000,
9289,
10497,
2906,
1006,
1007,
1025,
1065,
2842,
1063,
2709,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
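
A hedged usage sketch of the converter in the record above, assuming hutool-core is on the classpath.

import cn.hutool.core.date.DateUtil;
import java.util.Calendar;
import java.util.Date;

public class CalendarSketch {
    public static void main(String[] args) {
        Calendar cal = DateUtil.calendar(new Date()); // also accepts Hutool's DateTime
        System.out.println(cal.get(Calendar.YEAR));
    }
}
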
netty/netty | codec-http/src/main/java/io/netty/handler/codec/http/multipart/HttpPostRequestEncoder.java | HttpPostRequestEncoder.nextChunk | private HttpContent nextChunk() throws ErrorDataEncoderException {
if (isLastChunk) {
isLastChunkSent = true;
return LastHttpContent.EMPTY_LAST_CONTENT;
}
// first test if previous buffer is not empty
int size = calculateRemainingSize();
if (size <= 0) {
// NextChunk from buffer
ByteBuf buffer = fillByteBuf();
return new DefaultHttpContent(buffer);
}
// size > 0
if (currentData != null) {
// continue to read data
HttpContent chunk;
if (isMultipart) {
chunk = encodeNextChunkMultipart(size);
} else {
chunk = encodeNextChunkUrlEncoded(size);
}
if (chunk != null) {
// NextChunk from data
return chunk;
}
size = calculateRemainingSize();
}
if (!iterator.hasNext()) {
return lastChunk();
}
while (size > 0 && iterator.hasNext()) {
currentData = iterator.next();
HttpContent chunk;
if (isMultipart) {
chunk = encodeNextChunkMultipart(size);
} else {
chunk = encodeNextChunkUrlEncoded(size);
}
if (chunk == null) {
// not enough
size = calculateRemainingSize();
continue;
}
// NextChunk from data
return chunk;
}
// end since no more data
return lastChunk();
} | java | private HttpContent nextChunk() throws ErrorDataEncoderException {
if (isLastChunk) {
isLastChunkSent = true;
return LastHttpContent.EMPTY_LAST_CONTENT;
}
// first test if previous buffer is not empty
int size = calculateRemainingSize();
if (size <= 0) {
// NextChunk from buffer
ByteBuf buffer = fillByteBuf();
return new DefaultHttpContent(buffer);
}
// size > 0
if (currentData != null) {
// continue to read data
HttpContent chunk;
if (isMultipart) {
chunk = encodeNextChunkMultipart(size);
} else {
chunk = encodeNextChunkUrlEncoded(size);
}
if (chunk != null) {
// NextChunk from data
return chunk;
}
size = calculateRemainingSize();
}
if (!iterator.hasNext()) {
return lastChunk();
}
while (size > 0 && iterator.hasNext()) {
currentData = iterator.next();
HttpContent chunk;
if (isMultipart) {
chunk = encodeNextChunkMultipart(size);
} else {
chunk = encodeNextChunkUrlEncoded(size);
}
if (chunk == null) {
// not enough
size = calculateRemainingSize();
continue;
}
// NextChunk from data
return chunk;
}
// end since no more data
return lastChunk();
} | [
"private",
"HttpContent",
"nextChunk",
"(",
")",
"throws",
"ErrorDataEncoderException",
"{",
"if",
"(",
"isLastChunk",
")",
"{",
"isLastChunkSent",
"=",
"true",
";",
"return",
"LastHttpContent",
".",
"EMPTY_LAST_CONTENT",
";",
"}",
"// first test if previous buffer is n... | Returns the next available HttpChunk. The caller is responsible to test if this chunk is the last one (isLast()),
in order to stop calling this getMethod.
@return the next available HttpChunk
@throws ErrorDataEncoderException
if the encoding is in error | [
"Returns",
"the",
"next",
"available",
"HttpChunk",
".",
"The",
"caller",
"is",
"responsible",
"to",
"test",
"if",
"this",
"chunk",
"is",
"the",
"last",
"one",
"(",
"isLast",
"()",
")",
"in",
"order",
"to",
"stop",
"calling",
"this",
"getMethod",
"."
] | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http/src/main/java/io/netty/handler/codec/http/multipart/HttpPostRequestEncoder.java#L1058-L1106 | train | Get the next chunk from the underlying iterator. | [
30522,
2797,
8299,
8663,
6528,
2102,
2279,
20760,
8950,
1006,
1007,
11618,
7561,
2850,
2696,
2368,
16044,
2890,
2595,
24422,
1063,
2065,
1006,
25340,
3367,
20760,
8950,
1007,
1063,
25340,
3367,
20760,
8950,
5054,
2102,
1027,
2995,
1025,
270... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
looly/hutool | hutool-core/src/main/java/cn/hutool/core/io/IoUtil.java | IoUtil.read | public static String read(InputStream in, String charsetName) throws IORuntimeException {
FastByteArrayOutputStream out = read(in);
return StrUtil.isBlank(charsetName) ? out.toString() : out.toString(charsetName);
} | java | public static String read(InputStream in, String charsetName) throws IORuntimeException {
FastByteArrayOutputStream out = read(in);
return StrUtil.isBlank(charsetName) ? out.toString() : out.toString(charsetName);
} | [
"public",
"static",
"String",
"read",
"(",
"InputStream",
"in",
",",
"String",
"charsetName",
")",
"throws",
"IORuntimeException",
"{",
"FastByteArrayOutputStream",
"out",
"=",
"read",
"(",
"in",
")",
";",
"return",
"StrUtil",
".",
"isBlank",
"(",
"charsetName",... | 从流中读取内容
@param in 输入流
@param charsetName 字符集
@return 内容
@throws IORuntimeException IO异常 | [
"从流中读取内容"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/IoUtil.java#L396-L399 | train | Reads a sequence of bytes from the specified input stream and returns the resulting string. | [
30522,
2270,
10763,
5164,
3191,
1006,
20407,
25379,
1999,
1010,
5164,
25869,
13462,
18442,
1007,
11618,
22834,
15532,
7292,
10288,
24422,
1063,
3435,
3762,
27058,
11335,
29337,
25856,
16446,
25379,
2041,
1027,
3191,
1006,
1999,
1007,
1025,
27... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
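A minimal usage sketch for the IoUtil.read record above; the file name is a hypothetical stand-in and the charset is just an example:

import cn.hutool.core.io.IoUtil;
import java.io.FileInputStream;
import java.io.InputStream;

public class ReadDemo {
    public static void main(String[] args) throws Exception {
        // "data.txt" is a hypothetical UTF-8 text file.
        try (InputStream in = new FileInputStream("data.txt")) {
            // A blank charset name falls back to the platform default, per the record's isBlank branch.
            String content = IoUtil.read(in, "UTF-8");
            System.out.println(content);
        }
    }
}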
looly/hutool | hutool-poi/src/main/java/cn/hutool/poi/excel/ExcelWriter.java | ExcelWriter.setHeaderOrFooter | public ExcelWriter setHeaderOrFooter(String text, Align align, boolean isFooter) {
final HeaderFooter headerFooter = isFooter ? this.sheet.getFooter() : this.sheet.getHeader();
switch (align) {
case LEFT:
headerFooter.setLeft(text);
break;
case RIGHT:
headerFooter.setRight(text);
break;
case CENTER:
headerFooter.setCenter(text);
break;
default:
break;
}
return this;
} | java | public ExcelWriter setHeaderOrFooter(String text, Align align, boolean isFooter) {
final HeaderFooter headerFooter = isFooter ? this.sheet.getFooter() : this.sheet.getHeader();
switch (align) {
case LEFT:
headerFooter.setLeft(text);
break;
case RIGHT:
headerFooter.setRight(text);
break;
case CENTER:
headerFooter.setCenter(text);
break;
default:
break;
}
return this;
} | [
"public",
"ExcelWriter",
"setHeaderOrFooter",
"(",
"String",
"text",
",",
"Align",
"align",
",",
"boolean",
"isFooter",
")",
"{",
"final",
"HeaderFooter",
"headerFooter",
"=",
"isFooter",
"?",
"this",
".",
"sheet",
".",
"getFooter",
"(",
")",
":",
"this",
".... | 设置Excel页眉或页脚
@param text 页脚的文本
@param align 对齐方式枚举 {@link Align}
@param isFooter 是否为页脚,false表示页眉,true表示页脚
@return this
@since 4.1.0 | [
"设置Excel页眉或页脚"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-poi/src/main/java/cn/hutool/poi/excel/ExcelWriter.java#L465-L481 | train | Sets the header or footer of the Excel file. | [
30522,
2270,
24970,
15994,
6662,
13775,
10624,
12881,
17206,
2121,
1006,
5164,
3793,
1010,
25705,
25705,
1010,
22017,
20898,
2003,
13064,
2121,
1007,
1063,
2345,
20346,
13064,
2121,
20346,
13064,
2121,
1027,
2003,
30524,
1006,
1007,
1025,
694... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
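A hedged usage sketch for the setHeaderOrFooter record above; the output path is hypothetical, and the Align import assumes the enum lives in hutool's cn.hutool.poi.excel package:

import cn.hutool.poi.excel.Align; // assumed package for the Align enum referenced in the record
import cn.hutool.poi.excel.ExcelUtil;
import cn.hutool.poi.excel.ExcelWriter;
import java.util.Arrays;

public class HeaderFooterDemo {
    public static void main(String[] args) {
        ExcelWriter writer = ExcelUtil.getWriter("out.xlsx"); // hypothetical output path
        writer.setHeaderOrFooter("Monthly Report", Align.CENTER, false); // centered page header
        writer.setHeaderOrFooter("Confidential", Align.RIGHT, true);     // right-aligned page footer
        writer.writeRow(Arrays.asList("a", "b", "c"));
        writer.flush(); // write out to the destination given to getWriter
        writer.close();
    }
}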
hankcs/HanLP | src/main/java/com/hankcs/hanlp/corpus/occurrence/PairFrequency.java | PairFrequency.create | public static PairFrequency create(String first, char delimiter ,String second)
{
PairFrequency pairFrequency = new PairFrequency(first + delimiter + second);
pairFrequency.first = first;
pairFrequency.delimiter = delimiter;
pairFrequency.second = second;
return pairFrequency;
} | java | public static PairFrequency create(String first, char delimiter ,String second)
{
PairFrequency pairFrequency = new PairFrequency(first + delimiter + second);
pairFrequency.first = first;
pairFrequency.delimiter = delimiter;
pairFrequency.second = second;
return pairFrequency;
} | [
"public",
"static",
"PairFrequency",
"create",
"(",
"String",
"first",
",",
"char",
"delimiter",
",",
"String",
"second",
")",
"{",
"PairFrequency",
"pairFrequency",
"=",
"new",
"PairFrequency",
"(",
"first",
"+",
"delimiter",
"+",
"second",
")",
";",
"pairFre... | 构造一个pf
@param first
@param delimiter
@param second
@return | [
"构造一个pf"
] | a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce | https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/occurrence/PairFrequency.java#L57-L64 | train | Create PairFrequency object. | [
30522,
2270,
10763,
3940,
19699,
2063,
4226,
9407,
3443,
1006,
5164,
2034,
1010,
25869,
3972,
27605,
3334,
1010,
5164,
2117,
1007,
1063,
3940,
19699,
2063,
4226,
9407,
3940,
19699,
2063,
4226,
9407,
1027,
2047,
3940,
19699,
2063,
4226,
9407... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
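A small sketch of the factory above, assuming the first/second fields are publicly readable, as the record's direct assignments suggest; the words and delimiter are arbitrary sample values:

import com.hankcs.hanlp.corpus.occurrence.PairFrequency;

public class PairDemo {
    public static void main(String[] args) {
        // The key becomes first + delimiter + second; the parts stay accessible on the object.
        PairFrequency pf = PairFrequency.create("举行", '→', "会议");
        System.out.println(pf.first + " -> " + pf.second);
    }
}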
apache/flink | flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/proxy/KinesisProxy.java | KinesisProxy.getShardList | @Override
public GetShardListResult getShardList(Map<String, String> streamNamesWithLastSeenShardIds) throws InterruptedException {
GetShardListResult result = new GetShardListResult();
for (Map.Entry<String, String> streamNameWithLastSeenShardId : streamNamesWithLastSeenShardIds.entrySet()) {
String stream = streamNameWithLastSeenShardId.getKey();
String lastSeenShardId = streamNameWithLastSeenShardId.getValue();
result.addRetrievedShardsToStream(stream, getShardsOfStream(stream, lastSeenShardId));
}
return result;
} | java | @Override
public GetShardListResult getShardList(Map<String, String> streamNamesWithLastSeenShardIds) throws InterruptedException {
GetShardListResult result = new GetShardListResult();
for (Map.Entry<String, String> streamNameWithLastSeenShardId : streamNamesWithLastSeenShardIds.entrySet()) {
String stream = streamNameWithLastSeenShardId.getKey();
String lastSeenShardId = streamNameWithLastSeenShardId.getValue();
result.addRetrievedShardsToStream(stream, getShardsOfStream(stream, lastSeenShardId));
}
return result;
} | [
"@",
"Override",
"public",
"GetShardListResult",
"getShardList",
"(",
"Map",
"<",
"String",
",",
"String",
">",
"streamNamesWithLastSeenShardIds",
")",
"throws",
"InterruptedException",
"{",
"GetShardListResult",
"result",
"=",
"new",
"GetShardListResult",
"(",
")",
"... | {@inheritDoc} | [
"{"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/proxy/KinesisProxy.java#L275-L285 | train | Get the shard list for the given stream names with last seen shards. | [
30522,
1030,
2058,
15637,
2270,
4152,
11783,
9863,
6072,
11314,
4152,
11783,
9863,
1006,
4949,
1026,
5164,
1010,
5164,
1028,
5460,
18442,
26760,
8939,
8523,
3215,
12129,
7377,
17080,
5104,
1007,
11618,
7153,
10288,
24422,
1063,
4152,
11783,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
apache/flink | flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/SerializedCheckpointData.java | SerializedCheckpointData.fromDeque | public static <T> SerializedCheckpointData[] fromDeque(
ArrayDeque<Tuple2<Long, Set<T>>> checkpoints,
TypeSerializer<T> serializer) throws IOException {
return fromDeque(checkpoints, serializer, new DataOutputSerializer(128));
} | java | public static <T> SerializedCheckpointData[] fromDeque(
ArrayDeque<Tuple2<Long, Set<T>>> checkpoints,
TypeSerializer<T> serializer) throws IOException {
return fromDeque(checkpoints, serializer, new DataOutputSerializer(128));
} | [
"public",
"static",
"<",
"T",
">",
"SerializedCheckpointData",
"[",
"]",
"fromDeque",
"(",
"ArrayDeque",
"<",
"Tuple2",
"<",
"Long",
",",
"Set",
"<",
"T",
">",
">",
">",
"checkpoints",
",",
"TypeSerializer",
"<",
"T",
">",
"serializer",
")",
"throws",
"I... | Converts a list of checkpoints with elements into an array of SerializedCheckpointData.
@param checkpoints The checkpoints to be converted into IdsCheckpointData.
@param serializer The serializer to serialize the IDs.
@param <T> The type of the ID.
@return An array of serializable SerializedCheckpointData, one per entry in the queue.
@throws IOException Thrown, if the serialization fails. | [
"Converts",
"a",
"list",
"of",
"checkpoints",
"with",
"elements",
"into",
"an",
"array",
"of",
"SerializedCheckpointData",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/SerializedCheckpointData.java#L98-L102 | train | Converts a sequence of CID - addressable checkpoints from a deque of CID - addressable checkpoints. | [
30522,
2270,
10763,
1026,
1056,
1028,
30524,
22834,
10288,
24422,
1063,
2709,
2013,
3207,
4226,
1006,
26520,
2015,
1010,
7642,
17629,
1010,
2047,
2951,
5833,
18780,
8043,
4818,
17629,
1006,
11899,
1007,
1007,
1025,
1065,
102,
0,
0,
0,
0,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
apache/spark | sql/core/src/main/java/org/apache/spark/sql/vectorized/ColumnVector.java | ColumnVector.getLongs | public long[] getLongs(int rowId, int count) {
long[] res = new long[count];
for (int i = 0; i < count; i++) {
res[i] = getLong(rowId + i);
}
return res;
} | java | public long[] getLongs(int rowId, int count) {
long[] res = new long[count];
for (int i = 0; i < count; i++) {
res[i] = getLong(rowId + i);
}
return res;
} | [
"public",
"long",
"[",
"]",
"getLongs",
"(",
"int",
"rowId",
",",
"int",
"count",
")",
"{",
"long",
"[",
"]",
"res",
"=",
"new",
"long",
"[",
"count",
"]",
";",
"for",
"(",
"int",
"i",
"=",
"0",
";",
"i",
"<",
"count",
";",
"i",
"++",
")",
... | Gets long type values from [rowId, rowId + count). The return values for the null slots
are undefined and can be anything. | [
"Gets",
"long",
"type",
"values",
"from",
"[",
"rowId",
"rowId",
"+",
"count",
")",
".",
"The",
"return",
"values",
"for",
"the",
"null",
"slots",
"are",
"undefined",
"and",
"can",
"be",
"anything",
"."
] | 25ee0474f47d9c30d6f553a7892d9549f91071cf | https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/core/src/main/java/org/apache/spark/sql/vectorized/ColumnVector.java#L164-L170 | train | Gets the longs from the specified row. | [
30522,
2270,
2146,
1031,
1033,
2131,
10052,
2015,
1006,
20014,
5216,
3593,
1010,
20014,
4175,
1007,
1063,
2146,
1031,
1033,
24501,
1027,
2047,
2146,
1031,
4175,
1033,
1025,
2005,
1006,
20014,
1045,
1027,
1014,
1025,
1045,
1026,
4175,
1025,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
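A hedged sketch of the caveat in the docstring above: batch-read the column, but gate every slot on the null bitmap, since null slots hold undefined values. The helper and its inputs are hypothetical:

import org.apache.spark.sql.vectorized.ColumnVector;

public final class LongColumnSum {
    // Sums the non-null entries of a LongType column holding n rows.
    static long sumNonNull(ColumnVector vector, int n) {
        long[] values = vector.getLongs(0, n); // slots at null positions are undefined
        long sum = 0;
        for (int i = 0; i < n; i++) {
            if (!vector.isNullAt(i)) { // consult the null bitmap before trusting a slot
                sum += values[i];
            }
        }
        return sum;
    }
}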
netty/netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2FrameCodec.java | Http2FrameCodec.write | @Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
if (msg instanceof Http2DataFrame) {
Http2DataFrame dataFrame = (Http2DataFrame) msg;
encoder().writeData(ctx, dataFrame.stream().id(), dataFrame.content(),
dataFrame.padding(), dataFrame.isEndStream(), promise);
} else if (msg instanceof Http2HeadersFrame) {
writeHeadersFrame(ctx, (Http2HeadersFrame) msg, promise);
} else if (msg instanceof Http2WindowUpdateFrame) {
Http2WindowUpdateFrame frame = (Http2WindowUpdateFrame) msg;
Http2FrameStream frameStream = frame.stream();
// It is legit to send a WINDOW_UPDATE frame for the connection stream. The parent channel doesn't attempt
// to set the Http2FrameStream so we assume if it is null the WINDOW_UPDATE is for the connection stream.
try {
if (frameStream == null) {
increaseInitialConnectionWindow(frame.windowSizeIncrement());
} else {
consumeBytes(frameStream.id(), frame.windowSizeIncrement());
}
promise.setSuccess();
} catch (Throwable t) {
promise.setFailure(t);
}
} else if (msg instanceof Http2ResetFrame) {
Http2ResetFrame rstFrame = (Http2ResetFrame) msg;
encoder().writeRstStream(ctx, rstFrame.stream().id(), rstFrame.errorCode(), promise);
} else if (msg instanceof Http2PingFrame) {
Http2PingFrame frame = (Http2PingFrame) msg;
encoder().writePing(ctx, frame.ack(), frame.content(), promise);
} else if (msg instanceof Http2SettingsFrame) {
encoder().writeSettings(ctx, ((Http2SettingsFrame) msg).settings(), promise);
} else if (msg instanceof Http2SettingsAckFrame) {
            // In the event of manual SETTINGS ACK it is assumed the encoder will apply the earliest received but not
// yet ACKed settings.
encoder().writeSettingsAck(ctx, promise);
} else if (msg instanceof Http2GoAwayFrame) {
writeGoAwayFrame(ctx, (Http2GoAwayFrame) msg, promise);
} else if (msg instanceof Http2UnknownFrame) {
Http2UnknownFrame unknownFrame = (Http2UnknownFrame) msg;
encoder().writeFrame(ctx, unknownFrame.frameType(), unknownFrame.stream().id(),
unknownFrame.flags(), unknownFrame.content(), promise);
} else if (!(msg instanceof Http2Frame)) {
ctx.write(msg, promise);
} else {
ReferenceCountUtil.release(msg);
throw new UnsupportedMessageTypeException(msg);
}
} | java | @Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
if (msg instanceof Http2DataFrame) {
Http2DataFrame dataFrame = (Http2DataFrame) msg;
encoder().writeData(ctx, dataFrame.stream().id(), dataFrame.content(),
dataFrame.padding(), dataFrame.isEndStream(), promise);
} else if (msg instanceof Http2HeadersFrame) {
writeHeadersFrame(ctx, (Http2HeadersFrame) msg, promise);
} else if (msg instanceof Http2WindowUpdateFrame) {
Http2WindowUpdateFrame frame = (Http2WindowUpdateFrame) msg;
Http2FrameStream frameStream = frame.stream();
// It is legit to send a WINDOW_UPDATE frame for the connection stream. The parent channel doesn't attempt
// to set the Http2FrameStream so we assume if it is null the WINDOW_UPDATE is for the connection stream.
try {
if (frameStream == null) {
increaseInitialConnectionWindow(frame.windowSizeIncrement());
} else {
consumeBytes(frameStream.id(), frame.windowSizeIncrement());
}
promise.setSuccess();
} catch (Throwable t) {
promise.setFailure(t);
}
} else if (msg instanceof Http2ResetFrame) {
Http2ResetFrame rstFrame = (Http2ResetFrame) msg;
encoder().writeRstStream(ctx, rstFrame.stream().id(), rstFrame.errorCode(), promise);
} else if (msg instanceof Http2PingFrame) {
Http2PingFrame frame = (Http2PingFrame) msg;
encoder().writePing(ctx, frame.ack(), frame.content(), promise);
} else if (msg instanceof Http2SettingsFrame) {
encoder().writeSettings(ctx, ((Http2SettingsFrame) msg).settings(), promise);
} else if (msg instanceof Http2SettingsAckFrame) {
            // In the event of manual SETTINGS ACK it is assumed the encoder will apply the earliest received but not
// yet ACKed settings.
encoder().writeSettingsAck(ctx, promise);
} else if (msg instanceof Http2GoAwayFrame) {
writeGoAwayFrame(ctx, (Http2GoAwayFrame) msg, promise);
} else if (msg instanceof Http2UnknownFrame) {
Http2UnknownFrame unknownFrame = (Http2UnknownFrame) msg;
encoder().writeFrame(ctx, unknownFrame.frameType(), unknownFrame.stream().id(),
unknownFrame.flags(), unknownFrame.content(), promise);
} else if (!(msg instanceof Http2Frame)) {
ctx.write(msg, promise);
} else {
ReferenceCountUtil.release(msg);
throw new UnsupportedMessageTypeException(msg);
}
} | [
"@",
"Override",
"public",
"void",
"write",
"(",
"ChannelHandlerContext",
"ctx",
",",
"Object",
"msg",
",",
"ChannelPromise",
"promise",
")",
"{",
"if",
"(",
"msg",
"instanceof",
"Http2DataFrame",
")",
"{",
"Http2DataFrame",
"dataFrame",
"=",
"(",
"Http2DataFram... | Processes all {@link Http2Frame}s. {@link Http2StreamFrame}s may only originate in child
streams. | [
"Processes",
"all",
"{"
] | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/Http2FrameCodec.java#L272-L319 | train | Override method to write a message to the HTTP2 protocol. | [
30522,
1030,
2058,
15637,
2270,
11675,
4339,
1006,
3149,
11774,
3917,
8663,
18209,
14931,
2595,
1010,
4874,
5796,
2290,
1010,
3149,
21572,
28732,
4872,
1007,
1063,
2065,
1006,
5796,
2290,
6013,
11253,
8299,
2475,
2850,
2696,
15643,
1007,
10... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
looly/hutool | hutool-db/src/main/java/cn/hutool/db/AbstractDb.java | AbstractDb.find | public <T> List<T> find(Entity where, Class<T> beanClass) throws SQLException {
return find(where.getFieldNames(), where, BeanListHandler.create(beanClass));
} | java | public <T> List<T> find(Entity where, Class<T> beanClass) throws SQLException {
return find(where.getFieldNames(), where, BeanListHandler.create(beanClass));
} | [
"public",
"<",
"T",
">",
"List",
"<",
"T",
">",
"find",
"(",
"Entity",
"where",
",",
"Class",
"<",
"T",
">",
"beanClass",
")",
"throws",
"SQLException",
"{",
"return",
"find",
"(",
"where",
".",
"getFieldNames",
"(",
")",
",",
"where",
",",
"BeanList... | 查询数据列表,返回字段由where参数指定<br>
查询条件为多个key value对表示,默认key = value,如果使用其它条件可以使用:where.put("key", " > 1"),value也可以传Condition对象,key被忽略
@param <T> Bean类型
@param where 条件实体类(包含表名)
@return 数据对象列表
@throws SQLException SQL执行异常
@since 3.2.2 | [
"查询数据列表,返回字段由where参数指定<br",
">",
"查询条件为多个key",
"value对表示,默认key",
"=",
"value,如果使用其它条件可以使用:where",
".",
"put",
"(",
"key",
">",
";",
"1",
")",
",value也可以传Condition对象,key被忽略"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/AbstractDb.java#L505-L507 | train | Find entities by field name and type | [
30522,
2270,
1026,
1056,
1028,
2862,
1026,
1056,
1028,
2424,
1006,
9178,
2073,
1010,
2465,
1026,
1056,
1028,
14068,
26266,
1007,
11618,
29296,
10288,
24422,
1063,
2709,
2424,
1006,
2073,
1012,
2131,
3790,
18442,
2015,
1006,
1007,
1010,
2073... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
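A hedged sketch of the condition semantics described in the docstring above, via hutool's Db facade; it assumes a default datasource is configured, and the user table plus its bean are hypothetical:

import cn.hutool.db.Db;
import cn.hutool.db.Entity;
import java.util.List;

public class FindDemo {
    public static class User { // minimal bean matching the hypothetical table's columns
        private String name;
        private int age;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
        public int getAge() { return age; }
        public void setAge(int age) { this.age = age; }
    }

    public static void main(String[] args) throws Exception {
        Entity where = Entity.create("user"); // the table name rides on the condition entity
        where.set("name", "Tom");             // default semantics: name = 'Tom'
        where.set("age", "> 18");             // other operators ride inside the value, as the docstring notes
        List<User> users = Db.use().find(where, User.class);
        System.out.println(users.size());
    }
}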
hankcs/HanLP | src/main/java/com/hankcs/hanlp/recognition/ns/PlaceRecognition.java | PlaceRecognition.viterbiCompute | public static List<NS> viterbiCompute(List<EnumItem<NS>> roleTagList)
{
return Viterbi.computeEnum(roleTagList, PlaceDictionary.transformMatrixDictionary);
} | java | public static List<NS> viterbiCompute(List<EnumItem<NS>> roleTagList)
{
return Viterbi.computeEnum(roleTagList, PlaceDictionary.transformMatrixDictionary);
} | [
"public",
"static",
"List",
"<",
"NS",
">",
"viterbiCompute",
"(",
"List",
"<",
"EnumItem",
"<",
"NS",
">",
">",
"roleTagList",
")",
"{",
"return",
"Viterbi",
".",
"computeEnum",
"(",
"roleTagList",
",",
"PlaceDictionary",
".",
"transformMatrixDictionary",
")"... | 维特比算法求解最优标签
@param roleTagList
@return | [
"维特比算法求解最优标签"
] | a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce | https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/recognition/ns/PlaceRecognition.java#L125-L128 | train | Compute the Viterbi role tag list. | [
30522,
2270,
10763,
2862,
1026,
24978,
1028,
6819,
3334,
13592,
25377,
10421,
1006,
2862,
1026,
4372,
12717,
18532,
1026,
24978,
1028,
1028,
2535,
15900,
9863,
1007,
1063,
2709,
6819,
3334,
5638,
1012,
24134,
2368,
2819,
1006,
2535,
15900,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
apache/flink | flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlFunctionUtils.java | SqlFunctionUtils.regexpReplace | public static String regexpReplace(String str, String regex, String replacement) {
if (regex.isEmpty()) {
return str;
}
try {
// we should use StringBuffer here because Matcher only accepts it
StringBuffer sb = new StringBuffer();
Matcher m = REGEXP_PATTERN_CACHE.get(regex).matcher(str);
while (m.find()) {
m.appendReplacement(sb, replacement);
}
m.appendTail(sb);
return sb.toString();
} catch (Exception e) {
LOG.error(
String.format("Exception in regexpReplace('%s', '%s', '%s')", str, regex, replacement),
e);
// return null if exception in regex replace
return null;
}
} | java | public static String regexpReplace(String str, String regex, String replacement) {
if (regex.isEmpty()) {
return str;
}
try {
// we should use StringBuffer here because Matcher only accepts it
StringBuffer sb = new StringBuffer();
Matcher m = REGEXP_PATTERN_CACHE.get(regex).matcher(str);
while (m.find()) {
m.appendReplacement(sb, replacement);
}
m.appendTail(sb);
return sb.toString();
} catch (Exception e) {
LOG.error(
String.format("Exception in regexpReplace('%s', '%s', '%s')", str, regex, replacement),
e);
// return null if exception in regex replace
return null;
}
} | [
"public",
"static",
"String",
"regexpReplace",
"(",
"String",
"str",
",",
"String",
"regex",
",",
"String",
"replacement",
")",
"{",
"if",
"(",
"regex",
".",
"isEmpty",
"(",
")",
")",
"{",
"return",
"str",
";",
"}",
"try",
"{",
"// we should use StringBuff... | Returns a string resulting from replacing all substrings that match the regular
expression with replacement. | [
"Returns",
"a",
"string",
"resulting",
"from",
"replacing",
"all",
"substrings",
"that",
"match",
"the",
"regular",
"expression",
"with",
"replacement",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlFunctionUtils.java#L346-L366 | train | Replace the string with the given regular expression with the given replacement. | [
30522,
2270,
10763,
5164,
19723,
10288,
28139,
24759,
10732,
1006,
5164,
2358,
2099,
1010,
5164,
19723,
10288,
1010,
5164,
6110,
1007,
1063,
2065,
1006,
19723,
10288,
1012,
2003,
6633,
13876,
2100,
1006,
1007,
1007,
1063,
2709,
2358,
2099,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
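The append-replace loop in the record above is plain java.util.regex; this standalone sketch reproduces the same semantics (the record's helper additionally caches compiled patterns and returns null on failure):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class RegexpReplaceDemo {
    public static void main(String[] args) {
        StringBuffer sb = new StringBuffer(); // Matcher#appendReplacement requires a StringBuffer
        Matcher m = Pattern.compile("\\d+").matcher("a1b22c");
        while (m.find()) {
            m.appendReplacement(sb, "#"); // replace each run of digits
        }
        m.appendTail(sb);
        System.out.println(sb); // prints a#b#c
    }
}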
looly/hutool | hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java | ImgUtil.toBufferedImage | public static BufferedImage toBufferedImage(Image img) {
if (img instanceof BufferedImage) {
return (BufferedImage) img;
}
return copyImage(img, BufferedImage.TYPE_INT_RGB);
} | java | public static BufferedImage toBufferedImage(Image img) {
if (img instanceof BufferedImage) {
return (BufferedImage) img;
}
return copyImage(img, BufferedImage.TYPE_INT_RGB);
} | [
"public",
"static",
"BufferedImage",
"toBufferedImage",
"(",
"Image",
"img",
")",
"{",
"if",
"(",
"img",
"instanceof",
"BufferedImage",
")",
"{",
"return",
"(",
"BufferedImage",
")",
"img",
";",
"}",
"return",
"copyImage",
"(",
"img",
",",
"BufferedImage",
"... | {@link Image} 转 {@link BufferedImage}<br>
首先尝试强转,否则新建一个{@link BufferedImage}后重新绘制
@param img {@link Image}
@return {@link BufferedImage} | [
"{",
"@link",
"Image",
"}",
"转",
"{",
"@link",
"BufferedImage",
"}",
"<br",
">",
"首先尝试强转,否则新建一个",
"{",
"@link",
"BufferedImage",
"}",
"后重新绘制"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java#L1158-L1164 | train | Converts an image to a BufferedImage. | [
30522,
2270,
10763,
17698,
2098,
9581,
3351,
2000,
8569,
12494,
2098,
9581,
3351,
1006,
3746,
10047,
2290,
1007,
1063,
2065,
1006,
10047,
2290,
6013,
11253,
17698,
2098,
9581,
3351,
1007,
1063,
2709,
1006,
17698,
2098,
9581,
3351,
30524,
61... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
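A small usage sketch for the conversion above; the input file is hypothetical, and because hutool's ImgUtil.read already yields a BufferedImage, the cheap cast branch is the one taken here:

import cn.hutool.core.img.ImgUtil;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.File;

public class ToBufferedDemo {
    public static void main(String[] args) {
        Image img = ImgUtil.read(new File("photo.jpg")); // hypothetical input file
        BufferedImage buffered = ImgUtil.toBufferedImage(img); // cast path; otherwise redrawn as TYPE_INT_RGB
        System.out.println(buffered.getWidth() + "x" + buffered.getHeight());
    }
}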
looly/hutool | hutool-core/src/main/java/cn/hutool/core/lang/Console.java | Console.error | public static void error(Throwable t, String template, Object... values) {
err.println(StrUtil.format(template, values));
if (null != t) {
t.printStackTrace(err);
err.flush();
}
} | java | public static void error(Throwable t, String template, Object... values) {
err.println(StrUtil.format(template, values));
if (null != t) {
t.printStackTrace(err);
err.flush();
}
} | [
"public",
"static",
"void",
"error",
"(",
"Throwable",
"t",
",",
"String",
"template",
",",
"Object",
"...",
"values",
")",
"{",
"err",
".",
"println",
"(",
"StrUtil",
".",
"format",
"(",
"template",
",",
"values",
")",
")",
";",
"if",
"(",
"null",
"... | 同 System.err.println()方法,打印控制台日志
@param t 异常对象
@param template 文本模板,被替换的部分用 {} 表示
@param values 值 | [
"同",
"System",
".",
"err",
".",
"println",
"()",
"方法,打印控制台日志"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/lang/Console.java#L154-L160 | train | Print an error message with a throwable. | [
30522,
2270,
10763,
11675,
7561,
1006,
5466,
3085,
1056,
1010,
5164,
23561,
1010,
4874,
1012,
1012,
1012,
5300,
1007,
1063,
9413,
2099,
1012,
6140,
19666,
1006,
2358,
22134,
4014,
1012,
4289,
1006,
23561,
1010,
5300,
1007,
1007,
1025,
2065,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
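A minimal sketch of the template-plus-throwable overload above; the parse failure is just a convenient way to obtain an exception:

import cn.hutool.core.lang.Console;

public class ConsoleDemo {
    public static void main(String[] args) {
        try {
            Integer.parseInt("not-a-number");
        } catch (NumberFormatException e) {
            // {} placeholders are filled from the varargs, then the stack trace is printed to stderr.
            Console.error(e, "failed to parse value [{}]", "not-a-number");
        }
    }
}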
apache/flink | flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/internals/AbstractFetcher.java | AbstractFetcher.emitRecord | protected void emitRecord(T record, KafkaTopicPartitionState<KPH> partitionState, long offset) throws Exception {
if (record != null) {
if (timestampWatermarkMode == NO_TIMESTAMPS_WATERMARKS) {
// fast path logic, in case there are no watermarks
// emit the record, using the checkpoint lock to guarantee
// atomicity of record emission and offset state update
synchronized (checkpointLock) {
sourceContext.collect(record);
partitionState.setOffset(offset);
}
} else if (timestampWatermarkMode == PERIODIC_WATERMARKS) {
emitRecordWithTimestampAndPeriodicWatermark(record, partitionState, offset, Long.MIN_VALUE);
} else {
emitRecordWithTimestampAndPunctuatedWatermark(record, partitionState, offset, Long.MIN_VALUE);
}
} else {
// if the record is null, simply just update the offset state for partition
synchronized (checkpointLock) {
partitionState.setOffset(offset);
}
}
} | java | protected void emitRecord(T record, KafkaTopicPartitionState<KPH> partitionState, long offset) throws Exception {
if (record != null) {
if (timestampWatermarkMode == NO_TIMESTAMPS_WATERMARKS) {
// fast path logic, in case there are no watermarks
// emit the record, using the checkpoint lock to guarantee
// atomicity of record emission and offset state update
synchronized (checkpointLock) {
sourceContext.collect(record);
partitionState.setOffset(offset);
}
} else if (timestampWatermarkMode == PERIODIC_WATERMARKS) {
emitRecordWithTimestampAndPeriodicWatermark(record, partitionState, offset, Long.MIN_VALUE);
} else {
emitRecordWithTimestampAndPunctuatedWatermark(record, partitionState, offset, Long.MIN_VALUE);
}
} else {
// if the record is null, simply just update the offset state for partition
synchronized (checkpointLock) {
partitionState.setOffset(offset);
}
}
} | [
"protected",
"void",
"emitRecord",
"(",
"T",
"record",
",",
"KafkaTopicPartitionState",
"<",
"KPH",
">",
"partitionState",
",",
"long",
"offset",
")",
"throws",
"Exception",
"{",
"if",
"(",
"record",
"!=",
"null",
")",
"{",
"if",
"(",
"timestampWatermarkMode",... | Emits a record without attaching an existing timestamp to it.
<p>Implementation Note: This method is kept brief to be JIT inlining friendly.
That makes the fast path efficient, the extended paths are called as separate methods.
@param record The record to emit
@param partitionState The state of the Kafka partition from which the record was fetched
@param offset The offset of the record | [
"Emits",
"a",
"record",
"without",
"attaching",
"an",
"existing",
"timestamp",
"to",
"it",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/internals/AbstractFetcher.java#L353-L376 | train | Emits a single record to the source context. | [
30522,
5123,
11675,
12495,
7913,
27108,
2094,
1006,
1056,
2501,
1010,
10556,
24316,
10610,
24330,
19362,
3775,
9285,
12259,
1026,
1047,
8458,
1028,
13571,
9153,
2618,
1010,
2146,
16396,
1007,
11618,
6453,
1063,
2065,
1006,
2501,
999,
1027,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
alibaba/canal | parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/DatabaseTableMeta.java | DatabaseTableMeta.dumpTableMeta | private boolean dumpTableMeta(MysqlConnection connection, final CanalEventFilter filter) {
try {
ResultSetPacket packet = connection.query("show databases");
List<String> schemas = new ArrayList<String>();
for (String schema : packet.getFieldValues()) {
schemas.add(schema);
}
for (String schema : schemas) {
// filter views
packet = connection.query("show full tables from `" + schema + "` where Table_type = 'BASE TABLE'");
List<String> tables = new ArrayList<String>();
for (String table : packet.getFieldValues()) {
if ("BASE TABLE".equalsIgnoreCase(table)) {
continue;
}
String fullName = schema + "." + table;
if (blackFilter == null || !blackFilter.filter(fullName)) {
if (filter == null || filter.filter(fullName)) {
tables.add(table);
}
}
}
if (tables.isEmpty()) {
continue;
}
StringBuilder sql = new StringBuilder();
for (String table : tables) {
sql.append("show create table `" + schema + "`.`" + table + "`;");
}
List<ResultSetPacket> packets = connection.queryMulti(sql.toString());
for (ResultSetPacket onePacket : packets) {
if (onePacket.getFieldValues().size() > 1) {
String oneTableCreateSql = onePacket.getFieldValues().get(1);
memoryTableMeta.apply(INIT_POSITION, schema, oneTableCreateSql, null);
}
}
}
return true;
} catch (IOException e) {
throw new CanalParseException(e);
}
} | java | private boolean dumpTableMeta(MysqlConnection connection, final CanalEventFilter filter) {
try {
ResultSetPacket packet = connection.query("show databases");
List<String> schemas = new ArrayList<String>();
for (String schema : packet.getFieldValues()) {
schemas.add(schema);
}
for (String schema : schemas) {
// filter views
packet = connection.query("show full tables from `" + schema + "` where Table_type = 'BASE TABLE'");
List<String> tables = new ArrayList<String>();
for (String table : packet.getFieldValues()) {
if ("BASE TABLE".equalsIgnoreCase(table)) {
continue;
}
String fullName = schema + "." + table;
if (blackFilter == null || !blackFilter.filter(fullName)) {
if (filter == null || filter.filter(fullName)) {
tables.add(table);
}
}
}
if (tables.isEmpty()) {
continue;
}
StringBuilder sql = new StringBuilder();
for (String table : tables) {
sql.append("show create table `" + schema + "`.`" + table + "`;");
}
List<ResultSetPacket> packets = connection.queryMulti(sql.toString());
for (ResultSetPacket onePacket : packets) {
if (onePacket.getFieldValues().size() > 1) {
String oneTableCreateSql = onePacket.getFieldValues().get(1);
memoryTableMeta.apply(INIT_POSITION, schema, oneTableCreateSql, null);
}
}
}
return true;
} catch (IOException e) {
throw new CanalParseException(e);
}
} | [
"private",
"boolean",
"dumpTableMeta",
"(",
"MysqlConnection",
"connection",
",",
"final",
"CanalEventFilter",
"filter",
")",
"{",
"try",
"{",
"ResultSetPacket",
"packet",
"=",
"connection",
".",
"query",
"(",
"\"show databases\"",
")",
";",
"List",
"<",
"String",... | 初始化的时候dump一下表结构 | [
"初始化的时候dump一下表结构"
] | 8f088cddc0755f4350c5aaae95c6e4002d90a40f | https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/DatabaseTableMeta.java#L197-L243 | train | Dump table meta. | [
30522,
2797,
22017,
20898,
15653,
10880,
11368,
2050,
1006,
2026,
2015,
4160,
22499,
10087,
7542,
4434,
1010,
2345,
5033,
18697,
3372,
8873,
21928,
11307,
1007,
1063,
3046,
1063,
3463,
3388,
23947,
3388,
14771,
1027,
4434,
1012,
23032,
1006,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
looly/hutool | hutool-poi/src/main/java/cn/hutool/poi/excel/ExcelReader.java | ExcelReader.getExtractor | public ExcelExtractor getExtractor() {
ExcelExtractor extractor;
Workbook wb = this.workbook;
if (wb instanceof HSSFWorkbook) {
extractor = new org.apache.poi.hssf.extractor.ExcelExtractor((HSSFWorkbook) wb);
} else {
extractor = new XSSFExcelExtractor((XSSFWorkbook) wb);
}
return extractor;
} | java | public ExcelExtractor getExtractor() {
ExcelExtractor extractor;
Workbook wb = this.workbook;
if (wb instanceof HSSFWorkbook) {
extractor = new org.apache.poi.hssf.extractor.ExcelExtractor((HSSFWorkbook) wb);
} else {
extractor = new XSSFExcelExtractor((XSSFWorkbook) wb);
}
return extractor;
} | [
"public",
"ExcelExtractor",
"getExtractor",
"(",
")",
"{",
"ExcelExtractor",
"extractor",
";",
"Workbook",
"wb",
"=",
"this",
".",
"workbook",
";",
"if",
"(",
"wb",
"instanceof",
"HSSFWorkbook",
")",
"{",
"extractor",
"=",
"new",
"org",
".",
"apache",
".",
... | 获取 {@link ExcelExtractor} 对象
@return {@link ExcelExtractor}
@since 4.1.0 | [
"获取",
"{",
"@link",
"ExcelExtractor",
"}",
"对象"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-poi/src/main/java/cn/hutool/poi/excel/ExcelReader.java#L383-L392 | train | Gets the extractor object. | [
30522,
2270,
24970,
10288,
6494,
16761,
2131,
10288,
6494,
16761,
1006,
1007,
1063,
24970,
10288,
6494,
16761,
14817,
2953,
1025,
2147,
8654,
25610,
1027,
2023,
1012,
2147,
8654,
1025,
2065,
1006,
25610,
6013,
11253,
26236,
22747,
6198,
8654,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
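A hedged usage sketch for the record above; the workbook path is hypothetical, and ExcelExtractor is assumed to be POI's org.apache.poi.ss.extractor.ExcelExtractor, as the HSSF/XSSF branches suggest:

import cn.hutool.poi.excel.ExcelReader;
import cn.hutool.poi.excel.ExcelUtil;
import org.apache.poi.ss.extractor.ExcelExtractor;

public class ExtractorDemo {
    public static void main(String[] args) {
        ExcelReader reader = ExcelUtil.getReader("book.xlsx"); // hypothetical workbook
        ExcelExtractor extractor = reader.getExtractor();
        System.out.println(extractor.getText()); // dump all cell text
        reader.close();
    }
}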
apache/flink | flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/PatternStream.java | PatternStream.process | public <R> SingleOutputStreamOperator<R> process(final PatternProcessFunction<T, R> patternProcessFunction) {
final TypeInformation<R> returnType = TypeExtractor.getUnaryOperatorReturnType(
patternProcessFunction,
PatternProcessFunction.class,
0,
1,
TypeExtractor.NO_INDEX,
builder.getInputType(),
null,
false);
return process(patternProcessFunction, returnType);
} | java | public <R> SingleOutputStreamOperator<R> process(final PatternProcessFunction<T, R> patternProcessFunction) {
final TypeInformation<R> returnType = TypeExtractor.getUnaryOperatorReturnType(
patternProcessFunction,
PatternProcessFunction.class,
0,
1,
TypeExtractor.NO_INDEX,
builder.getInputType(),
null,
false);
return process(patternProcessFunction, returnType);
} | [
"public",
"<",
"R",
">",
"SingleOutputStreamOperator",
"<",
"R",
">",
"process",
"(",
"final",
"PatternProcessFunction",
"<",
"T",
",",
"R",
">",
"patternProcessFunction",
")",
"{",
"final",
"TypeInformation",
"<",
"R",
">",
"returnType",
"=",
"TypeExtractor",
... | Applies a process function to the detected pattern sequence. For each pattern sequence the
provided {@link PatternProcessFunction} is called. In order to process timed out partial matches as well one can
use {@link TimedOutPartialMatchHandler} as additional interface.
@param patternProcessFunction The pattern process function which is called for each detected
pattern sequence.
@param <R> Type of the resulting elements
@return {@link DataStream} which contains the resulting elements from the pattern process
function. | [
"Applies",
"a",
"process",
"function",
"to",
"the",
"detected",
"pattern",
"sequence",
".",
"For",
"each",
"pattern",
"sequence",
"the",
"provided",
"{",
"@link",
"PatternProcessFunction",
"}",
"is",
"called",
".",
"In",
"order",
"to",
"process",
"timed",
"out... | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/PatternStream.java#L82-L94 | train | Process a single - output pattern process function. | [
30522,
2270,
1026,
1054,
1028,
2309,
5833,
18780,
21422,
25918,
8844,
1026,
1054,
1028,
2832,
1006,
2345,
5418,
21572,
9623,
22747,
4609,
7542,
1026,
1056,
1010,
1054,
1028,
5418,
21572,
9623,
22747,
4609,
7542,
1007,
1063,
2345,
2828,
2378... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
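A hedged fragment for the API above; patternStream would come from CEP.pattern(input, pattern), and Event is a hypothetical input POJO:

import java.util.List;
import java.util.Map;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.functions.PatternProcessFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.util.Collector;

class Event {} // hypothetical input type

class CepDemo {
    static SingleOutputStreamOperator<String> toAlerts(PatternStream<Event> patternStream) {
        // One callback per detected pattern sequence, as the docstring describes.
        return patternStream.process(new PatternProcessFunction<Event, String>() {
            @Override
            public void processMatch(Map<String, List<Event>> match, Context ctx, Collector<String> out) {
                out.collect("match with " + match.size() + " pattern states");
            }
        });
    }
}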
redisson/redisson | redisson/src/main/java/org/redisson/api/CronSchedule.java | CronSchedule.weeklyOnDayAndHourAndMinute | public static CronSchedule weeklyOnDayAndHourAndMinute(int hour, int minute, Integer... daysOfWeek) {
if (daysOfWeek == null || daysOfWeek.length == 0) {
throw new IllegalArgumentException("You must specify at least one day of week.");
}
String expression = String.format("0 %d %d ? * %d", minute, hour, daysOfWeek[0]);
for (int i = 1; i < daysOfWeek.length; i++) {
expression = expression + "," + daysOfWeek[i];
}
return of(expression);
} | java | public static CronSchedule weeklyOnDayAndHourAndMinute(int hour, int minute, Integer... daysOfWeek) {
if (daysOfWeek == null || daysOfWeek.length == 0) {
throw new IllegalArgumentException("You must specify at least one day of week.");
}
String expression = String.format("0 %d %d ? * %d", minute, hour, daysOfWeek[0]);
for (int i = 1; i < daysOfWeek.length; i++) {
expression = expression + "," + daysOfWeek[i];
}
return of(expression);
} | [
"public",
"static",
"CronSchedule",
"weeklyOnDayAndHourAndMinute",
"(",
"int",
"hour",
",",
"int",
"minute",
",",
"Integer",
"...",
"daysOfWeek",
")",
"{",
"if",
"(",
"daysOfWeek",
"==",
"null",
"||",
"daysOfWeek",
".",
"length",
"==",
"0",
")",
"{",
"throw"... | Creates cron expression which schedule task execution
every given days of the week at the given time.
Use Calendar object constants to define day.
@param hour of schedule
@param minute of schedule
@param daysOfWeek - Calendar object constants
@return object | [
"Creates",
"cron",
"expression",
"which",
"schedule",
"task",
"execution",
"every",
"given",
"days",
"of",
"the",
"week",
"at",
"the",
"given",
"time",
".",
"Use",
"Calendar",
"object",
"constants",
"to",
"define",
"day",
"."
] | d3acc0249b2d5d658d36d99e2c808ce49332ea44 | https://github.com/redisson/redisson/blob/d3acc0249b2d5d658d36d99e2c808ce49332ea44/redisson/src/main/java/org/redisson/api/CronSchedule.java#L75-L86 | train | Create a CronSchedule that schedules a single instance of a CronSchedule for a single hour and minute on a given number of days of the week. | [
30522,
2270,
10763,
13675,
5644,
7690,
9307,
4882,
29067,
7054,
16425,
8162,
5685,
10020,
10421,
1006,
20014,
3178,
1010,
20014,
3371,
1010,
16109,
1012,
1012,
1012,
2420,
11253,
28075,
1007,
1063,
2065,
1006,
2420,
11253,
28075,
1027,
1027,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
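A short sketch of the factory above; Calendar constants (MONDAY == 2, FRIDAY == 6) select the days, matching the "0 30 10 ? * 2,6" expression the method builds:

import java.util.Calendar;
import org.redisson.api.CronSchedule;

public class CronDemo {
    public static void main(String[] args) {
        // Fire at 10:30 every Monday and Friday.
        CronSchedule schedule = CronSchedule.weeklyOnDayAndHourAndMinute(10, 30,
                Calendar.MONDAY, Calendar.FRIDAY);
        // Hand `schedule` to RScheduledExecutorService#schedule(...) to run a task on it.
        System.out.println(schedule);
    }
}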
alibaba/canal | parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java | MysqlConnection.seek | public void seek(String binlogfilename, Long binlogPosition, String gtid, SinkFunction func) throws IOException {
updateSettings();
loadBinlogChecksum();
sendBinlogDump(binlogfilename, binlogPosition);
DirectLogFetcher fetcher = new DirectLogFetcher(connector.getReceiveBufferSize());
fetcher.start(connector.getChannel());
LogDecoder decoder = new LogDecoder();
decoder.handle(LogEvent.ROTATE_EVENT);
decoder.handle(LogEvent.FORMAT_DESCRIPTION_EVENT);
decoder.handle(LogEvent.QUERY_EVENT);
decoder.handle(LogEvent.XID_EVENT);
LogContext context = new LogContext();
        // If the entry position carries a gtid, use the passed-in gtid as the base for
        // assembling the gtidSet; otherwise, with gtid and tsdb both enabled, the gtid would be lost,
        // and when gtids have been purged on the source database, errors like the following appear:
// 'errno = 1236, sqlstate = HY000 errmsg = The slave is connecting
// using CHANGE MASTER TO MASTER_AUTO_POSITION = 1 ...
if (StringUtils.isNotEmpty(gtid)) {
decoder.handle(LogEvent.GTID_LOG_EVENT);
context.setGtidSet(MysqlGTIDSet.parse(gtid));
}
context.setFormatDescription(new FormatDescriptionLogEvent(4, binlogChecksum));
while (fetcher.fetch()) {
accumulateReceivedBytes(fetcher.limit());
LogEvent event = null;
event = decoder.decode(fetcher, context);
if (event == null) {
throw new CanalParseException("parse failed");
}
if (!func.sink(event)) {
break;
}
}
} | java | public void seek(String binlogfilename, Long binlogPosition, String gtid, SinkFunction func) throws IOException {
updateSettings();
loadBinlogChecksum();
sendBinlogDump(binlogfilename, binlogPosition);
DirectLogFetcher fetcher = new DirectLogFetcher(connector.getReceiveBufferSize());
fetcher.start(connector.getChannel());
LogDecoder decoder = new LogDecoder();
decoder.handle(LogEvent.ROTATE_EVENT);
decoder.handle(LogEvent.FORMAT_DESCRIPTION_EVENT);
decoder.handle(LogEvent.QUERY_EVENT);
decoder.handle(LogEvent.XID_EVENT);
LogContext context = new LogContext();
        // If the entry position carries a gtid, use the passed-in gtid as the base for
        // assembling the gtidSet; otherwise, with gtid and tsdb both enabled, the gtid would be lost,
        // and when gtids have been purged on the source database, errors like the following appear:
// 'errno = 1236, sqlstate = HY000 errmsg = The slave is connecting
// using CHANGE MASTER TO MASTER_AUTO_POSITION = 1 ...
if (StringUtils.isNotEmpty(gtid)) {
decoder.handle(LogEvent.GTID_LOG_EVENT);
context.setGtidSet(MysqlGTIDSet.parse(gtid));
}
context.setFormatDescription(new FormatDescriptionLogEvent(4, binlogChecksum));
while (fetcher.fetch()) {
accumulateReceivedBytes(fetcher.limit());
LogEvent event = null;
event = decoder.decode(fetcher, context);
if (event == null) {
throw new CanalParseException("parse failed");
}
if (!func.sink(event)) {
break;
}
}
} | [
"public",
"void",
"seek",
"(",
"String",
"binlogfilename",
",",
"Long",
"binlogPosition",
",",
"String",
"gtid",
",",
"SinkFunction",
"func",
")",
"throws",
"IOException",
"{",
"updateSettings",
"(",
")",
";",
"loadBinlogChecksum",
"(",
")",
";",
"sendBinlogDump... | 加速主备切换时的查找速度,做一些特殊优化,比如只解析事务头或者尾 | [
"加速主备切换时的查找速度,做一些特殊优化,比如只解析事务头或者尾"
] | 8f088cddc0755f4350c5aaae95c6e4002d90a40f | https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java#L122-L157 | train | Seeks to the specified position in the specified binlog file. | [
30522,
2270,
11675,
6148,
1006,
5164,
8026,
21197,
8873,
20844,
4168,
1010,
2146,
8026,
21197,
26994,
1010,
5164,
14181,
3593,
1010,
7752,
11263,
27989,
4569,
2278,
1007,
11618,
22834,
10288,
24422,
1063,
14409,
18319,
3070,
2015,
1006,
1007,... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
spring-projects/spring-boot | spring-boot-project/spring-boot/src/main/java/org/springframework/boot/task/TaskExecutorBuilder.java | TaskExecutorBuilder.build | public <T extends ThreadPoolTaskExecutor> T build(Class<T> taskExecutorClass) {
return configure(BeanUtils.instantiateClass(taskExecutorClass));
} | java | public <T extends ThreadPoolTaskExecutor> T build(Class<T> taskExecutorClass) {
return configure(BeanUtils.instantiateClass(taskExecutorClass));
} | [
"public",
"<",
"T",
"extends",
"ThreadPoolTaskExecutor",
">",
"T",
"build",
"(",
"Class",
"<",
"T",
">",
"taskExecutorClass",
")",
"{",
"return",
"configure",
"(",
"BeanUtils",
".",
"instantiateClass",
"(",
"taskExecutorClass",
")",
")",
";",
"}"
] | Build a new {@link ThreadPoolTaskExecutor} instance of the specified type and
configure it using this builder.
@param <T> the type of task executor
@param taskExecutorClass the template type to create
@return a configured {@link ThreadPoolTaskExecutor} instance.
@see #build()
@see #configure(ThreadPoolTaskExecutor) | [
"Build",
"a",
"new",
"{"
] | 0b27f7c70e164b2b1a96477f1d9c1acba56790c1 | https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/task/TaskExecutorBuilder.java#L305-L307 | train | Build a new instance of the given class. | [
30522,
2270,
1026,
1056,
8908,
11689,
16869,
10230,
3489,
2595,
8586,
16161,
2099,
1028,
1056,
3857,
1006,
2465,
1026,
1056,
1028,
4708,
10288,
8586,
16161,
11890,
27102,
1007,
1063,
2709,
9530,
8873,
27390,
2063,
1006,
14068,
21823,
4877,
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | [
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100... |
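A minimal sketch of the typed build(Class) overload above; the pool settings are arbitrary example values:

import org.springframework.boot.task.TaskExecutorBuilder;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

public class ExecutorDemo {
    public static void main(String[] args) {
        TaskExecutorBuilder builder = new TaskExecutorBuilder()
                .corePoolSize(4)
                .threadNamePrefix("demo-");
        // The typed overload instantiates the given subtype and then configures it.
        ThreadPoolTaskExecutor executor = builder.build(ThreadPoolTaskExecutor.class);
        executor.initialize();
        executor.execute(() -> System.out.println("hello from " + Thread.currentThread().getName()));
        executor.shutdown();
    }
}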
apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/RemoteInputChannel.java | RemoteInputChannel.releaseAllResources | @Override
void releaseAllResources() throws IOException {
if (isReleased.compareAndSet(false, true)) {
// Gather all exclusive buffers and recycle them to global pool in batch, because
// we do not want to trigger redistribution of buffers after each recycle.
final List<MemorySegment> exclusiveRecyclingSegments = new ArrayList<>();
synchronized (receivedBuffers) {
Buffer buffer;
while ((buffer = receivedBuffers.poll()) != null) {
if (buffer.getRecycler() == this) {
exclusiveRecyclingSegments.add(buffer.getMemorySegment());
} else {
buffer.recycleBuffer();
}
}
}
synchronized (bufferQueue) {
bufferQueue.releaseAll(exclusiveRecyclingSegments);
}
if (exclusiveRecyclingSegments.size() > 0) {
inputGate.returnExclusiveSegments(exclusiveRecyclingSegments);
}
// The released flag has to be set before closing the connection to ensure that
// buffers received concurrently with closing are properly recycled.
if (partitionRequestClient != null) {
partitionRequestClient.close(this);
} else {
connectionManager.closeOpenChannelConnections(connectionId);
}
}
} | java | @Override
void releaseAllResources() throws IOException {
if (isReleased.compareAndSet(false, true)) {
// Gather all exclusive buffers and recycle them to global pool in batch, because
// we do not want to trigger redistribution of buffers after each recycle.
final List<MemorySegment> exclusiveRecyclingSegments = new ArrayList<>();
synchronized (receivedBuffers) {
Buffer buffer;
while ((buffer = receivedBuffers.poll()) != null) {
if (buffer.getRecycler() == this) {
exclusiveRecyclingSegments.add(buffer.getMemorySegment());
} else {
buffer.recycleBuffer();
}
}
}
synchronized (bufferQueue) {
bufferQueue.releaseAll(exclusiveRecyclingSegments);
}
if (exclusiveRecyclingSegments.size() > 0) {
inputGate.returnExclusiveSegments(exclusiveRecyclingSegments);
}
// The released flag has to be set before closing the connection to ensure that
// buffers received concurrently with closing are properly recycled.
if (partitionRequestClient != null) {
partitionRequestClient.close(this);
} else {
connectionManager.closeOpenChannelConnections(connectionId);
}
}
} | [
"@",
"Override",
"void",
"releaseAllResources",
"(",
")",
"throws",
"IOException",
"{",
"if",
"(",
"isReleased",
".",
"compareAndSet",
"(",
"false",
",",
"true",
")",
")",
"{",
"// Gather all exclusive buffers and recycle them to global pool in batch, because",
"// we do ... | Releases all exclusive and floating buffers, closes the partition request client. | [
"Releases",
"all",
"exclusive",
"and",
"floating",
"buffers",
"closes",
"the",
"partition",
"request",
"client",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/RemoteInputChannel.java#L236-L270 | train | Releases all resources. | [
30522, 1030, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
hankcs/HanLP | src/main/java/com/hankcs/hanlp/collection/AhoCorasick/AhoCorasickDoubleArrayTrie.java | AhoCorasickDoubleArrayTrie.fetch | private int fetch(State parent, List<Map.Entry<Integer, State>> siblings)
{
if (parent.isAcceptable())
{
State fakeNode = new State(-(parent.getDepth() + 1)); // 此节点是parent的子节点,同时具备parent的输出
fakeNode.addEmit(parent.getLargestValueId());
siblings.add(new AbstractMap.SimpleEntry<Integer, State>(0, fakeNode));
}
for (Map.Entry<Character, State> entry : parent.getSuccess().entrySet())
{
siblings.add(new AbstractMap.SimpleEntry<Integer, State>(entry.getKey() + 1, entry.getValue()));
}
return siblings.size();
} | java | private int fetch(State parent, List<Map.Entry<Integer, State>> siblings)
{
if (parent.isAcceptable())
{
State fakeNode = new State(-(parent.getDepth() + 1)); // 此节点是parent的子节点,同时具备parent的输出
fakeNode.addEmit(parent.getLargestValueId());
siblings.add(new AbstractMap.SimpleEntry<Integer, State>(0, fakeNode));
}
for (Map.Entry<Character, State> entry : parent.getSuccess().entrySet())
{
siblings.add(new AbstractMap.SimpleEntry<Integer, State>(entry.getKey() + 1, entry.getValue()));
}
return siblings.size();
} | [
"private",
"int",
"fetch",
"(",
"State",
"parent",
",",
"List",
"<",
"Map",
".",
"Entry",
"<",
"Integer",
",",
"State",
">",
">",
"siblings",
")",
"{",
"if",
"(",
"parent",
".",
"isAcceptable",
"(",
")",
")",
"{",
"State",
"fakeNode",
"=",
"new",
"... | 获取直接相连的子节点
@param parent 父节点
@param siblings (子)兄弟节点
@return 兄弟节点个数 | [
"获取直接相连的子节点"
] | a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce | https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/collection/AhoCorasick/AhoCorasickDoubleArrayTrie.java#L486-L499 | train | Fetch the number of siblings from the parent. | [
30522, 2797, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
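fetch(...) in the record above is a private step of the double-array construction. A sketch of the typical public usage, assuming HanLP's build/parseText API and its Hit fields (begin, end, value) as I understand them:

import com.hankcs.hanlp.collection.AhoCorasick.AhoCorasickDoubleArrayTrie;
import java.util.TreeMap;

class AcdatDemo {
    public static void main(String[] args) {
        TreeMap<String, String> dict = new TreeMap<>();
        dict.put("he", "PRON");
        dict.put("hers", "PRON");
        AhoCorasickDoubleArrayTrie<String> acdat = new AhoCorasickDoubleArrayTrie<>();
        acdat.build(dict); // internally walks states; fetch(...) gathers each state's child nodes
        for (AhoCorasickDoubleArrayTrie.Hit<String> hit : acdat.parseText("ushers")) {
            System.out.println(hit.begin + ".." + hit.end + " -> " + hit.value);
        }
    }
}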
looly/hutool | hutool-core/src/main/java/cn/hutool/core/lang/Validator.java | Validator.validateLetter | public static <T extends CharSequence> T validateLetter(T value, String errorMsg) throws ValidateException {
if (false == isLetter(value)) {
throw new ValidateException(errorMsg);
}
return value;
} | java | public static <T extends CharSequence> T validateLetter(T value, String errorMsg) throws ValidateException {
if (false == isLetter(value)) {
throw new ValidateException(errorMsg);
}
return value;
} | [
"public",
"static",
"<",
"T",
"extends",
"CharSequence",
">",
"T",
"validateLetter",
"(",
"T",
"value",
",",
"String",
"errorMsg",
")",
"throws",
"ValidateException",
"{",
"if",
"(",
"false",
"==",
"isLetter",
"(",
"value",
")",
")",
"{",
"throw",
"new",
... | 验证是否全部为字母组成,包括大写和小写字母和汉字
@param <T> 字符串类型
@param value 表单值
@param errorMsg 验证错误的信息
@return 验证后的值
@throws ValidateException 验证异常
@since 3.3.0 | [
"验证是否全部为字母组成,包括大写和小写字母和汉字"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/lang/Validator.java#L453-L458 | train | Validate a string to ensure that it is a letter. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
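A minimal sketch of calling validateLetter from the record above; it returns the value on success and throws ValidateException otherwise (the messages are illustrative):

import cn.hutool.core.exceptions.ValidateException;
import cn.hutool.core.lang.Validator;

class ValidateLetterDemo {
    public static void main(String[] args) {
        System.out.println(Validator.validateLetter("HutoolTest", "must be letters only"));
        try {
            Validator.validateLetter("abc123", "must be letters only"); // digits are rejected
        } catch (ValidateException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}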
netty/netty | codec-http2/src/main/java/io/netty/handler/codec/http2/HttpConversionUtil.java | HttpConversionUtil.toHttpRequest | public static HttpRequest toHttpRequest(int streamId, Http2Headers http2Headers, boolean validateHttpHeaders)
throws Http2Exception {
// HTTP/2 does not define a way to carry the version identifier that is included in the HTTP/1.1 request line.
final CharSequence method = checkNotNull(http2Headers.method(),
"method header cannot be null in conversion to HTTP/1.x");
final CharSequence path = checkNotNull(http2Headers.path(),
"path header cannot be null in conversion to HTTP/1.x");
HttpRequest msg = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.valueOf(method.toString()),
path.toString(), validateHttpHeaders);
try {
addHttp2ToHttpHeaders(streamId, http2Headers, msg.headers(), msg.protocolVersion(), false, true);
} catch (Http2Exception e) {
throw e;
} catch (Throwable t) {
throw streamError(streamId, PROTOCOL_ERROR, t, "HTTP/2 to HTTP/1.x headers conversion error");
}
return msg;
} | java | public static HttpRequest toHttpRequest(int streamId, Http2Headers http2Headers, boolean validateHttpHeaders)
throws Http2Exception {
// HTTP/2 does not define a way to carry the version identifier that is included in the HTTP/1.1 request line.
final CharSequence method = checkNotNull(http2Headers.method(),
"method header cannot be null in conversion to HTTP/1.x");
final CharSequence path = checkNotNull(http2Headers.path(),
"path header cannot be null in conversion to HTTP/1.x");
HttpRequest msg = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.valueOf(method.toString()),
path.toString(), validateHttpHeaders);
try {
addHttp2ToHttpHeaders(streamId, http2Headers, msg.headers(), msg.protocolVersion(), false, true);
} catch (Http2Exception e) {
throw e;
} catch (Throwable t) {
throw streamError(streamId, PROTOCOL_ERROR, t, "HTTP/2 to HTTP/1.x headers conversion error");
}
return msg;
} | [
"public",
"static",
"HttpRequest",
"toHttpRequest",
"(",
"int",
"streamId",
",",
"Http2Headers",
"http2Headers",
",",
"boolean",
"validateHttpHeaders",
")",
"throws",
"Http2Exception",
"{",
"// HTTP/2 does not define a way to carry the version identifier that is included in the HTT... | Create a new object to contain the request data.
@param streamId The stream associated with the request
@param http2Headers The initial set of HTTP/2 headers to create the request with
@param validateHttpHeaders <ul>
<li>{@code true} to validate HTTP headers in the http-codec</li>
<li>{@code false} not to validate HTTP headers in the http-codec</li>
</ul>
@return A new request object which represents headers for a chunked request
@throws Http2Exception see {@link #addHttp2ToHttpHeaders(int, Http2Headers, FullHttpMessage, boolean)} | [
"Create",
"a",
"new",
"object",
"to",
"contain",
"the",
"request",
"data",
"."
] | ba06eafa1c1824bd154f1a380019e7ea2edf3c4c | https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/HttpConversionUtil.java#L280-L297 | train | Converts a Http2 request to an HTTP request. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
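A sketch of converting HTTP/2 headers to an HTTP/1.x request with the method in the record above; the header values and stream id are illustrative:

import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.HttpConversionUtil;

class Http2ToHttp1Demo {
    public static void main(String[] args) throws Exception {
        Http2Headers headers = new DefaultHttp2Headers()
                .method("GET").path("/index.html").scheme("https").authority("example.com");
        // Stream id 3 is arbitrary; validateHttpHeaders=true turns on http-codec validation.
        HttpRequest request = HttpConversionUtil.toHttpRequest(3, headers, true);
        System.out.println(request.method() + " " + request.uri() + " " + request.protocolVersion());
    }
}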
looly/hutool | hutool-core/src/main/java/cn/hutool/core/io/watch/WatchUtil.java | WatchUtil.createAll | public static WatchMonitor createAll(URI uri, int maxDepth, Watcher watcher) {
return createAll(Paths.get(uri), maxDepth, watcher);
} | java | public static WatchMonitor createAll(URI uri, int maxDepth, Watcher watcher) {
return createAll(Paths.get(uri), maxDepth, watcher);
} | [
"public",
"static",
"WatchMonitor",
"createAll",
"(",
"URI",
"uri",
",",
"int",
"maxDepth",
",",
"Watcher",
"watcher",
")",
"{",
"return",
"createAll",
"(",
"Paths",
".",
"get",
"(",
"uri",
")",
",",
"maxDepth",
",",
"watcher",
")",
";",
"}"
] | 创建并初始化监听,监听所有事件
@param uri URI
@param maxDepth 当监听目录时,监听目录的最大深度,当设置值为1(或小于1)时,表示不递归监听子目录
@param watcher {@link Watcher}
@return {@link WatchMonitor} | [
"创建并初始化监听,监听所有事件"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/watch/WatchUtil.java#L178-L180 | train | Creates a WatchMonitor for the given URI and max depth. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
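A sketch of watching a directory with the createAll overload above, assuming hutool's SimpleWatcher adapter class; the path is illustrative:

import cn.hutool.core.io.watch.SimpleWatcher;
import cn.hutool.core.io.watch.WatchMonitor;
import cn.hutool.core.io.watch.WatchUtil;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.WatchEvent;

class WatchAllDemo {
    public static void main(String[] args) {
        WatchMonitor monitor = WatchUtil.createAll(URI.create("file:///tmp"), 1, new SimpleWatcher() {
            @Override
            public void onModify(WatchEvent<?> event, Path currentPath) {
                System.out.println("modified: " + event.context());
            }
        });
        // WatchMonitor is a Thread; per the docstring, maxDepth=1 means no recursion into subdirectories.
        monitor.start();
    }
}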
apache/flink | flink-clients/src/main/java/org/apache/flink/client/cli/CliFrontend.java | CliFrontend.getActiveCustomCommandLine | public CustomCommandLine<?> getActiveCustomCommandLine(CommandLine commandLine) {
for (CustomCommandLine<?> cli : customCommandLines) {
if (cli.isActive(commandLine)) {
return cli;
}
}
throw new IllegalStateException("No command-line ran.");
} | java | public CustomCommandLine<?> getActiveCustomCommandLine(CommandLine commandLine) {
for (CustomCommandLine<?> cli : customCommandLines) {
if (cli.isActive(commandLine)) {
return cli;
}
}
throw new IllegalStateException("No command-line ran.");
} | [
"public",
"CustomCommandLine",
"<",
"?",
">",
"getActiveCustomCommandLine",
"(",
"CommandLine",
"commandLine",
")",
"{",
"for",
"(",
"CustomCommandLine",
"<",
"?",
">",
"cli",
":",
"customCommandLines",
")",
"{",
"if",
"(",
"cli",
".",
"isActive",
"(",
"comman... | Gets the custom command-line for the arguments.
@param commandLine The input to the command-line.
@return custom command-line which is active (may only be one at a time) | [
"Gets",
"the",
"custom",
"command",
"-",
"line",
"for",
"the",
"arguments",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-clients/src/main/java/org/apache/flink/client/cli/CliFrontend.java#L1143-L1150 | train | Get the active custom command - line. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
apache/flink | flink-core/src/main/java/org/apache/flink/api/common/operators/GenericDataSourceBase.java | GenericDataSourceBase.executeOnCollections | protected List<OUT> executeOnCollections(RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception {
@SuppressWarnings("unchecked")
InputFormat<OUT, InputSplit> inputFormat = (InputFormat<OUT, InputSplit>) this.formatWrapper.getUserCodeObject();
//configure the input format
inputFormat.configure(this.parameters);
//open the input format
if (inputFormat instanceof RichInputFormat) {
((RichInputFormat) inputFormat).setRuntimeContext(ctx);
((RichInputFormat) inputFormat).openInputFormat();
}
List<OUT> result = new ArrayList<OUT>();
// splits
InputSplit[] splits = inputFormat.createInputSplits(1);
TypeSerializer<OUT> serializer = getOperatorInfo().getOutputType().createSerializer(executionConfig);
for (InputSplit split : splits) {
inputFormat.open(split);
while (!inputFormat.reachedEnd()) {
OUT next = inputFormat.nextRecord(serializer.createInstance());
if (next != null) {
result.add(serializer.copy(next));
}
}
inputFormat.close();
}
//close the input format
if (inputFormat instanceof RichInputFormat) {
((RichInputFormat) inputFormat).closeInputFormat();
}
return result;
} | java | protected List<OUT> executeOnCollections(RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception {
@SuppressWarnings("unchecked")
InputFormat<OUT, InputSplit> inputFormat = (InputFormat<OUT, InputSplit>) this.formatWrapper.getUserCodeObject();
//configure the input format
inputFormat.configure(this.parameters);
//open the input format
if (inputFormat instanceof RichInputFormat) {
((RichInputFormat) inputFormat).setRuntimeContext(ctx);
((RichInputFormat) inputFormat).openInputFormat();
}
List<OUT> result = new ArrayList<OUT>();
// splits
InputSplit[] splits = inputFormat.createInputSplits(1);
TypeSerializer<OUT> serializer = getOperatorInfo().getOutputType().createSerializer(executionConfig);
for (InputSplit split : splits) {
inputFormat.open(split);
while (!inputFormat.reachedEnd()) {
OUT next = inputFormat.nextRecord(serializer.createInstance());
if (next != null) {
result.add(serializer.copy(next));
}
}
inputFormat.close();
}
//close the input format
if (inputFormat instanceof RichInputFormat) {
((RichInputFormat) inputFormat).closeInputFormat();
}
return result;
} | [
"protected",
"List",
"<",
"OUT",
">",
"executeOnCollections",
"(",
"RuntimeContext",
"ctx",
",",
"ExecutionConfig",
"executionConfig",
")",
"throws",
"Exception",
"{",
"@",
"SuppressWarnings",
"(",
"\"unchecked\"",
")",
"InputFormat",
"<",
"OUT",
",",
"InputSplit",
... | -------------------------------------------------------------------------------------------- | [
"--------------------------------------------------------------------------------------------"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/common/operators/GenericDataSourceBase.java#L210-L247 | train | Execute on collections. | [
30522, 5123, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
apache/flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java | PojoSerializerSnapshot.newPojoSerializerIsCompatibleAfterMigration | private static <T> boolean newPojoSerializerIsCompatibleAfterMigration(
PojoSerializer<T> newPojoSerializer,
IntermediateCompatibilityResult<T> fieldSerializerCompatibility,
IntermediateCompatibilityResult<T> preExistingRegistrationsCompatibility,
LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots) {
return newPojoHasNewOrRemovedFields(fieldSerializerSnapshots, newPojoSerializer)
|| fieldSerializerCompatibility.isCompatibleAfterMigration()
|| preExistingRegistrationsCompatibility.isCompatibleAfterMigration();
} | java | private static <T> boolean newPojoSerializerIsCompatibleAfterMigration(
PojoSerializer<T> newPojoSerializer,
IntermediateCompatibilityResult<T> fieldSerializerCompatibility,
IntermediateCompatibilityResult<T> preExistingRegistrationsCompatibility,
LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots) {
return newPojoHasNewOrRemovedFields(fieldSerializerSnapshots, newPojoSerializer)
|| fieldSerializerCompatibility.isCompatibleAfterMigration()
|| preExistingRegistrationsCompatibility.isCompatibleAfterMigration();
} | [
"private",
"static",
"<",
"T",
">",
"boolean",
"newPojoSerializerIsCompatibleAfterMigration",
"(",
"PojoSerializer",
"<",
"T",
">",
"newPojoSerializer",
",",
"IntermediateCompatibilityResult",
"<",
"T",
">",
"fieldSerializerCompatibility",
",",
"IntermediateCompatibilityResul... | Checks if the new {@link PojoSerializer} is compatible after migration. | [
"Checks",
"if",
"the",
"new",
"{"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java#L345-L353 | train | Checks if the new pojo serializer is compatible after migration. | [
30522, 2797, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
apache/flink | flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/pattern/Pattern.java | Pattern.times | public Pattern<T, F> times(int times) {
checkIfNoNotPattern();
checkIfQuantifierApplied();
Preconditions.checkArgument(times > 0, "You should give a positive number greater than 0.");
this.quantifier = Quantifier.times(quantifier.getConsumingStrategy());
this.times = Times.of(times);
return this;
} | java | public Pattern<T, F> times(int times) {
checkIfNoNotPattern();
checkIfQuantifierApplied();
Preconditions.checkArgument(times > 0, "You should give a positive number greater than 0.");
this.quantifier = Quantifier.times(quantifier.getConsumingStrategy());
this.times = Times.of(times);
return this;
} | [
"public",
"Pattern",
"<",
"T",
",",
"F",
">",
"times",
"(",
"int",
"times",
")",
"{",
"checkIfNoNotPattern",
"(",
")",
";",
"checkIfQuantifierApplied",
"(",
")",
";",
"Preconditions",
".",
"checkArgument",
"(",
"times",
">",
"0",
",",
"\"You should give a po... | Specifies exact number of times that this pattern should be matched.
@param times number of times matching event must appear
@return The same pattern with number of times applied
@throws MalformedPatternException if the quantifier is not applicable to this pattern. | [
"Specifies",
"exact",
"number",
"of",
"times",
"that",
"this",
"pattern",
"should",
"be",
"matched",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/pattern/Pattern.java#L381-L388 | train | Sets the times of this pattern. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
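A sketch of the times(int) quantifier from the record above on a Flink CEP pattern; the Event type here is a stand-in for this example:

import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;

class TimesQuantifierDemo {
    static class Event { String name; }

    public static void main(String[] args) {
        // Matches exactly three "a" events; times(0) would fail the
        // "You should give a positive number greater than 0." precondition above.
        Pattern<Event, Event> threeAs = Pattern.<Event>begin("start")
                .where(new SimpleCondition<Event>() {
                    @Override
                    public boolean filter(Event e) {
                        return "a".equals(e.name);
                    }
                })
                .times(3);
        System.out.println(threeAs.getName());
    }
}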
apache/flink | flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/Runner.java | Runner.parameterize | private void parameterize(Parameterized parameterized) {
try {
parameterized.configure(parameters);
} catch (RuntimeException ex) {
throw new ProgramParametrizationException(ex.getMessage());
}
} | java | private void parameterize(Parameterized parameterized) {
try {
parameterized.configure(parameters);
} catch (RuntimeException ex) {
throw new ProgramParametrizationException(ex.getMessage());
}
} | [
"private",
"void",
"parameterize",
"(",
"Parameterized",
"parameterized",
")",
"{",
"try",
"{",
"parameterized",
".",
"configure",
"(",
"parameters",
")",
";",
"}",
"catch",
"(",
"RuntimeException",
"ex",
")",
"{",
"throw",
"new",
"ProgramParametrizationException"... | Configure a runtime component. Catch {@link RuntimeException} and
re-throw with a Flink internal exception which is processed by
CliFrontend for display to the user.
@param parameterized the component to be configured | [
"Configure",
"a",
"runtime",
"component",
".",
"Catch",
"{",
"@link",
"RuntimeException",
"}",
"and",
"re",
"-",
"throw",
"with",
"a",
"Flink",
"internal",
"exception",
"which",
"is",
"processed",
"by",
"CliFrontend",
"for",
"display",
"to",
"the",
"user",
"... | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/Runner.java#L276-L282 | train | Parameterizes the given parameterized object. | [
30522, 2797, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
looly/hutool | hutool-core/src/main/java/cn/hutool/core/io/resource/UrlResource.java | UrlResource.readStr | @Override
public String readStr(Charset charset) throws IORuntimeException{
BufferedReader reader = null;
try {
reader = getReader(charset);
return IoUtil.read(reader);
} finally {
IoUtil.close(reader);
}
} | java | @Override
public String readStr(Charset charset) throws IORuntimeException{
BufferedReader reader = null;
try {
reader = getReader(charset);
return IoUtil.read(reader);
} finally {
IoUtil.close(reader);
}
} | [
"@",
"Override",
"public",
"String",
"readStr",
"(",
"Charset",
"charset",
")",
"throws",
"IORuntimeException",
"{",
"BufferedReader",
"reader",
"=",
"null",
";",
"try",
"{",
"reader",
"=",
"getReader",
"(",
"charset",
")",
";",
"return",
"IoUtil",
".",
"rea... | ------------------------------------------------------------------------------- read | [
"-------------------------------------------------------------------------------",
"read"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/resource/UrlResource.java#L85-L94 | train | Reads a string from the underlying file using the specified encoding. | [
30522, 1030, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
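A sketch of reading a resource as a string with the readStr method above; the URL is illustrative:

import cn.hutool.core.io.resource.UrlResource;
import cn.hutool.core.util.CharsetUtil;
import java.net.URL;

class UrlResourceDemo {
    public static void main(String[] args) throws Exception {
        UrlResource resource = new UrlResource(new URL("https://example.com/"));
        // The BufferedReader is opened and closed inside readStr, as shown in the record.
        String body = resource.readStr(CharsetUtil.CHARSET_UTF_8);
        System.out.println(body.length());
    }
}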
looly/hutool | hutool-crypto/src/main/java/cn/hutool/crypto/SecureUtil.java | SecureUtil.generatePrivateKey | public static PrivateKey generatePrivateKey(KeyStore keyStore, String alias, char[] password) {
return KeyUtil.generatePrivateKey(keyStore, alias, password);
} | java | public static PrivateKey generatePrivateKey(KeyStore keyStore, String alias, char[] password) {
return KeyUtil.generatePrivateKey(keyStore, alias, password);
} | [
"public",
"static",
"PrivateKey",
"generatePrivateKey",
"(",
"KeyStore",
"keyStore",
",",
"String",
"alias",
",",
"char",
"[",
"]",
"password",
")",
"{",
"return",
"KeyUtil",
".",
"generatePrivateKey",
"(",
"keyStore",
",",
"alias",
",",
"password",
")",
";",
... | 生成私钥,仅用于非对称加密
@param keyStore {@link KeyStore}
@param alias 别名
@param password 密码
@return 私钥 {@link PrivateKey} | [
"生成私钥,仅用于非对称加密"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/SecureUtil.java#L166-L168 | train | Generates a private key from a key store. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
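A sketch of loading a keystore with plain JCA and extracting a private key via the delegating method above; the path, alias and password are illustrative:

import cn.hutool.crypto.SecureUtil;
import java.io.FileInputStream;
import java.security.KeyStore;
import java.security.PrivateKey;

class PrivateKeyDemo {
    public static void main(String[] args) throws Exception {
        char[] password = "changeit".toCharArray();
        KeyStore keyStore = KeyStore.getInstance("JKS");
        try (FileInputStream in = new FileInputStream("/path/to/keystore.jks")) {
            keyStore.load(in, password);
        }
        PrivateKey key = SecureUtil.generatePrivateKey(keyStore, "mykey", password);
        System.out.println(key.getAlgorithm());
    }
}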
apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/ResultPartition.java | ResultPartition.createSubpartitionView | public ResultSubpartitionView createSubpartitionView(int index, BufferAvailabilityListener availabilityListener) throws IOException {
int refCnt = pendingReferences.get();
checkState(refCnt != -1, "Partition released.");
checkState(refCnt > 0, "Partition not pinned.");
checkElementIndex(index, subpartitions.length, "Subpartition not found.");
ResultSubpartitionView readView = subpartitions[index].createReadView(availabilityListener);
LOG.debug("Created {}", readView);
return readView;
} | java | public ResultSubpartitionView createSubpartitionView(int index, BufferAvailabilityListener availabilityListener) throws IOException {
int refCnt = pendingReferences.get();
checkState(refCnt != -1, "Partition released.");
checkState(refCnt > 0, "Partition not pinned.");
checkElementIndex(index, subpartitions.length, "Subpartition not found.");
ResultSubpartitionView readView = subpartitions[index].createReadView(availabilityListener);
LOG.debug("Created {}", readView);
return readView;
} | [
"public",
"ResultSubpartitionView",
"createSubpartitionView",
"(",
"int",
"index",
",",
"BufferAvailabilityListener",
"availabilityListener",
")",
"throws",
"IOException",
"{",
"int",
"refCnt",
"=",
"pendingReferences",
".",
"get",
"(",
")",
";",
"checkState",
"(",
"r... | Returns the requested subpartition. | [
"Returns",
"the",
"requested",
"subpartition",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/ResultPartition.java#L343-L356 | train | Creates a subpartition view. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
alibaba/canal | dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java | LogBuffer.position | public final LogBuffer position(final int newPosition) {
if (newPosition > limit || newPosition < 0) throw new IllegalArgumentException("limit excceed: " + newPosition);
this.position = origin + newPosition;
return this;
} | java | public final LogBuffer position(final int newPosition) {
if (newPosition > limit || newPosition < 0) throw new IllegalArgumentException("limit excceed: " + newPosition);
this.position = origin + newPosition;
return this;
} | [
"public",
"final",
"LogBuffer",
"position",
"(",
"final",
"int",
"newPosition",
")",
"{",
"if",
"(",
"newPosition",
">",
"limit",
"||",
"newPosition",
"<",
"0",
")",
"throw",
"new",
"IllegalArgumentException",
"(",
"\"limit excceed: \"",
"+",
"newPosition",
")",... | Sets this buffer's position. If the mark is defined and larger than the
new position then it is discarded. </p>
@param newPosition The new position value; must be non-negative and no
larger than the current limit
@return This buffer
@throws IllegalArgumentException If the preconditions on
<tt>newPosition</tt> do not hold | [
"Sets",
"this",
"buffer",
"s",
"position",
".",
"If",
"the",
"mark",
"is",
"defined",
"and",
"larger",
"than",
"the",
"new",
"position",
"then",
"it",
"is",
"discarded",
".",
"<",
"/",
"p",
">"
] | 8f088cddc0755f4350c5aaae95c6e4002d90a40f | https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java#L101-L106 | train | Sets the position of the buffer to the new position. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
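The docstring in the record above mirrors java.nio.Buffer semantics, so the same contract can be observed on a plain ByteBuffer:

import java.nio.ByteBuffer;

class PositionSemanticsDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.position(8); // fine: 0 <= 8 <= limit
        System.out.println(buf.position());
        try {
            buf.position(32); // exceeds the limit, like the "limit excceed" check above
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e);
        }
    }
}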
apache/spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeMapData.java | UnsafeMapData.pointTo | public void pointTo(Object baseObject, long baseOffset, int sizeInBytes) {
// Read the numBytes of key array from the first 8 bytes.
final long keyArraySize = Platform.getLong(baseObject, baseOffset);
assert keyArraySize >= 0 : "keyArraySize (" + keyArraySize + ") should >= 0";
assert keyArraySize <= Integer.MAX_VALUE :
"keyArraySize (" + keyArraySize + ") should <= Integer.MAX_VALUE";
final int valueArraySize = sizeInBytes - (int)keyArraySize - 8;
assert valueArraySize >= 0 : "valueArraySize (" + valueArraySize + ") should >= 0";
keys.pointTo(baseObject, baseOffset + 8, (int)keyArraySize);
values.pointTo(baseObject, baseOffset + 8 + keyArraySize, valueArraySize);
assert keys.numElements() == values.numElements();
this.baseObject = baseObject;
this.baseOffset = baseOffset;
this.sizeInBytes = sizeInBytes;
} | java | public void pointTo(Object baseObject, long baseOffset, int sizeInBytes) {
// Read the numBytes of key array from the first 8 bytes.
final long keyArraySize = Platform.getLong(baseObject, baseOffset);
assert keyArraySize >= 0 : "keyArraySize (" + keyArraySize + ") should >= 0";
assert keyArraySize <= Integer.MAX_VALUE :
"keyArraySize (" + keyArraySize + ") should <= Integer.MAX_VALUE";
final int valueArraySize = sizeInBytes - (int)keyArraySize - 8;
assert valueArraySize >= 0 : "valueArraySize (" + valueArraySize + ") should >= 0";
keys.pointTo(baseObject, baseOffset + 8, (int)keyArraySize);
values.pointTo(baseObject, baseOffset + 8 + keyArraySize, valueArraySize);
assert keys.numElements() == values.numElements();
this.baseObject = baseObject;
this.baseOffset = baseOffset;
this.sizeInBytes = sizeInBytes;
} | [
"public",
"void",
"pointTo",
"(",
"Object",
"baseObject",
",",
"long",
"baseOffset",
",",
"int",
"sizeInBytes",
")",
"{",
"// Read the numBytes of key array from the first 8 bytes.",
"final",
"long",
"keyArraySize",
"=",
"Platform",
".",
"getLong",
"(",
"baseObject",
... | Update this UnsafeMapData to point to different backing data.
@param baseObject the base object
@param baseOffset the offset within the base object
@param sizeInBytes the size of this map's backing data, in bytes | [
"Update",
"this",
"UnsafeMapData",
"to",
"point",
"to",
"different",
"backing",
"data",
"."
] | 25ee0474f47d9c30d6f553a7892d9549f91071cf | https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeMapData.java#L81-L98 | train | Point the contents of this block to the specified byte array. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
looly/hutool | hutool-core/src/main/java/cn/hutool/core/lang/Validator.java | Validator.validateNotEqual | public static void validateNotEqual(Object t1, Object t2, String errorMsg) throws ValidateException {
if (equal(t1, t2)) {
throw new ValidateException(errorMsg);
}
} | java | public static void validateNotEqual(Object t1, Object t2, String errorMsg) throws ValidateException {
if (equal(t1, t2)) {
throw new ValidateException(errorMsg);
}
} | [
"public",
"static",
"void",
"validateNotEqual",
"(",
"Object",
"t1",
",",
"Object",
"t2",
",",
"String",
"errorMsg",
")",
"throws",
"ValidateException",
"{",
"if",
"(",
"equal",
"(",
"t1",
",",
"t2",
")",
")",
"{",
"throw",
"new",
"ValidateException",
"(",... | 验证是否不等,相等抛出异常<br>
@param t1 对象1
@param t2 对象2
@param errorMsg 错误信息
@throws ValidateException 验证异常 | [
"验证是否不等,相等抛出异常<br",
">"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/lang/Validator.java#L263-L267 | train | ValidateNotEqual method. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
apache/flink | flink-core/src/main/java/org/apache/flink/core/memory/MemorySegment.java | MemorySegment.putShort | public final void putShort(int index, short value) {
final long pos = address + index;
if (index >= 0 && pos <= addressLimit - 2) {
UNSAFE.putShort(heapMemory, pos, value);
}
else if (address > addressLimit) {
throw new IllegalStateException("segment has been freed");
}
else {
// index is in fact invalid
throw new IndexOutOfBoundsException();
}
} | java | public final void putShort(int index, short value) {
final long pos = address + index;
if (index >= 0 && pos <= addressLimit - 2) {
UNSAFE.putShort(heapMemory, pos, value);
}
else if (address > addressLimit) {
throw new IllegalStateException("segment has been freed");
}
else {
// index is in fact invalid
throw new IndexOutOfBoundsException();
}
} | [
"public",
"final",
"void",
"putShort",
"(",
"int",
"index",
",",
"short",
"value",
")",
"{",
"final",
"long",
"pos",
"=",
"address",
"+",
"index",
";",
"if",
"(",
"index",
">=",
"0",
"&&",
"pos",
"<=",
"addressLimit",
"-",
"2",
")",
"{",
"UNSAFE",
... | Writes the given short value into this buffer at the given position, using
the native byte order of the system.
@param index The position at which the value will be written.
@param value The short value to be written.
@throws IndexOutOfBoundsException Thrown, if the index is negative, or larger than the segment
size minus 2. | [
"Writes",
"the",
"given",
"short",
"value",
"into",
"this",
"buffer",
"at",
"the",
"given",
"position",
"using",
"the",
"native",
"byte",
"order",
"of",
"the",
"system",
"."
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/core/memory/MemorySegment.java#L620-L632 | train | Puts a short value into the memory. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
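A JDK-only sketch of the same bounds-checked short write: ByteBuffer enforces the "index <= size - 2" rule that putShort above checks by hand against its addressLimit.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class PutShortDemo {
    public static void main(String[] args) {
        ByteBuffer segment = ByteBuffer.allocate(8).order(ByteOrder.nativeOrder());
        segment.putShort(6, (short) 1234); // last valid index is 8 - 2
        System.out.println(segment.getShort(6));
        try {
            segment.putShort(7, (short) 1); // only one byte left -> out of bounds
        } catch (IndexOutOfBoundsException e) {
            System.out.println("rejected: " + e);
        }
    }
}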
apache/flink | flink-java/src/main/java/org/apache/flink/api/java/utils/RequiredParameters.java | RequiredParameters.checkAndApplyDefaultValue | private void checkAndApplyDefaultValue(Option o, Map<String, String> data) throws RequiredParametersException {
if (hasNoDefaultValueAndNoValuePassedOnAlternativeName(o, data)) {
throw new RequiredParametersException("No default value for undefined parameter " + o.getName());
}
} | java | private void checkAndApplyDefaultValue(Option o, Map<String, String> data) throws RequiredParametersException {
if (hasNoDefaultValueAndNoValuePassedOnAlternativeName(o, data)) {
throw new RequiredParametersException("No default value for undefined parameter " + o.getName());
}
} | [
"private",
"void",
"checkAndApplyDefaultValue",
"(",
"Option",
"o",
",",
"Map",
"<",
"String",
",",
"String",
">",
"data",
")",
"throws",
"RequiredParametersException",
"{",
"if",
"(",
"hasNoDefaultValueAndNoValuePassedOnAlternativeName",
"(",
"o",
",",
"data",
")",... | else throw an exception | [
"else",
"throw",
"an",
"exception"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/utils/RequiredParameters.java#L121-L125 | train | Check if the default value is set and if not then throw an exception. | [
30522, 2797, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
networknt/light-4j | handler/src/main/java/com/networknt/handler/Handler.java | Handler.initPaths | static void initPaths() {
if (config != null && config.getPaths() != null) {
for (PathChain pathChain : config.getPaths()) {
pathChain.validate(configName + " config"); // raises exception on misconfiguration
if(pathChain.getPath() == null) {
addSourceChain(pathChain);
} else {
addPathChain(pathChain);
}
}
}
} | java | static void initPaths() {
if (config != null && config.getPaths() != null) {
for (PathChain pathChain : config.getPaths()) {
pathChain.validate(configName + " config"); // raises exception on misconfiguration
if(pathChain.getPath() == null) {
addSourceChain(pathChain);
} else {
addPathChain(pathChain);
}
}
}
} | [
"static",
"void",
"initPaths",
"(",
")",
"{",
"if",
"(",
"config",
"!=",
"null",
"&&",
"config",
".",
"getPaths",
"(",
")",
"!=",
"null",
")",
"{",
"for",
"(",
"PathChain",
"pathChain",
":",
"config",
".",
"getPaths",
"(",
")",
")",
"{",
"pathChain",... | Build "handlerListById" and "reqTypeMatcherMap" from the paths in the config. | [
"Build",
"handlerListById",
"and",
"reqTypeMatcherMap",
"from",
"the",
"paths",
"in",
"the",
"config",
"."
] | 2a60257c60663684c8f6dc8b5ea3cf184e534db6 | https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/handler/src/main/java/com/networknt/handler/Handler.java#L121-L132 | train | Initializes the paths. | [
30522, 10763, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
looly/hutool | hutool-poi/src/main/java/cn/hutool/poi/excel/StyleSet.java | StyleSet.setFont | public StyleSet setFont(short color, short fontSize, String fontName, boolean ignoreHead) {
final Font font = StyleUtil.createFont(this.workbook, color, fontSize, fontName);
return setFont(font, ignoreHead);
} | java | public StyleSet setFont(short color, short fontSize, String fontName, boolean ignoreHead) {
final Font font = StyleUtil.createFont(this.workbook, color, fontSize, fontName);
return setFont(font, ignoreHead);
} | [
"public",
"StyleSet",
"setFont",
"(",
"short",
"color",
",",
"short",
"fontSize",
",",
"String",
"fontName",
",",
"boolean",
"ignoreHead",
")",
"{",
"final",
"Font",
"font",
"=",
"StyleUtil",
".",
"createFont",
"(",
"this",
".",
"workbook",
",",
"color",
"... | 设置全局字体
@param color 字体颜色
@param fontSize 字体大小,-1表示默认大小
@param fontName 字体名,null表示默认字体
@param ignoreHead 是否跳过头部样式
@return this | [
"设置全局字体"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-poi/src/main/java/cn/hutool/poi/excel/StyleSet.java#L149-L152 | train | Sets the font to be used. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
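hutool-poi is built on Apache POI, so StyleUtil.createFont in the record above wraps POI font creation. A plain-POI sketch of the same idea (color as a short index, size, name; the values are illustrative):

import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.IndexedColors;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

class FontDemo {
    public static void main(String[] args) throws Exception {
        try (Workbook workbook = new XSSFWorkbook()) {
            Font font = workbook.createFont();
            font.setColor(IndexedColors.RED.getIndex());
            font.setFontHeightInPoints((short) 12);
            font.setFontName("Arial");
            System.out.println(font.getFontName());
        }
    }
}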
looly/hutool | hutool-json/src/main/java/cn/hutool/json/JSONTokener.java | JSONTokener.nextTo | public String nextTo(char delimiter) throws JSONException {
StringBuilder sb = new StringBuilder();
for (;;) {
char c = this.next();
if (c == delimiter || c == 0 || c == '\n' || c == '\r') {
if (c != 0) {
this.back();
}
return sb.toString().trim();
}
sb.append(c);
}
} | java | public String nextTo(char delimiter) throws JSONException {
StringBuilder sb = new StringBuilder();
for (;;) {
char c = this.next();
if (c == delimiter || c == 0 || c == '\n' || c == '\r') {
if (c != 0) {
this.back();
}
return sb.toString().trim();
}
sb.append(c);
}
} | [
"public",
"String",
"nextTo",
"(",
"char",
"delimiter",
")",
"throws",
"JSONException",
"{",
"StringBuilder",
"sb",
"=",
"new",
"StringBuilder",
"(",
")",
";",
"for",
"(",
";",
";",
")",
"{",
"char",
"c",
"=",
"this",
".",
"next",
"(",
")",
";",
"if"... | Get the text up but not including the specified character or the end of line, whichever comes first. <br>
获得从当前位置直到分隔符(不包括分隔符)或行尾的的所有字符。
@param delimiter 分隔符
@return 字符串 | [
"Get",
"the",
"text",
"up",
"but",
"not",
"including",
"the",
"specified",
"character",
"or",
"the",
"end",
"of",
"line",
"whichever",
"comes",
"first",
".",
"<br",
">",
"获得从当前位置直到分隔符(不包括分隔符)或行尾的的所有字符。"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-json/src/main/java/cn/hutool/json/JSONTokener.java#L257-L269 | train | Get the next value from the JSON string. | [
30522, 2270, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
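A sketch of nextTo from the record above, assuming a string-accepting JSONTokener constructor as in the hutool version shown:

import cn.hutool.json.JSONTokener;

class NextToDemo {
    public static void main(String[] args) {
        JSONTokener tokener = new JSONTokener("alpha;beta;gamma");
        System.out.println(tokener.nextTo(';')); // "alpha", stops before the delimiter
        tokener.next(); // consume the ';' that nextTo() backed up over
        System.out.println(tokener.nextTo(';')); // "beta"
    }
}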
looly/hutool | hutool-log/src/main/java/cn/hutool/log/dialect/jdk/JdkLog.java | JdkLog.logIfEnabled | private void logIfEnabled(Level level, Throwable throwable, String format, Object[] arguments){
this.logIfEnabled(FQCN_SELF, level, throwable, format, arguments);
} | java | private void logIfEnabled(Level level, Throwable throwable, String format, Object[] arguments){
this.logIfEnabled(FQCN_SELF, level, throwable, format, arguments);
} | [
"private",
"void",
"logIfEnabled",
"(",
"Level",
"level",
",",
"Throwable",
"throwable",
",",
"String",
"format",
",",
"Object",
"[",
"]",
"arguments",
")",
"{",
"this",
".",
"logIfEnabled",
"(",
"FQCN_SELF",
",",
"level",
",",
"throwable",
",",
"format",
... | 打印对应等级的日志
@param level 等级
@param throwable 异常对象
@param format 消息模板
@param arguments 参数 | [
"打印对应等级的日志"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-log/src/main/java/cn/hutool/log/dialect/jdk/JdkLog.java#L167-L169 | train | Logs a throwable if the specified log level is enabled. | [
30522, 2797, ... | [ 0, 0, ... | [ 1, 1, ... | [ -100, -100, ... |
apache/flink | flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/cli/CliClient.java | CliClient.parseCommand | private Optional<SqlCommandCall> parseCommand(String line) {
final Optional<SqlCommandCall> parsedLine = SqlCommandParser.parse(line);
if (!parsedLine.isPresent()) {
printError(CliStrings.MESSAGE_UNKNOWN_SQL);
}
return parsedLine;
} | java | private Optional<SqlCommandCall> parseCommand(String line) {
final Optional<SqlCommandCall> parsedLine = SqlCommandParser.parse(line);
if (!parsedLine.isPresent()) {
printError(CliStrings.MESSAGE_UNKNOWN_SQL);
}
return parsedLine;
} | [
"private",
"Optional",
"<",
"SqlCommandCall",
">",
"parseCommand",
"(",
"String",
"line",
")",
"{",
"final",
"Optional",
"<",
"SqlCommandCall",
">",
"parsedLine",
"=",
"SqlCommandParser",
".",
"parse",
"(",
"line",
")",
";",
"if",
"(",
"!",
"parsedLine",
"."... | -------------------------------------------------------------------------------------------- | [
"--------------------------------------------------------------------------------------------"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/cli/CliClient.java#L241-L247 | train | Parses a command line. | [
30522, 2797, 11887, 1026, 29296, 9006, 2386, 16409, 8095, 1028, 11968, 3366, 9006, 2386, 2094, 1006, 5164, 2240, 1007, 1063, 2345, 11887, 1026, 29296, 9006, 2386, 16409, 8095, 1028, 11968, 6924, 4179, 1027, 29296, 9006, 2386, 18927, 11650, ... | [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0... | [
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1... | [
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100... |
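
The CliClient.parseCommand row wraps the parser output in an Optional and reports an error only when parsing fails, leaving the caller free to react to the empty result. A minimal self-contained sketch of the same pattern is below; SqlCommandParser and CliStrings are real Flink classes, but the trivial grammar and error string here are placeholders so the example compiles on its own:

```java
// Parse-and-report pattern with Optional; the SELECT-only grammar is a placeholder.
import java.util.Optional;

public class ParseCommandDemo {
    static Optional<String> parse(String line) {
        // Placeholder grammar: accept only statements that start with SELECT.
        return line.trim().toUpperCase().startsWith("SELECT")
                ? Optional.of(line.trim())
                : Optional.empty();
    }

    static Optional<String> parseCommand(String line) {
        Optional<String> parsedLine = parse(line);
        if (!parsedLine.isPresent()) {
            System.err.println("Unknown SQL statement."); // stand-in for CliStrings.MESSAGE_UNKNOWN_SQL
        }
        return parsedLine; // caller decides how to handle an empty result
    }

    public static void main(String[] args) {
        parseCommand("SELECT 1").ifPresent(cmd -> System.out.println("parsed: " + cmd));
        parseCommand("FOO"); // prints the error and returns Optional.empty()
    }
}
```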
apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/StateAssignmentOperation.java | StateAssignmentOperation.getRawKeyedStateHandles | public static List<KeyedStateHandle> getRawKeyedStateHandles(
OperatorState operatorState,
KeyGroupRange subtaskKeyGroupRange) {
final int parallelism = operatorState.getParallelism();
List<KeyedStateHandle> extractedKeyedStateHandles = null;
for (int i = 0; i < parallelism; i++) {
if (operatorState.getState(i) != null) {
Collection<KeyedStateHandle> rawKeyedState = operatorState.getState(i).getRawKeyedState();
if (extractedKeyedStateHandles == null) {
extractedKeyedStateHandles = new ArrayList<>(parallelism * rawKeyedState.size());
}
extractIntersectingState(
rawKeyedState,
subtaskKeyGroupRange,
extractedKeyedStateHandles);
}
}
return extractedKeyedStateHandles;
} | java | public static List<KeyedStateHandle> getRawKeyedStateHandles(
OperatorState operatorState,
KeyGroupRange subtaskKeyGroupRange) {
final int parallelism = operatorState.getParallelism();
List<KeyedStateHandle> extractedKeyedStateHandles = null;
for (int i = 0; i < parallelism; i++) {
if (operatorState.getState(i) != null) {
Collection<KeyedStateHandle> rawKeyedState = operatorState.getState(i).getRawKeyedState();
if (extractedKeyedStateHandles == null) {
extractedKeyedStateHandles = new ArrayList<>(parallelism * rawKeyedState.size());
}
extractIntersectingState(
rawKeyedState,
subtaskKeyGroupRange,
extractedKeyedStateHandles);
}
}
return extractedKeyedStateHandles;
} | [
"public",
"static",
"List",
"<",
"KeyedStateHandle",
">",
"getRawKeyedStateHandles",
"(",
"OperatorState",
"operatorState",
",",
"KeyGroupRange",
"subtaskKeyGroupRange",
")",
"{",
"final",
"int",
"parallelism",
"=",
"operatorState",
".",
"getParallelism",
"(",
")",
";... | Collect {@link KeyGroupsStateHandle rawKeyedStateHandles} which have intersection with given
{@link KeyGroupRange} from {@link TaskState operatorState}
@param operatorState all state handles of a operator
@param subtaskKeyGroupRange the KeyGroupRange of a subtask
@return all rawKeyedStateHandles which have intersection with given KeyGroupRange | [
"Collect",
"{",
"@link",
"KeyGroupsStateHandle",
"rawKeyedStateHandles",
"}",
"which",
"have",
"intersection",
"with",
"given",
"{",
"@link",
"KeyGroupRange",
"}",
"from",
"{",
"@link",
"TaskState",
"operatorState",
"}"
] | b62db93bf63cb3bb34dd03d611a779d9e3fc61ac | https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/StateAssignmentOperation.java#L434-L459 | train | Get the raw keyed state handles for the given operator state. | [
30522, 2270, 10763, 2862, 1026, 3145, 2098, 9153, 2618, 11774, 2571, 1028, 2131, 2527, 26291, 17683, 5104, 12259, 11774, 4244, 1006, 9224, 12259, 9224, 12259, 1010, 3145, 17058, 24388, 2063, 4942, 10230, 19658, 3240, 17058, 24388, 2063, 1007,... | [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0... | [
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1... | [
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100... |
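
The getRawKeyedStateHandles row walks every parallel subtask's state and keeps only the raw keyed state whose key-group range intersects the requested subtask range. The core of extractIntersectingState is an interval-overlap test over inclusive ranges; a simplified sketch is below, where KeyGroupRange is a stand-in record (Java 16+ syntax) for Flink's class of the same name:

```java
// Interval-overlap filtering over inclusive key-group ranges; a sketch, not Flink code.
import java.util.ArrayList;
import java.util.List;

public class KeyGroupIntersectionDemo {
    // Inclusive [start, end] range of key groups, mirroring Flink's convention.
    record KeyGroupRange(int start, int end) {
        boolean intersects(KeyGroupRange other) {
            return this.start <= other.end() && other.start() <= this.end;
        }
    }

    // Keep only the handles whose range overlaps the subtask's range.
    static List<KeyGroupRange> extractIntersecting(List<KeyGroupRange> handles,
                                                   KeyGroupRange subtaskRange) {
        List<KeyGroupRange> result = new ArrayList<>();
        for (KeyGroupRange handle : handles) {
            if (handle.intersects(subtaskRange)) {
                result.add(handle);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        List<KeyGroupRange> handles = List.of(
                new KeyGroupRange(0, 31), new KeyGroupRange(32, 63), new KeyGroupRange(64, 95));
        // A subtask owning key groups 16..47 intersects the first two handles only.
        System.out.println(extractIntersecting(handles, new KeyGroupRange(16, 47)));
    }
}
```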
looly/hutool | hutool-core/src/main/java/cn/hutool/core/bean/BeanUtil.java | BeanUtil.beanToMap | public static Map<String, Object> beanToMap(Object bean, Map<String, Object> targetMap, boolean ignoreNullValue, Editor<String> keyEditor) {
if (bean == null) {
return null;
}
final Collection<PropDesc> props = BeanUtil.getBeanDesc(bean.getClass()).getProps();
String key;
Method getter;
Object value;
for (PropDesc prop : props) {
key = prop.getFieldName();
// 过滤class属性
// 得到property对应的getter方法
getter = prop.getGetter();
if (null != getter) {
// 只读取有getter方法的属性
try {
value = getter.invoke(bean);
} catch (Exception ignore) {
continue;
}
if (false == ignoreNullValue || (null != value && false == value.equals(bean))) {
key = keyEditor.edit(key);
if (null != key) {
targetMap.put(key, value);
}
}
}
}
return targetMap;
} | java | public static Map<String, Object> beanToMap(Object bean, Map<String, Object> targetMap, boolean ignoreNullValue, Editor<String> keyEditor) {
if (bean == null) {
return null;
}
final Collection<PropDesc> props = BeanUtil.getBeanDesc(bean.getClass()).getProps();
String key;
Method getter;
Object value;
for (PropDesc prop : props) {
key = prop.getFieldName();
// 过滤class属性
// 得到property对应的getter方法
getter = prop.getGetter();
if (null != getter) {
// 只读取有getter方法的属性
try {
value = getter.invoke(bean);
} catch (Exception ignore) {
continue;
}
if (false == ignoreNullValue || (null != value && false == value.equals(bean))) {
key = keyEditor.edit(key);
if (null != key) {
targetMap.put(key, value);
}
}
}
}
return targetMap;
} | [
"public",
"static",
"Map",
"<",
"String",
",",
"Object",
">",
"beanToMap",
"(",
"Object",
"bean",
",",
"Map",
"<",
"String",
",",
"Object",
">",
"targetMap",
",",
"boolean",
"ignoreNullValue",
",",
"Editor",
"<",
"String",
">",
"keyEditor",
")",
"{",
"if... | 对象转Map<br>
通过实现{@link Editor} 可以自定义字段值,如果这个Editor返回null则忽略这个字段,以便实现:
<pre>
1. 字段筛选,可以去除不需要的字段
2. 字段变换,例如实现驼峰转下划线
3. 自定义字段前缀或后缀等等
</pre>
@param bean bean对象
@param targetMap 目标的Map
@param ignoreNullValue 是否忽略值为空的字段
@param keyEditor 属性字段(Map的key)编辑器,用于筛选、编辑key
@return Map
@since 4.0.5 | [
"对象转Map<br",
">",
"通过实现",
"{",
"@link",
"Editor",
"}",
"可以自定义字段值,如果这个Editor返回null则忽略这个字段,以便实现:"
] | bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a | https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/bean/BeanUtil.java#L542-L573 | train | Returns a map of all the properties of the specified bean from the specified target map. | [
30522, 2270, 10763, 4949, 1026, 5164, 1010, 4874, 1028, 14068, 20389, 9331, 1006, 4874, 14068, 1010, 4949, 1026, 5164, 1010, 4874, 1028, 4539, 2863, 2361, 1010, 22017, 20898, 8568, 11231, 3363, 10175, 5657, 1010, 3559, 1026, 5164, 1028, 314... | [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0... | [
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1... | [
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100... |
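
The beanToMap row reflects over a bean's getters and lets an Editor<String> rename or drop keys before they reach the target map, exactly the use cases its docstring lists (field filtering, camelCase-to-underscore renaming, prefixes). A usage-style sketch of the key-editor idea follows; the Editor interface is re-declared with the same single-method shape as hutool's Editor<T> so the example is self-contained, and the field names are hypothetical:

```java
// Key-editor sketch: snake_case renaming plus dropping one field via a null key.
import java.util.HashMap;
import java.util.Map;

public class BeanToMapDemo {
    interface Editor<T> { T edit(T t); } // same shape as cn.hutool.core.lang.Editor

    public static void main(String[] args) {
        Editor<String> keyEditor = key -> {
            if ("internalId".equals(key)) {
                return null; // a null key makes beanToMap skip the field
            }
            return key.replaceAll("([A-Z])", "_$1").toLowerCase(); // camelCase -> snake_case
        };

        // Simulate what beanToMap does for each property it discovers via a getter:
        Map<String, Object> target = new HashMap<>();
        for (String field : new String[] {"userName", "age", "internalId"}) {
            String key = keyEditor.edit(field);
            if (key != null) {
                target.put(key, "<value of " + field + ">");
            }
        }
        System.out.println(target); // {age=..., user_name=...}
    }
}
```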