repo
stringclasses
11 values
path
stringlengths
41
234
func_name
stringlengths
5
78
original_string
stringlengths
71
14.1k
language
stringclasses
1 value
code
stringlengths
71
14.1k
code_tokens
listlengths
22
2.65k
docstring
stringlengths
2
5.35k
docstring_tokens
listlengths
1
369
sha
stringclasses
11 values
url
stringlengths
129
339
partition
stringclasses
1 value
summary
stringlengths
7
175
input_ids
listlengths
502
502
token_type_ids
listlengths
502
502
attention_mask
listlengths
502
502
labels
listlengths
502
502
looly/hutool
hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java
ImgUtil.cut
public static void cut(Image srcImage, ImageOutputStream destImageStream, Rectangle rectangle) throws IORuntimeException { writeJpg(cut(srcImage, rectangle), destImageStream); }
java
public static void cut(Image srcImage, ImageOutputStream destImageStream, Rectangle rectangle) throws IORuntimeException { writeJpg(cut(srcImage, rectangle), destImageStream); }
[ "public", "static", "void", "cut", "(", "Image", "srcImage", ",", "ImageOutputStream", "destImageStream", ",", "Rectangle", "rectangle", ")", "throws", "IORuntimeException", "{", "writeJpg", "(", "cut", "(", "srcImage", ",", "rectangle", ")", ",", "destImageStream...
图像切割(按指定起点坐标和宽高切割),此方法并不关闭流 @param srcImage 源图像 @param destImageStream 切片后的图像输出流 @param rectangle 矩形对象,表示矩形区域的x,y,width,height @since 3.1.0 @throws IORuntimeException IO异常
[ "图像切割", "(", "按指定起点坐标和宽高切割", ")", ",此方法并不关闭流" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java#L319-L321
train
Cut the image from srcImage to destImageStream using the rectangle.
[ 30522, 2270, 10763, 11675, 3013, 1006, 3746, 5034, 6895, 26860, 1010, 3746, 5833, 18780, 21422, 4078, 3775, 26860, 21422, 1010, 28667, 23395, 28667, 23395, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 4339, 3501, 26952, 1006, 3013, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/concurrent/FutureUtils.java
FutureUtils.retryOperation
private static <T> void retryOperation( final CompletableFuture<T> resultFuture, final Supplier<CompletableFuture<T>> operation, final int retries, final Executor executor) { if (!resultFuture.isDone()) { final CompletableFuture<T> operationFuture = operation.get(); operationFuture.whenCompleteAsync( (t, throwable) -> { if (throwable != null) { if (throwable instanceof CancellationException) { resultFuture.completeExceptionally(new RetryException("Operation future was cancelled.", throwable)); } else { if (retries > 0) { retryOperation( resultFuture, operation, retries - 1, executor); } else { resultFuture.completeExceptionally(new RetryException("Could not complete the operation. Number of retries " + "has been exhausted.", throwable)); } } } else { resultFuture.complete(t); } }, executor); resultFuture.whenComplete( (t, throwable) -> operationFuture.cancel(false)); } }
java
private static <T> void retryOperation( final CompletableFuture<T> resultFuture, final Supplier<CompletableFuture<T>> operation, final int retries, final Executor executor) { if (!resultFuture.isDone()) { final CompletableFuture<T> operationFuture = operation.get(); operationFuture.whenCompleteAsync( (t, throwable) -> { if (throwable != null) { if (throwable instanceof CancellationException) { resultFuture.completeExceptionally(new RetryException("Operation future was cancelled.", throwable)); } else { if (retries > 0) { retryOperation( resultFuture, operation, retries - 1, executor); } else { resultFuture.completeExceptionally(new RetryException("Could not complete the operation. Number of retries " + "has been exhausted.", throwable)); } } } else { resultFuture.complete(t); } }, executor); resultFuture.whenComplete( (t, throwable) -> operationFuture.cancel(false)); } }
[ "private", "static", "<", "T", ">", "void", "retryOperation", "(", "final", "CompletableFuture", "<", "T", ">", "resultFuture", ",", "final", "Supplier", "<", "CompletableFuture", "<", "T", ">", ">", "operation", ",", "final", "int", "retries", ",", "final",...
Helper method which retries the provided operation in case of a failure. @param resultFuture to complete @param operation to retry @param retries until giving up @param executor to run the futures @param <T> type of the future's result
[ "Helper", "method", "which", "retries", "the", "provided", "operation", "in", "case", "of", "a", "failure", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/concurrent/FutureUtils.java#L97-L132
train
Retry the operation with retries times.
[ 30522, 2797, 10763, 1026, 1056, 1028, 11675, 2128, 11129, 25918, 3370, 1006, 2345, 4012, 10814, 10880, 11263, 11244, 1026, 1056, 1028, 2765, 11263, 11244, 1010, 2345, 17024, 1026, 4012, 10814, 10880, 11263, 11244, 1026, 1056, 1028, 1028, 3169...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-java/src/main/java/org/apache/flink/api/java/DataSet.java
DataSet.sortPartition
public SortPartitionOperator<T> sortPartition(int field, Order order) { return new SortPartitionOperator<>(this, field, order, Utils.getCallLocationName()); }
java
public SortPartitionOperator<T> sortPartition(int field, Order order) { return new SortPartitionOperator<>(this, field, order, Utils.getCallLocationName()); }
[ "public", "SortPartitionOperator", "<", "T", ">", "sortPartition", "(", "int", "field", ",", "Order", "order", ")", "{", "return", "new", "SortPartitionOperator", "<>", "(", "this", ",", "field", ",", "order", ",", "Utils", ".", "getCallLocationName", "(", "...
Locally sorts the partitions of the DataSet on the specified field in the specified order. DataSet can be sorted on multiple fields by chaining sortPartition() calls. @param field The field index on which the DataSet is sorted. @param order The order in which the DataSet is sorted. @return The DataSet with sorted local partitions.
[ "Locally", "sorts", "the", "partitions", "of", "the", "DataSet", "on", "the", "specified", "field", "in", "the", "specified", "order", ".", "DataSet", "can", "be", "sorted", "on", "multiple", "fields", "by", "chaining", "sortPartition", "()", "calls", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/DataSet.java#L1401-L1403
train
Create a new SortPartitionOperator
[ 30522, 2270, 4066, 19362, 3775, 3508, 25918, 8844, 1026, 1056, 1028, 4066, 19362, 3775, 3508, 1006, 20014, 2492, 1010, 2344, 2344, 1007, 1063, 2709, 2047, 4066, 19362, 3775, 3508, 25918, 8844, 1026, 1028, 1006, 2023, 1010, 2492, 1010, 2344,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/util/SegmentsUtil.java
SegmentsUtil.getShort
public static short getShort(MemorySegment[] segments, int offset) { if (inFirstSegment(segments, offset, 2)) { return segments[0].getShort(offset); } else { return getShortMultiSegments(segments, offset); } }
java
public static short getShort(MemorySegment[] segments, int offset) { if (inFirstSegment(segments, offset, 2)) { return segments[0].getShort(offset); } else { return getShortMultiSegments(segments, offset); } }
[ "public", "static", "short", "getShort", "(", "MemorySegment", "[", "]", "segments", ",", "int", "offset", ")", "{", "if", "(", "inFirstSegment", "(", "segments", ",", "offset", ",", "2", ")", ")", "{", "return", "segments", "[", "0", "]", ".", "getSho...
get short from segments. @param segments target segments. @param offset value offset.
[ "get", "short", "from", "segments", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/util/SegmentsUtil.java#L800-L806
train
get short from segments.
[ 30522, 2270, 10763, 2460, 4152, 27794, 1006, 3638, 3366, 21693, 4765, 1031, 1033, 9214, 1010, 20014, 16396, 1007, 1063, 2065, 1006, 1999, 8873, 12096, 3366, 21693, 4765, 1006, 9214, 1010, 16396, 1010, 1016, 1007, 1007, 1063, 2709, 9214, 103...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/dataformat/BinaryString.java
BinaryString.trim
public BinaryString trim(BinaryString trimStr) { if (trimStr == null) { return null; } return trimLeft(trimStr).trimRight(trimStr); }
java
public BinaryString trim(BinaryString trimStr) { if (trimStr == null) { return null; } return trimLeft(trimStr).trimRight(trimStr); }
[ "public", "BinaryString", "trim", "(", "BinaryString", "trimStr", ")", "{", "if", "(", "trimStr", "==", "null", ")", "{", "return", "null", ";", "}", "return", "trimLeft", "(", "trimStr", ")", ".", "trimRight", "(", "trimStr", ")", ";", "}" ]
Walk each character of current string from both ends, remove the character if it is in trim string. Return the new substring which both ends trim characters have been removed. @param trimStr the trim string @return A subString which both ends trim characters have been removed.
[ "Walk", "each", "character", "of", "current", "string", "from", "both", "ends", "remove", "the", "character", "if", "it", "is", "in", "trim", "string", ".", "Return", "the", "new", "substring", "which", "both", "ends", "trim", "characters", "have", "been", ...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/dataformat/BinaryString.java#L664-L669
train
Trims the left and right of the given string.
[ 30522, 2270, 12441, 3367, 4892, 12241, 1006, 12441, 3367, 4892, 12241, 3367, 2099, 1007, 1063, 2065, 1006, 12241, 3367, 2099, 1027, 1027, 19701, 1007, 1063, 2709, 19701, 1025, 1065, 2709, 12241, 2571, 6199, 1006, 12241, 3367, 2099, 1007, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
launcher/src/main/java/org/apache/spark/launcher/Main.java
Main.buildCommand
private static List<String> buildCommand( AbstractCommandBuilder builder, Map<String, String> env, boolean printLaunchCommand) throws IOException, IllegalArgumentException { List<String> cmd = builder.buildCommand(env); if (printLaunchCommand) { System.err.println("Spark Command: " + join(" ", cmd)); System.err.println("========================================"); } return cmd; }
java
private static List<String> buildCommand( AbstractCommandBuilder builder, Map<String, String> env, boolean printLaunchCommand) throws IOException, IllegalArgumentException { List<String> cmd = builder.buildCommand(env); if (printLaunchCommand) { System.err.println("Spark Command: " + join(" ", cmd)); System.err.println("========================================"); } return cmd; }
[ "private", "static", "List", "<", "String", ">", "buildCommand", "(", "AbstractCommandBuilder", "builder", ",", "Map", "<", "String", ",", "String", ">", "env", ",", "boolean", "printLaunchCommand", ")", "throws", "IOException", ",", "IllegalArgumentException", "{...
Prepare spark commands with the appropriate command builder. If printLaunchCommand is set then the commands will be printed to the stderr.
[ "Prepare", "spark", "commands", "with", "the", "appropriate", "command", "builder", ".", "If", "printLaunchCommand", "is", "set", "then", "the", "commands", "will", "be", "printed", "to", "the", "stderr", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/launcher/src/main/java/org/apache/spark/launcher/Main.java#L106-L116
train
Build Spark Command
[ 30522, 2797, 10763, 2862, 1026, 5164, 1028, 3857, 9006, 2386, 2094, 1006, 10061, 9006, 2386, 18939, 19231, 4063, 12508, 1010, 4949, 1026, 5164, 1010, 5164, 1028, 4372, 2615, 1010, 22017, 20898, 6140, 17298, 12680, 9006, 2386, 2094, 1007, 11...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-http/src/main/java/cn/hutool/http/webservice/SoapClient.java
SoapClient.setHeader
public SoapClient setHeader(QName name) { return setHeader(name, null, null, null, null); }
java
public SoapClient setHeader(QName name) { return setHeader(name, null, null, null, null); }
[ "public", "SoapClient", "setHeader", "(", "QName", "name", ")", "{", "return", "setHeader", "(", "name", ",", "null", ",", "null", ",", "null", ",", "null", ")", ";", "}" ]
设置头信息 @param name 头信息标签名 @return this
[ "设置头信息" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-http/src/main/java/cn/hutool/http/webservice/SoapClient.java#L176-L178
train
Sets the SOAP header.
[ 30522, 2270, 7815, 20464, 11638, 6662, 13775, 2121, 1006, 1053, 18442, 2171, 1007, 1063, 2709, 6662, 13775, 2121, 1006, 2171, 1010, 19701, 1010, 19701, 1010, 19701, 1010, 19701, 1007, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/corpus/document/sentence/Sentence.java
Sentence.toStandoff
public String toStandoff(boolean withComment) { StringBuilder sb = new StringBuilder(size() * 4); String delimiter = " "; String text = text(delimiter); sb.append(text).append('\n'); int i = 1; int offset = 0; for (IWord word : wordList) { assert text.charAt(offset) == word.getValue().charAt(0); printWord(word, sb, i, offset, withComment); ++i; if (word instanceof CompoundWord) { int offsetChild = offset; for (Word child : ((CompoundWord) word).innerList) { printWord(child, sb, i, offsetChild, withComment); offsetChild += child.length(); offsetChild += delimiter.length(); ++i; } offset += delimiter.length() * ((CompoundWord) word).innerList.size(); } else { offset += delimiter.length(); } offset += word.length(); } return sb.toString(); }
java
public String toStandoff(boolean withComment) { StringBuilder sb = new StringBuilder(size() * 4); String delimiter = " "; String text = text(delimiter); sb.append(text).append('\n'); int i = 1; int offset = 0; for (IWord word : wordList) { assert text.charAt(offset) == word.getValue().charAt(0); printWord(word, sb, i, offset, withComment); ++i; if (word instanceof CompoundWord) { int offsetChild = offset; for (Word child : ((CompoundWord) word).innerList) { printWord(child, sb, i, offsetChild, withComment); offsetChild += child.length(); offsetChild += delimiter.length(); ++i; } offset += delimiter.length() * ((CompoundWord) word).innerList.size(); } else { offset += delimiter.length(); } offset += word.length(); } return sb.toString(); }
[ "public", "String", "toStandoff", "(", "boolean", "withComment", ")", "{", "StringBuilder", "sb", "=", "new", "StringBuilder", "(", "size", "(", ")", "*", "4", ")", ";", "String", "delimiter", "=", "\" \"", ";", "String", "text", "=", "text", "(", "delim...
brat standoff format<br> http://brat.nlplab.org/standoff.html @param withComment @return
[ "brat", "standoff", "format<br", ">", "http", ":", "//", "brat", ".", "nlplab", ".", "org", "/", "standoff", ".", "html" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/document/sentence/Sentence.java#L110-L142
train
Print the Sequence to stand off.
[ 30522, 2270, 5164, 2000, 21515, 7245, 1006, 22017, 20898, 2007, 9006, 3672, 1007, 1063, 5164, 8569, 23891, 2099, 24829, 1027, 2047, 5164, 8569, 23891, 2099, 1006, 2946, 1006, 1007, 1008, 1018, 1007, 1025, 5164, 3972, 27605, 3334, 1027, 1000...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/util/SegmentsUtil.java
SegmentsUtil.copyToUnsafe
public static void copyToUnsafe( MemorySegment[] segments, int offset, Object target, int pointer, int numBytes) { if (inFirstSegment(segments, offset, numBytes)) { segments[0].copyToUnsafe(offset, target, pointer, numBytes); } else { copyMultiSegmentsToUnsafe(segments, offset, target, pointer, numBytes); } }
java
public static void copyToUnsafe( MemorySegment[] segments, int offset, Object target, int pointer, int numBytes) { if (inFirstSegment(segments, offset, numBytes)) { segments[0].copyToUnsafe(offset, target, pointer, numBytes); } else { copyMultiSegmentsToUnsafe(segments, offset, target, pointer, numBytes); } }
[ "public", "static", "void", "copyToUnsafe", "(", "MemorySegment", "[", "]", "segments", ",", "int", "offset", ",", "Object", "target", ",", "int", "pointer", ",", "int", "numBytes", ")", "{", "if", "(", "inFirstSegment", "(", "segments", ",", "offset", ","...
Copy segments to target unsafe pointer. @param segments Source segments. @param offset The position where the bytes are started to be read from these memory segments. @param target The unsafe memory to copy the bytes to. @param pointer The position in the target unsafe memory to copy the chunk to. @param numBytes the number bytes to copy.
[ "Copy", "segments", "to", "target", "unsafe", "pointer", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/util/SegmentsUtil.java#L160-L171
train
Copies the contents of the given array of segment to the given target object using the unsafe version of the underlying MemorySegmentFactory.
[ 30522, 2270, 10763, 11675, 6100, 24826, 3619, 10354, 2063, 1006, 3638, 3366, 21693, 4765, 1031, 1033, 9214, 1010, 20014, 16396, 1010, 4874, 4539, 1010, 20014, 20884, 1010, 20014, 15903, 17250, 2015, 1007, 1063, 2065, 1006, 1999, 8873, 12096, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/dependency/nnparser/NeuralNetworkParser.java
NeuralNetworkParser.load
public boolean load(ByteArray byteArray) { if (byteArray == null) return false; model_header = byteArray.nextString(); root = byteArray.nextString(); use_distance = byteArray.nextInt() == 1; use_valency = byteArray.nextInt() == 1; use_cluster = byteArray.nextInt() == 1; W1 = new Matrix(); W1.load(byteArray); W2 = new Matrix(); W2.load(byteArray); E = new Matrix(); E .load(byteArray); b1 = new Matrix(); b1 .load(byteArray); saved = new Matrix(); saved .load(byteArray); forms_alphabet = new Alphabet(); forms_alphabet .load(byteArray); postags_alphabet = new Alphabet(); postags_alphabet .load(byteArray); deprels_alphabet = new Alphabet(); deprels_alphabet .load(byteArray); precomputation_id_encoder = read_map(byteArray); if (use_cluster) { cluster4_types_alphabet = new Alphabet(); cluster4_types_alphabet.load(byteArray); cluster6_types_alphabet = new Alphabet(); cluster6_types_alphabet .load(byteArray); cluster_types_alphabet = new Alphabet(); cluster_types_alphabet .load(byteArray); form_to_cluster4 = read_map(byteArray); form_to_cluster6 = read_map(byteArray); form_to_cluster = read_map(byteArray); } assert !byteArray.hasMore() : "文件有残留,可能是读取逻辑不对"; classifier = new NeuralNetworkClassifier(W1, W2, E, b1, saved, precomputation_id_encoder); classifier.canonical(); return true; }
java
public boolean load(ByteArray byteArray) { if (byteArray == null) return false; model_header = byteArray.nextString(); root = byteArray.nextString(); use_distance = byteArray.nextInt() == 1; use_valency = byteArray.nextInt() == 1; use_cluster = byteArray.nextInt() == 1; W1 = new Matrix(); W1.load(byteArray); W2 = new Matrix(); W2.load(byteArray); E = new Matrix(); E .load(byteArray); b1 = new Matrix(); b1 .load(byteArray); saved = new Matrix(); saved .load(byteArray); forms_alphabet = new Alphabet(); forms_alphabet .load(byteArray); postags_alphabet = new Alphabet(); postags_alphabet .load(byteArray); deprels_alphabet = new Alphabet(); deprels_alphabet .load(byteArray); precomputation_id_encoder = read_map(byteArray); if (use_cluster) { cluster4_types_alphabet = new Alphabet(); cluster4_types_alphabet.load(byteArray); cluster6_types_alphabet = new Alphabet(); cluster6_types_alphabet .load(byteArray); cluster_types_alphabet = new Alphabet(); cluster_types_alphabet .load(byteArray); form_to_cluster4 = read_map(byteArray); form_to_cluster6 = read_map(byteArray); form_to_cluster = read_map(byteArray); } assert !byteArray.hasMore() : "文件有残留,可能是读取逻辑不对"; classifier = new NeuralNetworkClassifier(W1, W2, E, b1, saved, precomputation_id_encoder); classifier.canonical(); return true; }
[ "public", "boolean", "load", "(", "ByteArray", "byteArray", ")", "{", "if", "(", "byteArray", "==", "null", ")", "return", "false", ";", "model_header", "=", "byteArray", ".", "nextString", "(", ")", ";", "root", "=", "byteArray", ".", "nextString", "(", ...
从bin加载 @param byteArray @return
[ "从bin加载" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/dependency/nnparser/NeuralNetworkParser.java#L238-L288
train
Load the class file from the specified byte array.
[ 30522, 2270, 22017, 20898, 7170, 1006, 24880, 2906, 9447, 24880, 2906, 9447, 1007, 1063, 2065, 1006, 24880, 2906, 9447, 1027, 1027, 19701, 1007, 2709, 6270, 1025, 2944, 1035, 20346, 1027, 24880, 2906, 9447, 1012, 2279, 3367, 4892, 1006, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/dependency/perceptron/transition/parser/ArcEager.java
ArcEager.isTerminal
public static boolean isTerminal(ArrayList<Configuration> beam) { for (Configuration configuration : beam) if (!configuration.state.isTerminalState()) return false; return true; }
java
public static boolean isTerminal(ArrayList<Configuration> beam) { for (Configuration configuration : beam) if (!configuration.state.isTerminalState()) return false; return true; }
[ "public", "static", "boolean", "isTerminal", "(", "ArrayList", "<", "Configuration", ">", "beam", ")", "{", "for", "(", "Configuration", "configuration", ":", "beam", ")", "if", "(", "!", "configuration", ".", "state", ".", "isTerminalState", "(", ")", ")", ...
Shows true if all of the configurations in the beam are in the terminal state @param beam the current beam @return true if all of the configurations in the beam are in the terminal state
[ "Shows", "true", "if", "all", "of", "the", "configurations", "in", "the", "beam", "are", "in", "the", "terminal", "state" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/dependency/perceptron/transition/parser/ArcEager.java#L102-L108
train
Is the beam a terminal.
[ 30522, 2270, 10763, 22017, 20898, 21541, 2121, 22311, 2140, 1006, 9140, 9863, 1026, 9563, 1028, 7504, 1007, 1063, 2005, 1006, 9563, 9563, 30524, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-kafka-0.8/src/main/java/org/apache/flink/streaming/connectors/kafka/FlinkKafkaConsumer08.java
FlinkKafkaConsumer08.validateAutoOffsetResetValue
private static void validateAutoOffsetResetValue(Properties config) { final String val = config.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "largest"); if (!(val.equals("largest") || val.equals("latest") || val.equals("earliest") || val.equals("smallest"))) { // largest/smallest is kafka 0.8, latest/earliest is kafka 0.9 throw new IllegalArgumentException("Cannot use '" + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG + "' value '" + val + "'. Possible values: 'latest', 'largest', 'earliest', or 'smallest'."); } }
java
private static void validateAutoOffsetResetValue(Properties config) { final String val = config.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "largest"); if (!(val.equals("largest") || val.equals("latest") || val.equals("earliest") || val.equals("smallest"))) { // largest/smallest is kafka 0.8, latest/earliest is kafka 0.9 throw new IllegalArgumentException("Cannot use '" + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG + "' value '" + val + "'. Possible values: 'latest', 'largest', 'earliest', or 'smallest'."); } }
[ "private", "static", "void", "validateAutoOffsetResetValue", "(", "Properties", "config", ")", "{", "final", "String", "val", "=", "config", ".", "getProperty", "(", "ConsumerConfig", ".", "AUTO_OFFSET_RESET_CONFIG", ",", "\"largest\"", ")", ";", "if", "(", "!", ...
Check for invalid "auto.offset.reset" values. Should be called in constructor for eager checking before submitting the job. Note that 'none' is also considered invalid, as we don't want to deliberately throw an exception right after a task is started. @param config kafka consumer properties to check
[ "Check", "for", "invalid", "auto", ".", "offset", ".", "reset", "values", ".", "Should", "be", "called", "in", "constructor", "for", "eager", "checking", "before", "submitting", "the", "job", ".", "Note", "that", "none", "is", "also", "considered", "invalid"...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kafka-0.8/src/main/java/org/apache/flink/streaming/connectors/kafka/FlinkKafkaConsumer08.java#L326-L333
train
Validate the auto - offset - reset value.
[ 30522, 2797, 10763, 11675, 9398, 3686, 4887, 3406, 27475, 3388, 6072, 3388, 10175, 5657, 1006, 5144, 9530, 8873, 2290, 1007, 1063, 2345, 5164, 11748, 1027, 9530, 8873, 2290, 1012, 2131, 21572, 4842, 3723, 1006, 7325, 8663, 8873, 2290, 1012,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-optimizer/src/main/java/org/apache/flink/optimizer/dataproperties/GlobalProperties.java
GlobalProperties.filterBySemanticProperties
public GlobalProperties filterBySemanticProperties(SemanticProperties props, int input) { if (props == null) { throw new NullPointerException("SemanticProperties may not be null."); } GlobalProperties gp = new GlobalProperties(); // filter partitioning switch(this.partitioning) { case RANGE_PARTITIONED: // check if ordering is preserved Ordering newOrdering = new Ordering(); for (int i = 0; i < this.ordering.getInvolvedIndexes().size(); i++) { int sourceField = this.ordering.getInvolvedIndexes().get(i); FieldSet targetField = props.getForwardingTargetFields(input, sourceField); if (targetField == null || targetField.size() == 0) { // partitioning is destroyed newOrdering = null; break; } else { // use any field of target fields for now. We should use something like field equivalence sets in the future. if(targetField.size() > 1) { LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index."); } newOrdering.appendOrdering(targetField.toArray()[0], this.ordering.getType(i), this.ordering.getOrder(i)); } } if(newOrdering != null) { gp.partitioning = PartitioningProperty.RANGE_PARTITIONED; gp.ordering = newOrdering; gp.partitioningFields = newOrdering.getInvolvedIndexes(); gp.distribution = this.distribution; } break; case HASH_PARTITIONED: case ANY_PARTITIONING: case CUSTOM_PARTITIONING: FieldList newPartitioningFields = new FieldList(); for (int sourceField : this.partitioningFields) { FieldSet targetField = props.getForwardingTargetFields(input, sourceField); if (targetField == null || targetField.size() == 0) { newPartitioningFields = null; break; } else { // use any field of target fields for now. We should use something like field equivalence sets in the future. if(targetField.size() > 1) { LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. 
Will only use the field with the lowest index."); } newPartitioningFields = newPartitioningFields.addField(targetField.toArray()[0]); } } if(newPartitioningFields != null) { gp.partitioning = this.partitioning; gp.partitioningFields = newPartitioningFields; gp.customPartitioner = this.customPartitioner; } break; case FORCED_REBALANCED: case FULL_REPLICATION: case RANDOM_PARTITIONED: gp.partitioning = this.partitioning; break; default: throw new RuntimeException("Unknown partitioning type."); } // filter unique field combinations if (this.uniqueFieldCombinations != null) { Set<FieldSet> newUniqueFieldCombinations = new HashSet<FieldSet>(); for (FieldSet fieldCombo : this.uniqueFieldCombinations) { FieldSet newFieldCombo = new FieldSet(); for (Integer sourceField : fieldCombo) { FieldSet targetField = props.getForwardingTargetFields(input, sourceField); if (targetField == null || targetField.size() == 0) { newFieldCombo = null; break; } else { // use any field of target fields for now. We should use something like field equivalence sets in the future. if(targetField.size() > 1) { LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index."); } newFieldCombo = newFieldCombo.addField(targetField.toArray()[0]); } } if (newFieldCombo != null) { newUniqueFieldCombinations.add(newFieldCombo); } } if(!newUniqueFieldCombinations.isEmpty()) { gp.uniqueFieldCombinations = newUniqueFieldCombinations; } } return gp; }
java
public GlobalProperties filterBySemanticProperties(SemanticProperties props, int input) { if (props == null) { throw new NullPointerException("SemanticProperties may not be null."); } GlobalProperties gp = new GlobalProperties(); // filter partitioning switch(this.partitioning) { case RANGE_PARTITIONED: // check if ordering is preserved Ordering newOrdering = new Ordering(); for (int i = 0; i < this.ordering.getInvolvedIndexes().size(); i++) { int sourceField = this.ordering.getInvolvedIndexes().get(i); FieldSet targetField = props.getForwardingTargetFields(input, sourceField); if (targetField == null || targetField.size() == 0) { // partitioning is destroyed newOrdering = null; break; } else { // use any field of target fields for now. We should use something like field equivalence sets in the future. if(targetField.size() > 1) { LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index."); } newOrdering.appendOrdering(targetField.toArray()[0], this.ordering.getType(i), this.ordering.getOrder(i)); } } if(newOrdering != null) { gp.partitioning = PartitioningProperty.RANGE_PARTITIONED; gp.ordering = newOrdering; gp.partitioningFields = newOrdering.getInvolvedIndexes(); gp.distribution = this.distribution; } break; case HASH_PARTITIONED: case ANY_PARTITIONING: case CUSTOM_PARTITIONING: FieldList newPartitioningFields = new FieldList(); for (int sourceField : this.partitioningFields) { FieldSet targetField = props.getForwardingTargetFields(input, sourceField); if (targetField == null || targetField.size() == 0) { newPartitioningFields = null; break; } else { // use any field of target fields for now. We should use something like field equivalence sets in the future. if(targetField.size() > 1) { LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. 
Will only use the field with the lowest index."); } newPartitioningFields = newPartitioningFields.addField(targetField.toArray()[0]); } } if(newPartitioningFields != null) { gp.partitioning = this.partitioning; gp.partitioningFields = newPartitioningFields; gp.customPartitioner = this.customPartitioner; } break; case FORCED_REBALANCED: case FULL_REPLICATION: case RANDOM_PARTITIONED: gp.partitioning = this.partitioning; break; default: throw new RuntimeException("Unknown partitioning type."); } // filter unique field combinations if (this.uniqueFieldCombinations != null) { Set<FieldSet> newUniqueFieldCombinations = new HashSet<FieldSet>(); for (FieldSet fieldCombo : this.uniqueFieldCombinations) { FieldSet newFieldCombo = new FieldSet(); for (Integer sourceField : fieldCombo) { FieldSet targetField = props.getForwardingTargetFields(input, sourceField); if (targetField == null || targetField.size() == 0) { newFieldCombo = null; break; } else { // use any field of target fields for now. We should use something like field equivalence sets in the future. if(targetField.size() > 1) { LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index."); } newFieldCombo = newFieldCombo.addField(targetField.toArray()[0]); } } if (newFieldCombo != null) { newUniqueFieldCombinations.add(newFieldCombo); } } if(!newUniqueFieldCombinations.isEmpty()) { gp.uniqueFieldCombinations = newUniqueFieldCombinations; } } return gp; }
[ "public", "GlobalProperties", "filterBySemanticProperties", "(", "SemanticProperties", "props", ",", "int", "input", ")", "{", "if", "(", "props", "==", "null", ")", "{", "throw", "new", "NullPointerException", "(", "\"SemanticProperties may not be null.\"", ")", ";",...
Filters these GlobalProperties by the fields that are forwarded to the output as described by the SemanticProperties. @param props The semantic properties holding information about forwarded fields. @param input The index of the input. @return The filtered GlobalProperties
[ "Filters", "these", "GlobalProperties", "by", "the", "fields", "that", "are", "forwarded", "to", "the", "output", "as", "described", "by", "the", "SemanticProperties", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-optimizer/src/main/java/org/apache/flink/optimizer/dataproperties/GlobalProperties.java#L280-L381
train
Filters the global properties by semantic properties.
[ 30522, 2270, 3795, 21572, 4842, 7368, 11307, 3762, 3366, 2386, 4588, 21572, 4842, 7368, 1006, 21641, 21572, 4842, 7368, 24387, 1010, 20014, 7953, 1007, 1063, 2065, 1006, 24387, 1027, 1027, 19701, 1007, 1063, 5466, 2047, 19701, 8400, 7869, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
sql/core/src/main/java/org/apache/spark/sql/vectorized/ColumnVector.java
ColumnVector.getBooleans
public boolean[] getBooleans(int rowId, int count) { boolean[] res = new boolean[count]; for (int i = 0; i < count; i++) { res[i] = getBoolean(rowId + i); } return res; }
java
public boolean[] getBooleans(int rowId, int count) { boolean[] res = new boolean[count]; for (int i = 0; i < count; i++) { res[i] = getBoolean(rowId + i); } return res; }
[ "public", "boolean", "[", "]", "getBooleans", "(", "int", "rowId", ",", "int", "count", ")", "{", "boolean", "[", "]", "res", "=", "new", "boolean", "[", "count", "]", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "count", ";", "i", "++...
Gets boolean type values from [rowId, rowId + count). The return values for the null slots are undefined and can be anything.
[ "Gets", "boolean", "type", "values", "from", "[", "rowId", "rowId", "+", "count", ")", ".", "The", "return", "values", "for", "the", "null", "slots", "are", "undefined", "and", "can", "be", "anything", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/core/src/main/java/org/apache/spark/sql/vectorized/ColumnVector.java#L92-L98
train
Gets the boolean values from the row with the given row ID and count.
[ 30522, 2270, 22017, 20898, 1031, 1033, 2131, 5092, 9890, 6962, 1006, 20014, 5216, 3593, 1010, 20014, 4175, 1007, 1063, 22017, 20898, 1031, 1033, 24501, 1027, 2047, 22017, 20898, 1031, 4175, 1033, 1025, 2005, 1006, 20014, 1045, 1027, 1014, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/AbstractRocksDBRestoreOperation.java
AbstractRocksDBRestoreOperation.close
@Override public void close() { IOUtils.closeQuietly(defaultColumnFamilyHandle); IOUtils.closeQuietly(nativeMetricMonitor); IOUtils.closeQuietly(db); // Making sure the already created column family options will be closed columnFamilyDescriptors.forEach((cfd) -> IOUtils.closeQuietly(cfd.getOptions())); }
java
@Override public void close() { IOUtils.closeQuietly(defaultColumnFamilyHandle); IOUtils.closeQuietly(nativeMetricMonitor); IOUtils.closeQuietly(db); // Making sure the already created column family options will be closed columnFamilyDescriptors.forEach((cfd) -> IOUtils.closeQuietly(cfd.getOptions())); }
[ "@", "Override", "public", "void", "close", "(", ")", "{", "IOUtils", ".", "closeQuietly", "(", "defaultColumnFamilyHandle", ")", ";", "IOUtils", ".", "closeQuietly", "(", "nativeMetricMonitor", ")", ";", "IOUtils", ".", "closeQuietly", "(", "db", ")", ";", ...
Necessary clean up iff restore operation failed.
[ "Necessary", "clean", "up", "iff", "restore", "operation", "failed", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/AbstractRocksDBRestoreOperation.java#L206-L213
train
Closes all the underlying databases.
[ 30522, 1030, 2058, 15637, 2270, 11675, 2485, 1006, 1007, 1063, 22834, 21823, 4877, 1012, 2485, 15549, 3388, 2135, 1006, 12398, 25778, 2819, 2078, 7011, 4328, 2135, 11774, 2571, 1007, 1025, 22834, 21823, 4877, 1012, 30524, 15660, 1007, 1025, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/ByteHelper.java
ByteHelper.readUnsignedIntLittleEndian
public static long readUnsignedIntLittleEndian(byte[] data, int index) { long result = (long) (data[index] & 0xFF) | (long) ((data[index + 1] & 0xFF) << 8) | (long) ((data[index + 2] & 0xFF) << 16) | (long) ((data[index + 3] & 0xFF) << 24); return result; }
java
public static long readUnsignedIntLittleEndian(byte[] data, int index) { long result = (long) (data[index] & 0xFF) | (long) ((data[index + 1] & 0xFF) << 8) | (long) ((data[index + 2] & 0xFF) << 16) | (long) ((data[index + 3] & 0xFF) << 24); return result; }
[ "public", "static", "long", "readUnsignedIntLittleEndian", "(", "byte", "[", "]", "data", ",", "int", "index", ")", "{", "long", "result", "=", "(", "long", ")", "(", "data", "[", "index", "]", "&", "0xFF", ")", "|", "(", "long", ")", "(", "(", "da...
Read 4 bytes in Little-endian byte order. @param data, the original byte array @param index, start to read from. @return
[ "Read", "4", "bytes", "in", "Little", "-", "endian", "byte", "order", "." ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/ByteHelper.java#L45-L49
train
Read an unsigned int from the byte array at the given index in big endian order.
[ 30522, 2270, 10763, 2146, 3191, 4609, 5332, 19225, 18447, 15909, 9286, 10497, 2937, 1006, 24880, 1031, 1033, 2951, 1010, 20014, 5950, 1007, 1063, 2146, 2765, 1027, 1006, 2146, 1007, 1006, 2951, 1031, 5950, 1033, 1004, 1014, 2595, 4246, 1007...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jms/DefaultJmsListenerContainerFactoryConfigurer.java
DefaultJmsListenerContainerFactoryConfigurer.configure
public void configure(DefaultJmsListenerContainerFactory factory, ConnectionFactory connectionFactory) { Assert.notNull(factory, "Factory must not be null"); Assert.notNull(connectionFactory, "ConnectionFactory must not be null"); factory.setConnectionFactory(connectionFactory); factory.setPubSubDomain(this.jmsProperties.isPubSubDomain()); if (this.transactionManager != null) { factory.setTransactionManager(this.transactionManager); } else { factory.setSessionTransacted(true); } if (this.destinationResolver != null) { factory.setDestinationResolver(this.destinationResolver); } if (this.messageConverter != null) { factory.setMessageConverter(this.messageConverter); } JmsProperties.Listener listener = this.jmsProperties.getListener(); factory.setAutoStartup(listener.isAutoStartup()); if (listener.getAcknowledgeMode() != null) { factory.setSessionAcknowledgeMode(listener.getAcknowledgeMode().getMode()); } String concurrency = listener.formatConcurrency(); if (concurrency != null) { factory.setConcurrency(concurrency); } }
java
public void configure(DefaultJmsListenerContainerFactory factory, ConnectionFactory connectionFactory) { Assert.notNull(factory, "Factory must not be null"); Assert.notNull(connectionFactory, "ConnectionFactory must not be null"); factory.setConnectionFactory(connectionFactory); factory.setPubSubDomain(this.jmsProperties.isPubSubDomain()); if (this.transactionManager != null) { factory.setTransactionManager(this.transactionManager); } else { factory.setSessionTransacted(true); } if (this.destinationResolver != null) { factory.setDestinationResolver(this.destinationResolver); } if (this.messageConverter != null) { factory.setMessageConverter(this.messageConverter); } JmsProperties.Listener listener = this.jmsProperties.getListener(); factory.setAutoStartup(listener.isAutoStartup()); if (listener.getAcknowledgeMode() != null) { factory.setSessionAcknowledgeMode(listener.getAcknowledgeMode().getMode()); } String concurrency = listener.formatConcurrency(); if (concurrency != null) { factory.setConcurrency(concurrency); } }
[ "public", "void", "configure", "(", "DefaultJmsListenerContainerFactory", "factory", ",", "ConnectionFactory", "connectionFactory", ")", "{", "Assert", ".", "notNull", "(", "factory", ",", "\"Factory must not be null\"", ")", ";", "Assert", ".", "notNull", "(", "conne...
Configure the specified jms listener container factory. The factory can be further tuned and default settings can be overridden. @param factory the {@link DefaultJmsListenerContainerFactory} instance to configure @param connectionFactory the {@link ConnectionFactory} to use
[ "Configure", "the", "specified", "jms", "listener", "container", "factory", ".", "The", "factory", "can", "be", "further", "tuned", "and", "default", "settings", "can", "be", "overridden", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jms/DefaultJmsListenerContainerFactoryConfigurer.java#L84-L111
train
Configures the given container factory with the given connection factory.
[ 30522, 2270, 11675, 9530, 8873, 27390, 2063, 1006, 12398, 24703, 14540, 27870, 3678, 8663, 18249, 2121, 21450, 4713, 1010, 4434, 21450, 4434, 21450, 1007, 1063, 20865, 1012, 2025, 11231, 3363, 1006, 4713, 1010, 1000, 4713, 2442, 2025, 2022, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/FileLogFetcher.java
FileLogFetcher.fetch
public boolean fetch() throws IOException { if (limit == 0) { final int len = fin.read(buffer, 0, buffer.length); if (len >= 0) { limit += len; position = 0; origin = 0; /* More binlog to fetch */ return true; } } else if (origin == 0) { if (limit > buffer.length / 2) { ensureCapacity(buffer.length + limit); } final int len = fin.read(buffer, limit, buffer.length - limit); if (len >= 0) { limit += len; /* More binlog to fetch */ return true; } } else if (limit > 0) { if (limit >= FormatDescriptionLogEvent.LOG_EVENT_HEADER_LEN) { int lenPosition = position + 4 + 1 + 4; long eventLen = ((long) (0xff & buffer[lenPosition++])) | ((long) (0xff & buffer[lenPosition++]) << 8) | ((long) (0xff & buffer[lenPosition++]) << 16) | ((long) (0xff & buffer[lenPosition++]) << 24); if (limit >= eventLen) { return true; } else { ensureCapacity((int) eventLen); } } System.arraycopy(buffer, origin, buffer, 0, limit); position -= origin; origin = 0; final int len = fin.read(buffer, limit, buffer.length - limit); if (len >= 0) { limit += len; /* More binlog to fetch */ return true; } } else { /* Should not happen. */ throw new IllegalArgumentException("Unexcepted limit: " + limit); } /* Reach binlog file end */ return false; }
java
public boolean fetch() throws IOException { if (limit == 0) { final int len = fin.read(buffer, 0, buffer.length); if (len >= 0) { limit += len; position = 0; origin = 0; /* More binlog to fetch */ return true; } } else if (origin == 0) { if (limit > buffer.length / 2) { ensureCapacity(buffer.length + limit); } final int len = fin.read(buffer, limit, buffer.length - limit); if (len >= 0) { limit += len; /* More binlog to fetch */ return true; } } else if (limit > 0) { if (limit >= FormatDescriptionLogEvent.LOG_EVENT_HEADER_LEN) { int lenPosition = position + 4 + 1 + 4; long eventLen = ((long) (0xff & buffer[lenPosition++])) | ((long) (0xff & buffer[lenPosition++]) << 8) | ((long) (0xff & buffer[lenPosition++]) << 16) | ((long) (0xff & buffer[lenPosition++]) << 24); if (limit >= eventLen) { return true; } else { ensureCapacity((int) eventLen); } } System.arraycopy(buffer, origin, buffer, 0, limit); position -= origin; origin = 0; final int len = fin.read(buffer, limit, buffer.length - limit); if (len >= 0) { limit += len; /* More binlog to fetch */ return true; } } else { /* Should not happen. */ throw new IllegalArgumentException("Unexcepted limit: " + limit); } /* Reach binlog file end */ return false; }
[ "public", "boolean", "fetch", "(", ")", "throws", "IOException", "{", "if", "(", "limit", "==", "0", ")", "{", "final", "int", "len", "=", "fin", ".", "read", "(", "buffer", ",", "0", ",", "buffer", ".", "length", ")", ";", "if", "(", "len", ">="...
{@inheritDoc} @see com.taobao.tddl.dbsync.binlog.LogFetcher#fetch()
[ "{", "@inheritDoc", "}" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/FileLogFetcher.java#L110-L163
train
Fetches the next N bytes of data from the underlying binary file.
[ 30522, 2270, 22017, 20898, 18584, 1006, 1007, 11618, 22834, 10288, 24422, 1063, 2065, 1006, 5787, 1027, 1027, 1014, 1007, 1063, 2345, 20014, 18798, 1027, 10346, 1012, 3191, 1006, 17698, 1010, 1014, 1010, 17698, 1012, 3091, 1007, 1025, 2065, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http/src/main/java/io/netty/handler/codec/http/ServerCookieEncoder.java
ServerCookieEncoder.encode
@Deprecated public static String encode(Cookie cookie) { return io.netty.handler.codec.http.cookie.ServerCookieEncoder.LAX.encode(cookie); }
java
@Deprecated public static String encode(Cookie cookie) { return io.netty.handler.codec.http.cookie.ServerCookieEncoder.LAX.encode(cookie); }
[ "@", "Deprecated", "public", "static", "String", "encode", "(", "Cookie", "cookie", ")", "{", "return", "io", ".", "netty", ".", "handler", ".", "codec", ".", "http", ".", "cookie", ".", "ServerCookieEncoder", ".", "LAX", ".", "encode", "(", "cookie", ")...
Encodes the specified cookie into a Set-Cookie header value. @param cookie the cookie @return a single Set-Cookie header value
[ "Encodes", "the", "specified", "cookie", "into", "a", "Set", "-", "Cookie", "header", "value", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http/src/main/java/io/netty/handler/codec/http/ServerCookieEncoder.java#L62-L65
train
Encode a Cookie object to a Base64 string.
[ 30522, 1030, 2139, 28139, 12921, 2270, 10763, 5164, 4372, 16044, 1006, 17387, 17387, 1007, 1063, 2709, 22834, 1012, 5658, 3723, 1012, 28213, 1012, 3642, 2278, 1012, 8299, 1012, 17387, 1012, 8241, 3597, 23212, 12129, 16044, 2099, 1012, 27327, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
Configuration.getInteger
public int getInteger(String key, int defaultValue) { Object o = getRawValue(key); if (o == null) { return defaultValue; } return convertToInt(o, defaultValue); }
java
public int getInteger(String key, int defaultValue) { Object o = getRawValue(key); if (o == null) { return defaultValue; } return convertToInt(o, defaultValue); }
[ "public", "int", "getInteger", "(", "String", "key", ",", "int", "defaultValue", ")", "{", "Object", "o", "=", "getRawValue", "(", "key", ")", ";", "if", "(", "o", "==", "null", ")", "{", "return", "defaultValue", ";", "}", "return", "convertToInt", "(...
Returns the value associated with the given key as an integer. @param key the key pointing to the associated value @param defaultValue the default value which is returned in case there is no value associated with the given key @return the (default) value associated with the given key
[ "Returns", "the", "value", "associated", "with", "the", "given", "key", "as", "an", "integer", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java#L204-L211
train
Returns the integer value for the given key. The defaultValue is returned if the key does not exist or if the value is not an integer.
[ 30522, 2270, 20014, 2131, 18447, 26320, 1006, 5164, 3145, 1010, 20014, 12398, 10175, 5657, 1007, 1063, 4874, 1051, 1027, 2131, 2527, 2860, 10175, 5657, 1006, 3145, 1007, 1025, 2065, 1006, 1051, 1027, 1027, 19701, 1007, 1063, 2709, 12398, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/corpus/dictionary/DictionaryMaker.java
DictionaryMaker.addNotCombine
public void addNotCombine(Item item) { Item innerItem = trie.get(item.key); if (innerItem == null) { innerItem = item; trie.put(innerItem.key, innerItem); } }
java
public void addNotCombine(Item item) { Item innerItem = trie.get(item.key); if (innerItem == null) { innerItem = item; trie.put(innerItem.key, innerItem); } }
[ "public", "void", "addNotCombine", "(", "Item", "item", ")", "{", "Item", "innerItem", "=", "trie", ".", "get", "(", "item", ".", "key", ")", ";", "if", "(", "innerItem", "==", "null", ")", "{", "innerItem", "=", "item", ";", "trie", ".", "put", "(...
插入条目,但是不合并,如果已有则忽略 @param item
[ "插入条目,但是不合并,如果已有则忽略" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/dictionary/DictionaryMaker.java#L205-L213
train
Add a non - combine item to the trie.
[ 30522, 2270, 11675, 5587, 17048, 18274, 3170, 1006, 8875, 8875, 1007, 1063, 8875, 5110, 4221, 2213, 1027, 13012, 2063, 1012, 2131, 1006, 8875, 1012, 3145, 1007, 1025, 2065, 1006, 5110, 4221, 2213, 1027, 1027, 19701, 1007, 1063, 5110, 4221, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
FileSystem.initialize
public static void initialize( Configuration config, PluginManager pluginManager) throws IllegalConfigurationException { LOCK.lock(); try { // make sure file systems are re-instantiated after re-configuration CACHE.clear(); FS_FACTORIES.clear(); Collection<Supplier<Iterator<FileSystemFactory>>> factorySuppliers = new ArrayList<>(2); factorySuppliers.add(() -> ServiceLoader.load(FileSystemFactory.class).iterator()); if (pluginManager != null) { factorySuppliers.add(() -> pluginManager.load(FileSystemFactory.class)); } final List<FileSystemFactory> fileSystemFactories = loadFileSystemFactories(factorySuppliers); // configure all file system factories for (FileSystemFactory factory : fileSystemFactories) { factory.configure(config); String scheme = factory.getScheme(); FileSystemFactory fsf = ConnectionLimitingFactory.decorateIfLimited(factory, scheme, config); FS_FACTORIES.put(scheme, fsf); } // configure the default (fallback) factory FALLBACK_FACTORY.configure(config); // also read the default file system scheme final String stringifiedUri = config.getString(CoreOptions.DEFAULT_FILESYSTEM_SCHEME, null); if (stringifiedUri == null) { defaultScheme = null; } else { try { defaultScheme = new URI(stringifiedUri); } catch (URISyntaxException e) { throw new IllegalConfigurationException("The default file system scheme ('" + CoreOptions.DEFAULT_FILESYSTEM_SCHEME + "') is invalid: " + stringifiedUri, e); } } } finally { LOCK.unlock(); } }
java
public static void initialize( Configuration config, PluginManager pluginManager) throws IllegalConfigurationException { LOCK.lock(); try { // make sure file systems are re-instantiated after re-configuration CACHE.clear(); FS_FACTORIES.clear(); Collection<Supplier<Iterator<FileSystemFactory>>> factorySuppliers = new ArrayList<>(2); factorySuppliers.add(() -> ServiceLoader.load(FileSystemFactory.class).iterator()); if (pluginManager != null) { factorySuppliers.add(() -> pluginManager.load(FileSystemFactory.class)); } final List<FileSystemFactory> fileSystemFactories = loadFileSystemFactories(factorySuppliers); // configure all file system factories for (FileSystemFactory factory : fileSystemFactories) { factory.configure(config); String scheme = factory.getScheme(); FileSystemFactory fsf = ConnectionLimitingFactory.decorateIfLimited(factory, scheme, config); FS_FACTORIES.put(scheme, fsf); } // configure the default (fallback) factory FALLBACK_FACTORY.configure(config); // also read the default file system scheme final String stringifiedUri = config.getString(CoreOptions.DEFAULT_FILESYSTEM_SCHEME, null); if (stringifiedUri == null) { defaultScheme = null; } else { try { defaultScheme = new URI(stringifiedUri); } catch (URISyntaxException e) { throw new IllegalConfigurationException("The default file system scheme ('" + CoreOptions.DEFAULT_FILESYSTEM_SCHEME + "') is invalid: " + stringifiedUri, e); } } } finally { LOCK.unlock(); } }
[ "public", "static", "void", "initialize", "(", "Configuration", "config", ",", "PluginManager", "pluginManager", ")", "throws", "IllegalConfigurationException", "{", "LOCK", ".", "lock", "(", ")", ";", "try", "{", "// make sure file systems are re-instantiated after re-co...
Initializes the shared file system settings. <p>The given configuration is passed to each file system factory to initialize the respective file systems. Because the configuration of file systems may be different subsequent to the call of this method, this method clears the file system instance cache. <p>This method also reads the default file system URI from the configuration key {@link CoreOptions#DEFAULT_FILESYSTEM_SCHEME}. All calls to {@link FileSystem#get(URI)} where the URI has no scheme will be interpreted as relative to that URI. As an example, assume the default file system URI is set to {@code 'hdfs://localhost:9000/'}. A file path of {@code '/user/USERNAME/in.txt'} is interpreted as {@code 'hdfs://localhost:9000/user/USERNAME/in.txt'}. @param config the configuration from where to fetch the parameter. @param pluginManager optional plugin manager that is used to initialized filesystems provided as plugins.
[ "Initializes", "the", "shared", "file", "system", "settings", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java#L282-L331
train
Initializes the configuration.
[ 30522, 2270, 10763, 11675, 3988, 4697, 1006, 9563, 9530, 8873, 2290, 1010, 13354, 2378, 24805, 4590, 13354, 2378, 24805, 4590, 1007, 11618, 6206, 8663, 8873, 27390, 3370, 10288, 24422, 1063, 5843, 1012, 5843, 1006, 1007, 1025, 3046, 1063, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
core/src/main/java/org/apache/spark/util/collection/TimSort.java
TimSort.sort
public void sort(Buffer a, int lo, int hi, Comparator<? super K> c) { assert c != null; int nRemaining = hi - lo; if (nRemaining < 2) return; // Arrays of size 0 and 1 are always sorted // If array is small, do a "mini-TimSort" with no merges if (nRemaining < MIN_MERGE) { int initRunLen = countRunAndMakeAscending(a, lo, hi, c); binarySort(a, lo, hi, lo + initRunLen, c); return; } /** * March over the array once, left to right, finding natural runs, * extending short natural runs to minRun elements, and merging runs * to maintain stack invariant. */ SortState sortState = new SortState(a, c, hi - lo); int minRun = minRunLength(nRemaining); do { // Identify next run int runLen = countRunAndMakeAscending(a, lo, hi, c); // If run is short, extend to min(minRun, nRemaining) if (runLen < minRun) { int force = nRemaining <= minRun ? nRemaining : minRun; binarySort(a, lo, lo + force, lo + runLen, c); runLen = force; } // Push run onto pending-run stack, and maybe merge sortState.pushRun(lo, runLen); sortState.mergeCollapse(); // Advance to find next run lo += runLen; nRemaining -= runLen; } while (nRemaining != 0); // Merge all remaining runs to complete sort assert lo == hi; sortState.mergeForceCollapse(); assert sortState.stackSize == 1; }
java
public void sort(Buffer a, int lo, int hi, Comparator<? super K> c) { assert c != null; int nRemaining = hi - lo; if (nRemaining < 2) return; // Arrays of size 0 and 1 are always sorted // If array is small, do a "mini-TimSort" with no merges if (nRemaining < MIN_MERGE) { int initRunLen = countRunAndMakeAscending(a, lo, hi, c); binarySort(a, lo, hi, lo + initRunLen, c); return; } /** * March over the array once, left to right, finding natural runs, * extending short natural runs to minRun elements, and merging runs * to maintain stack invariant. */ SortState sortState = new SortState(a, c, hi - lo); int minRun = minRunLength(nRemaining); do { // Identify next run int runLen = countRunAndMakeAscending(a, lo, hi, c); // If run is short, extend to min(minRun, nRemaining) if (runLen < minRun) { int force = nRemaining <= minRun ? nRemaining : minRun; binarySort(a, lo, lo + force, lo + runLen, c); runLen = force; } // Push run onto pending-run stack, and maybe merge sortState.pushRun(lo, runLen); sortState.mergeCollapse(); // Advance to find next run lo += runLen; nRemaining -= runLen; } while (nRemaining != 0); // Merge all remaining runs to complete sort assert lo == hi; sortState.mergeForceCollapse(); assert sortState.stackSize == 1; }
[ "public", "void", "sort", "(", "Buffer", "a", ",", "int", "lo", ",", "int", "hi", ",", "Comparator", "<", "?", "super", "K", ">", "c", ")", "{", "assert", "c", "!=", "null", ";", "int", "nRemaining", "=", "hi", "-", "lo", ";", "if", "(", "nRema...
A stable, adaptive, iterative mergesort that requires far fewer than n lg(n) comparisons when running on partially sorted arrays, while offering performance comparable to a traditional mergesort when run on random arrays. Like all proper mergesorts, this sort is stable and runs O(n log n) time (worst case). In the worst case, this sort requires temporary storage space for n/2 object references; in the best case, it requires only a small constant amount of space. This implementation was adapted from Tim Peters's list sort for Python, which is described in detail here: http://svn.python.org/projects/python/trunk/Objects/listsort.txt Tim's C code may be found here: http://svn.python.org/projects/python/trunk/Objects/listobject.c The underlying techniques are described in this paper (and may have even earlier origins): "Optimistic Sorting and Information Theoretic Complexity" Peter McIlroy SODA (Fourth Annual ACM-SIAM Symposium on Discrete Algorithms), pp 467-474, Austin, Texas, 25-27 January 1993. While the API to this class consists solely of static methods, it is (privately) instantiable; a TimSort instance holds the state of an ongoing sort, assuming the input array is large enough to warrant the full-blown TimSort. Small arrays are sorted in place, using a binary insertion sort. @author Josh Bloch
[ "A", "stable", "adaptive", "iterative", "mergesort", "that", "requires", "far", "fewer", "than", "n", "lg", "(", "n", ")", "comparisons", "when", "running", "on", "partially", "sorted", "arrays", "while", "offering", "performance", "comparable", "to", "a", "tr...
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/core/src/main/java/org/apache/spark/util/collection/TimSort.java#L119-L164
train
Sort the given array of elements in the given order.
[ 30522, 2270, 11675, 4066, 1006, 17698, 1037, 1010, 20014, 8840, 1010, 20014, 7632, 1010, 4012, 28689, 4263, 1026, 1029, 3565, 1047, 1028, 1039, 1007, 1063, 20865, 1039, 999, 1027, 19701, 1025, 20014, 17212, 14545, 24002, 1027, 7632, 1011, 8...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-tools/spring-boot-maven-plugin/src/main/java/org/springframework/boot/maven/SpringApplicationAdminClient.java
SpringApplicationAdminClient.stop
public void stop() throws MojoExecutionException, IOException, InstanceNotFoundException { try { this.connection.invoke(this.objectName, "shutdown", null, null); } catch (ReflectionException ex) { throw new MojoExecutionException("Shutdown failed", ex.getCause()); } catch (MBeanException ex) { throw new MojoExecutionException("Could not invoke shutdown operation", ex); } }
java
public void stop() throws MojoExecutionException, IOException, InstanceNotFoundException { try { this.connection.invoke(this.objectName, "shutdown", null, null); } catch (ReflectionException ex) { throw new MojoExecutionException("Shutdown failed", ex.getCause()); } catch (MBeanException ex) { throw new MojoExecutionException("Could not invoke shutdown operation", ex); } }
[ "public", "void", "stop", "(", ")", "throws", "MojoExecutionException", ",", "IOException", ",", "InstanceNotFoundException", "{", "try", "{", "this", ".", "connection", ".", "invoke", "(", "this", ".", "objectName", ",", "\"shutdown\"", ",", "null", ",", "nul...
Stop the application managed by this instance. @throws MojoExecutionException if the JMX service could not be contacted @throws IOException if an I/O error occurs @throws InstanceNotFoundException if the lifecycle mbean cannot be found
[ "Stop", "the", "application", "managed", "by", "this", "instance", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-tools/spring-boot-maven-plugin/src/main/java/org/springframework/boot/maven/SpringApplicationAdminClient.java#L87-L98
train
Stop the managed object.
[ 30522, 2270, 11675, 2644, 1006, 1007, 11618, 28017, 10288, 8586, 13700, 10288, 24422, 1010, 22834, 10288, 24422, 1010, 6013, 17048, 14876, 8630, 10288, 24422, 1063, 3046, 1063, 2023, 1012, 4434, 1012, 1999, 6767, 3489, 1006, 2023, 1012, 4874,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
ConfigOption.withFallbackKeys
public ConfigOption<T> withFallbackKeys(String... fallbackKeys) { final Stream<FallbackKey> newFallbackKeys = Arrays.stream(fallbackKeys).map(FallbackKey::createFallbackKey); final Stream<FallbackKey> currentAlternativeKeys = Arrays.stream(this.fallbackKeys); // put fallback keys first so that they are prioritized final FallbackKey[] mergedAlternativeKeys = Stream.concat(newFallbackKeys, currentAlternativeKeys) .toArray(FallbackKey[]::new); return new ConfigOption<>(key, description, defaultValue, mergedAlternativeKeys); }
java
public ConfigOption<T> withFallbackKeys(String... fallbackKeys) { final Stream<FallbackKey> newFallbackKeys = Arrays.stream(fallbackKeys).map(FallbackKey::createFallbackKey); final Stream<FallbackKey> currentAlternativeKeys = Arrays.stream(this.fallbackKeys); // put fallback keys first so that they are prioritized final FallbackKey[] mergedAlternativeKeys = Stream.concat(newFallbackKeys, currentAlternativeKeys) .toArray(FallbackKey[]::new); return new ConfigOption<>(key, description, defaultValue, mergedAlternativeKeys); }
[ "public", "ConfigOption", "<", "T", ">", "withFallbackKeys", "(", "String", "...", "fallbackKeys", ")", "{", "final", "Stream", "<", "FallbackKey", ">", "newFallbackKeys", "=", "Arrays", ".", "stream", "(", "fallbackKeys", ")", ".", "map", "(", "FallbackKey", ...
Creates a new config option, using this option's key and default value, and adding the given fallback keys. <p>When obtaining a value from the configuration via {@link Configuration#getValue(ConfigOption)}, the fallback keys will be checked in the order provided to this method. The first key for which a value is found will be used - that value will be returned. @param fallbackKeys The fallback keys, in the order in which they should be checked. @return A new config options, with the given fallback keys.
[ "Creates", "a", "new", "config", "option", "using", "this", "option", "s", "key", "and", "default", "value", "and", "adding", "the", "given", "fallback", "keys", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java#L119-L127
train
Create a new option with the given fallback keys.
[ 30522, 2270, 9530, 8873, 3995, 16790, 1026, 1056, 1028, 2007, 13976, 5963, 14839, 2015, 1006, 5164, 1012, 1012, 1012, 2991, 5963, 14839, 2015, 1007, 1063, 2345, 5460, 1026, 2991, 5963, 14839, 1028, 2047, 13976, 5963, 14839, 2015, 1027, 2744...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-extra/src/main/java/cn/hutool/extra/template/engine/velocity/SimpleStringResourceLoader.java
SimpleStringResourceLoader.getResourceStream
public InputStream getResourceStream(String source) throws ResourceNotFoundException { return IoUtil.toStream(source, CharsetUtil.CHARSET_UTF_8); }
java
public InputStream getResourceStream(String source) throws ResourceNotFoundException { return IoUtil.toStream(source, CharsetUtil.CHARSET_UTF_8); }
[ "public", "InputStream", "getResourceStream", "(", "String", "source", ")", "throws", "ResourceNotFoundException", "{", "return", "IoUtil", ".", "toStream", "(", "source", ",", "CharsetUtil", ".", "CHARSET_UTF_8", ")", ";", "}" ]
获取资源流 @param source 字符串模板 @return 流 @throws ResourceNotFoundException 资源未找到
[ "获取资源流" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-extra/src/main/java/cn/hutool/extra/template/engine/velocity/SimpleStringResourceLoader.java#L35-L37
train
Returns an InputStream for the given source string.
[ 30522, 2270, 20407, 25379, 2131, 6072, 8162, 9623, 25379, 1006, 5164, 3120, 1007, 11618, 7692, 17048, 14876, 8630, 10288, 24422, 1063, 2709, 22834, 21823, 2140, 1012, 2000, 21422, 1006, 3120, 1010, 25869, 13462, 21823, 2140, 1012, 25869, 1346...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/iterative/concurrent/Broker.java
Broker.get
public V get(String key) { try { BlockingQueue<V> queue = retrieveSharedQueue(key); V objToShare = queue.take(); if (!queue.offer(objToShare)) { throw new RuntimeException("Error: Concurrent modification of the broker slot for key '" + key + "'."); } return objToShare; } catch (InterruptedException e) { throw new RuntimeException(e); } }
java
public V get(String key) { try { BlockingQueue<V> queue = retrieveSharedQueue(key); V objToShare = queue.take(); if (!queue.offer(objToShare)) { throw new RuntimeException("Error: Concurrent modification of the broker slot for key '" + key + "'."); } return objToShare; } catch (InterruptedException e) { throw new RuntimeException(e); } }
[ "public", "V", "get", "(", "String", "key", ")", "{", "try", "{", "BlockingQueue", "<", "V", ">", "queue", "=", "retrieveSharedQueue", "(", "key", ")", ";", "V", "objToShare", "=", "queue", ".", "take", "(", ")", ";", "if", "(", "!", "queue", ".", ...
Blocking retrieval and removal of the object to share.
[ "Blocking", "retrieval", "and", "removal", "of", "the", "object", "to", "share", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/concurrent/Broker.java#L59-L70
train
Retrieve the object from the shared queue.
[ 30522, 2270, 1058, 2131, 1006, 5164, 3145, 1007, 1063, 3046, 1063, 10851, 4226, 5657, 1026, 1058, 1028, 24240, 1027, 12850, 7377, 5596, 4226, 5657, 1006, 3145, 1007, 1025, 1058, 27885, 3501, 13122, 8167, 2063, 1027, 24240, 1012, 2202, 1006,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/SharedStateRegistry.java
SharedStateRegistry.registerAll
public void registerAll(Iterable<? extends CompositeStateHandle> stateHandles) { if (stateHandles == null) { return; } synchronized (registeredStates) { for (CompositeStateHandle stateHandle : stateHandles) { stateHandle.registerSharedStates(this); } } }
java
public void registerAll(Iterable<? extends CompositeStateHandle> stateHandles) { if (stateHandles == null) { return; } synchronized (registeredStates) { for (CompositeStateHandle stateHandle : stateHandles) { stateHandle.registerSharedStates(this); } } }
[ "public", "void", "registerAll", "(", "Iterable", "<", "?", "extends", "CompositeStateHandle", ">", "stateHandles", ")", "{", "if", "(", "stateHandles", "==", "null", ")", "{", "return", ";", "}", "synchronized", "(", "registeredStates", ")", "{", "for", "("...
Register given shared states in the registry. @param stateHandles The shared states to register.
[ "Register", "given", "shared", "states", "in", "the", "registry", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/state/SharedStateRegistry.java#L172-L183
train
Registers all the given composite state handles with this state manager.
[ 30522, 2270, 11675, 4236, 8095, 1006, 2009, 6906, 3468, 1026, 1029, 8908, 12490, 9153, 2618, 11774, 2571, 1028, 2110, 11774, 4244, 1007, 1063, 2065, 1006, 2110, 11774, 4244, 1027, 1027, 19701, 1007, 1063, 2709, 1025, 1065, 25549, 1006, 5068...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java
NumberUtil.div
public static BigDecimal div(BigDecimal v1, BigDecimal v2, int scale, RoundingMode roundingMode) { Assert.notNull(v2, "Divisor must be not null !"); if (null == v1) { return BigDecimal.ZERO; } if (scale < 0) { scale = -scale; } return v1.divide(v2, scale, roundingMode); }
java
public static BigDecimal div(BigDecimal v1, BigDecimal v2, int scale, RoundingMode roundingMode) { Assert.notNull(v2, "Divisor must be not null !"); if (null == v1) { return BigDecimal.ZERO; } if (scale < 0) { scale = -scale; } return v1.divide(v2, scale, roundingMode); }
[ "public", "static", "BigDecimal", "div", "(", "BigDecimal", "v1", ",", "BigDecimal", "v2", ",", "int", "scale", ",", "RoundingMode", "roundingMode", ")", "{", "Assert", ".", "notNull", "(", "v2", ",", "\"Divisor must be not null !\"", ")", ";", "if", "(", "n...
提供(相对)精确的除法运算,当发生除不尽的情况时,由scale指定精确度 @param v1 被除数 @param v2 除数 @param scale 精确度,如果为负值,取绝对值 @param roundingMode 保留小数的模式 {@link RoundingMode} @return 两个参数的商 @since 3.0.9
[ "提供", "(", "相对", ")", "精确的除法运算", "当发生除不尽的情况时", "由scale指定精确度" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java#L741-L750
train
Divide two BigDecimals.
[ 30522, 2270, 10763, 2502, 3207, 6895, 9067, 4487, 2615, 1006, 2502, 3207, 6895, 9067, 1058, 2487, 1010, 2502, 3207, 6895, 9067, 1058, 2475, 1010, 20014, 4094, 1010, 26939, 5302, 3207, 26939, 5302, 3207, 1007, 1063, 20865, 1012, 2025, 11231,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/ActiveEntity.java
ActiveEntity.update
public ActiveEntity update(String primaryKey) { try { db.update(this, Entity.create().set(primaryKey, this.get(primaryKey))); } catch (SQLException e) { throw new DbRuntimeException(e); } return this; }
java
public ActiveEntity update(String primaryKey) { try { db.update(this, Entity.create().set(primaryKey, this.get(primaryKey))); } catch (SQLException e) { throw new DbRuntimeException(e); } return this; }
[ "public", "ActiveEntity", "update", "(", "String", "primaryKey", ")", "{", "try", "{", "db", ".", "update", "(", "this", ",", "Entity", ".", "create", "(", ")", ".", "set", "(", "primaryKey", ",", "this", ".", "get", "(", "primaryKey", ")", ")", ")",...
根据现有Entity中的条件删除与之匹配的数据库记录 @param primaryKey 主键名 @return this
[ "根据现有Entity中的条件删除与之匹配的数据库记录" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/ActiveEntity.java#L223-L230
train
Updates an existing ActiveEntity with the primary key.
[ 30522, 2270, 3161, 4765, 3012, 10651, 1006, 5164, 3078, 14839, 1007, 1063, 3046, 1063, 16962, 1012, 10651, 1006, 2023, 1010, 9178, 1012, 3443, 1006, 1007, 1012, 2275, 1006, 3078, 14839, 1010, 2023, 1012, 2131, 1006, 3078, 14839, 1007, 1007,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/Session.java
Session.setTransactionIsolation
public void setTransactionIsolation(int level) throws SQLException { if (getConnection().getMetaData().supportsTransactionIsolationLevel(level) == false) { throw new SQLException(StrUtil.format("Transaction isolation [{}] not support!", level)); } getConnection().setTransactionIsolation(level); }
java
public void setTransactionIsolation(int level) throws SQLException { if (getConnection().getMetaData().supportsTransactionIsolationLevel(level) == false) { throw new SQLException(StrUtil.format("Transaction isolation [{}] not support!", level)); } getConnection().setTransactionIsolation(level); }
[ "public", "void", "setTransactionIsolation", "(", "int", "level", ")", "throws", "SQLException", "{", "if", "(", "getConnection", "(", ")", ".", "getMetaData", "(", ")", ".", "supportsTransactionIsolationLevel", "(", "level", ")", "==", "false", ")", "{", "thr...
设置事务的隔离级别<br> Connection.TRANSACTION_NONE 驱动不支持事务<br> Connection.TRANSACTION_READ_UNCOMMITTED 允许脏读、不可重复读和幻读<br> Connection.TRANSACTION_READ_COMMITTED 禁止脏读,但允许不可重复读和幻读<br> Connection.TRANSACTION_REPEATABLE_READ 禁止脏读和不可重复读,单运行幻读<br> Connection.TRANSACTION_SERIALIZABLE 禁止脏读、不可重复读和幻读<br> @param level 隔离级别 @throws SQLException SQL执行异常
[ "设置事务的隔离级别<br", ">" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/Session.java#L246-L251
train
Sets the transaction isolation level.
[ 30522, 2270, 11675, 2275, 6494, 3619, 18908, 3258, 19565, 13490, 1006, 20014, 2504, 1007, 11618, 29296, 10288, 24422, 1063, 2065, 1006, 2131, 8663, 2638, 7542, 1006, 1007, 1012, 2131, 11368, 8447, 2696, 1006, 1007, 1012, 6753, 30524, 2884, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/api/java/tuple/Tuple.java
Tuple.newInstance
public static Tuple newInstance(int arity) { switch (arity) { case 0: return Tuple0.INSTANCE; case 1: return new Tuple1(); case 2: return new Tuple2(); case 3: return new Tuple3(); case 4: return new Tuple4(); case 5: return new Tuple5(); case 6: return new Tuple6(); case 7: return new Tuple7(); case 8: return new Tuple8(); case 9: return new Tuple9(); case 10: return new Tuple10(); case 11: return new Tuple11(); case 12: return new Tuple12(); case 13: return new Tuple13(); case 14: return new Tuple14(); case 15: return new Tuple15(); case 16: return new Tuple16(); case 17: return new Tuple17(); case 18: return new Tuple18(); case 19: return new Tuple19(); case 20: return new Tuple20(); case 21: return new Tuple21(); case 22: return new Tuple22(); case 23: return new Tuple23(); case 24: return new Tuple24(); case 25: return new Tuple25(); default: throw new IllegalArgumentException("The tuple arity must be in [0, " + MAX_ARITY + "]."); } }
java
public static Tuple newInstance(int arity) { switch (arity) { case 0: return Tuple0.INSTANCE; case 1: return new Tuple1(); case 2: return new Tuple2(); case 3: return new Tuple3(); case 4: return new Tuple4(); case 5: return new Tuple5(); case 6: return new Tuple6(); case 7: return new Tuple7(); case 8: return new Tuple8(); case 9: return new Tuple9(); case 10: return new Tuple10(); case 11: return new Tuple11(); case 12: return new Tuple12(); case 13: return new Tuple13(); case 14: return new Tuple14(); case 15: return new Tuple15(); case 16: return new Tuple16(); case 17: return new Tuple17(); case 18: return new Tuple18(); case 19: return new Tuple19(); case 20: return new Tuple20(); case 21: return new Tuple21(); case 22: return new Tuple22(); case 23: return new Tuple23(); case 24: return new Tuple24(); case 25: return new Tuple25(); default: throw new IllegalArgumentException("The tuple arity must be in [0, " + MAX_ARITY + "]."); } }
[ "public", "static", "Tuple", "newInstance", "(", "int", "arity", ")", "{", "switch", "(", "arity", ")", "{", "case", "0", ":", "return", "Tuple0", ".", "INSTANCE", ";", "case", "1", ":", "return", "new", "Tuple1", "(", ")", ";", "case", "2", ":", "...
GENERATED FROM org.apache.flink.api.java.tuple.TupleGenerator.
[ "GENERATED", "FROM", "org", ".", "apache", ".", "flink", ".", "api", ".", "java", ".", "tuple", ".", "TupleGenerator", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/java/tuple/Tuple.java#L116-L146
train
Creates a new Tuple of the specified arity.
[ 30522, 2270, 10763, 10722, 10814, 2047, 7076, 26897, 1006, 20014, 10488, 3723, 1007, 1063, 6942, 1006, 10488, 3723, 1007, 1063, 2553, 1014, 1024, 2709, 10722, 10814, 2692, 1012, 6013, 1025, 2553, 1015, 1024, 2709, 2047, 10722, 10814, 2487, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/codec/Morse.java
Morse.encode
public String encode(String text) { Assert.notNull(text, "Text should not be null."); text = text.toUpperCase(); final StringBuilder morseBuilder = new StringBuilder(); final int len = text.codePointCount(0, text.length()); for (int i = 0; i < len; i++) { int codePoint = text.codePointAt(i); String word = alphabets.get(codePoint); if (word == null) { word = Integer.toBinaryString(codePoint); } morseBuilder.append(word.replace('0', dit).replace('1', dah)).append(split); } return morseBuilder.toString(); }
java
public String encode(String text) { Assert.notNull(text, "Text should not be null."); text = text.toUpperCase(); final StringBuilder morseBuilder = new StringBuilder(); final int len = text.codePointCount(0, text.length()); for (int i = 0; i < len; i++) { int codePoint = text.codePointAt(i); String word = alphabets.get(codePoint); if (word == null) { word = Integer.toBinaryString(codePoint); } morseBuilder.append(word.replace('0', dit).replace('1', dah)).append(split); } return morseBuilder.toString(); }
[ "public", "String", "encode", "(", "String", "text", ")", "{", "Assert", ".", "notNull", "(", "text", ",", "\"Text should not be null.\"", ")", ";", "text", "=", "text", ".", "toUpperCase", "(", ")", ";", "final", "StringBuilder", "morseBuilder", "=", "new",...
编码 @param text 文本 @return 密文
[ "编码" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/codec/Morse.java#L124-L139
train
Encodes the given text using the ISO 639 alphabet.
[ 30522, 2270, 5164, 4372, 16044, 1006, 5164, 3793, 1007, 1063, 20865, 1012, 2025, 11231, 3363, 1006, 3793, 1010, 1000, 3793, 2323, 2025, 2022, 19701, 1012, 1000, 1007, 1025, 3793, 1027, 3793, 1012, 2000, 29547, 18992, 3366, 1006, 1007, 1025,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESSyncUtil.java
ESSyncUtil.blobToBytes
private static byte[] blobToBytes(Blob blob) { try (InputStream is = blob.getBinaryStream()) { byte[] b = new byte[(int) blob.length()]; if (is.read(b) != -1) { return b; } else { return new byte[0]; } } catch (IOException | SQLException e) { logger.error(e.getMessage()); return null; } }
java
private static byte[] blobToBytes(Blob blob) { try (InputStream is = blob.getBinaryStream()) { byte[] b = new byte[(int) blob.length()]; if (is.read(b) != -1) { return b; } else { return new byte[0]; } } catch (IOException | SQLException e) { logger.error(e.getMessage()); return null; } }
[ "private", "static", "byte", "[", "]", "blobToBytes", "(", "Blob", "blob", ")", "{", "try", "(", "InputStream", "is", "=", "blob", ".", "getBinaryStream", "(", ")", ")", "{", "byte", "[", "]", "b", "=", "new", "byte", "[", "(", "int", ")", "blob", ...
Blob转byte[]
[ "Blob转byte", "[]" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESSyncUtil.java#L237-L249
train
Convert a blob to a byte array.
[ 30522, 2797, 10763, 24880, 1031, 1033, 1038, 4135, 19279, 16429, 17250, 2015, 1006, 1038, 4135, 2497, 1038, 4135, 2497, 1007, 1063, 3046, 1006, 20407, 25379, 2003, 1027, 1038, 4135, 2497, 1012, 2131, 21114, 24769, 25379, 1006, 1007, 1007, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java
ImgUtil.scale
public static void scale(Image srcImg, ImageOutputStream destImageStream, float scale) throws IORuntimeException { writeJpg(scale(srcImg, scale), destImageStream); }
java
public static void scale(Image srcImg, ImageOutputStream destImageStream, float scale) throws IORuntimeException { writeJpg(scale(srcImg, scale), destImageStream); }
[ "public", "static", "void", "scale", "(", "Image", "srcImg", ",", "ImageOutputStream", "destImageStream", ",", "float", "scale", ")", "throws", "IORuntimeException", "{", "writeJpg", "(", "scale", "(", "srcImg", ",", "scale", ")", ",", "destImageStream", ")", ...
缩放图像(按比例缩放)<br> 缩放后默认为jpeg格式,此方法并不关闭流 @param srcImg 源图像来源流 @param destImageStream 缩放后的图像写出到的流 @param scale 缩放比例。比例大于1时为放大,小于1大于0为缩小 @throws IORuntimeException IO异常 @since 3.1.0
[ "缩放图像(按比例缩放)<br", ">", "缩放后默认为jpeg格式,此方法并不关闭流" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java#L143-L145
train
Scales the image using the specified scale factor and writes the result to the image output stream.
[ 30522, 2270, 10763, 11675, 4094, 1006, 3746, 5034, 6895, 24798, 1010, 3746, 5833, 18780, 21422, 4078, 3775, 26860, 21422, 1010, 14257, 4094, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 4339, 3501, 26952, 1006, 4094, 1006, 5034, 6895,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java
BatchTask.initBroadcastInputReaders
protected void initBroadcastInputReaders() throws Exception { final int numBroadcastInputs = this.config.getNumBroadcastInputs(); final MutableReader<?>[] broadcastInputReaders = new MutableReader<?>[numBroadcastInputs]; int currentReaderOffset = config.getNumInputs(); for (int i = 0; i < this.config.getNumBroadcastInputs(); i++) { // ---------------- create the input readers --------------------- // in case where a logical input unions multiple physical inputs, create a union reader final int groupSize = this.config.getBroadcastGroupSize(i); if (groupSize == 1) { // non-union case broadcastInputReaders[i] = new MutableRecordReader<IOReadableWritable>( getEnvironment().getInputGate(currentReaderOffset), getEnvironment().getTaskManagerInfo().getTmpDirectories()); } else if (groupSize > 1){ // union case InputGate[] readers = new InputGate[groupSize]; for (int j = 0; j < groupSize; ++j) { readers[j] = getEnvironment().getInputGate(currentReaderOffset + j); } broadcastInputReaders[i] = new MutableRecordReader<IOReadableWritable>( new UnionInputGate(readers), getEnvironment().getTaskManagerInfo().getTmpDirectories()); } else { throw new Exception("Illegal input group size in task configuration: " + groupSize); } currentReaderOffset += groupSize; } this.broadcastInputReaders = broadcastInputReaders; }
java
protected void initBroadcastInputReaders() throws Exception { final int numBroadcastInputs = this.config.getNumBroadcastInputs(); final MutableReader<?>[] broadcastInputReaders = new MutableReader<?>[numBroadcastInputs]; int currentReaderOffset = config.getNumInputs(); for (int i = 0; i < this.config.getNumBroadcastInputs(); i++) { // ---------------- create the input readers --------------------- // in case where a logical input unions multiple physical inputs, create a union reader final int groupSize = this.config.getBroadcastGroupSize(i); if (groupSize == 1) { // non-union case broadcastInputReaders[i] = new MutableRecordReader<IOReadableWritable>( getEnvironment().getInputGate(currentReaderOffset), getEnvironment().getTaskManagerInfo().getTmpDirectories()); } else if (groupSize > 1){ // union case InputGate[] readers = new InputGate[groupSize]; for (int j = 0; j < groupSize; ++j) { readers[j] = getEnvironment().getInputGate(currentReaderOffset + j); } broadcastInputReaders[i] = new MutableRecordReader<IOReadableWritable>( new UnionInputGate(readers), getEnvironment().getTaskManagerInfo().getTmpDirectories()); } else { throw new Exception("Illegal input group size in task configuration: " + groupSize); } currentReaderOffset += groupSize; } this.broadcastInputReaders = broadcastInputReaders; }
[ "protected", "void", "initBroadcastInputReaders", "(", ")", "throws", "Exception", "{", "final", "int", "numBroadcastInputs", "=", "this", ".", "config", ".", "getNumBroadcastInputs", "(", ")", ";", "final", "MutableReader", "<", "?", ">", "[", "]", "broadcastIn...
Creates the record readers for the extra broadcast inputs as configured by {@link TaskConfig#getNumBroadcastInputs()}. This method requires that the task configuration, the driver, and the user-code class loader are set.
[ "Creates", "the", "record", "readers", "for", "the", "extra", "broadcast", "inputs", "as", "configured", "by", "{", "@link", "TaskConfig#getNumBroadcastInputs", "()", "}", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java#L708-L739
train
Initialize the input readers for the broadcast input.
[ 30522, 5123, 11675, 1999, 4183, 12618, 4215, 10526, 2378, 18780, 16416, 13375, 1006, 1007, 11618, 6453, 1063, 2345, 20014, 15903, 3217, 4215, 10526, 2378, 18780, 2015, 1027, 2023, 1012, 9530, 8873, 2290, 1012, 2131, 19172, 12618, 4215, 10526,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/dependency/perceptron/parser/KBeamArcEagerDependencyParser.java
KBeamArcEagerDependencyParser.train
public static KBeamArcEagerDependencyParser train(String trainCorpus, String devCorpus, String clusterPath, String modelPath) throws InterruptedException, ExecutionException, IOException, ClassNotFoundException { Options options = new Options(); options.train = true; options.inputFile = trainCorpus; options.devPath = devCorpus; options.clusterFile = clusterPath; options.modelFile = modelPath; Main.train(options); return new KBeamArcEagerDependencyParser(modelPath); }
java
public static KBeamArcEagerDependencyParser train(String trainCorpus, String devCorpus, String clusterPath, String modelPath) throws InterruptedException, ExecutionException, IOException, ClassNotFoundException { Options options = new Options(); options.train = true; options.inputFile = trainCorpus; options.devPath = devCorpus; options.clusterFile = clusterPath; options.modelFile = modelPath; Main.train(options); return new KBeamArcEagerDependencyParser(modelPath); }
[ "public", "static", "KBeamArcEagerDependencyParser", "train", "(", "String", "trainCorpus", ",", "String", "devCorpus", ",", "String", "clusterPath", ",", "String", "modelPath", ")", "throws", "InterruptedException", ",", "ExecutionException", ",", "IOException", ",", ...
训练依存句法分析器 @param trainCorpus 训练集 @param devCorpus 开发集 @param clusterPath Brown词聚类文件 @param modelPath 模型储存路径 @throws InterruptedException @throws ExecutionException @throws IOException @throws ClassNotFoundException
[ "训练依存句法分析器" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/dependency/perceptron/parser/KBeamArcEagerDependencyParser.java#L75-L85
train
This method is used to train a classifier on a dataset.
[ 30522, 2270, 10763, 21677, 5243, 7849, 21456, 4590, 3207, 11837, 4181, 5666, 19362, 8043, 3345, 1006, 5164, 3345, 24586, 2271, 1010, 5164, 16475, 24586, 2271, 1010, 5164, 9324, 15069, 1010, 5164, 2944, 15069, 1007, 11618, 7153, 10288, 24422, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-queryable-state/flink-queryable-state-client-java/src/main/java/org/apache/flink/queryablestate/network/messages/MessageSerializer.java
MessageSerializer.deserializeServerFailure
public static Throwable deserializeServerFailure(final ByteBuf buf) throws IOException, ClassNotFoundException { try (ByteBufInputStream bis = new ByteBufInputStream(buf); ObjectInputStream in = new ObjectInputStream(bis)) { return (Throwable) in.readObject(); } }
java
public static Throwable deserializeServerFailure(final ByteBuf buf) throws IOException, ClassNotFoundException { try (ByteBufInputStream bis = new ByteBufInputStream(buf); ObjectInputStream in = new ObjectInputStream(bis)) { return (Throwable) in.readObject(); } }
[ "public", "static", "Throwable", "deserializeServerFailure", "(", "final", "ByteBuf", "buf", ")", "throws", "IOException", ",", "ClassNotFoundException", "{", "try", "(", "ByteBufInputStream", "bis", "=", "new", "ByteBufInputStream", "(", "buf", ")", ";", "ObjectInp...
De-serializes the failure message sent to the {@link org.apache.flink.queryablestate.network.Client} in case of server related errors. <pre> <b>The buffer is expected to be at the correct position.</b> </pre> @param buf The {@link ByteBuf} containing the serialized failure message. @return The failure message.
[ "De", "-", "serializes", "the", "failure", "message", "sent", "to", "the", "{" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-queryable-state/flink-queryable-state-client-java/src/main/java/org/apache/flink/queryablestate/network/messages/MessageSerializer.java#L314-L319
train
Deserialize server failure exception.
[ 30522, 2270, 10763, 5466, 3085, 4078, 11610, 3669, 11254, 2121, 6299, 7011, 4014, 5397, 1006, 2345, 24880, 8569, 2546, 20934, 2546, 1007, 11618, 22834, 10288, 24422, 1010, 2465, 17048, 14876, 8630, 10288, 24422, 1063, 3046, 1006, 24880, 8569,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/convert/Convert.java
Convert.toInt
public static Integer toInt(Object value, Integer defaultValue) { return convert(Integer.class, value, defaultValue); }
java
public static Integer toInt(Object value, Integer defaultValue) { return convert(Integer.class, value, defaultValue); }
[ "public", "static", "Integer", "toInt", "(", "Object", "value", ",", "Integer", "defaultValue", ")", "{", "return", "convert", "(", "Integer", ".", "class", ",", "value", ",", "defaultValue", ")", ";", "}" ]
转换为int<br> 如果给定的值为空,或者转换失败,返回默认值<br> 转换失败不会报错 @param value 被转换的值 @param defaultValue 转换错误时的默认值 @return 结果
[ "转换为int<br", ">", "如果给定的值为空,或者转换失败,返回默认值<br", ">", "转换失败不会报错" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/convert/Convert.java#L221-L223
train
Converts value of type Integer to Integer.
[ 30522, 2270, 10763, 16109, 2000, 18447, 1006, 4874, 3643, 1010, 16109, 12398, 10175, 5657, 1007, 1063, 2709, 10463, 1006, 16109, 1012, 2465, 1010, 3643, 1010, 12398, 10175, 5657, 1007, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
handler/src/main/java/io/netty/handler/ssl/SslHandler.java
SslHandler.applicationProtocol
public String applicationProtocol() { SSLEngine engine = engine(); if (!(engine instanceof ApplicationProtocolAccessor)) { return null; } return ((ApplicationProtocolAccessor) engine).getNegotiatedApplicationProtocol(); }
java
public String applicationProtocol() { SSLEngine engine = engine(); if (!(engine instanceof ApplicationProtocolAccessor)) { return null; } return ((ApplicationProtocolAccessor) engine).getNegotiatedApplicationProtocol(); }
[ "public", "String", "applicationProtocol", "(", ")", "{", "SSLEngine", "engine", "=", "engine", "(", ")", ";", "if", "(", "!", "(", "engine", "instanceof", "ApplicationProtocolAccessor", ")", ")", "{", "return", "null", ";", "}", "return", "(", "(", "Appli...
Returns the name of the current application-level protocol. @return the protocol name or {@code null} if application-level protocol has not been negotiated
[ "Returns", "the", "name", "of", "the", "current", "application", "-", "level", "protocol", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/ssl/SslHandler.java#L615-L622
train
Returns the application protocol of this session.
[ 30522, 2270, 5164, 4646, 21572, 3406, 25778, 1006, 1007, 1063, 7020, 7770, 11528, 2063, 3194, 1027, 3194, 1006, 1007, 1025, 2065, 1006, 999, 1006, 3194, 6013, 11253, 4646, 21572, 3406, 26289, 9468, 7971, 2953, 1007, 1007, 1063, 2709, 19701,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/ReUtil.java
ReUtil.delFirst
public static String delFirst(Pattern pattern, CharSequence content) { if (null == pattern || StrUtil.isBlank(content)) { return StrUtil.str(content); } return pattern.matcher(content).replaceFirst(StrUtil.EMPTY); }
java
public static String delFirst(Pattern pattern, CharSequence content) { if (null == pattern || StrUtil.isBlank(content)) { return StrUtil.str(content); } return pattern.matcher(content).replaceFirst(StrUtil.EMPTY); }
[ "public", "static", "String", "delFirst", "(", "Pattern", "pattern", ",", "CharSequence", "content", ")", "{", "if", "(", "null", "==", "pattern", "||", "StrUtil", ".", "isBlank", "(", "content", ")", ")", "{", "return", "StrUtil", ".", "str", "(", "cont...
删除匹配的第一个内容 @param pattern 正则 @param content 被匹配的内容 @return 删除后剩余的内容
[ "删除匹配的第一个内容" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/ReUtil.java#L294-L300
train
Removes the first occurrence of a pattern from a CharSequence.
[ 30522, 2270, 10763, 5164, 3972, 8873, 12096, 1006, 5418, 5418, 1010, 25869, 3366, 4226, 5897, 4180, 1007, 1063, 2065, 1006, 19701, 1027, 1027, 5418, 1064, 1064, 2358, 22134, 4014, 1012, 2003, 28522, 8950, 1006, 4180, 1007, 1007, 1063, 2709,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-crypto/src/main/java/cn/hutool/crypto/KeyUtil.java
KeyUtil.readCertificate
public static Certificate readCertificate(String type, InputStream in) { try { return getCertificateFactory(type).generateCertificate(in); } catch (CertificateException e) { throw new CryptoException(e); } }
java
public static Certificate readCertificate(String type, InputStream in) { try { return getCertificateFactory(type).generateCertificate(in); } catch (CertificateException e) { throw new CryptoException(e); } }
[ "public", "static", "Certificate", "readCertificate", "(", "String", "type", ",", "InputStream", "in", ")", "{", "try", "{", "return", "getCertificateFactory", "(", "type", ")", ".", "generateCertificate", "(", "in", ")", ";", "}", "catch", "(", "CertificateEx...
读取Certification文件<br> Certification为证书文件<br> see: http://snowolf.iteye.com/blog/391931 @param type 类型,例如X.509 @param in {@link InputStream} 如果想从文件读取.cer文件,使用 {@link FileUtil#getInputStream(java.io.File)} 读取 @return {@link Certificate}
[ "读取Certification文件<br", ">", "Certification为证书文件<br", ">", "see", ":", "http", ":", "//", "snowolf", ".", "iteye", ".", "com", "/", "blog", "/", "391931" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/KeyUtil.java#L693-L699
train
Reads a single certificate from the specified stream.
[ 30522, 2270, 10763, 8196, 3191, 17119, 3775, 8873, 16280, 1006, 5164, 2828, 1010, 20407, 25379, 1999, 1007, 1063, 3046, 1063, 2709, 2131, 17119, 3775, 8873, 16280, 21450, 1006, 2828, 1007, 1012, 9699, 17119, 3775, 8873, 16280, 1006, 1999, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlDateTimeUtils.java
SqlDateTimeUtils.internalToTime
public static java.sql.Time internalToTime(int v, TimeZone tz) { // note that, in this case, can't handle Daylight Saving Time return new java.sql.Time(v - tz.getOffset(v)); }
java
public static java.sql.Time internalToTime(int v, TimeZone tz) { // note that, in this case, can't handle Daylight Saving Time return new java.sql.Time(v - tz.getOffset(v)); }
[ "public", "static", "java", ".", "sql", ".", "Time", "internalToTime", "(", "int", "v", ",", "TimeZone", "tz", ")", "{", "// note that, in this case, can't handle Daylight Saving Time", "return", "new", "java", ".", "sql", ".", "Time", "(", "v", "-", "tz", "."...
Converts the internal representation of a SQL TIME (int) to the Java type used for UDF parameters ({@link java.sql.Time}). <p>The internal int represents the seconds since "00:00:00". When we convert it to {@link java.sql.Time} (time milliseconds since January 1, 1970, 00:00:00 GMT), we need a TimeZone.
[ "Converts", "the", "internal", "representation", "of", "a", "SQL", "TIME", "(", "int", ")", "to", "the", "Java", "type", "used", "for", "UDF", "parameters", "(", "{", "@link", "java", ".", "sql", ".", "Time", "}", ")", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlDateTimeUtils.java#L153-L156
train
Convert an int value to a Time object.
[ 30522, 2270, 10763, 9262, 1012, 29296, 1012, 2051, 4722, 3406, 7292, 1006, 20014, 1058, 1010, 2051, 15975, 1056, 2480, 1007, 1063, 1013, 1013, 3602, 2008, 1010, 1999, 2023, 2553, 1010, 2064, 1005, 1056, 5047, 11695, 7494, 2051, 2709, 2047, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/RocksDBKeyedStateBackend.java
RocksDBKeyedStateBackend.snapshot
@Nonnull @Override public RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot( final long checkpointId, final long timestamp, @Nonnull final CheckpointStreamFactory streamFactory, @Nonnull CheckpointOptions checkpointOptions) throws Exception { long startTime = System.currentTimeMillis(); // flush everything into db before taking a snapshot writeBatchWrapper.flush(); RocksDBSnapshotStrategyBase<K> chosenSnapshotStrategy = checkpointOptions.getCheckpointType().isSavepoint() ? savepointSnapshotStrategy : checkpointSnapshotStrategy; RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshotRunner = chosenSnapshotStrategy.snapshot(checkpointId, timestamp, streamFactory, checkpointOptions); chosenSnapshotStrategy.logSyncCompleted(streamFactory, startTime); return snapshotRunner; }
java
@Nonnull @Override public RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot( final long checkpointId, final long timestamp, @Nonnull final CheckpointStreamFactory streamFactory, @Nonnull CheckpointOptions checkpointOptions) throws Exception { long startTime = System.currentTimeMillis(); // flush everything into db before taking a snapshot writeBatchWrapper.flush(); RocksDBSnapshotStrategyBase<K> chosenSnapshotStrategy = checkpointOptions.getCheckpointType().isSavepoint() ? savepointSnapshotStrategy : checkpointSnapshotStrategy; RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshotRunner = chosenSnapshotStrategy.snapshot(checkpointId, timestamp, streamFactory, checkpointOptions); chosenSnapshotStrategy.logSyncCompleted(streamFactory, startTime); return snapshotRunner; }
[ "@", "Nonnull", "@", "Override", "public", "RunnableFuture", "<", "SnapshotResult", "<", "KeyedStateHandle", ">", ">", "snapshot", "(", "final", "long", "checkpointId", ",", "final", "long", "timestamp", ",", "@", "Nonnull", "final", "CheckpointStreamFactory", "st...
Triggers an asynchronous snapshot of the keyed state backend from RocksDB. This snapshot can be canceled and is also stopped when the backend is closed through {@link #dispose()}. For each backend, this method must always be called by the same thread. @param checkpointId The Id of the checkpoint. @param timestamp The timestamp of the checkpoint. @param streamFactory The factory that we can use for writing our state to streams. @param checkpointOptions Options for how to perform this checkpoint. @return Future to the state handle of the snapshot data. @throws Exception indicating a problem in the synchronous part of the checkpoint.
[ "Triggers", "an", "asynchronous", "snapshot", "of", "the", "keyed", "state", "backend", "from", "RocksDB", ".", "This", "snapshot", "can", "be", "canceled", "and", "is", "also", "stopped", "when", "the", "backend", "is", "closed", "through", "{", "@link", "#...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/RocksDBKeyedStateBackend.java#L418-L440
train
Performs a snapshot of the state represented by the passed in checkpointId and timestamp.
[ 30522, 1030, 2512, 11231, 3363, 1030, 2058, 15637, 2270, 2448, 22966, 11263, 11244, 1026, 20057, 12326, 6072, 11314, 1026, 3145, 2098, 9153, 2618, 11774, 2571, 1028, 1028, 20057, 12326, 1006, 2345, 2146, 26520, 3593, 1010, 2345, 2146, 2335, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http/src/main/java/io/netty/handler/codec/http/HttpHeaders.java
HttpHeaders.getDate
@Deprecated public static Date getDate(HttpMessage message, Date defaultValue) { return getDateHeader(message, HttpHeaderNames.DATE, defaultValue); }
java
@Deprecated public static Date getDate(HttpMessage message, Date defaultValue) { return getDateHeader(message, HttpHeaderNames.DATE, defaultValue); }
[ "@", "Deprecated", "public", "static", "Date", "getDate", "(", "HttpMessage", "message", ",", "Date", "defaultValue", ")", "{", "return", "getDateHeader", "(", "message", ",", "HttpHeaderNames", ".", "DATE", ",", "defaultValue", ")", ";", "}" ]
@deprecated Use {@link #getTimeMillis(CharSequence, long)} instead. Returns the value of the {@code "Date"} header. If there is no such header or the header is not a formatted date, the {@code defaultValue} is returned.
[ "@deprecated", "Use", "{", "@link", "#getTimeMillis", "(", "CharSequence", "long", ")", "}", "instead", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http/src/main/java/io/netty/handler/codec/http/HttpHeaders.java#L1059-L1062
train
Gets the Date value from the message.
[ 30522, 1030, 2139, 28139, 12921, 2270, 10763, 3058, 2131, 13701, 1006, 8299, 7834, 3736, 3351, 4471, 1010, 3058, 12398, 10175, 5657, 1007, 1063, 2709, 2131, 13701, 4974, 2121, 1006, 4471, 1010, 8299, 4974, 11795, 14074, 2015, 1012, 3058, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/Task.java
Task.failExternally
@Override public void failExternally(Throwable cause) { LOG.info("Attempting to fail task externally {} ({}).", taskNameWithSubtask, executionId); cancelOrFailAndCancelInvokable(ExecutionState.FAILED, cause); }
java
@Override public void failExternally(Throwable cause) { LOG.info("Attempting to fail task externally {} ({}).", taskNameWithSubtask, executionId); cancelOrFailAndCancelInvokable(ExecutionState.FAILED, cause); }
[ "@", "Override", "public", "void", "failExternally", "(", "Throwable", "cause", ")", "{", "LOG", ".", "info", "(", "\"Attempting to fail task externally {} ({}).\"", ",", "taskNameWithSubtask", ",", "executionId", ")", ";", "cancelOrFailAndCancelInvokable", "(", "Execut...
Marks task execution failed for an external reason (a reason other than the task code itself throwing an exception). If the task is already in a terminal state (such as FINISHED, CANCELED, FAILED), or if the task is already canceling this does nothing. Otherwise it sets the state to FAILED, and, if the invokable code is running, starts an asynchronous thread that aborts that code. <p>This method never blocks.</p>
[ "Marks", "task", "execution", "failed", "for", "an", "external", "reason", "(", "a", "reason", "other", "than", "the", "task", "code", "itself", "throwing", "an", "exception", ")", ".", "If", "the", "task", "is", "already", "in", "a", "terminal", "state", ...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/Task.java#L1002-L1006
train
Fail the task externally.
[ 30522, 1030, 2058, 15637, 2270, 11675, 8246, 10288, 16451, 3973, 1006, 5466, 3085, 3426, 1007, 1063, 8833, 1012, 18558, 1006, 1000, 7161, 2000, 8246, 4708, 27223, 1063, 1065, 1006, 1063, 1065, 1007, 1012, 1000, 1010, 4708, 18442, 24415, 634...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-actuator-autoconfigure/src/main/java/org/springframework/boot/actuate/autoconfigure/cloudfoundry/servlet/CloudFoundrySecurityService.java
CloudFoundrySecurityService.getUaaUrl
public String getUaaUrl() { if (this.uaaUrl == null) { try { Map<?, ?> response = this.restTemplate .getForObject(this.cloudControllerUrl + "/info", Map.class); this.uaaUrl = (String) response.get("token_endpoint"); } catch (HttpStatusCodeException ex) { throw new CloudFoundryAuthorizationException(Reason.SERVICE_UNAVAILABLE, "Unable to fetch token keys from UAA"); } } return this.uaaUrl; }
java
public String getUaaUrl() { if (this.uaaUrl == null) { try { Map<?, ?> response = this.restTemplate .getForObject(this.cloudControllerUrl + "/info", Map.class); this.uaaUrl = (String) response.get("token_endpoint"); } catch (HttpStatusCodeException ex) { throw new CloudFoundryAuthorizationException(Reason.SERVICE_UNAVAILABLE, "Unable to fetch token keys from UAA"); } } return this.uaaUrl; }
[ "public", "String", "getUaaUrl", "(", ")", "{", "if", "(", "this", ".", "uaaUrl", "==", "null", ")", "{", "try", "{", "Map", "<", "?", ",", "?", ">", "response", "=", "this", ".", "restTemplate", ".", "getForObject", "(", "this", ".", "cloudControlle...
Return the URL of the UAA. @return the UAA url
[ "Return", "the", "URL", "of", "the", "UAA", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-actuator-autoconfigure/src/main/java/org/springframework/boot/actuate/autoconfigure/cloudfoundry/servlet/CloudFoundrySecurityService.java#L133-L146
train
Get the UAA URL
[ 30522, 2270, 5164, 2131, 6692, 21159, 2140, 1006, 1007, 1063, 2065, 1006, 2023, 1012, 25423, 21159, 2140, 1027, 1027, 19701, 1007, 1063, 3046, 1063, 4949, 1026, 1029, 1010, 1029, 1028, 3433, 1027, 2023, 1012, 2717, 18532, 15725, 1012, 2131,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/incubator-shardingsphere
sharding-jdbc/sharding-jdbc-orchestration/src/main/java/org/apache/shardingsphere/shardingjdbc/orchestration/internal/datasource/OrchestrationShardingDataSource.java
OrchestrationShardingDataSource.renew
@Subscribe public synchronized void renew(final DisabledStateChangedEvent disabledStateChangedEvent) { OrchestrationShardingSchema shardingSchema = disabledStateChangedEvent.getShardingSchema(); if (ShardingConstant.LOGIC_SCHEMA_NAME.equals(shardingSchema.getSchemaName())) { for (MasterSlaveRule each : dataSource.getShardingContext().getShardingRule().getMasterSlaveRules()) { ((OrchestrationMasterSlaveRule) each).updateDisabledDataSourceNames(shardingSchema.getDataSourceName(), disabledStateChangedEvent.isDisabled()); } } }
java
@Subscribe public synchronized void renew(final DisabledStateChangedEvent disabledStateChangedEvent) { OrchestrationShardingSchema shardingSchema = disabledStateChangedEvent.getShardingSchema(); if (ShardingConstant.LOGIC_SCHEMA_NAME.equals(shardingSchema.getSchemaName())) { for (MasterSlaveRule each : dataSource.getShardingContext().getShardingRule().getMasterSlaveRules()) { ((OrchestrationMasterSlaveRule) each).updateDisabledDataSourceNames(shardingSchema.getDataSourceName(), disabledStateChangedEvent.isDisabled()); } } }
[ "@", "Subscribe", "public", "synchronized", "void", "renew", "(", "final", "DisabledStateChangedEvent", "disabledStateChangedEvent", ")", "{", "OrchestrationShardingSchema", "shardingSchema", "=", "disabledStateChangedEvent", ".", "getShardingSchema", "(", ")", ";", "if", ...
Renew disabled data source names. @param disabledStateChangedEvent disabled state changed event
[ "Renew", "disabled", "data", "source", "names", "." ]
f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d
https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-jdbc/sharding-jdbc-orchestration/src/main/java/org/apache/shardingsphere/shardingjdbc/orchestration/internal/datasource/OrchestrationShardingDataSource.java#L123-L131
train
Renews the disabled state.
[ 30522, 1030, 4942, 29234, 2270, 25549, 11675, 20687, 1006, 2345, 9776, 9153, 15007, 22043, 24844, 4765, 9776, 9153, 15007, 22043, 24844, 4765, 1007, 1063, 4032, 9285, 11783, 8613, 5403, 2863, 21146, 17080, 3070, 22842, 2863, 1027, 9776, 9153,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/builder/HashCodeBuilder.java
HashCodeBuilder.unregister
static void unregister(final Object value) { Set<IDKey> registry = getRegistry(); if (registry != null) { registry.remove(new IDKey(value)); synchronized (HashCodeBuilder.class) { //read again registry = getRegistry(); if (registry != null && registry.isEmpty()) { REGISTRY.remove(); } } } }
java
static void unregister(final Object value) { Set<IDKey> registry = getRegistry(); if (registry != null) { registry.remove(new IDKey(value)); synchronized (HashCodeBuilder.class) { //read again registry = getRegistry(); if (registry != null && registry.isEmpty()) { REGISTRY.remove(); } } } }
[ "static", "void", "unregister", "(", "final", "Object", "value", ")", "{", "Set", "<", "IDKey", ">", "registry", "=", "getRegistry", "(", ")", ";", "if", "(", "registry", "!=", "null", ")", "{", "registry", ".", "remove", "(", "new", "IDKey", "(", "v...
<p> Unregisters the given object. </p> <p> Used by the reflection methods to avoid infinite loops. @param value The object to unregister. @since 2.3
[ "<p", ">", "Unregisters", "the", "given", "object", ".", "<", "/", "p", ">" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/builder/HashCodeBuilder.java#L491-L503
train
Unregister a value from the cache.
[ 30522, 10763, 11675, 4895, 2890, 24063, 2121, 1006, 2345, 4874, 3643, 1007, 1063, 2275, 1026, 8909, 14839, 1028, 15584, 1027, 2131, 2890, 24063, 2854, 1006, 1007, 1025, 2065, 1006, 15584, 999, 1027, 19701, 1007, 1063, 15584, 1012, 6366, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-poi/src/main/java/cn/hutool/poi/excel/sax/Excel03SaxReader.java
Excel03SaxReader.processLastCell
private void processLastCell(LastCellOfRowDummyRecord lastCell) { // 每行结束时, 调用handle() 方法 this.rowHandler.handle(curSheetIndex, lastCell.getRow(), this.rowCellList); // 清空行Cache this.rowCellList.clear(); }
java
private void processLastCell(LastCellOfRowDummyRecord lastCell) { // 每行结束时, 调用handle() 方法 this.rowHandler.handle(curSheetIndex, lastCell.getRow(), this.rowCellList); // 清空行Cache this.rowCellList.clear(); }
[ "private", "void", "processLastCell", "(", "LastCellOfRowDummyRecord", "lastCell", ")", "{", "// 每行结束时, 调用handle() 方法\r", "this", ".", "rowHandler", ".", "handle", "(", "curSheetIndex", ",", "lastCell", ".", "getRow", "(", ")", ",", "this", ".", "rowCellList", ")"...
处理行结束后的操作,{@link LastCellOfRowDummyRecord}是行结束的标识Record @param lastCell 行结束的标识Record
[ "处理行结束后的操作,", "{", "@link", "LastCellOfRowDummyRecord", "}", "是行结束的标识Record" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-poi/src/main/java/cn/hutool/poi/excel/sax/Excel03SaxReader.java#L279-L284
train
Process the last cell of the row.
[ 30522, 2797, 11675, 2832, 8523, 13535, 5349, 1006, 2197, 29109, 4135, 19699, 5004, 8566, 18879, 2890, 27108, 2094, 2197, 29109, 2140, 1007, 1063, 1013, 1013, 100, 1945, 100, 100, 100, 1989, 100, 100, 5047, 1006, 1007, 1863, 1901, 2023, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java
LogBuffer.getFullString
public final String getFullString(final int len, String charsetName) { if (position + len > origin + limit) throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); try { String string = new String(buffer, position, len, charsetName); position += len; return string; } catch (UnsupportedEncodingException e) { throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); } }
java
public final String getFullString(final int len, String charsetName) { if (position + len > origin + limit) throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); try { String string = new String(buffer, position, len, charsetName); position += len; return string; } catch (UnsupportedEncodingException e) { throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); } }
[ "public", "final", "String", "getFullString", "(", "final", "int", "len", ",", "String", "charsetName", ")", "{", "if", "(", "position", "+", "len", ">", "origin", "+", "limit", ")", "throw", "new", "IllegalArgumentException", "(", "\"limit excceed: \"", "+", ...
Return next fix-length string from buffer without null-terminate checking. Fix bug #17 {@link https ://github.com/AlibabaTech/canal/issues/17 }
[ "Return", "next", "fix", "-", "length", "string", "from", "buffer", "without", "null", "-", "terminate", "checking", ".", "Fix", "bug", "#17", "{" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java#L1122-L1133
train
Gets a full string of the specified length from the buffer.
[ 30522, 2270, 2345, 5164, 2131, 3993, 4877, 18886, 3070, 1006, 2345, 20014, 18798, 1010, 5164, 25869, 13462, 18442, 1007, 1063, 2065, 1006, 2597, 1009, 18798, 1028, 4761, 1009, 5787, 1007, 5466, 2047, 6206, 2906, 22850, 15781, 2595, 24422, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java
WindowJoin.main
public static void main(String[] args) throws Exception { // parse the parameters final ParameterTool params = ParameterTool.fromArgs(args); final long windowSize = params.getLong("windowSize", 2000); final long rate = params.getLong("rate", 3L); System.out.println("Using windowSize=" + windowSize + ", data rate=" + rate); System.out.println("To customize example, use: WindowJoin [--windowSize <window-size-in-millis>] [--rate <elements-per-second>]"); // obtain execution environment, run this example in "ingestion time" StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime); // make parameters available in the web interface env.getConfig().setGlobalJobParameters(params); // create the data sources for both grades and salaries DataStream<Tuple2<String, Integer>> grades = GradeSource.getSource(env, rate); DataStream<Tuple2<String, Integer>> salaries = SalarySource.getSource(env, rate); // run the actual window join program // for testability, this functionality is in a separate method. DataStream<Tuple3<String, Integer, Integer>> joinedStream = runWindowJoin(grades, salaries, windowSize); // print the results with a single thread, rather than in parallel joinedStream.print().setParallelism(1); // execute program env.execute("Windowed Join Example"); }
java
public static void main(String[] args) throws Exception { // parse the parameters final ParameterTool params = ParameterTool.fromArgs(args); final long windowSize = params.getLong("windowSize", 2000); final long rate = params.getLong("rate", 3L); System.out.println("Using windowSize=" + windowSize + ", data rate=" + rate); System.out.println("To customize example, use: WindowJoin [--windowSize <window-size-in-millis>] [--rate <elements-per-second>]"); // obtain execution environment, run this example in "ingestion time" StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime); // make parameters available in the web interface env.getConfig().setGlobalJobParameters(params); // create the data sources for both grades and salaries DataStream<Tuple2<String, Integer>> grades = GradeSource.getSource(env, rate); DataStream<Tuple2<String, Integer>> salaries = SalarySource.getSource(env, rate); // run the actual window join program // for testability, this functionality is in a separate method. DataStream<Tuple3<String, Integer, Integer>> joinedStream = runWindowJoin(grades, salaries, windowSize); // print the results with a single thread, rather than in parallel joinedStream.print().setParallelism(1); // execute program env.execute("Windowed Join Example"); }
[ "public", "static", "void", "main", "(", "String", "[", "]", "args", ")", "throws", "Exception", "{", "// parse the parameters", "final", "ParameterTool", "params", "=", "ParameterTool", ".", "fromArgs", "(", "args", ")", ";", "final", "long", "windowSize", "=...
*************************************************************************
[ "*************************************************************************" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java#L49-L78
train
Main method to run the windowed join example.
[ 30522, 2270, 30524, 2345, 2146, 3645, 4697, 1027, 11498, 5244, 1012, 2131, 10052, 1006, 1000, 3645, 4697, 1000, 1010, 2456, 1007, 1025, 2345, 2146, 3446, 1027, 11498, 5244, 1012, 2131, 10052, 1006, 1000, 3446, 1000, 1010, 1017, 2140, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
handler/src/main/java/io/netty/handler/logging/LoggingHandler.java
LoggingHandler.formatByteBuf
private static String formatByteBuf(ChannelHandlerContext ctx, String eventName, ByteBuf msg) { String chStr = ctx.channel().toString(); int length = msg.readableBytes(); if (length == 0) { StringBuilder buf = new StringBuilder(chStr.length() + 1 + eventName.length() + 4); buf.append(chStr).append(' ').append(eventName).append(": 0B"); return buf.toString(); } else { int rows = length / 16 + (length % 15 == 0? 0 : 1) + 4; StringBuilder buf = new StringBuilder(chStr.length() + 1 + eventName.length() + 2 + 10 + 1 + 2 + rows * 80); buf.append(chStr).append(' ').append(eventName).append(": ").append(length).append('B').append(NEWLINE); appendPrettyHexDump(buf, msg); return buf.toString(); } }
java
private static String formatByteBuf(ChannelHandlerContext ctx, String eventName, ByteBuf msg) { String chStr = ctx.channel().toString(); int length = msg.readableBytes(); if (length == 0) { StringBuilder buf = new StringBuilder(chStr.length() + 1 + eventName.length() + 4); buf.append(chStr).append(' ').append(eventName).append(": 0B"); return buf.toString(); } else { int rows = length / 16 + (length % 15 == 0? 0 : 1) + 4; StringBuilder buf = new StringBuilder(chStr.length() + 1 + eventName.length() + 2 + 10 + 1 + 2 + rows * 80); buf.append(chStr).append(' ').append(eventName).append(": ").append(length).append('B').append(NEWLINE); appendPrettyHexDump(buf, msg); return buf.toString(); } }
[ "private", "static", "String", "formatByteBuf", "(", "ChannelHandlerContext", "ctx", ",", "String", "eventName", ",", "ByteBuf", "msg", ")", "{", "String", "chStr", "=", "ctx", ".", "channel", "(", ")", ".", "toString", "(", ")", ";", "int", "length", "=",...
Generates the default log message of the specified event whose argument is a {@link ByteBuf}.
[ "Generates", "the", "default", "log", "message", "of", "the", "specified", "event", "whose", "argument", "is", "a", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/logging/LoggingHandler.java#L323-L339
train
Format a ByteBuf to be used in a HTTP request.
[ 30522, 2797, 10763, 5164, 4289, 3762, 2618, 8569, 2546, 1006, 3149, 11774, 3917, 8663, 18209, 14931, 2595, 1010, 5164, 2724, 18442, 1010, 24880, 8569, 2546, 5796, 2290, 1007, 1063, 5164, 10381, 3367, 2099, 1027, 14931, 2595, 1012, 3149, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/iterative/concurrent/Broker.java
Broker.getAndRemove
public V getAndRemove(String key) { try { V objToShare = retrieveSharedQueue(key).take(); mediations.remove(key); return objToShare; } catch (InterruptedException e) { throw new RuntimeException(e); } }
java
public V getAndRemove(String key) { try { V objToShare = retrieveSharedQueue(key).take(); mediations.remove(key); return objToShare; } catch (InterruptedException e) { throw new RuntimeException(e); } }
[ "public", "V", "getAndRemove", "(", "String", "key", ")", "{", "try", "{", "V", "objToShare", "=", "retrieveSharedQueue", "(", "key", ")", ".", "take", "(", ")", ";", "mediations", ".", "remove", "(", "key", ")", ";", "return", "objToShare", ";", "}", ...
Blocking retrieval and removal of the object to share.
[ "Blocking", "retrieval", "and", "removal", "of", "the", "object", "to", "share", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/concurrent/Broker.java#L43-L51
train
Get and remove the object from the shared queue.
[ 30522, 2270, 1058, 2131, 5685, 28578, 21818, 1006, 5164, 3145, 1007, 1063, 3046, 1063, 1058, 27885, 3501, 13122, 8167, 2063, 1027, 12850, 7377, 5596, 4226, 5657, 1006, 3145, 1007, 1012, 2202, 1006, 1007, 1025, 26435, 2015, 1012, 6366, 1006,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/AsciiString.java
AsciiString.contentEqualsIgnoreCase
public static boolean contentEqualsIgnoreCase(CharSequence a, CharSequence b) { if (a == null || b == null) { return a == b; } if (a.getClass() == AsciiString.class) { return ((AsciiString) a).contentEqualsIgnoreCase(b); } if (b.getClass() == AsciiString.class) { return ((AsciiString) b).contentEqualsIgnoreCase(a); } if (a.length() != b.length()) { return false; } for (int i = 0; i < a.length(); ++i) { if (!equalsIgnoreCase(a.charAt(i), b.charAt(i))) { return false; } } return true; }
java
public static boolean contentEqualsIgnoreCase(CharSequence a, CharSequence b) { if (a == null || b == null) { return a == b; } if (a.getClass() == AsciiString.class) { return ((AsciiString) a).contentEqualsIgnoreCase(b); } if (b.getClass() == AsciiString.class) { return ((AsciiString) b).contentEqualsIgnoreCase(a); } if (a.length() != b.length()) { return false; } for (int i = 0; i < a.length(); ++i) { if (!equalsIgnoreCase(a.charAt(i), b.charAt(i))) { return false; } } return true; }
[ "public", "static", "boolean", "contentEqualsIgnoreCase", "(", "CharSequence", "a", ",", "CharSequence", "b", ")", "{", "if", "(", "a", "==", "null", "||", "b", "==", "null", ")", "{", "return", "a", "==", "b", ";", "}", "if", "(", "a", ".", "getClas...
Returns {@code true} if both {@link CharSequence}'s are equals when ignore the case. This only supports 8-bit ASCII.
[ "Returns", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/AsciiString.java#L1440-L1461
train
contentEqualsIgnoreCase Method.
[ 30522, 2270, 10763, 22017, 20898, 4180, 2063, 26426, 5332, 26745, 2890, 18382, 1006, 25869, 3366, 4226, 5897, 1037, 1010, 25869, 3366, 4226, 5897, 1038, 1007, 1063, 2065, 1006, 1037, 1027, 1027, 19701, 1064, 1064, 1038, 1027, 1027, 19701, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/iomanager/AsynchronousBufferFileWriter.java
AsynchronousBufferFileWriter.writeBlock
@Override public void writeBlock(Buffer buffer) throws IOException { try { // if successfully added, the buffer will be recycled after the write operation addRequest(new BufferWriteRequest(this, buffer)); } catch (Throwable e) { // if not added, we need to recycle here buffer.recycleBuffer(); ExceptionUtils.rethrowIOException(e); } }
java
@Override public void writeBlock(Buffer buffer) throws IOException { try { // if successfully added, the buffer will be recycled after the write operation addRequest(new BufferWriteRequest(this, buffer)); } catch (Throwable e) { // if not added, we need to recycle here buffer.recycleBuffer(); ExceptionUtils.rethrowIOException(e); } }
[ "@", "Override", "public", "void", "writeBlock", "(", "Buffer", "buffer", ")", "throws", "IOException", "{", "try", "{", "// if successfully added, the buffer will be recycled after the write operation", "addRequest", "(", "new", "BufferWriteRequest", "(", "this", ",", "b...
Writes the given block asynchronously. @param buffer the buffer to be written (will be recycled when done) @throws IOException thrown if adding the write operation fails
[ "Writes", "the", "given", "block", "asynchronously", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/iomanager/AsynchronousBufferFileWriter.java#L44-L55
train
Write a block to the output stream.
[ 30522, 1030, 2058, 15637, 2270, 11675, 4339, 23467, 1006, 17698, 17698, 1007, 11618, 22834, 10288, 24422, 1063, 3046, 1063, 1013, 1013, 2065, 5147, 2794, 1010, 1996, 17698, 2097, 2022, 22207, 2044, 1996, 4339, 3169, 5587, 2890, 15500, 1006, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/client/RestTemplateBuilder.java
RestTemplateBuilder.customizers
public RestTemplateBuilder customizers( Collection<? extends RestTemplateCustomizer> restTemplateCustomizers) { Assert.notNull(restTemplateCustomizers, "RestTemplateCustomizers must not be null"); return new RestTemplateBuilder(this.detectRequestFactory, this.rootUri, this.messageConverters, this.requestFactorySupplier, this.uriTemplateHandler, this.errorHandler, this.basicAuthentication, Collections.unmodifiableSet(new LinkedHashSet<RestTemplateCustomizer>( restTemplateCustomizers)), this.requestFactoryCustomizer, this.interceptors); }
java
public RestTemplateBuilder customizers( Collection<? extends RestTemplateCustomizer> restTemplateCustomizers) { Assert.notNull(restTemplateCustomizers, "RestTemplateCustomizers must not be null"); return new RestTemplateBuilder(this.detectRequestFactory, this.rootUri, this.messageConverters, this.requestFactorySupplier, this.uriTemplateHandler, this.errorHandler, this.basicAuthentication, Collections.unmodifiableSet(new LinkedHashSet<RestTemplateCustomizer>( restTemplateCustomizers)), this.requestFactoryCustomizer, this.interceptors); }
[ "public", "RestTemplateBuilder", "customizers", "(", "Collection", "<", "?", "extends", "RestTemplateCustomizer", ">", "restTemplateCustomizers", ")", "{", "Assert", ".", "notNull", "(", "restTemplateCustomizers", ",", "\"RestTemplateCustomizers must not be null\"", ")", ";...
Set the {@link RestTemplateCustomizer RestTemplateCustomizers} that should be applied to the {@link RestTemplate}. Customizers are applied in the order that they were added after builder configuration has been applied. Setting this value will replace any previously configured customizers. @param restTemplateCustomizers the customizers to set @return a new builder instance @see #additionalCustomizers(RestTemplateCustomizer...)
[ "Set", "the", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/client/RestTemplateBuilder.java#L415-L425
train
Create a new RestTemplateBuilder with the specified customizers.
[ 30522, 2270, 2717, 18532, 15725, 8569, 23891, 2099, 7661, 17629, 2015, 1006, 3074, 1026, 1029, 8908, 2717, 18532, 15725, 7874, 20389, 17629, 1028, 2717, 18532, 15725, 7874, 20389, 17629, 2015, 1007, 1063, 20865, 1012, 2025, 11231, 3363, 1006,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
TaskMemoryManager.getOffsetInPage
public long getOffsetInPage(long pagePlusOffsetAddress) { final long offsetInPage = decodeOffset(pagePlusOffsetAddress); if (tungstenMemoryMode == MemoryMode.ON_HEAP) { return offsetInPage; } else { // In off-heap mode, an offset is an absolute address. In encodePageNumberAndOffset, we // converted the absolute address into a relative address. Here, we invert that operation: final int pageNumber = decodePageNumber(pagePlusOffsetAddress); assert (pageNumber >= 0 && pageNumber < PAGE_TABLE_SIZE); final MemoryBlock page = pageTable[pageNumber]; assert (page != null); return page.getBaseOffset() + offsetInPage; } }
java
public long getOffsetInPage(long pagePlusOffsetAddress) { final long offsetInPage = decodeOffset(pagePlusOffsetAddress); if (tungstenMemoryMode == MemoryMode.ON_HEAP) { return offsetInPage; } else { // In off-heap mode, an offset is an absolute address. In encodePageNumberAndOffset, we // converted the absolute address into a relative address. Here, we invert that operation: final int pageNumber = decodePageNumber(pagePlusOffsetAddress); assert (pageNumber >= 0 && pageNumber < PAGE_TABLE_SIZE); final MemoryBlock page = pageTable[pageNumber]; assert (page != null); return page.getBaseOffset() + offsetInPage; } }
[ "public", "long", "getOffsetInPage", "(", "long", "pagePlusOffsetAddress", ")", "{", "final", "long", "offsetInPage", "=", "decodeOffset", "(", "pagePlusOffsetAddress", ")", ";", "if", "(", "tungstenMemoryMode", "==", "MemoryMode", ".", "ON_HEAP", ")", "{", "retur...
Get the offset associated with an address encoded by {@link TaskMemoryManager#encodePageNumberAndOffset(MemoryBlock, long)}
[ "Get", "the", "offset", "associated", "with", "an", "address", "encoded", "by", "{" ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java#L409-L422
train
Get the offset in the page.
[ 30522, 2270, 2146, 2131, 27475, 20624, 16275, 4270, 1006, 2146, 3931, 24759, 26658, 21807, 12928, 14141, 8303, 1007, 1063, 2345, 2146, 16396, 2378, 13704, 1027, 21933, 3207, 27475, 3388, 1006, 3931, 24759, 26658, 21807, 12928, 14141, 8303, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/KeyGroupRangeAssignment.java
KeyGroupRangeAssignment.assignKeyToParallelOperator
public static int assignKeyToParallelOperator(Object key, int maxParallelism, int parallelism) { return computeOperatorIndexForKeyGroup(maxParallelism, parallelism, assignToKeyGroup(key, maxParallelism)); }
java
public static int assignKeyToParallelOperator(Object key, int maxParallelism, int parallelism) { return computeOperatorIndexForKeyGroup(maxParallelism, parallelism, assignToKeyGroup(key, maxParallelism)); }
[ "public", "static", "int", "assignKeyToParallelOperator", "(", "Object", "key", ",", "int", "maxParallelism", ",", "int", "parallelism", ")", "{", "return", "computeOperatorIndexForKeyGroup", "(", "maxParallelism", ",", "parallelism", ",", "assignToKeyGroup", "(", "ke...
Assigns the given key to a parallel operator index. @param key the key to assign @param maxParallelism the maximum supported parallelism, aka the number of key-groups. @param parallelism the current parallelism of the operator @return the index of the parallel operator to which the given key should be routed.
[ "Assigns", "the", "given", "key", "to", "a", "parallel", "operator", "index", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/state/KeyGroupRangeAssignment.java#L47-L49
train
Assigns a key to a parallel operator.
[ 30522, 2270, 10763, 20014, 23911, 14839, 14399, 5400, 6216, 4135, 4842, 8844, 1006, 4874, 3145, 1010, 20014, 4098, 28689, 6216, 28235, 1010, 20014, 5903, 2964, 1007, 1063, 2709, 24134, 25918, 8844, 22254, 10288, 29278, 14839, 17058, 1006, 409...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-tools/spring-boot-configuration-processor/src/main/java/org/springframework/boot/configurationprocessor/TypeUtils.java
TypeUtils.extractElementType
public TypeMirror extractElementType(TypeMirror type) { if (!this.env.getTypeUtils().isAssignable(type, this.collectionType)) { return null; } return getCollectionElementType(type); }
java
public TypeMirror extractElementType(TypeMirror type) { if (!this.env.getTypeUtils().isAssignable(type, this.collectionType)) { return null; } return getCollectionElementType(type); }
[ "public", "TypeMirror", "extractElementType", "(", "TypeMirror", "type", ")", "{", "if", "(", "!", "this", ".", "env", ".", "getTypeUtils", "(", ")", ".", "isAssignable", "(", "type", ",", "this", ".", "collectionType", ")", ")", "{", "return", "null", "...
Extract the target element type from the specified container type or {@code null} if no element type was found. @param type a type, potentially wrapping an element type @return the element type or {@code null} if no specific type was found
[ "Extract", "the", "target", "element", "type", "from", "the", "specified", "container", "type", "or", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-tools/spring-boot-configuration-processor/src/main/java/org/springframework/boot/configurationprocessor/TypeUtils.java#L151-L156
train
Extract the element type from the given type.
[ 30522, 2270, 2828, 14503, 29165, 14817, 12260, 3672, 13874, 1006, 2828, 14503, 29165, 2828, 1007, 1063, 2065, 1006, 999, 2023, 1012, 4372, 2615, 1012, 2131, 13874, 21823, 4877, 1006, 1007, 1012, 18061, 18719, 16989, 3468, 1006, 2828, 1010, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/io/FileUtil.java
FileUtil.readUtf8Lines
public static <T extends Collection<String>> T readUtf8Lines(URL url, T collection) throws IORuntimeException { return readLines(url, CharsetUtil.CHARSET_UTF_8, collection); }
java
public static <T extends Collection<String>> T readUtf8Lines(URL url, T collection) throws IORuntimeException { return readLines(url, CharsetUtil.CHARSET_UTF_8, collection); }
[ "public", "static", "<", "T", "extends", "Collection", "<", "String", ">", ">", "T", "readUtf8Lines", "(", "URL", "url", ",", "T", "collection", ")", "throws", "IORuntimeException", "{", "return", "readLines", "(", "url", ",", "CharsetUtil", ".", "CHARSET_UT...
从文件中读取每一行数据,编码为UTF-8 @param <T> 集合类型 @param url 文件的URL @param collection 集合 @return 文件中的每行内容的集合 @throws IORuntimeException IO异常
[ "从文件中读取每一行数据,编码为UTF", "-", "8" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/FileUtil.java#L2247-L2249
train
Reads the UTF - 8 lines from the given URL into the given collection.
[ 30522, 2270, 10763, 1026, 1056, 8908, 3074, 1026, 5164, 1028, 1028, 1056, 3191, 4904, 2546, 2620, 12735, 1006, 24471, 2140, 24471, 2140, 1010, 1056, 3074, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 2709, 3191, 12735, 1006, 24471, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/tomcat/TomcatServletWebServerFactory.java
TomcatServletWebServerFactory.setTldSkipPatterns
public void setTldSkipPatterns(Collection<String> patterns) { Assert.notNull(patterns, "Patterns must not be null"); this.tldSkipPatterns = new LinkedHashSet<>(patterns); }
java
public void setTldSkipPatterns(Collection<String> patterns) { Assert.notNull(patterns, "Patterns must not be null"); this.tldSkipPatterns = new LinkedHashSet<>(patterns); }
[ "public", "void", "setTldSkipPatterns", "(", "Collection", "<", "String", ">", "patterns", ")", "{", "Assert", ".", "notNull", "(", "patterns", ",", "\"Patterns must not be null\"", ")", ";", "this", ".", "tldSkipPatterns", "=", "new", "LinkedHashSet", "<>", "("...
Set the patterns that match jars to ignore for TLD scanning. See Tomcat's catalina.properties for typical values. Defaults to a list drawn from that source. @param patterns the jar patterns to skip when scanning for TLDs etc
[ "Set", "the", "patterns", "that", "match", "jars", "to", "ignore", "for", "TLD", "scanning", ".", "See", "Tomcat", "s", "catalina", ".", "properties", "for", "typical", "values", ".", "Defaults", "to", "a", "list", "drawn", "from", "that", "source", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/tomcat/TomcatServletWebServerFactory.java#L466-L469
train
Sets the list of patterns to be skipped for the TLDs.
[ 30522, 30524, 1012, 2025, 11231, 3363, 1006, 7060, 1010, 1000, 7060, 2442, 2025, 2022, 19701, 1000, 1007, 1025, 2023, 1012, 1056, 6392, 5488, 13944, 12079, 3619, 1027, 2047, 5799, 14949, 7898, 3388, 1026, 1028, 1006, 7060, 1007, 1025, 1065,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ConnectionHandler.java
Http2ConnectionHandler.onError
@Override public void onError(ChannelHandlerContext ctx, boolean outbound, Throwable cause) { Http2Exception embedded = getEmbeddedHttp2Exception(cause); if (isStreamError(embedded)) { onStreamError(ctx, outbound, cause, (StreamException) embedded); } else if (embedded instanceof CompositeStreamException) { CompositeStreamException compositException = (CompositeStreamException) embedded; for (StreamException streamException : compositException) { onStreamError(ctx, outbound, cause, streamException); } } else { onConnectionError(ctx, outbound, cause, embedded); } ctx.flush(); }
java
@Override public void onError(ChannelHandlerContext ctx, boolean outbound, Throwable cause) { Http2Exception embedded = getEmbeddedHttp2Exception(cause); if (isStreamError(embedded)) { onStreamError(ctx, outbound, cause, (StreamException) embedded); } else if (embedded instanceof CompositeStreamException) { CompositeStreamException compositException = (CompositeStreamException) embedded; for (StreamException streamException : compositException) { onStreamError(ctx, outbound, cause, streamException); } } else { onConnectionError(ctx, outbound, cause, embedded); } ctx.flush(); }
[ "@", "Override", "public", "void", "onError", "(", "ChannelHandlerContext", "ctx", ",", "boolean", "outbound", ",", "Throwable", "cause", ")", "{", "Http2Exception", "embedded", "=", "getEmbeddedHttp2Exception", "(", "cause", ")", ";", "if", "(", "isStreamError", ...
Central handler for all exceptions caught during HTTP/2 processing.
[ "Central", "handler", "for", "all", "exceptions", "caught", "during", "HTTP", "/", "2", "processing", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ConnectionHandler.java#L622-L636
train
Override onError in order to handle errors.
[ 30522, 1030, 2058, 15637, 2270, 11675, 2028, 18933, 2099, 1006, 3149, 11774, 3917, 8663, 18209, 14931, 2595, 1010, 22017, 20898, 2041, 15494, 1010, 5466, 3085, 3426, 1007, 1063, 8299, 2475, 10288, 24422, 11157, 1027, 2131, 6633, 8270, 5732, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/RocksDBFullRestoreOperation.java
RocksDBFullRestoreOperation.restoreKVStateMetaData
private void restoreKVStateMetaData() throws IOException, StateMigrationException { KeyedBackendSerializationProxy<K> serializationProxy = readMetaData(currentStateHandleInView); this.keygroupStreamCompressionDecorator = serializationProxy.isUsingKeyGroupCompression() ? SnappyStreamCompressionDecorator.INSTANCE : UncompressedStreamCompressionDecorator.INSTANCE; List<StateMetaInfoSnapshot> restoredMetaInfos = serializationProxy.getStateMetaInfoSnapshots(); currentStateHandleKVStateColumnFamilies = new ArrayList<>(restoredMetaInfos.size()); for (StateMetaInfoSnapshot restoredMetaInfo : restoredMetaInfos) { RocksDbKvStateInfo registeredStateCFHandle = getOrRegisterStateColumnFamilyHandle(null, restoredMetaInfo); currentStateHandleKVStateColumnFamilies.add(registeredStateCFHandle.columnFamilyHandle); } }
java
private void restoreKVStateMetaData() throws IOException, StateMigrationException { KeyedBackendSerializationProxy<K> serializationProxy = readMetaData(currentStateHandleInView); this.keygroupStreamCompressionDecorator = serializationProxy.isUsingKeyGroupCompression() ? SnappyStreamCompressionDecorator.INSTANCE : UncompressedStreamCompressionDecorator.INSTANCE; List<StateMetaInfoSnapshot> restoredMetaInfos = serializationProxy.getStateMetaInfoSnapshots(); currentStateHandleKVStateColumnFamilies = new ArrayList<>(restoredMetaInfos.size()); for (StateMetaInfoSnapshot restoredMetaInfo : restoredMetaInfos) { RocksDbKvStateInfo registeredStateCFHandle = getOrRegisterStateColumnFamilyHandle(null, restoredMetaInfo); currentStateHandleKVStateColumnFamilies.add(registeredStateCFHandle.columnFamilyHandle); } }
[ "private", "void", "restoreKVStateMetaData", "(", ")", "throws", "IOException", ",", "StateMigrationException", "{", "KeyedBackendSerializationProxy", "<", "K", ">", "serializationProxy", "=", "readMetaData", "(", "currentStateHandleInView", ")", ";", "this", ".", "keyg...
Restore the KV-state / ColumnFamily meta data for all key-groups referenced by the current state handle.
[ "Restore", "the", "KV", "-", "state", "/", "ColumnFamily", "meta", "data", "for", "all", "key", "-", "groups", "referenced", "by", "the", "current", "state", "handle", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/RocksDBFullRestoreOperation.java#L169-L184
train
Restore the KV state meta data from the state file.
[ 30522, 2797, 11675, 9239, 2243, 15088, 12259, 11368, 8447, 2696, 1006, 1007, 11618, 22834, 10288, 24422, 1010, 2110, 4328, 29397, 10288, 24422, 1063, 3145, 2098, 5963, 10497, 8043, 4818, 3989, 21572, 18037, 1026, 1047, 1028, 7642, 3989, 21572...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-setting/src/main/java/cn/hutool/setting/AbsSetting.java
AbsSetting.getBool
public Boolean getBool(String key, String group, Boolean defaultValue) { return Convert.toBool(getByGroup(key, group), defaultValue); }
java
public Boolean getBool(String key, String group, Boolean defaultValue) { return Convert.toBool(getByGroup(key, group), defaultValue); }
[ "public", "Boolean", "getBool", "(", "String", "key", ",", "String", "group", ",", "Boolean", "defaultValue", ")", "{", "return", "Convert", ".", "toBool", "(", "getByGroup", "(", "key", ",", "group", ")", ",", "defaultValue", ")", ";", "}" ]
获取波尔型型属性值 @param key 属性名 @param group 分组名 @param defaultValue 默认值 @return 属性值
[ "获取波尔型型属性值" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-setting/src/main/java/cn/hutool/setting/AbsSetting.java#L188-L190
train
Get a Boolean value from the database by key and group.
[ 30522, 2270, 22017, 20898, 2131, 5092, 4747, 1006, 5164, 3145, 1010, 5164, 2177, 1010, 22017, 20898, 12398, 10175, 5657, 1007, 1063, 2709, 10463, 1012, 2000, 5092, 4747, 1006, 2131, 3762, 17058, 1006, 3145, 1010, 2177, 1007, 1010, 12398, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java
ImgUtil.pressImage
public static void pressImage(Image srcImage, OutputStream out, Image pressImg, int x, int y, float alpha) throws IORuntimeException { pressImage(srcImage, getImageOutputStream(out), pressImg, x, y, alpha); }
java
public static void pressImage(Image srcImage, OutputStream out, Image pressImg, int x, int y, float alpha) throws IORuntimeException { pressImage(srcImage, getImageOutputStream(out), pressImg, x, y, alpha); }
[ "public", "static", "void", "pressImage", "(", "Image", "srcImage", ",", "OutputStream", "out", ",", "Image", "pressImg", ",", "int", "x", ",", "int", "y", ",", "float", "alpha", ")", "throws", "IORuntimeException", "{", "pressImage", "(", "srcImage", ",", ...
给图片添加图片水印<br> 此方法并不关闭流 @param srcImage 源图像流 @param out 目标图像流 @param pressImg 水印图片,可以使用{@link ImageIO#read(File)}方法读取文件 @param x 修正值。 默认在中间,偏移量相对于中间偏移 @param y 修正值。 默认在中间,偏移量相对于中间偏移 @param alpha 透明度:alpha 必须是范围 [0.0, 1.0] 之内(包含边界值)的一个浮点数字 @throws IORuntimeException IO异常 @since 3.2.2
[ "给图片添加图片水印<br", ">", "此方法并不关闭流" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java#L941-L943
train
Press an image using the specified alpha value.
[ 30522, 2270, 10763, 11675, 2811, 9581, 3351, 1006, 3746, 5034, 6895, 26860, 1010, 27852, 25379, 2041, 1010, 3746, 2811, 5714, 2290, 1010, 20014, 1060, 1010, 20014, 1061, 1010, 14257, 6541, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/corpus/io/IOUtil.java
IOUtil.readTxt
public static String readTxt(String path) { if (path == null) return null; try { InputStream in = IOAdapter == null ? new FileInputStream(path) : IOAdapter.open(path); byte[] fileContent = new byte[in.available()]; int read = readBytesFromOtherInputStream(in, fileContent); in.close(); // 处理 UTF-8 BOM if (read >= 3 && fileContent[0] == -17 && fileContent[1] == -69 && fileContent[2] == -65) return new String(fileContent, 3, fileContent.length - 3, Charset.forName("UTF-8")); return new String(fileContent, Charset.forName("UTF-8")); } catch (FileNotFoundException e) { logger.warning("找不到" + path + e); return null; } catch (IOException e) { logger.warning("读取" + path + "发生IO异常" + e); return null; } }
java
public static String readTxt(String path) { if (path == null) return null; try { InputStream in = IOAdapter == null ? new FileInputStream(path) : IOAdapter.open(path); byte[] fileContent = new byte[in.available()]; int read = readBytesFromOtherInputStream(in, fileContent); in.close(); // 处理 UTF-8 BOM if (read >= 3 && fileContent[0] == -17 && fileContent[1] == -69 && fileContent[2] == -65) return new String(fileContent, 3, fileContent.length - 3, Charset.forName("UTF-8")); return new String(fileContent, Charset.forName("UTF-8")); } catch (FileNotFoundException e) { logger.warning("找不到" + path + e); return null; } catch (IOException e) { logger.warning("读取" + path + "发生IO异常" + e); return null; } }
[ "public", "static", "String", "readTxt", "(", "String", "path", ")", "{", "if", "(", "path", "==", "null", ")", "return", "null", ";", "try", "{", "InputStream", "in", "=", "IOAdapter", "==", "null", "?", "new", "FileInputStream", "(", "path", ")", ":"...
一次性读入纯文本 @param path @return
[ "一次性读入纯文本" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/io/IOUtil.java#L90-L115
train
read txt file
[ 30522, 2270, 10763, 5164, 3191, 2102, 18413, 1006, 5164, 4130, 1007, 1063, 2065, 1006, 4130, 1027, 1027, 19701, 1007, 2709, 19701, 1025, 3046, 1063, 20407, 25379, 1999, 1027, 22834, 8447, 13876, 2121, 1027, 1027, 19701, 1029, 2047, 5371, 23...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/security/SecurityUtils.java
SecurityUtils.install
public static void install(SecurityConfiguration config) throws Exception { // install the security modules List<SecurityModule> modules = new ArrayList<>(); try { for (SecurityModuleFactory moduleFactory : config.getSecurityModuleFactories()) { SecurityModule module = moduleFactory.createModule(config); // can be null if a SecurityModule is not supported in the current environment if (module != null) { module.install(); modules.add(module); } } } catch (Exception ex) { throw new Exception("unable to establish the security context", ex); } installedModules = modules; // First check if we have Hadoop in the ClassPath. If not, we simply don't do anything. try { Class.forName( "org.apache.hadoop.security.UserGroupInformation", false, SecurityUtils.class.getClassLoader()); // install a security context // use the Hadoop login user as the subject of the installed security context if (!(installedContext instanceof NoOpSecurityContext)) { LOG.warn("overriding previous security context"); } UserGroupInformation loginUser = UserGroupInformation.getLoginUser(); installedContext = new HadoopSecurityContext(loginUser); } catch (ClassNotFoundException e) { LOG.info("Cannot install HadoopSecurityContext because Hadoop cannot be found in the Classpath."); } catch (LinkageError e) { LOG.error("Cannot install HadoopSecurityContext.", e); } }
java
/**
 * Installs a process-wide security configuration.
 *
 * <p>Applies the configuration using the available security modules (i.e. Hadoop, JAAS),
 * then — if Hadoop is on the classpath — installs a Hadoop-based security context using
 * the Hadoop login user.
 *
 * @param config the security configuration to apply
 * @throws Exception if any security module fails to install (wrapped with context)
 */
public static void install(SecurityConfiguration config) throws Exception {
    // install the security modules
    List<SecurityModule> modules = new ArrayList<>();
    try {
        for (SecurityModuleFactory moduleFactory : config.getSecurityModuleFactories()) {
            SecurityModule module = moduleFactory.createModule(config);
            // can be null if a SecurityModule is not supported in the current environment
            if (module != null) {
                module.install();
                modules.add(module);
            }
        }
    } catch (Exception ex) {
        // NOTE(review): modules already installed before the failure are not rolled
        // back here — presumably uninstallation is handled elsewhere; confirm.
        throw new Exception("unable to establish the security context", ex);
    }
    installedModules = modules;

    // First check if we have Hadoop in the ClassPath. If not, we simply don't do anything.
    try {
        // Load without initialization: only probing for presence of the class.
        Class.forName(
            "org.apache.hadoop.security.UserGroupInformation",
            false,
            SecurityUtils.class.getClassLoader());

        // install a security context
        // use the Hadoop login user as the subject of the installed security context
        if (!(installedContext instanceof NoOpSecurityContext)) {
            LOG.warn("overriding previous security context");
        }
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        installedContext = new HadoopSecurityContext(loginUser);
    } catch (ClassNotFoundException e) {
        LOG.info("Cannot install HadoopSecurityContext because Hadoop cannot be found in the Classpath.");
    } catch (LinkageError e) {
        // Hadoop is present but unusable (e.g. version clash); log and keep going.
        LOG.error("Cannot install HadoopSecurityContext.", e);
    }
}
[ "public", "static", "void", "install", "(", "SecurityConfiguration", "config", ")", "throws", "Exception", "{", "// install the security modules", "List", "<", "SecurityModule", ">", "modules", "=", "new", "ArrayList", "<>", "(", ")", ";", "try", "{", "for", "("...
Installs a process-wide security configuration. <p>Applies the configuration using the available security modules (i.e. Hadoop, JAAS).
[ "Installs", "a", "process", "-", "wide", "security", "configuration", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/security/SecurityUtils.java#L58-L96
train
Installs the security context.
[ 30522, 2270, 10763, 11675, 16500, 1006, 3036, 8663, 8873, 27390, 3370, 9530, 8873, 2290, 1007, 11618, 6453, 1063, 1013, 1013, 16500, 1996, 3036, 14184, 2862, 1026, 3036, 5302, 8566, 2571, 1028, 14184, 1027, 2047, 9140, 9863, 1026, 1028, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
SeleniumHQ/selenium
java/client/src/org/openqa/selenium/chrome/ChromeOptions.java
ChromeOptions.addEncodedExtensions
public ChromeOptions addEncodedExtensions(List<String> encoded) { for (String extension : encoded) { checkNotNull(extension); } extensions.addAll(encoded); return this; }
java
/**
 * Adds new Chrome extensions to install on browser startup. Each string should be
 * the Base64 encoded form of a packed Chrome extension (CRX file).
 *
 * @param encoded Base64 encoded data of the extensions to install
 * @return this instance, for call chaining
 */
public ChromeOptions addEncodedExtensions(List<String> encoded) {
    // Validate the whole batch up front so nothing is added when any entry is null.
    encoded.forEach(item -> checkNotNull(item));
    extensions.addAll(encoded);
    return this;
}
[ "public", "ChromeOptions", "addEncodedExtensions", "(", "List", "<", "String", ">", "encoded", ")", "{", "for", "(", "String", "extension", ":", "encoded", ")", "{", "checkNotNull", "(", "extension", ")", ";", "}", "extensions", ".", "addAll", "(", "encoded"...
Adds a new Chrome extension to install on browser startup. Each string data should specify a Base64 encoded string of packed Chrome extension (CRX file). @param encoded Base64 encoded data of the extensions to install.
[ "Adds", "a", "new", "Chrome", "extension", "to", "install", "on", "browser", "startup", ".", "Each", "string", "data", "should", "specify", "a", "Base64", "encoded", "string", "of", "packed", "Chrome", "extension", "(", "CRX", "file", ")", "." ]
7af172729f17b20269c8ca4ea6f788db48616535
https://github.com/SeleniumHQ/selenium/blob/7af172729f17b20269c8ca4ea6f788db48616535/java/client/src/org/openqa/selenium/chrome/ChromeOptions.java#L182-L188
train
Adds the extensions to the list of encoded extensions.
[ 30522, 2270, 18546, 7361, 9285, 5587, 2368, 16044, 3207, 18413, 6132, 8496, 1006, 2862, 1026, 5164, 1028, 12359, 1007, 1063, 2005, 1006, 5164, 5331, 1024, 12359, 1007, 1063, 4638, 17048, 11231, 3363, 1006, 5331, 1007, 1025, 1065, 14305, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedRleValuesReader.java
VectorizedRleValuesReader.readUnsignedVarInt
private int readUnsignedVarInt() throws IOException { int value = 0; int shift = 0; int b; do { b = in.read(); value |= (b & 0x7F) << shift; shift += 7; } while ((b & 0x80) != 0); return value; }
java
/**
 * Reads the next unsigned varint (base-128) encoded int from the underlying stream.
 *
 * <p>Each byte contributes its low 7 bits; the high bit (0x80) marks continuation.
 *
 * @return the decoded value
 * @throws IOException if the stream ends before the varint is complete
 */
private int readUnsignedVarInt() throws IOException {
    int value = 0;
    int shift = 0;
    int b;
    do {
        b = in.read();
        if (b < 0) {
            // Bug fix: read() returns -1 at end of stream (assuming the InputStream
            // contract — confirm for this stream type); since (-1 & 0x80) != 0 the
            // old loop never terminated and silently corrupted the accumulated value.
            throw new IOException("Unexpected end of stream while reading an unsigned varint");
        }
        value |= (b & 0x7F) << shift;
        shift += 7;
    } while ((b & 0x80) != 0);
    return value;
}
[ "private", "int", "readUnsignedVarInt", "(", ")", "throws", "IOException", "{", "int", "value", "=", "0", ";", "int", "shift", "=", "0", ";", "int", "b", ";", "do", "{", "b", "=", "in", ".", "read", "(", ")", ";", "value", "|=", "(", "b", "&", ...
Reads the next varint encoded int.
[ "Reads", "the", "next", "varint", "encoded", "int", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedRleValuesReader.java#L554-L564
train
Read an unsigned integer from the stream.
[ 30522, 2797, 20014, 3191, 4609, 5332, 19225, 10755, 18447, 1006, 1007, 11618, 22834, 10288, 24422, 1063, 20014, 3643, 1027, 30524, 1999, 1012, 3191, 1006, 1007, 1025, 3643, 1064, 1027, 1006, 1038, 1004, 1014, 2595, 2581, 2546, 1007, 1026, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-formats/flink-avro-confluent-registry/src/main/java/org/apache/flink/formats/avro/registry/confluent/ConfluentRegistryAvroDeserializationSchema.java
ConfluentRegistryAvroDeserializationSchema.forGeneric
public static ConfluentRegistryAvroDeserializationSchema<GenericRecord> forGeneric(Schema schema, String url, int identityMapCapacity) { return new ConfluentRegistryAvroDeserializationSchema<>( GenericRecord.class, schema, new CachedSchemaCoderProvider(url, identityMapCapacity)); }
java
/**
 * Creates a {@code ConfluentRegistryAvroDeserializationSchema} that produces
 * {@code GenericRecord} using the provided reader schema and looks up the writer
 * schema in the Confluent Schema Registry.
 *
 * @param schema              reader schema of the produced records
 * @param url                 URL of the schema registry to connect to
 * @param identityMapCapacity maximum number of cached schema versions
 * @return deserialization schema producing {@code GenericRecord}
 */
public static ConfluentRegistryAvroDeserializationSchema<GenericRecord> forGeneric(Schema schema, String url, int identityMapCapacity) {
    // Resolve writer schemas against the registry at the given URL, caching up
    // to identityMapCapacity schema versions.
    final CachedSchemaCoderProvider coderProvider = new CachedSchemaCoderProvider(url, identityMapCapacity);
    return new ConfluentRegistryAvroDeserializationSchema<>(GenericRecord.class, schema, coderProvider);
}
[ "public", "static", "ConfluentRegistryAvroDeserializationSchema", "<", "GenericRecord", ">", "forGeneric", "(", "Schema", "schema", ",", "String", "url", ",", "int", "identityMapCapacity", ")", "{", "return", "new", "ConfluentRegistryAvroDeserializationSchema", "<>", "(",...
Creates {@link ConfluentRegistryAvroDeserializationSchema} that produces {@link GenericRecord} using provided reader schema and looks up writer schema in Confluent Schema Registry. @param schema schema of produced records @param url url of schema registry to connect @param identityMapCapacity maximum number of cached schema versions (default: 1000) @return deserialized record in form of {@link GenericRecord}
[ "Creates", "{", "@link", "ConfluentRegistryAvroDeserializationSchema", "}", "that", "produces", "{", "@link", "GenericRecord", "}", "using", "provided", "reader", "schema", "and", "looks", "up", "writer", "schema", "in", "Confluent", "Schema", "Registry", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-formats/flink-avro-confluent-registry/src/main/java/org/apache/flink/formats/avro/registry/confluent/ConfluentRegistryAvroDeserializationSchema.java#L79-L85
train
Creates a new avro deserialization schema for a generic schema.
[ 30522, 2270, 10763, 9530, 10258, 24997, 7913, 24063, 20444, 19716, 19847, 11610, 22731, 22842, 2863, 1026, 12391, 2890, 27108, 2094, 1028, 15681, 3678, 2594, 1006, 8040, 28433, 8040, 28433, 1010, 5164, 24471, 2140, 1010, 20014, 4767, 2863, 15...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/join/TimeBoundedStreamJoin.java
TimeBoundedStreamJoin.calExpirationTime
private long calExpirationTime(long operatorTime, long relativeSize) { if (operatorTime < Long.MAX_VALUE) { return operatorTime - relativeSize - allowedLateness - 1; } else { // When operatorTime = Long.MaxValue, it means the stream has reached the end. return Long.MAX_VALUE; } }
java
private long calExpirationTime(long operatorTime, long relativeSize) { if (operatorTime < Long.MAX_VALUE) { return operatorTime - relativeSize - allowedLateness - 1; } else { // When operatorTime = Long.MaxValue, it means the stream has reached the end. return Long.MAX_VALUE; } }
[ "private", "long", "calExpirationTime", "(", "long", "operatorTime", ",", "long", "relativeSize", ")", "{", "if", "(", "operatorTime", "<", "Long", ".", "MAX_VALUE", ")", "{", "return", "operatorTime", "-", "relativeSize", "-", "allowedLateness", "-", "1", ";"...
Calculate the expiration time with the given operator time and relative window size. @param operatorTime the operator time @param relativeSize the relative window size @return the expiration time for cached rows
[ "Calculate", "the", "expiration", "time", "with", "the", "given", "operator", "time", "and", "relative", "window", "size", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/join/TimeBoundedStreamJoin.java#L348-L355
train
Calculate the expiration time for cached rows.
[ 30522, 2797, 2146, 21854, 2595, 16781, 7292, 1006, 2146, 6872, 7292, 1010, 2146, 9064, 4697, 1007, 1063, 2065, 1006, 6872, 7292, 1026, 2146, 1012, 4098, 1035, 3643, 1007, 1063, 2709, 6872, 7292, 1011, 9064, 4697, 1011, 3039, 13806, 2791, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/lang/Assert.java
Assert.notEmpty
public static <T> Collection<T> notEmpty(Collection<T> collection) throws IllegalArgumentException { return notEmpty(collection, "[Assertion failed] - this collection must not be empty: it must contain at least 1 element"); }
java
/**
 * Asserts that the given collection is not empty.
 *
 * @param <T>        element type of the collection
 * @param collection the collection to check
 * @return the checked collection
 * @throws IllegalArgumentException if the collection is {@code null} or has no elements
 */
public static <T> Collection<T> notEmpty(Collection<T> collection) throws IllegalArgumentException {
    // Delegate to the message-taking overload with the default failure text.
    final String defaultMessage = "[Assertion failed] - this collection must not be empty: it must contain at least 1 element";
    return notEmpty(collection, defaultMessage);
}
[ "public", "static", "<", "T", ">", "Collection", "<", "T", ">", "notEmpty", "(", "Collection", "<", "T", ">", "collection", ")", "throws", "IllegalArgumentException", "{", "return", "notEmpty", "(", "collection", ",", "\"[Assertion failed] - this collection must not...
断言给定集合非空 <pre class="code"> Assert.notEmpty(collection); </pre> @param <T> 集合元素类型 @param collection 被检查的集合 @return 被检查集合 @throws IllegalArgumentException if the collection is {@code null} or has no elements
[ "断言给定集合非空" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/lang/Assert.java#L370-L372
train
Assert that the given collection is not empty.
[ 30522, 2270, 10763, 1026, 1056, 1028, 3074, 1026, 1056, 1028, 3602, 27718, 2100, 1006, 3074, 1026, 1056, 1028, 3074, 1007, 11618, 6206, 2906, 22850, 15781, 2595, 24422, 1063, 2709, 3602, 27718, 2100, 1006, 3074, 1010, 1000, 1031, 23617, 347...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/handler/HandleHelper.java
HandleHelper.handleRow
public static Entity handleRow(ResultSet rs) throws SQLException { final ResultSetMetaData meta = rs.getMetaData(); final int columnCount = meta.getColumnCount(); return handleRow(columnCount, meta, rs); }
java
/**
 * Handles a single row of the given ResultSet, converting it to an Entity.
 *
 * @param rs the result set positioned on the row to convert
 * @return the row as an {@code Entity}
 * @throws SQLException if reading the metadata or the row fails
 */
public static Entity handleRow(ResultSet rs) throws SQLException {
    // Fetch the metadata once, then delegate to the column-aware overload.
    final ResultSetMetaData metaData = rs.getMetaData();
    return handleRow(metaData.getColumnCount(), metaData, rs);
}
[ "public", "static", "Entity", "handleRow", "(", "ResultSet", "rs", ")", "throws", "SQLException", "{", "final", "ResultSetMetaData", "meta", "=", "rs", ".", "getMetaData", "(", ")", ";", "final", "int", "columnCount", "=", "meta", ".", "getColumnCount", "(", ...
处理单条数据 @param rs 数据集 @return 每一行的Entity @throws SQLException SQL执行异常
[ "处理单条数据" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/handler/HandleHelper.java#L150-L154
train
Handles a single row of a ResultSet.
[ 30522, 2270, 10763, 9178, 28213, 5004, 1006, 3463, 3388, 12667, 1007, 11618, 29296, 10288, 24422, 1063, 2345, 3463, 3388, 11368, 8447, 2696, 18804, 1027, 12667, 1012, 2131, 11368, 8447, 2696, 1006, 1007, 1025, 2345, 20014, 5930, 3597, 16671, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java
ImgUtil.createGraphics
public static Graphics2D createGraphics(BufferedImage image, Color color) { return GraphicsUtil.createGraphics(image, color); }
java
/**
 * Creates a {@link Graphics2D} for the given image, initialised with the given
 * background/pen color by delegating to {@code GraphicsUtil.createGraphics}.
 *
 * @param image the image to draw on
 * @param color background color and current pen color
 * @return the created {@link Graphics2D}
 */
public static Graphics2D createGraphics(BufferedImage image, Color color) {
    final Graphics2D graphics = GraphicsUtil.createGraphics(image, color);
    return graphics;
}
[ "public", "static", "Graphics2D", "createGraphics", "(", "BufferedImage", "image", ",", "Color", "color", ")", "{", "return", "GraphicsUtil", ".", "createGraphics", "(", "image", ",", "color", ")", ";", "}" ]
创建{@link Graphics2D} @param image {@link BufferedImage} @param color {@link Color}背景颜色以及当前画笔颜色 @return {@link Graphics2D} @since 3.2.3 @see GraphicsUtil#createGraphics(BufferedImage, Color)
[ "创建", "{", "@link", "Graphics2D", "}" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java#L1343-L1345
train
Create a graphics2D object that can be used to draw the image.
[ 30522, 2270, 10763, 8389, 2475, 2094, 3443, 14773, 2015, 1006, 17698, 2098, 9581, 3351, 3746, 1010, 3609, 3609, 1007, 1063, 2709, 8389, 21823, 2140, 1012, 3443, 14773, 2015, 1006, 3746, 1010, 3609, 1007, 1025, 1065, 102, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/invoker/cache/CachingOperationInvoker.java
CachingOperationInvoker.apply
public static OperationInvoker apply(OperationInvoker invoker, long timeToLive) { if (timeToLive > 0) { return new CachingOperationInvoker(invoker, timeToLive); } return invoker; }
java
/**
 * Applies caching to the given invoker when a positive time-to-live is configured.
 *
 * @param invoker    the invoker to wrap
 * @param timeToLive maximum time in milliseconds that a response may be cached
 * @return a caching wrapper, or the original invoker when caching is not required
 */
public static OperationInvoker apply(OperationInvoker invoker, long timeToLive) {
    // Only wrap when caching is actually requested (positive TTL).
    return (timeToLive > 0) ? new CachingOperationInvoker(invoker, timeToLive) : invoker;
}
[ "public", "static", "OperationInvoker", "apply", "(", "OperationInvoker", "invoker", ",", "long", "timeToLive", ")", "{", "if", "(", "timeToLive", ">", "0", ")", "{", "return", "new", "CachingOperationInvoker", "(", "invoker", ",", "timeToLive", ")", ";", "}",...
Apply caching configuration when appropriate to the given invoker. @param invoker the invoker to wrap @param timeToLive the maximum time in milliseconds that a response can be cached @return a caching version of the invoker or the original instance if caching is not required
[ "Apply", "caching", "configuration", "when", "appropriate", "to", "the", "given", "invoker", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/invoker/cache/CachingOperationInvoker.java#L95-L100
train
Apply the given time to the given operation invoker.
[ 30522, 2270, 10763, 3169, 2378, 6767, 5484, 6611, 1006, 3169, 2378, 6767, 5484, 1999, 6767, 5484, 1010, 2146, 2051, 3406, 3669, 3726, 1007, 1063, 2065, 1006, 2051, 3406, 3669, 3726, 1028, 1014, 1007, 1063, 2709, 2047, 6187, 8450, 25918, 3...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/internal/logging/CommonsLogger.java
CommonsLogger.error
@Override public void error(String format, Object... arguments) { if (logger.isErrorEnabled()) { FormattingTuple ft = MessageFormatter.arrayFormat(format, arguments); logger.error(ft.getMessage(), ft.getThrowable()); } }
java
@Override public void error(String format, Object... arguments) { if (logger.isErrorEnabled()) { FormattingTuple ft = MessageFormatter.arrayFormat(format, arguments); logger.error(ft.getMessage(), ft.getThrowable()); } }
[ "@", "Override", "public", "void", "error", "(", "String", "format", ",", "Object", "...", "arguments", ")", "{", "if", "(", "logger", ".", "isErrorEnabled", "(", ")", ")", "{", "FormattingTuple", "ft", "=", "MessageFormatter", ".", "arrayFormat", "(", "fo...
Delegates to the {@link Log#error(Object)} method of the underlying {@link Log} instance. <p> However, this form avoids superfluous object creation when the logger is disabled for level ERROR. </p> @param format the format string @param arguments a list of 3 or more arguments
[ "Delegates", "to", "the", "{", "@link", "Log#error", "(", "Object", ")", "}", "method", "of", "the", "underlying", "{", "@link", "Log", "}", "instance", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/internal/logging/CommonsLogger.java#L547-L553
train
Log an error message with the specified arguments.
[ 30522, 1030, 2058, 15637, 2270, 11675, 7561, 1006, 5164, 4289, 1010, 4874, 1012, 1012, 1012, 9918, 1007, 1063, 2065, 1006, 8833, 4590, 1012, 2003, 2121, 29165, 8189, 23242, 1006, 1007, 1007, 1063, 4289, 3436, 8525, 10814, 3027, 1027, 4471, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/util/AbstractID.java
AbstractID.getBytes
public byte[] getBytes() { byte[] bytes = new byte[SIZE]; longToByteArray(lowerPart, bytes, 0); longToByteArray(upperPart, bytes, SIZE_OF_LONG); return bytes; }
java
/**
 * Gets the bytes underlying this ID: the lower part first, then the upper part
 * starting at offset {@code SIZE_OF_LONG}.
 *
 * @return a newly allocated array with the bytes underlying this ID
 */
public byte[] getBytes() {
    final byte[] result = new byte[SIZE];
    longToByteArray(lowerPart, result, 0);
    longToByteArray(upperPart, result, SIZE_OF_LONG);
    return result;
}
[ "public", "byte", "[", "]", "getBytes", "(", ")", "{", "byte", "[", "]", "bytes", "=", "new", "byte", "[", "SIZE", "]", ";", "longToByteArray", "(", "lowerPart", ",", "bytes", ",", "0", ")", ";", "longToByteArray", "(", "upperPart", ",", "bytes", ","...
Gets the bytes underlying this ID. @return The bytes underlying this ID.
[ "Gets", "the", "bytes", "underlying", "this", "ID", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/util/AbstractID.java#L123-L128
train
Returns the bytes of this long.
[ 30522, 2270, 24880, 1031, 1033, 2131, 3762, 4570, 1006, 1007, 1063, 24880, 1031, 1033, 27507, 1027, 2047, 24880, 1031, 2946, 1033, 1025, 2146, 3406, 3762, 27058, 11335, 2100, 1006, 2896, 19362, 2102, 1010, 27507, 1010, 1014, 1007, 1025, 214...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
redisson/redisson
redisson/src/main/java/org/redisson/executor/TasksRunnerService.java
TasksRunnerService.asyncScheduledServiceAtFixed
private RemoteExecutorServiceAsync asyncScheduledServiceAtFixed(String executorId, String requestId) { ScheduledTasksService scheduledRemoteService = new ScheduledTasksService(codec, name, commandExecutor, executorId, responses); scheduledRemoteService.setTerminationTopicName(terminationTopicName); scheduledRemoteService.setTasksCounterName(tasksCounterName); scheduledRemoteService.setStatusName(statusName); scheduledRemoteService.setSchedulerQueueName(schedulerQueueName); scheduledRemoteService.setSchedulerChannelName(schedulerChannelName); scheduledRemoteService.setTasksName(tasksName); scheduledRemoteService.setRequestId(new RequestId(requestId)); scheduledRemoteService.setTasksRetryIntervalName(tasksRetryIntervalName); RemoteExecutorServiceAsync asyncScheduledServiceAtFixed = scheduledRemoteService.get(RemoteExecutorServiceAsync.class, RemoteInvocationOptions.defaults().noAck().noResult()); return asyncScheduledServiceAtFixed; }
java
/**
 * Creates a {@code RemoteExecutorServiceAsync} whose underlying {@code ScheduledTasksService}
 * reuses the given requestId instead of generating a fresh one, because recurring tasks
 * must be rescheduled under the same requestId.
 *
 * @param executorId id of the executor the service is bound to
 * @param requestId  requestId of the recurring task being rescheduled
 * @return async remote service proxy configured with no-ack / no-result invocation options
 */
private RemoteExecutorServiceAsync asyncScheduledServiceAtFixed(String executorId, String requestId) {
    ScheduledTasksService scheduledRemoteService = new ScheduledTasksService(codec, name, commandExecutor, executorId, responses);
    // Mirror this runner's queue/topic/state names onto the service so the
    // rescheduled task lands in the same structures.
    scheduledRemoteService.setTerminationTopicName(terminationTopicName);
    scheduledRemoteService.setTasksCounterName(tasksCounterName);
    scheduledRemoteService.setStatusName(statusName);
    scheduledRemoteService.setSchedulerQueueName(schedulerQueueName);
    scheduledRemoteService.setSchedulerChannelName(schedulerChannelName);
    scheduledRemoteService.setTasksName(tasksName);
    // Pin the requestId: recurring executions of a task keep their original id.
    scheduledRemoteService.setRequestId(new RequestId(requestId));
    scheduledRemoteService.setTasksRetryIntervalName(tasksRetryIntervalName);
    // Fire-and-forget proxy: no acknowledgement and no result are awaited.
    RemoteExecutorServiceAsync asyncScheduledServiceAtFixed = scheduledRemoteService.get(RemoteExecutorServiceAsync.class, RemoteInvocationOptions.defaults().noAck().noResult());
    return asyncScheduledServiceAtFixed;
}
[ "private", "RemoteExecutorServiceAsync", "asyncScheduledServiceAtFixed", "(", "String", "executorId", ",", "String", "requestId", ")", "{", "ScheduledTasksService", "scheduledRemoteService", "=", "new", "ScheduledTasksService", "(", "codec", ",", "name", ",", "commandExecut...
Creates RemoteExecutorServiceAsync with special executor which overrides requestId generation and uses current requestId. Because recurring tasks should use the same requestId. @return
[ "Creates", "RemoteExecutorServiceAsync", "with", "special", "executor", "which", "overrides", "requestId", "generation", "and", "uses", "current", "requestId", ".", "Because", "recurring", "tasks", "should", "use", "the", "same", "requestId", "." ]
d3acc0249b2d5d658d36d99e2c808ce49332ea44
https://github.com/redisson/redisson/blob/d3acc0249b2d5d658d36d99e2c808ce49332ea44/redisson/src/main/java/org/redisson/executor/TasksRunnerService.java#L163-L175
train
Create a ScheduledTasksService with the specified executorId and request id.
[ 30522, 2797, 6556, 10288, 8586, 16161, 22573, 2099, 7903, 5243, 6508, 12273, 2004, 6038, 6169, 7690, 18696, 8043, 7903, 5243, 24475, 7646, 2098, 1006, 5164, 4654, 8586, 16161, 14615, 1010, 5164, 5227, 3593, 1007, 1063, 5115, 10230, 5705, 80...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-crypto/src/main/java/cn/hutool/crypto/symmetric/SymmetricCrypto.java
SymmetricCrypto.decryptStr
public String decryptStr(String data, Charset charset) { return StrUtil.str(decrypt(data), charset); }
java
/**
 * Decrypts a Hex- or Base64-encoded string and decodes the result with the given charset.
 *
 * @param data    the encrypted string to decrypt
 * @param charset charset used to decode the decrypted bytes
 * @return the decrypted string
 */
public String decryptStr(String data, Charset charset) {
    // Decrypt to raw bytes first, then decode with the caller-supplied charset.
    final byte[] decrypted = decrypt(data);
    return StrUtil.str(decrypted, charset);
}
[ "public", "String", "decryptStr", "(", "String", "data", ",", "Charset", "charset", ")", "{", "return", "StrUtil", ".", "str", "(", "decrypt", "(", "data", ")", ",", "charset", ")", ";", "}" ]
解密Hex(16进制)或Base64表示的字符串 @param data 被解密的String @param charset 解密后的charset @return 解密后的String
[ "解密Hex(16进制)或Base64表示的字符串" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/symmetric/SymmetricCrypto.java#L347-L349
train
Decrypt a String using the specified charset.
[ 30522, 2270, 5164, 11703, 2854, 22798, 16344, 1006, 5164, 2951, 1010, 25869, 13462, 25869, 13462, 1007, 1063, 2709, 2358, 22134, 4014, 1012, 2358, 2099, 1006, 11703, 2854, 13876, 1006, 2951, 1007, 1010, 25869, 13462, 1007, 1025, 1065, 102, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-queryable-state/flink-queryable-state-client-java/src/main/java/org/apache/flink/queryablestate/network/AbstractServerBase.java
AbstractServerBase.shutdownServer
public CompletableFuture<Void> shutdownServer() { CompletableFuture<Void> shutdownFuture = new CompletableFuture<>(); if (serverShutdownFuture.compareAndSet(null, shutdownFuture)) { log.info("Shutting down {} @ {}", serverName, serverAddress); final CompletableFuture<Void> groupShutdownFuture = new CompletableFuture<>(); if (bootstrap != null) { EventLoopGroup group = bootstrap.group(); if (group != null && !group.isShutdown()) { group.shutdownGracefully(0L, 0L, TimeUnit.MILLISECONDS) .addListener(finished -> { if (finished.isSuccess()) { groupShutdownFuture.complete(null); } else { groupShutdownFuture.completeExceptionally(finished.cause()); } }); } else { groupShutdownFuture.complete(null); } } else { groupShutdownFuture.complete(null); } final CompletableFuture<Void> handlerShutdownFuture = new CompletableFuture<>(); if (handler == null) { handlerShutdownFuture.complete(null); } else { handler.shutdown().whenComplete((result, throwable) -> { if (throwable != null) { handlerShutdownFuture.completeExceptionally(throwable); } else { handlerShutdownFuture.complete(null); } }); } final CompletableFuture<Void> queryExecShutdownFuture = CompletableFuture.runAsync(() -> { if (queryExecutor != null) { ExecutorUtils.gracefulShutdown(10L, TimeUnit.MINUTES, queryExecutor); } }); CompletableFuture.allOf( queryExecShutdownFuture, groupShutdownFuture, handlerShutdownFuture ).whenComplete((result, throwable) -> { if (throwable != null) { shutdownFuture.completeExceptionally(throwable); } else { shutdownFuture.complete(null); } }); } return serverShutdownFuture.get(); }
java
public CompletableFuture<Void> shutdownServer() { CompletableFuture<Void> shutdownFuture = new CompletableFuture<>(); if (serverShutdownFuture.compareAndSet(null, shutdownFuture)) { log.info("Shutting down {} @ {}", serverName, serverAddress); final CompletableFuture<Void> groupShutdownFuture = new CompletableFuture<>(); if (bootstrap != null) { EventLoopGroup group = bootstrap.group(); if (group != null && !group.isShutdown()) { group.shutdownGracefully(0L, 0L, TimeUnit.MILLISECONDS) .addListener(finished -> { if (finished.isSuccess()) { groupShutdownFuture.complete(null); } else { groupShutdownFuture.completeExceptionally(finished.cause()); } }); } else { groupShutdownFuture.complete(null); } } else { groupShutdownFuture.complete(null); } final CompletableFuture<Void> handlerShutdownFuture = new CompletableFuture<>(); if (handler == null) { handlerShutdownFuture.complete(null); } else { handler.shutdown().whenComplete((result, throwable) -> { if (throwable != null) { handlerShutdownFuture.completeExceptionally(throwable); } else { handlerShutdownFuture.complete(null); } }); } final CompletableFuture<Void> queryExecShutdownFuture = CompletableFuture.runAsync(() -> { if (queryExecutor != null) { ExecutorUtils.gracefulShutdown(10L, TimeUnit.MINUTES, queryExecutor); } }); CompletableFuture.allOf( queryExecShutdownFuture, groupShutdownFuture, handlerShutdownFuture ).whenComplete((result, throwable) -> { if (throwable != null) { shutdownFuture.completeExceptionally(throwable); } else { shutdownFuture.complete(null); } }); } return serverShutdownFuture.get(); }
[ "public", "CompletableFuture", "<", "Void", ">", "shutdownServer", "(", ")", "{", "CompletableFuture", "<", "Void", ">", "shutdownFuture", "=", "new", "CompletableFuture", "<>", "(", ")", ";", "if", "(", "serverShutdownFuture", ".", "compareAndSet", "(", "null",...
Shuts down the server and all related thread pools. @return A {@link CompletableFuture} that will be completed upon termination of the shutdown process.
[ "Shuts", "down", "the", "server", "and", "all", "related", "thread", "pools", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-queryable-state/flink-queryable-state-client-java/src/main/java/org/apache/flink/queryablestate/network/AbstractServerBase.java#L285-L339
train
Shutdowns the server.
[ 30522, 2270, 4012, 10814, 10880, 11263, 11244, 1026, 11675, 1028, 3844, 7698, 8043, 6299, 1006, 1007, 1063, 4012, 10814, 10880, 11263, 11244, 1026, 11675, 1028, 3844, 7698, 11263, 11244, 1027, 2047, 4012, 10814, 10880, 11263, 11244, 1026, 102...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-json/src/main/java/cn/hutool/json/JSONObject.java
JSONObject.populateMap
private void populateMap(Object bean) { final Collection<PropDesc> props = BeanUtil.getBeanDesc(bean.getClass()).getProps(); Method getter; Object value; for (PropDesc prop : props) { // 得到property对应的getter方法 getter = prop.getGetter(); if (null == getter) { // 无Getter跳过 continue; } // 只读取有getter方法的属性 try { value = getter.invoke(bean); } catch (Exception ignore) { // 忽略读取失败的属性 continue; } if (ObjectUtil.isNull(value) && this.config.isIgnoreNullValue()) { // 值为null且用户定义跳过则跳过 continue; } if (value != bean) { // 防止循环引用 this.rawHashMap.put(prop.getFieldName(), JSONUtil.wrap(value, this.config.isIgnoreNullValue())); } } }
java
private void populateMap(Object bean) { final Collection<PropDesc> props = BeanUtil.getBeanDesc(bean.getClass()).getProps(); Method getter; Object value; for (PropDesc prop : props) { // 得到property对应的getter方法 getter = prop.getGetter(); if (null == getter) { // 无Getter跳过 continue; } // 只读取有getter方法的属性 try { value = getter.invoke(bean); } catch (Exception ignore) { // 忽略读取失败的属性 continue; } if (ObjectUtil.isNull(value) && this.config.isIgnoreNullValue()) { // 值为null且用户定义跳过则跳过 continue; } if (value != bean) { // 防止循环引用 this.rawHashMap.put(prop.getFieldName(), JSONUtil.wrap(value, this.config.isIgnoreNullValue())); } } }
[ "private", "void", "populateMap", "(", "Object", "bean", ")", "{", "final", "Collection", "<", "PropDesc", ">", "props", "=", "BeanUtil", ".", "getBeanDesc", "(", "bean", ".", "getClass", "(", ")", ")", ".", "getProps", "(", ")", ";", "Method", "getter",...
Bean对象转Map @param bean Bean对象 @param ignoreNullValue 是否忽略空值
[ "Bean对象转Map" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-json/src/main/java/cn/hutool/json/JSONObject.java#L673-L704
train
Populate the map with data from the given bean.
[ 30522, 2797, 11675, 3769, 9869, 2863, 2361, 1006, 4874, 14068, 1007, 1063, 2345, 3074, 1026, 17678, 6155, 2278, 1028, 24387, 1027, 14068, 21823, 2140, 1012, 2131, 4783, 5685, 2229, 2278, 1006, 14068, 1012, 2131, 26266, 1006, 1007, 1007, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/util/collections/FloatHashSet.java
FloatHashSet.contains
public boolean contains(final float k) { int intKey = Float.floatToIntBits(k); if (intKey == 0) { return this.containsZero; } else { float[] key = this.key; int curr; int pos; if ((curr = Float.floatToIntBits(key[pos = MurmurHashUtil.fmix(intKey) & this.mask])) == 0) { return false; } else if (intKey == curr) { return true; } else { while ((curr = Float.floatToIntBits(key[pos = pos + 1 & this.mask])) != 0) { if (intKey == curr) { return true; } } return false; } } }
java
public boolean contains(final float k) { int intKey = Float.floatToIntBits(k); if (intKey == 0) { return this.containsZero; } else { float[] key = this.key; int curr; int pos; if ((curr = Float.floatToIntBits(key[pos = MurmurHashUtil.fmix(intKey) & this.mask])) == 0) { return false; } else if (intKey == curr) { return true; } else { while ((curr = Float.floatToIntBits(key[pos = pos + 1 & this.mask])) != 0) { if (intKey == curr) { return true; } } return false; } } }
[ "public", "boolean", "contains", "(", "final", "float", "k", ")", "{", "int", "intKey", "=", "Float", ".", "floatToIntBits", "(", "k", ")", ";", "if", "(", "intKey", "==", "0", ")", "{", "return", "this", ".", "containsZero", ";", "}", "else", "{", ...
See {@link Float#equals(Object)}.
[ "See", "{" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/util/collections/FloatHashSet.java#L82-L104
train
Checks if the set contains the specified key.
[ 30522, 2270, 22017, 20898, 3397, 1006, 2345, 14257, 1047, 1007, 1063, 20014, 20014, 14839, 1027, 14257, 1012, 14257, 3406, 18447, 16313, 2015, 1006, 1047, 1007, 1025, 2065, 1006, 20014, 14839, 1027, 1027, 1014, 1007, 1063, 2709, 2023, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-cassandra/src/main/java/org/apache/flink/streaming/connectors/cassandra/CassandraSink.java
CassandraSink.setUidHash
@PublicEvolving public CassandraSink<IN> setUidHash(String uidHash) { if (useDataStreamSink) { getSinkTransformation().setUidHash(uidHash); } else { getStreamTransformation().setUidHash(uidHash); } return this; }
java
@PublicEvolving public CassandraSink<IN> setUidHash(String uidHash) { if (useDataStreamSink) { getSinkTransformation().setUidHash(uidHash); } else { getStreamTransformation().setUidHash(uidHash); } return this; }
[ "@", "PublicEvolving", "public", "CassandraSink", "<", "IN", ">", "setUidHash", "(", "String", "uidHash", ")", "{", "if", "(", "useDataStreamSink", ")", "{", "getSinkTransformation", "(", ")", ".", "setUidHash", "(", "uidHash", ")", ";", "}", "else", "{", ...
Sets an user provided hash for this operator. This will be used AS IS the create the JobVertexID. <p>The user provided hash is an alternative to the generated hashes, that is considered when identifying an operator through the default hash mechanics fails (e.g. because of changes between Flink versions). <p><strong>Important</strong>: this should be used as a workaround or for trouble shooting. The provided hash needs to be unique per transformation and job. Otherwise, job submission will fail. Furthermore, you cannot assign user-specified hash to intermediate nodes in an operator chain and trying so will let your job fail. <p>A use case for this is in migration between Flink versions or changing the jobs in a way that changes the automatically generated hashes. In this case, providing the previous hashes directly through this method (e.g. obtained from old logs) can help to reestablish a lost mapping from states to their target operator. @param uidHash The user provided hash for this operator. This will become the JobVertexID, which is shown in the logs and web ui. @return The operator with the user provided hash.
[ "Sets", "an", "user", "provided", "hash", "for", "this", "operator", ".", "This", "will", "be", "used", "AS", "IS", "the", "create", "the", "JobVertexID", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-cassandra/src/main/java/org/apache/flink/streaming/connectors/cassandra/CassandraSink.java#L127-L135
train
Sets the UID hash for the sink.
[ 30522, 1030, 2270, 6777, 4747, 6455, 2270, 15609, 11493, 2243, 1026, 1999, 1028, 2275, 21272, 14949, 2232, 1006, 5164, 21318, 17516, 4095, 1007, 1063, 2065, 1006, 2109, 6790, 21422, 11493, 2243, 1007, 1063, 4152, 19839, 6494, 3619, 14192, 3...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-http/src/main/java/cn/hutool/http/HtmlUtil.java
HtmlUtil.encode
private static String encode(String text, char[][] array) { int len; if ((text == null) || ((len = text.length()) == 0)) { return StrUtil.EMPTY; } StringBuilder buffer = new StringBuilder(len + (len >> 2)); char c; for (int i = 0; i < len; i++) { c = text.charAt(i); if (c < 64) { buffer.append(array[c]); } else { buffer.append(c); } } return buffer.toString(); }
java
private static String encode(String text, char[][] array) { int len; if ((text == null) || ((len = text.length()) == 0)) { return StrUtil.EMPTY; } StringBuilder buffer = new StringBuilder(len + (len >> 2)); char c; for (int i = 0; i < len; i++) { c = text.charAt(i); if (c < 64) { buffer.append(array[c]); } else { buffer.append(c); } } return buffer.toString(); }
[ "private", "static", "String", "encode", "(", "String", "text", ",", "char", "[", "]", "[", "]", "array", ")", "{", "int", "len", ";", "if", "(", "(", "text", "==", "null", ")", "||", "(", "(", "len", "=", "text", ".", "length", "(", ")", ")", ...
Encoder @param text 被编码的文本 @param array 特殊字符集合 @return 编码后的字符
[ "Encoder" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-http/src/main/java/cn/hutool/http/HtmlUtil.java#L182-L198
train
Encodes the given text using the UTF - 8 encoding.
[ 30522, 2797, 10763, 5164, 4372, 16044, 1006, 5164, 3793, 1010, 25869, 1031, 1033, 1031, 1033, 9140, 1007, 1063, 20014, 18798, 1025, 2065, 1006, 1006, 3793, 1027, 1027, 19701, 1007, 1064, 1064, 1006, 1006, 18798, 1027, 3793, 1012, 3091, 1006...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/TaskManagerLocation.java
TaskManagerLocation.getFqdnHostName
private static String getFqdnHostName(InetAddress inetAddress) { String fqdnHostName; try { fqdnHostName = inetAddress.getCanonicalHostName(); } catch (Throwable t) { LOG.warn("Unable to determine the canonical hostname. Input split assignment (such as " + "for HDFS files) may be non-local when the canonical hostname is missing."); LOG.debug("getCanonicalHostName() Exception:", t); fqdnHostName = inetAddress.getHostAddress(); } return fqdnHostName; }
java
private static String getFqdnHostName(InetAddress inetAddress) { String fqdnHostName; try { fqdnHostName = inetAddress.getCanonicalHostName(); } catch (Throwable t) { LOG.warn("Unable to determine the canonical hostname. Input split assignment (such as " + "for HDFS files) may be non-local when the canonical hostname is missing."); LOG.debug("getCanonicalHostName() Exception:", t); fqdnHostName = inetAddress.getHostAddress(); } return fqdnHostName; }
[ "private", "static", "String", "getFqdnHostName", "(", "InetAddress", "inetAddress", ")", "{", "String", "fqdnHostName", ";", "try", "{", "fqdnHostName", "=", "inetAddress", ".", "getCanonicalHostName", "(", ")", ";", "}", "catch", "(", "Throwable", "t", ")", ...
Gets the fully qualified hostname of the TaskManager based on the network address. @param inetAddress the network address that the TaskManager binds its sockets to @return fully qualified hostname of the TaskManager
[ "Gets", "the", "fully", "qualified", "hostname", "of", "the", "TaskManager", "based", "on", "the", "network", "address", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/TaskManagerLocation.java#L173-L185
train
Get the hostname of the hostname of the host.
[ 30522, 2797, 10763, 5164, 2131, 2546, 4160, 2094, 25311, 14122, 18442, 1006, 1999, 12928, 14141, 8303, 1999, 12928, 14141, 8303, 1007, 1063, 5164, 1042, 4160, 2094, 25311, 14122, 18442, 1025, 3046, 1063, 1042, 4160, 2094, 25311, 14122, 18442,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
sink/src/main/java/com/alibaba/otter/canal/sink/entry/group/TimelineBarrier.java
TimelineBarrier.await
public void await(Event event, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException { long timestamp = getTimestamp(event); try { lock.lockInterruptibly(); single(timestamp); while (isPermit(event, timestamp) == false) { condition.await(timeout, unit); } } finally { lock.unlock(); } }
java
public void await(Event event, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException { long timestamp = getTimestamp(event); try { lock.lockInterruptibly(); single(timestamp); while (isPermit(event, timestamp) == false) { condition.await(timeout, unit); } } finally { lock.unlock(); } }
[ "public", "void", "await", "(", "Event", "event", ",", "long", "timeout", ",", "TimeUnit", "unit", ")", "throws", "InterruptedException", ",", "TimeoutException", "{", "long", "timestamp", "=", "getTimestamp", "(", "event", ")", ";", "try", "{", "lock", ".",...
判断自己的timestamp是否可以通过,带超时控制 @throws InterruptedException @throws TimeoutException
[ "判断自己的timestamp是否可以通过", "带超时控制" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/sink/src/main/java/com/alibaba/otter/canal/sink/entry/group/TimelineBarrier.java#L66-L77
train
Await for an event.
[ 30522, 2270, 11675, 26751, 1006, 2724, 2724, 1010, 2146, 2051, 5833, 1010, 2051, 19496, 2102, 3131, 1007, 11618, 7153, 10288, 24422, 1010, 2051, 5833, 10288, 24422, 1063, 2146, 2335, 15464, 2361, 1027, 2131, 7292, 9153, 8737, 1006, 2724, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/api/common/operators/GenericDataSinkBase.java
GenericDataSinkBase.executeOnCollections
@SuppressWarnings("unchecked") protected void executeOnCollections(List<IN> inputData, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception { OutputFormat<IN> format = this.formatWrapper.getUserCodeObject(); TypeInformation<IN> inputType = getInput().getOperatorInfo().getOutputType(); if (this.localOrdering != null) { int[] sortColumns = this.localOrdering.getFieldPositions(); boolean[] sortOrderings = this.localOrdering.getFieldSortDirections(); final TypeComparator<IN> sortComparator; if (inputType instanceof CompositeType) { sortComparator = ((CompositeType<IN>) inputType).createComparator(sortColumns, sortOrderings, 0, executionConfig); } else if (inputType instanceof AtomicType) { sortComparator = ((AtomicType<IN>) inputType).createComparator(sortOrderings[0], executionConfig); } else { throw new UnsupportedOperationException("Local output sorting does not support type "+inputType+" yet."); } Collections.sort(inputData, new Comparator<IN>() { @Override public int compare(IN o1, IN o2) { return sortComparator.compare(o1, o2); } }); } if(format instanceof InitializeOnMaster) { ((InitializeOnMaster)format).initializeGlobal(1); } format.configure(this.parameters); if(format instanceof RichOutputFormat){ ((RichOutputFormat<?>) format).setRuntimeContext(ctx); } format.open(0, 1); for (IN element : inputData) { format.writeRecord(element); } format.close(); if(format instanceof FinalizeOnMaster) { ((FinalizeOnMaster)format).finalizeGlobal(1); } }
java
@SuppressWarnings("unchecked") protected void executeOnCollections(List<IN> inputData, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception { OutputFormat<IN> format = this.formatWrapper.getUserCodeObject(); TypeInformation<IN> inputType = getInput().getOperatorInfo().getOutputType(); if (this.localOrdering != null) { int[] sortColumns = this.localOrdering.getFieldPositions(); boolean[] sortOrderings = this.localOrdering.getFieldSortDirections(); final TypeComparator<IN> sortComparator; if (inputType instanceof CompositeType) { sortComparator = ((CompositeType<IN>) inputType).createComparator(sortColumns, sortOrderings, 0, executionConfig); } else if (inputType instanceof AtomicType) { sortComparator = ((AtomicType<IN>) inputType).createComparator(sortOrderings[0], executionConfig); } else { throw new UnsupportedOperationException("Local output sorting does not support type "+inputType+" yet."); } Collections.sort(inputData, new Comparator<IN>() { @Override public int compare(IN o1, IN o2) { return sortComparator.compare(o1, o2); } }); } if(format instanceof InitializeOnMaster) { ((InitializeOnMaster)format).initializeGlobal(1); } format.configure(this.parameters); if(format instanceof RichOutputFormat){ ((RichOutputFormat<?>) format).setRuntimeContext(ctx); } format.open(0, 1); for (IN element : inputData) { format.writeRecord(element); } format.close(); if(format instanceof FinalizeOnMaster) { ((FinalizeOnMaster)format).finalizeGlobal(1); } }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "protected", "void", "executeOnCollections", "(", "List", "<", "IN", ">", "inputData", ",", "RuntimeContext", "ctx", ",", "ExecutionConfig", "executionConfig", ")", "throws", "Exception", "{", "OutputFormat", "<", ...
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/common/operators/GenericDataSinkBase.java#L227-L271
train
Execute on collections.
[ 30522, 1030, 16081, 9028, 5582, 2015, 1006, 1000, 4895, 5403, 18141, 1000, 1007, 5123, 11675, 15389, 2239, 26895, 18491, 2015, 1006, 2862, 1026, 1999, 1028, 7953, 2850, 2696, 1010, 2448, 7292, 8663, 18209, 14931, 2595, 1010, 7781, 8663, 887...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
ShuffleSecretManager.registerApp
public void registerApp(String appId, ByteBuffer shuffleSecret) { registerApp(appId, JavaUtils.bytesToString(shuffleSecret)); }
java
public void registerApp(String appId, ByteBuffer shuffleSecret) { registerApp(appId, JavaUtils.bytesToString(shuffleSecret)); }
[ "public", "void", "registerApp", "(", "String", "appId", ",", "ByteBuffer", "shuffleSecret", ")", "{", "registerApp", "(", "appId", ",", "JavaUtils", ".", "bytesToString", "(", "shuffleSecret", ")", ")", ";", "}" ]
Register an application with its secret specified as a byte buffer.
[ "Register", "an", "application", "with", "its", "secret", "specified", "as", "a", "byte", "buffer", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java#L60-L62
train
Register an application with the given secret.
[ 30522, 2270, 11675, 4236, 29098, 1006, 5164, 10439, 3593, 1010, 24880, 8569, 12494, 23046, 3366, 16748, 2102, 1007, 1063, 4236, 29098, 1006, 10439, 3593, 1010, 9262, 21823, 4877, 1012, 27507, 13122, 18886, 3070, 1006, 23046, 3366, 16748, 2102...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-crypto/src/main/java/cn/hutool/crypto/KeyUtil.java
KeyUtil.getKeyGenerator
public static KeyGenerator getKeyGenerator(String algorithm) { final Provider provider = GlobalBouncyCastleProvider.INSTANCE.getProvider(); KeyGenerator generator; try { generator = (null == provider) // ? KeyGenerator.getInstance(getMainAlgorithm(algorithm)) // : KeyGenerator.getInstance(getMainAlgorithm(algorithm), provider); } catch (NoSuchAlgorithmException e) { throw new CryptoException(e); } return generator; }
java
public static KeyGenerator getKeyGenerator(String algorithm) { final Provider provider = GlobalBouncyCastleProvider.INSTANCE.getProvider(); KeyGenerator generator; try { generator = (null == provider) // ? KeyGenerator.getInstance(getMainAlgorithm(algorithm)) // : KeyGenerator.getInstance(getMainAlgorithm(algorithm), provider); } catch (NoSuchAlgorithmException e) { throw new CryptoException(e); } return generator; }
[ "public", "static", "KeyGenerator", "getKeyGenerator", "(", "String", "algorithm", ")", "{", "final", "Provider", "provider", "=", "GlobalBouncyCastleProvider", ".", "INSTANCE", ".", "getProvider", "(", ")", ";", "KeyGenerator", "generator", ";", "try", "{", "gene...
获取{@link KeyGenerator} @param algorithm 对称加密算法 @return {@link KeyGenerator} @since 4.5.2
[ "获取", "{", "@link", "KeyGenerator", "}" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/KeyUtil.java#L501-L513
train
Gets the KeyGenerator object for the given algorithm.
[ 30522, 2270, 10763, 3145, 6914, 6906, 4263, 2131, 14839, 6914, 6906, 4263, 1006, 5164, 9896, 1007, 1063, 2345, 10802, 10802, 1027, 3795, 5092, 4609, 5666, 23662, 21572, 17258, 2121, 1012, 6013, 1012, 2131, 21572, 17258, 2121, 1006, 1007, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
SeleniumHQ/selenium
java/server/src/org/openqa/selenium/remote/server/log/PerSessionLogHandler.java
PerSessionLogHandler.getLog
public synchronized String getLog(SessionId sessionId) throws IOException { // TODO(chandra): Provide option to clear logs after getLog() String logs = formattedRecords(sessionId); logs = "\n<RC_Logs RC_Session_ID=" + sessionId + ">\n" + logs + "\n</RC_Logs>\n"; return logs; }
java
public synchronized String getLog(SessionId sessionId) throws IOException { // TODO(chandra): Provide option to clear logs after getLog() String logs = formattedRecords(sessionId); logs = "\n<RC_Logs RC_Session_ID=" + sessionId + ">\n" + logs + "\n</RC_Logs>\n"; return logs; }
[ "public", "synchronized", "String", "getLog", "(", "SessionId", "sessionId", ")", "throws", "IOException", "{", "// TODO(chandra): Provide option to clear logs after getLog()", "String", "logs", "=", "formattedRecords", "(", "sessionId", ")", ";", "logs", "=", "\"\\n<RC_L...
This returns Selenium Remote Control logs associated with the sessionId. @param sessionId session-id for which the RC logs will be returned. @return String RC logs for the sessionId @throws IOException when the elves go bad
[ "This", "returns", "Selenium", "Remote", "Control", "logs", "associated", "with", "the", "sessionId", "." ]
7af172729f17b20269c8ca4ea6f788db48616535
https://github.com/SeleniumHQ/selenium/blob/7af172729f17b20269c8ca4ea6f788db48616535/java/server/src/org/openqa/selenium/remote/server/log/PerSessionLogHandler.java#L171-L177
train
Returns the log of the specified session.
[ 30522, 2270, 25549, 5164, 2131, 21197, 1006, 5219, 3593, 5219, 3593, 1007, 11618, 22834, 10288, 24422, 1063, 1013, 1013, 28681, 2080, 1006, 16469, 1007, 1024, 3073, 5724, 2000, 3154, 15664, 2044, 2131, 21197, 1006, 1007, 5164, 15664, 1027, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/model/perceptron/model/LinearModel.java
LinearModel.update
public void update(Collection<Integer> x, int y) { assert y == 1 || y == -1 : "感知机的标签y必须是±1"; for (Integer f : x) parameter[f] += y; }
java
public void update(Collection<Integer> x, int y) { assert y == 1 || y == -1 : "感知机的标签y必须是±1"; for (Integer f : x) parameter[f] += y; }
[ "public", "void", "update", "(", "Collection", "<", "Integer", ">", "x", ",", "int", "y", ")", "{", "assert", "y", "==", "1", "||", "y", "==", "-", "1", ":", "\"感知机的标签y必须是±1\";", "", "for", "(", "Integer", "f", ":", "x", ")", "parameter", "[", "]...
参数更新 @param x 特征向量 @param y 正确答案
[ "参数更新" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/model/perceptron/model/LinearModel.java#L234-L239
train
Update the count of the parameter with the given values.
[ 30522, 2270, 11675, 10651, 1006, 3074, 1026, 16109, 1028, 1060, 1010, 20014, 1061, 1007, 1063, 20865, 1061, 1027, 1027, 1015, 1064, 1064, 1061, 1027, 1027, 1011, 1015, 1024, 1000, 100, 100, 100, 1916, 100, 100, 1061, 100, 100, 100, 1081, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/services/MesosServicesUtils.java
MesosServicesUtils.createMesosServices
public static MesosServices createMesosServices(Configuration configuration, String hostname) throws Exception { ActorSystem localActorSystem = AkkaUtils.createLocalActorSystem(configuration); MesosArtifactServer artifactServer = createArtifactServer(configuration, hostname); HighAvailabilityMode highAvailabilityMode = HighAvailabilityMode.fromConfig(configuration); switch (highAvailabilityMode) { case NONE: return new StandaloneMesosServices(localActorSystem, artifactServer); case ZOOKEEPER: final String zkMesosRootPath = configuration.getString( HighAvailabilityOptions.HA_ZOOKEEPER_MESOS_WORKERS_PATH); ZooKeeperUtilityFactory zooKeeperUtilityFactory = new ZooKeeperUtilityFactory( configuration, zkMesosRootPath); return new ZooKeeperMesosServices(localActorSystem, artifactServer, zooKeeperUtilityFactory); default: throw new Exception("High availability mode " + highAvailabilityMode + " is not supported."); } }
java
public static MesosServices createMesosServices(Configuration configuration, String hostname) throws Exception { ActorSystem localActorSystem = AkkaUtils.createLocalActorSystem(configuration); MesosArtifactServer artifactServer = createArtifactServer(configuration, hostname); HighAvailabilityMode highAvailabilityMode = HighAvailabilityMode.fromConfig(configuration); switch (highAvailabilityMode) { case NONE: return new StandaloneMesosServices(localActorSystem, artifactServer); case ZOOKEEPER: final String zkMesosRootPath = configuration.getString( HighAvailabilityOptions.HA_ZOOKEEPER_MESOS_WORKERS_PATH); ZooKeeperUtilityFactory zooKeeperUtilityFactory = new ZooKeeperUtilityFactory( configuration, zkMesosRootPath); return new ZooKeeperMesosServices(localActorSystem, artifactServer, zooKeeperUtilityFactory); default: throw new Exception("High availability mode " + highAvailabilityMode + " is not supported."); } }
[ "public", "static", "MesosServices", "createMesosServices", "(", "Configuration", "configuration", ",", "String", "hostname", ")", "throws", "Exception", "{", "ActorSystem", "localActorSystem", "=", "AkkaUtils", ".", "createLocalActorSystem", "(", "configuration", ")", ...
Creates a {@link MesosServices} instance depending on the high availability settings. @param configuration containing the high availability settings @param hostname the hostname to advertise to remote clients @return a mesos services instance @throws Exception if the mesos services instance could not be created
[ "Creates", "a", "{", "@link", "MesosServices", "}", "instance", "depending", "on", "the", "high", "availability", "settings", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/services/MesosServicesUtils.java#L46-L71
train
Creates the MesosServices object based on the configuration.
[ 30522, 2270, 10763, 2033, 17063, 8043, 7903, 2229, 3443, 7834, 15094, 2121, 7903, 2229, 1006, 9563, 9563, 1010, 5164, 3677, 18442, 1007, 11618, 6453, 1063, 5889, 27268, 6633, 2334, 18908, 5668, 27268, 6633, 1027, 17712, 2912, 21823, 4877, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/SlotManager.java
SlotManager.registerSlotRequest
public boolean registerSlotRequest(SlotRequest slotRequest) throws SlotManagerException { checkInit(); if (checkDuplicateRequest(slotRequest.getAllocationId())) { LOG.debug("Ignoring a duplicate slot request with allocation id {}.", slotRequest.getAllocationId()); return false; } else { PendingSlotRequest pendingSlotRequest = new PendingSlotRequest(slotRequest); pendingSlotRequests.put(slotRequest.getAllocationId(), pendingSlotRequest); try { internalRequestSlot(pendingSlotRequest); } catch (ResourceManagerException e) { // requesting the slot failed --> remove pending slot request pendingSlotRequests.remove(slotRequest.getAllocationId()); throw new SlotManagerException("Could not fulfill slot request " + slotRequest.getAllocationId() + '.', e); } return true; } }
java
public boolean registerSlotRequest(SlotRequest slotRequest) throws SlotManagerException { checkInit(); if (checkDuplicateRequest(slotRequest.getAllocationId())) { LOG.debug("Ignoring a duplicate slot request with allocation id {}.", slotRequest.getAllocationId()); return false; } else { PendingSlotRequest pendingSlotRequest = new PendingSlotRequest(slotRequest); pendingSlotRequests.put(slotRequest.getAllocationId(), pendingSlotRequest); try { internalRequestSlot(pendingSlotRequest); } catch (ResourceManagerException e) { // requesting the slot failed --> remove pending slot request pendingSlotRequests.remove(slotRequest.getAllocationId()); throw new SlotManagerException("Could not fulfill slot request " + slotRequest.getAllocationId() + '.', e); } return true; } }
[ "public", "boolean", "registerSlotRequest", "(", "SlotRequest", "slotRequest", ")", "throws", "SlotManagerException", "{", "checkInit", "(", ")", ";", "if", "(", "checkDuplicateRequest", "(", "slotRequest", ".", "getAllocationId", "(", ")", ")", ")", "{", "LOG", ...
Requests a slot with the respective resource profile. @param slotRequest specifying the requested slot specs @return true if the slot request was registered; false if the request is a duplicate @throws SlotManagerException if the slot request failed (e.g. not enough resources left)
[ "Requests", "a", "slot", "with", "the", "respective", "resource", "profile", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/SlotManager.java#L284-L307
train
Registers a slot request.
[ 30522, 2270, 22017, 20898, 18687, 10994, 2890, 15500, 1006, 10453, 2890, 15500, 10453, 2890, 15500, 1007, 11618, 10453, 24805, 4590, 10288, 24422, 1063, 4638, 5498, 2102, 1006, 1007, 1025, 2065, 1006, 4638, 8566, 24759, 24695, 2890, 15500, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java
KeyedStream.countWindow
public WindowedStream<T, KEY, GlobalWindow> countWindow(long size, long slide) { return window(GlobalWindows.create()) .evictor(CountEvictor.of(size)) .trigger(CountTrigger.of(slide)); }
java
public WindowedStream<T, KEY, GlobalWindow> countWindow(long size, long slide) { return window(GlobalWindows.create()) .evictor(CountEvictor.of(size)) .trigger(CountTrigger.of(slide)); }
[ "public", "WindowedStream", "<", "T", ",", "KEY", ",", "GlobalWindow", ">", "countWindow", "(", "long", "size", ",", "long", "slide", ")", "{", "return", "window", "(", "GlobalWindows", ".", "create", "(", ")", ")", ".", "evictor", "(", "CountEvictor", "...
Windows this {@code KeyedStream} into sliding count windows. @param size The size of the windows in number of elements. @param slide The slide interval in number of elements.
[ "Windows", "this", "{", "@code", "KeyedStream", "}", "into", "sliding", "count", "windows", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java#L652-L656
train
Create a new windowed stream that contains a count of the specified size and slide.
[ 30522, 2270, 3332, 2098, 21422, 1026, 1056, 1010, 3145, 1010, 3795, 11101, 5004, 1028, 4175, 11101, 5004, 1006, 2146, 2946, 1010, 2146, 7358, 1007, 1063, 2709, 3332, 1006, 3795, 11101, 15568, 1012, 3443, 1006, 1007, 1007, 1012, 23408, 2594,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/convert/NumberWordFormater.java
NumberWordFormater.transThree
private static String transThree(String s) { String value = ""; if (s.startsWith("0")) {// 是否小於100 value = transTwo(s.substring(1)); } else if (s.substring(1).equals("00")) {// 是否被100整除 value = parseFirst(s.substring(0, 1)) + " HUNDRED"; } else { value = parseFirst(s.substring(0, 1)) + " HUNDRED AND " + transTwo(s.substring(1)); } return value; }
java
private static String transThree(String s) { String value = ""; if (s.startsWith("0")) {// 是否小於100 value = transTwo(s.substring(1)); } else if (s.substring(1).equals("00")) {// 是否被100整除 value = parseFirst(s.substring(0, 1)) + " HUNDRED"; } else { value = parseFirst(s.substring(0, 1)) + " HUNDRED AND " + transTwo(s.substring(1)); } return value; }
[ "private", "static", "String", "transThree", "(", "String", "s", ")", "{", "String", "value", "=", "\"\"", ";", "if", "(", "s", ".", "startsWith", "(", "\"0\"", ")", ")", "{", "// 是否小於100\r", "value", "=", "transTwo", "(", "s", ".", "substring", "(", ...
s.length = 3
[ "s", ".", "length", "=", "3" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/convert/NumberWordFormater.java#L132-L142
train
Trans three string.
[ 30522, 2797, 10763, 5164, 9099, 2705, 9910, 1006, 5164, 1055, 1007, 1063, 5164, 3643, 1027, 1000, 1000, 1025, 2065, 1006, 1055, 1012, 4627, 24415, 1006, 1000, 1014, 1000, 1007, 1007, 1063, 1013, 1013, 100, 100, 1829, 100, 2531, 3643, 1027...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
handler/src/main/java/io/netty/handler/ssl/SslContextBuilder.java
SslContextBuilder.ciphers
public SslContextBuilder ciphers(Iterable<String> ciphers, CipherSuiteFilter cipherFilter) { checkNotNull(cipherFilter, "cipherFilter"); this.ciphers = ciphers; this.cipherFilter = cipherFilter; return this; }
java
public SslContextBuilder ciphers(Iterable<String> ciphers, CipherSuiteFilter cipherFilter) { checkNotNull(cipherFilter, "cipherFilter"); this.ciphers = ciphers; this.cipherFilter = cipherFilter; return this; }
[ "public", "SslContextBuilder", "ciphers", "(", "Iterable", "<", "String", ">", "ciphers", ",", "CipherSuiteFilter", "cipherFilter", ")", "{", "checkNotNull", "(", "cipherFilter", ",", "\"cipherFilter\"", ")", ";", "this", ".", "ciphers", "=", "ciphers", ";", "th...
The cipher suites to enable, in the order of preference. {@code cipherFilter} will be applied to the ciphers before use. If {@code ciphers} is {@code null}, then the default cipher suites will be used.
[ "The", "cipher", "suites", "to", "enable", "in", "the", "order", "of", "preference", ".", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/ssl/SslContextBuilder.java#L369-L374
train
Sets the ciphers for this context.
[ 30522, 2270, 7020, 22499, 10111, 18413, 8569, 23891, 2099, 27715, 2015, 1006, 2009, 6906, 3468, 1026, 5164, 1028, 27715, 2015, 1010, 27715, 28880, 12879, 4014, 3334, 27715, 8873, 21928, 1007, 1063, 4638, 17048, 11231, 3363, 1006, 27715, 8873,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-mesos/src/main/java/org/apache/flink/mesos/Utils.java
Utils.rangeValues
public static LongStream rangeValues(Protos.Value.Range range) { checkNotNull(range); return LongStream.rangeClosed(range.getBegin(), range.getEnd()); }
java
public static LongStream rangeValues(Protos.Value.Range range) { checkNotNull(range); return LongStream.rangeClosed(range.getBegin(), range.getEnd()); }
[ "public", "static", "LongStream", "rangeValues", "(", "Protos", ".", "Value", ".", "Range", "range", ")", "{", "checkNotNull", "(", "range", ")", ";", "return", "LongStream", ".", "rangeClosed", "(", "range", ".", "getBegin", "(", ")", ",", "range", ".", ...
Gets a stream of values from a range.
[ "Gets", "a", "stream", "of", "values", "from", "a", "range", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-mesos/src/main/java/org/apache/flink/mesos/Utils.java#L233-L236
train
Gets range values.
[ 30522, 2270, 10763, 2146, 21422, 2846, 10175, 15808, 1006, 15053, 2015, 30524, 1007, 1063, 4638, 17048, 11231, 3363, 1006, 2846, 1007, 1025, 2709, 2146, 21422, 1012, 2846, 20464, 24768, 1006, 2846, 1012, 2131, 4783, 11528, 1006, 1007, 1010, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/metrics/groups/TaskManagerMetricGroup.java
TaskManagerMetricGroup.addTaskForJob
public TaskMetricGroup addTaskForJob( final JobID jobId, final String jobName, final JobVertexID jobVertexId, final ExecutionAttemptID executionAttemptId, final String taskName, final int subtaskIndex, final int attemptNumber) { Preconditions.checkNotNull(jobId); String resolvedJobName = jobName == null || jobName.isEmpty() ? jobId.toString() : jobName; // we cannot strictly lock both our map modification and the job group modification // because it might lead to a deadlock while (true) { // get or create a jobs metric group TaskManagerJobMetricGroup currentJobGroup; synchronized (this) { currentJobGroup = jobs.get(jobId); if (currentJobGroup == null || currentJobGroup.isClosed()) { currentJobGroup = new TaskManagerJobMetricGroup(registry, this, jobId, resolvedJobName); jobs.put(jobId, currentJobGroup); } } // try to add another task. this may fail if we found a pre-existing job metrics // group and it is closed concurrently TaskMetricGroup taskGroup = currentJobGroup.addTask( jobVertexId, executionAttemptId, taskName, subtaskIndex, attemptNumber); if (taskGroup != null) { // successfully added the next task return taskGroup; } // else fall through the loop } }
java
public TaskMetricGroup addTaskForJob( final JobID jobId, final String jobName, final JobVertexID jobVertexId, final ExecutionAttemptID executionAttemptId, final String taskName, final int subtaskIndex, final int attemptNumber) { Preconditions.checkNotNull(jobId); String resolvedJobName = jobName == null || jobName.isEmpty() ? jobId.toString() : jobName; // we cannot strictly lock both our map modification and the job group modification // because it might lead to a deadlock while (true) { // get or create a jobs metric group TaskManagerJobMetricGroup currentJobGroup; synchronized (this) { currentJobGroup = jobs.get(jobId); if (currentJobGroup == null || currentJobGroup.isClosed()) { currentJobGroup = new TaskManagerJobMetricGroup(registry, this, jobId, resolvedJobName); jobs.put(jobId, currentJobGroup); } } // try to add another task. this may fail if we found a pre-existing job metrics // group and it is closed concurrently TaskMetricGroup taskGroup = currentJobGroup.addTask( jobVertexId, executionAttemptId, taskName, subtaskIndex, attemptNumber); if (taskGroup != null) { // successfully added the next task return taskGroup; } // else fall through the loop } }
[ "public", "TaskMetricGroup", "addTaskForJob", "(", "final", "JobID", "jobId", ",", "final", "String", "jobName", ",", "final", "JobVertexID", "jobVertexId", ",", "final", "ExecutionAttemptID", "executionAttemptId", ",", "final", "String", "taskName", ",", "final", "...
------------------------------------------------------------------------
[ "------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/metrics/groups/TaskManagerMetricGroup.java#L72-L116
train
Adds a task to the job metric group.
[ 30522, 2270, 4708, 12589, 17058, 5587, 10230, 2243, 29278, 5558, 2497, 1006, 2345, 3105, 3593, 3105, 3593, 1010, 2345, 5164, 3105, 18442, 1010, 2345, 3105, 16874, 10288, 3593, 3105, 16874, 10288, 3593, 1010, 2345, 7781, 19321, 6633, 13876, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-java/src/main/java/org/apache/flink/api/java/operators/Operator.java
Operator.setParallelism
public O setParallelism(int parallelism) { Preconditions.checkArgument(parallelism > 0 || parallelism == ExecutionConfig.PARALLELISM_DEFAULT, "The parallelism must be at least one, or ExecutionConfig.PARALLELISM_DEFAULT (use system default)."); this.parallelism = parallelism; @SuppressWarnings("unchecked") O returnType = (O) this; return returnType; }
java
public O setParallelism(int parallelism) { Preconditions.checkArgument(parallelism > 0 || parallelism == ExecutionConfig.PARALLELISM_DEFAULT, "The parallelism must be at least one, or ExecutionConfig.PARALLELISM_DEFAULT (use system default)."); this.parallelism = parallelism; @SuppressWarnings("unchecked") O returnType = (O) this; return returnType; }
[ "public", "O", "setParallelism", "(", "int", "parallelism", ")", "{", "Preconditions", ".", "checkArgument", "(", "parallelism", ">", "0", "||", "parallelism", "==", "ExecutionConfig", ".", "PARALLELISM_DEFAULT", ",", "\"The parallelism must be at least one, or ExecutionC...
Sets the parallelism for this operator. The parallelism must be 1 or more. @param parallelism The parallelism for this operator. A value equal to {@link ExecutionConfig#PARALLELISM_DEFAULT} will use the system default. @return The operator with set parallelism.
[ "Sets", "the", "parallelism", "for", "this", "operator", ".", "The", "parallelism", "must", "be", "1", "or", "more", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/operators/Operator.java#L121-L130
train
Sets the parallelism of the operation.
[ 30522, 2270, 1051, 2275, 28689, 6216, 28235, 1006, 20014, 5903, 2964, 1007, 1063, 3653, 8663, 20562, 2015, 1012, 4638, 2906, 22850, 4765, 1006, 5903, 2964, 1028, 1014, 1064, 1064, 5903, 2964, 1027, 1027, 7781, 8663, 8873, 2290, 1012, 5903, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...