repo
stringclasses
11 values
path
stringlengths
41
214
func_name
stringlengths
7
82
original_string
stringlengths
77
11.9k
language
stringclasses
1 value
code
stringlengths
77
11.9k
code_tokens
listlengths
22
1.57k
docstring
stringlengths
2
2.27k
docstring_tokens
listlengths
1
352
sha
stringclasses
11 values
url
stringlengths
129
319
partition
stringclasses
1 value
summary
stringlengths
7
191
input_ids
listlengths
502
502
token_type_ids
listlengths
502
502
attention_mask
listlengths
502
502
labels
listlengths
502
502
apache/flink
flink-java/src/main/java/org/apache/flink/api/java/operators/SortPartitionOperator.java
SortPartitionOperator.translateToDataFlow
protected org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> translateToDataFlow(Operator<T> input) { String name = "Sort at " + sortLocationName; if (useKeySelector) { return translateToDataFlowWithKeyExtractor(input, (Keys.SelectorFunctionKeys<T, ?>) keys.get(0), orders.get(0), name); } // flatten sort key positions List<Integer> allKeyPositions = new ArrayList<>(); List<Order> allOrders = new ArrayList<>(); for (int i = 0, length = keys.size(); i < length; i++) { int[] sortKeyPositions = keys.get(i).computeLogicalKeyPositions(); Order order = orders.get(i); for (int sortKeyPosition : sortKeyPositions) { allKeyPositions.add(sortKeyPosition); allOrders.add(order); } } Ordering partitionOrdering = new Ordering(); for (int i = 0, length = allKeyPositions.size(); i < length; i++) { partitionOrdering.appendOrdering(allKeyPositions.get(i), null, allOrders.get(i)); } // distinguish between partition types UnaryOperatorInformation<T, T> operatorInfo = new UnaryOperatorInformation<>(getType(), getType()); SortPartitionOperatorBase<T> noop = new SortPartitionOperatorBase<>(operatorInfo, partitionOrdering, name); noop.setInput(input); if (this.getParallelism() < 0) { // use parallelism of input if not explicitly specified noop.setParallelism(input.getParallelism()); } else { // use explicitly specified parallelism noop.setParallelism(this.getParallelism()); } return noop; }
java
protected org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> translateToDataFlow(Operator<T> input) { String name = "Sort at " + sortLocationName; if (useKeySelector) { return translateToDataFlowWithKeyExtractor(input, (Keys.SelectorFunctionKeys<T, ?>) keys.get(0), orders.get(0), name); } // flatten sort key positions List<Integer> allKeyPositions = new ArrayList<>(); List<Order> allOrders = new ArrayList<>(); for (int i = 0, length = keys.size(); i < length; i++) { int[] sortKeyPositions = keys.get(i).computeLogicalKeyPositions(); Order order = orders.get(i); for (int sortKeyPosition : sortKeyPositions) { allKeyPositions.add(sortKeyPosition); allOrders.add(order); } } Ordering partitionOrdering = new Ordering(); for (int i = 0, length = allKeyPositions.size(); i < length; i++) { partitionOrdering.appendOrdering(allKeyPositions.get(i), null, allOrders.get(i)); } // distinguish between partition types UnaryOperatorInformation<T, T> operatorInfo = new UnaryOperatorInformation<>(getType(), getType()); SortPartitionOperatorBase<T> noop = new SortPartitionOperatorBase<>(operatorInfo, partitionOrdering, name); noop.setInput(input); if (this.getParallelism() < 0) { // use parallelism of input if not explicitly specified noop.setParallelism(input.getParallelism()); } else { // use explicitly specified parallelism noop.setParallelism(this.getParallelism()); } return noop; }
[ "protected", "org", ".", "apache", ".", "flink", ".", "api", ".", "common", ".", "operators", ".", "SingleInputOperator", "<", "?", ",", "T", ",", "?", ">", "translateToDataFlow", "(", "Operator", "<", "T", ">", "input", ")", "{", "String", "name", "="...
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/operators/SortPartitionOperator.java#L165-L205
train
Translate the input operator to a data flow.
[ 30522, 5123, 8917, 1012, 15895, 1012, 13109, 19839, 1012, 17928, 1012, 2691, 1012, 9224, 1012, 2309, 2378, 18780, 25918, 8844, 1026, 1029, 1010, 1056, 1010, 1029, 1028, 17637, 3406, 2850, 2696, 12314, 1006, 6872, 1026, 1056, 1028, 7953, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/servlet/context/ServletWebServerApplicationContext.java
ServletWebServerApplicationContext.postProcessBeanFactory
@Override protected void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) { beanFactory.addBeanPostProcessor( new WebApplicationContextServletContextAwareProcessor(this)); beanFactory.ignoreDependencyInterface(ServletContextAware.class); registerWebApplicationScopes(); }
java
@Override protected void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) { beanFactory.addBeanPostProcessor( new WebApplicationContextServletContextAwareProcessor(this)); beanFactory.ignoreDependencyInterface(ServletContextAware.class); registerWebApplicationScopes(); }
[ "@", "Override", "protected", "void", "postProcessBeanFactory", "(", "ConfigurableListableBeanFactory", "beanFactory", ")", "{", "beanFactory", ".", "addBeanPostProcessor", "(", "new", "WebApplicationContextServletContextAwareProcessor", "(", "this", ")", ")", ";", "beanFac...
Register ServletContextAwareProcessor. @see ServletContextAwareProcessor
[ "Register", "ServletContextAwareProcessor", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/servlet/context/ServletWebServerApplicationContext.java#L131-L137
train
Add a post processor to the bean factory.
[ 30522, 1030, 2058, 15637, 5123, 11675, 2695, 21572, 9623, 19022, 11219, 21450, 1006, 9530, 8873, 27390, 3085, 9863, 3085, 4783, 2319, 21450, 14068, 21450, 1007, 1063, 14068, 21450, 1012, 5587, 4783, 2319, 19894, 21572, 9623, 21748, 1006, 2047...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/date/DateUtil.java
DateUtil.weekCount
public static int weekCount(Date start, Date end) { final Calendar startCalendar = Calendar.getInstance(); startCalendar.setTime(start); final Calendar endCalendar = Calendar.getInstance(); endCalendar.setTime(end); final int startWeekofYear = startCalendar.get(Calendar.WEEK_OF_YEAR); final int endWeekofYear = endCalendar.get(Calendar.WEEK_OF_YEAR); int count = endWeekofYear - startWeekofYear + 1; if (Calendar.SUNDAY != startCalendar.get(Calendar.DAY_OF_WEEK)) { count--; } return count; }
java
public static int weekCount(Date start, Date end) { final Calendar startCalendar = Calendar.getInstance(); startCalendar.setTime(start); final Calendar endCalendar = Calendar.getInstance(); endCalendar.setTime(end); final int startWeekofYear = startCalendar.get(Calendar.WEEK_OF_YEAR); final int endWeekofYear = endCalendar.get(Calendar.WEEK_OF_YEAR); int count = endWeekofYear - startWeekofYear + 1; if (Calendar.SUNDAY != startCalendar.get(Calendar.DAY_OF_WEEK)) { count--; } return count; }
[ "public", "static", "int", "weekCount", "(", "Date", "start", ",", "Date", "end", ")", "{", "final", "Calendar", "startCalendar", "=", "Calendar", ".", "getInstance", "(", ")", ";", "startCalendar", ".", "setTime", "(", "start", ")", ";", "final", "Calenda...
计算指定指定时间区间内的周数 @param start 开始时间 @param end 结束时间 @return 周数
[ "计算指定指定时间区间内的周数" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/date/DateUtil.java#L1455-L1471
train
Returns the number of days between two dates.
[ 30522, 2270, 10763, 20014, 2733, 3597, 16671, 1006, 3058, 2707, 1010, 3058, 2203, 1007, 1063, 2345, 8094, 2707, 9289, 10497, 2906, 1027, 8094, 1012, 2131, 7076, 26897, 1006, 1007, 1025, 2707, 9289, 10497, 2906, 1012, 2275, 7292, 1006, 2707,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-cli/src/main/java/org/springframework/boot/cli/compiler/AstUtils.java
AstUtils.subclasses
public static boolean subclasses(ClassNode node, String... types) { for (String type : types) { if (node.getSuperClass().getName().equals(type)) { return true; } } return false; }
java
public static boolean subclasses(ClassNode node, String... types) { for (String type : types) { if (node.getSuperClass().getName().equals(type)) { return true; } } return false; }
[ "public", "static", "boolean", "subclasses", "(", "ClassNode", "node", ",", "String", "...", "types", ")", "{", "for", "(", "String", "type", ":", "types", ")", "{", "if", "(", "node", ".", "getSuperClass", "(", ")", ".", "getName", "(", ")", ".", "e...
Determine if a {@link ClassNode} subclasses any of the specified types N.B. the type names are not normally fully qualified. @param node the class to examine @param types the types that may have been sub-classed @return {@code true} if the class subclasses any of the specified types, otherwise {@code false}
[ "Determine", "if", "a", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-cli/src/main/java/org/springframework/boot/cli/compiler/AstUtils.java#L123-L130
train
Checks if the class node is subclasses of the given types.
[ 30522, 2270, 10763, 22017, 20898, 4942, 26266, 2229, 1006, 2465, 3630, 3207, 13045, 1010, 5164, 1012, 1012, 30524, 1007, 1007, 1063, 2709, 2995, 1025, 1065, 1065, 2709, 6270, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java
ArrayUtil.swap
public static int[] swap(int[] array, int index1, int index2) { if (isEmpty(array)) { throw new IllegalArgumentException("Number array must not empty !"); } int tmp = array[index1]; array[index1] = array[index2]; array[index2] = tmp; return array; }
java
public static int[] swap(int[] array, int index1, int index2) { if (isEmpty(array)) { throw new IllegalArgumentException("Number array must not empty !"); } int tmp = array[index1]; array[index1] = array[index2]; array[index2] = tmp; return array; }
[ "public", "static", "int", "[", "]", "swap", "(", "int", "[", "]", "array", ",", "int", "index1", ",", "int", "index2", ")", "{", "if", "(", "isEmpty", "(", "array", ")", ")", "{", "throw", "new", "IllegalArgumentException", "(", "\"Number array must not...
交换数组中两个位置的值 @param array 数组 @param index1 位置1 @param index2 位置2 @return 交换后的数组,与传入数组为同一对象 @since 4.0.7
[ "交换数组中两个位置的值" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java#L3590-L3598
train
Swaps the elements of an int array at the specified positions.
[ 30522, 2270, 10763, 20014, 1031, 1033, 19948, 1006, 20014, 1031, 1033, 9140, 1010, 20014, 5950, 2487, 1010, 20014, 5950, 2475, 1007, 1063, 2065, 1006, 2003, 6633, 13876, 2100, 1006, 9140, 1007, 1007, 1063, 5466, 2047, 6206, 2906, 22850, 157...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/transformations/FeedbackTransformation.java
FeedbackTransformation.addFeedbackEdge
public void addFeedbackEdge(StreamTransformation<T> transform) { if (transform.getParallelism() != this.getParallelism()) { throw new UnsupportedOperationException( "Parallelism of the feedback stream must match the parallelism of the original" + " stream. Parallelism of original stream: " + this.getParallelism() + "; parallelism of feedback stream: " + transform.getParallelism() + ". Parallelism can be modified using DataStream#setParallelism() method"); } feedbackEdges.add(transform); }
java
public void addFeedbackEdge(StreamTransformation<T> transform) { if (transform.getParallelism() != this.getParallelism()) { throw new UnsupportedOperationException( "Parallelism of the feedback stream must match the parallelism of the original" + " stream. Parallelism of original stream: " + this.getParallelism() + "; parallelism of feedback stream: " + transform.getParallelism() + ". Parallelism can be modified using DataStream#setParallelism() method"); } feedbackEdges.add(transform); }
[ "public", "void", "addFeedbackEdge", "(", "StreamTransformation", "<", "T", ">", "transform", ")", "{", "if", "(", "transform", ".", "getParallelism", "(", ")", "!=", "this", ".", "getParallelism", "(", ")", ")", "{", "throw", "new", "UnsupportedOperationExcep...
Adds a feedback edge. The parallelism of the {@code StreamTransformation} must match the parallelism of the input {@code StreamTransformation} of this {@code FeedbackTransformation} @param transform The new feedback {@code StreamTransformation}.
[ "Adds", "a", "feedback", "edge", ".", "The", "parallelism", "of", "the", "{", "@code", "StreamTransformation", "}", "must", "match", "the", "parallelism", "of", "the", "input", "{", "@code", "StreamTransformation", "}", "of", "this", "{", "@code", "FeedbackTra...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/transformations/FeedbackTransformation.java#L84-L95
train
Adds a feedback edge to this data stream.
[ 30522, 2270, 11675, 5587, 7959, 2098, 5963, 24225, 1006, 5460, 6494, 3619, 14192, 3370, 1026, 1056, 1028, 10938, 1007, 1063, 2065, 1006, 10938, 1012, 2131, 28689, 6216, 28235, 1006, 1007, 999, 1027, 2023, 1012, 2131, 28689, 6216, 28235, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServices.java
YarnIntraNonHaMasterServices.close
@Override public void close() throws Exception { if (enterUnlessClosed()) { try { try { // this class' own cleanup logic resourceManagerLeaderElectionService.shutdown(); dispatcher.shutdownNow(); } finally { // in any case must we call the parent cleanup logic super.close(); } } finally { exit(); } } }
java
@Override public void close() throws Exception { if (enterUnlessClosed()) { try { try { // this class' own cleanup logic resourceManagerLeaderElectionService.shutdown(); dispatcher.shutdownNow(); } finally { // in any case must we call the parent cleanup logic super.close(); } } finally { exit(); } } }
[ "@", "Override", "public", "void", "close", "(", ")", "throws", "Exception", "{", "if", "(", "enterUnlessClosed", "(", ")", ")", "{", "try", "{", "try", "{", "// this class' own cleanup logic", "resourceManagerLeaderElectionService", ".", "shutdown", "(", ")", "...
------------------------------------------------------------------------
[ "------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServices.java#L227-L245
train
Close the connection.
[ 30522, 1030, 2058, 15637, 2270, 11675, 2485, 1006, 1007, 11618, 6453, 1063, 2065, 1006, 4607, 4609, 3238, 20464, 24768, 1006, 1007, 1007, 1063, 3046, 1063, 3046, 1063, 1013, 30524, 1006, 1007, 1025, 1065, 2633, 1063, 1013, 1013, 1999, 2151,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-extra/src/main/java/cn/hutool/extra/template/engine/thymeleaf/ThymeleafTemplate.java
ThymeleafTemplate.wrap
public static ThymeleafTemplate wrap(TemplateEngine engine, String template, Charset charset) { return (null == engine) ? null : new ThymeleafTemplate(engine, template, charset); }
java
public static ThymeleafTemplate wrap(TemplateEngine engine, String template, Charset charset) { return (null == engine) ? null : new ThymeleafTemplate(engine, template, charset); }
[ "public", "static", "ThymeleafTemplate", "wrap", "(", "TemplateEngine", "engine", ",", "String", "template", ",", "Charset", "charset", ")", "{", "return", "(", "null", "==", "engine", ")", "?", "null", ":", "new", "ThymeleafTemplate", "(", "engine", ",", "t...
包装Thymeleaf模板 @param engine Thymeleaf的模板引擎对象 {@link TemplateEngine} @param template 模板路径或模板内容 @param charset 编码 @return {@link ThymeleafTemplate}
[ "包装Thymeleaf模板" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-extra/src/main/java/cn/hutool/extra/template/engine/thymeleaf/ThymeleafTemplate.java#L41-L43
train
Wrap a template in a ThymeleafTemplate.
[ 30522, 2270, 10763, 15177, 10199, 5243, 6199, 6633, 15725, 10236, 1006, 23561, 13159, 3170, 3194, 1010, 5164, 23561, 1010, 25869, 13462, 25869, 13462, 1007, 1063, 2709, 1006, 19701, 1027, 1027, 3194, 1007, 1029, 19701, 1024, 2047, 15177, 1019...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java
Graph.joinWithEdgesOnSource
public <T> Graph<K, VV, EV> joinWithEdgesOnSource(DataSet<Tuple2<K, T>> inputDataSet, final EdgeJoinFunction<EV, T> edgeJoinFunction) { DataSet<Edge<K, EV>> resultedEdges = this.getEdges() .coGroup(inputDataSet).where(0).equalTo(0) .with(new ApplyCoGroupToEdgeValuesOnEitherSourceOrTarget<>(edgeJoinFunction)) .name("Join with edges on source"); return new Graph<>(this.vertices, resultedEdges, this.context); }
java
public <T> Graph<K, VV, EV> joinWithEdgesOnSource(DataSet<Tuple2<K, T>> inputDataSet, final EdgeJoinFunction<EV, T> edgeJoinFunction) { DataSet<Edge<K, EV>> resultedEdges = this.getEdges() .coGroup(inputDataSet).where(0).equalTo(0) .with(new ApplyCoGroupToEdgeValuesOnEitherSourceOrTarget<>(edgeJoinFunction)) .name("Join with edges on source"); return new Graph<>(this.vertices, resultedEdges, this.context); }
[ "public", "<", "T", ">", "Graph", "<", "K", ",", "VV", ",", "EV", ">", "joinWithEdgesOnSource", "(", "DataSet", "<", "Tuple2", "<", "K", ",", "T", ">", ">", "inputDataSet", ",", "final", "EdgeJoinFunction", "<", "EV", ",", "T", ">", "edgeJoinFunction",...
Joins the edge DataSet with an input Tuple2 DataSet and applies a user-defined transformation on the values of the matched records. The source ID of the edges input and the first field of the input DataSet are used as join keys. @param inputDataSet the DataSet to join with. The first field of the Tuple2 is used as the join key and the second field is passed as a parameter to the transformation function. @param edgeJoinFunction the transformation function to apply. The first parameter is the current edge value and the second parameter is the value of the matched Tuple2 from the input DataSet. @param <T> the type of the second field of the input Tuple2 DataSet. @return a new Graph, where the edge values have been updated according to the result of the edgeJoinFunction.
[ "Joins", "the", "edge", "DataSet", "with", "an", "input", "Tuple2", "DataSet", "and", "applies", "a", "user", "-", "defined", "transformation", "on", "the", "values", "of", "the", "matched", "records", ".", "The", "source", "ID", "of", "the", "edges", "inp...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java#L785-L794
train
Join the graph with edges on the source DataSet.
[ 30522, 2270, 1026, 1056, 1028, 10629, 1026, 1047, 1010, 1058, 2615, 1010, 23408, 1028, 3693, 24415, 24225, 23345, 8162, 3401, 1006, 2951, 13462, 1026, 10722, 10814, 2475, 1026, 1047, 1010, 1056, 1028, 1028, 7953, 2850, 18260, 2102, 1010, 23...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-http/src/main/java/cn/hutool/http/HttpConnection.java
HttpConnection.header
public HttpConnection header(Header header, String value, boolean isOverride) { return header(header.toString(), value, isOverride); }
java
public HttpConnection header(Header header, String value, boolean isOverride) { return header(header.toString(), value, isOverride); }
[ "public", "HttpConnection", "header", "(", "Header", "header", ",", "String", "value", ",", "boolean", "isOverride", ")", "{", "return", "header", "(", "header", ".", "toString", "(", ")", ",", "value", ",", "isOverride", ")", ";", "}" ]
设置请求头<br> 当请求头存在时,覆盖之 @param header 头名 @param value 头值 @param isOverride 是否覆盖旧值 @return HttpConnection
[ "设置请求头<br", ">", "当请求头存在时,覆盖之" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-http/src/main/java/cn/hutool/http/HttpConnection.java#L196-L198
train
Adds a header to the connection.
[ 30522, 2270, 8299, 8663, 2638, 7542, 20346, 1006, 20346, 20346, 1010, 5164, 3643, 1010, 22017, 20898, 11163, 6299, 15637, 1007, 1063, 2709, 20346, 1006, 20346, 1012, 2000, 3367, 4892, 1006, 1007, 1010, 3643, 1010, 11163, 6299, 15637, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/origin/OriginTrackedValue.java
OriginTrackedValue.of
public static OriginTrackedValue of(Object value, Origin origin) { if (value == null) { return null; } if (value instanceof CharSequence) { return new OriginTrackedCharSequence((CharSequence) value, origin); } return new OriginTrackedValue(value, origin); }
java
public static OriginTrackedValue of(Object value, Origin origin) { if (value == null) { return null; } if (value instanceof CharSequence) { return new OriginTrackedCharSequence((CharSequence) value, origin); } return new OriginTrackedValue(value, origin); }
[ "public", "static", "OriginTrackedValue", "of", "(", "Object", "value", ",", "Origin", "origin", ")", "{", "if", "(", "value", "==", "null", ")", "{", "return", "null", ";", "}", "if", "(", "value", "instanceof", "CharSequence", ")", "{", "return", "new"...
Create an {@link OriginTrackedValue} containing the specified {@code value} and {@code origin}. If the source value implements {@link CharSequence} then so will the resulting {@link OriginTrackedValue}. @param value the source value @param origin the origin @return an {@link OriginTrackedValue} or {@code null} if the source value was {@code null}.
[ "Create", "an", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/origin/OriginTrackedValue.java#L85-L93
train
Create an OriginTrackedValue from the given value and origin.
[ 30522, 2270, 10763, 4761, 6494, 18141, 10175, 5657, 1997, 1006, 4874, 3643, 1010, 4761, 4761, 1007, 1063, 2065, 1006, 3643, 1027, 1027, 19701, 1007, 1063, 2709, 19701, 1025, 1065, 2065, 1006, 3643, 6013, 11253, 25869, 3366, 4226, 5897, 1007...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/util/KinesisConfigUtil.java
KinesisConfigUtil.backfillConsumerKeys
public static Properties backfillConsumerKeys(Properties configProps) { HashMap<String, String> oldKeyToNewKeys = new HashMap<>(); oldKeyToNewKeys.put(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_BASE, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_BASE); oldKeyToNewKeys.put(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_MAX, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_MAX); oldKeyToNewKeys.put(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_EXPONENTIAL_CONSTANT, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_EXPONENTIAL_CONSTANT); for (Map.Entry<String, String> entry : oldKeyToNewKeys.entrySet()) { String oldKey = entry.getKey(); String newKey = entry.getValue(); if (configProps.containsKey(oldKey)) { configProps.setProperty(newKey, configProps.getProperty(oldKey)); // Do not remove the oldKey since they may be used in the context of talking to DynamoDB streams } } return configProps; }
java
public static Properties backfillConsumerKeys(Properties configProps) { HashMap<String, String> oldKeyToNewKeys = new HashMap<>(); oldKeyToNewKeys.put(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_BASE, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_BASE); oldKeyToNewKeys.put(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_MAX, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_MAX); oldKeyToNewKeys.put(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_EXPONENTIAL_CONSTANT, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_EXPONENTIAL_CONSTANT); for (Map.Entry<String, String> entry : oldKeyToNewKeys.entrySet()) { String oldKey = entry.getKey(); String newKey = entry.getValue(); if (configProps.containsKey(oldKey)) { configProps.setProperty(newKey, configProps.getProperty(oldKey)); // Do not remove the oldKey since they may be used in the context of talking to DynamoDB streams } } return configProps; }
[ "public", "static", "Properties", "backfillConsumerKeys", "(", "Properties", "configProps", ")", "{", "HashMap", "<", "String", ",", "String", ">", "oldKeyToNewKeys", "=", "new", "HashMap", "<>", "(", ")", ";", "oldKeyToNewKeys", ".", "put", "(", "ConsumerConfig...
<p> A set of configuration paremeters associated with the describeStreams API may be used if: 1) an legacy client wants to consume from Kinesis 2) a current client wants to consumer from DynamoDB streams In the context of 1), the set of configurations needs to be translated to the corresponding configurations in the Kinesis listShards API. In the mean time, keep these configs since they are applicable in the context of 2), i.e., polling data from a DynamoDB stream. </p> @param configProps original config properties. @return backfilled config properties.
[ "<p", ">", "A", "set", "of", "configuration", "paremeters", "associated", "with", "the", "describeStreams", "API", "may", "be", "used", "if", ":", "1", ")", "an", "legacy", "client", "wants", "to", "consume", "from", "Kinesis", "2", ")", "a", "current", ...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/util/KinesisConfigUtil.java#L210-L224
train
Backfill the consumer keys in the given configuration properties.
[ 30522, 2270, 10763, 5144, 2067, 8873, 3363, 8663, 23545, 25074, 7274, 1006, 5144, 9530, 8873, 21600, 18981, 2015, 1007, 1063, 23325, 2863, 2361, 1026, 5164, 1010, 5164, 1028, 2214, 14839, 5524, 26291, 3240, 2015, 1027, 2047, 23325, 2863, 23...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ConnectionHandler.java
Http2ConnectionHandler.handleServerHeaderDecodeSizeError
protected void handleServerHeaderDecodeSizeError(ChannelHandlerContext ctx, Http2Stream stream) { encoder().writeHeaders(ctx, stream.id(), HEADERS_TOO_LARGE_HEADERS, 0, true, ctx.newPromise()); }
java
protected void handleServerHeaderDecodeSizeError(ChannelHandlerContext ctx, Http2Stream stream) { encoder().writeHeaders(ctx, stream.id(), HEADERS_TOO_LARGE_HEADERS, 0, true, ctx.newPromise()); }
[ "protected", "void", "handleServerHeaderDecodeSizeError", "(", "ChannelHandlerContext", "ctx", ",", "Http2Stream", "stream", ")", "{", "encoder", "(", ")", ".", "writeHeaders", "(", "ctx", ",", "stream", ".", "id", "(", ")", ",", "HEADERS_TOO_LARGE_HEADERS", ",", ...
Notifies client that this server has received headers that are larger than what it is willing to accept. Override to change behavior. @param ctx the channel context @param stream the Http2Stream on which the header was received
[ "Notifies", "client", "that", "this", "server", "has", "received", "headers", "that", "are", "larger", "than", "what", "it", "is", "willing", "to", "accept", ".", "Override", "to", "change", "behavior", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ConnectionHandler.java#L732-L734
train
Handle server header decode size error.
[ 30522, 5123, 11675, 16024, 2121, 6299, 4974, 2121, 3207, 23237, 4697, 2121, 29165, 1006, 3149, 11774, 3917, 8663, 18209, 14931, 2595, 1010, 8299, 2475, 21422, 5460, 1007, 1063, 4372, 16044, 2099, 1006, 1007, 1012, 4339, 4974, 2545, 1006, 14...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/map/MapUtil.java
MapUtil.get
public static <T> T get(Map<?, ?> map, Object key, Class<T> type) { return null == map ? null : Convert.convert(type, map.get(key)); }
java
public static <T> T get(Map<?, ?> map, Object key, Class<T> type) { return null == map ? null : Convert.convert(type, map.get(key)); }
[ "public", "static", "<", "T", ">", "T", "get", "(", "Map", "<", "?", ",", "?", ">", "map", ",", "Object", "key", ",", "Class", "<", "T", ">", "type", ")", "{", "return", "null", "==", "map", "?", "null", ":", "Convert", ".", "convert", "(", "...
获取Map指定key的值,并转换为指定类型 @param <T> 目标值类型 @param map Map @param key 键 @param type 值类型 @return 值 @since 4.0.6
[ "获取Map指定key的值,并转换为指定类型" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/map/MapUtil.java#L866-L868
train
Get the value of the given key from the map.
[ 30522, 2270, 10763, 1026, 1056, 1028, 1056, 2131, 1006, 4949, 1026, 1029, 1010, 1029, 1028, 4949, 1010, 4874, 3145, 1010, 2465, 1026, 1056, 1028, 2828, 1007, 1063, 2709, 19701, 1027, 1027, 4949, 1029, 19701, 1024, 10463, 1012, 10463, 1006, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http2/src/main/java/io/netty/handler/codec/http2/HttpConversionUtil.java
HttpConversionUtil.toHttp2Headers
public static Http2Headers toHttp2Headers(HttpMessage in, boolean validateHeaders) { HttpHeaders inHeaders = in.headers(); final Http2Headers out = new DefaultHttp2Headers(validateHeaders, inHeaders.size()); if (in instanceof HttpRequest) { HttpRequest request = (HttpRequest) in; URI requestTargetUri = URI.create(request.uri()); out.path(toHttp2Path(requestTargetUri)); out.method(request.method().asciiName()); setHttp2Scheme(inHeaders, requestTargetUri, out); if (!isOriginForm(requestTargetUri) && !isAsteriskForm(requestTargetUri)) { // Attempt to take from HOST header before taking from the request-line String host = inHeaders.getAsString(HttpHeaderNames.HOST); setHttp2Authority((host == null || host.isEmpty()) ? requestTargetUri.getAuthority() : host, out); } } else if (in instanceof HttpResponse) { HttpResponse response = (HttpResponse) in; out.status(response.status().codeAsText()); } // Add the HTTP headers which have not been consumed above toHttp2Headers(inHeaders, out); return out; }
java
public static Http2Headers toHttp2Headers(HttpMessage in, boolean validateHeaders) { HttpHeaders inHeaders = in.headers(); final Http2Headers out = new DefaultHttp2Headers(validateHeaders, inHeaders.size()); if (in instanceof HttpRequest) { HttpRequest request = (HttpRequest) in; URI requestTargetUri = URI.create(request.uri()); out.path(toHttp2Path(requestTargetUri)); out.method(request.method().asciiName()); setHttp2Scheme(inHeaders, requestTargetUri, out); if (!isOriginForm(requestTargetUri) && !isAsteriskForm(requestTargetUri)) { // Attempt to take from HOST header before taking from the request-line String host = inHeaders.getAsString(HttpHeaderNames.HOST); setHttp2Authority((host == null || host.isEmpty()) ? requestTargetUri.getAuthority() : host, out); } } else if (in instanceof HttpResponse) { HttpResponse response = (HttpResponse) in; out.status(response.status().codeAsText()); } // Add the HTTP headers which have not been consumed above toHttp2Headers(inHeaders, out); return out; }
[ "public", "static", "Http2Headers", "toHttp2Headers", "(", "HttpMessage", "in", ",", "boolean", "validateHeaders", ")", "{", "HttpHeaders", "inHeaders", "=", "in", ".", "headers", "(", ")", ";", "final", "Http2Headers", "out", "=", "new", "DefaultHttp2Headers", ...
Converts the given HTTP/1.x headers into HTTP/2 headers. The following headers are only used if they can not be found in from the {@code HOST} header or the {@code Request-Line} as defined by <a href="https://tools.ietf.org/html/rfc7230">rfc7230</a> <ul> <li>{@link ExtensionHeaderNames#SCHEME}</li> </ul> {@link ExtensionHeaderNames#PATH} is ignored and instead extracted from the {@code Request-Line}.
[ "Converts", "the", "given", "HTTP", "/", "1", ".", "x", "headers", "into", "HTTP", "/", "2", "headers", ".", "The", "following", "headers", "are", "only", "used", "if", "they", "can", "not", "be", "found", "in", "from", "the", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/HttpConversionUtil.java#L389-L412
train
Converts a message to a Http2Headers object.
[ 30522, 2270, 10763, 8299, 2475, 4974, 2545, 2000, 11039, 25856, 2475, 4974, 2545, 1006, 8299, 7834, 3736, 3351, 1999, 1010, 22017, 20898, 9398, 3686, 4974, 2545, 1007, 1063, 8299, 4974, 2545, 1999, 4974, 2545, 1027, 1999, 1012, 20346, 2015,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/corpus/dictionary/CommonDictionaryMaker.java
CommonDictionaryMaker.train
public void train(String corpus) { CorpusLoader.walk(corpus, new CorpusLoader.Handler() { @Override public void handle(Document document) { List<List<Word>> simpleSentenceList = document.getSimpleSentenceList(); List<List<IWord>> compatibleList = new LinkedList<List<IWord>>(); for (List<Word> wordList : simpleSentenceList) { compatibleList.add(new LinkedList<IWord>(wordList)); } CommonDictionaryMaker.this.compute(compatibleList); } }); }
java
public void train(String corpus) { CorpusLoader.walk(corpus, new CorpusLoader.Handler() { @Override public void handle(Document document) { List<List<Word>> simpleSentenceList = document.getSimpleSentenceList(); List<List<IWord>> compatibleList = new LinkedList<List<IWord>>(); for (List<Word> wordList : simpleSentenceList) { compatibleList.add(new LinkedList<IWord>(wordList)); } CommonDictionaryMaker.this.compute(compatibleList); } }); }
[ "public", "void", "train", "(", "String", "corpus", ")", "{", "CorpusLoader", ".", "walk", "(", "corpus", ",", "new", "CorpusLoader", ".", "Handler", "(", ")", "{", "@", "Override", "public", "void", "handle", "(", "Document", "document", ")", "{", "List...
训练 @param corpus 语料库路径
[ "训练" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/dictionary/CommonDictionaryMaker.java#L100-L116
train
Train the CRA - Trees
[ 30522, 2270, 11675, 3345, 1006, 5164, 13931, 1007, 1063, 13931, 11066, 2121, 1012, 3328, 1006, 13931, 1010, 2047, 13931, 11066, 2121, 1012, 28213, 1006, 1007, 1063, 1030, 2058, 15637, 2270, 11675, 5047, 1006, 6254, 6254, 1007, 1063, 2862, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexBackPressureHandler.java
JobVertexBackPressureHandler.getBackPressureLevel
private static JobVertexBackPressureInfo.VertexBackPressureLevel getBackPressureLevel(double backPressureRatio) { if (backPressureRatio <= 0.10) { return JobVertexBackPressureInfo.VertexBackPressureLevel.OK; } else if (backPressureRatio <= 0.5) { return JobVertexBackPressureInfo.VertexBackPressureLevel.LOW; } else { return JobVertexBackPressureInfo.VertexBackPressureLevel.HIGH; } }
java
private static JobVertexBackPressureInfo.VertexBackPressureLevel getBackPressureLevel(double backPressureRatio) { if (backPressureRatio <= 0.10) { return JobVertexBackPressureInfo.VertexBackPressureLevel.OK; } else if (backPressureRatio <= 0.5) { return JobVertexBackPressureInfo.VertexBackPressureLevel.LOW; } else { return JobVertexBackPressureInfo.VertexBackPressureLevel.HIGH; } }
[ "private", "static", "JobVertexBackPressureInfo", ".", "VertexBackPressureLevel", "getBackPressureLevel", "(", "double", "backPressureRatio", ")", "{", "if", "(", "backPressureRatio", "<=", "0.10", ")", "{", "return", "JobVertexBackPressureInfo", ".", "VertexBackPressureLev...
Returns the back pressure level as a String. @param backPressureRatio Ratio of back pressures samples to total number of samples. @return Back pressure level ('ok', 'low', or 'high')
[ "Returns", "the", "back", "pressure", "level", "as", "a", "String", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexBackPressureHandler.java#L96-L104
train
Get the back pressure level.
[ 30522, 2797, 10763, 3105, 16874, 10288, 5963, 20110, 5397, 2378, 14876, 1012, 19449, 5963, 20110, 5397, 20414, 2884, 2131, 5963, 20110, 5397, 20414, 2884, 1006, 3313, 2067, 20110, 27595, 10450, 2080, 1007, 1063, 2065, 1006, 2067, 20110, 27595...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/text/csv/CsvParser.java
CsvParser.nextRow
public CsvRow nextRow() throws IORuntimeException { long startingLineNo; List<String> currentFields; int fieldCount; while (false == finished) { startingLineNo = ++lineNo; currentFields = readLine(); if(null == currentFields) { break; } fieldCount = currentFields.size(); // 末尾 if (fieldCount == 0) { break; } // 跳过空行 if (config.skipEmptyRows && fieldCount == 1 && currentFields.get(0).isEmpty()) { continue; } // 检查每行的字段数是否一致 if (config.errorOnDifferentFieldCount) { if (firstLineFieldCount == -1) { firstLineFieldCount = fieldCount; } else if (fieldCount != firstLineFieldCount) { throw new IORuntimeException(String.format("Line %d has %d fields, but first line has %d fields", lineNo, fieldCount, firstLineFieldCount)); } } // 记录最大字段数 if (fieldCount > maxFieldCount) { maxFieldCount = fieldCount; } //初始化标题 if (config.containsHeader && null == header) { initHeader(currentFields); // 作为标题行后,此行跳过,下一行做为第一行 continue; } return new CsvRow(startingLineNo, null == header ? null : header.headerMap, currentFields); } return null; }
java
public CsvRow nextRow() throws IORuntimeException { long startingLineNo; List<String> currentFields; int fieldCount; while (false == finished) { startingLineNo = ++lineNo; currentFields = readLine(); if(null == currentFields) { break; } fieldCount = currentFields.size(); // 末尾 if (fieldCount == 0) { break; } // 跳过空行 if (config.skipEmptyRows && fieldCount == 1 && currentFields.get(0).isEmpty()) { continue; } // 检查每行的字段数是否一致 if (config.errorOnDifferentFieldCount) { if (firstLineFieldCount == -1) { firstLineFieldCount = fieldCount; } else if (fieldCount != firstLineFieldCount) { throw new IORuntimeException(String.format("Line %d has %d fields, but first line has %d fields", lineNo, fieldCount, firstLineFieldCount)); } } // 记录最大字段数 if (fieldCount > maxFieldCount) { maxFieldCount = fieldCount; } //初始化标题 if (config.containsHeader && null == header) { initHeader(currentFields); // 作为标题行后,此行跳过,下一行做为第一行 continue; } return new CsvRow(startingLineNo, null == header ? null : header.headerMap, currentFields); } return null; }
[ "public", "CsvRow", "nextRow", "(", ")", "throws", "IORuntimeException", "{", "long", "startingLineNo", ";", "List", "<", "String", ">", "currentFields", ";", "int", "fieldCount", ";", "while", "(", "false", "==", "finished", ")", "{", "startingLineNo", "=", ...
读取下一行数据 @return CsvRow @throws IORuntimeException IO读取异常
[ "读取下一行数据" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/text/csv/CsvParser.java#L90-L136
train
Returns the next row.
[ 30522, 2270, 20116, 19716, 5004, 2279, 10524, 1006, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 2146, 3225, 4179, 3630, 1025, 2862, 1026, 5164, 1028, 2783, 15155, 1025, 20014, 2492, 3597, 16671, 1025, 2096, 1006, 6270, 1027, 1027, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java
CsvReader.lineDelimiter
public CsvReader lineDelimiter(String delimiter) { if (delimiter == null || delimiter.length() == 0) { throw new IllegalArgumentException("The delimiter must not be null or an empty string"); } this.lineDelimiter = delimiter; return this; }
java
public CsvReader lineDelimiter(String delimiter) { if (delimiter == null || delimiter.length() == 0) { throw new IllegalArgumentException("The delimiter must not be null or an empty string"); } this.lineDelimiter = delimiter; return this; }
[ "public", "CsvReader", "lineDelimiter", "(", "String", "delimiter", ")", "{", "if", "(", "delimiter", "==", "null", "||", "delimiter", ".", "length", "(", ")", "==", "0", ")", "{", "throw", "new", "IllegalArgumentException", "(", "\"The delimiter must not be nul...
Configures the delimiter that separates the lines/rows. The linebreak character ({@code '\n'}) is used by default. @param delimiter The delimiter that separates the rows. @return The CSV reader instance itself, to allow for fluent function chaining.
[ "Configures", "the", "delimiter", "that", "separates", "the", "lines", "/", "rows", ".", "The", "linebreak", "character", "(", "{", "@code", "\\", "n", "}", ")", "is", "used", "by", "default", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java#L97-L104
train
Sets the delimiter used to read the line.
[ 30522, 2270, 20116, 12229, 9648, 2099, 7732, 20806, 23419, 2099, 1006, 5164, 3972, 27605, 3334, 1007, 1063, 2065, 1006, 3972, 27605, 3334, 1027, 1027, 19701, 1064, 1064, 3972, 27605, 3334, 1012, 3091, 1006, 1007, 1027, 1027, 1014, 1007, 106...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/ReferenceCountUtil.java
ReferenceCountUtil.touch
@SuppressWarnings("unchecked") public static <T> T touch(T msg) { if (msg instanceof ReferenceCounted) { return (T) ((ReferenceCounted) msg).touch(); } return msg; }
java
@SuppressWarnings("unchecked") public static <T> T touch(T msg) { if (msg instanceof ReferenceCounted) { return (T) ((ReferenceCounted) msg).touch(); } return msg; }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "public", "static", "<", "T", ">", "T", "touch", "(", "T", "msg", ")", "{", "if", "(", "msg", "instanceof", "ReferenceCounted", ")", "{", "return", "(", "T", ")", "(", "(", "ReferenceCounted", ")", "m...
Tries to call {@link ReferenceCounted#touch()} if the specified message implements {@link ReferenceCounted}. If the specified message doesn't implement {@link ReferenceCounted}, this method does nothing.
[ "Tries", "to", "call", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/ReferenceCountUtil.java#L61-L67
train
touch a message.
[ 30522, 1030, 16081, 9028, 5582, 2015, 1006, 1000, 4895, 5403, 18141, 1000, 1007, 2270, 10763, 1026, 1056, 1028, 1056, 3543, 1006, 1056, 5796, 2290, 1007, 1063, 2065, 1006, 5796, 2290, 6013, 11253, 4431, 3597, 16671, 2098, 1007, 1063, 2709, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java
ImgUtil.cut
public static void cut(Image srcImage, File destFile, Rectangle rectangle) throws IORuntimeException { write(cut(srcImage, rectangle), destFile); }
java
public static void cut(Image srcImage, File destFile, Rectangle rectangle) throws IORuntimeException { write(cut(srcImage, rectangle), destFile); }
[ "public", "static", "void", "cut", "(", "Image", "srcImage", ",", "File", "destFile", ",", "Rectangle", "rectangle", ")", "throws", "IORuntimeException", "{", "write", "(", "cut", "(", "srcImage", ",", "rectangle", ")", ",", "destFile", ")", ";", "}" ]
图像切割(按指定起点坐标和宽高切割),此方法并不关闭流 @param srcImage 源图像 @param destFile 输出的文件 @param rectangle 矩形对象,表示矩形区域的x,y,width,height @since 3.2.2 @throws IORuntimeException IO异常
[ "图像切割", "(", "按指定起点坐标和宽高切割", ")", ",此方法并不关闭流" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/ImgUtil.java#L293-L295
train
Cut the image from srcImage to destFile using the specified rectangle.
[ 30522, 2270, 10763, 11675, 3013, 1006, 3746, 5034, 6895, 26860, 1010, 5371, 4078, 24475, 9463, 1010, 28667, 23395, 28667, 23395, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 4339, 1006, 3013, 1006, 5034, 6895, 26860, 1010, 28667, 2339...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/img/Img.java
Img.cut
public Img cut(Rectangle rectangle) { final BufferedImage srcImage = getValidSrcImg(); rectangle = fixRectangle(rectangle, srcImage.getWidth(), srcImage.getHeight()); final ImageFilter cropFilter = new CropImageFilter(rectangle.x, rectangle.y, rectangle.width, rectangle.height); final Image image = Toolkit.getDefaultToolkit().createImage(new FilteredImageSource(srcImage.getSource(), cropFilter)); this.targetImage = ImgUtil.toBufferedImage(image); return this; }
java
public Img cut(Rectangle rectangle) { final BufferedImage srcImage = getValidSrcImg(); rectangle = fixRectangle(rectangle, srcImage.getWidth(), srcImage.getHeight()); final ImageFilter cropFilter = new CropImageFilter(rectangle.x, rectangle.y, rectangle.width, rectangle.height); final Image image = Toolkit.getDefaultToolkit().createImage(new FilteredImageSource(srcImage.getSource(), cropFilter)); this.targetImage = ImgUtil.toBufferedImage(image); return this; }
[ "public", "Img", "cut", "(", "Rectangle", "rectangle", ")", "{", "final", "BufferedImage", "srcImage", "=", "getValidSrcImg", "(", ")", ";", "rectangle", "=", "fixRectangle", "(", "rectangle", ",", "srcImage", ".", "getWidth", "(", ")", ",", "srcImage", ".",...
图像切割(按指定起点坐标和宽高切割) @param rectangle 矩形对象,表示矩形区域的x,y,width,height @return this
[ "图像切割", "(", "按指定起点坐标和宽高切割", ")" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/Img.java#L285-L293
train
Cut the image to the specified rectangle.
[ 30522, 2270, 10047, 2290, 3013, 1006, 28667, 23395, 28667, 23395, 1007, 1063, 2345, 17698, 2098, 9581, 3351, 5034, 6895, 26860, 1027, 2131, 10175, 9821, 11890, 5714, 2290, 1006, 1007, 1025, 28667, 23395, 1027, 8081, 2890, 25572, 3070, 2571, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/config/entries/DeploymentEntry.java
DeploymentEntry.create
public static DeploymentEntry create(Map<String, Object> config) { return new DeploymentEntry(ConfigUtil.normalizeYaml(config)); }
java
public static DeploymentEntry create(Map<String, Object> config) { return new DeploymentEntry(ConfigUtil.normalizeYaml(config)); }
[ "public", "static", "DeploymentEntry", "create", "(", "Map", "<", "String", ",", "Object", ">", "config", ")", "{", "return", "new", "DeploymentEntry", "(", "ConfigUtil", ".", "normalizeYaml", "(", "config", ")", ")", ";", "}" ]
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/config/entries/DeploymentEntry.java#L131-L133
train
Create a new deployment entry from a map of configuration objects.
[ 30522, 2270, 10763, 10813, 4765, 2854, 3443, 1006, 4949, 1026, 5164, 1010, 4874, 1028, 9530, 8873, 2290, 1007, 1063, 2709, 2047, 10813, 4765, 2854, 1006, 9530, 8873, 27920, 4014, 1012, 3671, 4697, 14852, 2140, 1006, 9530, 8873, 2290, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-filesystem/src/main/java/org/apache/flink/streaming/connectors/fs/bucketing/BucketingSink.java
BucketingSink.createHadoopFileSystem
public static FileSystem createHadoopFileSystem( Path path, @Nullable Configuration extraUserConf) throws IOException { // try to get the Hadoop File System via the Flink File Systems // that way we get the proper configuration final org.apache.flink.core.fs.FileSystem flinkFs = org.apache.flink.core.fs.FileSystem.getUnguardedFileSystem(path.toUri()); final FileSystem hadoopFs = (flinkFs instanceof HadoopFileSystem) ? ((HadoopFileSystem) flinkFs).getHadoopFileSystem() : null; // fast path: if the Flink file system wraps Hadoop anyways and we need no extra config, // then we use it directly if (extraUserConf == null && hadoopFs != null) { return hadoopFs; } else { // we need to re-instantiate the Hadoop file system, because we either have // a special config, or the Path gave us a Flink FS that is not backed by // Hadoop (like file://) final org.apache.hadoop.conf.Configuration hadoopConf; if (hadoopFs != null) { // have a Hadoop FS but need to apply extra config hadoopConf = hadoopFs.getConf(); } else { // the Path gave us a Flink FS that is not backed by Hadoop (like file://) // we need to get access to the Hadoop file system first // we access the Hadoop FS in Flink, which carries the proper // Hadoop configuration. we should get rid of this once the bucketing sink is // properly implemented against Flink's FS abstraction URI genericHdfsUri = URI.create("hdfs://localhost:12345/"); org.apache.flink.core.fs.FileSystem accessor = org.apache.flink.core.fs.FileSystem.getUnguardedFileSystem(genericHdfsUri); if (!(accessor instanceof HadoopFileSystem)) { throw new IOException( "Cannot instantiate a Hadoop file system to access the Hadoop configuration. 
" + "FS for hdfs:// is " + accessor.getClass().getName()); } hadoopConf = ((HadoopFileSystem) accessor).getHadoopFileSystem().getConf(); } // finalize the configuration final org.apache.hadoop.conf.Configuration finalConf; if (extraUserConf == null) { finalConf = hadoopConf; } else { finalConf = new org.apache.hadoop.conf.Configuration(hadoopConf); for (String key : extraUserConf.keySet()) { finalConf.set(key, extraUserConf.getString(key, null)); } } // we explicitly re-instantiate the file system here in order to make sure // that the configuration is applied. URI fsUri = path.toUri(); final String scheme = fsUri.getScheme(); final String authority = fsUri.getAuthority(); if (scheme == null && authority == null) { fsUri = FileSystem.getDefaultUri(finalConf); } else if (scheme != null && authority == null) { URI defaultUri = FileSystem.getDefaultUri(finalConf); if (scheme.equals(defaultUri.getScheme()) && defaultUri.getAuthority() != null) { fsUri = defaultUri; } } final Class<? extends FileSystem> fsClass = FileSystem.getFileSystemClass(fsUri.getScheme(), finalConf); final FileSystem fs; try { fs = fsClass.newInstance(); } catch (Exception e) { throw new IOException("Cannot instantiate the Hadoop file system", e); } fs.initialize(fsUri, finalConf); // We don't perform checksums on Hadoop's local filesystem and use the raw filesystem. // Otherwise buffers are not flushed entirely during checkpointing which results in data loss. if (fs instanceof LocalFileSystem) { return ((LocalFileSystem) fs).getRaw(); } return fs; } }
java
public static FileSystem createHadoopFileSystem( Path path, @Nullable Configuration extraUserConf) throws IOException { // try to get the Hadoop File System via the Flink File Systems // that way we get the proper configuration final org.apache.flink.core.fs.FileSystem flinkFs = org.apache.flink.core.fs.FileSystem.getUnguardedFileSystem(path.toUri()); final FileSystem hadoopFs = (flinkFs instanceof HadoopFileSystem) ? ((HadoopFileSystem) flinkFs).getHadoopFileSystem() : null; // fast path: if the Flink file system wraps Hadoop anyways and we need no extra config, // then we use it directly if (extraUserConf == null && hadoopFs != null) { return hadoopFs; } else { // we need to re-instantiate the Hadoop file system, because we either have // a special config, or the Path gave us a Flink FS that is not backed by // Hadoop (like file://) final org.apache.hadoop.conf.Configuration hadoopConf; if (hadoopFs != null) { // have a Hadoop FS but need to apply extra config hadoopConf = hadoopFs.getConf(); } else { // the Path gave us a Flink FS that is not backed by Hadoop (like file://) // we need to get access to the Hadoop file system first // we access the Hadoop FS in Flink, which carries the proper // Hadoop configuration. we should get rid of this once the bucketing sink is // properly implemented against Flink's FS abstraction URI genericHdfsUri = URI.create("hdfs://localhost:12345/"); org.apache.flink.core.fs.FileSystem accessor = org.apache.flink.core.fs.FileSystem.getUnguardedFileSystem(genericHdfsUri); if (!(accessor instanceof HadoopFileSystem)) { throw new IOException( "Cannot instantiate a Hadoop file system to access the Hadoop configuration. 
" + "FS for hdfs:// is " + accessor.getClass().getName()); } hadoopConf = ((HadoopFileSystem) accessor).getHadoopFileSystem().getConf(); } // finalize the configuration final org.apache.hadoop.conf.Configuration finalConf; if (extraUserConf == null) { finalConf = hadoopConf; } else { finalConf = new org.apache.hadoop.conf.Configuration(hadoopConf); for (String key : extraUserConf.keySet()) { finalConf.set(key, extraUserConf.getString(key, null)); } } // we explicitly re-instantiate the file system here in order to make sure // that the configuration is applied. URI fsUri = path.toUri(); final String scheme = fsUri.getScheme(); final String authority = fsUri.getAuthority(); if (scheme == null && authority == null) { fsUri = FileSystem.getDefaultUri(finalConf); } else if (scheme != null && authority == null) { URI defaultUri = FileSystem.getDefaultUri(finalConf); if (scheme.equals(defaultUri.getScheme()) && defaultUri.getAuthority() != null) { fsUri = defaultUri; } } final Class<? extends FileSystem> fsClass = FileSystem.getFileSystemClass(fsUri.getScheme(), finalConf); final FileSystem fs; try { fs = fsClass.newInstance(); } catch (Exception e) { throw new IOException("Cannot instantiate the Hadoop file system", e); } fs.initialize(fsUri, finalConf); // We don't perform checksums on Hadoop's local filesystem and use the raw filesystem. // Otherwise buffers are not flushed entirely during checkpointing which results in data loss. if (fs instanceof LocalFileSystem) { return ((LocalFileSystem) fs).getRaw(); } return fs; } }
[ "public", "static", "FileSystem", "createHadoopFileSystem", "(", "Path", "path", ",", "@", "Nullable", "Configuration", "extraUserConf", ")", "throws", "IOException", "{", "// try to get the Hadoop File System via the Flink File Systems", "// that way we get the proper configuratio...
------------------------------------------------------------------------
[ "------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-filesystem/src/main/java/org/apache/flink/streaming/connectors/fs/bucketing/BucketingSink.java#L1219-L1316
train
Create a Hadoop file system from the Hadoop file system path.
[ 30522, 2270, 10763, 6764, 27268, 6633, 3443, 16102, 18589, 8873, 4244, 27268, 6633, 1006, 4130, 4130, 1010, 1030, 19701, 3085, 9563, 4469, 20330, 8663, 2546, 1007, 11618, 22834, 10288, 24422, 1063, 1013, 1013, 3046, 2000, 2131, 1996, 2018, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/util/ExceptionUtils.java
ExceptionUtils.tryRethrowIOException
public static void tryRethrowIOException(Throwable t) throws IOException { if (t instanceof IOException) { throw (IOException) t; } else if (t instanceof RuntimeException) { throw (RuntimeException) t; } else if (t instanceof Error) { throw (Error) t; } }
java
public static void tryRethrowIOException(Throwable t) throws IOException { if (t instanceof IOException) { throw (IOException) t; } else if (t instanceof RuntimeException) { throw (RuntimeException) t; } else if (t instanceof Error) { throw (Error) t; } }
[ "public", "static", "void", "tryRethrowIOException", "(", "Throwable", "t", ")", "throws", "IOException", "{", "if", "(", "t", "instanceof", "IOException", ")", "{", "throw", "(", "IOException", ")", "t", ";", "}", "else", "if", "(", "t", "instanceof", "Ru...
Tries to throw the given {@code Throwable} in scenarios where the signatures allows only IOExceptions (and RuntimeException and Error). Throws this exception directly, if it is an IOException, a RuntimeException, or an Error. Otherwise does nothing. @param t The Throwable to be thrown.
[ "Tries", "to", "throw", "the", "given", "{", "@code", "Throwable", "}", "in", "scenarios", "where", "the", "signatures", "allows", "only", "IOExceptions", "(", "and", "RuntimeException", "and", "Error", ")", ".", "Throws", "this", "exception", "directly", "if"...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/util/ExceptionUtils.java#L281-L291
train
Try to throw an IOException.
[ 30522, 2270, 10763, 11675, 3046, 13465, 8093, 5004, 3695, 10288, 24422, 1006, 5466, 3085, 1056, 1007, 11618, 22834, 10288, 24422, 1063, 2065, 1006, 1056, 6013, 11253, 22834, 10288, 24422, 1007, 1063, 5466, 1006, 22834, 10288, 24422, 1007, 105...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
UTF8String.concatWs
public static UTF8String concatWs(UTF8String separator, UTF8String... inputs) { if (separator == null) { return null; } int numInputBytes = 0; // total number of bytes from the inputs int numInputs = 0; // number of non-null inputs for (int i = 0; i < inputs.length; i++) { if (inputs[i] != null) { numInputBytes += inputs[i].numBytes; numInputs++; } } if (numInputs == 0) { // Return an empty string if there is no input, or all the inputs are null. return EMPTY_UTF8; } // Allocate a new byte array, and copy the inputs one by one into it. // The size of the new array is the size of all inputs, plus the separators. final byte[] result = new byte[numInputBytes + (numInputs - 1) * separator.numBytes]; int offset = 0; for (int i = 0, j = 0; i < inputs.length; i++) { if (inputs[i] != null) { int len = inputs[i].numBytes; copyMemory( inputs[i].base, inputs[i].offset, result, BYTE_ARRAY_OFFSET + offset, len); offset += len; j++; // Add separator if this is not the last input. if (j < numInputs) { copyMemory( separator.base, separator.offset, result, BYTE_ARRAY_OFFSET + offset, separator.numBytes); offset += separator.numBytes; } } } return fromBytes(result); }
java
public static UTF8String concatWs(UTF8String separator, UTF8String... inputs) { if (separator == null) { return null; } int numInputBytes = 0; // total number of bytes from the inputs int numInputs = 0; // number of non-null inputs for (int i = 0; i < inputs.length; i++) { if (inputs[i] != null) { numInputBytes += inputs[i].numBytes; numInputs++; } } if (numInputs == 0) { // Return an empty string if there is no input, or all the inputs are null. return EMPTY_UTF8; } // Allocate a new byte array, and copy the inputs one by one into it. // The size of the new array is the size of all inputs, plus the separators. final byte[] result = new byte[numInputBytes + (numInputs - 1) * separator.numBytes]; int offset = 0; for (int i = 0, j = 0; i < inputs.length; i++) { if (inputs[i] != null) { int len = inputs[i].numBytes; copyMemory( inputs[i].base, inputs[i].offset, result, BYTE_ARRAY_OFFSET + offset, len); offset += len; j++; // Add separator if this is not the last input. if (j < numInputs) { copyMemory( separator.base, separator.offset, result, BYTE_ARRAY_OFFSET + offset, separator.numBytes); offset += separator.numBytes; } } } return fromBytes(result); }
[ "public", "static", "UTF8String", "concatWs", "(", "UTF8String", "separator", ",", "UTF8String", "...", "inputs", ")", "{", "if", "(", "separator", "==", "null", ")", "{", "return", "null", ";", "}", "int", "numInputBytes", "=", "0", ";", "// total number of...
Concatenates input strings together into a single string using the separator. A null input is skipped. For example, concat(",", "a", null, "c") would yield "a,c".
[ "Concatenates", "input", "strings", "together", "into", "a", "single", "string", "using", "the", "separator", ".", "A", "null", "input", "is", "skipped", ".", "For", "example", "concat", "(", "a", "null", "c", ")", "would", "yield", "a", "c", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java#L913-L958
train
Concatenate two UTF8Strings.
[ 30522, 2270, 10763, 21183, 2546, 2620, 3367, 4892, 9530, 11266, 9333, 1006, 21183, 2546, 2620, 3367, 4892, 19802, 25879, 2953, 1010, 21183, 2546, 2620, 3367, 4892, 1012, 1012, 1012, 20407, 1007, 1063, 2065, 1006, 19802, 25879, 2953, 1027, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/webservices/client/WebServiceTemplateBuilder.java
WebServiceTemplateBuilder.messageSenders
public WebServiceTemplateBuilder messageSenders( Collection<? extends WebServiceMessageSender> messageSenders) { Assert.notNull(messageSenders, "MessageSenders must not be null"); return new WebServiceTemplateBuilder(this.detectHttpMessageSender, this.interceptors, this.internalCustomizers, this.customizers, this.messageSenders.set(messageSenders), this.marshaller, this.unmarshaller, this.destinationProvider, this.transformerFactoryClass, this.messageFactory); }
java
public WebServiceTemplateBuilder messageSenders( Collection<? extends WebServiceMessageSender> messageSenders) { Assert.notNull(messageSenders, "MessageSenders must not be null"); return new WebServiceTemplateBuilder(this.detectHttpMessageSender, this.interceptors, this.internalCustomizers, this.customizers, this.messageSenders.set(messageSenders), this.marshaller, this.unmarshaller, this.destinationProvider, this.transformerFactoryClass, this.messageFactory); }
[ "public", "WebServiceTemplateBuilder", "messageSenders", "(", "Collection", "<", "?", "extends", "WebServiceMessageSender", ">", "messageSenders", ")", "{", "Assert", ".", "notNull", "(", "messageSenders", ",", "\"MessageSenders must not be null\"", ")", ";", "return", ...
Sets the {@link WebServiceMessageSender WebServiceMessageSenders} that should be used with the {@link WebServiceTemplate}. Setting this value will replace any previously defined message senders, including the HTTP-based message sender, if any. Consider using {@link #additionalMessageSenders(Collection)} to keep it with user-defined message senders. @param messageSenders the message senders to set @return a new builder instance. @see #additionalMessageSenders(Collection) @see #detectHttpMessageSender(boolean)
[ "Sets", "the", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/webservices/client/WebServiceTemplateBuilder.java#L157-L165
train
Sets the message senders.
[ 30522, 2270, 4773, 8043, 7903, 12870, 8737, 13806, 8569, 23891, 2099, 7696, 10497, 2545, 1006, 3074, 1026, 1029, 8908, 4773, 8043, 7903, 21382, 11488, 8449, 10497, 2121, 1028, 7696, 10497, 2545, 1007, 1063, 20865, 1012, 2025, 11231, 3363, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/concurrent/AbstractEventExecutor.java
AbstractEventExecutor.safeExecute
protected static void safeExecute(Runnable task) { try { task.run(); } catch (Throwable t) { logger.warn("A task raised an exception. Task: {}", task, t); } }
java
protected static void safeExecute(Runnable task) { try { task.run(); } catch (Throwable t) { logger.warn("A task raised an exception. Task: {}", task, t); } }
[ "protected", "static", "void", "safeExecute", "(", "Runnable", "task", ")", "{", "try", "{", "task", ".", "run", "(", ")", ";", "}", "catch", "(", "Throwable", "t", ")", "{", "logger", ".", "warn", "(", "\"A task raised an exception. Task: {}\"", ",", "tas...
Try to execute the given {@link Runnable} and just log if it throws a {@link Throwable}.
[ "Try", "to", "execute", "the", "given", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/concurrent/AbstractEventExecutor.java#L161-L167
train
Execute a Runnable.
[ 30522, 5123, 10763, 11675, 3647, 10288, 8586, 10421, 1006, 2448, 22966, 4708, 1007, 1063, 3046, 1063, 4708, 1012, 2448, 1006, 1007, 1025, 1065, 4608, 1006, 5466, 3085, 1056, 1007, 1063, 8833, 4590, 1012, 11582, 1006, 1000, 1037, 4708, 2992,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointStatsCache.java
CheckpointStatsCache.tryAdd
public void tryAdd(AbstractCheckpointStats checkpoint) { // Don't add in progress checkpoints as they will be replaced by their // completed/failed version eventually. if (cache != null && checkpoint != null && !checkpoint.getStatus().isInProgress()) { cache.put(checkpoint.getCheckpointId(), checkpoint); } }
java
public void tryAdd(AbstractCheckpointStats checkpoint) { // Don't add in progress checkpoints as they will be replaced by their // completed/failed version eventually. if (cache != null && checkpoint != null && !checkpoint.getStatus().isInProgress()) { cache.put(checkpoint.getCheckpointId(), checkpoint); } }
[ "public", "void", "tryAdd", "(", "AbstractCheckpointStats", "checkpoint", ")", "{", "// Don't add in progress checkpoints as they will be replaced by their", "// completed/failed version eventually.", "if", "(", "cache", "!=", "null", "&&", "checkpoint", "!=", "null", "&&", "...
Try to add the checkpoint to the cache. @param checkpoint Checkpoint to be added.
[ "Try", "to", "add", "the", "checkpoint", "to", "the", "cache", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointStatsCache.java#L59-L65
train
Add a checkpoint to the cache.
[ 30522, 2270, 11675, 3046, 4215, 2094, 1006, 10061, 5403, 3600, 26521, 29336, 2015, 26520, 1007, 1063, 1013, 1013, 2123, 1005, 1056, 5587, 1999, 5082, 26520, 2015, 2004, 2027, 2097, 2022, 2999, 2011, 2037, 1013, 1013, 2949, 1013, 3478, 2544,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/HashUtil.java
HashUtil.mixHash
public static long mixHash(String str) { long hash = str.hashCode(); hash <<= 32; hash |= fnvHash(str); return hash; }
java
public static long mixHash(String str) { long hash = str.hashCode(); hash <<= 32; hash |= fnvHash(str); return hash; }
[ "public", "static", "long", "mixHash", "(", "String", "str", ")", "{", "long", "hash", "=", "str", ".", "hashCode", "(", ")", ";", "hash", "<<=", "32", ";", "hash", "|=", "fnvHash", "(", "str", ")", ";", "return", "hash", ";", "}" ]
混合hash算法,输出64位的值 @param str 字符串 @return hash值
[ "混合hash算法,输出64位的值" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/HashUtil.java#L426-L431
train
Returns the hash value of the given string.
[ 30522, 2270, 10763, 2146, 4666, 14949, 2232, 1006, 5164, 2358, 2099, 1007, 1063, 2146, 23325, 1027, 2358, 2099, 1012, 23325, 16044, 1006, 1007, 1025, 23325, 1026, 1026, 1027, 3590, 1025, 23325, 1064, 1027, 1042, 2078, 2615, 14949, 2232, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlDateTimeUtils.java
SqlDateTimeUtils.dateToInternal
public static int dateToInternal(java.sql.Date date, TimeZone tz) { long ts = date.getTime() + tz.getOffset(date.getTime()); return (int) (ts / MILLIS_PER_DAY); }
java
public static int dateToInternal(java.sql.Date date, TimeZone tz) { long ts = date.getTime() + tz.getOffset(date.getTime()); return (int) (ts / MILLIS_PER_DAY); }
[ "public", "static", "int", "dateToInternal", "(", "java", ".", "sql", ".", "Date", "date", ",", "TimeZone", "tz", ")", "{", "long", "ts", "=", "date", ".", "getTime", "(", ")", "+", "tz", ".", "getOffset", "(", "date", ".", "getTime", "(", ")", ")"...
Converts the Java type used for UDF parameters of SQL DATE type ({@link java.sql.Date}) to internal representation (int). <p>Converse of {@link #internalToDate(int)}.
[ "Converts", "the", "Java", "type", "used", "for", "UDF", "parameters", "of", "SQL", "DATE", "type", "(", "{", "@link", "java", ".", "sql", ".", "Date", "}", ")", "to", "internal", "representation", "(", "int", ")", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlDateTimeUtils.java#L180-L183
train
Convert java. sql. Date to internal int.
[ 30522, 2270, 10763, 20014, 3058, 3406, 18447, 11795, 2389, 1006, 9262, 1012, 29296, 1012, 30524, 1056, 2480, 1012, 2131, 27475, 3388, 1006, 3058, 1012, 2131, 7292, 1006, 1007, 1007, 1025, 2709, 1006, 20014, 1007, 1006, 24529, 1013, 4971, 24...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/collection/CollUtil.java
CollUtil.reverseNew
public static <T> List<T> reverseNew(List<T> list) { final List<T> list2 = ObjectUtil.clone(list); return reverse(list2); }
java
public static <T> List<T> reverseNew(List<T> list) { final List<T> list2 = ObjectUtil.clone(list); return reverse(list2); }
[ "public", "static", "<", "T", ">", "List", "<", "T", ">", "reverseNew", "(", "List", "<", "T", ">", "list", ")", "{", "final", "List", "<", "T", ">", "list2", "=", "ObjectUtil", ".", "clone", "(", "list", ")", ";", "return", "reverse", "(", "list...
反序给定List,会创建一个新的List,原List数据不变 @param <T> 元素类型 @param list 被反转的List @return 反转后的List @since 4.0.6
[ "反序给定List,会创建一个新的List,原List数据不变" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/collection/CollUtil.java#L2373-L2376
train
Returns a new list with the reverse of the given list.
[ 30522, 2270, 10763, 1026, 1056, 1028, 2862, 1026, 1056, 1028, 7901, 2638, 2860, 1006, 2862, 1026, 1056, 1028, 2862, 1007, 1063, 2345, 2862, 1026, 1056, 1028, 2862, 2475, 1027, 4874, 21823, 2140, 1012, 17598, 1006, 2862, 1007, 1025, 2709, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/algorithm/ahocorasick/trie/Trie.java
Trie.constructFailureStates
private void constructFailureStates() { Queue<State> queue = new LinkedBlockingDeque<State>(); // 第一步,将深度为1的节点的failure设为根节点 for (State depthOneState : this.rootState.getStates()) { depthOneState.setFailure(this.rootState); queue.add(depthOneState); } this.failureStatesConstructed = true; // 第二步,为深度 > 1 的节点建立failure表,这是一个bfs while (!queue.isEmpty()) { State currentState = queue.remove(); for (Character transition : currentState.getTransitions()) { State targetState = currentState.nextState(transition); queue.add(targetState); State traceFailureState = currentState.failure(); while (traceFailureState.nextState(transition) == null) { traceFailureState = traceFailureState.failure(); } State newFailureState = traceFailureState.nextState(transition); targetState.setFailure(newFailureState); targetState.addEmit(newFailureState.emit()); } } }
java
private void constructFailureStates() { Queue<State> queue = new LinkedBlockingDeque<State>(); // 第一步,将深度为1的节点的failure设为根节点 for (State depthOneState : this.rootState.getStates()) { depthOneState.setFailure(this.rootState); queue.add(depthOneState); } this.failureStatesConstructed = true; // 第二步,为深度 > 1 的节点建立failure表,这是一个bfs while (!queue.isEmpty()) { State currentState = queue.remove(); for (Character transition : currentState.getTransitions()) { State targetState = currentState.nextState(transition); queue.add(targetState); State traceFailureState = currentState.failure(); while (traceFailureState.nextState(transition) == null) { traceFailureState = traceFailureState.failure(); } State newFailureState = traceFailureState.nextState(transition); targetState.setFailure(newFailureState); targetState.addEmit(newFailureState.emit()); } } }
[ "private", "void", "constructFailureStates", "(", ")", "{", "Queue", "<", "State", ">", "queue", "=", "new", "LinkedBlockingDeque", "<", "State", ">", "(", ")", ";", "// 第一步,将深度为1的节点的failure设为根节点", "for", "(", "State", "depthOneState", ":", "this", ".", "rootS...
建立failure表
[ "建立failure表" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/algorithm/ahocorasick/trie/Trie.java#L233-L265
train
Construct failure states.
[ 30522, 2797, 11675, 9570, 7011, 4014, 14900, 12259, 2015, 1006, 1007, 1063, 24240, 1026, 2110, 1028, 24240, 1027, 2047, 5799, 23467, 2075, 3207, 4226, 1026, 2110, 1028, 1006, 1007, 1025, 1013, 1013, 100, 1740, 100, 1989, 100, 100, 100, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/SqlConnRunner.java
SqlConnRunner.page
public PageResult<Entity> page(Connection conn, Collection<String> fields, Entity where, int page, int numPerPage) throws SQLException { checkConn(conn); final int count = count(conn, where); PageResultHandler pageResultHandler = PageResultHandler.create(new PageResult<Entity>(page, numPerPage, count)); return this.page(conn, fields, where, page, numPerPage, pageResultHandler); }
java
public PageResult<Entity> page(Connection conn, Collection<String> fields, Entity where, int page, int numPerPage) throws SQLException { checkConn(conn); final int count = count(conn, where); PageResultHandler pageResultHandler = PageResultHandler.create(new PageResult<Entity>(page, numPerPage, count)); return this.page(conn, fields, where, page, numPerPage, pageResultHandler); }
[ "public", "PageResult", "<", "Entity", ">", "page", "(", "Connection", "conn", ",", "Collection", "<", "String", ">", "fields", ",", "Entity", "where", ",", "int", "page", ",", "int", "numPerPage", ")", "throws", "SQLException", "{", "checkConn", "(", "con...
分页查询<br> 此方法不会关闭Connection @param conn 数据库连接对象 @param fields 返回的字段列表,null则返回所有字段 @param where 条件实体类(包含表名) @param page 页码 @param numPerPage 每页条目数 @return 结果对象 @throws SQLException SQL执行异常
[ "分页查询<br", ">", "此方法不会关闭Connection" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/SqlConnRunner.java#L509-L515
train
Retrieves a page of entities from a database connection.
[ 30522, 2270, 3931, 6072, 11314, 1026, 9178, 1028, 3931, 1006, 4434, 9530, 2078, 1010, 3074, 1026, 5164, 1028, 4249, 1010, 9178, 2073, 1010, 20014, 3931, 1010, 20014, 16371, 8737, 2121, 13704, 1007, 11618, 29296, 10288, 24422, 1063, 4638, 86...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
handler/src/main/java/io/netty/handler/ssl/OpenSsl.java
OpenSsl.selfSignedCertificate
/**
 * Returns a self-signed {@link X509Certificate}, parsed from the embedded
 * PEM constant {@code CERT}.
 *
 * @return the parsed certificate
 * @throws CertificateException if the embedded certificate cannot be parsed
 */
static X509Certificate selfSignedCertificate() throws CertificateException {
    // The PEM text is pure ASCII, so US_ASCII is the correct decoding charset.
    ByteArrayInputStream certStream = new ByteArrayInputStream(CERT.getBytes(CharsetUtil.US_ASCII));
    return (X509Certificate) SslContext.X509_CERT_FACTORY.generateCertificate(certStream);
}
java
static X509Certificate selfSignedCertificate() throws CertificateException { return (X509Certificate) SslContext.X509_CERT_FACTORY.generateCertificate( new ByteArrayInputStream(CERT.getBytes(CharsetUtil.US_ASCII)) ); }
[ "static", "X509Certificate", "selfSignedCertificate", "(", ")", "throws", "CertificateException", "{", "return", "(", "X509Certificate", ")", "SslContext", ".", "X509_CERT_FACTORY", ".", "generateCertificate", "(", "new", "ByteArrayInputStream", "(", "CERT", ".", "getBy...
Returns a self-signed {@link X509Certificate} for {@code netty.io}.
[ "Returns", "a", "self", "-", "signed", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/ssl/OpenSsl.java#L367-L371
train
Generates a self-signed certificate for netty.io.
[ 30522, 10763, 1060, 12376, 2683, 17119, 3775, 8873, 16280, 2969, 5332, 19225, 17119, 3775, 8873, 16280, 1006, 1007, 11618, 8196, 10288, 24422, 1063, 2709, 1006, 1060, 12376, 2683, 17119, 3775, 8873, 16280, 1007, 7020, 22499, 10111, 18413, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/api/reader/AbstractReader.java
AbstractReader.handleEvent
protected boolean handleEvent(AbstractEvent event) throws IOException { final Class<?> eventType = event.getClass(); try { // ------------------------------------------------------------ // Runtime events // ------------------------------------------------------------ // This event is also checked at the (single) input gate to release the respective // channel, at which it was received. if (eventType == EndOfPartitionEvent.class) { return true; } else if (eventType == EndOfSuperstepEvent.class) { return incrementEndOfSuperstepEventAndCheck(); } // ------------------------------------------------------------ // Task events (user) // ------------------------------------------------------------ else if (event instanceof TaskEvent) { taskEventHandler.publish((TaskEvent) event); return false; } else { throw new IllegalStateException("Received unexpected event of type " + eventType + " at reader."); } } catch (Throwable t) { throw new IOException("Error while handling event of type " + eventType + ": " + t.getMessage(), t); } }
java
protected boolean handleEvent(AbstractEvent event) throws IOException { final Class<?> eventType = event.getClass(); try { // ------------------------------------------------------------ // Runtime events // ------------------------------------------------------------ // This event is also checked at the (single) input gate to release the respective // channel, at which it was received. if (eventType == EndOfPartitionEvent.class) { return true; } else if (eventType == EndOfSuperstepEvent.class) { return incrementEndOfSuperstepEventAndCheck(); } // ------------------------------------------------------------ // Task events (user) // ------------------------------------------------------------ else if (event instanceof TaskEvent) { taskEventHandler.publish((TaskEvent) event); return false; } else { throw new IllegalStateException("Received unexpected event of type " + eventType + " at reader."); } } catch (Throwable t) { throw new IOException("Error while handling event of type " + eventType + ": " + t.getMessage(), t); } }
[ "protected", "boolean", "handleEvent", "(", "AbstractEvent", "event", ")", "throws", "IOException", "{", "final", "Class", "<", "?", ">", "eventType", "=", "event", ".", "getClass", "(", ")", ";", "try", "{", "// -----------------------------------------------------...
Handles the event and returns whether the reader reached an end-of-stream event (either the end of the whole stream or the end of an superstep).
[ "Handles", "the", "event", "and", "returns", "whether", "the", "reader", "reached", "an", "end", "-", "of", "-", "stream", "event", "(", "either", "the", "end", "of", "the", "whole", "stream", "or", "the", "end", "of", "an", "superstep", ")", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/api/reader/AbstractReader.java#L80-L112
train
Handle an event.
[ 30522, 5123, 22017, 20898, 5047, 18697, 3372, 1006, 10061, 18697, 3372, 2724, 1007, 11618, 22834, 10288, 24422, 1063, 2345, 2465, 1026, 1029, 1028, 2724, 13874, 1027, 2724, 1012, 2131, 26266, 1006, 1007, 1025, 3046, 1063, 1013, 1013, 1011, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/operations/ColumnOperationUtils.java
ColumnOperationUtils.renameColumns
/**
 * Creates a projection list that renames existing columns to new names.
 * Resulting expressions are still unresolved.
 *
 * @param inputFields names of the current columns
 * @param newAliases new aliases for current columns
 * @return projection expressions
 */
public static List<Expression> renameColumns(List<String> inputFields, List<Expression> newAliases) {
    // Insertion order must be preserved so the projection keeps column order.
    LinkedHashMap<String, Expression> projection = new LinkedHashMap<>();

    // Start from an identity projection of every existing column.
    for (String field : inputFields) {
        projection.put(field, new UnresolvedReferenceExpression(field));
    }

    // Overwrite the entries that are being renamed.
    for (Expression alias : newAliases) {
        String name = alias.accept(renameColumnExtractor);
        projection.put(name, alias);
    }

    return new ArrayList<>(projection.values());
}
java
public static List<Expression> renameColumns(List<String> inputFields, List<Expression> newAliases) { LinkedHashMap<String, Expression> finalFields = new LinkedHashMap<>(); inputFields.forEach(field -> finalFields.put(field, new UnresolvedReferenceExpression(field))); newAliases.forEach(expr -> { String name = expr.accept(renameColumnExtractor); finalFields.put(name, expr); }); return new ArrayList<>(finalFields.values()); }
[ "public", "static", "List", "<", "Expression", ">", "renameColumns", "(", "List", "<", "String", ">", "inputFields", ",", "List", "<", "Expression", ">", "newAliases", ")", "{", "LinkedHashMap", "<", "String", ",", "Expression", ">", "finalFields", "=", "new...
Creates a projection list that renames existing columns to new names. <p><b>NOTE:</b> Resulting expression are still unresolved. @param inputFields names of current columns @param newAliases new aliases for current columns @return projection expressions
[ "Creates", "a", "projection", "list", "that", "renames", "existing", "columns", "to", "new", "names", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/operations/ColumnOperationUtils.java#L57-L67
train
Returns a list of expressions that rename the input columns to the new columns.
[ 30522, 2270, 10763, 2862, 1026, 3670, 1028, 14916, 14074, 25778, 2819, 3619, 1006, 2862, 1026, 5164, 1028, 7953, 15155, 1010, 2862, 1026, 3670, 1028, 2047, 22786, 8583, 1007, 1063, 5799, 14949, 22444, 2361, 1026, 5164, 1010, 3670, 1028, 234...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/RandomUtil.java
RandomUtil.randomBigDecimal
/**
 * Returns a random BigDecimal in the range [0, limit).
 *
 * @param limit the exclusive upper bound
 * @return a random BigDecimal below the given limit
 * @since 4.0.9
 */
public static BigDecimal randomBigDecimal(BigDecimal limit) {
    // The bound is taken through double, so precision beyond double is lost here.
    final double bound = limit.doubleValue();
    return NumberUtil.toBigDecimal(getRandom().nextDouble(bound));
}
java
public static BigDecimal randomBigDecimal(BigDecimal limit) { return NumberUtil.toBigDecimal(getRandom().nextDouble(limit.doubleValue())); }
[ "public", "static", "BigDecimal", "randomBigDecimal", "(", "BigDecimal", "limit", ")", "{", "return", "NumberUtil", ".", "toBigDecimal", "(", "getRandom", "(", ")", ".", "nextDouble", "(", "limit", ".", "doubleValue", "(", ")", ")", ")", ";", "}" ]
获得指定范围内的随机数 [0,limit) @param limit 最大数(不包含) @return 随机数 @since 4.0.9
[ "获得指定范围内的随机数", "[", "0", "limit", ")" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/RandomUtil.java#L227-L229
train
Returns a random BigDecimal in the range [0, limit).
[ 30522, 2270, 10763, 2502, 3207, 6895, 9067, 6721, 5638, 2290, 3207, 6895, 9067, 1006, 2502, 3207, 6895, 9067, 5787, 1007, 1063, 2709, 2193, 21823, 2140, 1012, 2000, 5638, 2290, 3207, 6895, 9067, 1006, 2131, 13033, 5358, 1006, 1007, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/map/MapUtil.java
MapUtil.getStr
/**
 * Gets the value for the given key from the map, converted to a String.
 *
 * @param map the map to read from
 * @param key the key to look up
 * @return the value converted to String, as produced by {@code get(map, key, String.class)}
 * @since 4.0.6
 */
public static String getStr(Map<?, ?> map, Object key) {
    return get(map, key, String.class);
}
java
public static String getStr(Map<?, ?> map, Object key) { return get(map, key, String.class); }
[ "public", "static", "String", "getStr", "(", "Map", "<", "?", ",", "?", ">", "map", ",", "Object", "key", ")", "{", "return", "get", "(", "map", ",", "key", ",", "String", ".", "class", ")", ";", "}" ]
获取Map指定key的值,并转换为字符串 @param map Map @param key 键 @return 值 @since 4.0.6
[ "获取Map指定key的值,并转换为字符串" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/map/MapUtil.java#L756-L758
train
Gets the string value from the map.
[ 30522, 2270, 10763, 5164, 4152, 16344, 1006, 4949, 1026, 1029, 1010, 1029, 1028, 4949, 1010, 4874, 3145, 1007, 1063, 2709, 2131, 1006, 4949, 1010, 3145, 1010, 5164, 1012, 2465, 1007, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-http/src/main/java/cn/hutool/http/HttpRequest.java
HttpRequest.form
/**
 * Adds a file form item, using the file's own name as the upload file name.
 * Per the original documentation: once a file is added, the form becomes
 * multipart/form-data.
 *
 * @param name the form field name
 * @param file the file to upload
 * @return this request, for chaining
 */
public HttpRequest form(String name, File file) {
    return form(name, file, file.getName());
}
java
public HttpRequest form(String name, File file) { return form(name, file, file.getName()); }
[ "public", "HttpRequest", "form", "(", "String", "name", ",", "File", "file", ")", "{", "return", "form", "(", "name", ",", "file", ",", "file", ".", "getName", "(", ")", ")", ";", "}" ]
文件表单项<br> 一旦有文件加入,表单变为multipart/form-data @param name 名 @param file 需要上传的文件 @return this
[ "文件表单项<br", ">", "一旦有文件加入,表单变为multipart", "/", "form", "-", "data" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-http/src/main/java/cn/hutool/http/HttpRequest.java#L515-L517
train
Adds a form file to the request.
[ 30522, 2270, 8299, 2890, 15500, 2433, 1006, 5164, 2171, 1010, 5371, 5371, 1007, 1063, 2709, 2433, 1006, 2171, 1010, 5371, 1010, 5371, 1012, 2131, 18442, 1006, 1007, 1007, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/operators/DataSinkTask.java
DataSinkTask.initOutputFormat
/**
 * Initializes the OutputFormat implementation and its configuration from the
 * task configuration, instantiating it with the user-code class loader.
 *
 * @throws RuntimeException if the OutputFormat instance cannot be obtained,
 *         is not a proper subclass of OutputFormat, or its configure() fails
 */
private void initOutputFormat() {
    ClassLoader userCodeClassLoader = getUserCodeClassLoader();
    // obtain task configuration (including stub parameters)
    Configuration taskConf = getTaskConfiguration();
    this.config = new TaskConfig(taskConf);

    try {
        this.format = config.<OutputFormat<IT>>getStubWrapper(userCodeClassLoader).getUserCodeObject(OutputFormat.class, userCodeClassLoader);

        // check if the class is a subclass, if the check is required
        if (!OutputFormat.class.isAssignableFrom(this.format.getClass())) {
            throw new RuntimeException("The class '" + this.format.getClass().getName() + "' is not a subclass of '"
                    + OutputFormat.class.getName() + "' as is required.");
        }
    }
    catch (ClassCastException ccex) {
        throw new RuntimeException("The stub class is not a proper subclass of " + OutputFormat.class.getName(), ccex);
    }

    Thread thread = Thread.currentThread();
    ClassLoader original = thread.getContextClassLoader();
    // configure the stub. catch exceptions here extra, to report them as originating from the user code
    try {
        thread.setContextClassLoader(userCodeClassLoader);
        this.format.configure(this.config.getStubParameters());
    }
    catch (Throwable t) {
        throw new RuntimeException("The user defined 'configure()' method in the Output Format caused an error: "
                + t.getMessage(), t);
    }
    finally {
        // Always restore the caller's context class loader.
        thread.setContextClassLoader(original);
    }
}
java
private void initOutputFormat() { ClassLoader userCodeClassLoader = getUserCodeClassLoader(); // obtain task configuration (including stub parameters) Configuration taskConf = getTaskConfiguration(); this.config = new TaskConfig(taskConf); try { this.format = config.<OutputFormat<IT>>getStubWrapper(userCodeClassLoader).getUserCodeObject(OutputFormat.class, userCodeClassLoader); // check if the class is a subclass, if the check is required if (!OutputFormat.class.isAssignableFrom(this.format.getClass())) { throw new RuntimeException("The class '" + this.format.getClass().getName() + "' is not a subclass of '" + OutputFormat.class.getName() + "' as is required."); } } catch (ClassCastException ccex) { throw new RuntimeException("The stub class is not a proper subclass of " + OutputFormat.class.getName(), ccex); } Thread thread = Thread.currentThread(); ClassLoader original = thread.getContextClassLoader(); // configure the stub. catch exceptions here extra, to report them as originating from the user code try { thread.setContextClassLoader(userCodeClassLoader); this.format.configure(this.config.getStubParameters()); } catch (Throwable t) { throw new RuntimeException("The user defined 'configure()' method in the Output Format caused an error: " + t.getMessage(), t); } finally { thread.setContextClassLoader(original); } }
[ "private", "void", "initOutputFormat", "(", ")", "{", "ClassLoader", "userCodeClassLoader", "=", "getUserCodeClassLoader", "(", ")", ";", "// obtain task configuration (including stub parameters)", "Configuration", "taskConf", "=", "getTaskConfiguration", "(", ")", ";", "th...
Initializes the OutputFormat implementation and configuration. @throws RuntimeException Throws if instance of OutputFormat implementation can not be obtained.
[ "Initializes", "the", "OutputFormat", "implementation", "and", "configuration", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/operators/DataSinkTask.java#L319-L352
train
Initialize the output format.
[ 30522, 2797, 11675, 1999, 9956, 4904, 18780, 14192, 4017, 1006, 1007, 1063, 2465, 11066, 2121, 5310, 16044, 26266, 11066, 2121, 1027, 2131, 20330, 16044, 26266, 11066, 2121, 1006, 1007, 1025, 1013, 1013, 6855, 4708, 9563, 1006, 2164, 24646, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java
Graph.runScatterGatherIteration
/**
 * Runs a scatter-gather iteration on the graph with no configuration options
 * (delegates to the configurable overload with a null configuration).
 *
 * @param scatterFunction the scatter function
 * @param gatherFunction the gather function
 * @param maximumNumberOfIterations maximum number of iterations to perform
 * @return the updated Graph after the scatter-gather iteration has converged
 *         or after maximumNumberOfIterations
 */
public <M> Graph<K, VV, EV> runScatterGatherIteration(
        ScatterFunction<K, VV, M, EV> scatterFunction,
        org.apache.flink.graph.spargel.GatherFunction<K, VV, M> gatherFunction,
        int maximumNumberOfIterations) {

    return this.runScatterGatherIteration(scatterFunction, gatherFunction,
            maximumNumberOfIterations, null);
}
java
public <M> Graph<K, VV, EV> runScatterGatherIteration( ScatterFunction<K, VV, M, EV> scatterFunction, org.apache.flink.graph.spargel.GatherFunction<K, VV, M> gatherFunction, int maximumNumberOfIterations) { return this.runScatterGatherIteration(scatterFunction, gatherFunction, maximumNumberOfIterations, null); }
[ "public", "<", "M", ">", "Graph", "<", "K", ",", "VV", ",", "EV", ">", "runScatterGatherIteration", "(", "ScatterFunction", "<", "K", ",", "VV", ",", "M", ",", "EV", ">", "scatterFunction", ",", "org", ".", "apache", ".", "flink", ".", "graph", ".", ...
Runs a ScatterGather iteration on the graph. No configuration options are provided. @param scatterFunction the scatter function @param gatherFunction the gather function @param maximumNumberOfIterations maximum number of iterations to perform @return the updated Graph after the scatter-gather iteration has converged or after maximumNumberOfIterations.
[ "Runs", "a", "ScatterGather", "iteration", "on", "the", "graph", ".", "No", "configuration", "options", "are", "provided", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java#L1730-L1737
train
Run a ScatterGather iteration on the graph.
[ 30522, 2270, 1026, 1049, 1028, 10629, 1026, 1047, 1010, 1058, 2615, 1010, 23408, 1028, 3216, 11266, 3334, 20697, 5886, 21646, 3370, 1006, 8040, 20097, 11263, 27989, 1026, 1047, 1010, 1058, 2615, 1010, 1049, 1010, 23408, 1028, 8040, 20097, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-poi/src/main/java/cn/hutool/poi/excel/sax/ExcelSaxUtil.java
ExcelSaxUtil.countNullCell
/**
 * Counts the number of empty cells between two cell references on the same row.
 *
 * @param preRef the previous cell reference, e.g. "A1"; null is treated as '@'
 *        (the character just before 'A', i.e. "column zero")
 * @param ref the current cell reference, e.g. "A8"
 * @return the number of empty cells between the two cells on the same row
 */
public static int countNullCell(String preRef, String ref) {
    // Excel 2007: at most 1048576 rows and 16384 columns; the last column name is XFD.
    // Strip the digit (row) part so only the column letters remain.
    String preXfd = StrUtil.nullToDefault(preRef, "@").replaceAll("\\d+", "");
    String xfd = StrUtil.nullToDefault(ref, "@").replaceAll("\\d+", "");

    // 'A' is 65 and '@' is 64: if 'A' counts as 1, '@' stands for 0.
    // Left-pad both column names to the maximum width of 3 characters
    // (presumably with '@' — see CELL_FILL_CHAR; TODO confirm).
    preXfd = StrUtil.fillBefore(preXfd, CELL_FILL_CHAR, MAX_CELL_BIT);
    xfd = StrUtil.fillBefore(xfd, CELL_FILL_CHAR, MAX_CELL_BIT);

    char[] preLetter = preXfd.toCharArray();
    char[] letter = xfd.toCharArray();
    // Column names are at most three letters; interpret them as a base-26 number
    // and take the difference of the two column indices.
    int res = (letter[0] - preLetter[0]) * 26 * 26 + (letter[1] - preLetter[1]) * 26 + (letter[2] - preLetter[2]);
    // Cells strictly between the two references.
    return res - 1;
}
java
public static int countNullCell(String preRef, String ref) { // excel2007最大行数是1048576,最大列数是16384,最后一列列名是XFD // 数字代表列,去掉列信息 String preXfd = StrUtil.nullToDefault(preRef, "@").replaceAll("\\d+", ""); String xfd = StrUtil.nullToDefault(ref, "@").replaceAll("\\d+", ""); // A表示65,@表示64,如果A算作1,那@代表0 // 填充最大位数3 preXfd = StrUtil.fillBefore(preXfd, CELL_FILL_CHAR, MAX_CELL_BIT); xfd = StrUtil.fillBefore(xfd, CELL_FILL_CHAR, MAX_CELL_BIT); char[] preLetter = preXfd.toCharArray(); char[] letter = xfd.toCharArray(); // 用字母表示则最多三位,每26个字母进一位 int res = (letter[0] - preLetter[0]) * 26 * 26 + (letter[1] - preLetter[1]) * 26 + (letter[2] - preLetter[2]); return res - 1; }
[ "public", "static", "int", "countNullCell", "(", "String", "preRef", ",", "String", "ref", ")", "{", "// excel2007最大行数是1048576,最大列数是16384,最后一列列名是XFD\r", "// 数字代表列,去掉列信息\r", "String", "preXfd", "=", "StrUtil", ".", "nullToDefault", "(", "preRef", ",", "\"@\"", ")", "...
计算两个单元格之间的单元格数目(同一行) @param preRef 前一个单元格位置,例如A1 @param ref 当前单元格位置,例如A8 @return 同一行中两个单元格之间的空单元格数
[ "计算两个单元格之间的单元格数目", "(", "同一行", ")" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-poi/src/main/java/cn/hutool/poi/excel/sax/ExcelSaxUtil.java#L103-L119
train
count null cell
[ 30522, 2270, 10763, 20014, 4175, 11231, 3363, 29109, 2140, 1006, 5164, 3653, 2890, 2546, 1010, 5164, 25416, 1007, 1063, 1013, 1013, 24970, 28332, 2581, 100, 1810, 1945, 100, 100, 9645, 27531, 2581, 2575, 1989, 100, 1810, 100, 100, 100, 27...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
FileUtils.readAllBytes
/**
 * Reads all bytes from a file, ensuring the file is closed when all bytes have
 * been read or an I/O error occurs.
 *
 * <p>This mirrors {@link java.nio.file.Files#readAllBytes(java.nio.file.Path)}
 * but limits the size to MAX_BUFFER_SIZE to avoid direct-buffer
 * OutOfMemoryError.
 *
 * @param path the path of the file to read
 * @return a byte array containing the file's bytes
 * @throws IOException if an I/O error occurs while reading
 * @throws OutOfMemoryError if the file is larger than MAX_BUFFER_SIZE
 */
public static byte[] readAllBytes(java.nio.file.Path path) throws IOException {
    // try-with-resources closes the stream, and with it the channel, on all paths.
    try (SeekableByteChannel channel = Files.newByteChannel(path);
            InputStream in = Channels.newInputStream(channel)) {

        long size = channel.size();
        if (size > (long) MAX_BUFFER_SIZE) {
            throw new OutOfMemoryError("Required array size too large");
        }

        return read(in, (int) size);
    }
}
java
public static byte[] readAllBytes(java.nio.file.Path path) throws IOException { try (SeekableByteChannel channel = Files.newByteChannel(path); InputStream in = Channels.newInputStream(channel)) { long size = channel.size(); if (size > (long) MAX_BUFFER_SIZE) { throw new OutOfMemoryError("Required array size too large"); } return read(in, (int) size); } }
[ "public", "static", "byte", "[", "]", "readAllBytes", "(", "java", ".", "nio", ".", "file", ".", "Path", "path", ")", "throws", "IOException", "{", "try", "(", "SeekableByteChannel", "channel", "=", "Files", ".", "newByteChannel", "(", "path", ")", ";", ...
Reads all the bytes from a file. The method ensures that the file is closed when all bytes have been read or an I/O error, or other runtime exception, is thrown. <p>This is an implementation that follow {@link java.nio.file.Files#readAllBytes(java.nio.file.Path)}, and the difference is that it limits the size of the direct buffer to avoid direct-buffer OutOfMemoryError. When {@link java.nio.file.Files#readAllBytes(java.nio.file.Path)} or other interfaces in java API can do this in the future, we should remove it. @param path the path to the file @return a byte array containing the bytes read from the file @throws IOException if an I/O error occurs reading from the stream @throws OutOfMemoryError if an array of the required size cannot be allocated, for example the file is larger that {@code 2GB}
[ "Reads", "all", "the", "bytes", "from", "a", "file", ".", "The", "method", "ensures", "that", "the", "file", "is", "closed", "when", "all", "bytes", "have", "been", "read", "or", "an", "I", "/", "O", "error", "or", "other", "runtime", "exception", "is"...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/util/FileUtils.java#L143-L154
train
Read all bytes from the given file.
[ 30522, 2270, 10763, 24880, 1031, 1033, 3191, 8095, 3762, 4570, 1006, 9262, 1012, 9152, 2080, 1012, 5371, 1012, 4130, 4130, 1007, 11618, 22834, 10288, 24422, 1063, 3046, 1006, 6148, 3085, 3762, 15007, 20147, 2140, 3149, 1027, 6764, 1012, 204...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http/src/main/java/io/netty/handler/codec/http/multipart/HttpPostStandardRequestDecoder.java
HttpPostStandardRequestDecoder.offer
/**
 * Offers a new received chunk to the decoder, appending it to the undecoded
 * buffer and parsing as much of the body as currently possible.
 *
 * @param content the newly received chunk
 * @return this decoder, for chaining
 */
@Override
public HttpPostStandardRequestDecoder offer(HttpContent content) {
    checkDestroyed();

    // Maybe we should better not copy here for performance reasons but this will need
    // more care by the caller to release the content in a correct manner later
    // So maybe something to optimize on a later stage
    ByteBuf buf = content.content();
    if (undecodedChunk == null) {
        // First chunk: start the accumulation buffer with a copy.
        undecodedChunk = buf.copy();
    } else {
        undecodedChunk.writeBytes(buf);
    }
    if (content instanceof LastHttpContent) {
        isLastChunk = true;
    }
    parseBody();
    // Reclaim already-consumed bytes once the buffer grows past the threshold.
    if (undecodedChunk != null && undecodedChunk.writerIndex() > discardThreshold) {
        undecodedChunk.discardReadBytes();
    }
    return this;
}
java
@Override public HttpPostStandardRequestDecoder offer(HttpContent content) { checkDestroyed(); // Maybe we should better not copy here for performance reasons but this will need // more care by the caller to release the content in a correct manner later // So maybe something to optimize on a later stage ByteBuf buf = content.content(); if (undecodedChunk == null) { undecodedChunk = buf.copy(); } else { undecodedChunk.writeBytes(buf); } if (content instanceof LastHttpContent) { isLastChunk = true; } parseBody(); if (undecodedChunk != null && undecodedChunk.writerIndex() > discardThreshold) { undecodedChunk.discardReadBytes(); } return this; }
[ "@", "Override", "public", "HttpPostStandardRequestDecoder", "offer", "(", "HttpContent", "content", ")", "{", "checkDestroyed", "(", ")", ";", "// Maybe we should better not copy here for performance reasons but this will need", "// more care by the caller to release the content in a ...
Initialized the internals from a new chunk @param content the new received chunk @throws ErrorDataDecoderException if there is a problem with the charset decoding or other errors
[ "Initialized", "the", "internals", "from", "a", "new", "chunk" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http/src/main/java/io/netty/handler/codec/http/multipart/HttpPostStandardRequestDecoder.java#L273-L294
train
Offer a content to the request.
[ 30522, 1030, 2058, 15637, 2270, 8299, 19894, 21515, 4232, 2890, 15500, 3207, 16044, 2099, 3749, 1006, 8299, 8663, 6528, 2102, 4180, 1007, 1063, 4638, 6155, 13181, 20821, 1006, 1007, 1025, 1013, 1013, 2672, 2057, 2323, 2488, 2025, 6100, 2182...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-cron/src/main/java/cn/hutool/cron/Scheduler.java
Scheduler.schedule
public String schedule(String pattern, Runnable task) { return schedule(pattern, new RunnableTask(task)); }
java
public String schedule(String pattern, Runnable task) { return schedule(pattern, new RunnableTask(task)); }
[ "public", "String", "schedule", "(", "String", "pattern", ",", "Runnable", "task", ")", "{", "return", "schedule", "(", "pattern", ",", "new", "RunnableTask", "(", "task", ")", ")", ";", "}" ]
新增Task,使用随机UUID @param pattern {@link CronPattern}对应的String表达式 @param task {@link Runnable} @return ID
[ "新增Task,使用随机UUID" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-cron/src/main/java/cn/hutool/cron/Scheduler.java#L206-L208
train
Schedule a Runnable task to run on the specified pattern.
[ 30522, 2270, 5164, 6134, 1006, 5164, 5418, 1010, 2448, 22966, 4708, 1007, 1063, 2709, 6134, 1006, 5418, 1010, 2047, 2448, 22966, 10230, 2243, 1006, 4708, 1007, 1007, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/classification/statistics/ContinuousDistributions.java
ContinuousDistributions.Gcf
protected static double Gcf(double x, double A) { // Good for X>A+1 double A0 = 0; double B0 = 1; double A1 = 1; double B1 = x; double AOLD = 0; double N = 0; while (Math.abs((A1 - AOLD) / A1) > .00001) { AOLD = A1; N = N + 1; A0 = A1 + (N - A) * A0; B0 = B1 + (N - A) * B0; A1 = x * A0 + N * A1; B1 = x * B0 + N * B1; A0 = A0 / B1; B0 = B0 / B1; A1 = A1 / B1; B1 = 1; } double Prob = Math.exp(A * Math.log(x) - x - LogGamma(A)) * A1; return 1.0 - Prob; }
java
protected static double Gcf(double x, double A) { // Good for X>A+1 double A0 = 0; double B0 = 1; double A1 = 1; double B1 = x; double AOLD = 0; double N = 0; while (Math.abs((A1 - AOLD) / A1) > .00001) { AOLD = A1; N = N + 1; A0 = A1 + (N - A) * A0; B0 = B1 + (N - A) * B0; A1 = x * A0 + N * A1; B1 = x * B0 + N * B1; A0 = A0 / B1; B0 = B0 / B1; A1 = A1 / B1; B1 = 1; } double Prob = Math.exp(A * Math.log(x) - x - LogGamma(A)) * A1; return 1.0 - Prob; }
[ "protected", "static", "double", "Gcf", "(", "double", "x", ",", "double", "A", ")", "{", "// Good for X>A+1", "double", "A0", "=", "0", ";", "double", "B0", "=", "1", ";", "double", "A1", "=", "1", ";", "double", "B1", "=", "x", ";", "double", "AO...
Internal function used by GammaCdf @param x @param A @return
[ "Internal", "function", "used", "by", "GammaCdf" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/classification/statistics/ContinuousDistributions.java#L125-L150
train
Compute the GCF of a single logarithmic problem
[ 30522, 5123, 10763, 3313, 1043, 2278, 2546, 1006, 3313, 1060, 1010, 3313, 1037, 1007, 1063, 1013, 1013, 2204, 2005, 1060, 1028, 1037, 1009, 1015, 3313, 1037, 2692, 1027, 1014, 1025, 3313, 1038, 2692, 1027, 1015, 1025, 3313, 17350, 1027, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-optimizer/src/main/java/org/apache/flink/optimizer/dag/OptimizerNode.java
OptimizerNode.prunePlanAlternatives
protected void prunePlanAlternatives(List<PlanNode> plans) { if (plans.isEmpty()) { throw new CompilerException("No plan meeting the requirements could be created @ " + this + ". Most likely reason: Too restrictive plan hints."); } // shortcut for the simple case if (plans.size() == 1) { return; } // we can only compare plan candidates that made equal choices // at the branching points. for each choice at a branching point, // we need to keep the cheapest (wrt. interesting properties). // if we do not keep candidates for each branch choice, we might not // find branch compatible candidates when joining the branches back. // for pruning, we are quasi AFTER the node, so in the presence of // branches, we need form the per-branch-choice groups by the choice // they made at the latest un-joined branching node. Note that this is // different from the check for branch compatibility of candidates, as // this happens on the input sub-plans and hence BEFORE the node (therefore // it is relevant to find the latest (partially) joined branch point. 
if (this.openBranches == null || this.openBranches.isEmpty()) { prunePlanAlternativesWithCommonBranching(plans); } else { // partition the candidates into groups that made the same sub-plan candidate // choice at the latest unclosed branch point final OptimizerNode[] branchDeterminers = new OptimizerNode[this.openBranches.size()]; for (int i = 0; i < branchDeterminers.length; i++) { branchDeterminers[i] = this.openBranches.get(this.openBranches.size() - 1 - i).getBranchingNode(); } // this sorter sorts by the candidate choice at the branch point Comparator<PlanNode> sorter = new Comparator<PlanNode>() { @Override public int compare(PlanNode o1, PlanNode o2) { for (OptimizerNode branchDeterminer : branchDeterminers) { PlanNode n1 = o1.getCandidateAtBranchPoint(branchDeterminer); PlanNode n2 = o2.getCandidateAtBranchPoint(branchDeterminer); int hash1 = System.identityHashCode(n1); int hash2 = System.identityHashCode(n2); if (hash1 != hash2) { return hash1 - hash2; } } return 0; } }; Collections.sort(plans, sorter); List<PlanNode> result = new ArrayList<PlanNode>(); List<PlanNode> turn = new ArrayList<PlanNode>(); final PlanNode[] determinerChoice = new PlanNode[branchDeterminers.length]; while (!plans.isEmpty()) { // take one as the determiner turn.clear(); PlanNode determiner = plans.remove(plans.size() - 1); turn.add(determiner); for (int i = 0; i < determinerChoice.length; i++) { determinerChoice[i] = determiner.getCandidateAtBranchPoint(branchDeterminers[i]); } // go backwards through the plans and find all that are equal boolean stillEqual = true; for (int k = plans.size() - 1; k >= 0 && stillEqual; k--) { PlanNode toCheck = plans.get(k); for (int i = 0; i < branchDeterminers.length; i++) { PlanNode checkerChoice = toCheck.getCandidateAtBranchPoint(branchDeterminers[i]); if (checkerChoice != determinerChoice[i]) { // not the same anymore stillEqual = false; break; } } if (stillEqual) { // the same plans.remove(k); turn.add(toCheck); } } // now that we have only 
plans with the same branch alternatives, prune! if (turn.size() > 1) { prunePlanAlternativesWithCommonBranching(turn); } result.addAll(turn); } // after all turns are complete plans.clear(); plans.addAll(result); } }
java
protected void prunePlanAlternatives(List<PlanNode> plans) { if (plans.isEmpty()) { throw new CompilerException("No plan meeting the requirements could be created @ " + this + ". Most likely reason: Too restrictive plan hints."); } // shortcut for the simple case if (plans.size() == 1) { return; } // we can only compare plan candidates that made equal choices // at the branching points. for each choice at a branching point, // we need to keep the cheapest (wrt. interesting properties). // if we do not keep candidates for each branch choice, we might not // find branch compatible candidates when joining the branches back. // for pruning, we are quasi AFTER the node, so in the presence of // branches, we need form the per-branch-choice groups by the choice // they made at the latest un-joined branching node. Note that this is // different from the check for branch compatibility of candidates, as // this happens on the input sub-plans and hence BEFORE the node (therefore // it is relevant to find the latest (partially) joined branch point. 
if (this.openBranches == null || this.openBranches.isEmpty()) { prunePlanAlternativesWithCommonBranching(plans); } else { // partition the candidates into groups that made the same sub-plan candidate // choice at the latest unclosed branch point final OptimizerNode[] branchDeterminers = new OptimizerNode[this.openBranches.size()]; for (int i = 0; i < branchDeterminers.length; i++) { branchDeterminers[i] = this.openBranches.get(this.openBranches.size() - 1 - i).getBranchingNode(); } // this sorter sorts by the candidate choice at the branch point Comparator<PlanNode> sorter = new Comparator<PlanNode>() { @Override public int compare(PlanNode o1, PlanNode o2) { for (OptimizerNode branchDeterminer : branchDeterminers) { PlanNode n1 = o1.getCandidateAtBranchPoint(branchDeterminer); PlanNode n2 = o2.getCandidateAtBranchPoint(branchDeterminer); int hash1 = System.identityHashCode(n1); int hash2 = System.identityHashCode(n2); if (hash1 != hash2) { return hash1 - hash2; } } return 0; } }; Collections.sort(plans, sorter); List<PlanNode> result = new ArrayList<PlanNode>(); List<PlanNode> turn = new ArrayList<PlanNode>(); final PlanNode[] determinerChoice = new PlanNode[branchDeterminers.length]; while (!plans.isEmpty()) { // take one as the determiner turn.clear(); PlanNode determiner = plans.remove(plans.size() - 1); turn.add(determiner); for (int i = 0; i < determinerChoice.length; i++) { determinerChoice[i] = determiner.getCandidateAtBranchPoint(branchDeterminers[i]); } // go backwards through the plans and find all that are equal boolean stillEqual = true; for (int k = plans.size() - 1; k >= 0 && stillEqual; k--) { PlanNode toCheck = plans.get(k); for (int i = 0; i < branchDeterminers.length; i++) { PlanNode checkerChoice = toCheck.getCandidateAtBranchPoint(branchDeterminers[i]); if (checkerChoice != determinerChoice[i]) { // not the same anymore stillEqual = false; break; } } if (stillEqual) { // the same plans.remove(k); turn.add(toCheck); } } // now that we have only 
plans with the same branch alternatives, prune! if (turn.size() > 1) { prunePlanAlternativesWithCommonBranching(turn); } result.addAll(turn); } // after all turns are complete plans.clear(); plans.addAll(result); } }
[ "protected", "void", "prunePlanAlternatives", "(", "List", "<", "PlanNode", ">", "plans", ")", "{", "if", "(", "plans", ".", "isEmpty", "(", ")", ")", "{", "throw", "new", "CompilerException", "(", "\"No plan meeting the requirements could be created @ \"", "+", "...
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-optimizer/src/main/java/org/apache/flink/optimizer/dag/OptimizerNode.java#L681-L783
train
Prunes the alternatives of the given plans.
[ 30522, 5123, 11675, 10975, 9816, 24759, 27953, 16451, 8082, 2015, 1006, 2862, 1026, 2933, 3630, 3207, 1028, 3488, 1007, 1063, 2065, 1006, 3488, 1012, 2003, 6633, 13876, 2100, 1006, 1007, 1007, 1063, 5466, 2047, 21624, 10288, 24422, 1006, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/util/NonReusingKeyGroupedIterator.java
NonReusingKeyGroupedIterator.nextKey
public boolean nextKey() throws IOException { if (lookahead != null) { // common case: whole value-iterator was consumed and a new key group is available. this.comparator.setReference(this.lookahead); this.valuesIterator.next = this.lookahead; this.lastKeyRecord = this.lookahead; this.lookahead = null; this.valuesIterator.iteratorAvailable = true; return true; } // first element, empty/done, or the values iterator was not entirely consumed if (this.done) { return false; } if (this.valuesIterator != null) { // values was not entirely consumed. move to the next key // Required if user code / reduce() method did not read the whole value iterator. E next; while (true) { if ((next = this.iterator.next()) != null) { if (!this.comparator.equalToReference(next)) { // the keys do not match, so we have a new group. store the current key this.comparator.setReference(next); this.valuesIterator.next = next; this.lastKeyRecord = next; this.valuesIterator.iteratorAvailable = true; return true; } } else { // input exhausted this.valuesIterator.next = null; this.valuesIterator = null; this.lastKeyRecord = null; this.done = true; return false; } } } else { // first element // get the next element E first = this.iterator.next(); if (first != null) { this.comparator.setReference(first); this.valuesIterator = new ValuesIterator(first); this.lastKeyRecord = first; return true; } else { // empty input, set everything null this.done = true; return false; } } }
java
public boolean nextKey() throws IOException { if (lookahead != null) { // common case: whole value-iterator was consumed and a new key group is available. this.comparator.setReference(this.lookahead); this.valuesIterator.next = this.lookahead; this.lastKeyRecord = this.lookahead; this.lookahead = null; this.valuesIterator.iteratorAvailable = true; return true; } // first element, empty/done, or the values iterator was not entirely consumed if (this.done) { return false; } if (this.valuesIterator != null) { // values was not entirely consumed. move to the next key // Required if user code / reduce() method did not read the whole value iterator. E next; while (true) { if ((next = this.iterator.next()) != null) { if (!this.comparator.equalToReference(next)) { // the keys do not match, so we have a new group. store the current key this.comparator.setReference(next); this.valuesIterator.next = next; this.lastKeyRecord = next; this.valuesIterator.iteratorAvailable = true; return true; } } else { // input exhausted this.valuesIterator.next = null; this.valuesIterator = null; this.lastKeyRecord = null; this.done = true; return false; } } } else { // first element // get the next element E first = this.iterator.next(); if (first != null) { this.comparator.setReference(first); this.valuesIterator = new ValuesIterator(first); this.lastKeyRecord = first; return true; } else { // empty input, set everything null this.done = true; return false; } } }
[ "public", "boolean", "nextKey", "(", ")", "throws", "IOException", "{", "if", "(", "lookahead", "!=", "null", ")", "{", "// common case: whole value-iterator was consumed and a new key group is available.", "this", ".", "comparator", ".", "setReference", "(", "this", "....
Moves the iterator to the next key. This method may skip any values that have not yet been returned by the iterator created by the {@link #getValues()} method. Hence, if called multiple times it "removes" key groups. @return true, if the input iterator has an other group of records with the same key.
[ "Moves", "the", "iterator", "to", "the", "next", "key", ".", "This", "method", "may", "skip", "any", "values", "that", "have", "not", "yet", "been", "returned", "by", "the", "iterator", "created", "by", "the", "{", "@link", "#getValues", "()", "}", "meth...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/util/NonReusingKeyGroupedIterator.java#L68-L126
train
Returns true if there is at least one key in the input stream.
[ 30522, 2270, 22017, 20898, 2279, 14839, 1006, 1007, 11618, 22834, 10288, 24422, 1063, 2065, 1006, 2298, 4430, 13775, 999, 1027, 19701, 1007, 1063, 1013, 1013, 2691, 2553, 1024, 2878, 3643, 1011, 2009, 6906, 4263, 2001, 10202, 1998, 1037, 20...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/collection/dartsclone/details/DoubleArrayBuilder.java
DoubleArrayBuilder.buildDawg
private void buildDawg(Keyset keyset, DawgBuilder dawgBuilder) { dawgBuilder.init(); for (int i = 0; i < keyset.numKeys(); ++i) { dawgBuilder.insert(keyset.getKey(i), keyset.getValue(i)); } dawgBuilder.finish(); }
java
private void buildDawg(Keyset keyset, DawgBuilder dawgBuilder) { dawgBuilder.init(); for (int i = 0; i < keyset.numKeys(); ++i) { dawgBuilder.insert(keyset.getKey(i), keyset.getValue(i)); } dawgBuilder.finish(); }
[ "private", "void", "buildDawg", "(", "Keyset", "keyset", ",", "DawgBuilder", "dawgBuilder", ")", "{", "dawgBuilder", ".", "init", "(", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "keyset", ".", "numKeys", "(", ")", ";", "++", "i", ")...
构建 @param keyset @param dawgBuilder
[ "构建" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/collection/dartsclone/details/DoubleArrayBuilder.java#L81-L89
train
Build a Dawg from a Keyset.
[ 30522, 2797, 11675, 3857, 2850, 27767, 1006, 6309, 3388, 6309, 3388, 1010, 4830, 27767, 8569, 23891, 2099, 4830, 27767, 8569, 23891, 2099, 1007, 1063, 4830, 27767, 8569, 23891, 2099, 1012, 1999, 4183, 1006, 1007, 1025, 2005, 1006, 20014, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationImportSelector.java
AutoConfigurationImportSelector.getExclusions
protected Set<String> getExclusions(AnnotationMetadata metadata, AnnotationAttributes attributes) { Set<String> excluded = new LinkedHashSet<>(); excluded.addAll(asList(attributes, "exclude")); excluded.addAll(Arrays.asList(attributes.getStringArray("excludeName"))); excluded.addAll(getExcludeAutoConfigurationsProperty()); return excluded; }
java
protected Set<String> getExclusions(AnnotationMetadata metadata, AnnotationAttributes attributes) { Set<String> excluded = new LinkedHashSet<>(); excluded.addAll(asList(attributes, "exclude")); excluded.addAll(Arrays.asList(attributes.getStringArray("excludeName"))); excluded.addAll(getExcludeAutoConfigurationsProperty()); return excluded; }
[ "protected", "Set", "<", "String", ">", "getExclusions", "(", "AnnotationMetadata", "metadata", ",", "AnnotationAttributes", "attributes", ")", "{", "Set", "<", "String", ">", "excluded", "=", "new", "LinkedHashSet", "<>", "(", ")", ";", "excluded", ".", "addA...
Return any exclusions that limit the candidate configurations. @param metadata the source metadata @param attributes the {@link #getAttributes(AnnotationMetadata) annotation attributes} @return exclusions or an empty set
[ "Return", "any", "exclusions", "that", "limit", "the", "candidate", "configurations", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationImportSelector.java#L234-L241
train
Returns the set of excluded annotations.
[ 30522, 5123, 2275, 1026, 5164, 1028, 2131, 10288, 20464, 22016, 1006, 5754, 17287, 3508, 11368, 8447, 2696, 27425, 1010, 5754, 17287, 3508, 19321, 3089, 8569, 4570, 12332, 1007, 1063, 2275, 1026, 5164, 1028, 12421, 1027, 2047, 5799, 14949, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/HanLP.java
HanLP.extractSummary
public static List<String> extractSummary(String document, int size, String sentence_separator) { return TextRankSentence.getTopSentenceList(document, size, sentence_separator); }
java
public static List<String> extractSummary(String document, int size, String sentence_separator) { return TextRankSentence.getTopSentenceList(document, size, sentence_separator); }
[ "public", "static", "List", "<", "String", ">", "extractSummary", "(", "String", "document", ",", "int", "size", ",", "String", "sentence_separator", ")", "{", "return", "TextRankSentence", ".", "getTopSentenceList", "(", "document", ",", "size", ",", "sentence_...
自动摘要 @param document 目标文档 @param size 需要的关键句的个数 @param sentence_separator 分割目标文档时的句子分割符,正则格式, 如:[。??!!;;] @return 关键句列表
[ "自动摘要" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/HanLP.java#L843-L846
train
Extract summary from a text document.
[ 30522, 2270, 10763, 2862, 1026, 5164, 1028, 27059, 2819, 7849, 2100, 1006, 5164, 6254, 1010, 20014, 2946, 1010, 5164, 6251, 1035, 19802, 25879, 2953, 1007, 1063, 2709, 3793, 26763, 5054, 6528, 3401, 1012, 2131, 25181, 15781, 5897, 9863, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rpc/RpcUtils.java
RpcUtils.terminateRpcService
public static void terminateRpcService(RpcService rpcService, Time timeout) throws InterruptedException, ExecutionException, TimeoutException { rpcService.stopService().get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); }
java
public static void terminateRpcService(RpcService rpcService, Time timeout) throws InterruptedException, ExecutionException, TimeoutException { rpcService.stopService().get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); }
[ "public", "static", "void", "terminateRpcService", "(", "RpcService", "rpcService", ",", "Time", "timeout", ")", "throws", "InterruptedException", ",", "ExecutionException", ",", "TimeoutException", "{", "rpcService", ".", "stopService", "(", ")", ".", "get", "(", ...
Shuts the given rpc service down and waits for its termination. @param rpcService to shut down @param timeout for this operation @throws InterruptedException if the operation has been interrupted @throws ExecutionException if a problem occurred @throws TimeoutException if a timeout occurred
[ "Shuts", "the", "given", "rpc", "service", "down", "and", "waits", "for", "its", "termination", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/rpc/RpcUtils.java#L89-L91
train
Terminate the RPC service.
[ 30522, 2270, 10763, 11675, 20320, 14536, 6169, 2121, 7903, 2063, 1006, 1054, 15042, 8043, 7903, 2063, 1054, 15042, 8043, 7903, 2063, 1010, 2051, 2051, 5833, 1007, 11618, 7153, 10288, 24422, 1010, 7781, 10288, 24422, 1010, 2051, 5833, 10288, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/incubator-shardingsphere
sharding-core/sharding-core-common/src/main/java/org/apache/shardingsphere/core/metadata/datasource/DataSourceMetaDataFactory.java
DataSourceMetaDataFactory.newInstance
public static DataSourceMetaData newInstance(final DatabaseType databaseType, final String url) { switch (databaseType) { case H2: return new H2DataSourceMetaData(url); case MySQL: return new MySQLDataSourceMetaData(url); case Oracle: return new OracleDataSourceMetaData(url); case PostgreSQL: return new PostgreSQLDataSourceMetaData(url); case SQLServer: return new SQLServerDataSourceMetaData(url); default: throw new UnsupportedOperationException(String.format("Cannot support database [%s].", databaseType)); } }
java
public static DataSourceMetaData newInstance(final DatabaseType databaseType, final String url) { switch (databaseType) { case H2: return new H2DataSourceMetaData(url); case MySQL: return new MySQLDataSourceMetaData(url); case Oracle: return new OracleDataSourceMetaData(url); case PostgreSQL: return new PostgreSQLDataSourceMetaData(url); case SQLServer: return new SQLServerDataSourceMetaData(url); default: throw new UnsupportedOperationException(String.format("Cannot support database [%s].", databaseType)); } }
[ "public", "static", "DataSourceMetaData", "newInstance", "(", "final", "DatabaseType", "databaseType", ",", "final", "String", "url", ")", "{", "switch", "(", "databaseType", ")", "{", "case", "H2", ":", "return", "new", "H2DataSourceMetaData", "(", "url", ")", ...
Create new instance of data source meta data. @param databaseType database type @param url data source URL @return data source meta data
[ "Create", "new", "instance", "of", "data", "source", "meta", "data", "." ]
f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d
https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-core/sharding-core-common/src/main/java/org/apache/shardingsphere/core/metadata/datasource/DataSourceMetaDataFactory.java#L44-L59
train
Create a new instance of the data source meta data class.
[ 30522, 2270, 10763, 2951, 6499, 3126, 3401, 11368, 8447, 2696, 2047, 7076, 26897, 1006, 2345, 7809, 13874, 7809, 13874, 1010, 2345, 5164, 24471, 2140, 1007, 1063, 6942, 1006, 7809, 13874, 1007, 1063, 2553, 1044, 2475, 1024, 2709, 2047, 1044...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/net/NetUtil.java
NetUtil.longToIpv4
public static String longToIpv4(long longIP) { final StringBuilder sb = new StringBuilder(); // 直接右移24位 sb.append(String.valueOf(longIP >>> 24)); sb.append("."); // 将高8位置0,然后右移16位 sb.append(String.valueOf((longIP & 0x00FFFFFF) >>> 16)); sb.append("."); sb.append(String.valueOf((longIP & 0x0000FFFF) >>> 8)); sb.append("."); sb.append(String.valueOf(longIP & 0x000000FF)); return sb.toString(); }
java
public static String longToIpv4(long longIP) { final StringBuilder sb = new StringBuilder(); // 直接右移24位 sb.append(String.valueOf(longIP >>> 24)); sb.append("."); // 将高8位置0,然后右移16位 sb.append(String.valueOf((longIP & 0x00FFFFFF) >>> 16)); sb.append("."); sb.append(String.valueOf((longIP & 0x0000FFFF) >>> 8)); sb.append("."); sb.append(String.valueOf(longIP & 0x000000FF)); return sb.toString(); }
[ "public", "static", "String", "longToIpv4", "(", "long", "longIP", ")", "{", "final", "StringBuilder", "sb", "=", "new", "StringBuilder", "(", ")", ";", "// 直接右移24位\r", "sb", ".", "append", "(", "String", ".", "valueOf", "(", "longIP", ">>>", "24", ")", ...
根据long值获取ip v4地址 @param longIP IP的long表示形式 @return IP V4 地址
[ "根据long值获取ip", "v4地址" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/net/NetUtil.java#L53-L65
train
Converts a long to a IPv4 address.
[ 30522, 2270, 10763, 5164, 2146, 3406, 11514, 2615, 2549, 1006, 2146, 2146, 11514, 1007, 1063, 2345, 5164, 8569, 23891, 2099, 24829, 1027, 2047, 5164, 8569, 23891, 2099, 1006, 1007, 1025, 1013, 1013, 100, 100, 100, 100, 2484, 100, 24829, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-poi/src/main/java/cn/hutool/poi/excel/ExcelUtil.java
ExcelUtil.getReader
public static ExcelReader getReader(String bookFilePath, int sheetIndex) { try { return new ExcelReader(bookFilePath, sheetIndex); } catch (NoClassDefFoundError e) { throw new DependencyException(ObjectUtil.defaultIfNull(e.getCause(), e), PoiChecker.NO_POI_ERROR_MSG); } }
java
public static ExcelReader getReader(String bookFilePath, int sheetIndex) { try { return new ExcelReader(bookFilePath, sheetIndex); } catch (NoClassDefFoundError e) { throw new DependencyException(ObjectUtil.defaultIfNull(e.getCause(), e), PoiChecker.NO_POI_ERROR_MSG); } }
[ "public", "static", "ExcelReader", "getReader", "(", "String", "bookFilePath", ",", "int", "sheetIndex", ")", "{", "try", "{", "return", "new", "ExcelReader", "(", "bookFilePath", ",", "sheetIndex", ")", ";", "}", "catch", "(", "NoClassDefFoundError", "e", ")"...
获取Excel读取器,通过调用{@link ExcelReader}的read或readXXX方法读取Excel内容 @param bookFilePath Excel文件路径,绝对路径或相对于ClassPath路径 @param sheetIndex sheet序号,0表示第一个sheet @return {@link ExcelReader} @since 3.1.1
[ "获取Excel读取器,通过调用", "{", "@link", "ExcelReader", "}", "的read或readXXX方法读取Excel内容" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-poi/src/main/java/cn/hutool/poi/excel/ExcelUtil.java#L215-L221
train
Returns an ExcelReader object for the specified sheet index.
[ 30522, 2270, 10763, 24970, 16416, 4063, 2131, 16416, 4063, 1006, 5164, 2338, 8873, 2571, 15069, 1010, 20014, 7123, 22254, 10288, 1007, 1063, 3046, 1063, 2709, 2047, 24970, 16416, 4063, 1006, 2338, 8873, 2571, 15069, 1010, 7123, 22254, 10288, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/nosql/mongo/MongoDS.java
MongoDS.getCollection
public MongoCollection<Document> getCollection(String dbName, String collectionName) { return getDb(dbName).getCollection(collectionName); }
java
public MongoCollection<Document> getCollection(String dbName, String collectionName) { return getDb(dbName).getCollection(collectionName); }
[ "public", "MongoCollection", "<", "Document", ">", "getCollection", "(", "String", "dbName", ",", "String", "collectionName", ")", "{", "return", "getDb", "(", "dbName", ")", ".", "getCollection", "(", "collectionName", ")", ";", "}" ]
获得MongoDB中指定集合对象 @param dbName 库名 @param collectionName 集合名 @return DBCollection
[ "获得MongoDB中指定集合对象" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/nosql/mongo/MongoDS.java#L244-L246
train
Returns a collection of documents in the database with the given name.
[ 30522, 2270, 12256, 3995, 26895, 18491, 1026, 6254, 1028, 2131, 26895, 18491, 1006, 5164, 16962, 18442, 1010, 5164, 3074, 18442, 1007, 1063, 2709, 2131, 18939, 1006, 16962, 18442, 1007, 1012, 2131, 26895, 18491, 1006, 3074, 18442, 1007, 1025,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalyticBase.java
GraphAnalyticBase.setParallelism
public GraphAnalyticBase<K, VV, EV, T> setParallelism(int parallelism) { Preconditions.checkArgument(parallelism > 0 || parallelism == PARALLELISM_DEFAULT, "The parallelism must be at least one, or ExecutionConfig.PARALLELISM_DEFAULT (use system default)."); this.parallelism = parallelism; return this; }
java
public GraphAnalyticBase<K, VV, EV, T> setParallelism(int parallelism) { Preconditions.checkArgument(parallelism > 0 || parallelism == PARALLELISM_DEFAULT, "The parallelism must be at least one, or ExecutionConfig.PARALLELISM_DEFAULT (use system default)."); this.parallelism = parallelism; return this; }
[ "public", "GraphAnalyticBase", "<", "K", ",", "VV", ",", "EV", ",", "T", ">", "setParallelism", "(", "int", "parallelism", ")", "{", "Preconditions", ".", "checkArgument", "(", "parallelism", ">", "0", "||", "parallelism", "==", "PARALLELISM_DEFAULT", ",", "...
Set the parallelism for this analytic's operators. This parameter is necessary because processing a small amount of data with high operator parallelism is slow and wasteful with memory and buffers. <p>Operator parallelism should be set to this given value unless processing asymptotically more data, in which case the default job parallelism should be inherited. @param parallelism operator parallelism @return this
[ "Set", "the", "parallelism", "for", "this", "analytic", "s", "operators", ".", "This", "parameter", "is", "necessary", "because", "processing", "a", "small", "amount", "of", "data", "with", "high", "operator", "parallelism", "is", "slow", "and", "wasteful", "w...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalyticBase.java#L60-L67
train
Sets the parallelism of the graph.
[ 30522, 2270, 10629, 27953, 21252, 15058, 1026, 1047, 1010, 1058, 2615, 1010, 23408, 1010, 1056, 1028, 2275, 28689, 6216, 28235, 1006, 20014, 5903, 2964, 1007, 1063, 3653, 8663, 20562, 2015, 1012, 4638, 2906, 22850, 4765, 1006, 5903, 2964, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/dataformat/NestedRow.java
NestedRow.setNullAt
@Override public void setNullAt(int i) { assertIndexIsValid(i); SegmentsUtil.bitSet(segments, offset, i + 8); SegmentsUtil.setLong(segments, getFieldOffset(i), 0); }
java
@Override public void setNullAt(int i) { assertIndexIsValid(i); SegmentsUtil.bitSet(segments, offset, i + 8); SegmentsUtil.setLong(segments, getFieldOffset(i), 0); }
[ "@", "Override", "public", "void", "setNullAt", "(", "int", "i", ")", "{", "assertIndexIsValid", "(", "i", ")", ";", "SegmentsUtil", ".", "bitSet", "(", "segments", ",", "offset", ",", "i", "+", "8", ")", ";", "SegmentsUtil", ".", "setLong", "(", "segm...
See {@link BinaryRow#setNullAt(int)}.
[ "See", "{" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/dataformat/NestedRow.java#L83-L88
train
Sets the value of the null field at the given index.
[ 30522, 1030, 2058, 15637, 2270, 11675, 2275, 11231, 4571, 2102, 1006, 20014, 1045, 1007, 1063, 20865, 22254, 10288, 2483, 10175, 3593, 1006, 1045, 1007, 1025, 9214, 21823, 2140, 1012, 9017, 3388, 1006, 9214, 1010, 16396, 1010, 1045, 1009, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/jetty/JettyReactiveWebServerFactory.java
JettyReactiveWebServerFactory.setServerCustomizers
public void setServerCustomizers( Collection<? extends JettyServerCustomizer> customizers) { Assert.notNull(customizers, "Customizers must not be null"); this.jettyServerCustomizers = new ArrayList<>(customizers); }
java
public void setServerCustomizers( Collection<? extends JettyServerCustomizer> customizers) { Assert.notNull(customizers, "Customizers must not be null"); this.jettyServerCustomizers = new ArrayList<>(customizers); }
[ "public", "void", "setServerCustomizers", "(", "Collection", "<", "?", "extends", "JettyServerCustomizer", ">", "customizers", ")", "{", "Assert", ".", "notNull", "(", "customizers", ",", "\"Customizers must not be null\"", ")", ";", "this", ".", "jettyServerCustomize...
Sets {@link JettyServerCustomizer}s that will be applied to the {@link Server} before it is started. Calling this method will replace any existing customizers. @param customizers the Jetty customizers to apply
[ "Sets", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/jetty/JettyReactiveWebServerFactory.java#L121-L125
train
Sets the server customizers.
[ 30522, 2270, 11675, 4520, 2121, 6299, 7874, 20389, 17629, 2015, 1006, 3074, 1026, 1029, 8908, 22962, 23274, 2099, 6299, 7874, 20389, 17629, 1028, 7661, 17629, 2015, 1007, 1063, 20865, 1012, 2025, 11231, 3363, 1006, 7661, 17629, 2015, 1010, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-mesos/src/main/java/org/apache/flink/mesos/util/MesosArtifactServer.java
MesosArtifactServer.addPath
public synchronized URL addPath(Path path, Path remoteFile) throws IOException, MalformedURLException { if (paths.containsKey(remoteFile)) { throw new IllegalArgumentException("duplicate path registered"); } if (remoteFile.isAbsolute()) { throw new IllegalArgumentException("not expecting an absolute path"); } URL fileURL = new URL(baseURL, remoteFile.toString()); router.addAny(fileURL.getPath(), new VirtualFileServerHandler(path)); paths.put(remoteFile, fileURL); return fileURL; }
java
public synchronized URL addPath(Path path, Path remoteFile) throws IOException, MalformedURLException { if (paths.containsKey(remoteFile)) { throw new IllegalArgumentException("duplicate path registered"); } if (remoteFile.isAbsolute()) { throw new IllegalArgumentException("not expecting an absolute path"); } URL fileURL = new URL(baseURL, remoteFile.toString()); router.addAny(fileURL.getPath(), new VirtualFileServerHandler(path)); paths.put(remoteFile, fileURL); return fileURL; }
[ "public", "synchronized", "URL", "addPath", "(", "Path", "path", ",", "Path", "remoteFile", ")", "throws", "IOException", ",", "MalformedURLException", "{", "if", "(", "paths", ".", "containsKey", "(", "remoteFile", ")", ")", "{", "throw", "new", "IllegalArgum...
Adds a path to the artifact server. @param path the qualified FS path to serve (local, hdfs, etc). @param remoteFile the remote path with which to locate the file. @return the fully-qualified remote path to the file. @throws MalformedURLException if the remote path is invalid.
[ "Adds", "a", "path", "to", "the", "artifact", "server", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-mesos/src/main/java/org/apache/flink/mesos/util/MesosArtifactServer.java#L209-L222
train
Add a path to the virtual file system.
[ 30522, 2270, 25549, 24471, 2140, 5587, 15069, 1006, 4130, 4130, 1010, 4130, 6556, 8873, 2571, 1007, 11618, 22834, 10288, 24422, 1010, 15451, 29021, 3126, 2571, 2595, 24422, 1063, 2065, 1006, 10425, 1012, 3397, 14839, 1006, 6556, 8873, 2571, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/ds/AbstractDSFactory.java
AbstractDSFactory.createDataSource
private DataSourceWrapper createDataSource(String group) { if (group == null) { group = StrUtil.EMPTY; } final Setting config = setting.getSetting(group); if (CollectionUtil.isEmpty(config)) { throw new DbRuntimeException("No config for group: [{}]", group); } // 基本信息 final String url = config.getAndRemoveStr(KEY_ALIAS_URL); if (StrUtil.isBlank(url)) { throw new DbRuntimeException("No JDBC URL for group: [{}]", group); } // 自动识别Driver String driver = config.getAndRemoveStr(KEY_ALIAS_DRIVER); if (StrUtil.isBlank(driver)) { driver = DriverUtil.identifyDriver(url); } final String user = config.getAndRemoveStr(KEY_ALIAS_USER); final String pass = config.getAndRemoveStr(KEY_ALIAS_PASSWORD); return DataSourceWrapper.wrap(createDataSource(url, driver, user, pass, config), driver); }
java
private DataSourceWrapper createDataSource(String group) { if (group == null) { group = StrUtil.EMPTY; } final Setting config = setting.getSetting(group); if (CollectionUtil.isEmpty(config)) { throw new DbRuntimeException("No config for group: [{}]", group); } // 基本信息 final String url = config.getAndRemoveStr(KEY_ALIAS_URL); if (StrUtil.isBlank(url)) { throw new DbRuntimeException("No JDBC URL for group: [{}]", group); } // 自动识别Driver String driver = config.getAndRemoveStr(KEY_ALIAS_DRIVER); if (StrUtil.isBlank(driver)) { driver = DriverUtil.identifyDriver(url); } final String user = config.getAndRemoveStr(KEY_ALIAS_USER); final String pass = config.getAndRemoveStr(KEY_ALIAS_PASSWORD); return DataSourceWrapper.wrap(createDataSource(url, driver, user, pass, config), driver); }
[ "private", "DataSourceWrapper", "createDataSource", "(", "String", "group", ")", "{", "if", "(", "group", "==", "null", ")", "{", "group", "=", "StrUtil", ".", "EMPTY", ";", "}", "final", "Setting", "config", "=", "setting", ".", "getSetting", "(", "group"...
创建数据源 @param group 分组 @return {@link DataSourceWrapper} 数据源包装
[ "创建数据源" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/ds/AbstractDSFactory.java#L104-L128
train
Create data source wrapper.
[ 30522, 2797, 2951, 6499, 3126, 3401, 13088, 29098, 2121, 2580, 6790, 6499, 3126, 3401, 1006, 5164, 2177, 1007, 1063, 2065, 1006, 2177, 1027, 1027, 19701, 1007, 1063, 2177, 1027, 2358, 22134, 4014, 1012, 4064, 1025, 1065, 2345, 4292, 9530, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/bean/BeanUtil.java
BeanUtil.isEmpty
public static boolean isEmpty(Object bean) { if (null != bean) { for (Field field : ReflectUtil.getFields(bean.getClass())) { if (null != ReflectUtil.getFieldValue(bean, field)) { return false; } } } return true; }
java
public static boolean isEmpty(Object bean) { if (null != bean) { for (Field field : ReflectUtil.getFields(bean.getClass())) { if (null != ReflectUtil.getFieldValue(bean, field)) { return false; } } } return true; }
[ "public", "static", "boolean", "isEmpty", "(", "Object", "bean", ")", "{", "if", "(", "null", "!=", "bean", ")", "{", "for", "(", "Field", "field", ":", "ReflectUtil", ".", "getFields", "(", "bean", ".", "getClass", "(", ")", ")", ")", "{", "if", "...
判断Bean是否为空对象,空对象表示本身为<code>null</code>或者所有属性都为<code>null</code> @param bean Bean对象 @return 是否为空,<code>true</code> - 空 / <code>false</code> - 非空 @since 4.1.10
[ "判断Bean是否为空对象,空对象表示本身为<code", ">", "null<", "/", "code", ">", "或者所有属性都为<code", ">", "null<", "/", "code", ">" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/bean/BeanUtil.java#L682-L691
train
Checks if the given object is empty.
[ 30522, 2270, 10763, 22017, 20898, 2003, 6633, 13876, 2100, 1006, 4874, 14068, 1007, 1063, 2065, 1006, 19701, 999, 1027, 14068, 1007, 1063, 2005, 1006, 2492, 2492, 1024, 8339, 21823, 2140, 1012, 2131, 15155, 1006, 14068, 1012, 2131, 26266, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java
ColumnVectorUtils.populate
public static void populate(WritableColumnVector col, InternalRow row, int fieldIdx) { int capacity = col.capacity; DataType t = col.dataType(); if (row.isNullAt(fieldIdx)) { col.putNulls(0, capacity); } else { if (t == DataTypes.BooleanType) { col.putBooleans(0, capacity, row.getBoolean(fieldIdx)); } else if (t == DataTypes.ByteType) { col.putBytes(0, capacity, row.getByte(fieldIdx)); } else if (t == DataTypes.ShortType) { col.putShorts(0, capacity, row.getShort(fieldIdx)); } else if (t == DataTypes.IntegerType) { col.putInts(0, capacity, row.getInt(fieldIdx)); } else if (t == DataTypes.LongType) { col.putLongs(0, capacity, row.getLong(fieldIdx)); } else if (t == DataTypes.FloatType) { col.putFloats(0, capacity, row.getFloat(fieldIdx)); } else if (t == DataTypes.DoubleType) { col.putDoubles(0, capacity, row.getDouble(fieldIdx)); } else if (t == DataTypes.StringType) { UTF8String v = row.getUTF8String(fieldIdx); byte[] bytes = v.getBytes(); for (int i = 0; i < capacity; i++) { col.putByteArray(i, bytes); } } else if (t instanceof DecimalType) { DecimalType dt = (DecimalType)t; Decimal d = row.getDecimal(fieldIdx, dt.precision(), dt.scale()); if (dt.precision() <= Decimal.MAX_INT_DIGITS()) { col.putInts(0, capacity, (int)d.toUnscaledLong()); } else if (dt.precision() <= Decimal.MAX_LONG_DIGITS()) { col.putLongs(0, capacity, d.toUnscaledLong()); } else { final BigInteger integer = d.toJavaBigDecimal().unscaledValue(); byte[] bytes = integer.toByteArray(); for (int i = 0; i < capacity; i++) { col.putByteArray(i, bytes, 0, bytes.length); } } } else if (t instanceof CalendarIntervalType) { CalendarInterval c = (CalendarInterval)row.get(fieldIdx, t); col.getChild(0).putInts(0, capacity, c.months); col.getChild(1).putLongs(0, capacity, c.microseconds); } else if (t instanceof DateType) { col.putInts(0, capacity, row.getInt(fieldIdx)); } else if (t instanceof TimestampType) { col.putLongs(0, capacity, row.getLong(fieldIdx)); } } }
java
public static void populate(WritableColumnVector col, InternalRow row, int fieldIdx) { int capacity = col.capacity; DataType t = col.dataType(); if (row.isNullAt(fieldIdx)) { col.putNulls(0, capacity); } else { if (t == DataTypes.BooleanType) { col.putBooleans(0, capacity, row.getBoolean(fieldIdx)); } else if (t == DataTypes.ByteType) { col.putBytes(0, capacity, row.getByte(fieldIdx)); } else if (t == DataTypes.ShortType) { col.putShorts(0, capacity, row.getShort(fieldIdx)); } else if (t == DataTypes.IntegerType) { col.putInts(0, capacity, row.getInt(fieldIdx)); } else if (t == DataTypes.LongType) { col.putLongs(0, capacity, row.getLong(fieldIdx)); } else if (t == DataTypes.FloatType) { col.putFloats(0, capacity, row.getFloat(fieldIdx)); } else if (t == DataTypes.DoubleType) { col.putDoubles(0, capacity, row.getDouble(fieldIdx)); } else if (t == DataTypes.StringType) { UTF8String v = row.getUTF8String(fieldIdx); byte[] bytes = v.getBytes(); for (int i = 0; i < capacity; i++) { col.putByteArray(i, bytes); } } else if (t instanceof DecimalType) { DecimalType dt = (DecimalType)t; Decimal d = row.getDecimal(fieldIdx, dt.precision(), dt.scale()); if (dt.precision() <= Decimal.MAX_INT_DIGITS()) { col.putInts(0, capacity, (int)d.toUnscaledLong()); } else if (dt.precision() <= Decimal.MAX_LONG_DIGITS()) { col.putLongs(0, capacity, d.toUnscaledLong()); } else { final BigInteger integer = d.toJavaBigDecimal().unscaledValue(); byte[] bytes = integer.toByteArray(); for (int i = 0; i < capacity; i++) { col.putByteArray(i, bytes, 0, bytes.length); } } } else if (t instanceof CalendarIntervalType) { CalendarInterval c = (CalendarInterval)row.get(fieldIdx, t); col.getChild(0).putInts(0, capacity, c.months); col.getChild(1).putLongs(0, capacity, c.microseconds); } else if (t instanceof DateType) { col.putInts(0, capacity, row.getInt(fieldIdx)); } else if (t instanceof TimestampType) { col.putLongs(0, capacity, row.getLong(fieldIdx)); } } }
[ "public", "static", "void", "populate", "(", "WritableColumnVector", "col", ",", "InternalRow", "row", ",", "int", "fieldIdx", ")", "{", "int", "capacity", "=", "col", ".", "capacity", ";", "DataType", "t", "=", "col", ".", "dataType", "(", ")", ";", "if...
Populates the entire `col` with `row[fieldIdx]`
[ "Populates", "the", "entire", "col", "with", "row", "[", "fieldIdx", "]" ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java#L48-L99
train
Populates a column vector with data from the given row.
[ 30522, 2270, 10763, 11675, 3769, 9869, 1006, 25697, 3085, 25778, 2819, 2078, 3726, 16761, 8902, 1010, 4722, 10524, 5216, 1010, 20014, 2492, 3593, 2595, 1007, 1063, 20014, 3977, 1027, 8902, 1012, 30524, 3977, 1007, 1025, 1065, 2842, 1063, 20...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/internals/ClosableBlockingQueue.java
ClosableBlockingQueue.close
public boolean close() { lock.lock(); try { if (open) { if (elements.isEmpty()) { open = false; nonEmpty.signalAll(); return true; } else { return false; } } else { // already closed return true; } } finally { lock.unlock(); } }
java
public boolean close() { lock.lock(); try { if (open) { if (elements.isEmpty()) { open = false; nonEmpty.signalAll(); return true; } else { return false; } } else { // already closed return true; } } finally { lock.unlock(); } }
[ "public", "boolean", "close", "(", ")", "{", "lock", ".", "lock", "(", ")", ";", "try", "{", "if", "(", "open", ")", "{", "if", "(", "elements", ".", "isEmpty", "(", ")", ")", "{", "open", "=", "false", ";", "nonEmpty", ".", "signalAll", "(", "...
Tries to close the queue. Closing the queue only succeeds when no elements are in the queue when this method is called. Checking whether the queue is empty, and marking the queue as closed is one atomic operation. @return True, if the queue is closed, false if the queue remains open.
[ "Tries", "to", "close", "the", "queue", ".", "Closing", "the", "queue", "only", "succeeds", "when", "no", "elements", "are", "in", "the", "queue", "when", "this", "method", "is", "called", ".", "Checking", "whether", "the", "queue", "is", "empty", "and", ...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/internals/ClosableBlockingQueue.java#L140-L159
train
Close the list.
[ 30522, 2270, 22017, 20898, 2485, 1006, 1007, 1063, 5843, 1012, 5843, 1006, 1007, 1025, 3046, 1063, 2065, 1006, 2330, 1007, 1063, 2065, 1006, 3787, 1012, 2003, 6633, 13876, 2100, 1006, 1007, 1007, 1063, 2330, 1027, 6270, 1025, 3904, 27718, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-streaming-python/src/main/java/org/apache/flink/streaming/python/api/datastream/PythonDataStream.java
PythonDataStream.filter
public PythonSingleOutputStreamOperator filter(FilterFunction<PyObject> filter) throws IOException { return new PythonSingleOutputStreamOperator(stream.filter(new PythonFilterFunction(filter))); }
java
public PythonSingleOutputStreamOperator filter(FilterFunction<PyObject> filter) throws IOException { return new PythonSingleOutputStreamOperator(stream.filter(new PythonFilterFunction(filter))); }
[ "public", "PythonSingleOutputStreamOperator", "filter", "(", "FilterFunction", "<", "PyObject", ">", "filter", ")", "throws", "IOException", "{", "return", "new", "PythonSingleOutputStreamOperator", "(", "stream", ".", "filter", "(", "new", "PythonFilterFunction", "(", ...
A thin wrapper layer over {@link DataStream#filter(FilterFunction)}. @param filter The FilterFunction that is called for each element of the DataStream. @return The filtered {@link PythonDataStream}.
[ "A", "thin", "wrapper", "layer", "over", "{", "@link", "DataStream#filter", "(", "FilterFunction", ")", "}", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-streaming-python/src/main/java/org/apache/flink/streaming/python/api/datastream/PythonDataStream.java#L101-L103
train
Filter the stream.
[ 30522, 2270, 18750, 7741, 2571, 5833, 18780, 21422, 25918, 8844, 11307, 1006, 11307, 11263, 27989, 1026, 1052, 7677, 2497, 20614, 1028, 11307, 1007, 11618, 22834, 10288, 24422, 1063, 2709, 2047, 18750, 7741, 2571, 5833, 18780, 21422, 25918, 8...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/filewatch/FileSystemWatcher.java
FileSystemWatcher.start
public void start() { synchronized (this.monitor) { saveInitialSnapshots(); if (this.watchThread == null) { Map<File, FolderSnapshot> localFolders = new HashMap<>(); localFolders.putAll(this.folders); this.watchThread = new Thread(new Watcher(this.remainingScans, new ArrayList<>(this.listeners), this.triggerFilter, this.pollInterval, this.quietPeriod, localFolders)); this.watchThread.setName("File Watcher"); this.watchThread.setDaemon(this.daemon); this.watchThread.start(); } } }
java
public void start() { synchronized (this.monitor) { saveInitialSnapshots(); if (this.watchThread == null) { Map<File, FolderSnapshot> localFolders = new HashMap<>(); localFolders.putAll(this.folders); this.watchThread = new Thread(new Watcher(this.remainingScans, new ArrayList<>(this.listeners), this.triggerFilter, this.pollInterval, this.quietPeriod, localFolders)); this.watchThread.setName("File Watcher"); this.watchThread.setDaemon(this.daemon); this.watchThread.start(); } } }
[ "public", "void", "start", "(", ")", "{", "synchronized", "(", "this", ".", "monitor", ")", "{", "saveInitialSnapshots", "(", ")", ";", "if", "(", "this", ".", "watchThread", "==", "null", ")", "{", "Map", "<", "File", ",", "FolderSnapshot", ">", "loca...
Start monitoring the source folder for changes.
[ "Start", "monitoring", "the", "source", "folder", "for", "changes", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/filewatch/FileSystemWatcher.java#L154-L168
train
Starts the watch thread.
[ 30522, 2270, 11675, 2707, 1006, 1007, 1063, 25549, 1006, 2023, 1012, 8080, 1007, 1063, 3828, 5498, 20925, 2015, 2532, 4523, 12326, 2015, 1006, 1007, 1025, 2065, 1006, 2023, 1012, 3422, 2705, 16416, 2094, 1027, 1027, 19701, 1007, 1063, 4949,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
launcher/src/main/java/org/apache/spark/launcher/ChildProcAppHandle.java
ChildProcAppHandle.getError
@Override public Optional<Throwable> getError() { return redirector != null ? Optional.ofNullable(redirector.getError()) : Optional.empty(); }
java
@Override public Optional<Throwable> getError() { return redirector != null ? Optional.ofNullable(redirector.getError()) : Optional.empty(); }
[ "@", "Override", "public", "Optional", "<", "Throwable", ">", "getError", "(", ")", "{", "return", "redirector", "!=", "null", "?", "Optional", ".", "ofNullable", "(", "redirector", ".", "getError", "(", ")", ")", ":", "Optional", ".", "empty", "(", ")",...
Parses the logs of {@code spark-submit} and returns the last exception thrown. <p> Since {@link SparkLauncher} runs {@code spark-submit} in a sub-process, it's difficult to accurately retrieve the full {@link Throwable} from the {@code spark-submit} process. This method parses the logs of the sub-process and provides a best-effort attempt at returning the last exception thrown by the {@code spark-submit} process. Only the exception message is parsed, the associated stacktrace is meaningless. @return an {@link Optional} containing a {@link RuntimeException} with the parsed exception, otherwise returns a {@link Optional#EMPTY}
[ "Parses", "the", "logs", "of", "{", "@code", "spark", "-", "submit", "}", "and", "returns", "the", "last", "exception", "thrown", ".", "<p", ">", "Since", "{", "@link", "SparkLauncher", "}", "runs", "{", "@code", "spark", "-", "submit", "}", "in", "a",...
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/launcher/src/main/java/org/apache/spark/launcher/ChildProcAppHandle.java#L62-L65
train
Get the error.
[ 30522, 1030, 2058, 15637, 2270, 11887, 1026, 5466, 3085, 1028, 2131, 2121, 29165, 1006, 1007, 1063, 2709, 2417, 7442, 16761, 999, 1027, 19701, 1029, 11887, 1012, 1997, 11231, 4571, 3468, 1006, 2417, 7442, 16761, 1012, 2131, 2121, 29165, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-crypto/src/main/java/cn/hutool/crypto/SecureUtil.java
SecureUtil.readCertificate
public static Certificate readCertificate(String type, InputStream in) { return KeyUtil.readCertificate(type, in); }
java
public static Certificate readCertificate(String type, InputStream in) { return KeyUtil.readCertificate(type, in); }
[ "public", "static", "Certificate", "readCertificate", "(", "String", "type", ",", "InputStream", "in", ")", "{", "return", "KeyUtil", ".", "readCertificate", "(", "type", ",", "in", ")", ";", "}" ]
读取Certification文件<br> Certification为证书文件<br> see: http://snowolf.iteye.com/blog/391931 @param type 类型,例如X.509 @param in {@link InputStream} 如果想从文件读取.cer文件,使用 {@link FileUtil#getInputStream(java.io.File)} 读取 @return {@link Certificate}
[ "读取Certification文件<br", ">", "Certification为证书文件<br", ">", "see", ":", "http", ":", "//", "snowolf", ".", "iteye", ".", "com", "/", "blog", "/", "391931" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/SecureUtil.java#L377-L379
train
Reads a certificate from an input stream.
[ 30522, 2270, 10763, 8196, 3191, 17119, 3775, 8873, 16280, 1006, 5164, 2828, 1010, 20407, 25379, 1999, 1007, 1063, 2709, 3145, 21823, 2140, 1012, 3191, 17119, 3775, 8873, 16280, 1006, 2828, 1010, 1999, 1007, 1025, 1065, 102, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/convert/impl/Jdk8DateConverter.java
Jdk8DateConverter.parseFromCharSequence
private Object parseFromCharSequence(CharSequence value) { Method method; if (null != this.format) { final Object dateTimeFormatter = getDateTimeFormatter(); method = ReflectUtil.getMethod(this.targetType, "parse", CharSequence.class, dateTimeFormatter.getClass()); return ReflectUtil.invokeStatic(method, value, dateTimeFormatter); } else { method = ReflectUtil.getMethod(this.targetType, "parse", CharSequence.class); return ReflectUtil.invokeStatic(method, value); } }
java
private Object parseFromCharSequence(CharSequence value) { Method method; if (null != this.format) { final Object dateTimeFormatter = getDateTimeFormatter(); method = ReflectUtil.getMethod(this.targetType, "parse", CharSequence.class, dateTimeFormatter.getClass()); return ReflectUtil.invokeStatic(method, value, dateTimeFormatter); } else { method = ReflectUtil.getMethod(this.targetType, "parse", CharSequence.class); return ReflectUtil.invokeStatic(method, value); } }
[ "private", "Object", "parseFromCharSequence", "(", "CharSequence", "value", ")", "{", "Method", "method", ";", "if", "(", "null", "!=", "this", ".", "format", ")", "{", "final", "Object", "dateTimeFormatter", "=", "getDateTimeFormatter", "(", ")", ";", "method...
通过反射从字符串转java.time中的对象 @param value 字符串值 @return 日期对象
[ "通过反射从字符串转java", ".", "time中的对象" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/convert/impl/Jdk8DateConverter.java#L98-L108
train
Parse a single value from a CharSequence.
[ 30522, 2797, 4874, 11968, 20106, 21716, 7507, 22573, 4226, 5897, 1006, 25869, 3366, 4226, 5897, 3643, 1007, 1063, 4118, 4118, 1025, 2065, 1006, 19701, 999, 1027, 2023, 1012, 4289, 1007, 1063, 2345, 4874, 3058, 7292, 14192, 20097, 1027, 2131...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java
NumberUtil.toBytes
public static byte[] toBytes(int value) { final byte[] result = new byte[4]; result[0] = (byte) (value >> 24); result[1] = (byte) (value >> 16); result[2] = (byte) (value >> 8); result[3] = (byte) (value /* >> 0 */); return result; }
java
public static byte[] toBytes(int value) { final byte[] result = new byte[4]; result[0] = (byte) (value >> 24); result[1] = (byte) (value >> 16); result[2] = (byte) (value >> 8); result[3] = (byte) (value /* >> 0 */); return result; }
[ "public", "static", "byte", "[", "]", "toBytes", "(", "int", "value", ")", "{", "final", "byte", "[", "]", "result", "=", "new", "byte", "[", "4", "]", ";", "result", "[", "0", "]", "=", "(", "byte", ")", "(", "value", ">>", "24", ")", ";", "...
int值转byte数组,使用大端字节序(高位字节在前,低位字节在后)<br> 见:http://www.ruanyifeng.com/blog/2016/11/byte-order.html @param value 值 @return byte数组 @since 4.4.5
[ "int值转byte数组,使用大端字节序(高位字节在前,低位字节在后)<br", ">", "见:http", ":", "//", "www", ".", "ruanyifeng", ".", "com", "/", "blog", "/", "2016", "/", "11", "/", "byte", "-", "order", ".", "html" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java#L2202-L2211
train
Converts an int to a byte array.
[ 30522, 2270, 10763, 24880, 1031, 1033, 11291, 4570, 1006, 20014, 3643, 1007, 1063, 2345, 24880, 1031, 1033, 2765, 1027, 2047, 24880, 1031, 1018, 1033, 1025, 2765, 1031, 1014, 1033, 1027, 1006, 24880, 1007, 1006, 3643, 1028, 1028, 2484, 1007...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
redisson/redisson
redisson/src/main/java/org/redisson/spring/cache/CacheConfig.java
CacheConfig.fromJSON
public static Map<String, ? extends CacheConfig> fromJSON(URL url) throws IOException { return new CacheConfigSupport().fromJSON(url); }
java
public static Map<String, ? extends CacheConfig> fromJSON(URL url) throws IOException { return new CacheConfigSupport().fromJSON(url); }
[ "public", "static", "Map", "<", "String", ",", "?", "extends", "CacheConfig", ">", "fromJSON", "(", "URL", "url", ")", "throws", "IOException", "{", "return", "new", "CacheConfigSupport", "(", ")", ".", "fromJSON", "(", "url", ")", ";", "}" ]
Read config objects stored in JSON format from <code>URL</code> @param url of config @return config @throws IOException error
[ "Read", "config", "objects", "stored", "in", "JSON", "format", "from", "<code", ">", "URL<", "/", "code", ">" ]
d3acc0249b2d5d658d36d99e2c808ce49332ea44
https://github.com/redisson/redisson/blob/d3acc0249b2d5d658d36d99e2c808ce49332ea44/redisson/src/main/java/org/redisson/spring/cache/CacheConfig.java#L146-L148
train
Creates a map of cache configs from a JSON file.
[ 30522, 2270, 10763, 4949, 1026, 5164, 1010, 1029, 8908, 17053, 8663, 8873, 2290, 1028, 2013, 22578, 2239, 1006, 24471, 2140, 24471, 2140, 1007, 11618, 22834, 10288, 24422, 1063, 2709, 2047, 17053, 8663, 8873, 5620, 6279, 6442, 1006, 1007, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
handler/src/main/java/io/netty/handler/ssl/OpenSslSessionStats.java
OpenSslSessionStats.connectGood
public long connectGood() { Lock readerLock = context.ctxLock.readLock(); readerLock.lock(); try { return SSLContext.sessionConnectGood(context.ctx); } finally { readerLock.unlock(); } }
java
public long connectGood() { Lock readerLock = context.ctxLock.readLock(); readerLock.lock(); try { return SSLContext.sessionConnectGood(context.ctx); } finally { readerLock.unlock(); } }
[ "public", "long", "connectGood", "(", ")", "{", "Lock", "readerLock", "=", "context", ".", "ctxLock", ".", "readLock", "(", ")", ";", "readerLock", ".", "lock", "(", ")", ";", "try", "{", "return", "SSLContext", ".", "sessionConnectGood", "(", "context", ...
Returns the number of successfully established SSL/TLS sessions in client mode.
[ "Returns", "the", "number", "of", "successfully", "established", "SSL", "/", "TLS", "sessions", "in", "client", "mode", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/ssl/OpenSslSessionStats.java#L69-L77
train
Get the number of good sessions.
[ 30522, 2270, 2146, 7532, 24146, 1006, 1007, 1063, 5843, 8068, 7878, 1027, 6123, 1012, 14931, 2595, 7878, 1012, 3191, 7878, 1006, 1007, 1025, 8068, 7878, 1012, 5843, 1006, 1007, 1025, 3046, 1063, 2709, 7020, 22499, 10111, 18413, 1012, 5219, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/io/FileUtil.java
FileUtil.getOutputStream
public static BufferedOutputStream getOutputStream(File file) throws IORuntimeException { try { return new BufferedOutputStream(new FileOutputStream(touch(file))); } catch (Exception e) { throw new IORuntimeException(e); } }
java
public static BufferedOutputStream getOutputStream(File file) throws IORuntimeException { try { return new BufferedOutputStream(new FileOutputStream(touch(file))); } catch (Exception e) { throw new IORuntimeException(e); } }
[ "public", "static", "BufferedOutputStream", "getOutputStream", "(", "File", "file", ")", "throws", "IORuntimeException", "{", "try", "{", "return", "new", "BufferedOutputStream", "(", "new", "FileOutputStream", "(", "touch", "(", "file", ")", ")", ")", ";", "}",...
获得一个输出流对象 @param file 文件 @return 输出流对象 @throws IORuntimeException IO异常
[ "获得一个输出流对象" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/FileUtil.java#L2557-L2563
train
Creates a BufferedOutputStream for writing to the specified file.
[ 30522, 2270, 10763, 17698, 26010, 4904, 18780, 21422, 2131, 5833, 18780, 21422, 1006, 5371, 5371, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 3046, 1063, 2709, 2047, 17698, 26010, 4904, 18780, 21422, 1006, 2047, 5371, 5833, 18780, 21...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/internal/StringUtil.java
StringUtil.toHexString
public static <T extends Appendable> T toHexString(T dst, byte[] src) { return toHexString(dst, src, 0, src.length); }
java
public static <T extends Appendable> T toHexString(T dst, byte[] src) { return toHexString(dst, src, 0, src.length); }
[ "public", "static", "<", "T", "extends", "Appendable", ">", "T", "toHexString", "(", "T", "dst", ",", "byte", "[", "]", "src", ")", "{", "return", "toHexString", "(", "dst", ",", "src", ",", "0", ",", "src", ".", "length", ")", ";", "}" ]
Converts the specified byte array into a hexadecimal value and appends it to the specified buffer.
[ "Converts", "the", "specified", "byte", "array", "into", "a", "hexadecimal", "value", "and", "appends", "it", "to", "the", "specified", "buffer", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/internal/StringUtil.java#L174-L176
train
Convert a byte array to a hexidecimal string.
[ 30522, 2270, 10763, 1026, 1056, 8908, 10439, 10497, 3085, 1028, 1056, 2000, 5369, 2595, 3367, 4892, 1006, 1056, 16233, 2102, 1010, 24880, 1031, 1033, 5034, 2278, 1007, 30524, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/types/StringValue.java
StringValue.startsWith
public boolean startsWith(CharSequence prefix, int startIndex) { final char[] thisChars = this.value; final int pLen = this.len; final int sLen = prefix.length(); if ((startIndex < 0) || (startIndex > pLen - sLen)) { return false; } int sPos = 0; while (sPos < sLen) { if (thisChars[startIndex++] != prefix.charAt(sPos++)) { return false; } } return true; }
java
public boolean startsWith(CharSequence prefix, int startIndex) { final char[] thisChars = this.value; final int pLen = this.len; final int sLen = prefix.length(); if ((startIndex < 0) || (startIndex > pLen - sLen)) { return false; } int sPos = 0; while (sPos < sLen) { if (thisChars[startIndex++] != prefix.charAt(sPos++)) { return false; } } return true; }
[ "public", "boolean", "startsWith", "(", "CharSequence", "prefix", ",", "int", "startIndex", ")", "{", "final", "char", "[", "]", "thisChars", "=", "this", ".", "value", ";", "final", "int", "pLen", "=", "this", ".", "len", ";", "final", "int", "sLen", ...
Checks whether the substring, starting at the specified index, starts with the given prefix string. @param prefix The prefix character sequence. @param startIndex The position to start checking for the prefix. @return True, if this StringValue substring, starting at position <code>startIndex</code> has <code>prefix</code> as its prefix.
[ "Checks", "whether", "the", "substring", "starting", "at", "the", "specified", "index", "starts", "with", "the", "given", "prefix", "string", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/types/StringValue.java#L383-L399
train
Checks if this string starts with the given prefix.
[ 30522, 2270, 22017, 20898, 4627, 24415, 1006, 25869, 3366, 4226, 5897, 17576, 1010, 20014, 2707, 22254, 10288, 1007, 1063, 2345, 25869, 1031, 1033, 2023, 7507, 2869, 1027, 2023, 1012, 3643, 1025, 2345, 20014, 20228, 2368, 1027, 2023, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http/src/main/java/io/netty/handler/codec/http/HttpUtil.java
HttpUtil.getWebSocketContentLength
private static int getWebSocketContentLength(HttpMessage message) { // WebSocket messages have constant content-lengths. HttpHeaders h = message.headers(); if (message instanceof HttpRequest) { HttpRequest req = (HttpRequest) message; if (HttpMethod.GET.equals(req.method()) && h.contains(HttpHeaderNames.SEC_WEBSOCKET_KEY1) && h.contains(HttpHeaderNames.SEC_WEBSOCKET_KEY2)) { return 8; } } else if (message instanceof HttpResponse) { HttpResponse res = (HttpResponse) message; if (res.status().code() == 101 && h.contains(HttpHeaderNames.SEC_WEBSOCKET_ORIGIN) && h.contains(HttpHeaderNames.SEC_WEBSOCKET_LOCATION)) { return 16; } } // Not a web socket message return -1; }
java
private static int getWebSocketContentLength(HttpMessage message) { // WebSocket messages have constant content-lengths. HttpHeaders h = message.headers(); if (message instanceof HttpRequest) { HttpRequest req = (HttpRequest) message; if (HttpMethod.GET.equals(req.method()) && h.contains(HttpHeaderNames.SEC_WEBSOCKET_KEY1) && h.contains(HttpHeaderNames.SEC_WEBSOCKET_KEY2)) { return 8; } } else if (message instanceof HttpResponse) { HttpResponse res = (HttpResponse) message; if (res.status().code() == 101 && h.contains(HttpHeaderNames.SEC_WEBSOCKET_ORIGIN) && h.contains(HttpHeaderNames.SEC_WEBSOCKET_LOCATION)) { return 16; } } // Not a web socket message return -1; }
[ "private", "static", "int", "getWebSocketContentLength", "(", "HttpMessage", "message", ")", "{", "// WebSocket messages have constant content-lengths.", "HttpHeaders", "h", "=", "message", ".", "headers", "(", ")", ";", "if", "(", "message", "instanceof", "HttpRequest"...
Returns the content length of the specified web socket message. If the specified message is not a web socket message, {@code -1} is returned.
[ "Returns", "the", "content", "length", "of", "the", "specified", "web", "socket", "message", ".", "If", "the", "specified", "message", "is", "not", "a", "web", "socket", "message", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http/src/main/java/io/netty/handler/codec/http/HttpUtil.java#L203-L224
train
Gets the content length of a web socket message.
[ 30522, 2797, 10763, 20014, 2131, 8545, 5910, 7432, 3388, 8663, 6528, 9286, 3070, 2705, 1006, 8299, 7834, 3736, 3351, 4471, 1007, 1063, 1013, 1013, 4773, 6499, 19869, 2102, 7696, 2031, 5377, 4180, 1011, 10742, 1012, 8299, 4974, 2545, 1044, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/handler/HandleHelper.java
HandleHelper.handleRow
public static <T> T handleRow(int columnCount, ResultSetMetaData meta, ResultSet rs, T bean) throws SQLException { return handleRow(columnCount, meta, rs).toBeanIgnoreCase(bean); }
java
public static <T> T handleRow(int columnCount, ResultSetMetaData meta, ResultSet rs, T bean) throws SQLException { return handleRow(columnCount, meta, rs).toBeanIgnoreCase(bean); }
[ "public", "static", "<", "T", ">", "T", "handleRow", "(", "int", "columnCount", ",", "ResultSetMetaData", "meta", ",", "ResultSet", "rs", ",", "T", "bean", ")", "throws", "SQLException", "{", "return", "handleRow", "(", "columnCount", ",", "meta", ",", "rs...
处理单条数据 @param columnCount 列数 @param meta ResultSetMetaData @param rs 数据集 @param bean 目标Bean @return 每一行的Entity @throws SQLException SQL执行异常 @since 3.3.1
[ "处理单条数据" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/handler/HandleHelper.java#L41-L43
train
Handle a single row of a ResultSet.
[ 30522, 2270, 10763, 1026, 1056, 1028, 1056, 28213, 5004, 1006, 20014, 5930, 3597, 16671, 1010, 3463, 3388, 11368, 8447, 2696, 18804, 1010, 3463, 3388, 12667, 1010, 1056, 14068, 1007, 11618, 29296, 10288, 24422, 1063, 2709, 28213, 5004, 1006, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/operators/sort/NormalizedKeySorter.java
NormalizedKeySorter.write
@Override public boolean write(T record) throws IOException { //check whether we need a new memory segment for the sort index if (this.currentSortIndexOffset > this.lastIndexEntryOffset) { if (memoryAvailable()) { this.currentSortIndexSegment = nextMemorySegment(); this.sortIndex.add(this.currentSortIndexSegment); this.currentSortIndexOffset = 0; this.sortIndexBytes += this.segmentSize; } else { return false; } } // serialize the record into the data buffers try { this.serializer.serialize(record, this.recordCollector); } catch (EOFException e) { return false; } final long newOffset = this.recordCollector.getCurrentOffset(); final boolean shortRecord = newOffset - this.currentDataBufferOffset < LARGE_RECORD_THRESHOLD; if (!shortRecord && LOG.isDebugEnabled()) { LOG.debug("Put a large record ( >" + LARGE_RECORD_THRESHOLD + " into the sort buffer"); } // add the pointer and the normalized key this.currentSortIndexSegment.putLong(this.currentSortIndexOffset, shortRecord ? this.currentDataBufferOffset : (this.currentDataBufferOffset | LARGE_RECORD_TAG)); if (this.numKeyBytes != 0) { this.comparator.putNormalizedKey(record, this.currentSortIndexSegment, this.currentSortIndexOffset + OFFSET_LEN, this.numKeyBytes); } this.currentSortIndexOffset += this.indexEntrySize; this.currentDataBufferOffset = newOffset; this.numRecords++; return true; }
java
@Override public boolean write(T record) throws IOException { //check whether we need a new memory segment for the sort index if (this.currentSortIndexOffset > this.lastIndexEntryOffset) { if (memoryAvailable()) { this.currentSortIndexSegment = nextMemorySegment(); this.sortIndex.add(this.currentSortIndexSegment); this.currentSortIndexOffset = 0; this.sortIndexBytes += this.segmentSize; } else { return false; } } // serialize the record into the data buffers try { this.serializer.serialize(record, this.recordCollector); } catch (EOFException e) { return false; } final long newOffset = this.recordCollector.getCurrentOffset(); final boolean shortRecord = newOffset - this.currentDataBufferOffset < LARGE_RECORD_THRESHOLD; if (!shortRecord && LOG.isDebugEnabled()) { LOG.debug("Put a large record ( >" + LARGE_RECORD_THRESHOLD + " into the sort buffer"); } // add the pointer and the normalized key this.currentSortIndexSegment.putLong(this.currentSortIndexOffset, shortRecord ? this.currentDataBufferOffset : (this.currentDataBufferOffset | LARGE_RECORD_TAG)); if (this.numKeyBytes != 0) { this.comparator.putNormalizedKey(record, this.currentSortIndexSegment, this.currentSortIndexOffset + OFFSET_LEN, this.numKeyBytes); } this.currentSortIndexOffset += this.indexEntrySize; this.currentDataBufferOffset = newOffset; this.numRecords++; return true; }
[ "@", "Override", "public", "boolean", "write", "(", "T", "record", ")", "throws", "IOException", "{", "//check whether we need a new memory segment for the sort index", "if", "(", "this", ".", "currentSortIndexOffset", ">", "this", ".", "lastIndexEntryOffset", ")", "{",...
Writes a given record to this sort buffer. The written record will be appended and take the last logical position. @param record The record to be written. @return True, if the record was successfully written, false, if the sort buffer was full. @throws IOException Thrown, if an error occurred while serializing the record into the buffers.
[ "Writes", "a", "given", "record", "to", "this", "sort", "buffer", ".", "The", "written", "record", "will", "be", "appended", "and", "take", "the", "last", "logical", "position", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/operators/sort/NormalizedKeySorter.java#L265-L306
train
Write the record into the data buffers.
[ 30522, 1030, 2058, 15637, 2270, 22017, 20898, 4339, 1006, 1056, 2501, 1007, 11618, 22834, 10288, 24422, 1063, 1013, 1013, 4638, 3251, 2057, 2342, 1037, 2047, 3638, 6903, 2005, 1996, 4066, 5950, 2065, 1006, 2023, 1012, 14731, 11589, 22254, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
redisson/redisson
redisson/src/main/java/org/redisson/spring/cache/CacheConfig.java
CacheConfig.fromYAML
public static Map<String, ? extends CacheConfig> fromYAML(String content) throws IOException { return new CacheConfigSupport().fromYAML(content); }
java
public static Map<String, ? extends CacheConfig> fromYAML(String content) throws IOException { return new CacheConfigSupport().fromYAML(content); }
[ "public", "static", "Map", "<", "String", ",", "?", "extends", "CacheConfig", ">", "fromYAML", "(", "String", "content", ")", "throws", "IOException", "{", "return", "new", "CacheConfigSupport", "(", ")", ".", "fromYAML", "(", "content", ")", ";", "}" ]
Read config objects stored in YAML format from <code>String</code> @param content of config @return config @throws IOException error
[ "Read", "config", "objects", "stored", "in", "YAML", "format", "from", "<code", ">", "String<", "/", "code", ">" ]
d3acc0249b2d5d658d36d99e2c808ce49332ea44
https://github.com/redisson/redisson/blob/d3acc0249b2d5d658d36d99e2c808ce49332ea44/redisson/src/main/java/org/redisson/spring/cache/CacheConfig.java#L179-L181
train
Creates a map from a YAML string.
[ 30522, 2270, 10763, 4949, 1026, 5164, 1010, 1029, 8908, 17053, 8663, 8873, 2290, 1028, 2013, 14852, 2140, 1006, 5164, 4180, 1007, 11618, 22834, 10288, 24422, 1063, 2709, 2047, 17053, 8663, 8873, 5620, 6279, 6442, 1006, 1007, 1012, 2013, 148...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/sql/SqlUtil.java
SqlUtil.toSqlDate
public static java.sql.Date toSqlDate(java.util.Date date) { return new java.sql.Date(date.getTime()); }
java
public static java.sql.Date toSqlDate(java.util.Date date) { return new java.sql.Date(date.getTime()); }
[ "public", "static", "java", ".", "sql", ".", "Date", "toSqlDate", "(", "java", ".", "util", ".", "Date", "date", ")", "{", "return", "new", "java", ".", "sql", ".", "Date", "(", "date", ".", "getTime", "(", ")", ")", ";", "}" ]
转换为{@link java.sql.Date} @param date {@link java.util.Date} @return {@link java.sql.Date} @since 3.1.2
[ "转换为", "{", "@link", "java", ".", "sql", ".", "Date", "}" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/sql/SqlUtil.java#L192-L194
train
Converts a java. util. Date to a java. sql. Date.
[ 30522, 2270, 10763, 9262, 1012, 29296, 1012, 3058, 2000, 2015, 4160, 15150, 2618, 1006, 9262, 1012, 21183, 4014, 1012, 3058, 3058, 1007, 1063, 2709, 2047, 9262, 1012, 29296, 1012, 3058, 1006, 3058, 1012, 2131, 7292, 1006, 1007, 1007, 1025, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/task/TaskSchedulerBuilder.java
TaskSchedulerBuilder.customizers
public TaskSchedulerBuilder customizers( Iterable<TaskSchedulerCustomizer> customizers) { Assert.notNull(customizers, "Customizers must not be null"); return new TaskSchedulerBuilder(this.poolSize, this.awaitTermination, this.awaitTerminationPeriod, this.threadNamePrefix, append(null, customizers)); }
java
public TaskSchedulerBuilder customizers( Iterable<TaskSchedulerCustomizer> customizers) { Assert.notNull(customizers, "Customizers must not be null"); return new TaskSchedulerBuilder(this.poolSize, this.awaitTermination, this.awaitTerminationPeriod, this.threadNamePrefix, append(null, customizers)); }
[ "public", "TaskSchedulerBuilder", "customizers", "(", "Iterable", "<", "TaskSchedulerCustomizer", ">", "customizers", ")", "{", "Assert", ".", "notNull", "(", "customizers", ",", "\"Customizers must not be null\"", ")", ";", "return", "new", "TaskSchedulerBuilder", "(",...
Set the {@link TaskSchedulerCustomizer taskSchedulerCustomizers} that should be applied to the {@link ThreadPoolTaskScheduler}. Customizers are applied in the order that they were added after builder configuration has been applied. Setting this value will replace any previously configured customizers. @param customizers the customizers to set @return a new builder instance @see #additionalCustomizers(TaskSchedulerCustomizer...)
[ "Set", "the", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/task/TaskSchedulerBuilder.java#L142-L148
train
Create a new TaskSchedulerBuilder with customizers.
[ 30522, 2270, 8518, 7690, 9307, 15185, 19231, 4063, 7661, 17629, 2015, 1006, 2009, 6906, 3468, 1026, 8518, 7690, 9307, 29006, 20389, 17629, 1028, 7661, 17629, 2015, 1007, 1063, 20865, 1012, 2025, 11231, 3363, 1006, 7661, 17629, 2015, 1010, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-cron/src/main/java/cn/hutool/cron/TaskTable.java
TaskTable.executeTaskIfMatch
public void executeTaskIfMatch(long millis) { final Lock readLock = lock.readLock(); try { readLock.lock(); executeTaskIfMatchInternal(millis); } finally { readLock.unlock(); } }
java
public void executeTaskIfMatch(long millis) { final Lock readLock = lock.readLock(); try { readLock.lock(); executeTaskIfMatchInternal(millis); } finally { readLock.unlock(); } }
[ "public", "void", "executeTaskIfMatch", "(", "long", "millis", ")", "{", "final", "Lock", "readLock", "=", "lock", ".", "readLock", "(", ")", ";", "try", "{", "readLock", ".", "lock", "(", ")", ";", "executeTaskIfMatchInternal", "(", "millis", ")", ";", ...
如果时间匹配则执行相应的Task,带读锁 @param millis 时间毫秒
[ "如果时间匹配则执行相应的Task,带读锁" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-cron/src/main/java/cn/hutool/cron/TaskTable.java#L200-L208
train
Executes a task if the specified time has passed.
[ 30522, 2270, 11675, 15389, 10230, 3211, 16715, 4017, 2818, 1006, 2146, 4971, 2483, 1007, 1063, 2345, 5843, 3191, 7878, 1027, 5843, 1012, 3191, 7878, 1006, 1007, 1025, 3046, 1063, 3191, 7878, 1012, 5843, 1006, 1007, 1025, 15389, 10230, 3211,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec/src/main/java/io/netty/handler/codec/MessageToByteEncoder.java
MessageToByteEncoder.allocateBuffer
protected ByteBuf allocateBuffer(ChannelHandlerContext ctx, @SuppressWarnings("unused") I msg, boolean preferDirect) throws Exception { if (preferDirect) { return ctx.alloc().ioBuffer(); } else { return ctx.alloc().heapBuffer(); } }
java
protected ByteBuf allocateBuffer(ChannelHandlerContext ctx, @SuppressWarnings("unused") I msg, boolean preferDirect) throws Exception { if (preferDirect) { return ctx.alloc().ioBuffer(); } else { return ctx.alloc().heapBuffer(); } }
[ "protected", "ByteBuf", "allocateBuffer", "(", "ChannelHandlerContext", "ctx", ",", "@", "SuppressWarnings", "(", "\"unused\"", ")", "I", "msg", ",", "boolean", "preferDirect", ")", "throws", "Exception", "{", "if", "(", "preferDirect", ")", "{", "return", "ctx"...
Allocate a {@link ByteBuf} which will be used as argument of {@link #encode(ChannelHandlerContext, I, ByteBuf)}. Sub-classes may override this method to return {@link ByteBuf} with a perfect matching {@code initialCapacity}.
[ "Allocate", "a", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec/src/main/java/io/netty/handler/codec/MessageToByteEncoder.java#L137-L144
train
Allocate a buffer for a single message.
[ 30522, 5123, 24880, 8569, 2546, 2035, 24755, 2618, 8569, 12494, 1006, 3149, 11774, 3917, 8663, 18209, 14931, 2595, 1010, 1030, 16081, 9028, 5582, 2015, 1006, 1000, 15171, 1000, 1007, 1045, 5796, 2290, 1010, 22017, 20898, 9544, 4305, 2890, 6...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java
NumberUtil.sub
public static double sub(float v1, float v2) { return sub(Float.toString(v1), Float.toString(v2)).doubleValue(); }
java
public static double sub(float v1, float v2) { return sub(Float.toString(v1), Float.toString(v2)).doubleValue(); }
[ "public", "static", "double", "sub", "(", "float", "v1", ",", "float", "v2", ")", "{", "return", "sub", "(", "Float", ".", "toString", "(", "v1", ")", ",", "Float", ".", "toString", "(", "v2", ")", ")", ".", "doubleValue", "(", ")", ";", "}" ]
提供精确的减法运算 @param v1 被减数 @param v2 减数 @return 差
[ "提供精确的减法运算" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java#L192-L194
train
Subtract two floating point numbers.
[ 30522, 2270, 10763, 3313, 4942, 1006, 14257, 1058, 2487, 1010, 14257, 1058, 2475, 1007, 1063, 2709, 4942, 1006, 14257, 1012, 2000, 3367, 4892, 1006, 1058, 2487, 1007, 1010, 14257, 1012, 2000, 3367, 4892, 1006, 1058, 2475, 1007, 1007, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/types/StringValue.java
StringValue.setValue
public void setValue(CharBuffer buffer) { checkNotNull(buffer); final int len = buffer.length(); ensureSize(len); buffer.get(this.value, 0, len); this.len = len; this.hashCode = 0; }
java
public void setValue(CharBuffer buffer) { checkNotNull(buffer); final int len = buffer.length(); ensureSize(len); buffer.get(this.value, 0, len); this.len = len; this.hashCode = 0; }
[ "public", "void", "setValue", "(", "CharBuffer", "buffer", ")", "{", "checkNotNull", "(", "buffer", ")", ";", "final", "int", "len", "=", "buffer", ".", "length", "(", ")", ";", "ensureSize", "(", "len", ")", ";", "buffer", ".", "get", "(", "this", "...
Sets the contents of this string to the contents of the given <tt>CharBuffer</tt>. The characters between the buffer's current position (inclusive) and the buffer's limit (exclusive) will be stored in this string. @param buffer The character buffer to read the characters from.
[ "Sets", "the", "contents", "of", "this", "string", "to", "the", "contents", "of", "the", "given", "<tt", ">", "CharBuffer<", "/", "tt", ">", ".", "The", "characters", "between", "the", "buffer", "s", "current", "position", "(", "inclusive", ")", "and", "...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/types/StringValue.java#L205-L212
train
Sets the value of this object from the specified char buffer.
[ 30522, 2270, 11675, 2275, 10175, 5657, 1006, 25869, 8569, 12494, 17698, 1007, 1063, 4638, 17048, 11231, 3363, 1006, 17698, 1007, 1025, 2345, 20014, 18798, 1027, 17698, 1012, 3091, 1006, 1007, 1025, 21312, 4697, 1006, 18798, 1007, 1025, 17698,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/handler/BeanHandler.java
BeanHandler.create
public static <E> BeanHandler<E> create(Class<E> beanType) { return new BeanHandler<E>(beanType); }
java
public static <E> BeanHandler<E> create(Class<E> beanType) { return new BeanHandler<E>(beanType); }
[ "public", "static", "<", "E", ">", "BeanHandler", "<", "E", ">", "create", "(", "Class", "<", "E", ">", "beanType", ")", "{", "return", "new", "BeanHandler", "<", "E", ">", "(", "beanType", ")", ";", "}" ]
创建一个 BeanHandler对象 @param <E> 处理对象类型 @param beanType Bean类型 @return BeanHandler对象
[ "创建一个", "BeanHandler对象" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/handler/BeanHandler.java#L25-L27
train
Creates a new instance of the BeanHandler class.
[ 30522, 2270, 10763, 1026, 1041, 1028, 14068, 11774, 3917, 1026, 1041, 1028, 3443, 1006, 2465, 1026, 1041, 1028, 14068, 13874, 1007, 1063, 2709, 2047, 14068, 11774, 3917, 1026, 1041, 1028, 1006, 14068, 13874, 1007, 1025, 1065, 102, 0, 0, 0...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http2/src/main/java/io/netty/handler/codec/http2/ReadOnlyHttp2Headers.java
ReadOnlyHttp2Headers.serverHeaders
public static ReadOnlyHttp2Headers serverHeaders(boolean validateHeaders, AsciiString status, AsciiString... otherHeaders) { return new ReadOnlyHttp2Headers(validateHeaders, new AsciiString[] { PseudoHeaderName.STATUS.value(), status }, otherHeaders); }
java
public static ReadOnlyHttp2Headers serverHeaders(boolean validateHeaders, AsciiString status, AsciiString... otherHeaders) { return new ReadOnlyHttp2Headers(validateHeaders, new AsciiString[] { PseudoHeaderName.STATUS.value(), status }, otherHeaders); }
[ "public", "static", "ReadOnlyHttp2Headers", "serverHeaders", "(", "boolean", "validateHeaders", ",", "AsciiString", "status", ",", "AsciiString", "...", "otherHeaders", ")", "{", "return", "new", "ReadOnlyHttp2Headers", "(", "validateHeaders", ",", "new", "AsciiString",...
Create a new read only representation of headers used by servers. @param validateHeaders {@code true} will run validation on each header name/value pair to ensure protocol compliance. @param status The value for {@link PseudoHeaderName#STATUS}. @param otherHeaders A an array of key:value pairs. Must not contain any <a href="https://tools.ietf.org/html/rfc7540#section-8.1.2.1">pseudo headers</a> or {@code null} names/values. A copy will <strong>NOT</strong> be made of this array. If the contents of this array may be modified externally you are responsible for passing in a copy. @return a new read only representation of headers used by servers.
[ "Create", "a", "new", "read", "only", "representation", "of", "headers", "used", "by", "servers", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/ReadOnlyHttp2Headers.java#L107-L113
train
Create a ReadOnlyHttp2Headers object with the server headers.
[ 30522, 2270, 10763, 3191, 2239, 2135, 11039, 25856, 2475, 4974, 2545, 8241, 4974, 2545, 1006, 22017, 20898, 9398, 3686, 4974, 2545, 1010, 2004, 6895, 2923, 4892, 3570, 1010, 2004, 6895, 2923, 4892, 1012, 1012, 1012, 2060, 4974, 2545, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/instance/InstanceManager.java
InstanceManager.unregisterAllTaskManagers
public void unregisterAllTaskManagers() { for(Instance instance: registeredHostsById.values()) { deadHosts.add(instance.getTaskManagerID()); instance.markDead(); totalNumberOfAliveTaskSlots -= instance.getTotalNumberOfSlots(); notifyDeadInstance(instance); } registeredHostsById.clear(); registeredHostsByResource.clear(); }
java
public void unregisterAllTaskManagers() { for(Instance instance: registeredHostsById.values()) { deadHosts.add(instance.getTaskManagerID()); instance.markDead(); totalNumberOfAliveTaskSlots -= instance.getTotalNumberOfSlots(); notifyDeadInstance(instance); } registeredHostsById.clear(); registeredHostsByResource.clear(); }
[ "public", "void", "unregisterAllTaskManagers", "(", ")", "{", "for", "(", "Instance", "instance", ":", "registeredHostsById", ".", "values", "(", ")", ")", "{", "deadHosts", ".", "add", "(", "instance", ".", "getTaskManagerID", "(", ")", ")", ";", "instance"...
Unregisters all currently registered TaskManagers from the InstanceManager.
[ "Unregisters", "all", "currently", "registered", "TaskManagers", "from", "the", "InstanceManager", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/instance/InstanceManager.java#L230-L243
train
Unregister all TaskManagers.
[ 30522, 2270, 11675, 4895, 2890, 24063, 21673, 24458, 6711, 24805, 15776, 1006, 1007, 1063, 2005, 1006, 6013, 6013, 1024, 5068, 15006, 3215, 3762, 3593, 1012, 5300, 1006, 1007, 1007, 1063, 2757, 15006, 3215, 1012, 5587, 1006, 6013, 1012, 213...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/Entity.java
Entity.parseBean
@Override public <T> Entity parseBean(T bean) { if (StrUtil.isBlank(this.tableName)) { this.setTableName(StrUtil.lowerFirst(bean.getClass().getSimpleName())); } return (Entity) super.parseBean(bean); }
java
@Override public <T> Entity parseBean(T bean) { if (StrUtil.isBlank(this.tableName)) { this.setTableName(StrUtil.lowerFirst(bean.getClass().getSimpleName())); } return (Entity) super.parseBean(bean); }
[ "@", "Override", "public", "<", "T", ">", "Entity", "parseBean", "(", "T", "bean", ")", "{", "if", "(", "StrUtil", ".", "isBlank", "(", "this", ".", "tableName", ")", ")", "{", "this", ".", "setTableName", "(", "StrUtil", ".", "lowerFirst", "(", "bea...
将值对象转换为Entity<br> 类名会被当作表名,小写第一个字母 @param <T> Bean对象类型 @param bean Bean对象 @return 自己
[ "将值对象转换为Entity<br", ">", "类名会被当作表名,小写第一个字母" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/Entity.java#L193-L199
train
Override this method to set the table name and the entity type.
[ 30522, 1030, 2058, 15637, 2270, 1026, 1056, 1028, 9178, 11968, 3366, 4783, 2319, 1006, 1056, 14068, 1007, 1063, 2065, 1006, 2358, 22134, 4014, 1012, 2003, 28522, 8950, 1006, 2023, 1012, 2795, 18442, 1007, 1007, 1063, 2023, 1012, 2275, 10880...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/internal/ResourcesUtil.java
ResourcesUtil.getFile
public static File getFile(Class resourceClass, String fileName) { try { return new File(URLDecoder.decode(resourceClass.getResource(fileName).getFile(), "UTF-8")); } catch (UnsupportedEncodingException e) { return new File(resourceClass.getResource(fileName).getFile()); } }
java
public static File getFile(Class resourceClass, String fileName) { try { return new File(URLDecoder.decode(resourceClass.getResource(fileName).getFile(), "UTF-8")); } catch (UnsupportedEncodingException e) { return new File(resourceClass.getResource(fileName).getFile()); } }
[ "public", "static", "File", "getFile", "(", "Class", "resourceClass", ",", "String", "fileName", ")", "{", "try", "{", "return", "new", "File", "(", "URLDecoder", ".", "decode", "(", "resourceClass", ".", "getResource", "(", "fileName", ")", ".", "getFile", ...
Returns a {@link File} named {@code fileName} associated with {@link Class} {@code resourceClass} . @param resourceClass The associated class @param fileName The file name @return The file named {@code fileName} associated with {@link Class} {@code resourceClass} .
[ "Returns", "a", "{", "@link", "File", "}", "named", "{", "@code", "fileName", "}", "associated", "with", "{", "@link", "Class", "}", "{", "@code", "resourceClass", "}", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/internal/ResourcesUtil.java#L34-L40
train
Gets the File from the resource class and the file name.
[ 30522, 2270, 10763, 5371, 2131, 8873, 2571, 1006, 2465, 7692, 26266, 1010, 5164, 5371, 18442, 1007, 1063, 3046, 1063, 2709, 2047, 5371, 1006, 24471, 17920, 16044, 2099, 1012, 21933, 3207, 1006, 7692, 26266, 1012, 2131, 6072, 8162, 3401, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/api/common/operators/CollectionExecutor.java
CollectionExecutor.executeDataSink
private <IN> void executeDataSink(GenericDataSinkBase<?> sink, int superStep) throws Exception { Operator<?> inputOp = sink.getInput(); if (inputOp == null) { throw new InvalidProgramException("The data sink " + sink.getName() + " has no input."); } @SuppressWarnings("unchecked") List<IN> input = (List<IN>) execute(inputOp); @SuppressWarnings("unchecked") GenericDataSinkBase<IN> typedSink = (GenericDataSinkBase<IN>) sink; // build the runtime context and compute broadcast variables, if necessary TaskInfo taskInfo = new TaskInfo(typedSink.getName(), 1, 0, 1, 0); RuntimeUDFContext ctx; MetricGroup metrics = new UnregisteredMetricsGroup(); if (RichOutputFormat.class.isAssignableFrom(typedSink.getUserCodeWrapper().getUserCodeClass())) { ctx = superStep == 0 ? new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics) : new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics); } else { ctx = null; } typedSink.executeOnCollections(input, ctx, executionConfig); }
java
private <IN> void executeDataSink(GenericDataSinkBase<?> sink, int superStep) throws Exception { Operator<?> inputOp = sink.getInput(); if (inputOp == null) { throw new InvalidProgramException("The data sink " + sink.getName() + " has no input."); } @SuppressWarnings("unchecked") List<IN> input = (List<IN>) execute(inputOp); @SuppressWarnings("unchecked") GenericDataSinkBase<IN> typedSink = (GenericDataSinkBase<IN>) sink; // build the runtime context and compute broadcast variables, if necessary TaskInfo taskInfo = new TaskInfo(typedSink.getName(), 1, 0, 1, 0); RuntimeUDFContext ctx; MetricGroup metrics = new UnregisteredMetricsGroup(); if (RichOutputFormat.class.isAssignableFrom(typedSink.getUserCodeWrapper().getUserCodeClass())) { ctx = superStep == 0 ? new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics) : new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics); } else { ctx = null; } typedSink.executeOnCollections(input, ctx, executionConfig); }
[ "private", "<", "IN", ">", "void", "executeDataSink", "(", "GenericDataSinkBase", "<", "?", ">", "sink", ",", "int", "superStep", ")", "throws", "Exception", "{", "Operator", "<", "?", ">", "inputOp", "=", "sink", ".", "getInput", "(", ")", ";", "if", ...
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/common/operators/CollectionExecutor.java#L175-L201
train
Execute a data sink.
[ 30522, 2797, 1026, 1999, 1028, 11675, 6472, 6790, 11493, 2243, 1006, 12391, 2850, 10230, 19839, 15058, 1026, 1029, 1028, 7752, 1010, 20014, 3565, 13473, 2361, 1007, 11618, 6453, 1063, 6872, 1026, 1029, 1028, 7953, 7361, 1027, 7752, 1012, 21...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/corpus/document/Document.java
Document.getWordList
public List<IWord> getWordList() { List<IWord> wordList = new LinkedList<IWord>(); for (Sentence sentence : sentenceList) { wordList.addAll(sentence.wordList); } return wordList; }
java
public List<IWord> getWordList() { List<IWord> wordList = new LinkedList<IWord>(); for (Sentence sentence : sentenceList) { wordList.addAll(sentence.wordList); } return wordList; }
[ "public", "List", "<", "IWord", ">", "getWordList", "(", ")", "{", "List", "<", "IWord", ">", "wordList", "=", "new", "LinkedList", "<", "IWord", ">", "(", ")", ";", "for", "(", "Sentence", "sentence", ":", "sentenceList", ")", "{", "wordList", ".", ...
获取单词序列 @return
[ "获取单词序列" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/document/Document.java#L66-L74
train
Get the word list.
[ 30522, 2270, 2862, 1026, 1045, 18351, 1028, 2131, 18351, 9863, 1006, 1007, 1063, 2862, 1026, 1045, 18351, 1028, 2773, 9863, 1027, 2047, 5799, 9863, 1026, 1045, 18351, 1028, 1006, 1007, 1025, 2005, 1006, 6251, 6251, 1024, 6251, 9863, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutor.java
TaskExecutor.requestStackTraceSample
@Override public CompletableFuture<StackTraceSampleResponse> requestStackTraceSample( final ExecutionAttemptID executionAttemptId, final int sampleId, final int numSamples, final Time delayBetweenSamples, final int maxStackTraceDepth, final Time timeout) { final Task task = taskSlotTable.getTask(executionAttemptId); if (task == null) { return FutureUtils.completedExceptionally( new IllegalStateException(String.format("Cannot sample task %s. " + "Task is not known to the task manager.", executionAttemptId))); } final CompletableFuture<List<StackTraceElement[]>> stackTracesFuture = stackTraceSampleService.requestStackTraceSample( TaskStackTraceSampleableTaskAdapter.fromTask(task), numSamples, delayBetweenSamples, maxStackTraceDepth); return stackTracesFuture.thenApply(stackTraces -> new StackTraceSampleResponse(sampleId, executionAttemptId, stackTraces)); }
java
@Override public CompletableFuture<StackTraceSampleResponse> requestStackTraceSample( final ExecutionAttemptID executionAttemptId, final int sampleId, final int numSamples, final Time delayBetweenSamples, final int maxStackTraceDepth, final Time timeout) { final Task task = taskSlotTable.getTask(executionAttemptId); if (task == null) { return FutureUtils.completedExceptionally( new IllegalStateException(String.format("Cannot sample task %s. " + "Task is not known to the task manager.", executionAttemptId))); } final CompletableFuture<List<StackTraceElement[]>> stackTracesFuture = stackTraceSampleService.requestStackTraceSample( TaskStackTraceSampleableTaskAdapter.fromTask(task), numSamples, delayBetweenSamples, maxStackTraceDepth); return stackTracesFuture.thenApply(stackTraces -> new StackTraceSampleResponse(sampleId, executionAttemptId, stackTraces)); }
[ "@", "Override", "public", "CompletableFuture", "<", "StackTraceSampleResponse", ">", "requestStackTraceSample", "(", "final", "ExecutionAttemptID", "executionAttemptId", ",", "final", "int", "sampleId", ",", "final", "int", "numSamples", ",", "final", "Time", "delayBet...
======================================================================
[ "======================================================================" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutor.java#L403-L427
train
Request a stack trace sample.
[ 30522, 1030, 2058, 15637, 2270, 4012, 10814, 10880, 11263, 11244, 1026, 9991, 6494, 9623, 16613, 3917, 2229, 26029, 3366, 1028, 11186, 2696, 3600, 6494, 9623, 16613, 2571, 1006, 2345, 7781, 19321, 6633, 13876, 3593, 7781, 19321, 6633, 13876, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-tools/spring-boot-configuration-processor/src/json-shade/java/org/springframework/boot/configurationprocessor/json/JSONObject.java
JSONObject.optDouble
public double optDouble(String name, double fallback) { Object object = opt(name); Double result = JSON.toDouble(object); return result != null ? result : fallback; }
java
public double optDouble(String name, double fallback) { Object object = opt(name); Double result = JSON.toDouble(object); return result != null ? result : fallback; }
[ "public", "double", "optDouble", "(", "String", "name", ",", "double", "fallback", ")", "{", "Object", "object", "=", "opt", "(", "name", ")", ";", "Double", "result", "=", "JSON", ".", "toDouble", "(", "object", ")", ";", "return", "result", "!=", "nu...
Returns the value mapped by {@code name} if it exists and is a double or can be coerced to a double. Returns {@code fallback} otherwise. @param name the name of the property @param fallback a fallback value @return the value or {@code fallback}
[ "Returns", "the", "value", "mapped", "by", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-tools/spring-boot-configuration-processor/src/json-shade/java/org/springframework/boot/configurationprocessor/json/JSONObject.java#L465-L469
train
Get the property as a double or fallback if the property is not present or is not a double.
[ 30522, 2270, 3313, 23569, 26797, 3468, 1006, 5164, 2171, 1010, 3313, 2991, 5963, 1007, 1063, 4874, 4874, 1027, 23569, 1006, 2171, 1007, 1025, 3313, 2765, 1027, 1046, 3385, 1012, 28681, 7140, 3468, 1006, 4874, 1007, 1025, 2709, 2765, 999, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
transport/src/main/java/io/netty/channel/AbstractCoalescingBufferQueue.java
AbstractCoalescingBufferQueue.composeIntoComposite
protected final ByteBuf composeIntoComposite(ByteBufAllocator alloc, ByteBuf cumulation, ByteBuf next) { // Create a composite buffer to accumulate this pair and potentially all the buffers // in the queue. Using +2 as we have already dequeued current and next. CompositeByteBuf composite = alloc.compositeBuffer(size() + 2); try { composite.addComponent(true, cumulation); composite.addComponent(true, next); } catch (Throwable cause) { composite.release(); safeRelease(next); throwException(cause); } return composite; }
java
protected final ByteBuf composeIntoComposite(ByteBufAllocator alloc, ByteBuf cumulation, ByteBuf next) { // Create a composite buffer to accumulate this pair and potentially all the buffers // in the queue. Using +2 as we have already dequeued current and next. CompositeByteBuf composite = alloc.compositeBuffer(size() + 2); try { composite.addComponent(true, cumulation); composite.addComponent(true, next); } catch (Throwable cause) { composite.release(); safeRelease(next); throwException(cause); } return composite; }
[ "protected", "final", "ByteBuf", "composeIntoComposite", "(", "ByteBufAllocator", "alloc", ",", "ByteBuf", "cumulation", ",", "ByteBuf", "next", ")", "{", "// Create a composite buffer to accumulate this pair and potentially all the buffers", "// in the queue. Using +2 as we have alr...
Compose {@code cumulation} and {@code next} into a new {@link CompositeByteBuf}.
[ "Compose", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/transport/src/main/java/io/netty/channel/AbstractCoalescingBufferQueue.java#L270-L283
train
Compose two buffers into a composite buffer.
[ 30522, 5123, 2345, 24880, 8569, 2546, 17202, 18447, 24163, 8737, 20049, 2618, 1006, 24880, 8569, 13976, 24755, 4263, 2035, 10085, 1010, 24880, 8569, 2546, 13988, 9513, 1010, 24880, 8569, 2546, 2279, 1007, 1063, 1013, 1013, 3443, 1037, 12490, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-crypto/src/main/java/cn/hutool/crypto/BCUtil.java
BCUtil.readPublicKey
public static PublicKey readPublicKey(InputStream pemStream) { final Certificate certificate = KeyUtil.readX509Certificate(pemStream); if(null == certificate) { return null; } return certificate.getPublicKey(); }
java
public static PublicKey readPublicKey(InputStream pemStream) { final Certificate certificate = KeyUtil.readX509Certificate(pemStream); if(null == certificate) { return null; } return certificate.getPublicKey(); }
[ "public", "static", "PublicKey", "readPublicKey", "(", "InputStream", "pemStream", ")", "{", "final", "Certificate", "certificate", "=", "KeyUtil", ".", "readX509Certificate", "(", "pemStream", ")", ";", "if", "(", "null", "==", "certificate", ")", "{", "return"...
读取PEM格式的公钥 @param pemStream pem流 @return {@link PublicKey} @since 4.5.2
[ "读取PEM格式的公钥" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/BCUtil.java#L109-L115
train
Reads a public key from the specified input stream.
[ 30522, 2270, 10763, 2270, 14839, 3191, 14289, 16558, 6799, 3240, 1006, 20407, 25379, 21877, 5244, 25379, 1007, 1063, 2345, 8196, 8196, 1027, 3145, 21823, 2140, 1012, 3191, 2595, 12376, 2683, 17119, 3775, 8873, 16280, 1006, 21877, 5244, 25379,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/concurrent/FutureUtils.java
FutureUtils.waitForAll
public static ConjunctFuture<Void> waitForAll(Collection<? extends CompletableFuture<?>> futures) { checkNotNull(futures, "futures"); return new WaitingConjunctFuture(futures); }
java
public static ConjunctFuture<Void> waitForAll(Collection<? extends CompletableFuture<?>> futures) { checkNotNull(futures, "futures"); return new WaitingConjunctFuture(futures); }
[ "public", "static", "ConjunctFuture", "<", "Void", ">", "waitForAll", "(", "Collection", "<", "?", "extends", "CompletableFuture", "<", "?", ">", ">", "futures", ")", "{", "checkNotNull", "(", "futures", ",", "\"futures\"", ")", ";", "return", "new", "Waitin...
Creates a future that is complete once all of the given futures have completed. The future fails (completes exceptionally) once one of the given futures fails. <p>The ConjunctFuture gives access to how many Futures have already completed successfully, via {@link ConjunctFuture#getNumFuturesCompleted()}. @param futures The futures to wait on. No null entries are allowed. @return The WaitingFuture that completes once all given futures are complete (or one fails).
[ "Creates", "a", "future", "that", "is", "complete", "once", "all", "of", "the", "given", "futures", "have", "completed", ".", "The", "future", "fails", "(", "completes", "exceptionally", ")", "once", "one", "of", "the", "given", "futures", "fails", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/concurrent/FutureUtils.java#L528-L532
train
Waits all the given futures to complete.
[ 30522, 2270, 10763, 9530, 19792, 6593, 11263, 11244, 1026, 11675, 1028, 3524, 29278, 8095, 1006, 3074, 1026, 1029, 8908, 4012, 10814, 10880, 11263, 11244, 1026, 1029, 1028, 1028, 17795, 1007, 1063, 4638, 17048, 11231, 3363, 1006, 17795, 1010,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-optimizer/src/main/java/org/apache/flink/optimizer/dag/BulkPartialSolutionNode.java
BulkPartialSolutionNode.setCandidateProperties
public void setCandidateProperties(GlobalProperties gProps, LocalProperties lProps, Channel initialInput) { if (this.cachedPlans != null) { throw new IllegalStateException(); } else { this.cachedPlans = Collections.<PlanNode>singletonList(new BulkPartialSolutionPlanNode(this, "PartialSolution ("+this.getOperator().getName()+")", gProps, lProps, initialInput)); } }
java
public void setCandidateProperties(GlobalProperties gProps, LocalProperties lProps, Channel initialInput) { if (this.cachedPlans != null) { throw new IllegalStateException(); } else { this.cachedPlans = Collections.<PlanNode>singletonList(new BulkPartialSolutionPlanNode(this, "PartialSolution ("+this.getOperator().getName()+")", gProps, lProps, initialInput)); } }
[ "public", "void", "setCandidateProperties", "(", "GlobalProperties", "gProps", ",", "LocalProperties", "lProps", ",", "Channel", "initialInput", ")", "{", "if", "(", "this", ".", "cachedPlans", "!=", "null", ")", "{", "throw", "new", "IllegalStateException", "(", ...
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-optimizer/src/main/java/org/apache/flink/optimizer/dag/BulkPartialSolutionNode.java#L47-L54
train
Sets the candidate properties of this partial solution.
[ 30522, 2270, 11675, 2275, 9336, 4305, 13701, 21572, 4842, 7368, 1006, 3795, 21572, 4842, 7368, 14246, 18981, 2015, 1010, 2334, 21572, 4842, 7368, 6948, 18981, 2015, 1010, 3149, 3988, 2378, 18780, 1007, 1063, 2065, 1006, 2023, 1012, 17053, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/CharsetUtil.java
CharsetUtil.encoder
public static CharsetEncoder encoder(Charset charset, CodingErrorAction codingErrorAction) { return encoder(charset, codingErrorAction, codingErrorAction); }
java
public static CharsetEncoder encoder(Charset charset, CodingErrorAction codingErrorAction) { return encoder(charset, codingErrorAction, codingErrorAction); }
[ "public", "static", "CharsetEncoder", "encoder", "(", "Charset", "charset", ",", "CodingErrorAction", "codingErrorAction", ")", "{", "return", "encoder", "(", "charset", ",", "codingErrorAction", ",", "codingErrorAction", ")", ";", "}" ]
Returns a new {@link CharsetEncoder} for the {@link Charset} with the specified error action. @param charset The specified charset @param codingErrorAction The encoder's action for malformed-input and unmappable-character errors @return The encoder for the specified {@code charset}
[ "Returns", "a", "new", "{", "@link", "CharsetEncoder", "}", "for", "the", "{", "@link", "Charset", "}", "with", "the", "specified", "error", "action", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/CharsetUtil.java#L103-L105
train
Create a charset encoder for the given charset.
[ 30522, 2270, 10763, 25869, 13462, 2368, 16044, 2099, 4372, 16044, 2099, 1006, 25869, 13462, 25869, 13462, 1010, 16861, 2121, 29165, 18908, 3258, 16861, 2121, 29165, 18908, 3258, 1007, 1063, 2709, 4372, 16044, 2099, 1006, 25869, 13462, 1010, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...