repo
stringclasses
11 values
path
stringlengths
41
234
func_name
stringlengths
5
78
original_string
stringlengths
71
14.1k
language
stringclasses
1 value
code
stringlengths
71
14.1k
code_tokens
listlengths
22
2.65k
docstring
stringlengths
2
5.35k
docstring_tokens
listlengths
1
369
sha
stringclasses
11 values
url
stringlengths
129
339
partition
stringclasses
1 value
summary
stringlengths
7
175
input_ids
listlengths
502
502
token_type_ids
listlengths
502
502
attention_mask
listlengths
502
502
labels
listlengths
502
502
looly/hutool
hutool-core/src/main/java/cn/hutool/core/date/format/FastDatePrinter.java
FastDatePrinter.parsePattern
protected List<Rule> parsePattern() { final DateFormatSymbols symbols = new DateFormatSymbols(locale); final List<Rule> rules = new ArrayList<>(); final String[] ERAs = symbols.getEras(); final String[] months = symbols.getMonths(); final String[] shortMonths = symbols.getShortMonths(); final String[] weekdays = symbols.getWeekdays(); final String[] shortWeekdays = symbols.getShortWeekdays(); final String[] AmPmStrings = symbols.getAmPmStrings(); final int length = pattern.length(); final int[] indexRef = new int[1]; for (int i = 0; i < length; i++) { indexRef[0] = i; final String token = parseToken(pattern, indexRef); i = indexRef[0]; final int tokenLen = token.length(); if (tokenLen == 0) { break; } Rule rule; final char c = token.charAt(0); switch (c) { case 'G': // era designator (text) rule = new TextField(Calendar.ERA, ERAs); break; case 'y': // year (number) case 'Y': // week year if (tokenLen == 2) { rule = TwoDigitYearField.INSTANCE; } else { rule = selectNumberRule(Calendar.YEAR, tokenLen < 4 ? 
4 : tokenLen); } if (c == 'Y') { rule = new WeekYear((NumberRule) rule); } break; case 'M': // month in year (text and number) if (tokenLen >= 4) { rule = new TextField(Calendar.MONTH, months); } else if (tokenLen == 3) { rule = new TextField(Calendar.MONTH, shortMonths); } else if (tokenLen == 2) { rule = TwoDigitMonthField.INSTANCE; } else { rule = UnpaddedMonthField.INSTANCE; } break; case 'd': // day in month (number) rule = selectNumberRule(Calendar.DAY_OF_MONTH, tokenLen); break; case 'h': // hour in am/pm (number, 1..12) rule = new TwelveHourField(selectNumberRule(Calendar.HOUR, tokenLen)); break; case 'H': // hour in day (number, 0..23) rule = selectNumberRule(Calendar.HOUR_OF_DAY, tokenLen); break; case 'm': // minute in hour (number) rule = selectNumberRule(Calendar.MINUTE, tokenLen); break; case 's': // second in minute (number) rule = selectNumberRule(Calendar.SECOND, tokenLen); break; case 'S': // millisecond (number) rule = selectNumberRule(Calendar.MILLISECOND, tokenLen); break; case 'E': // day in week (text) rule = new TextField(Calendar.DAY_OF_WEEK, tokenLen < 4 ? 
shortWeekdays : weekdays); break; case 'u': // day in week (number) rule = new DayInWeekField(selectNumberRule(Calendar.DAY_OF_WEEK, tokenLen)); break; case 'D': // day in year (number) rule = selectNumberRule(Calendar.DAY_OF_YEAR, tokenLen); break; case 'F': // day of week in month (number) rule = selectNumberRule(Calendar.DAY_OF_WEEK_IN_MONTH, tokenLen); break; case 'w': // week in year (number) rule = selectNumberRule(Calendar.WEEK_OF_YEAR, tokenLen); break; case 'W': // week in month (number) rule = selectNumberRule(Calendar.WEEK_OF_MONTH, tokenLen); break; case 'a': // am/pm marker (text) rule = new TextField(Calendar.AM_PM, AmPmStrings); break; case 'k': // hour in day (1..24) rule = new TwentyFourHourField(selectNumberRule(Calendar.HOUR_OF_DAY, tokenLen)); break; case 'K': // hour in am/pm (0..11) rule = selectNumberRule(Calendar.HOUR, tokenLen); break; case 'X': // ISO 8601 rule = Iso8601_Rule.getRule(tokenLen); break; case 'z': // time zone (text) if (tokenLen >= 4) { rule = new TimeZoneNameRule(timeZone, locale, TimeZone.LONG); } else { rule = new TimeZoneNameRule(timeZone, locale, TimeZone.SHORT); } break; case 'Z': // time zone (value) if (tokenLen == 1) { rule = TimeZoneNumberRule.INSTANCE_NO_COLON; } else if (tokenLen == 2) { rule = Iso8601_Rule.ISO8601_HOURS_COLON_MINUTES; } else { rule = TimeZoneNumberRule.INSTANCE_COLON; } break; case '\'': // literal text final String sub = token.substring(1); if (sub.length() == 1) { rule = new CharacterLiteral(sub.charAt(0)); } else { rule = new StringLiteral(sub); } break; default: throw new IllegalArgumentException("Illegal pattern component: " + token); } rules.add(rule); } return rules; }
java
protected List<Rule> parsePattern() { final DateFormatSymbols symbols = new DateFormatSymbols(locale); final List<Rule> rules = new ArrayList<>(); final String[] ERAs = symbols.getEras(); final String[] months = symbols.getMonths(); final String[] shortMonths = symbols.getShortMonths(); final String[] weekdays = symbols.getWeekdays(); final String[] shortWeekdays = symbols.getShortWeekdays(); final String[] AmPmStrings = symbols.getAmPmStrings(); final int length = pattern.length(); final int[] indexRef = new int[1]; for (int i = 0; i < length; i++) { indexRef[0] = i; final String token = parseToken(pattern, indexRef); i = indexRef[0]; final int tokenLen = token.length(); if (tokenLen == 0) { break; } Rule rule; final char c = token.charAt(0); switch (c) { case 'G': // era designator (text) rule = new TextField(Calendar.ERA, ERAs); break; case 'y': // year (number) case 'Y': // week year if (tokenLen == 2) { rule = TwoDigitYearField.INSTANCE; } else { rule = selectNumberRule(Calendar.YEAR, tokenLen < 4 ? 
4 : tokenLen); } if (c == 'Y') { rule = new WeekYear((NumberRule) rule); } break; case 'M': // month in year (text and number) if (tokenLen >= 4) { rule = new TextField(Calendar.MONTH, months); } else if (tokenLen == 3) { rule = new TextField(Calendar.MONTH, shortMonths); } else if (tokenLen == 2) { rule = TwoDigitMonthField.INSTANCE; } else { rule = UnpaddedMonthField.INSTANCE; } break; case 'd': // day in month (number) rule = selectNumberRule(Calendar.DAY_OF_MONTH, tokenLen); break; case 'h': // hour in am/pm (number, 1..12) rule = new TwelveHourField(selectNumberRule(Calendar.HOUR, tokenLen)); break; case 'H': // hour in day (number, 0..23) rule = selectNumberRule(Calendar.HOUR_OF_DAY, tokenLen); break; case 'm': // minute in hour (number) rule = selectNumberRule(Calendar.MINUTE, tokenLen); break; case 's': // second in minute (number) rule = selectNumberRule(Calendar.SECOND, tokenLen); break; case 'S': // millisecond (number) rule = selectNumberRule(Calendar.MILLISECOND, tokenLen); break; case 'E': // day in week (text) rule = new TextField(Calendar.DAY_OF_WEEK, tokenLen < 4 ? 
shortWeekdays : weekdays); break; case 'u': // day in week (number) rule = new DayInWeekField(selectNumberRule(Calendar.DAY_OF_WEEK, tokenLen)); break; case 'D': // day in year (number) rule = selectNumberRule(Calendar.DAY_OF_YEAR, tokenLen); break; case 'F': // day of week in month (number) rule = selectNumberRule(Calendar.DAY_OF_WEEK_IN_MONTH, tokenLen); break; case 'w': // week in year (number) rule = selectNumberRule(Calendar.WEEK_OF_YEAR, tokenLen); break; case 'W': // week in month (number) rule = selectNumberRule(Calendar.WEEK_OF_MONTH, tokenLen); break; case 'a': // am/pm marker (text) rule = new TextField(Calendar.AM_PM, AmPmStrings); break; case 'k': // hour in day (1..24) rule = new TwentyFourHourField(selectNumberRule(Calendar.HOUR_OF_DAY, tokenLen)); break; case 'K': // hour in am/pm (0..11) rule = selectNumberRule(Calendar.HOUR, tokenLen); break; case 'X': // ISO 8601 rule = Iso8601_Rule.getRule(tokenLen); break; case 'z': // time zone (text) if (tokenLen >= 4) { rule = new TimeZoneNameRule(timeZone, locale, TimeZone.LONG); } else { rule = new TimeZoneNameRule(timeZone, locale, TimeZone.SHORT); } break; case 'Z': // time zone (value) if (tokenLen == 1) { rule = TimeZoneNumberRule.INSTANCE_NO_COLON; } else if (tokenLen == 2) { rule = Iso8601_Rule.ISO8601_HOURS_COLON_MINUTES; } else { rule = TimeZoneNumberRule.INSTANCE_COLON; } break; case '\'': // literal text final String sub = token.substring(1); if (sub.length() == 1) { rule = new CharacterLiteral(sub.charAt(0)); } else { rule = new StringLiteral(sub); } break; default: throw new IllegalArgumentException("Illegal pattern component: " + token); } rules.add(rule); } return rules; }
[ "protected", "List", "<", "Rule", ">", "parsePattern", "(", ")", "{", "final", "DateFormatSymbols", "symbols", "=", "new", "DateFormatSymbols", "(", "locale", ")", ";", "final", "List", "<", "Rule", ">", "rules", "=", "new", "ArrayList", "<>", "(", ")", ...
<p> Returns a list of Rules given a pattern. </p> @return a {@code List} of Rule objects @throws IllegalArgumentException if pattern is invalid
[ "<p", ">", "Returns", "a", "list", "of", "Rules", "given", "a", "pattern", ".", "<", "/", "p", ">" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/date/format/FastDatePrinter.java#L71-L204
train
Parses the pattern.
[ 30522, 5123, 2862, 1026, 3627, 1028, 11968, 3366, 4502, 12079, 2078, 1006, 1007, 1063, 2345, 3058, 14192, 11149, 24335, 14956, 2015, 9255, 1027, 2047, 3058, 14192, 11149, 24335, 14956, 2015, 1006, 2334, 2063, 1007, 1025, 2345, 2862, 1026, 3...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/io/watch/watchers/DelayWatcher.java
DelayWatcher.startHandleModifyThread
private void startHandleModifyThread(final WatchEvent<?> event, final Path currentPath) { ThreadUtil.execute(new Runnable(){ @Override public void run() { ThreadUtil.sleep(delay); eventSet.remove(Paths.get(currentPath.toString(), event.context().toString())); watcher.onModify(event, currentPath); } }); }
java
private void startHandleModifyThread(final WatchEvent<?> event, final Path currentPath) { ThreadUtil.execute(new Runnable(){ @Override public void run() { ThreadUtil.sleep(delay); eventSet.remove(Paths.get(currentPath.toString(), event.context().toString())); watcher.onModify(event, currentPath); } }); }
[ "private", "void", "startHandleModifyThread", "(", "final", "WatchEvent", "<", "?", ">", "event", ",", "final", "Path", "currentPath", ")", "{", "ThreadUtil", ".", "execute", "(", "new", "Runnable", "(", ")", "{", "@", "Override", "public", "void", "run", ...
开启处理线程 @param event 事件 @param currentPath 事件发生的当前Path路径
[ "开启处理线程" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/watch/watchers/DelayWatcher.java#L97-L106
train
Start handle modify thread.
[ 30522, 2797, 11675, 2707, 11774, 16930, 7716, 8757, 2705, 16416, 2094, 1006, 2345, 3422, 18697, 3372, 1026, 1029, 1028, 2724, 1010, 2345, 4130, 2783, 15069, 1007, 1063, 11689, 21823, 2140, 1012, 15389, 1006, 2047, 2448, 22966, 1006, 1007, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/hashtable/BinaryHashTable.java
BinaryHashTable.endBuild
public void endBuild() throws IOException { // finalize the partitions int buildWriteBuffers = 0; for (BinaryHashPartition p : this.partitionsBeingBuilt) { buildWriteBuffers += p.finalizeBuildPhase(this.ioManager, this.currentEnumerator); } buildSpillRetBufferNumbers += buildWriteBuffers; // the first prober is the probe-side input, but the input is null at beginning this.probeIterator = new ProbeIterator(this.binaryProbeSideSerializer.createInstance()); // the bucket iterator can remain constant over the time this.bucketIterator = new LookupBucketIterator(this); }
java
public void endBuild() throws IOException { // finalize the partitions int buildWriteBuffers = 0; for (BinaryHashPartition p : this.partitionsBeingBuilt) { buildWriteBuffers += p.finalizeBuildPhase(this.ioManager, this.currentEnumerator); } buildSpillRetBufferNumbers += buildWriteBuffers; // the first prober is the probe-side input, but the input is null at beginning this.probeIterator = new ProbeIterator(this.binaryProbeSideSerializer.createInstance()); // the bucket iterator can remain constant over the time this.bucketIterator = new LookupBucketIterator(this); }
[ "public", "void", "endBuild", "(", ")", "throws", "IOException", "{", "// finalize the partitions", "int", "buildWriteBuffers", "=", "0", ";", "for", "(", "BinaryHashPartition", "p", ":", "this", ".", "partitionsBeingBuilt", ")", "{", "buildWriteBuffers", "+=", "p...
End build phase.
[ "End", "build", "phase", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/hashtable/BinaryHashTable.java#L252-L265
train
This method is called when the build phase of the binary hash is finished.
[ 30522, 2270, 11675, 2203, 8569, 4014, 2094, 1006, 1007, 11618, 22834, 10288, 24422, 1063, 1013, 1013, 2345, 4697, 1996, 13571, 2015, 20014, 3857, 26373, 8569, 12494, 2015, 1027, 1014, 1025, 2005, 1006, 12441, 14949, 22269, 8445, 22753, 1052, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-json/src/main/java/cn/hutool/json/JSONObject.java
JSONObject.toJSONArray
public JSONArray toJSONArray(Collection<String> names) throws JSONException { if (CollectionUtil.isEmpty(names)) { return null; } final JSONArray ja = new JSONArray(); Object value; for (String name : names) { value = this.get(name); if (null != value) { ja.put(value); } } return ja; }
java
public JSONArray toJSONArray(Collection<String> names) throws JSONException { if (CollectionUtil.isEmpty(names)) { return null; } final JSONArray ja = new JSONArray(); Object value; for (String name : names) { value = this.get(name); if (null != value) { ja.put(value); } } return ja; }
[ "public", "JSONArray", "toJSONArray", "(", "Collection", "<", "String", ">", "names", ")", "throws", "JSONException", "{", "if", "(", "CollectionUtil", ".", "isEmpty", "(", "names", ")", ")", "{", "return", "null", ";", "}", "final", "JSONArray", "ja", "="...
将指定KEY列表的值组成新的JSONArray @param names KEY列表 @return A JSONArray of values. @throws JSONException If any of the values are non-finite numbers.
[ "将指定KEY列表的值组成新的JSONArray" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-json/src/main/java/cn/hutool/json/JSONObject.java#L262-L275
train
Converts this JSONObject to a JSONArray.
[ 30522, 2270, 1046, 3385, 2906, 9447, 2000, 22578, 7856, 11335, 2100, 1006, 3074, 1026, 5164, 1028, 3415, 1007, 11618, 1046, 3385, 10288, 24422, 1063, 2065, 1006, 3074, 21823, 2140, 1012, 2003, 6633, 13876, 2100, 1006, 3415, 1007, 1007, 1063...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-poi/src/main/java/cn/hutool/poi/excel/WorkbookUtil.java
WorkbookUtil.toSXSSFBook
private static SXSSFWorkbook toSXSSFBook(Workbook book) { if(book instanceof SXSSFWorkbook) { return (SXSSFWorkbook) book; } if(book instanceof XSSFWorkbook) { return new SXSSFWorkbook((XSSFWorkbook) book); } throw new POIException("The input is not a [xlsx] format."); }
java
private static SXSSFWorkbook toSXSSFBook(Workbook book) { if(book instanceof SXSSFWorkbook) { return (SXSSFWorkbook) book; } if(book instanceof XSSFWorkbook) { return new SXSSFWorkbook((XSSFWorkbook) book); } throw new POIException("The input is not a [xlsx] format."); }
[ "private", "static", "SXSSFWorkbook", "toSXSSFBook", "(", "Workbook", "book", ")", "{", "if", "(", "book", "instanceof", "SXSSFWorkbook", ")", "{", "return", "(", "SXSSFWorkbook", ")", "book", ";", "}", "if", "(", "book", "instanceof", "XSSFWorkbook", ")", "...
将普通工作簿转换为SXSSFWorkbook @param book 工作簿 @return SXSSFWorkbook @since 4.1.13
[ "将普通工作簿转换为SXSSFWorkbook" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-poi/src/main/java/cn/hutool/poi/excel/WorkbookUtil.java#L251-L259
train
Converts a Excel workbook to an SXSSFWorkbook.
[ 30522, 2797, 10763, 1055, 2595, 4757, 2546, 6198, 8654, 2000, 2015, 2595, 4757, 26337, 14659, 1006, 2147, 8654, 2338, 1007, 1063, 2065, 1006, 2338, 6013, 11253, 1055, 2595, 4757, 2546, 6198, 8654, 1007, 1063, 2709, 1006, 1055, 2595, 4757, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/net/SSLUtils.java
SSLUtils.createRestClientSSLEngineFactory
public static SSLHandlerFactory createRestClientSSLEngineFactory(final Configuration config) throws Exception { SSLContext sslContext = createRestClientSSLContext(config); if (sslContext == null) { throw new IllegalConfigurationException("SSL is not enabled for REST endpoints."); } return new SSLHandlerFactory( sslContext, getEnabledProtocols(config), getEnabledCipherSuites(config), true, isRestSSLAuthenticationEnabled(config), -1, -1); }
java
public static SSLHandlerFactory createRestClientSSLEngineFactory(final Configuration config) throws Exception { SSLContext sslContext = createRestClientSSLContext(config); if (sslContext == null) { throw new IllegalConfigurationException("SSL is not enabled for REST endpoints."); } return new SSLHandlerFactory( sslContext, getEnabledProtocols(config), getEnabledCipherSuites(config), true, isRestSSLAuthenticationEnabled(config), -1, -1); }
[ "public", "static", "SSLHandlerFactory", "createRestClientSSLEngineFactory", "(", "final", "Configuration", "config", ")", "throws", "Exception", "{", "SSLContext", "sslContext", "=", "createRestClientSSLContext", "(", "config", ")", ";", "if", "(", "sslContext", "==", ...
Creates a {@link SSLHandlerFactory} to be used by the REST Clients. @param config The application configuration.
[ "Creates", "a", "{", "@link", "SSLHandlerFactory", "}", "to", "be", "used", "by", "the", "REST", "Clients", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/net/SSLUtils.java#L174-L188
train
Create a REST SSL engine factory.
[ 30522, 2270, 10763, 7020, 2140, 11774, 3917, 21450, 3443, 28533, 20464, 11638, 4757, 7770, 11528, 12879, 18908, 10253, 1006, 2345, 9563, 9530, 8873, 2290, 1007, 11618, 6453, 1063, 7020, 22499, 10111, 18413, 7020, 22499, 10111, 18413, 1027, 34...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionVertex.java
ExecutionVertex.getPreferredLocations
public Collection<CompletableFuture<TaskManagerLocation>> getPreferredLocations() { Collection<CompletableFuture<TaskManagerLocation>> basedOnState = getPreferredLocationsBasedOnState(); return basedOnState != null ? basedOnState : getPreferredLocationsBasedOnInputs(); }
java
public Collection<CompletableFuture<TaskManagerLocation>> getPreferredLocations() { Collection<CompletableFuture<TaskManagerLocation>> basedOnState = getPreferredLocationsBasedOnState(); return basedOnState != null ? basedOnState : getPreferredLocationsBasedOnInputs(); }
[ "public", "Collection", "<", "CompletableFuture", "<", "TaskManagerLocation", ">", ">", "getPreferredLocations", "(", ")", "{", "Collection", "<", "CompletableFuture", "<", "TaskManagerLocation", ">>", "basedOnState", "=", "getPreferredLocationsBasedOnState", "(", ")", ...
Gets the overall preferred execution location for this vertex's current execution. The preference is determined as follows: <ol> <li>If the task execution has state to load (from a checkpoint), then the location preference is the location of the previous execution (if there is a previous execution attempt). <li>If the task execution has no state or no previous location, then the location preference is based on the task's inputs. </ol> <p>These rules should result in the following behavior: <ul> <li>Stateless tasks are always scheduled based on co-location with inputs. <li>Stateful tasks are on their initial attempt executed based on co-location with inputs. <li>Repeated executions of stateful tasks try to co-locate the execution with its state. </ul> @see #getPreferredLocationsBasedOnState() @see #getPreferredLocationsBasedOnInputs() @return The preferred execution locations for the execution attempt.
[ "Gets", "the", "overall", "preferred", "execution", "location", "for", "this", "vertex", "s", "current", "execution", ".", "The", "preference", "is", "determined", "as", "follows", ":" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionVertex.java#L492-L495
train
Get the list of locations that should be used for the task manager.
[ 30522, 2270, 3074, 1026, 4012, 10814, 10880, 11263, 11244, 1026, 4708, 24805, 4590, 4135, 10719, 1028, 1028, 2131, 28139, 7512, 5596, 4135, 10719, 2015, 1006, 1007, 1063, 3074, 1026, 4012, 10814, 10880, 11263, 11244, 1026, 4708, 24805, 4590, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/incubator-shardingsphere
sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/old/parser/clause/expression/BasicExpressionParser.java
BasicExpressionParser.parseExpression
private SQLExpression parseExpression(final SQLStatement sqlStatement) { String literals = lexerEngine.getCurrentToken().getLiterals(); final int beginPosition = lexerEngine.getCurrentToken().getEndPosition() - literals.length(); final SQLExpression expression = getExpression(literals, sqlStatement); lexerEngine.nextToken(); if (lexerEngine.skipIfEqual(Symbol.DOT)) { String property = lexerEngine.getCurrentToken().getLiterals(); lexerEngine.nextToken(); return skipIfCompositeExpression(sqlStatement) ? new SQLIgnoreExpression(lexerEngine.getInput().substring(beginPosition, lexerEngine.getCurrentToken().getEndPosition())) : new SQLPropertyExpression(new SQLIdentifierExpression(literals), property); } if (lexerEngine.equalAny(Symbol.LEFT_PAREN)) { lexerEngine.skipParentheses(sqlStatement); skipRestCompositeExpression(sqlStatement); return new SQLIgnoreExpression(lexerEngine.getInput().substring(beginPosition, lexerEngine.getCurrentToken().getEndPosition() - lexerEngine.getCurrentToken().getLiterals().length()).trim()); } return skipIfCompositeExpression(sqlStatement) ? new SQLIgnoreExpression(lexerEngine.getInput().substring(beginPosition, lexerEngine.getCurrentToken().getEndPosition())) : expression; }
java
private SQLExpression parseExpression(final SQLStatement sqlStatement) { String literals = lexerEngine.getCurrentToken().getLiterals(); final int beginPosition = lexerEngine.getCurrentToken().getEndPosition() - literals.length(); final SQLExpression expression = getExpression(literals, sqlStatement); lexerEngine.nextToken(); if (lexerEngine.skipIfEqual(Symbol.DOT)) { String property = lexerEngine.getCurrentToken().getLiterals(); lexerEngine.nextToken(); return skipIfCompositeExpression(sqlStatement) ? new SQLIgnoreExpression(lexerEngine.getInput().substring(beginPosition, lexerEngine.getCurrentToken().getEndPosition())) : new SQLPropertyExpression(new SQLIdentifierExpression(literals), property); } if (lexerEngine.equalAny(Symbol.LEFT_PAREN)) { lexerEngine.skipParentheses(sqlStatement); skipRestCompositeExpression(sqlStatement); return new SQLIgnoreExpression(lexerEngine.getInput().substring(beginPosition, lexerEngine.getCurrentToken().getEndPosition() - lexerEngine.getCurrentToken().getLiterals().length()).trim()); } return skipIfCompositeExpression(sqlStatement) ? new SQLIgnoreExpression(lexerEngine.getInput().substring(beginPosition, lexerEngine.getCurrentToken().getEndPosition())) : expression; }
[ "private", "SQLExpression", "parseExpression", "(", "final", "SQLStatement", "sqlStatement", ")", "{", "String", "literals", "=", "lexerEngine", ".", "getCurrentToken", "(", ")", ".", "getLiterals", "(", ")", ";", "final", "int", "beginPosition", "=", "lexerEngine...
TODO complete more expression parse
[ "TODO", "complete", "more", "expression", "parse" ]
f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d
https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/old/parser/clause/expression/BasicExpressionParser.java#L64-L84
train
Parse an SQL expression.
[ 30522, 2797, 29296, 10288, 20110, 3258, 11968, 19763, 2595, 20110, 3258, 1006, 2345, 29296, 9153, 18532, 4765, 29296, 9153, 18532, 4765, 1007, 1063, 5164, 18204, 2015, 1027, 17244, 7869, 3070, 3170, 1012, 2131, 10841, 14343, 3372, 18715, 2368...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
SeleniumHQ/selenium
java/client/src/org/openqa/selenium/opera/OperaOptions.java
OperaOptions.addExtensions
public OperaOptions addExtensions(List<File> paths) { for (File path : paths) { checkNotNull(path); checkArgument(path.exists(), "%s does not exist", path.getAbsolutePath()); checkArgument(!path.isDirectory(), "%s is a directory", path.getAbsolutePath()); } extensionFiles.addAll(paths); return this; }
java
public OperaOptions addExtensions(List<File> paths) { for (File path : paths) { checkNotNull(path); checkArgument(path.exists(), "%s does not exist", path.getAbsolutePath()); checkArgument(!path.isDirectory(), "%s is a directory", path.getAbsolutePath()); } extensionFiles.addAll(paths); return this; }
[ "public", "OperaOptions", "addExtensions", "(", "List", "<", "File", ">", "paths", ")", "{", "for", "(", "File", "path", ":", "paths", ")", "{", "checkNotNull", "(", "path", ")", ";", "checkArgument", "(", "path", ".", "exists", "(", ")", ",", "\"%s do...
Adds a new Opera extension to install on browser startup. Each path should specify a packed Opera extension (CRX file). @param paths Paths to the extensions to install.
[ "Adds", "a", "new", "Opera", "extension", "to", "install", "on", "browser", "startup", ".", "Each", "path", "should", "specify", "a", "packed", "Opera", "extension", "(", "CRX", "file", ")", "." ]
7af172729f17b20269c8ca4ea6f788db48616535
https://github.com/SeleniumHQ/selenium/blob/7af172729f17b20269c8ca4ea6f788db48616535/java/client/src/org/openqa/selenium/opera/OperaOptions.java#L152-L161
train
Adds the extensions to the Opera options.
[ 30522, 2270, 3850, 7361, 9285, 5587, 10288, 29048, 2015, 1006, 2862, 1026, 5371, 1028, 10425, 1007, 1063, 2005, 1006, 5371, 4130, 1024, 10425, 1007, 1063, 4638, 17048, 11231, 3363, 30524, 2890, 16761, 2100, 1006, 1007, 1010, 1000, 1003, 105...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-extra/src/main/java/cn/hutool/extra/template/engine/velocity/VelocityUtil.java
VelocityUtil.getContent
public static String getContent(VelocityEngine ve, String templateFileName, VelocityContext context) { final StringWriter writer = new StringWriter(); // StringWriter不需要关闭 toWriter(ve, templateFileName, context, writer); return writer.toString(); }
java
public static String getContent(VelocityEngine ve, String templateFileName, VelocityContext context) { final StringWriter writer = new StringWriter(); // StringWriter不需要关闭 toWriter(ve, templateFileName, context, writer); return writer.toString(); }
[ "public", "static", "String", "getContent", "(", "VelocityEngine", "ve", ",", "String", "templateFileName", ",", "VelocityContext", "context", ")", "{", "final", "StringWriter", "writer", "=", "new", "StringWriter", "(", ")", ";", "// StringWriter不需要关闭\r", "toWriter...
获得指定模板填充后的内容 @param ve 模板引擎 @param templateFileName 模板名称 @param context 上下文(变量值的容器) @return 模板和内容匹配后的内容
[ "获得指定模板填充后的内容" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-extra/src/main/java/cn/hutool/extra/template/engine/velocity/VelocityUtil.java#L114-L118
train
Gets the content of a tag.
[ 30522, 2270, 10763, 5164, 2131, 8663, 6528, 2102, 1006, 10146, 13159, 3170, 2310, 1010, 5164, 23561, 8873, 20844, 4168, 1010, 10146, 8663, 18209, 6123, 1007, 1063, 2345, 5164, 15994, 3213, 1027, 2047, 5164, 15994, 1006, 1007, 1025, 1013, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
networknt/light-4j
dump/src/main/java/com/networknt/dump/RootDumper.java
RootDumper.dumpResponse
public void dumpResponse(Map<String, Object> result) { if(!dumpConfig.isResponseEnabled()) { return; } Map<String, Object> responseResult = new LinkedHashMap<>(); for(IResponseDumpable dumper: dumperFactory.createResponseDumpers(dumpConfig, exchange)) { if (dumper.isApplicableForResponse()) { dumper.dumpResponse(responseResult); } } result.put(DumpConstants.RESPONSE, responseResult); }
java
public void dumpResponse(Map<String, Object> result) { if(!dumpConfig.isResponseEnabled()) { return; } Map<String, Object> responseResult = new LinkedHashMap<>(); for(IResponseDumpable dumper: dumperFactory.createResponseDumpers(dumpConfig, exchange)) { if (dumper.isApplicableForResponse()) { dumper.dumpResponse(responseResult); } } result.put(DumpConstants.RESPONSE, responseResult); }
[ "public", "void", "dumpResponse", "(", "Map", "<", "String", ",", "Object", ">", "result", ")", "{", "if", "(", "!", "dumpConfig", ".", "isResponseEnabled", "(", ")", ")", "{", "return", ";", "}", "Map", "<", "String", ",", "Object", ">", "responseResu...
create dumpers that can dump http response info, and put http response info into Map<String, Object> result @param result a Map<String, Object> to put http response info to
[ "create", "dumpers", "that", "can", "dump", "http", "response", "info", "and", "put", "http", "response", "info", "into", "Map<String", "Object", ">", "result" ]
2a60257c60663684c8f6dc8b5ea3cf184e534db6
https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/dump/src/main/java/com/networknt/dump/RootDumper.java#L59-L69
train
Dump the response.
[ 30522, 2270, 11675, 15653, 6072, 26029, 3366, 1006, 4949, 1026, 5164, 1010, 4874, 1028, 2765, 1007, 1063, 2065, 1006, 999, 15653, 8663, 8873, 2290, 1012, 2003, 6072, 26029, 19763, 22966, 2094, 1006, 1007, 1007, 1063, 2709, 1025, 1065, 4949,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/tokenizer/lexical/AbstractLexicalAnalyzer.java
AbstractLexicalAnalyzer.segment
public List<String> segment(final String sentence, final String normalized) { final List<String> wordList = new LinkedList<String>(); segment(sentence, normalized, wordList); return wordList; }
java
public List<String> segment(final String sentence, final String normalized) { final List<String> wordList = new LinkedList<String>(); segment(sentence, normalized, wordList); return wordList; }
[ "public", "List", "<", "String", ">", "segment", "(", "final", "String", "sentence", ",", "final", "String", "normalized", ")", "{", "final", "List", "<", "String", ">", "wordList", "=", "new", "LinkedList", "<", "String", ">", "(", ")", ";", "segment", ...
这个方法会查询用户词典 @param sentence @param normalized @return
[ "这个方法会查询用户词典" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/tokenizer/lexical/AbstractLexicalAnalyzer.java#L309-L314
train
segment a sentence into a list of words.
[ 30522, 2270, 2862, 1026, 5164, 1028, 6903, 1006, 2345, 5164, 6251, 1010, 2345, 5164, 3671, 3550, 1007, 1063, 2345, 2862, 1026, 5164, 1028, 2773, 9863, 1027, 2047, 5799, 9863, 1026, 5164, 1028, 1006, 1007, 1025, 6903, 1006, 6251, 1010, 367...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/FlinkKafkaProducerBase.java
FlinkKafkaProducerBase.invoke
@Override public void invoke(IN next, Context context) throws Exception { // propagate asynchronous errors checkErroneous(); byte[] serializedKey = schema.serializeKey(next); byte[] serializedValue = schema.serializeValue(next); String targetTopic = schema.getTargetTopic(next); if (targetTopic == null) { targetTopic = defaultTopicId; } int[] partitions = this.topicPartitionsMap.get(targetTopic); if (null == partitions) { partitions = getPartitionsByTopic(targetTopic, producer); this.topicPartitionsMap.put(targetTopic, partitions); } ProducerRecord<byte[], byte[]> record; if (flinkKafkaPartitioner == null) { record = new ProducerRecord<>(targetTopic, serializedKey, serializedValue); } else { record = new ProducerRecord<>( targetTopic, flinkKafkaPartitioner.partition(next, serializedKey, serializedValue, targetTopic, partitions), serializedKey, serializedValue); } if (flushOnCheckpoint) { synchronized (pendingRecordsLock) { pendingRecords++; } } producer.send(record, callback); }
java
@Override public void invoke(IN next, Context context) throws Exception { // propagate asynchronous errors checkErroneous(); byte[] serializedKey = schema.serializeKey(next); byte[] serializedValue = schema.serializeValue(next); String targetTopic = schema.getTargetTopic(next); if (targetTopic == null) { targetTopic = defaultTopicId; } int[] partitions = this.topicPartitionsMap.get(targetTopic); if (null == partitions) { partitions = getPartitionsByTopic(targetTopic, producer); this.topicPartitionsMap.put(targetTopic, partitions); } ProducerRecord<byte[], byte[]> record; if (flinkKafkaPartitioner == null) { record = new ProducerRecord<>(targetTopic, serializedKey, serializedValue); } else { record = new ProducerRecord<>( targetTopic, flinkKafkaPartitioner.partition(next, serializedKey, serializedValue, targetTopic, partitions), serializedKey, serializedValue); } if (flushOnCheckpoint) { synchronized (pendingRecordsLock) { pendingRecords++; } } producer.send(record, callback); }
[ "@", "Override", "public", "void", "invoke", "(", "IN", "next", ",", "Context", "context", ")", "throws", "Exception", "{", "// propagate asynchronous errors", "checkErroneous", "(", ")", ";", "byte", "[", "]", "serializedKey", "=", "schema", ".", "serializeKey"...
Called when new data arrives to the sink, and forwards it to Kafka. @param next The incoming data
[ "Called", "when", "new", "data", "arrives", "to", "the", "sink", "and", "forwards", "it", "to", "Kafka", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/FlinkKafkaProducerBase.java#L280-L314
train
This method is called by the producer when a message is received from the Kafka producer.
[ 30522, 1030, 2058, 15637, 2270, 11675, 1999, 6767, 3489, 1006, 1999, 2279, 1010, 6123, 6123, 1007, 11618, 6453, 1063, 1013, 1013, 17678, 16098, 2618, 2004, 6038, 2818, 4948, 3560, 10697, 4638, 2121, 20793, 3560, 1006, 1007, 1025, 24880, 103...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
Configuration.setValueInternal
<T> void setValueInternal(String key, T value) { if (key == null) { throw new NullPointerException("Key must not be null."); } if (value == null) { throw new NullPointerException("Value must not be null."); } synchronized (this.confData) { this.confData.put(key, value); } }
java
<T> void setValueInternal(String key, T value) { if (key == null) { throw new NullPointerException("Key must not be null."); } if (value == null) { throw new NullPointerException("Value must not be null."); } synchronized (this.confData) { this.confData.put(key, value); } }
[ "<", "T", ">", "void", "setValueInternal", "(", "String", "key", ",", "T", "value", ")", "{", "if", "(", "key", "==", "null", ")", "{", "throw", "new", "NullPointerException", "(", "\"Key must not be null.\"", ")", ";", "}", "if", "(", "value", "==", "...
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java#L789-L800
train
Sets the value for the given key.
[ 30522, 1026, 1056, 1028, 11675, 2275, 10175, 5657, 18447, 11795, 2389, 1006, 5164, 3145, 1010, 1056, 3643, 1007, 1063, 2065, 1006, 3145, 1027, 1027, 19701, 1007, 1063, 5466, 2047, 19701, 8400, 7869, 2595, 24422, 1006, 1000, 3145, 2442, 2025...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
SeleniumHQ/selenium
java/client/src/org/openqa/selenium/interactions/Actions.java
Actions.keyDown
public Actions keyDown(CharSequence key) { if (isBuildingActions()) { action.addAction(new KeyDownAction(jsonKeyboard, jsonMouse, asKeys(key))); } return addKeyAction(key, codePoint -> tick(defaultKeyboard.createKeyDown(codePoint))); }
java
public Actions keyDown(CharSequence key) { if (isBuildingActions()) { action.addAction(new KeyDownAction(jsonKeyboard, jsonMouse, asKeys(key))); } return addKeyAction(key, codePoint -> tick(defaultKeyboard.createKeyDown(codePoint))); }
[ "public", "Actions", "keyDown", "(", "CharSequence", "key", ")", "{", "if", "(", "isBuildingActions", "(", ")", ")", "{", "action", ".", "addAction", "(", "new", "KeyDownAction", "(", "jsonKeyboard", ",", "jsonMouse", ",", "asKeys", "(", "key", ")", ")", ...
Performs a modifier key press. Does not release the modifier key - subsequent interactions may assume it's kept pressed. Note that the modifier key is <b>never</b> released implicitly - either <i>keyUp(theKey)</i> or <i>sendKeys(Keys.NULL)</i> must be called to release the modifier. @param key Either {@link Keys#SHIFT}, {@link Keys#ALT} or {@link Keys#CONTROL}. If the provided key is none of those, {@link IllegalArgumentException} is thrown. @return A self reference.
[ "Performs", "a", "modifier", "key", "press", ".", "Does", "not", "release", "the", "modifier", "key", "-", "subsequent", "interactions", "may", "assume", "it", "s", "kept", "pressed", ".", "Note", "that", "the", "modifier", "key", "is", "<b", ">", "never<"...
7af172729f17b20269c8ca4ea6f788db48616535
https://github.com/SeleniumHQ/selenium/blob/7af172729f17b20269c8ca4ea6f788db48616535/java/client/src/org/openqa/selenium/interactions/Actions.java#L87-L92
train
Add a key down action.
[ 30522, 2270, 4506, 3145, 7698, 1006, 25869, 3366, 4226, 5897, 3145, 1007, 1063, 2065, 1006, 2003, 25820, 18908, 8496, 1006, 1007, 1007, 1063, 2895, 1012, 5587, 18908, 3258, 1006, 2047, 3145, 7698, 18908, 3258, 1006, 1046, 3385, 14839, 6277,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/core/fs/Path.java
Path.isAbsolute
public boolean isAbsolute() { final int start = hasWindowsDrive(uri.getPath(), true) ? 3 : 0; return uri.getPath().startsWith(SEPARATOR, start); }
java
public boolean isAbsolute() { final int start = hasWindowsDrive(uri.getPath(), true) ? 3 : 0; return uri.getPath().startsWith(SEPARATOR, start); }
[ "public", "boolean", "isAbsolute", "(", ")", "{", "final", "int", "start", "=", "hasWindowsDrive", "(", "uri", ".", "getPath", "(", ")", ",", "true", ")", "?", "3", ":", "0", ";", "return", "uri", ".", "getPath", "(", ")", ".", "startsWith", "(", "...
Checks if the directory of this path is absolute. @return <code>true</code> if the directory of this path is absolute, <code>false</code> otherwise
[ "Checks", "if", "the", "directory", "of", "this", "path", "is", "absolute", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/core/fs/Path.java#L306-L309
train
Checks if the path is absolute.
[ 30522, 2270, 22017, 20898, 18061, 5910, 4747, 10421, 1006, 1007, 1063, 2345, 20014, 2707, 1027, 2038, 11101, 15568, 23663, 1006, 24471, 2072, 1012, 2131, 15069, 1006, 1007, 1010, 2995, 1007, 1029, 1017, 1024, 1014, 1025, 2709, 24471, 2072, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/orm/jpa/DatabaseLookup.java
DatabaseLookup.getDatabase
public static Database getDatabase(DataSource dataSource) { if (dataSource == null) { return Database.DEFAULT; } try { String url = JdbcUtils.extractDatabaseMetaData(dataSource, "getURL"); DatabaseDriver driver = DatabaseDriver.fromJdbcUrl(url); Database database = LOOKUP.get(driver); if (database != null) { return database; } } catch (MetaDataAccessException ex) { logger.warn("Unable to determine jdbc url from datasource", ex); } return Database.DEFAULT; }
java
public static Database getDatabase(DataSource dataSource) { if (dataSource == null) { return Database.DEFAULT; } try { String url = JdbcUtils.extractDatabaseMetaData(dataSource, "getURL"); DatabaseDriver driver = DatabaseDriver.fromJdbcUrl(url); Database database = LOOKUP.get(driver); if (database != null) { return database; } } catch (MetaDataAccessException ex) { logger.warn("Unable to determine jdbc url from datasource", ex); } return Database.DEFAULT; }
[ "public", "static", "Database", "getDatabase", "(", "DataSource", "dataSource", ")", "{", "if", "(", "dataSource", "==", "null", ")", "{", "return", "Database", ".", "DEFAULT", ";", "}", "try", "{", "String", "url", "=", "JdbcUtils", ".", "extractDatabaseMet...
Return the most suitable {@link Database} for the given {@link DataSource}. @param dataSource the source {@link DataSource} @return the most suitable {@link Database}
[ "Return", "the", "most", "suitable", "{" ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/orm/jpa/DatabaseLookup.java#L68-L84
train
Get the database from the given datasource
[ 30522, 2270, 10763, 7809, 2131, 2850, 2696, 15058, 1006, 2951, 6499, 3126, 3401, 2951, 6499, 3126, 3401, 1007, 1063, 2065, 1006, 2951, 6499, 3126, 3401, 1027, 1027, 19701, 1007, 1063, 2709, 7809, 1012, 12398, 1025, 1065, 3046, 1063, 5164, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/api/common/typeutils/NestedSerializersSnapshotDelegate.java
NestedSerializersSnapshotDelegate.resolveCompatibilityWithNested
@Deprecated public <T> TypeSerializerSchemaCompatibility<T> resolveCompatibilityWithNested( TypeSerializerSchemaCompatibility<?> outerCompatibility, TypeSerializer<?>... newNestedSerializers) { checkArgument(newNestedSerializers.length == nestedSnapshots.length, "Different number of new serializers and existing serializer configuration snapshots"); // compatibility of the outer serializer's format if (outerCompatibility.isIncompatible()) { return TypeSerializerSchemaCompatibility.incompatible(); } // check nested serializers for compatibility boolean nestedSerializerRequiresMigration = false; for (int i = 0; i < nestedSnapshots.length; i++) { TypeSerializerSchemaCompatibility<?> compatibility = resolveCompatibility(newNestedSerializers[i], nestedSnapshots[i]); if (compatibility.isIncompatible()) { return TypeSerializerSchemaCompatibility.incompatible(); } if (compatibility.isCompatibleAfterMigration()) { nestedSerializerRequiresMigration = true; } } return (nestedSerializerRequiresMigration || !outerCompatibility.isCompatibleAsIs()) ? TypeSerializerSchemaCompatibility.compatibleAfterMigration() : TypeSerializerSchemaCompatibility.compatibleAsIs(); }
java
@Deprecated public <T> TypeSerializerSchemaCompatibility<T> resolveCompatibilityWithNested( TypeSerializerSchemaCompatibility<?> outerCompatibility, TypeSerializer<?>... newNestedSerializers) { checkArgument(newNestedSerializers.length == nestedSnapshots.length, "Different number of new serializers and existing serializer configuration snapshots"); // compatibility of the outer serializer's format if (outerCompatibility.isIncompatible()) { return TypeSerializerSchemaCompatibility.incompatible(); } // check nested serializers for compatibility boolean nestedSerializerRequiresMigration = false; for (int i = 0; i < nestedSnapshots.length; i++) { TypeSerializerSchemaCompatibility<?> compatibility = resolveCompatibility(newNestedSerializers[i], nestedSnapshots[i]); if (compatibility.isIncompatible()) { return TypeSerializerSchemaCompatibility.incompatible(); } if (compatibility.isCompatibleAfterMigration()) { nestedSerializerRequiresMigration = true; } } return (nestedSerializerRequiresMigration || !outerCompatibility.isCompatibleAsIs()) ? TypeSerializerSchemaCompatibility.compatibleAfterMigration() : TypeSerializerSchemaCompatibility.compatibleAsIs(); }
[ "@", "Deprecated", "public", "<", "T", ">", "TypeSerializerSchemaCompatibility", "<", "T", ">", "resolveCompatibilityWithNested", "(", "TypeSerializerSchemaCompatibility", "<", "?", ">", "outerCompatibility", ",", "TypeSerializer", "<", "?", ">", "...", "newNestedSerial...
Resolves the compatibility of the nested serializer snapshots with the nested serializers of the new outer serializer. @deprecated this no method will be removed in the future. Resolving compatibility for nested serializers is now handled by {@link CompositeTypeSerializerSnapshot}.
[ "Resolves", "the", "compatibility", "of", "the", "nested", "serializer", "snapshots", "with", "the", "nested", "serializers", "of", "the", "new", "outer", "serializer", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/common/typeutils/NestedSerializersSnapshotDelegate.java#L114-L144
train
Resolves compatibility with nested serializers.
[ 30522, 1030, 2139, 28139, 12921, 2270, 1026, 1056, 1028, 4127, 11610, 28863, 22842, 22911, 25377, 10450, 8553, 1026, 1056, 1028, 10663, 9006, 24952, 8553, 24415, 5267, 3064, 1006, 4127, 11610, 28863, 22842, 22911, 25377, 10450, 8553, 1026, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-python/src/main/java/org/apache/flink/python/api/PythonPlanBinder.java
PythonPlanBinder.main
public static void main(String[] args) throws Exception { Configuration globalConfig = GlobalConfiguration.loadConfiguration(); PythonPlanBinder binder = new PythonPlanBinder(globalConfig); try { binder.runPlan(args); } catch (Exception e) { System.out.println("Failed to run plan: " + e.getMessage()); LOG.error("Failed to run plan.", e); } }
java
public static void main(String[] args) throws Exception { Configuration globalConfig = GlobalConfiguration.loadConfiguration(); PythonPlanBinder binder = new PythonPlanBinder(globalConfig); try { binder.runPlan(args); } catch (Exception e) { System.out.println("Failed to run plan: " + e.getMessage()); LOG.error("Failed to run plan.", e); } }
[ "public", "static", "void", "main", "(", "String", "[", "]", "args", ")", "throws", "Exception", "{", "Configuration", "globalConfig", "=", "GlobalConfiguration", ".", "loadConfiguration", "(", ")", ";", "PythonPlanBinder", "binder", "=", "new", "PythonPlanBinder"...
Entry point for the execution of a python plan. @param args planPath[ package1[ packageX[ - parameter1[ parameterX]]]] @throws Exception
[ "Entry", "point", "for", "the", "execution", "of", "a", "python", "plan", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-python/src/main/java/org/apache/flink/python/api/PythonPlanBinder.java#L94-L103
train
Main method for running a python plan.
[ 30522, 2270, 10763, 11675, 2364, 1006, 5164, 1031, 1033, 12098, 5620, 1007, 11618, 6453, 1063, 9563, 3795, 8663, 8873, 2290, 1027, 3795, 8663, 8873, 27390, 3370, 1012, 7170, 8663, 8873, 27390, 3370, 1006, 1007, 1025, 18750, 24759, 2319, 842...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/date/format/FastDateFormat.java
FastDateFormat.getDateInstance
public static FastDateFormat getDateInstance(final int style, final TimeZone timeZone, final Locale locale) { return cache.getDateInstance(style, timeZone, locale); }
java
public static FastDateFormat getDateInstance(final int style, final TimeZone timeZone, final Locale locale) { return cache.getDateInstance(style, timeZone, locale); }
[ "public", "static", "FastDateFormat", "getDateInstance", "(", "final", "int", "style", ",", "final", "TimeZone", "timeZone", ",", "final", "Locale", "locale", ")", "{", "return", "cache", ".", "getDateInstance", "(", "style", ",", "timeZone", ",", "locale", ")...
获得 {@link FastDateFormat} 实例<br> 支持缓存 @param style date style: FULL, LONG, MEDIUM, or SHORT @param timeZone 时区{@link TimeZone} @param locale {@link Locale} 日期地理位置 @return 本地化 {@link FastDateFormat}
[ "获得", "{", "@link", "FastDateFormat", "}", "实例<br", ">", "支持缓存" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/date/format/FastDateFormat.java#L158-L160
train
Gets the date instance.
[ 30522, 2270, 10763, 3435, 13701, 14192, 4017, 2131, 13701, 7076, 26897, 1006, 2345, 20014, 2806, 1010, 2345, 2051, 15975, 2051, 15975, 1010, 2345, 2334, 2063, 2334, 2063, 1007, 1063, 2709, 17053, 1012, 2131, 13701, 7076, 26897, 1006, 2806, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-setting/src/main/java/cn/hutool/setting/GroupedMap.java
GroupedMap.isEmpty
public boolean isEmpty(String group) { group = StrUtil.nullToEmpty(group).trim(); readLock.lock(); try { final LinkedHashMap<String, String> valueMap = this.get(group); if (MapUtil.isNotEmpty(valueMap)) { return valueMap.isEmpty(); } } finally { readLock.unlock(); } return true; }
java
public boolean isEmpty(String group) { group = StrUtil.nullToEmpty(group).trim(); readLock.lock(); try { final LinkedHashMap<String, String> valueMap = this.get(group); if (MapUtil.isNotEmpty(valueMap)) { return valueMap.isEmpty(); } } finally { readLock.unlock(); } return true; }
[ "public", "boolean", "isEmpty", "(", "String", "group", ")", "{", "group", "=", "StrUtil", ".", "nullToEmpty", "(", "group", ")", ".", "trim", "(", ")", ";", "readLock", ".", "lock", "(", ")", ";", "try", "{", "final", "LinkedHashMap", "<", "String", ...
某个分组对应的键值对是否为空 @param group 分组 @return 是否为空
[ "某个分组对应的键值对是否为空" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-setting/src/main/java/cn/hutool/setting/GroupedMap.java#L146-L158
train
Returns true if the specified group is empty.
[ 30522, 2270, 22017, 20898, 2003, 6633, 13876, 2100, 1006, 5164, 2177, 1007, 1063, 2177, 1027, 2358, 22134, 4014, 1012, 19701, 3406, 6633, 13876, 2100, 1006, 2177, 1007, 1012, 12241, 1006, 1007, 1025, 3191, 7878, 1012, 5843, 1006, 1007, 1025...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java
TypeExtractor.isHadoopWritable
static boolean isHadoopWritable(Class<?> typeClass) { // check if this is directly the writable interface if (typeClass.getName().equals(HADOOP_WRITABLE_CLASS)) { return false; } final HashSet<Class<?>> alreadySeen = new HashSet<>(); alreadySeen.add(typeClass); return hasHadoopWritableInterface(typeClass, alreadySeen); }
java
static boolean isHadoopWritable(Class<?> typeClass) { // check if this is directly the writable interface if (typeClass.getName().equals(HADOOP_WRITABLE_CLASS)) { return false; } final HashSet<Class<?>> alreadySeen = new HashSet<>(); alreadySeen.add(typeClass); return hasHadoopWritableInterface(typeClass, alreadySeen); }
[ "static", "boolean", "isHadoopWritable", "(", "Class", "<", "?", ">", "typeClass", ")", "{", "// check if this is directly the writable interface", "if", "(", "typeClass", ".", "getName", "(", ")", ".", "equals", "(", "HADOOP_WRITABLE_CLASS", ")", ")", "{", "retur...
visible for testing
[ "visible", "for", "testing" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java#L2047-L2056
train
Checks if the given class is a hadoop writable interface.
[ 30522, 10763, 22017, 20898, 2003, 16102, 18589, 13088, 6590, 3468, 1006, 2465, 1026, 1029, 1028, 2828, 26266, 1007, 1063, 1013, 1013, 4638, 2065, 2023, 2003, 3495, 1996, 25697, 3085, 8278, 2065, 1006, 2828, 26266, 1012, 2131, 18442, 1006, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
sql/core/src/main/java/org/apache/spark/sql/vectorized/ColumnVector.java
ColumnVector.getFloats
public float[] getFloats(int rowId, int count) { float[] res = new float[count]; for (int i = 0; i < count; i++) { res[i] = getFloat(rowId + i); } return res; }
java
public float[] getFloats(int rowId, int count) { float[] res = new float[count]; for (int i = 0; i < count; i++) { res[i] = getFloat(rowId + i); } return res; }
[ "public", "float", "[", "]", "getFloats", "(", "int", "rowId", ",", "int", "count", ")", "{", "float", "[", "]", "res", "=", "new", "float", "[", "count", "]", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "count", ";", "i", "++", ")"...
Gets float type values from [rowId, rowId + count). The return values for the null slots are undefined and can be anything.
[ "Gets", "float", "type", "values", "from", "[", "rowId", "rowId", "+", "count", ")", ".", "The", "return", "values", "for", "the", "null", "slots", "are", "undefined", "and", "can", "be", "anything", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/sql/core/src/main/java/org/apache/spark/sql/vectorized/ColumnVector.java#L182-L188
train
Gets the float values from the specified row.
[ 30522, 2270, 14257, 30524, 1025, 2005, 1006, 20014, 1045, 1027, 1014, 1025, 1045, 1026, 4175, 1025, 1045, 1009, 1009, 1007, 1063, 24501, 1031, 1045, 1033, 1027, 2131, 10258, 16503, 1006, 5216, 3593, 1009, 1045, 1007, 1025, 1065, 2709, 24501...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/io/FileUtil.java
FileUtil.appendString
public static File appendString(String content, String path, String charset) throws IORuntimeException { return appendString(content, touch(path), charset); }
java
public static File appendString(String content, String path, String charset) throws IORuntimeException { return appendString(content, touch(path), charset); }
[ "public", "static", "File", "appendString", "(", "String", "content", ",", "String", "path", ",", "String", "charset", ")", "throws", "IORuntimeException", "{", "return", "appendString", "(", "content", ",", "touch", "(", "path", ")", ",", "charset", ")", ";...
将String写入文件,追加模式 @param content 写入的内容 @param path 文件路径 @param charset 字符集 @return 写入的文件 @throws IORuntimeException IO异常
[ "将String写入文件,追加模式" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/io/FileUtil.java#L2787-L2789
train
Append a string to a file.
[ 30522, 2270, 10763, 5371, 10439, 10497, 3367, 4892, 1006, 5164, 4180, 1010, 5164, 4130, 1010, 5164, 25869, 13462, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 2709, 10439, 10497, 3367, 4892, 1006, 4180, 1010, 3543, 1006, 4130, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/util/WrappingProxyUtil.java
WrappingProxyUtil.stripProxy
@SuppressWarnings("unchecked") public static <T> T stripProxy(@Nullable final WrappingProxy<T> wrappingProxy) { if (wrappingProxy == null) { return null; } T delegate = wrappingProxy.getWrappedDelegate(); int numProxiesStripped = 0; while (delegate instanceof WrappingProxy) { throwIfSafetyNetExceeded(++numProxiesStripped); delegate = ((WrappingProxy<T>) delegate).getWrappedDelegate(); } return delegate; }
java
@SuppressWarnings("unchecked") public static <T> T stripProxy(@Nullable final WrappingProxy<T> wrappingProxy) { if (wrappingProxy == null) { return null; } T delegate = wrappingProxy.getWrappedDelegate(); int numProxiesStripped = 0; while (delegate instanceof WrappingProxy) { throwIfSafetyNetExceeded(++numProxiesStripped); delegate = ((WrappingProxy<T>) delegate).getWrappedDelegate(); } return delegate; }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "public", "static", "<", "T", ">", "T", "stripProxy", "(", "@", "Nullable", "final", "WrappingProxy", "<", "T", ">", "wrappingProxy", ")", "{", "if", "(", "wrappingProxy", "==", "null", ")", "{", "return"...
Expects a proxy, and returns the unproxied delegate. @param wrappingProxy The initial proxy. @param <T> The type of the delegate. Note that all proxies in the chain must be assignable to T. @return The unproxied delegate.
[ "Expects", "a", "proxy", "and", "returns", "the", "unproxied", "delegate", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/util/WrappingProxyUtil.java#L48-L63
train
Strip a wrapping proxy.
[ 30522, 1030, 16081, 9028, 5582, 2015, 1006, 1000, 4895, 5403, 18141, 1000, 1007, 2270, 10763, 1026, 1056, 1028, 1056, 6167, 21572, 18037, 1006, 1030, 19701, 3085, 2345, 12252, 21572, 18037, 1026, 1056, 1028, 12252, 21572, 18037, 1007, 1063, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/dependency/nnparser/NeuralNetworkParser.java
NeuralNetworkParser.loadTxt
public boolean loadTxt(String path) { IOUtil.LineIterator lineIterator = new IOUtil.LineIterator(path); model_header = lineIterator.next(); if (model_header == null) return false; root = lineIterator.next(); use_distance = "1".equals(lineIterator.next()); use_valency = "1".equals(lineIterator.next()); use_cluster = "1".equals(lineIterator.next()); W1 = read_matrix(lineIterator); W2 = read_matrix(lineIterator); E = read_matrix(lineIterator); b1 = read_vector(lineIterator); saved = read_matrix(lineIterator); forms_alphabet = read_alphabet(lineIterator); postags_alphabet = read_alphabet(lineIterator); deprels_alphabet = read_alphabet(lineIterator); precomputation_id_encoder = read_map(lineIterator); if (use_cluster) { cluster4_types_alphabet = read_alphabet(lineIterator); cluster6_types_alphabet = read_alphabet(lineIterator); cluster_types_alphabet = read_alphabet(lineIterator); form_to_cluster4 = read_map(lineIterator); form_to_cluster6 = read_map(lineIterator); form_to_cluster = read_map(lineIterator); } assert !lineIterator.hasNext() : "文件有残留,可能是读取逻辑不对"; classifier = new NeuralNetworkClassifier(W1, W2, E, b1, saved, precomputation_id_encoder); classifier.canonical(); return true; }
java
public boolean loadTxt(String path) { IOUtil.LineIterator lineIterator = new IOUtil.LineIterator(path); model_header = lineIterator.next(); if (model_header == null) return false; root = lineIterator.next(); use_distance = "1".equals(lineIterator.next()); use_valency = "1".equals(lineIterator.next()); use_cluster = "1".equals(lineIterator.next()); W1 = read_matrix(lineIterator); W2 = read_matrix(lineIterator); E = read_matrix(lineIterator); b1 = read_vector(lineIterator); saved = read_matrix(lineIterator); forms_alphabet = read_alphabet(lineIterator); postags_alphabet = read_alphabet(lineIterator); deprels_alphabet = read_alphabet(lineIterator); precomputation_id_encoder = read_map(lineIterator); if (use_cluster) { cluster4_types_alphabet = read_alphabet(lineIterator); cluster6_types_alphabet = read_alphabet(lineIterator); cluster_types_alphabet = read_alphabet(lineIterator); form_to_cluster4 = read_map(lineIterator); form_to_cluster6 = read_map(lineIterator); form_to_cluster = read_map(lineIterator); } assert !lineIterator.hasNext() : "文件有残留,可能是读取逻辑不对"; classifier = new NeuralNetworkClassifier(W1, W2, E, b1, saved, precomputation_id_encoder); classifier.canonical(); return true; }
[ "public", "boolean", "loadTxt", "(", "String", "path", ")", "{", "IOUtil", ".", "LineIterator", "lineIterator", "=", "new", "IOUtil", ".", "LineIterator", "(", "path", ")", ";", "model_header", "=", "lineIterator", ".", "next", "(", ")", ";", "if", "(", ...
从txt加载 @param path @return
[ "从txt加载" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/dependency/nnparser/NeuralNetworkParser.java#L154-L193
train
Load a TXT file containing the NCSM language.
[ 30522, 2270, 22017, 20898, 7170, 2102, 18413, 1006, 5164, 4130, 1007, 1063, 22834, 21823, 2140, 1012, 2240, 21646, 8844, 2240, 21646, 8844, 1027, 2047, 22834, 21823, 2140, 1012, 2240, 21646, 8844, 1006, 4130, 1007, 1025, 2944, 1035, 20346, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-db/src/main/java/cn/hutool/db/DaoTemplate.java
DaoTemplate.get
public Entity get(Entity where) throws SQLException { return db.find(null, fixEntity(where), new EntityHandler()); }
java
public Entity get(Entity where) throws SQLException { return db.find(null, fixEntity(where), new EntityHandler()); }
[ "public", "Entity", "get", "(", "Entity", "where", ")", "throws", "SQLException", "{", "return", "db", ".", "find", "(", "null", ",", "fixEntity", "(", "where", ")", ",", "new", "EntityHandler", "(", ")", ")", ";", "}" ]
根据条件实体查询单个记录,当有多条返回时,只显示查询到的第一条 @param where 条件 @return 记录 @throws SQLException SQL执行异常
[ "根据条件实体查询单个记录,当有多条返回时,只显示查询到的第一条" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-db/src/main/java/cn/hutool/db/DaoTemplate.java#L243-L245
train
Get an entity from the database.
[ 30522, 2270, 9178, 2131, 1006, 9178, 2073, 1007, 11618, 29296, 10288, 24422, 1063, 2709, 16962, 1012, 2424, 1006, 19701, 1010, 8081, 4765, 3012, 1006, 2073, 1007, 1010, 2047, 9178, 11774, 3917, 1006, 1007, 1007, 1025, 1065, 102, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java
CsvReader.includeFields
public CsvReader includeFields(long mask) { if (mask == 0) { throw new IllegalArgumentException("The description of fields to parse excluded all fields. At least one fields must be included."); } ArrayList<Boolean> fields = new ArrayList<Boolean>(); while (mask != 0) { fields.add((mask & 0x1L) != 0); mask >>>= 1; } boolean[] fieldsArray = new boolean[fields.size()]; for (int i = 0; i < fieldsArray.length; i++) { fieldsArray[i] = fields.get(i); } return includeFields(fieldsArray); }
java
public CsvReader includeFields(long mask) { if (mask == 0) { throw new IllegalArgumentException("The description of fields to parse excluded all fields. At least one fields must be included."); } ArrayList<Boolean> fields = new ArrayList<Boolean>(); while (mask != 0) { fields.add((mask & 0x1L) != 0); mask >>>= 1; } boolean[] fieldsArray = new boolean[fields.size()]; for (int i = 0; i < fieldsArray.length; i++) { fieldsArray[i] = fields.get(i); } return includeFields(fieldsArray); }
[ "public", "CsvReader", "includeFields", "(", "long", "mask", ")", "{", "if", "(", "mask", "==", "0", ")", "{", "throw", "new", "IllegalArgumentException", "(", "\"The description of fields to parse excluded all fields. At least one fields must be included.\"", ")", ";", "...
Configures which fields of the CSV file should be included and which should be skipped. The bits in the value (read from least significant to most significant) define whether the field at the corresponding position in the CSV schema should be included. parser will look at the first {@code n} fields, where {@code n} is the position of the most significant non-zero bit. The parser will skip over all fields where the character at the corresponding bit is zero, and include the fields where the corresponding bit is one. <p>Examples: <ul> <li>A mask of {@code 0x7} would include the first three fields.</li> <li>A mask of {@code 0x26} (binary {@code 100110} would skip the first fields, include fields two and three, skip fields four and five, and include field six.</li> </ul> @param mask The bit mask defining which fields to include and which to skip. @return The CSV reader instance itself, to allow for fluent function chaining.
[ "Configures", "which", "fields", "of", "the", "CSV", "file", "should", "be", "included", "and", "which", "should", "be", "skipped", ".", "The", "bits", "in", "the", "value", "(", "read", "from", "least", "significant", "to", "most", "significant", ")", "de...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java#L267-L285
train
Returns a reader that reads the fields in the specified mask.
[ 30522, 2270, 20116, 12229, 30524, 2906, 22850, 15781, 2595, 24422, 1006, 1000, 1996, 6412, 1997, 4249, 2000, 11968, 3366, 12421, 2035, 4249, 1012, 2012, 2560, 2028, 4249, 2442, 2022, 2443, 1012, 1000, 1007, 1025, 1065, 9140, 9863, 1026, 220...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/accumulators/AccumulatorRegistry.java
AccumulatorRegistry.getSnapshot
public AccumulatorSnapshot getSnapshot() { try { return new AccumulatorSnapshot(jobID, taskID, userAccumulators); } catch (Throwable e) { LOG.warn("Failed to serialize accumulators for task.", e); return null; } }
java
public AccumulatorSnapshot getSnapshot() { try { return new AccumulatorSnapshot(jobID, taskID, userAccumulators); } catch (Throwable e) { LOG.warn("Failed to serialize accumulators for task.", e); return null; } }
[ "public", "AccumulatorSnapshot", "getSnapshot", "(", ")", "{", "try", "{", "return", "new", "AccumulatorSnapshot", "(", "jobID", ",", "taskID", ",", "userAccumulators", ")", ";", "}", "catch", "(", "Throwable", "e", ")", "{", "LOG", ".", "warn", "(", "\"Fa...
Creates a snapshot of this accumulator registry. @return a serialized accumulator map
[ "Creates", "a", "snapshot", "of", "this", "accumulator", "registry", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/accumulators/AccumulatorRegistry.java#L55-L62
train
Returns the accumulator snapshot for this task.
[ 30522, 2270, 16222, 2819, 20350, 2015, 2532, 4523, 12326, 4152, 2532, 4523, 12326, 1006, 1007, 1063, 3046, 1063, 2709, 2047, 16222, 2819, 20350, 2015, 2532, 4523, 12326, 1006, 3105, 3593, 1010, 4708, 3593, 1010, 5310, 6305, 24894, 20350, 20...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskExecutorRunner.java
YarnTaskExecutorRunner.main
public static void main(String[] args) { EnvironmentInformation.logEnvironmentInfo(LOG, "YARN TaskExecutor runner", args); SignalHandler.register(LOG); JvmShutdownSafeguard.installAsShutdownHook(LOG); run(args); }
java
public static void main(String[] args) { EnvironmentInformation.logEnvironmentInfo(LOG, "YARN TaskExecutor runner", args); SignalHandler.register(LOG); JvmShutdownSafeguard.installAsShutdownHook(LOG); run(args); }
[ "public", "static", "void", "main", "(", "String", "[", "]", "args", ")", "{", "EnvironmentInformation", ".", "logEnvironmentInfo", "(", "LOG", ",", "\"YARN TaskExecutor runner\"", ",", "args", ")", ";", "SignalHandler", ".", "register", "(", "LOG", ")", ";", ...
The entry point for the YARN task executor runner. @param args The command line arguments.
[ "The", "entry", "point", "for", "the", "YARN", "task", "executor", "runner", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskExecutorRunner.java#L74-L80
train
Main method for running YARN TaskExecutor.
[ 30522, 2270, 10763, 11675, 2364, 1006, 5164, 1031, 1033, 12098, 5620, 1007, 1063, 4044, 2378, 14192, 3370, 1012, 8833, 2368, 21663, 2239, 3672, 2378, 14876, 1006, 8833, 1010, 1000, 27158, 4708, 10288, 8586, 16161, 2099, 5479, 1000, 1010, 12...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
networknt/light-4j
metrics/src/main/java/io/dropwizard/metrics/InstrumentedExecutors.java
InstrumentedExecutors.newCachedThreadPool
public static InstrumentedExecutorService newCachedThreadPool( ThreadFactory threadFactory, MetricRegistry registry) { return new InstrumentedExecutorService(Executors.newCachedThreadPool(threadFactory), registry); }
java
public static InstrumentedExecutorService newCachedThreadPool( ThreadFactory threadFactory, MetricRegistry registry) { return new InstrumentedExecutorService(Executors.newCachedThreadPool(threadFactory), registry); }
[ "public", "static", "InstrumentedExecutorService", "newCachedThreadPool", "(", "ThreadFactory", "threadFactory", ",", "MetricRegistry", "registry", ")", "{", "return", "new", "InstrumentedExecutorService", "(", "Executors", ".", "newCachedThreadPool", "(", "threadFactory", ...
Creates an instrumented thread pool that creates new threads as needed, but will reuse previously constructed threads when they are available, and uses the provided ThreadFactory to create new threads when needed. @param threadFactory the factory to use when creating new threads @param registry the {@link MetricRegistry} that will contain the metrics. @return the newly created thread pool @throws NullPointerException if threadFactory is null @see Executors#newCachedThreadPool(ThreadFactory)
[ "Creates", "an", "instrumented", "thread", "pool", "that", "creates", "new", "threads", "as", "needed", "but", "will", "reuse", "previously", "constructed", "threads", "when", "they", "are", "available", "and", "uses", "the", "provided", "ThreadFactory", "to", "...
2a60257c60663684c8f6dc8b5ea3cf184e534db6
https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/metrics/src/main/java/io/dropwizard/metrics/InstrumentedExecutors.java#L292-L295
train
Create a new cached thread pool executor service.
[ 30522, 2270, 10763, 6602, 14728, 2595, 8586, 16161, 22573, 2099, 7903, 2063, 2047, 3540, 7690, 2705, 16416, 18927, 13669, 1006, 11689, 21450, 11689, 21450, 1010, 12046, 2890, 24063, 2854, 15584, 1007, 1063, 2709, 2047, 6602, 14728, 2595, 8586...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/CoLocationConstraint.java
CoLocationConstraint.lockLocation
public void lockLocation(TaskManagerLocation taskManagerLocation) { checkNotNull(taskManagerLocation); checkState(lockedLocation == null, "Location is already locked."); lockedLocation = taskManagerLocation; }
java
public void lockLocation(TaskManagerLocation taskManagerLocation) { checkNotNull(taskManagerLocation); checkState(lockedLocation == null, "Location is already locked."); lockedLocation = taskManagerLocation; }
[ "public", "void", "lockLocation", "(", "TaskManagerLocation", "taskManagerLocation", ")", "{", "checkNotNull", "(", "taskManagerLocation", ")", ";", "checkState", "(", "lockedLocation", "==", "null", ",", "\"Location is already locked.\"", ")", ";", "lockedLocation", "=...
Locks the location of this slot. The location can be locked only once and only after a shared slot has been assigned. <p>Note: This method exists for compatibility reasons with the new {@link SlotPool}. @param taskManagerLocation to lock this co-location constraint to
[ "Locks", "the", "location", "of", "this", "slot", ".", "The", "location", "can", "be", "locked", "only", "once", "and", "only", "after", "a", "shared", "slot", "has", "been", "assigned", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/CoLocationConstraint.java#L182-L187
train
Lock the location.
[ 30522, 2270, 11675, 5843, 4135, 10719, 1006, 4708, 24805, 4590, 4135, 10719, 4708, 24805, 4590, 4135, 10719, 1007, 1063, 4638, 17048, 11231, 3363, 1006, 4708, 24805, 4590, 4135, 10719, 1007, 1025, 14148, 12259, 1006, 5299, 4135, 10719, 1027, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-clients/src/main/java/org/apache/flink/client/cli/CliFrontend.java
CliFrontend.getConfigurationDirectoryFromEnv
public static String getConfigurationDirectoryFromEnv() { String location = System.getenv(ConfigConstants.ENV_FLINK_CONF_DIR); if (location != null) { if (new File(location).exists()) { return location; } else { throw new RuntimeException("The configuration directory '" + location + "', specified in the '" + ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable, does not exist."); } } else if (new File(CONFIG_DIRECTORY_FALLBACK_1).exists()) { location = CONFIG_DIRECTORY_FALLBACK_1; } else if (new File(CONFIG_DIRECTORY_FALLBACK_2).exists()) { location = CONFIG_DIRECTORY_FALLBACK_2; } else { throw new RuntimeException("The configuration directory was not specified. " + "Please specify the directory containing the configuration file through the '" + ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable."); } return location; }
java
public static String getConfigurationDirectoryFromEnv() { String location = System.getenv(ConfigConstants.ENV_FLINK_CONF_DIR); if (location != null) { if (new File(location).exists()) { return location; } else { throw new RuntimeException("The configuration directory '" + location + "', specified in the '" + ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable, does not exist."); } } else if (new File(CONFIG_DIRECTORY_FALLBACK_1).exists()) { location = CONFIG_DIRECTORY_FALLBACK_1; } else if (new File(CONFIG_DIRECTORY_FALLBACK_2).exists()) { location = CONFIG_DIRECTORY_FALLBACK_2; } else { throw new RuntimeException("The configuration directory was not specified. " + "Please specify the directory containing the configuration file through the '" + ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable."); } return location; }
[ "public", "static", "String", "getConfigurationDirectoryFromEnv", "(", ")", "{", "String", "location", "=", "System", ".", "getenv", "(", "ConfigConstants", ".", "ENV_FLINK_CONF_DIR", ")", ";", "if", "(", "location", "!=", "null", ")", "{", "if", "(", "new", ...
--------------------------------------------------------------------------------------------
[ "--------------------------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-clients/src/main/java/org/apache/flink/client/cli/CliFrontend.java#L1071-L1095
train
Get the configuration directory from the environment variable.
[ 30522, 2270, 10763, 5164, 2131, 8663, 8873, 27390, 3370, 4305, 2890, 16761, 2100, 19699, 8462, 2078, 2615, 1006, 1007, 1063, 5164, 3295, 1027, 2291, 1012, 2131, 2368, 2615, 1006, 9530, 8873, 18195, 5644, 5794, 3215, 1012, 4372, 2615, 1035, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
transport/src/main/java/io/netty/channel/AbstractCoalescingBufferQueue.java
AbstractCoalescingBufferQueue.remove
/**
 * Removes up to {@code bytes} readable bytes from the queue and returns them as a single
 * (possibly composed) {@link ByteBuf}. Listeners queued between fully-consumed buffers are
 * attached to {@code aggregatePromise}.
 *
 * @param alloc            allocator used when a composite buffer must be created
 * @param bytes            maximum number of readable bytes to return; clamped to
 *                         {@code readableBytes}
 * @param aggregatePromise promise that aggregates the listeners of consumed buffers
 * @return the composed buffer, or the empty-value sentinel when the queue is empty
 */
public final ByteBuf remove(ByteBufAllocator alloc, int bytes, ChannelPromise aggregatePromise) {
    checkPositiveOrZero(bytes, "bytes");
    checkNotNull(aggregatePromise, "aggregatePromise");

    // Use isEmpty rather than readableBytes==0 as we may have a promise associated with an empty buffer.
    if (bufAndListenerPairs.isEmpty()) {
        return removeEmptyValue();
    }
    bytes = Math.min(bytes, readableBytes);

    ByteBuf toReturn = null;
    // Tracks the buffer currently being processed so it can be released on failure.
    ByteBuf entryBuffer = null;
    int originalBytes = bytes;
    try {
        for (;;) {
            Object entry = bufAndListenerPairs.poll();
            if (entry == null) {
                break;
            }
            // The queue interleaves buffers and their listeners; listeners are simply
            // transferred onto the aggregate promise.
            if (entry instanceof ChannelFutureListener) {
                aggregatePromise.addListener((ChannelFutureListener) entry);
                continue;
            }
            entryBuffer = (ByteBuf) entry;
            if (entryBuffer.readableBytes() > bytes) {
                // Add the buffer back to the queue as we can't consume all of it.
                bufAndListenerPairs.addFirst(entryBuffer);
                if (bytes > 0) {
                    // Take a slice of what we can consume and retain it.
                    entryBuffer = entryBuffer.readRetainedSlice(bytes);
                    toReturn = toReturn == null ? composeFirst(alloc, entryBuffer)
                                                : compose(alloc, toReturn, entryBuffer);
                    bytes = 0;
                }
                break;
            } else {
                // Buffer fits entirely into the requested amount; consume it whole.
                bytes -= entryBuffer.readableBytes();
                toReturn = toReturn == null ? composeFirst(alloc, entryBuffer)
                                            : compose(alloc, toReturn, entryBuffer);
            }
            // Ownership transferred into toReturn; clear so the catch block won't double-release.
            entryBuffer = null;
        }
    } catch (Throwable cause) {
        safeRelease(entryBuffer);
        safeRelease(toReturn);
        aggregatePromise.setFailure(cause);
        throwException(cause);
    }
    // Account only for what was actually consumed (originalBytes - remaining).
    decrementReadableBytes(originalBytes - bytes);
    return toReturn;
}
java
public final ByteBuf remove(ByteBufAllocator alloc, int bytes, ChannelPromise aggregatePromise) { checkPositiveOrZero(bytes, "bytes"); checkNotNull(aggregatePromise, "aggregatePromise"); // Use isEmpty rather than readableBytes==0 as we may have a promise associated with an empty buffer. if (bufAndListenerPairs.isEmpty()) { return removeEmptyValue(); } bytes = Math.min(bytes, readableBytes); ByteBuf toReturn = null; ByteBuf entryBuffer = null; int originalBytes = bytes; try { for (;;) { Object entry = bufAndListenerPairs.poll(); if (entry == null) { break; } if (entry instanceof ChannelFutureListener) { aggregatePromise.addListener((ChannelFutureListener) entry); continue; } entryBuffer = (ByteBuf) entry; if (entryBuffer.readableBytes() > bytes) { // Add the buffer back to the queue as we can't consume all of it. bufAndListenerPairs.addFirst(entryBuffer); if (bytes > 0) { // Take a slice of what we can consume and retain it. entryBuffer = entryBuffer.readRetainedSlice(bytes); toReturn = toReturn == null ? composeFirst(alloc, entryBuffer) : compose(alloc, toReturn, entryBuffer); bytes = 0; } break; } else { bytes -= entryBuffer.readableBytes(); toReturn = toReturn == null ? composeFirst(alloc, entryBuffer) : compose(alloc, toReturn, entryBuffer); } entryBuffer = null; } } catch (Throwable cause) { safeRelease(entryBuffer); safeRelease(toReturn); aggregatePromise.setFailure(cause); throwException(cause); } decrementReadableBytes(originalBytes - bytes); return toReturn; }
[ "public", "final", "ByteBuf", "remove", "(", "ByteBufAllocator", "alloc", ",", "int", "bytes", ",", "ChannelPromise", "aggregatePromise", ")", "{", "checkPositiveOrZero", "(", "bytes", ",", "\"bytes\"", ")", ";", "checkNotNull", "(", "aggregatePromise", ",", "\"ag...
Remove a {@link ByteBuf} from the queue with the specified number of bytes. Any added buffer who's bytes are fully consumed during removal will have it's promise completed when the passed aggregate {@link ChannelPromise} completes. @param alloc The allocator used if a new {@link ByteBuf} is generated during the aggregation process. @param bytes the maximum number of readable bytes in the returned {@link ByteBuf}, if {@code bytes} is greater than {@link #readableBytes} then a buffer of length {@link #readableBytes} is returned. @param aggregatePromise used to aggregate the promises and listeners for the constituent buffers. @return a {@link ByteBuf} composed of the enqueued buffers.
[ "Remove", "a", "{", "@link", "ByteBuf", "}", "from", "the", "queue", "with", "the", "specified", "number", "of", "bytes", ".", "Any", "added", "buffer", "who", "s", "bytes", "are", "fully", "consumed", "during", "removal", "will", "have", "it", "s", "pro...
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/transport/src/main/java/io/netty/channel/AbstractCoalescingBufferQueue.java#L137-L187
train
Removes up to the specified number of bytes from this queue, composing them into a single buffer.
[ 30522, 2270, 2345, 24880, 8569, 2546, 6366, 1006, 24880, 8569, 13976, 24755, 4263, 2035, 10085, 1010, 20014, 27507, 1010, 3149, 21572, 28732, 9572, 21572, 28732, 1007, 1063, 4638, 6873, 28032, 3512, 2953, 6290, 2080, 1006, 27507, 1010, 1000, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java
ArrayUtil.wrap
/**
 * Wraps a primitive {@code char} array into its boxed {@link Character} counterpart.
 *
 * @param values primitive char values; may be {@code null}
 * @return boxed array of the same length, {@code null} if the input is {@code null},
 *         an empty array if the input is empty
 */
public static Character[] wrap(char... values) {
	if (values == null) {
		return null;
	}

	final Character[] boxed = new Character[values.length];
	int idx = 0;
	for (final char value : values) {
		boxed[idx++] = Character.valueOf(value);
	}
	return boxed;
}
java
public static Character[] wrap(char... values) { if (null == values) { return null; } final int length = values.length; if (0 == length) { return new Character[0]; } final Character[] array = new Character[length]; for (int i = 0; i < length; i++) { array[i] = Character.valueOf(values[i]); } return array; }
[ "public", "static", "Character", "[", "]", "wrap", "(", "char", "...", "values", ")", "{", "if", "(", "null", "==", "values", ")", "{", "return", "null", ";", "}", "final", "int", "length", "=", "values", ".", "length", ";", "if", "(", "0", "==", ...
将原始类型数组包装为包装类型 @param values 原始类型数组 @return 包装类型数组
[ "将原始类型数组包装为包装类型" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java#L1487-L1501
train
Creates an array of characters based on the specified char values.
[ 30522, 2270, 10763, 2839, 1031, 1033, 10236, 1006, 25869, 1012, 1012, 1012, 5300, 1007, 1063, 2065, 1006, 19701, 1027, 1027, 5300, 1007, 1063, 2709, 19701, 1025, 1065, 30524, 2839, 1031, 3091, 1033, 1025, 2005, 1006, 20014, 1045, 1027, 1014...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
StreamResponse.encode
/**
 * Writes this message's fields into {@code buf}: the stream id (via the
 * {@code Encoders.Strings} codec) followed by the byte count as a long.
 * Encoding does NOT include the payload buffer itself.
 *
 * @param buf target buffer to append the encoded fields to
 */
@Override
public void encode(ByteBuf buf) {
    Encoders.Strings.encode(buf, streamId);
    buf.writeLong(byteCount);
}
java
@Override public void encode(ByteBuf buf) { Encoders.Strings.encode(buf, streamId); buf.writeLong(byteCount); }
[ "@", "Override", "public", "void", "encode", "(", "ByteBuf", "buf", ")", "{", "Encoders", ".", "Strings", ".", "encode", "(", "buf", ",", "streamId", ")", ";", "buf", ".", "writeLong", "(", "byteCount", ")", ";", "}" ]
Encoding does NOT include 'buffer' itself. See {@link MessageEncoder}.
[ "Encoding", "does", "NOT", "include", "buffer", "itself", ".", "See", "{" ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java#L51-L55
train
Encodes the stream id and byte count into the specified byte buffer.
[ 30522, 1030, 2058, 15637, 2270, 11675, 4372, 16044, 1006, 24880, 8569, 2546, 20934, 2546, 1007, 1063, 4372, 16044, 2869, 1012, 7817, 1012, 4372, 16044, 1006, 20934, 2546, 1010, 5460, 3593, 1007, 1025, 20934, 2546, 1012, 4339, 10052, 1006, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/api/serialization/SpanningRecordSerializer.java
SpanningRecordSerializer.serializeRecord
/**
 * Serializes the complete record into the intermediate serialization buffer and
 * prepares the length prefix.
 *
 * @param record the record to serialize
 * @throws IOException if the record fails to write itself to the buffer
 * @throws IllegalStateException (only when CHECKED) if the previous record has not
 *         been fully consumed from {@code dataBuffer} yet
 */
@Override
public void serializeRecord(T record) throws IOException {
    if (CHECKED) {
        // Guard against overwriting a record that has not been fully copied out yet.
        if (dataBuffer.hasRemaining()) {
            throw new IllegalStateException("Pending serialization of previous record.");
        }
    }

    serializationBuffer.clear();
    lengthBuffer.clear();

    // write data and length
    record.write(serializationBuffer);

    int len = serializationBuffer.length();
    lengthBuffer.putInt(0, len);

    // Expose the serialized bytes as a ByteBuffer view for downstream copying.
    dataBuffer = serializationBuffer.wrapAsByteBuffer();
}
java
@Override public void serializeRecord(T record) throws IOException { if (CHECKED) { if (dataBuffer.hasRemaining()) { throw new IllegalStateException("Pending serialization of previous record."); } } serializationBuffer.clear(); lengthBuffer.clear(); // write data and length record.write(serializationBuffer); int len = serializationBuffer.length(); lengthBuffer.putInt(0, len); dataBuffer = serializationBuffer.wrapAsByteBuffer(); }
[ "@", "Override", "public", "void", "serializeRecord", "(", "T", "record", ")", "throws", "IOException", "{", "if", "(", "CHECKED", ")", "{", "if", "(", "dataBuffer", ".", "hasRemaining", "(", ")", ")", "{", "throw", "new", "IllegalStateException", "(", "\"...
Serializes the complete record to an intermediate data serialization buffer. @param record the record to serialize
[ "Serializes", "the", "complete", "record", "to", "an", "intermediate", "data", "serialization", "buffer", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/api/serialization/SpanningRecordSerializer.java#L66-L84
train
Serialize a single record.
[ 30522, 1030, 2058, 15637, 2270, 11675, 7642, 17629, 8586, 8551, 1006, 1056, 2501, 1007, 11618, 22834, 10288, 24422, 1063, 2065, 1006, 7039, 1007, 1063, 2065, 1006, 2951, 8569, 12494, 1012, 2038, 28578, 8113, 2075, 1006, 1007, 1007, 1063, 54...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
networknt/light-4j
client/src/main/java/org/apache/hc/core5/util/copied/ByteArrayBuffer.java
ByteArrayBuffer.indexOf
/**
 * Returns the index of the first occurrence of {@code b} in this buffer, searching
 * within {@code [from, to)}. The bounds are clamped: a negative {@code from} acts as
 * {@code 0}, a {@code to} beyond the buffer length acts as the length. Returns
 * {@code -1} when the byte does not occur in the (possibly empty) range.
 *
 * @param b    the byte to search for
 * @param from the index to start the search from (inclusive)
 * @param to   the index to finish the search at (exclusive)
 * @return the index of the first occurrence within the bounds, or {@code -1}
 */
public int indexOf(final byte b, final int from, final int to) {
    final int start = Math.max(from, 0);
    final int end = Math.min(to, this.len);
    if (start > end) {
        return -1;
    }
    for (int pos = start; pos < end; pos++) {
        if (this.array[pos] == b) {
            return pos;
        }
    }
    return -1;
}
java
public int indexOf(final byte b, final int from, final int to) { int beginIndex = from; if (beginIndex < 0) { beginIndex = 0; } int endIndex = to; if (endIndex > this.len) { endIndex = this.len; } if (beginIndex > endIndex) { return -1; } for (int i = beginIndex; i < endIndex; i++) { if (this.array[i] == b) { return i; } } return -1; }
[ "public", "int", "indexOf", "(", "final", "byte", "b", ",", "final", "int", "from", ",", "final", "int", "to", ")", "{", "int", "beginIndex", "=", "from", ";", "if", "(", "beginIndex", "<", "0", ")", "{", "beginIndex", "=", "0", ";", "}", "int", ...
Returns the index within this buffer of the first occurrence of the specified byte, starting the search at the specified {@code beginIndex} and finishing at {@code endIndex}. If no such byte occurs in this buffer within the specified bounds, {@code -1} is returned. <p> There is no restriction on the value of {@code beginIndex} and {@code endIndex}. If {@code beginIndex} is negative, it has the same effect as if it were zero. If {@code endIndex} is greater than {@link #length()}, it has the same effect as if it were {@link #length()}. If the {@code beginIndex} is greater than the {@code endIndex}, {@code -1} is returned. @param b the byte to search for. @param from the index to start the search from. @param to the index to finish the search at. @return the index of the first occurrence of the byte in the buffer within the given bounds, or {@code -1} if the byte does not occur. @since 4.1
[ "Returns", "the", "index", "within", "this", "buffer", "of", "the", "first", "occurrence", "of", "the", "specified", "byte", "starting", "the", "search", "at", "the", "specified", "{", "@code", "beginIndex", "}", "and", "finishing", "at", "{", "@code", "endI...
2a60257c60663684c8f6dc8b5ea3cf184e534db6
https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/client/src/main/java/org/apache/hc/core5/util/copied/ByteArrayBuffer.java#L309-L327
train
Gets the index of the specified byte in this array.
[ 30522, 2270, 20014, 5950, 11253, 1006, 2345, 24880, 1038, 1010, 2345, 20014, 2013, 1010, 2345, 20014, 2000, 1007, 1063, 20014, 4088, 22254, 10288, 1027, 2013, 1025, 2065, 1006, 4088, 22254, 10288, 1026, 1014, 1007, 1063, 4088, 22254, 10288, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/types/SlotProfile.java
SlotProfile.priorAllocation
/**
 * Returns a slot profile for the given resource profile and the prior allocations,
 * with no preferred locations.
 *
 * @param resourceProfile specifying the slot requirements
 * @param priorAllocations specifying the prior allocations
 * @return slot profile with the given resource profile and prior allocations
 */
public static SlotProfile priorAllocation(ResourceProfile resourceProfile, Collection<AllocationID> priorAllocations) {
	return new SlotProfile(resourceProfile, Collections.emptyList(), priorAllocations);
}
java
public static SlotProfile priorAllocation(ResourceProfile resourceProfile, Collection<AllocationID> priorAllocations) { return new SlotProfile(resourceProfile, Collections.emptyList(), priorAllocations); }
[ "public", "static", "SlotProfile", "priorAllocation", "(", "ResourceProfile", "resourceProfile", ",", "Collection", "<", "AllocationID", ">", "priorAllocations", ")", "{", "return", "new", "SlotProfile", "(", "resourceProfile", ",", "Collections", ".", "emptyList", "(...
Returns a slot profile for the given resource profile and the prior allocations. @param resourceProfile specifying the slot requirements @param priorAllocations specifying the prior allocations @return Slot profile with the given resource profile and prior allocations
[ "Returns", "a", "slot", "profile", "for", "the", "given", "resource", "profile", "and", "the", "prior", "allocations", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/types/SlotProfile.java#L143-L145
train
Create a slot profile with the given resource profile and prior allocations.
[ 30522, 2270, 10763, 10453, 21572, 8873, 2571, 3188, 8095, 23909, 1006, 7692, 21572, 8873, 2571, 7692, 21572, 8873, 2571, 1010, 3074, 1026, 16169, 3593, 1028, 3188, 8095, 23909, 2015, 1007, 1063, 2709, 2047, 10453, 21572, 8873, 2571, 1006, 7...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-crypto/src/main/java/cn/hutool/crypto/digest/Digester.java
Digester.digest
/**
 * Generates the digest of the given stream.
 *
 * @param data         the input stream to digest
 * @param bufferLength read-buffer size; values below 1 fall back to
 *                     {@link IoUtil#DEFAULT_BUFFER_SIZE}
 * @return the digest bytes (after the configured repeat/reset post-processing)
 * @throws IORuntimeException wrapping any {@link IOException} raised while reading
 */
public byte[] digest(InputStream data, int bufferLength) throws IORuntimeException {
	final int effectiveBufferLength = bufferLength < 1 ? IoUtil.DEFAULT_BUFFER_SIZE : bufferLength;

	final byte[] digestBytes;
	try {
		// Salted and unsalted digests follow different code paths.
		digestBytes = ArrayUtil.isEmpty(this.salt)
				? digestWithoutSalt(data, effectiveBufferLength)
				: digestWithSalt(data, effectiveBufferLength);
	} catch (IOException e) {
		throw new IORuntimeException(e);
	}

	return resetAndRepeatDigest(digestBytes);
}
java
public byte[] digest(InputStream data, int bufferLength) throws IORuntimeException { if (bufferLength < 1) { bufferLength = IoUtil.DEFAULT_BUFFER_SIZE; } byte[] result; try { if (ArrayUtil.isEmpty(this.salt)) { result = digestWithoutSalt(data, bufferLength); } else { result = digestWithSalt(data, bufferLength); } } catch (IOException e) { throw new IORuntimeException(e); } return resetAndRepeatDigest(result); }
[ "public", "byte", "[", "]", "digest", "(", "InputStream", "data", ",", "int", "bufferLength", ")", "throws", "IORuntimeException", "{", "if", "(", "bufferLength", "<", "1", ")", "{", "bufferLength", "=", "IoUtil", ".", "DEFAULT_BUFFER_SIZE", ";", "}", "byte"...
生成摘要 @param data {@link InputStream} 数据流 @param bufferLength 缓存长度,不足1使用 {@link IoUtil#DEFAULT_BUFFER_SIZE} 做为默认值 @return 摘要bytes @throws IORuntimeException IO异常
[ "生成摘要" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-crypto/src/main/java/cn/hutool/crypto/digest/Digester.java#L297-L314
train
Digest the input stream using the specified buffer length.
[ 30522, 2270, 24880, 1031, 1033, 17886, 1006, 20407, 25379, 2951, 1010, 20014, 17698, 7770, 13512, 2232, 1007, 11618, 22834, 15532, 7292, 10288, 24422, 1063, 2065, 1006, 17698, 7770, 13512, 2232, 1026, 1015, 1007, 1063, 17698, 7770, 13512, 223...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/incubator-shardingsphere
sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/old/lexer/LexerEngine.java
LexerEngine.equalAny
/**
 * Judges whether the lexer's current token matches any of the given token types.
 *
 * @param tokenTypes candidate token types to compare against
 * @return {@code true} if the current token's type equals one of the candidates
 */
public boolean equalAny(final TokenType... tokenTypes) {
    for (final TokenType candidate : tokenTypes) {
        if (lexer.getCurrentToken().getType() == candidate) {
            return true;
        }
    }
    return false;
}
java
public boolean equalAny(final TokenType... tokenTypes) { for (TokenType each : tokenTypes) { if (each == lexer.getCurrentToken().getType()) { return true; } } return false; }
[ "public", "boolean", "equalAny", "(", "final", "TokenType", "...", "tokenTypes", ")", "{", "for", "(", "TokenType", "each", ":", "tokenTypes", ")", "{", "if", "(", "each", "==", "lexer", ".", "getCurrentToken", "(", ")", ".", "getType", "(", ")", ")", ...
Judge current token equals one of input tokens or not. @param tokenTypes to be judged token types @return current token equals one of input tokens or not
[ "Judge", "current", "token", "equals", "one", "of", "input", "tokens", "or", "not", "." ]
f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d
https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/old/lexer/LexerEngine.java#L133-L140
train
Checks if the current token is one of the given tokens.
[ 30522, 2270, 22017, 20898, 5020, 19092, 1006, 2345, 19204, 13874, 1012, 1012, 1012, 19204, 13874, 2015, 1007, 1063, 2005, 1006, 19204, 13874, 2169, 1024, 19204, 13874, 2015, 1007, 1063, 2065, 1006, 2169, 1027, 1027, 17244, 2121, 1012, 2131, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/ComputeFunction.java
ComputeFunction.getEdges
/**
 * Gets an {@link Iterable} over all out-going edges of the current vertex.
 *
 * <p>The returned iterator is a reused instance backed by {@code edgeIterator};
 * {@code verifyEdgeUsage()} enforces the framework's usage restrictions before access.
 *
 * @return an iterator over all out-going edges
 */
public final Iterable<Edge<K, EV>> getEdges() {
    verifyEdgeUsage();
    this.edgeIterator.set(edges);
    return this.edgeIterator;
}
java
public final Iterable<Edge<K, EV>> getEdges() { verifyEdgeUsage(); this.edgeIterator.set(edges); return this.edgeIterator; }
[ "public", "final", "Iterable", "<", "Edge", "<", "K", ",", "EV", ">", ">", "getEdges", "(", ")", "{", "verifyEdgeUsage", "(", ")", ";", "this", ".", "edgeIterator", ".", "set", "(", "edges", ")", ";", "return", "this", ".", "edgeIterator", ";", "}" ]
Gets an {@link java.lang.Iterable} with all out-going edges. This method is mutually exclusive with {@link #sendMessageToAllNeighbors(Object)} and may be called only once. @return An iterator with all edges.
[ "Gets", "an", "{", "@link", "java", ".", "lang", ".", "Iterable", "}", "with", "all", "out", "-", "going", "edges", ".", "This", "method", "is", "mutually", "exclusive", "with", "{", "@link", "#sendMessageToAllNeighbors", "(", "Object", ")", "}", "and", ...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/ComputeFunction.java#L86-L90
train
Returns an iterator over the edges of this graph.
[ 30522, 2270, 2345, 2009, 6906, 3468, 1026, 3341, 1026, 1047, 1010, 23408, 1028, 1028, 2131, 24225, 2015, 1006, 1007, 1063, 20410, 24225, 10383, 3351, 1006, 1007, 1025, 2023, 1012, 3341, 21646, 8844, 1012, 2275, 1006, 30524, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/collection/dartsclone/details/DoubleArrayBuilder.java
DoubleArrayBuilder.build
/**
 * Builds the double-array structure from the given keyset.
 *
 * <p>Keysets carrying values are first compiled into a DAWG, which is then converted;
 * keysets without values are built directly.
 *
 * @param keyset the keys (and optional values) to build from
 */
public void build(Keyset keyset) {
    if (!keyset.hasValues()) {
        buildFromKeyset(keyset);
        return;
    }

    DawgBuilder dawgBuilder = new DawgBuilder();
    buildDawg(keyset, dawgBuilder);
    buildFromDawg(dawgBuilder);
    dawgBuilder.clear();
}
java
public void build(Keyset keyset) { if (keyset.hasValues()) { DawgBuilder dawgBuilder = new DawgBuilder(); buildDawg(keyset, dawgBuilder); buildFromDawg(dawgBuilder); dawgBuilder.clear(); } else { buildFromKeyset(keyset); } }
[ "public", "void", "build", "(", "Keyset", "keyset", ")", "{", "if", "(", "keyset", ".", "hasValues", "(", ")", ")", "{", "DawgBuilder", "dawgBuilder", "=", "new", "DawgBuilder", "(", ")", ";", "buildDawg", "(", "keyset", ",", "dawgBuilder", ")", ";", "...
构建 @param keyset
[ "构建" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/collection/dartsclone/details/DoubleArrayBuilder.java#L18-L31
train
Builds the double-array structure from a keyset.
[ 30522, 2270, 11675, 3857, 1006, 6309, 3388, 6309, 3388, 1007, 1063, 2065, 1006, 6309, 3388, 1012, 2038, 10175, 15808, 1006, 1007, 1007, 1063, 4830, 27767, 8569, 23891, 2099, 4830, 27767, 8569, 23891, 2099, 1027, 2047, 4830, 27767, 8569, 238...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SingleOutputStreamOperator.java
SingleOutputStreamOperator.forceNonParallel
/**
 * Sets the parallelism and maximum parallelism of this operator to one, and marks
 * the operator so that a non-1 degree of parallelism can no longer be set.
 *
 * @return this operator, with parallelism forced to one
 */
@PublicEvolving
public SingleOutputStreamOperator<T> forceNonParallel() {
    transformation.setParallelism(1);
    transformation.setMaxParallelism(1);
    nonParallel = true;
    return this;
}
java
@PublicEvolving public SingleOutputStreamOperator<T> forceNonParallel() { transformation.setParallelism(1); transformation.setMaxParallelism(1); nonParallel = true; return this; }
[ "@", "PublicEvolving", "public", "SingleOutputStreamOperator", "<", "T", ">", "forceNonParallel", "(", ")", "{", "transformation", ".", "setParallelism", "(", "1", ")", ";", "transformation", ".", "setMaxParallelism", "(", "1", ")", ";", "nonParallel", "=", "tru...
Sets the parallelism and maximum parallelism of this operator to one. And mark this operator cannot set a non-1 degree of parallelism. @return The operator with only one parallelism.
[ "Sets", "the", "parallelism", "and", "maximum", "parallelism", "of", "this", "operator", "to", "one", ".", "And", "mark", "this", "operator", "cannot", "set", "a", "non", "-", "1", "degree", "of", "parallelism", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SingleOutputStreamOperator.java#L220-L226
train
Force non - parallel operation.
[ 30522, 1030, 2270, 6777, 4747, 6455, 2270, 2309, 5833, 18780, 21422, 25918, 8844, 1026, 1056, 1028, 2486, 8540, 28689, 6216, 2140, 1006, 1007, 1063, 8651, 1012, 2275, 28689, 6216, 28235, 1006, 1015, 1007, 1025, 8651, 1012, 2275, 17848, 2868...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java
ArrayUtil.unWrap
/**
 * Unwraps a boxed {@link Double} array into its primitive {@code double} counterpart.
 *
 * @param values boxed values; may be {@code null}. Elements are assumed non-null —
 *               a {@code null} element triggers a NullPointerException, same as the
 *               original unboxing behavior.
 * @return primitive array of the same length, {@code null} if the input is {@code null},
 *         an empty array if the input is empty
 */
public static double[] unWrap(Double... values) {
	if (values == null) {
		return null;
	}

	final double[] primitives = new double[values.length];
	int idx = 0;
	for (final Double value : values) {
		primitives[idx++] = value.doubleValue();
	}
	return primitives;
}
java
public static double[] unWrap(Double... values) { if (null == values) { return null; } final int length = values.length; if (0 == length) { return new double[0]; } final double[] array = new double[length]; for (int i = 0; i < length; i++) { array[i] = values[i].doubleValue(); } return array; }
[ "public", "static", "double", "[", "]", "unWrap", "(", "Double", "...", "values", ")", "{", "if", "(", "null", "==", "values", ")", "{", "return", "null", ";", "}", "final", "int", "length", "=", "values", ".", "length", ";", "if", "(", "0", "==", ...
包装类数组转为原始类型数组 @param values 包装类型数组 @return 原始类型数组
[ "包装类数组转为原始类型数组" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java#L1685-L1699
train
Unwraps the specified boxed Double array into a primitive double array.
[ 30522, 2270, 10763, 3313, 1031, 1033, 4895, 13088, 9331, 1006, 3313, 1012, 1012, 1012, 5300, 1007, 30524, 1007, 1063, 2709, 2047, 3313, 1031, 1014, 1033, 1025, 1065, 2345, 3313, 1031, 1033, 9140, 1027, 2047, 3313, 1031, 3091, 1033, 1025, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketServerHandshakerFactory.java
WebSocketServerHandshakerFactory.sendUnsupportedVersionResponse
public static ChannelFuture sendUnsupportedVersionResponse(Channel channel, ChannelPromise promise) { HttpResponse res = new DefaultFullHttpResponse( HttpVersion.HTTP_1_1, HttpResponseStatus.UPGRADE_REQUIRED); res.headers().set(HttpHeaderNames.SEC_WEBSOCKET_VERSION, WebSocketVersion.V13.toHttpHeaderValue()); HttpUtil.setContentLength(res, 0); return channel.writeAndFlush(res, promise); }
java
public static ChannelFuture sendUnsupportedVersionResponse(Channel channel, ChannelPromise promise) { HttpResponse res = new DefaultFullHttpResponse( HttpVersion.HTTP_1_1, HttpResponseStatus.UPGRADE_REQUIRED); res.headers().set(HttpHeaderNames.SEC_WEBSOCKET_VERSION, WebSocketVersion.V13.toHttpHeaderValue()); HttpUtil.setContentLength(res, 0); return channel.writeAndFlush(res, promise); }
[ "public", "static", "ChannelFuture", "sendUnsupportedVersionResponse", "(", "Channel", "channel", ",", "ChannelPromise", "promise", ")", "{", "HttpResponse", "res", "=", "new", "DefaultFullHttpResponse", "(", "HttpVersion", ".", "HTTP_1_1", ",", "HttpResponseStatus", "....
Return that we need cannot not support the web socket version
[ "Return", "that", "we", "need", "cannot", "not", "support", "the", "web", "socket", "version" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketServerHandshakerFactory.java#L157-L164
train
Sends an unsupported WebSocket version response.
[ 30522, 2270, 10763, 3149, 11263, 11244, 4604, 4609, 6342, 9397, 15613, 27774, 6072, 26029, 3366, 1006, 3149, 3149, 1010, 3149, 21572, 28732, 4872, 1007, 1063, 8299, 6072, 26029, 3366, 24501, 1027, 2047, 12398, 3993, 2140, 11039, 25856, 6072, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/codec/Caesar.java
Caesar.decode
public static String decode(String ciphertext, int offset) { final int len = ciphertext.length(); final char[] plain = ciphertext.toCharArray(); char c; for (int i = 0; i < len; i++) { c = ciphertext.charAt(i); if (false == Character.isLetter(c)) { continue; } plain[i] = decodeChar(c, offset); } return new String(plain); }
java
public static String decode(String ciphertext, int offset) { final int len = ciphertext.length(); final char[] plain = ciphertext.toCharArray(); char c; for (int i = 0; i < len; i++) { c = ciphertext.charAt(i); if (false == Character.isLetter(c)) { continue; } plain[i] = decodeChar(c, offset); } return new String(plain); }
[ "public", "static", "String", "decode", "(", "String", "ciphertext", ",", "int", "offset", ")", "{", "final", "int", "len", "=", "ciphertext", ".", "length", "(", ")", ";", "final", "char", "[", "]", "plain", "=", "ciphertext", ".", "toCharArray", "(", ...
传入明文解密到密文 @param ciphertext 密文 @return 解密后的内容
[ "传入明文解密到密文" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/codec/Caesar.java#L42-L54
train
Decodes a UTF - 8 encoded string.
[ 30522, 2270, 10763, 5164, 21933, 3207, 1006, 5164, 27715, 18209, 1010, 20014, 16396, 1007, 1063, 2345, 20014, 18798, 1027, 27715, 18209, 1012, 3091, 1006, 1007, 1025, 2345, 25869, 1031, 1033, 5810, 1027, 27715, 18209, 1012, 2000, 7507, 19848,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rpc/akka/AkkaRpcActor.java
AkkaRpcActor.sendErrorIfSender
protected void sendErrorIfSender(Throwable throwable) { if (!getSender().equals(ActorRef.noSender())) { getSender().tell(new Status.Failure(throwable), getSelf()); } }
java
protected void sendErrorIfSender(Throwable throwable) { if (!getSender().equals(ActorRef.noSender())) { getSender().tell(new Status.Failure(throwable), getSelf()); } }
[ "protected", "void", "sendErrorIfSender", "(", "Throwable", "throwable", ")", "{", "if", "(", "!", "getSender", "(", ")", ".", "equals", "(", "ActorRef", ".", "noSender", "(", ")", ")", ")", "{", "getSender", "(", ")", ".", "tell", "(", "new", "Status"...
Send throwable to sender if the sender is specified. @param throwable to send to the sender
[ "Send", "throwable", "to", "sender", "if", "the", "sender", "is", "specified", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/rpc/akka/AkkaRpcActor.java#L433-L437
train
Send error if sender is not the no sender
[ 30522, 5123, 11675, 4604, 2121, 29165, 10128, 5054, 4063, 1006, 5466, 3085, 5466, 3085, 1007, 1063, 2065, 1006, 999, 4152, 10497, 2121, 1006, 1007, 1012, 19635, 1006, 3364, 2890, 2546, 1012, 4451, 11563, 1006, 1007, 1007, 1007, 1063, 4152, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java
RdbSyncService.getTargetColumnType
private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) { DbMapping dbMapping = config.getDbMapping(); String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable(); Map<String, Integer> columnType = columnsTypeCache.get(cacheKey); if (columnType == null) { synchronized (RdbSyncService.class) { columnType = columnsTypeCache.get(cacheKey); if (columnType == null) { columnType = new LinkedHashMap<>(); final Map<String, Integer> columnTypeTmp = columnType; String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2"; Util.sqlRS(conn, sql, rs -> { try { ResultSetMetaData rsd = rs.getMetaData(); int columnCount = rsd.getColumnCount(); for (int i = 1; i <= columnCount; i++) { columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i)); } columnsTypeCache.put(cacheKey, columnTypeTmp); } catch (SQLException e) { logger.error(e.getMessage(), e); } }); } } } return columnType; }
java
private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) { DbMapping dbMapping = config.getDbMapping(); String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable(); Map<String, Integer> columnType = columnsTypeCache.get(cacheKey); if (columnType == null) { synchronized (RdbSyncService.class) { columnType = columnsTypeCache.get(cacheKey); if (columnType == null) { columnType = new LinkedHashMap<>(); final Map<String, Integer> columnTypeTmp = columnType; String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2"; Util.sqlRS(conn, sql, rs -> { try { ResultSetMetaData rsd = rs.getMetaData(); int columnCount = rsd.getColumnCount(); for (int i = 1; i <= columnCount; i++) { columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i)); } columnsTypeCache.put(cacheKey, columnTypeTmp); } catch (SQLException e) { logger.error(e.getMessage(), e); } }); } } } return columnType; }
[ "private", "Map", "<", "String", ",", "Integer", ">", "getTargetColumnType", "(", "Connection", "conn", ",", "MappingConfig", "config", ")", "{", "DbMapping", "dbMapping", "=", "config", ".", "getDbMapping", "(", ")", ";", "String", "cacheKey", "=", "config", ...
获取目标字段类型 @param conn sql connection @param config 映射配置 @return 字段sqlType
[ "获取目标字段类型" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java#L399-L426
train
Get the target column type
[ 30522, 2797, 4949, 1026, 5164, 1010, 16109, 1028, 2131, 7559, 18150, 25778, 2819, 29405, 5051, 1006, 4434, 9530, 2078, 1010, 12375, 8663, 8873, 2290, 9530, 8873, 2290, 1007, 1063, 16962, 2863, 14853, 16962, 2863, 14853, 1027, 9530, 8873, 22...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
networknt/light-4j
metrics/src/main/java/io/dropwizard/metrics/InstrumentedExecutors.java
InstrumentedExecutors.newCachedThreadPool
public static InstrumentedExecutorService newCachedThreadPool(MetricRegistry registry, String name) { return new InstrumentedExecutorService(Executors.newCachedThreadPool(), registry, name); }
java
public static InstrumentedExecutorService newCachedThreadPool(MetricRegistry registry, String name) { return new InstrumentedExecutorService(Executors.newCachedThreadPool(), registry, name); }
[ "public", "static", "InstrumentedExecutorService", "newCachedThreadPool", "(", "MetricRegistry", "registry", ",", "String", "name", ")", "{", "return", "new", "InstrumentedExecutorService", "(", "Executors", ".", "newCachedThreadPool", "(", ")", ",", "registry", ",", ...
Creates an instrumented thread pool that creates new threads as needed, but will reuse previously constructed threads when they are available. These pools will typically improve the performance of programs that execute many short-lived asynchronous tasks. Calls to {@code execute} will reuse previously constructed threads if available. If no existing thread is available, a new thread will be created and added to the pool. Threads that have not been used for sixty seconds are terminated and removed from the cache. Thus, a pool that remains idle for long enough will not consume any resource. Note that pools with similar properties but different details (for example, timeout parameters) may be created using {@link ThreadPoolExecutor} constructors. @param registry the {@link MetricRegistry} that will contain the metrics. @param name the (metrics) name for this executor service, see {@link MetricRegistry#name(String, String...)}. @return the newly created thread pool @see Executors#newCachedThreadPool()
[ "Creates", "an", "instrumented", "thread", "pool", "that", "creates", "new", "threads", "as", "needed", "but", "will", "reuse", "previously", "constructed", "threads", "when", "they", "are", "available", ".", "These", "pools", "will", "typically", "improve", "th...
2a60257c60663684c8f6dc8b5ea3cf184e534db6
https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/metrics/src/main/java/io/dropwizard/metrics/InstrumentedExecutors.java#L235-L237
train
Create a new cached thread pool.
[ 30522, 2270, 10763, 6602, 14728, 2595, 8586, 16161, 22573, 2099, 7903, 2063, 2047, 3540, 7690, 2705, 16416, 18927, 13669, 1006, 12046, 2890, 24063, 2854, 15584, 1010, 5164, 2171, 1007, 1063, 2709, 2047, 6602, 14728, 2595, 8586, 16161, 22573, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java
Graph.removeVertex
public Graph<K, VV, EV> removeVertex(Vertex<K, VV> vertex) { List<Vertex<K, VV>> vertexToBeRemoved = new ArrayList<>(); vertexToBeRemoved.add(vertex); return removeVertices(vertexToBeRemoved); }
java
public Graph<K, VV, EV> removeVertex(Vertex<K, VV> vertex) { List<Vertex<K, VV>> vertexToBeRemoved = new ArrayList<>(); vertexToBeRemoved.add(vertex); return removeVertices(vertexToBeRemoved); }
[ "public", "Graph", "<", "K", ",", "VV", ",", "EV", ">", "removeVertex", "(", "Vertex", "<", "K", ",", "VV", ">", "vertex", ")", "{", "List", "<", "Vertex", "<", "K", ",", "VV", ">", ">", "vertexToBeRemoved", "=", "new", "ArrayList", "<>", "(", ")...
Removes the given vertex and its edges from the graph. @param vertex the vertex to remove @return the new graph containing the existing vertices and edges without the removed vertex and its edges
[ "Removes", "the", "given", "vertex", "and", "its", "edges", "from", "the", "graph", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java#L1470-L1476
train
Removes a vertex from the graph.
[ 30522, 2270, 10629, 1026, 1047, 1010, 1058, 2615, 1010, 23408, 1028, 6366, 16874, 10288, 1006, 19449, 1026, 1047, 1010, 1058, 2615, 1028, 19449, 1007, 1063, 2862, 1026, 19449, 1026, 1047, 1010, 1058, 2615, 1028, 1028, 19449, 3406, 5677, 663...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
TransportClientFactory.createClient
public TransportClient createClient(String remoteHost, int remotePort) throws IOException, InterruptedException { // Get connection from the connection pool first. // If it is not found or not active, create a new one. // Use unresolved address here to avoid DNS resolution each time we creates a client. final InetSocketAddress unresolvedAddress = InetSocketAddress.createUnresolved(remoteHost, remotePort); // Create the ClientPool if we don't have it yet. ClientPool clientPool = connectionPool.get(unresolvedAddress); if (clientPool == null) { connectionPool.putIfAbsent(unresolvedAddress, new ClientPool(numConnectionsPerPeer)); clientPool = connectionPool.get(unresolvedAddress); } int clientIndex = rand.nextInt(numConnectionsPerPeer); TransportClient cachedClient = clientPool.clients[clientIndex]; if (cachedClient != null && cachedClient.isActive()) { // Make sure that the channel will not timeout by updating the last use time of the // handler. Then check that the client is still alive, in case it timed out before // this code was able to update things. TransportChannelHandler handler = cachedClient.getChannel().pipeline() .get(TransportChannelHandler.class); synchronized (handler) { handler.getResponseHandler().updateTimeOfLastRequest(); } if (cachedClient.isActive()) { logger.trace("Returning cached connection to {}: {}", cachedClient.getSocketAddress(), cachedClient); return cachedClient; } } // If we reach here, we don't have an existing connection open. Let's create a new one. // Multiple threads might race here to create new connections. Keep only one of them active. 
final long preResolveHost = System.nanoTime(); final InetSocketAddress resolvedAddress = new InetSocketAddress(remoteHost, remotePort); final long hostResolveTimeMs = (System.nanoTime() - preResolveHost) / 1000000; if (hostResolveTimeMs > 2000) { logger.warn("DNS resolution for {} took {} ms", resolvedAddress, hostResolveTimeMs); } else { logger.trace("DNS resolution for {} took {} ms", resolvedAddress, hostResolveTimeMs); } synchronized (clientPool.locks[clientIndex]) { cachedClient = clientPool.clients[clientIndex]; if (cachedClient != null) { if (cachedClient.isActive()) { logger.trace("Returning cached connection to {}: {}", resolvedAddress, cachedClient); return cachedClient; } else { logger.info("Found inactive connection to {}, creating a new one.", resolvedAddress); } } clientPool.clients[clientIndex] = createClient(resolvedAddress); return clientPool.clients[clientIndex]; } }
java
public TransportClient createClient(String remoteHost, int remotePort) throws IOException, InterruptedException { // Get connection from the connection pool first. // If it is not found or not active, create a new one. // Use unresolved address here to avoid DNS resolution each time we creates a client. final InetSocketAddress unresolvedAddress = InetSocketAddress.createUnresolved(remoteHost, remotePort); // Create the ClientPool if we don't have it yet. ClientPool clientPool = connectionPool.get(unresolvedAddress); if (clientPool == null) { connectionPool.putIfAbsent(unresolvedAddress, new ClientPool(numConnectionsPerPeer)); clientPool = connectionPool.get(unresolvedAddress); } int clientIndex = rand.nextInt(numConnectionsPerPeer); TransportClient cachedClient = clientPool.clients[clientIndex]; if (cachedClient != null && cachedClient.isActive()) { // Make sure that the channel will not timeout by updating the last use time of the // handler. Then check that the client is still alive, in case it timed out before // this code was able to update things. TransportChannelHandler handler = cachedClient.getChannel().pipeline() .get(TransportChannelHandler.class); synchronized (handler) { handler.getResponseHandler().updateTimeOfLastRequest(); } if (cachedClient.isActive()) { logger.trace("Returning cached connection to {}: {}", cachedClient.getSocketAddress(), cachedClient); return cachedClient; } } // If we reach here, we don't have an existing connection open. Let's create a new one. // Multiple threads might race here to create new connections. Keep only one of them active. 
final long preResolveHost = System.nanoTime(); final InetSocketAddress resolvedAddress = new InetSocketAddress(remoteHost, remotePort); final long hostResolveTimeMs = (System.nanoTime() - preResolveHost) / 1000000; if (hostResolveTimeMs > 2000) { logger.warn("DNS resolution for {} took {} ms", resolvedAddress, hostResolveTimeMs); } else { logger.trace("DNS resolution for {} took {} ms", resolvedAddress, hostResolveTimeMs); } synchronized (clientPool.locks[clientIndex]) { cachedClient = clientPool.clients[clientIndex]; if (cachedClient != null) { if (cachedClient.isActive()) { logger.trace("Returning cached connection to {}: {}", resolvedAddress, cachedClient); return cachedClient; } else { logger.info("Found inactive connection to {}, creating a new one.", resolvedAddress); } } clientPool.clients[clientIndex] = createClient(resolvedAddress); return clientPool.clients[clientIndex]; } }
[ "public", "TransportClient", "createClient", "(", "String", "remoteHost", ",", "int", "remotePort", ")", "throws", "IOException", ",", "InterruptedException", "{", "// Get connection from the connection pool first.", "// If it is not found or not active, create a new one.", "// Use...
Create a {@link TransportClient} connecting to the given remote host / port. We maintains an array of clients (size determined by spark.shuffle.io.numConnectionsPerPeer) and randomly picks one to use. If no client was previously created in the randomly selected spot, this function creates a new client and places it there. Prior to the creation of a new TransportClient, we will execute all {@link TransportClientBootstrap}s that are registered with this factory. This blocks until a connection is successfully established and fully bootstrapped. Concurrency: This method is safe to call from multiple threads.
[ "Create", "a", "{", "@link", "TransportClient", "}", "connecting", "to", "the", "given", "remote", "host", "/", "port", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java#L135-L195
train
Create a new client from the connection pool.
[ 30522, 2270, 3665, 20464, 11638, 3443, 20464, 11638, 1006, 5164, 6556, 15006, 2102, 1010, 20014, 6556, 6442, 1007, 11618, 22834, 10288, 24422, 1010, 7153, 10288, 24422, 1063, 1013, 1013, 2131, 4434, 2013, 1996, 4434, 4770, 2034, 1012, 1013, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/metainfo/StateMetaInfoSnapshotReadersWriters.java
StateMetaInfoSnapshotReadersWriters.getReader
@Nonnull static StateMetaInfoReader getReader(int readVersion) { switch (readVersion) { case CURRENT_STATE_META_INFO_SNAPSHOT_VERSION: return CurrentReaderImpl.INSTANCE; case 5: return V5ReaderImpl.INSTANCE; default: throw new IllegalArgumentException("Unsupported read version for state meta info: " + readVersion); } }
java
@Nonnull static StateMetaInfoReader getReader(int readVersion) { switch (readVersion) { case CURRENT_STATE_META_INFO_SNAPSHOT_VERSION: return CurrentReaderImpl.INSTANCE; case 5: return V5ReaderImpl.INSTANCE; default: throw new IllegalArgumentException("Unsupported read version for state meta info: " + readVersion); } }
[ "@", "Nonnull", "static", "StateMetaInfoReader", "getReader", "(", "int", "readVersion", ")", "{", "switch", "(", "readVersion", ")", "{", "case", "CURRENT_STATE_META_INFO_SNAPSHOT_VERSION", ":", "return", "CurrentReaderImpl", ".", "INSTANCE", ";", "case", "5", ":",...
Returns a reader for {@link StateMetaInfoSnapshot} with the requested state type and version number. @param readVersion the format version to read. @return the requested reader.
[ "Returns", "a", "reader", "for", "{", "@link", "StateMetaInfoSnapshot", "}", "with", "the", "requested", "state", "type", "and", "version", "number", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/state/metainfo/StateMetaInfoSnapshotReadersWriters.java#L101-L111
train
Get the reader for the given read version.
[ 30522, 1030, 2512, 11231, 3363, 10763, 2110, 11368, 8113, 29278, 13775, 2121, 2131, 16416, 4063, 1006, 20014, 3191, 27774, 1007, 1063, 6942, 1006, 3191, 27774, 1007, 1063, 2553, 2783, 1035, 2110, 1035, 18804, 1035, 18558, 1035, 20057, 12326, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/incubator-shardingsphere
sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/antlr/parser/SQLParserEngine.java
SQLParserEngine.parse
public SQLAST parse() { ParseTree parseTree = SQLParserFactory.newInstance(databaseType, sql).execute().getChild(0); if (parseTree instanceof ErrorNode) { throw new SQLParsingUnsupportedException(String.format("Unsupported SQL of `%s`", sql)); } Optional<SQLStatementRule> sqlStatementRule = parsingRuleRegistry.findSQLStatementRule(databaseType, parseTree.getClass().getSimpleName()); if (sqlStatementRule.isPresent()) { return new SQLAST((ParserRuleContext) parseTree, sqlStatementRule.get()); } if (parsingRuleRegistry instanceof EncryptParsingRuleRegistry) { return new SQLAST((ParserRuleContext) parseTree); } throw new SQLParsingUnsupportedException(String.format("Unsupported SQL of `%s`", sql)); }
java
public SQLAST parse() { ParseTree parseTree = SQLParserFactory.newInstance(databaseType, sql).execute().getChild(0); if (parseTree instanceof ErrorNode) { throw new SQLParsingUnsupportedException(String.format("Unsupported SQL of `%s`", sql)); } Optional<SQLStatementRule> sqlStatementRule = parsingRuleRegistry.findSQLStatementRule(databaseType, parseTree.getClass().getSimpleName()); if (sqlStatementRule.isPresent()) { return new SQLAST((ParserRuleContext) parseTree, sqlStatementRule.get()); } if (parsingRuleRegistry instanceof EncryptParsingRuleRegistry) { return new SQLAST((ParserRuleContext) parseTree); } throw new SQLParsingUnsupportedException(String.format("Unsupported SQL of `%s`", sql)); }
[ "public", "SQLAST", "parse", "(", ")", "{", "ParseTree", "parseTree", "=", "SQLParserFactory", ".", "newInstance", "(", "databaseType", ",", "sql", ")", ".", "execute", "(", ")", ".", "getChild", "(", "0", ")", ";", "if", "(", "parseTree", "instanceof", ...
Parse SQL to abstract syntax tree. @return abstract syntax tree of SQL
[ "Parse", "SQL", "to", "abstract", "syntax", "tree", "." ]
f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d
https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-core/sharding-core-parse/sharding-core-parse-common/src/main/java/org/apache/shardingsphere/core/parse/antlr/parser/SQLParserEngine.java#L50-L63
train
Parse SQL.
[ 30522, 2270, 29296, 14083, 11968, 3366, 1006, 1007, 1063, 11968, 13462, 9910, 11968, 13462, 9910, 1027, 29296, 19362, 8043, 21450, 1012, 2047, 7076, 26897, 1006, 7809, 13874, 1010, 29296, 1007, 1012, 15389, 1006, 1007, 1012, 2131, 19339, 1006...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/api/common/typeinfo/PrimitiveArrayTypeInfo.java
PrimitiveArrayTypeInfo.getInfoFor
@SuppressWarnings("unchecked") @PublicEvolving public static <X> PrimitiveArrayTypeInfo<X> getInfoFor(Class<X> type) { if (!type.isArray()) { throw new InvalidTypesException("The given class is no array."); } // basic type arrays return (PrimitiveArrayTypeInfo<X>) TYPES.get(type); }
java
@SuppressWarnings("unchecked") @PublicEvolving public static <X> PrimitiveArrayTypeInfo<X> getInfoFor(Class<X> type) { if (!type.isArray()) { throw new InvalidTypesException("The given class is no array."); } // basic type arrays return (PrimitiveArrayTypeInfo<X>) TYPES.get(type); }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "@", "PublicEvolving", "public", "static", "<", "X", ">", "PrimitiveArrayTypeInfo", "<", "X", ">", "getInfoFor", "(", "Class", "<", "X", ">", "type", ")", "{", "if", "(", "!", "type", ".", "isArray", "(...
Tries to get the PrimitiveArrayTypeInfo for an array. Returns null, if the type is an array, but the component type is not a primitive type. @param type The class of the array. @return The corresponding PrimitiveArrayTypeInfo, or null, if the array is not an array of primitives. @throws InvalidTypesException Thrown, if the given class does not represent an array.
[ "Tries", "to", "get", "the", "PrimitiveArrayTypeInfo", "for", "an", "array", ".", "Returns", "null", "if", "the", "type", "is", "an", "array", "but", "the", "component", "type", "is", "not", "a", "primitive", "type", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/PrimitiveArrayTypeInfo.java#L201-L210
train
Returns the PrimitiveArrayTypeInfo for the given class.
[ 30522, 1030, 16081, 9028, 5582, 2015, 1006, 1000, 4895, 5403, 18141, 1000, 1007, 1030, 2270, 6777, 4747, 6455, 2270, 10763, 1026, 1060, 1028, 10968, 2906, 9447, 13874, 2378, 14876, 1026, 1060, 1028, 2131, 2378, 14876, 29278, 1006, 2465, 102...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/NetworkEnvironmentConfiguration.java
NetworkEnvironmentConfiguration.calculateNewNetworkBufferMemory
@VisibleForTesting public static long calculateNewNetworkBufferMemory(Configuration config, long maxJvmHeapMemory) { // The maximum heap memory has been adjusted as in TaskManagerServices#calculateHeapSizeMB // and we need to invert these calculations. final long jvmHeapNoNet; final MemoryType memoryType = ConfigurationParserUtils.getMemoryType(config); if (memoryType == MemoryType.HEAP) { jvmHeapNoNet = maxJvmHeapMemory; } else if (memoryType == MemoryType.OFF_HEAP) { long configuredMemory = ConfigurationParserUtils.getManagedMemorySize(config) << 20; // megabytes to bytes if (configuredMemory > 0) { // The maximum heap memory has been adjusted according to configuredMemory, i.e. // maxJvmHeap = jvmHeapNoNet - configuredMemory jvmHeapNoNet = maxJvmHeapMemory + configuredMemory; } else { // The maximum heap memory has been adjusted according to the fraction, i.e. // maxJvmHeap = jvmHeapNoNet - jvmHeapNoNet * managedFraction = jvmHeapNoNet * (1 - managedFraction) jvmHeapNoNet = (long) (maxJvmHeapMemory / (1.0 - ConfigurationParserUtils.getManagedMemoryFraction(config))); } } else { throw new RuntimeException("No supported memory type detected."); } // finally extract the network buffer memory size again from: // jvmHeapNoNet = jvmHeap - networkBufBytes // = jvmHeap - Math.min(networkBufMax, Math.max(networkBufMin, jvmHeap * netFraction) // jvmHeap = jvmHeapNoNet / (1.0 - networkBufFraction) float networkBufFraction = config.getFloat(TaskManagerOptions.NETWORK_BUFFERS_MEMORY_FRACTION); long networkBufSize = (long) (jvmHeapNoNet / (1.0 - networkBufFraction) * networkBufFraction); return calculateNewNetworkBufferMemory(config, networkBufSize, maxJvmHeapMemory); }
java
@VisibleForTesting public static long calculateNewNetworkBufferMemory(Configuration config, long maxJvmHeapMemory) { // The maximum heap memory has been adjusted as in TaskManagerServices#calculateHeapSizeMB // and we need to invert these calculations. final long jvmHeapNoNet; final MemoryType memoryType = ConfigurationParserUtils.getMemoryType(config); if (memoryType == MemoryType.HEAP) { jvmHeapNoNet = maxJvmHeapMemory; } else if (memoryType == MemoryType.OFF_HEAP) { long configuredMemory = ConfigurationParserUtils.getManagedMemorySize(config) << 20; // megabytes to bytes if (configuredMemory > 0) { // The maximum heap memory has been adjusted according to configuredMemory, i.e. // maxJvmHeap = jvmHeapNoNet - configuredMemory jvmHeapNoNet = maxJvmHeapMemory + configuredMemory; } else { // The maximum heap memory has been adjusted according to the fraction, i.e. // maxJvmHeap = jvmHeapNoNet - jvmHeapNoNet * managedFraction = jvmHeapNoNet * (1 - managedFraction) jvmHeapNoNet = (long) (maxJvmHeapMemory / (1.0 - ConfigurationParserUtils.getManagedMemoryFraction(config))); } } else { throw new RuntimeException("No supported memory type detected."); } // finally extract the network buffer memory size again from: // jvmHeapNoNet = jvmHeap - networkBufBytes // = jvmHeap - Math.min(networkBufMax, Math.max(networkBufMin, jvmHeap * netFraction) // jvmHeap = jvmHeapNoNet / (1.0 - networkBufFraction) float networkBufFraction = config.getFloat(TaskManagerOptions.NETWORK_BUFFERS_MEMORY_FRACTION); long networkBufSize = (long) (jvmHeapNoNet / (1.0 - networkBufFraction) * networkBufFraction); return calculateNewNetworkBufferMemory(config, networkBufSize, maxJvmHeapMemory); }
[ "@", "VisibleForTesting", "public", "static", "long", "calculateNewNetworkBufferMemory", "(", "Configuration", "config", ",", "long", "maxJvmHeapMemory", ")", "{", "// The maximum heap memory has been adjusted as in TaskManagerServices#calculateHeapSizeMB", "// and we need to invert th...
Calculates the amount of memory used for network buffers inside the current JVM instance based on the available heap or the max heap size and the according configuration parameters. <p>For containers or when started via scripts, if started with a memory limit and set to use off-heap memory, the maximum heap size for the JVM is adjusted accordingly and we are able to extract the intended values from this. <p>The following configuration parameters are involved: <ul> <li>{@link TaskManagerOptions#MANAGED_MEMORY_SIZE},</li> <li>{@link TaskManagerOptions#MANAGED_MEMORY_FRACTION},</li> <li>{@link TaskManagerOptions#NETWORK_BUFFERS_MEMORY_FRACTION},</li> <li>{@link TaskManagerOptions#NETWORK_BUFFERS_MEMORY_MIN},</li> <li>{@link TaskManagerOptions#NETWORK_BUFFERS_MEMORY_MAX}, and</li> <li>{@link TaskManagerOptions#NETWORK_NUM_BUFFERS} (fallback if the ones above do not exist)</li> </ul>. @param config configuration object @param maxJvmHeapMemory the maximum JVM heap size (in bytes) @return memory to use for network buffers (in bytes)
[ "Calculates", "the", "amount", "of", "memory", "used", "for", "network", "buffers", "inside", "the", "current", "JVM", "instance", "based", "on", "the", "available", "heap", "or", "the", "max", "heap", "size", "and", "the", "according", "configuration", "param...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/NetworkEnvironmentConfiguration.java#L187-L217
train
Calculates the new network buffer memory.
[ 30522, 1030, 5710, 13028, 4355, 2075, 2270, 10763, 2146, 18422, 2638, 7962, 3388, 6198, 8569, 12494, 4168, 5302, 30524, 14905, 1013, 1013, 1998, 2057, 2342, 2000, 1999, 16874, 2122, 16268, 1012, 2345, 2146, 1046, 2615, 2213, 20192, 2361, 85...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/internals/KinesisDataFetcher.java
KinesisDataFetcher.emitWatermark
@VisibleForTesting protected void emitWatermark() { LOG.debug("Evaluating watermark for subtask {} time {}", indexOfThisConsumerSubtask, getCurrentTimeMillis()); long potentialWatermark = Long.MAX_VALUE; long idleTime = (shardIdleIntervalMillis > 0) ? getCurrentTimeMillis() - shardIdleIntervalMillis : Long.MAX_VALUE; for (Map.Entry<Integer, ShardWatermarkState> e : shardWatermarks.entrySet()) { // consider only active shards, or those that would advance the watermark Watermark w = e.getValue().periodicWatermarkAssigner.getCurrentWatermark(); if (w != null && (e.getValue().lastUpdated >= idleTime || w.getTimestamp() > lastWatermark)) { potentialWatermark = Math.min(potentialWatermark, w.getTimestamp()); } } // advance watermark if possible (watermarks can only be ascending) if (potentialWatermark == Long.MAX_VALUE) { if (shardWatermarks.isEmpty() || shardIdleIntervalMillis > 0) { LOG.debug("No active shard for subtask {}, marking the source idle.", indexOfThisConsumerSubtask); // no active shard, signal downstream operators to not wait for a watermark sourceContext.markAsTemporarilyIdle(); } } else if (potentialWatermark > lastWatermark) { LOG.debug("Emitting watermark {} from subtask {}", potentialWatermark, indexOfThisConsumerSubtask); sourceContext.emitWatermark(new Watermark(potentialWatermark)); lastWatermark = potentialWatermark; } }
java
@VisibleForTesting protected void emitWatermark() { LOG.debug("Evaluating watermark for subtask {} time {}", indexOfThisConsumerSubtask, getCurrentTimeMillis()); long potentialWatermark = Long.MAX_VALUE; long idleTime = (shardIdleIntervalMillis > 0) ? getCurrentTimeMillis() - shardIdleIntervalMillis : Long.MAX_VALUE; for (Map.Entry<Integer, ShardWatermarkState> e : shardWatermarks.entrySet()) { // consider only active shards, or those that would advance the watermark Watermark w = e.getValue().periodicWatermarkAssigner.getCurrentWatermark(); if (w != null && (e.getValue().lastUpdated >= idleTime || w.getTimestamp() > lastWatermark)) { potentialWatermark = Math.min(potentialWatermark, w.getTimestamp()); } } // advance watermark if possible (watermarks can only be ascending) if (potentialWatermark == Long.MAX_VALUE) { if (shardWatermarks.isEmpty() || shardIdleIntervalMillis > 0) { LOG.debug("No active shard for subtask {}, marking the source idle.", indexOfThisConsumerSubtask); // no active shard, signal downstream operators to not wait for a watermark sourceContext.markAsTemporarilyIdle(); } } else if (potentialWatermark > lastWatermark) { LOG.debug("Emitting watermark {} from subtask {}", potentialWatermark, indexOfThisConsumerSubtask); sourceContext.emitWatermark(new Watermark(potentialWatermark)); lastWatermark = potentialWatermark; } }
[ "@", "VisibleForTesting", "protected", "void", "emitWatermark", "(", ")", "{", "LOG", ".", "debug", "(", "\"Evaluating watermark for subtask {} time {}\"", ",", "indexOfThisConsumerSubtask", ",", "getCurrentTimeMillis", "(", ")", ")", ";", "long", "potentialWatermark", ...
Called periodically to emit a watermark. Checks all shards for the current event time watermark, and possibly emits the next watermark. <p>Shards that have not received an update for a certain interval are considered inactive so as to not hold back the watermark indefinitely. When all shards are inactive, the subtask will be marked as temporarily idle to not block downstream operators.
[ "Called", "periodically", "to", "emit", "a", "watermark", ".", "Checks", "all", "shards", "for", "the", "current", "event", "time", "watermark", "and", "possibly", "emits", "the", "next", "watermark", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/internals/KinesisDataFetcher.java#L720-L752
train
Emit the watermark.
[ 30522, 1030, 5710, 13028, 4355, 2075, 5123, 11675, 12495, 2102, 5880, 10665, 1006, 1007, 1063, 8833, 1012, 2139, 8569, 2290, 1006, 1000, 23208, 2300, 10665, 2005, 4942, 10230, 2243, 1063, 1065, 2051, 1063, 1065, 1000, 1010, 5950, 15794, 241...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java
NumberUtil.div
public static BigDecimal div(Number v1, Number v2, int scale) { return div(v1, v2, scale, RoundingMode.HALF_UP); }
java
public static BigDecimal div(Number v1, Number v2, int scale) { return div(v1, v2, scale, RoundingMode.HALF_UP); }
[ "public", "static", "BigDecimal", "div", "(", "Number", "v1", ",", "Number", "v2", ",", "int", "scale", ")", "{", "return", "div", "(", "v1", ",", "v2", ",", "scale", ",", "RoundingMode", ".", "HALF_UP", ")", ";", "}" ]
提供(相对)精确的除法运算,当发生除不尽的情况时,由scale指定精确度,后面的四舍五入 @param v1 被除数 @param v2 除数 @param scale 精确度,如果为负值,取绝对值 @return 两个参数的商 @since 3.1.0
[ "提供", "(", "相对", ")", "精确的除法运算", "当发生除不尽的情况时", "由scale指定精确度", "后面的四舍五入" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/NumberUtil.java#L623-L625
train
Divide v1 by v2 with the given scale.
[ 30522, 2270, 10763, 2502, 3207, 6895, 9067, 4487, 2615, 1006, 2193, 1058, 2487, 1010, 2193, 1058, 2475, 1010, 20014, 4094, 1007, 1063, 2709, 4487, 2615, 1006, 1058, 2487, 1010, 1058, 2475, 1010, 4094, 1010, 26939, 5302, 3207, 1012, 2431, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http/src/main/java/io/netty/handler/codec/http/HttpUtil.java
HttpUtil.getCharsetAsSequence
public static CharSequence getCharsetAsSequence(HttpMessage message) { CharSequence contentTypeValue = message.headers().get(HttpHeaderNames.CONTENT_TYPE); if (contentTypeValue != null) { return getCharsetAsSequence(contentTypeValue); } else { return null; } }
java
public static CharSequence getCharsetAsSequence(HttpMessage message) { CharSequence contentTypeValue = message.headers().get(HttpHeaderNames.CONTENT_TYPE); if (contentTypeValue != null) { return getCharsetAsSequence(contentTypeValue); } else { return null; } }
[ "public", "static", "CharSequence", "getCharsetAsSequence", "(", "HttpMessage", "message", ")", "{", "CharSequence", "contentTypeValue", "=", "message", ".", "headers", "(", ")", ".", "get", "(", "HttpHeaderNames", ".", "CONTENT_TYPE", ")", ";", "if", "(", "cont...
Fetch charset from message's Content-Type header as a char sequence. A lot of sites/possibly clients have charset="CHARSET", for example charset="utf-8". Or "utf8" instead of "utf-8" This is not according to standard, but this method provide an ability to catch desired mistakes manually in code @return the {@code CharSequence} with charset from message's Content-Type header or {@code null} if charset is not presented
[ "Fetch", "charset", "from", "message", "s", "Content", "-", "Type", "header", "as", "a", "char", "sequence", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http/src/main/java/io/netty/handler/codec/http/HttpUtil.java#L430-L437
train
Gets the charset as sequence.
[ 30522, 2270, 10763, 25869, 3366, 4226, 5897, 2131, 7507, 22573, 10230, 3366, 4226, 5897, 1006, 8299, 7834, 3736, 3351, 4471, 1007, 1063, 25869, 3366, 4226, 5897, 4180, 13874, 10175, 5657, 1027, 4471, 1012, 20346, 2015, 1006, 1007, 1012, 213...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/pattern/Pattern.java
Pattern.followedByAny
public Pattern<T, T> followedByAny(final String name) { return new Pattern<>(name, this, ConsumingStrategy.SKIP_TILL_ANY, afterMatchSkipStrategy); }
java
public Pattern<T, T> followedByAny(final String name) { return new Pattern<>(name, this, ConsumingStrategy.SKIP_TILL_ANY, afterMatchSkipStrategy); }
[ "public", "Pattern", "<", "T", ",", "T", ">", "followedByAny", "(", "final", "String", "name", ")", "{", "return", "new", "Pattern", "<>", "(", "name", ",", "this", ",", "ConsumingStrategy", ".", "SKIP_TILL_ANY", ",", "afterMatchSkipStrategy", ")", ";", "}...
Appends a new pattern to the existing one. The new pattern enforces non-strict temporal contiguity. This means that a matching event of this pattern and the preceding matching event might be interleaved with other events which are ignored. @param name Name of the new pattern @return A new pattern which is appended to this one
[ "Appends", "a", "new", "pattern", "to", "the", "existing", "one", ".", "The", "new", "pattern", "enforces", "non", "-", "strict", "temporal", "contiguity", ".", "This", "means", "that", "a", "matching", "event", "of", "this", "pattern", "and", "the", "prec...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/pattern/Pattern.java#L321-L323
train
Creates a pattern that matches when the given string is followed by any of the matched elements.
[ 30522, 2270, 5418, 1026, 1056, 1010, 1056, 1028, 2628, 3762, 19092, 1006, 2345, 5164, 2171, 1007, 1063, 2709, 2047, 5418, 1026, 1028, 1006, 2171, 1010, 2023, 1010, 15077, 20528, 2618, 6292, 1012, 13558, 1035, 6229, 1035, 2151, 1010, 2044, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-java/src/main/java/org/apache/flink/api/java/DataSet.java
DataSet.printToErr
@Deprecated @PublicEvolving public DataSink<T> printToErr(String sinkIdentifier) { return output(new PrintingOutputFormat<T>(sinkIdentifier, true)); }
java
@Deprecated @PublicEvolving public DataSink<T> printToErr(String sinkIdentifier) { return output(new PrintingOutputFormat<T>(sinkIdentifier, true)); }
[ "@", "Deprecated", "@", "PublicEvolving", "public", "DataSink", "<", "T", ">", "printToErr", "(", "String", "sinkIdentifier", ")", "{", "return", "output", "(", "new", "PrintingOutputFormat", "<", "T", ">", "(", "sinkIdentifier", ",", "true", ")", ")", ";", ...
Writes a DataSet to the standard error stream (stderr). <p>For each element of the DataSet the result of {@link Object#toString()} is written. @param sinkIdentifier The string to prefix the output with. @return The DataSink that writes the DataSet. @deprecated Use {@link #printOnTaskManager(String)} instead, or the {@link PrintingOutputFormat}.
[ "Writes", "a", "DataSet", "to", "the", "standard", "error", "stream", "(", "stderr", ")", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-java/src/main/java/org/apache/flink/api/java/DataSet.java#L1724-L1728
train
Print to error.
[ 30522, 1030, 2139, 28139, 12921, 1030, 2270, 6777, 4747, 6455, 2270, 2951, 11493, 2243, 1026, 1056, 1028, 6140, 3406, 2121, 2099, 1006, 5164, 7752, 5178, 16778, 8873, 2121, 1007, 1063, 2709, 6434, 1006, 2047, 8021, 5833, 18780, 14192, 4017,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/internals/AbstractFetcher.java
AbstractFetcher.emitRecordWithTimestamp
protected void emitRecordWithTimestamp( T record, KafkaTopicPartitionState<KPH> partitionState, long offset, long timestamp) throws Exception { if (record != null) { if (timestampWatermarkMode == NO_TIMESTAMPS_WATERMARKS) { // fast path logic, in case there are no watermarks generated in the fetcher // emit the record, using the checkpoint lock to guarantee // atomicity of record emission and offset state update synchronized (checkpointLock) { sourceContext.collectWithTimestamp(record, timestamp); partitionState.setOffset(offset); } } else if (timestampWatermarkMode == PERIODIC_WATERMARKS) { emitRecordWithTimestampAndPeriodicWatermark(record, partitionState, offset, timestamp); } else { emitRecordWithTimestampAndPunctuatedWatermark(record, partitionState, offset, timestamp); } } else { // if the record is null, simply just update the offset state for partition synchronized (checkpointLock) { partitionState.setOffset(offset); } } }
java
protected void emitRecordWithTimestamp( T record, KafkaTopicPartitionState<KPH> partitionState, long offset, long timestamp) throws Exception { if (record != null) { if (timestampWatermarkMode == NO_TIMESTAMPS_WATERMARKS) { // fast path logic, in case there are no watermarks generated in the fetcher // emit the record, using the checkpoint lock to guarantee // atomicity of record emission and offset state update synchronized (checkpointLock) { sourceContext.collectWithTimestamp(record, timestamp); partitionState.setOffset(offset); } } else if (timestampWatermarkMode == PERIODIC_WATERMARKS) { emitRecordWithTimestampAndPeriodicWatermark(record, partitionState, offset, timestamp); } else { emitRecordWithTimestampAndPunctuatedWatermark(record, partitionState, offset, timestamp); } } else { // if the record is null, simply just update the offset state for partition synchronized (checkpointLock) { partitionState.setOffset(offset); } } }
[ "protected", "void", "emitRecordWithTimestamp", "(", "T", "record", ",", "KafkaTopicPartitionState", "<", "KPH", ">", "partitionState", ",", "long", "offset", ",", "long", "timestamp", ")", "throws", "Exception", "{", "if", "(", "record", "!=", "null", ")", "{...
Emits a record attaching a timestamp to it. <p>Implementation Note: This method is kept brief to be JIT inlining friendly. That makes the fast path efficient, the extended paths are called as separate methods. @param record The record to emit @param partitionState The state of the Kafka partition from which the record was fetched @param offset The offset of the record
[ "Emits", "a", "record", "attaching", "a", "timestamp", "to", "it", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-connectors/flink-connector-kafka-base/src/main/java/org/apache/flink/streaming/connectors/kafka/internals/AbstractFetcher.java#L388-L412
train
This method is called by the source context to emit a single record with the given timestamp.
[ 30522, 5123, 11675, 12495, 7913, 27108, 2094, 24415, 7292, 9153, 8737, 1006, 1056, 2501, 1010, 10556, 24316, 10610, 24330, 19362, 3775, 9285, 12259, 1026, 1047, 8458, 1028, 13571, 9153, 2618, 1010, 2146, 16396, 1010, 2146, 2335, 15464, 2361, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-cron/src/main/java/cn/hutool/cron/pattern/parser/MonthValueParser.java
MonthValueParser.parseAlias
private int parseAlias(String value) throws CronException { for (int i = 0; i < ALIASES.length; i++) { if (ALIASES[i].equalsIgnoreCase(value)) { return i + 1; } } throw new CronException("Invalid month alias: {}", value); }
java
private int parseAlias(String value) throws CronException { for (int i = 0; i < ALIASES.length; i++) { if (ALIASES[i].equalsIgnoreCase(value)) { return i + 1; } } throw new CronException("Invalid month alias: {}", value); }
[ "private", "int", "parseAlias", "(", "String", "value", ")", "throws", "CronException", "{", "for", "(", "int", "i", "=", "0", ";", "i", "<", "ALIASES", ".", "length", ";", "i", "++", ")", "{", "if", "(", "ALIASES", "[", "i", "]", ".", "equalsIgnor...
解析别名 @param value 别名值 @return 月份int值 @throws CronException
[ "解析别名" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-cron/src/main/java/cn/hutool/cron/pattern/parser/MonthValueParser.java#L35-L42
train
Parses an alias.
[ 30522, 2797, 20014, 11968, 17310, 6632, 2015, 1006, 5164, 3643, 30524, 1007, 1063, 2709, 1045, 1009, 1015, 1025, 1065, 1065, 5466, 2047, 13675, 5643, 2595, 24422, 1006, 1000, 19528, 3204, 14593, 1024, 1063, 1065, 1000, 1010, 3643, 1007, 102...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/BroadcastConnectedStream.java
BroadcastConnectedStream.process
@PublicEvolving public <OUT> SingleOutputStreamOperator<OUT> process( final BroadcastProcessFunction<IN1, IN2, OUT> function, final TypeInformation<OUT> outTypeInfo) { Preconditions.checkNotNull(function); Preconditions.checkArgument(!(inputStream1 instanceof KeyedStream), "A BroadcastProcessFunction can only be used on a non-keyed stream."); TwoInputStreamOperator<IN1, IN2, OUT> operator = new CoBroadcastWithNonKeyedOperator<>(clean(function), broadcastStateDescriptors); return transform("Co-Process-Broadcast", outTypeInfo, operator); }
java
@PublicEvolving public <OUT> SingleOutputStreamOperator<OUT> process( final BroadcastProcessFunction<IN1, IN2, OUT> function, final TypeInformation<OUT> outTypeInfo) { Preconditions.checkNotNull(function); Preconditions.checkArgument(!(inputStream1 instanceof KeyedStream), "A BroadcastProcessFunction can only be used on a non-keyed stream."); TwoInputStreamOperator<IN1, IN2, OUT> operator = new CoBroadcastWithNonKeyedOperator<>(clean(function), broadcastStateDescriptors); return transform("Co-Process-Broadcast", outTypeInfo, operator); }
[ "@", "PublicEvolving", "public", "<", "OUT", ">", "SingleOutputStreamOperator", "<", "OUT", ">", "process", "(", "final", "BroadcastProcessFunction", "<", "IN1", ",", "IN2", ",", "OUT", ">", "function", ",", "final", "TypeInformation", "<", "OUT", ">", "outTyp...
Assumes as inputs a {@link BroadcastStream} and a non-keyed {@link DataStream} and applies the given {@link BroadcastProcessFunction} on them, thereby creating a transformed output stream. @param function The {@link BroadcastProcessFunction} that is called for each element in the stream. @param outTypeInfo The type of the output elements. @param <OUT> The type of the output elements. @return The transformed {@link DataStream}.
[ "Assumes", "as", "inputs", "a", "{", "@link", "BroadcastStream", "}", "and", "a", "non", "-", "keyed", "{", "@link", "DataStream", "}", "and", "applies", "the", "given", "{", "@link", "BroadcastProcessFunction", "}", "on", "them", "thereby", "creating", "a",...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/BroadcastConnectedStream.java#L201-L213
train
Processes a single - output stream using a broadcast process function.
[ 30522, 1030, 2270, 6777, 4747, 6455, 2270, 1026, 2041, 1028, 2309, 5833, 18780, 21422, 25918, 8844, 1026, 2041, 1028, 2832, 1006, 2345, 3743, 21572, 9623, 22747, 4609, 7542, 1026, 1999, 2487, 1010, 1999, 2475, 1010, 2041, 1028, 3853, 1010, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-extra/src/main/java/cn/hutool/extra/template/engine/thymeleaf/ThymeleafEngine.java
ThymeleafEngine.getTemplate
@Override public Template getTemplate(String resource) { return ThymeleafTemplate.wrap(this.engine, resource, (null == this.config) ? null : this.config.getCharset()); }
java
@Override public Template getTemplate(String resource) { return ThymeleafTemplate.wrap(this.engine, resource, (null == this.config) ? null : this.config.getCharset()); }
[ "@", "Override", "public", "Template", "getTemplate", "(", "String", "resource", ")", "{", "return", "ThymeleafTemplate", ".", "wrap", "(", "this", ".", "engine", ",", "resource", ",", "(", "null", "==", "this", ".", "config", ")", "?", "null", ":", "thi...
--------------------------------------------------------------------------------- Constructor end
[ "---------------------------------------------------------------------------------", "Constructor", "end" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-extra/src/main/java/cn/hutool/extra/template/engine/thymeleaf/ThymeleafEngine.java#L55-L58
train
Returns a Thymeleaf Template object for the given resource.
[ 30522, 1030, 2058, 15637, 2270, 23561, 2131, 18532, 15725, 1006, 5164, 7692, 1007, 1063, 2709, 15177, 10199, 5243, 6199, 6633, 15725, 1012, 10236, 1006, 2023, 1012, 3194, 1010, 7692, 1010, 1006, 19701, 1027, 1027, 2023, 1012, 9530, 8873, 22...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java
StreamExecutionEnvironment.addSource
@SuppressWarnings("unchecked") public <OUT> DataStreamSource<OUT> addSource(SourceFunction<OUT> function, String sourceName, TypeInformation<OUT> typeInfo) { if (typeInfo == null) { if (function instanceof ResultTypeQueryable) { typeInfo = ((ResultTypeQueryable<OUT>) function).getProducedType(); } else { try { typeInfo = TypeExtractor.createTypeInfo( SourceFunction.class, function.getClass(), 0, null, null); } catch (final InvalidTypesException e) { typeInfo = (TypeInformation<OUT>) new MissingTypeInfo(sourceName, e); } } } boolean isParallel = function instanceof ParallelSourceFunction; clean(function); final StreamSource<OUT, ?> sourceOperator = new StreamSource<>(function); return new DataStreamSource<>(this, typeInfo, sourceOperator, isParallel, sourceName); }
java
@SuppressWarnings("unchecked") public <OUT> DataStreamSource<OUT> addSource(SourceFunction<OUT> function, String sourceName, TypeInformation<OUT> typeInfo) { if (typeInfo == null) { if (function instanceof ResultTypeQueryable) { typeInfo = ((ResultTypeQueryable<OUT>) function).getProducedType(); } else { try { typeInfo = TypeExtractor.createTypeInfo( SourceFunction.class, function.getClass(), 0, null, null); } catch (final InvalidTypesException e) { typeInfo = (TypeInformation<OUT>) new MissingTypeInfo(sourceName, e); } } } boolean isParallel = function instanceof ParallelSourceFunction; clean(function); final StreamSource<OUT, ?> sourceOperator = new StreamSource<>(function); return new DataStreamSource<>(this, typeInfo, sourceOperator, isParallel, sourceName); }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "public", "<", "OUT", ">", "DataStreamSource", "<", "OUT", ">", "addSource", "(", "SourceFunction", "<", "OUT", ">", "function", ",", "String", "sourceName", ",", "TypeInformation", "<", "OUT", ">", "typeInfo...
Ads a data source with a custom type information thus opening a {@link DataStream}. Only in very special cases does the user need to support type information. Otherwise use {@link #addSource(org.apache.flink.streaming.api.functions.source.SourceFunction)} @param function the user defined function @param sourceName Name of the data source @param <OUT> type of the returned stream @param typeInfo the user defined type information for the stream @return the data stream constructed
[ "Ads", "a", "data", "source", "with", "a", "custom", "type", "information", "thus", "opening", "a", "{", "@link", "DataStream", "}", ".", "Only", "in", "very", "special", "cases", "does", "the", "user", "need", "to", "support", "type", "information", ".", ...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java#L1449-L1472
train
Add a source to the data stream.
[ 30522, 1030, 16081, 9028, 5582, 2015, 1006, 1000, 4895, 5403, 18141, 1000, 1007, 2270, 1026, 2041, 1028, 2951, 21422, 6499, 3126, 3401, 1026, 2041, 1028, 9909, 8162, 3401, 1006, 3120, 11263, 27989, 1026, 2041, 1028, 3853, 1010, 5164, 3120, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/XmlUtil.java
XmlUtil.getNodeByXPath
public static Node getNodeByXPath(String expression, Object source) { return (Node) getByXPath(expression, source, XPathConstants.NODE); }
java
public static Node getNodeByXPath(String expression, Object source) { return (Node) getByXPath(expression, source, XPathConstants.NODE); }
[ "public", "static", "Node", "getNodeByXPath", "(", "String", "expression", ",", "Object", "source", ")", "{", "return", "(", "Node", ")", "getByXPath", "(", "expression", ",", "source", ",", "XPathConstants", ".", "NODE", ")", ";", "}" ]
通过XPath方式读取XML节点等信息<br> Xpath相关文章:https://www.ibm.com/developerworks/cn/xml/x-javaxpathapi.html @param expression XPath表达式 @param source 资源,可以是Docunent、Node节点等 @return 匹配返回类型的值 @since 4.0.9
[ "通过XPath方式读取XML节点等信息<br", ">", "Xpath相关文章:https", ":", "//", "www", ".", "ibm", ".", "com", "/", "developerworks", "/", "cn", "/", "xml", "/", "x", "-", "javaxpathapi", ".", "html" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/XmlUtil.java#L598-L600
train
Get a Node object by XPath expression.
[ 30522, 2270, 10763, 13045, 2131, 3630, 3207, 3762, 2595, 15069, 1006, 5164, 3670, 1010, 4874, 3120, 1007, 1063, 2709, 1006, 13045, 1007, 2131, 3762, 2595, 15069, 1006, 3670, 1010, 3120, 1010, 26726, 8988, 8663, 12693, 3215, 1012, 13045, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/internal/NativeLibraryLoader.java
NativeLibraryLoader.loadLibrary
private static void loadLibrary(final ClassLoader loader, final String name, final boolean absolute) { Throwable suppressed = null; try { try { // Make sure the helper is belong to the target ClassLoader. final Class<?> newHelper = tryToLoadClass(loader, NativeLibraryUtil.class); loadLibraryByHelper(newHelper, name, absolute); logger.debug("Successfully loaded the library {}", name); return; } catch (UnsatisfiedLinkError e) { // Should by pass the UnsatisfiedLinkError here! suppressed = e; logger.debug("Unable to load the library '{}', trying other loading mechanism.", name, e); } catch (Exception e) { suppressed = e; logger.debug("Unable to load the library '{}', trying other loading mechanism.", name, e); } NativeLibraryUtil.loadLibrary(name, absolute); // Fallback to local helper class. logger.debug("Successfully loaded the library {}", name); } catch (UnsatisfiedLinkError ule) { if (suppressed != null) { ThrowableUtil.addSuppressed(ule, suppressed); } throw ule; } }
java
private static void loadLibrary(final ClassLoader loader, final String name, final boolean absolute) { Throwable suppressed = null; try { try { // Make sure the helper is belong to the target ClassLoader. final Class<?> newHelper = tryToLoadClass(loader, NativeLibraryUtil.class); loadLibraryByHelper(newHelper, name, absolute); logger.debug("Successfully loaded the library {}", name); return; } catch (UnsatisfiedLinkError e) { // Should by pass the UnsatisfiedLinkError here! suppressed = e; logger.debug("Unable to load the library '{}', trying other loading mechanism.", name, e); } catch (Exception e) { suppressed = e; logger.debug("Unable to load the library '{}', trying other loading mechanism.", name, e); } NativeLibraryUtil.loadLibrary(name, absolute); // Fallback to local helper class. logger.debug("Successfully loaded the library {}", name); } catch (UnsatisfiedLinkError ule) { if (suppressed != null) { ThrowableUtil.addSuppressed(ule, suppressed); } throw ule; } }
[ "private", "static", "void", "loadLibrary", "(", "final", "ClassLoader", "loader", ",", "final", "String", "name", ",", "final", "boolean", "absolute", ")", "{", "Throwable", "suppressed", "=", "null", ";", "try", "{", "try", "{", "// Make sure the helper is bel...
Loading the native library into the specified {@link ClassLoader}. @param loader - The {@link ClassLoader} where the native library will be loaded into @param name - The native library path or name @param absolute - Whether the native library will be loaded by path or by name
[ "Loading", "the", "native", "library", "into", "the", "specified", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/internal/NativeLibraryLoader.java#L333-L357
train
Load the library.
[ 30522, 2797, 10763, 11675, 7170, 29521, 19848, 2100, 1006, 2345, 2465, 11066, 2121, 7170, 2121, 1010, 2345, 5164, 2171, 1010, 2345, 22017, 20898, 7619, 1007, 1063, 5466, 3085, 13712, 1027, 19701, 1025, 3046, 1063, 3046, 1063, 1013, 1013, 21...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
transport-native-epoll/src/main/java/io/netty/channel/epoll/EpollServerSocketChannelConfig.java
EpollServerSocketChannelConfig.setIpTransparent
public EpollServerSocketChannelConfig setIpTransparent(boolean transparent) { try { ((EpollServerSocketChannel) channel).socket.setIpTransparent(transparent); return this; } catch (IOException e) { throw new ChannelException(e); } }
java
public EpollServerSocketChannelConfig setIpTransparent(boolean transparent) { try { ((EpollServerSocketChannel) channel).socket.setIpTransparent(transparent); return this; } catch (IOException e) { throw new ChannelException(e); } }
[ "public", "EpollServerSocketChannelConfig", "setIpTransparent", "(", "boolean", "transparent", ")", "{", "try", "{", "(", "(", "EpollServerSocketChannel", ")", "channel", ")", ".", "socket", ".", "setIpTransparent", "(", "transparent", ")", ";", "return", "this", ...
If {@code true} is used <a href="http://man7.org/linux/man-pages/man7/ip.7.html">IP_TRANSPARENT</a> is enabled, {@code false} for disable it. Default is disabled.
[ "If", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/transport-native-epoll/src/main/java/io/netty/channel/epoll/EpollServerSocketChannelConfig.java#L257-L264
train
Sets the transparent mode of the socket.
[ 30522, 2270, 4958, 14511, 8043, 14028, 7432, 3388, 26058, 8663, 8873, 2290, 2275, 11514, 6494, 3619, 19362, 4765, 1006, 22017, 20898, 13338, 1007, 1063, 3046, 1063, 1006, 1006, 4958, 14511, 8043, 14028, 7432, 3388, 26058, 1007, 3149, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java
ArrayUtil.toArray
public static byte[] toArray(ByteBuffer bytebuffer) { if (false == bytebuffer.hasArray()) { int oldPosition = bytebuffer.position(); bytebuffer.position(0); int size = bytebuffer.limit(); byte[] buffers = new byte[size]; bytebuffer.get(buffers); bytebuffer.position(oldPosition); return buffers; } else { return Arrays.copyOfRange(bytebuffer.array(), bytebuffer.position(), bytebuffer.limit()); } }
java
public static byte[] toArray(ByteBuffer bytebuffer) { if (false == bytebuffer.hasArray()) { int oldPosition = bytebuffer.position(); bytebuffer.position(0); int size = bytebuffer.limit(); byte[] buffers = new byte[size]; bytebuffer.get(buffers); bytebuffer.position(oldPosition); return buffers; } else { return Arrays.copyOfRange(bytebuffer.array(), bytebuffer.position(), bytebuffer.limit()); } }
[ "public", "static", "byte", "[", "]", "toArray", "(", "ByteBuffer", "bytebuffer", ")", "{", "if", "(", "false", "==", "bytebuffer", ".", "hasArray", "(", ")", ")", "{", "int", "oldPosition", "=", "bytebuffer", ".", "position", "(", ")", ";", "bytebuffer"...
{@link ByteBuffer} 转byte数组 @param bytebuffer {@link ByteBuffer} @return byte数组 @since 3.0.1
[ "{", "@link", "ByteBuffer", "}", "转byte数组" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/ArrayUtil.java#L2580-L2592
train
Returns a byte array from a ByteBuffer.
[ 30522, 2270, 10763, 24880, 1031, 1033, 2000, 2906, 9447, 1006, 24880, 8569, 12494, 24880, 8569, 12494, 1007, 1063, 2065, 1006, 6270, 1027, 1027, 24880, 8569, 12494, 1012, 2038, 2906, 9447, 1006, 1007, 1007, 1063, 20014, 2214, 26994, 1027, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
hankcs/HanLP
src/main/java/com/hankcs/hanlp/corpus/dictionary/StringDictionaryMaker.java
StringDictionaryMaker.load
public static StringDictionary load(String path, String separator) { StringDictionary dictionary = new StringDictionary(separator); if (dictionary.load(path)) return dictionary; return null; }
java
public static StringDictionary load(String path, String separator) { StringDictionary dictionary = new StringDictionary(separator); if (dictionary.load(path)) return dictionary; return null; }
[ "public", "static", "StringDictionary", "load", "(", "String", "path", ",", "String", "separator", ")", "{", "StringDictionary", "dictionary", "=", "new", "StringDictionary", "(", "separator", ")", ";", "if", "(", "dictionary", ".", "load", "(", "path", ")", ...
加载词典 @param path @param separator @return
[ "加载词典" ]
a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce
https://github.com/hankcs/HanLP/blob/a538d0722ab2e4980a9dcd9ea40324fc3ddba7ce/src/main/java/com/hankcs/hanlp/corpus/dictionary/StringDictionaryMaker.java#L29-L34
train
Load a StringDictionary from a file path.
[ 30522, 2270, 10763, 5164, 29201, 3258, 5649, 7170, 1006, 5164, 4130, 1010, 5164, 19802, 25879, 2953, 1007, 1063, 5164, 29201, 3258, 5649, 9206, 1027, 2047, 5164, 29201, 3258, 5649, 1006, 19802, 25879, 2953, 1007, 1025, 2065, 1006, 9206, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
spring-projects/spring-boot
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/servlet/context/XmlServletWebServerApplicationContext.java
XmlServletWebServerApplicationContext.load
public final void load(Class<?> relativeClass, String... resourceNames) { Resource[] resources = new Resource[resourceNames.length]; for (int i = 0; i < resourceNames.length; i++) { resources[i] = new ClassPathResource(resourceNames[i], relativeClass); } this.reader.loadBeanDefinitions(resources); }
java
public final void load(Class<?> relativeClass, String... resourceNames) { Resource[] resources = new Resource[resourceNames.length]; for (int i = 0; i < resourceNames.length; i++) { resources[i] = new ClassPathResource(resourceNames[i], relativeClass); } this.reader.loadBeanDefinitions(resources); }
[ "public", "final", "void", "load", "(", "Class", "<", "?", ">", "relativeClass", ",", "String", "...", "resourceNames", ")", "{", "Resource", "[", "]", "resources", "=", "new", "Resource", "[", "resourceNames", ".", "length", "]", ";", "for", "(", "int",...
Load bean definitions from the given XML resources. @param relativeClass class whose package will be used as a prefix when loading each specified resource name @param resourceNames relatively-qualified names of resources to load
[ "Load", "bean", "definitions", "from", "the", "given", "XML", "resources", "." ]
0b27f7c70e164b2b1a96477f1d9c1acba56790c1
https://github.com/spring-projects/spring-boot/blob/0b27f7c70e164b2b1a96477f1d9c1acba56790c1/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/servlet/context/XmlServletWebServerApplicationContext.java#L130-L136
train
Load the beans from the given class.
[ 30522, 2270, 2345, 11675, 7170, 1006, 2465, 1026, 1029, 1028, 5816, 26266, 1010, 5164, 1012, 1012, 1012, 7692, 18442, 2015, 1007, 1063, 7692, 1031, 1033, 4219, 1027, 2047, 7692, 1031, 7692, 18442, 2015, 1012, 3091, 1033, 1025, 2005, 1006, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/util/CharsetUtil.java
CharsetUtil.convert
public static String convert(String source, Charset srcCharset, Charset destCharset) { if(null == srcCharset) { srcCharset = StandardCharsets.ISO_8859_1; } if(null == destCharset) { destCharset = StandardCharsets.UTF_8; } if (StrUtil.isBlank(source) || srcCharset.equals(destCharset)) { return source; } return new String(source.getBytes(srcCharset), destCharset); }
java
public static String convert(String source, Charset srcCharset, Charset destCharset) { if(null == srcCharset) { srcCharset = StandardCharsets.ISO_8859_1; } if(null == destCharset) { destCharset = StandardCharsets.UTF_8; } if (StrUtil.isBlank(source) || srcCharset.equals(destCharset)) { return source; } return new String(source.getBytes(srcCharset), destCharset); }
[ "public", "static", "String", "convert", "(", "String", "source", ",", "Charset", "srcCharset", ",", "Charset", "destCharset", ")", "{", "if", "(", "null", "==", "srcCharset", ")", "{", "srcCharset", "=", "StandardCharsets", ".", "ISO_8859_1", ";", "}", "if"...
转换字符串的字符集编码<br> 当以错误的编码读取为字符串时,打印字符串将出现乱码。<br> 此方法用于纠正因读取使用编码错误导致的乱码问题。<br> 例如,在Servlet请求中客户端用GBK编码了请求参数,我们使用UTF-8读取到的是乱码,此时,使用此方法即可还原原编码的内容 <pre> 客户端 -》 GBK编码 -》 Servlet容器 -》 UTF-8解码 -》 乱码 乱码 -》 UTF-8编码 -》 GBK解码 -》 正确内容 </pre> @param source 字符串 @param srcCharset 源字符集,默认ISO-8859-1 @param destCharset 目标字符集,默认UTF-8 @return 转换后的字符集
[ "转换字符串的字符集编码<br", ">", "当以错误的编码读取为字符串时,打印字符串将出现乱码。<br", ">", "此方法用于纠正因读取使用编码错误导致的乱码问题。<br", ">", "例如,在Servlet请求中客户端用GBK编码了请求参数,我们使用UTF", "-", "8读取到的是乱码,此时,使用此方法即可还原原编码的内容", "<pre", ">", "客户端", "-", "》", "GBK编码", "-", "》", "Servlet容器", "-", "》", "UTF", "-", "8解码", "-", ...
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/util/CharsetUtil.java#L67-L80
train
Converts the given source string to the destination string using the specified charset.
[ 30522, 2270, 10763, 5164, 10463, 1006, 5164, 3120, 1010, 25869, 13462, 5034, 9468, 8167, 13462, 1010, 25869, 13462, 4078, 10649, 11650, 3388, 1007, 1063, 2065, 1006, 19701, 1027, 1027, 5034, 9468, 8167, 13462, 1007, 1063, 5034, 9468, 8167, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java
HbaseSyncService.update
private void update(MappingConfig config, Dml dml) { List<Map<String, Object>> data = dml.getData(); List<Map<String, Object>> old = dml.getOld(); if (old == null || old.isEmpty() || data == null || data.isEmpty()) { return; } MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping(); // if (!validHTable(config)) { // logger.error("HBase table '{}' not exists", // hbaseMapping.getHbaseTable()); // return; // } MappingConfig.ColumnItem rowKeyColumn = hbaseMapping.getRowKeyColumn(); int index = 0; int i = 1; boolean complete = false; List<HRow> rows = new ArrayList<>(); out: for (Map<String, Object> r : data) { byte[] rowKeyBytes; if (hbaseMapping.getRowKey() != null) { String[] rowKeyColumns = hbaseMapping.getRowKey().trim().split(","); // 判断是否有复合主键修改 for (String updateColumn : old.get(index).keySet()) { for (String rowKeyColumnName : rowKeyColumns) { if (rowKeyColumnName.equalsIgnoreCase(updateColumn)) { // 调用删除插入操作 deleteAndInsert(config, dml); continue out; } } } String rowKeyVale = getRowKeys(rowKeyColumns, r); rowKeyBytes = Bytes.toBytes(rowKeyVale); } else if (rowKeyColumn == null) { Map<String, Object> rowKey = data.get(0); rowKeyBytes = typeConvert(null, hbaseMapping, rowKey.values().iterator().next()); } else { rowKeyBytes = getRowKeyBytes(hbaseMapping, rowKeyColumn, r); } if (rowKeyBytes == null) throw new RuntimeException("rowKey值为空"); Map<String, MappingConfig.ColumnItem> columnItems = hbaseMapping.getColumnItems(); HRow hRow = new HRow(rowKeyBytes); for (String updateColumn : old.get(index).keySet()) { if (hbaseMapping.getExcludeColumns() != null && hbaseMapping.getExcludeColumns().contains(updateColumn)) { continue; } MappingConfig.ColumnItem columnItem = columnItems.get(updateColumn); if (columnItem == null) { String family = hbaseMapping.getFamily(); String qualifier = updateColumn; if (hbaseMapping.isUppercaseQualifier()) { qualifier = qualifier.toUpperCase(); } Object newVal = r.get(updateColumn); if (newVal == null) { hRow.addCell(family, 
qualifier, null); } else { hRow.addCell(family, qualifier, typeConvert(null, hbaseMapping, newVal)); } } else { // 排除修改id的情况 if (columnItem.isRowKey()) continue; Object newVal = r.get(updateColumn); if (newVal == null) { hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), null); } else { hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), typeConvert(columnItem, hbaseMapping, newVal)); } } } rows.add(hRow); complete = false; if (i % config.getHbaseMapping().getCommitBatch() == 0 && !rows.isEmpty()) { hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows); rows.clear(); complete = true; } i++; index++; } if (!complete && !rows.isEmpty()) { hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows); } }
java
private void update(MappingConfig config, Dml dml) { List<Map<String, Object>> data = dml.getData(); List<Map<String, Object>> old = dml.getOld(); if (old == null || old.isEmpty() || data == null || data.isEmpty()) { return; } MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping(); // if (!validHTable(config)) { // logger.error("HBase table '{}' not exists", // hbaseMapping.getHbaseTable()); // return; // } MappingConfig.ColumnItem rowKeyColumn = hbaseMapping.getRowKeyColumn(); int index = 0; int i = 1; boolean complete = false; List<HRow> rows = new ArrayList<>(); out: for (Map<String, Object> r : data) { byte[] rowKeyBytes; if (hbaseMapping.getRowKey() != null) { String[] rowKeyColumns = hbaseMapping.getRowKey().trim().split(","); // 判断是否有复合主键修改 for (String updateColumn : old.get(index).keySet()) { for (String rowKeyColumnName : rowKeyColumns) { if (rowKeyColumnName.equalsIgnoreCase(updateColumn)) { // 调用删除插入操作 deleteAndInsert(config, dml); continue out; } } } String rowKeyVale = getRowKeys(rowKeyColumns, r); rowKeyBytes = Bytes.toBytes(rowKeyVale); } else if (rowKeyColumn == null) { Map<String, Object> rowKey = data.get(0); rowKeyBytes = typeConvert(null, hbaseMapping, rowKey.values().iterator().next()); } else { rowKeyBytes = getRowKeyBytes(hbaseMapping, rowKeyColumn, r); } if (rowKeyBytes == null) throw new RuntimeException("rowKey值为空"); Map<String, MappingConfig.ColumnItem> columnItems = hbaseMapping.getColumnItems(); HRow hRow = new HRow(rowKeyBytes); for (String updateColumn : old.get(index).keySet()) { if (hbaseMapping.getExcludeColumns() != null && hbaseMapping.getExcludeColumns().contains(updateColumn)) { continue; } MappingConfig.ColumnItem columnItem = columnItems.get(updateColumn); if (columnItem == null) { String family = hbaseMapping.getFamily(); String qualifier = updateColumn; if (hbaseMapping.isUppercaseQualifier()) { qualifier = qualifier.toUpperCase(); } Object newVal = r.get(updateColumn); if (newVal == null) { hRow.addCell(family, 
qualifier, null); } else { hRow.addCell(family, qualifier, typeConvert(null, hbaseMapping, newVal)); } } else { // 排除修改id的情况 if (columnItem.isRowKey()) continue; Object newVal = r.get(updateColumn); if (newVal == null) { hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), null); } else { hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), typeConvert(columnItem, hbaseMapping, newVal)); } } } rows.add(hRow); complete = false; if (i % config.getHbaseMapping().getCommitBatch() == 0 && !rows.isEmpty()) { hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows); rows.clear(); complete = true; } i++; index++; } if (!complete && !rows.isEmpty()) { hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows); } }
[ "private", "void", "update", "(", "MappingConfig", "config", ",", "Dml", "dml", ")", "{", "List", "<", "Map", "<", "String", ",", "Object", ">", ">", "data", "=", "dml", ".", "getData", "(", ")", ";", "List", "<", "Map", "<", "String", ",", "Object...
更新操作 @param config 配置对象 @param dml dml对象
[ "更新操作" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java#L163-L259
train
Update DML.
[ 30522, 2797, 11675, 10651, 1006, 12375, 8663, 8873, 2290, 9530, 8873, 2290, 1010, 1040, 19968, 1040, 19968, 30524, 2696, 1006, 1007, 1025, 2862, 1026, 4949, 1026, 5164, 1010, 4874, 1028, 1028, 2214, 1027, 1040, 19968, 1012, 2131, 11614, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/metrics/scope/ScopeFormat.java
ScopeFormat.concat
public static String concat(CharacterFilter filter, Character delimiter, String... components) { StringBuilder sb = new StringBuilder(); sb.append(filter.filterCharacters(components[0])); for (int x = 1; x < components.length; x++) { sb.append(delimiter); sb.append(filter.filterCharacters(components[x])); } return sb.toString(); }
java
public static String concat(CharacterFilter filter, Character delimiter, String... components) { StringBuilder sb = new StringBuilder(); sb.append(filter.filterCharacters(components[0])); for (int x = 1; x < components.length; x++) { sb.append(delimiter); sb.append(filter.filterCharacters(components[x])); } return sb.toString(); }
[ "public", "static", "String", "concat", "(", "CharacterFilter", "filter", ",", "Character", "delimiter", ",", "String", "...", "components", ")", "{", "StringBuilder", "sb", "=", "new", "StringBuilder", "(", ")", ";", "sb", ".", "append", "(", "filter", ".",...
Concatenates the given component names separated by the delimiter character. Additionally the character filter is applied to all component names. @param filter Character filter to be applied to the component names @param delimiter Delimiter to separate component names @param components Array of component names @return The concatenated component name
[ "Concatenates", "the", "given", "component", "names", "separated", "by", "the", "delimiter", "character", ".", "Additionally", "the", "character", "filter", "is", "applied", "to", "all", "component", "names", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/metrics/scope/ScopeFormat.java#L228-L236
train
Concatenates the given components using the given character filter and delimiter.
[ 30522, 2270, 10763, 5164, 9530, 11266, 1006, 2839, 8873, 21928, 11307, 1010, 2839, 3972, 27605, 3334, 1010, 5164, 1012, 1012, 1012, 6177, 1007, 1063, 5164, 8569, 23891, 2099, 24829, 1027, 2047, 5164, 8569, 23891, 2099, 1006, 1007, 1025, 248...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
handler/src/main/java/io/netty/handler/ssl/SslContext.java
SslContext.newServerContext
@Deprecated public static SslContext newServerContext( SslProvider provider, File certChainFile, File keyFile, String keyPassword, Iterable<String> ciphers, Iterable<String> nextProtocols, long sessionCacheSize, long sessionTimeout) throws SSLException { return newServerContext(provider, certChainFile, keyFile, keyPassword, ciphers, IdentityCipherSuiteFilter.INSTANCE, toApplicationProtocolConfig(nextProtocols), sessionCacheSize, sessionTimeout); }
java
@Deprecated public static SslContext newServerContext( SslProvider provider, File certChainFile, File keyFile, String keyPassword, Iterable<String> ciphers, Iterable<String> nextProtocols, long sessionCacheSize, long sessionTimeout) throws SSLException { return newServerContext(provider, certChainFile, keyFile, keyPassword, ciphers, IdentityCipherSuiteFilter.INSTANCE, toApplicationProtocolConfig(nextProtocols), sessionCacheSize, sessionTimeout); }
[ "@", "Deprecated", "public", "static", "SslContext", "newServerContext", "(", "SslProvider", "provider", ",", "File", "certChainFile", ",", "File", "keyFile", ",", "String", "keyPassword", ",", "Iterable", "<", "String", ">", "ciphers", ",", "Iterable", "<", "St...
Creates a new server-side {@link SslContext}. @param provider the {@link SslContext} implementation to use. {@code null} to use the current default one. @param certChainFile an X.509 certificate chain file in PEM format @param keyFile a PKCS#8 private key file in PEM format @param keyPassword the password of the {@code keyFile}. {@code null} if it's not password-protected. @param ciphers the cipher suites to enable, in the order of preference. {@code null} to use the default cipher suites. @param nextProtocols the application layer protocols to accept, in the order of preference. {@code null} to disable TLS NPN/ALPN extension. @param sessionCacheSize the size of the cache used for storing SSL session objects. {@code 0} to use the default value. @param sessionTimeout the timeout for the cached SSL session objects, in seconds. {@code 0} to use the default value. @return a new server-side {@link SslContext} @deprecated Replaced by {@link SslContextBuilder}
[ "Creates", "a", "new", "server", "-", "side", "{", "@link", "SslContext", "}", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/ssl/SslContext.java#L268-L277
train
Create a new server context using the specified provider.
[ 30522, 1030, 2139, 28139, 12921, 2270, 10763, 7020, 22499, 10111, 18413, 2739, 2121, 6299, 8663, 18209, 1006, 7020, 14277, 12298, 18688, 10802, 1010, 5371, 8292, 5339, 24925, 2078, 8873, 2571, 1010, 5371, 3145, 8873, 2571, 1010, 5164, 3145, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
buffer/src/main/java/io/netty/buffer/ByteBufUtil.java
ByteBufUtil.threadLocalDirectBuffer
public static ByteBuf threadLocalDirectBuffer() { if (THREAD_LOCAL_BUFFER_SIZE <= 0) { return null; } if (PlatformDependent.hasUnsafe()) { return ThreadLocalUnsafeDirectByteBuf.newInstance(); } else { return ThreadLocalDirectByteBuf.newInstance(); } }
java
public static ByteBuf threadLocalDirectBuffer() { if (THREAD_LOCAL_BUFFER_SIZE <= 0) { return null; } if (PlatformDependent.hasUnsafe()) { return ThreadLocalUnsafeDirectByteBuf.newInstance(); } else { return ThreadLocalDirectByteBuf.newInstance(); } }
[ "public", "static", "ByteBuf", "threadLocalDirectBuffer", "(", ")", "{", "if", "(", "THREAD_LOCAL_BUFFER_SIZE", "<=", "0", ")", "{", "return", "null", ";", "}", "if", "(", "PlatformDependent", ".", "hasUnsafe", "(", ")", ")", "{", "return", "ThreadLocalUnsafeD...
Returns a cached thread-local direct buffer, if available. @return a cached thread-local direct buffer, if available. {@code null} otherwise.
[ "Returns", "a", "cached", "thread", "-", "local", "direct", "buffer", "if", "available", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/buffer/src/main/java/io/netty/buffer/ByteBufUtil.java#L805-L815
train
Returns a thread - local direct buffer.
[ 30522, 2270, 10763, 24880, 8569, 2546, 11689, 4135, 9289, 4305, 2890, 6593, 8569, 12494, 1006, 1007, 1063, 2065, 1006, 11689, 1035, 2334, 1035, 17698, 1035, 2946, 1026, 1027, 1014, 1007, 1063, 2709, 19701, 1025, 1065, 2065, 1006, 4132, 3207...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java
HbaseSyncService.convertData2Row
private static void convertData2Row(MappingConfig.HbaseMapping hbaseMapping, HRow hRow, Map<String, Object> data) { Map<String, MappingConfig.ColumnItem> columnItems = hbaseMapping.getColumnItems(); int i = 0; for (Map.Entry<String, Object> entry : data.entrySet()) { if (hbaseMapping.getExcludeColumns() != null && hbaseMapping.getExcludeColumns().contains(entry.getKey())) { continue; } if (entry.getValue() != null) { MappingConfig.ColumnItem columnItem = columnItems.get(entry.getKey()); byte[] bytes = typeConvert(columnItem, hbaseMapping, entry.getValue()); if (columnItem == null) { String familyName = hbaseMapping.getFamily(); String qualifier = entry.getKey(); if (hbaseMapping.isUppercaseQualifier()) { qualifier = qualifier.toUpperCase(); } if (hbaseMapping.getRowKey() == null && i == 0) { hRow.setRowKey(bytes); } else { hRow.addCell(familyName, qualifier, bytes); } } else { if (columnItem.isRowKey()) { if (columnItem.getRowKeyLen() != null && entry.getValue() != null) { if (entry.getValue() instanceof Number) { String v = String.format("%0" + columnItem.getRowKeyLen() + "d", ((Number) entry.getValue()).longValue()); bytes = Bytes.toBytes(v); } else { try { String v = String.format("%0" + columnItem.getRowKeyLen() + "d", Integer.parseInt((String) entry.getValue())); bytes = Bytes.toBytes(v); } catch (Exception e) { logger.error(e.getMessage(), e); } } } hRow.setRowKey(bytes); } else { hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), bytes); } } } i++; } }
java
private static void convertData2Row(MappingConfig.HbaseMapping hbaseMapping, HRow hRow, Map<String, Object> data) { Map<String, MappingConfig.ColumnItem> columnItems = hbaseMapping.getColumnItems(); int i = 0; for (Map.Entry<String, Object> entry : data.entrySet()) { if (hbaseMapping.getExcludeColumns() != null && hbaseMapping.getExcludeColumns().contains(entry.getKey())) { continue; } if (entry.getValue() != null) { MappingConfig.ColumnItem columnItem = columnItems.get(entry.getKey()); byte[] bytes = typeConvert(columnItem, hbaseMapping, entry.getValue()); if (columnItem == null) { String familyName = hbaseMapping.getFamily(); String qualifier = entry.getKey(); if (hbaseMapping.isUppercaseQualifier()) { qualifier = qualifier.toUpperCase(); } if (hbaseMapping.getRowKey() == null && i == 0) { hRow.setRowKey(bytes); } else { hRow.addCell(familyName, qualifier, bytes); } } else { if (columnItem.isRowKey()) { if (columnItem.getRowKeyLen() != null && entry.getValue() != null) { if (entry.getValue() instanceof Number) { String v = String.format("%0" + columnItem.getRowKeyLen() + "d", ((Number) entry.getValue()).longValue()); bytes = Bytes.toBytes(v); } else { try { String v = String.format("%0" + columnItem.getRowKeyLen() + "d", Integer.parseInt((String) entry.getValue())); bytes = Bytes.toBytes(v); } catch (Exception e) { logger.error(e.getMessage(), e); } } } hRow.setRowKey(bytes); } else { hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), bytes); } } } i++; } }
[ "private", "static", "void", "convertData2Row", "(", "MappingConfig", ".", "HbaseMapping", "hbaseMapping", ",", "HRow", "hRow", ",", "Map", "<", "String", ",", "Object", ">", "data", ")", "{", "Map", "<", "String", ",", "MappingConfig", ".", "ColumnItem", ">...
将Map数据转换为HRow行数据 @param hbaseMapping hbase映射配置 @param hRow 行对象 @param data Map数据
[ "将Map数据转换为HRow行数据" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java#L106-L155
train
Convert data to row.
[ 30522, 2797, 10763, 11675, 10463, 2850, 2696, 2475, 10524, 1006, 12375, 8663, 8873, 2290, 1012, 1044, 15058, 2863, 14853, 1044, 15058, 2863, 14853, 1010, 17850, 5004, 17850, 5004, 1010, 4949, 1026, 5164, 1010, 4874, 1028, 2951, 1007, 1063, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/api/common/Plan.java
Plan.accept
@Override public void accept(Visitor<Operator<?>> visitor) { for (GenericDataSinkBase<?> sink : this.sinks) { sink.accept(visitor); } }
java
@Override public void accept(Visitor<Operator<?>> visitor) { for (GenericDataSinkBase<?> sink : this.sinks) { sink.accept(visitor); } }
[ "@", "Override", "public", "void", "accept", "(", "Visitor", "<", "Operator", "<", "?", ">", ">", "visitor", ")", "{", "for", "(", "GenericDataSinkBase", "<", "?", ">", "sink", ":", "this", ".", "sinks", ")", "{", "sink", ".", "accept", "(", "visitor...
Traverses the job depth first from all data sinks on towards the sources. @see Visitable#accept(Visitor)
[ "Traverses", "the", "job", "depth", "first", "from", "all", "data", "sinks", "on", "towards", "the", "sources", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/api/common/Plan.java#L326-L331
train
Visit all the sinks of this operator.
[ 30522, 1030, 2058, 15637, 2270, 11675, 5138, 1006, 10367, 1026, 6872, 1026, 1029, 1028, 1028, 10367, 1007, 1063, 2005, 1006, 12391, 2850, 10230, 19839, 15058, 1026, 1029, 1028, 7752, 1024, 2023, 1012, 23462, 1007, 1063, 7752, 1012, 5138, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
networknt/light-4j
utility/src/main/java/com/networknt/utility/CodeVerifierUtil.java
CodeVerifierUtil.generateRandomCodeVerifier
public static String generateRandomCodeVerifier(SecureRandom entropySource, int entropyBytes) { byte[] randomBytes = new byte[entropyBytes]; entropySource.nextBytes(randomBytes); return Base64.getUrlEncoder().withoutPadding().encodeToString(randomBytes); }
java
public static String generateRandomCodeVerifier(SecureRandom entropySource, int entropyBytes) { byte[] randomBytes = new byte[entropyBytes]; entropySource.nextBytes(randomBytes); return Base64.getUrlEncoder().withoutPadding().encodeToString(randomBytes); }
[ "public", "static", "String", "generateRandomCodeVerifier", "(", "SecureRandom", "entropySource", ",", "int", "entropyBytes", ")", "{", "byte", "[", "]", "randomBytes", "=", "new", "byte", "[", "entropyBytes", "]", ";", "entropySource", ".", "nextBytes", "(", "r...
Generates a random code verifier string using the provided entropy source and the specified number of bytes of entropy. @param entropySource entropy source @param entropyBytes entropy bytes @return String generated code verifier
[ "Generates", "a", "random", "code", "verifier", "string", "using", "the", "provided", "entropy", "source", "and", "the", "specified", "number", "of", "bytes", "of", "entropy", "." ]
2a60257c60663684c8f6dc8b5ea3cf184e534db6
https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/utility/src/main/java/com/networknt/utility/CodeVerifierUtil.java#L117-L121
train
Generate a random code verifier.
[ 30522, 2270, 10763, 5164, 9699, 13033, 5358, 16044, 6299, 18095, 1006, 5851, 13033, 5358, 23077, 6499, 3126, 3401, 1010, 20014, 23077, 3762, 4570, 1007, 1063, 24880, 1031, 1033, 6721, 3762, 4570, 1027, 2047, 24880, 1031, 23077, 3762, 4570, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/tasks/StreamTask.java
StreamTask.disposeAllOperators
private void disposeAllOperators() { if (operatorChain != null) { for (StreamOperator<?> operator : operatorChain.getAllOperators()) { try { if (operator != null) { operator.dispose(); } } catch (Throwable t) { LOG.error("Error during disposal of stream operator.", t); } } } }
java
private void disposeAllOperators() { if (operatorChain != null) { for (StreamOperator<?> operator : operatorChain.getAllOperators()) { try { if (operator != null) { operator.dispose(); } } catch (Throwable t) { LOG.error("Error during disposal of stream operator.", t); } } } }
[ "private", "void", "disposeAllOperators", "(", ")", "{", "if", "(", "operatorChain", "!=", "null", ")", "{", "for", "(", "StreamOperator", "<", "?", ">", "operator", ":", "operatorChain", ".", "getAllOperators", "(", ")", ")", "{", "try", "{", "if", "(",...
Execute @link StreamOperator#dispose()} of each operator in the chain of this {@link StreamTask}. Disposing happens from <b>tail to head</b> operator in the chain. <p>The difference with the {@link #tryDisposeAllOperators()} is that in case of an exception, this method catches it and logs the message.
[ "Execute", "@link", "StreamOperator#dispose", "()", "}", "of", "each", "operator", "in", "the", "chain", "of", "this", "{", "@link", "StreamTask", "}", ".", "Disposing", "happens", "from", "<b", ">", "tail", "to", "head<", "/", "b", ">", "operator", "in", ...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/tasks/StreamTask.java#L508-L521
train
Disposes all operators.
[ 30522, 2797, 11675, 27764, 8095, 25918, 18926, 1006, 1007, 1063, 2065, 1006, 6872, 24925, 2078, 999, 1027, 19701, 1007, 1063, 2005, 1006, 5460, 25918, 8844, 1026, 1029, 1028, 6872, 1024, 6872, 24925, 2078, 1012, 2131, 8095, 25918, 18926, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/iomanager/IOManager.java
IOManager.shutdown
public void shutdown() { // remove all of our temp directories for (File path : paths) { try { if (path != null) { if (path.exists()) { FileUtils.deleteDirectory(path); LOG.info("I/O manager removed spill file directory {}", path.getAbsolutePath()); } } } catch (Throwable t) { LOG.error("IOManager failed to properly clean up temp file directory: " + path, t); } } }
java
public void shutdown() { // remove all of our temp directories for (File path : paths) { try { if (path != null) { if (path.exists()) { FileUtils.deleteDirectory(path); LOG.info("I/O manager removed spill file directory {}", path.getAbsolutePath()); } } } catch (Throwable t) { LOG.error("IOManager failed to properly clean up temp file directory: " + path, t); } } }
[ "public", "void", "shutdown", "(", ")", "{", "// remove all of our temp directories", "for", "(", "File", "path", ":", "paths", ")", "{", "try", "{", "if", "(", "path", "!=", "null", ")", "{", "if", "(", "path", ".", "exists", "(", ")", ")", "{", "Fi...
Close method, marks the I/O manager as closed and removed all temporary files.
[ "Close", "method", "marks", "the", "I", "/", "O", "manager", "as", "closed", "and", "removed", "all", "temporary", "files", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/iomanager/IOManager.java#L87-L101
train
Shutdown the IO manager.
[ 30522, 2270, 30524, 1007, 1063, 1013, 1013, 6366, 2035, 1997, 2256, 8915, 8737, 2472, 3111, 2005, 1006, 5371, 4130, 1024, 10425, 1007, 1063, 3046, 1063, 2065, 1006, 4130, 999, 1027, 19701, 1007, 1063, 2065, 1006, 4130, 1012, 6526, 1006, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
looly/hutool
hutool-core/src/main/java/cn/hutool/core/img/Img.java
Img.scale
public Img scale(int width, int height, Color fixedColor) { final BufferedImage srcImage = getValidSrcImg(); int srcHeight = srcImage.getHeight(null); int srcWidth = srcImage.getWidth(null); double heightRatio = NumberUtil.div(height, srcHeight); double widthRatio = NumberUtil.div(width, srcWidth); if (heightRatio == widthRatio) { // 长宽都按照相同比例缩放时,返回缩放后的图片 return scale(width, height); } // 宽缩放比例小就按照宽缩放,否则按照高缩放 if (widthRatio < heightRatio) { scale(width, (int) (srcHeight * widthRatio)); } else { scale((int) (srcWidth * heightRatio), height); } if (null == fixedColor) {// 补白 fixedColor = Color.WHITE; } final BufferedImage image = new BufferedImage(width, height, getTypeInt()); Graphics2D g = image.createGraphics(); // 设置背景 g.setBackground(fixedColor); g.clearRect(0, 0, width, height); final BufferedImage itemp = this.targetImage; final int itempHeight = itemp.getHeight(); final int itempWidth = itemp.getWidth(); // 在中间贴图 g.drawImage(itemp, (width - itempWidth) / 2, (height - itempHeight) / 2, itempWidth, itempHeight, fixedColor, null); g.dispose(); this.targetImage = image; return this; }
java
public Img scale(int width, int height, Color fixedColor) { final BufferedImage srcImage = getValidSrcImg(); int srcHeight = srcImage.getHeight(null); int srcWidth = srcImage.getWidth(null); double heightRatio = NumberUtil.div(height, srcHeight); double widthRatio = NumberUtil.div(width, srcWidth); if (heightRatio == widthRatio) { // 长宽都按照相同比例缩放时,返回缩放后的图片 return scale(width, height); } // 宽缩放比例小就按照宽缩放,否则按照高缩放 if (widthRatio < heightRatio) { scale(width, (int) (srcHeight * widthRatio)); } else { scale((int) (srcWidth * heightRatio), height); } if (null == fixedColor) {// 补白 fixedColor = Color.WHITE; } final BufferedImage image = new BufferedImage(width, height, getTypeInt()); Graphics2D g = image.createGraphics(); // 设置背景 g.setBackground(fixedColor); g.clearRect(0, 0, width, height); final BufferedImage itemp = this.targetImage; final int itempHeight = itemp.getHeight(); final int itempWidth = itemp.getWidth(); // 在中间贴图 g.drawImage(itemp, (width - itempWidth) / 2, (height - itempHeight) / 2, itempWidth, itempHeight, fixedColor, null); g.dispose(); this.targetImage = image; return this; }
[ "public", "Img", "scale", "(", "int", "width", ",", "int", "height", ",", "Color", "fixedColor", ")", "{", "final", "BufferedImage", "srcImage", "=", "getValidSrcImg", "(", ")", ";", "int", "srcHeight", "=", "srcImage", ".", "getHeight", "(", "null", ")", ...
缩放图像(按高度和宽度缩放)<br> 缩放后默认为jpeg格式 @param width 缩放后的宽度 @param height 缩放后的高度 @param fixedColor 比例不对时补充的颜色,不补充为<code>null</code> @return this
[ "缩放图像(按高度和宽度缩放)<br", ">", "缩放后默认为jpeg格式" ]
bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a
https://github.com/looly/hutool/blob/bbd74eda4c7e8a81fe7a991fa6c2276eec062e6a/hutool-core/src/main/java/cn/hutool/core/img/Img.java#L240-L277
train
Scale the image.
[ 30522, 2270, 10047, 2290, 4094, 1006, 20014, 9381, 1010, 20014, 4578, 1010, 3609, 4964, 18717, 1007, 1063, 2345, 17698, 2098, 9581, 3351, 5034, 6895, 26860, 1027, 2131, 10175, 9821, 11890, 5714, 2290, 1006, 1007, 1025, 20014, 5034, 5403, 18...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
networknt/light-4j
dump/src/main/java/com/networknt/dump/BodyDumper.java
BodyDumper.dumpInputStream
private void dumpInputStream(){ //dump request body exchange.startBlocking(); InputStream inputStream = exchange.getInputStream(); try { if(config.isMaskEnabled() && inputStream.available() != -1) { this.bodyContent = Mask.maskJson(inputStream, "requestBody"); } else { try { this.bodyContent = StringUtils.inputStreamToString(inputStream, UTF_8); } catch (IOException e) { logger.error(e.toString()); } } } catch (IOException e) { logger.error("undertow inputstream error:" + e.getMessage()); } }
java
private void dumpInputStream(){ //dump request body exchange.startBlocking(); InputStream inputStream = exchange.getInputStream(); try { if(config.isMaskEnabled() && inputStream.available() != -1) { this.bodyContent = Mask.maskJson(inputStream, "requestBody"); } else { try { this.bodyContent = StringUtils.inputStreamToString(inputStream, UTF_8); } catch (IOException e) { logger.error(e.toString()); } } } catch (IOException e) { logger.error("undertow inputstream error:" + e.getMessage()); } }
[ "private", "void", "dumpInputStream", "(", ")", "{", "//dump request body", "exchange", ".", "startBlocking", "(", ")", ";", "InputStream", "inputStream", "=", "exchange", ".", "getInputStream", "(", ")", ";", "try", "{", "if", "(", "config", ".", "isMaskEnabl...
read from input stream, convert it to string, put into this.bodyContent
[ "read", "from", "input", "stream", "convert", "it", "to", "string", "put", "into", "this", ".", "bodyContent" ]
2a60257c60663684c8f6dc8b5ea3cf184e534db6
https://github.com/networknt/light-4j/blob/2a60257c60663684c8f6dc8b5ea3cf184e534db6/dump/src/main/java/com/networknt/dump/BodyDumper.java#L97-L114
train
dump request body content from inputstream
[ 30522, 2797, 11675, 15653, 2378, 18780, 21422, 1006, 1007, 1063, 1013, 1013, 15653, 5227, 2303, 3863, 1012, 2707, 23467, 2075, 1006, 1007, 1025, 20407, 25379, 20407, 25379, 1027, 3863, 1012, 2131, 2378, 18780, 21422, 1006, 1007, 1025, 3046, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
transport/src/main/java/io/netty/channel/ChannelHandlerAdapter.java
ChannelHandlerAdapter.isSharable
public boolean isSharable() { /** * Cache the result of {@link Sharable} annotation detection to workaround a condition. We use a * {@link ThreadLocal} and {@link WeakHashMap} to eliminate the volatile write/reads. Using different * {@link WeakHashMap} instances per {@link Thread} is good enough for us and the number of * {@link Thread}s are quite limited anyway. * * See <a href="https://github.com/netty/netty/issues/2289">#2289</a>. */ Class<?> clazz = getClass(); Map<Class<?>, Boolean> cache = InternalThreadLocalMap.get().handlerSharableCache(); Boolean sharable = cache.get(clazz); if (sharable == null) { sharable = clazz.isAnnotationPresent(Sharable.class); cache.put(clazz, sharable); } return sharable; }
java
public boolean isSharable() { /** * Cache the result of {@link Sharable} annotation detection to workaround a condition. We use a * {@link ThreadLocal} and {@link WeakHashMap} to eliminate the volatile write/reads. Using different * {@link WeakHashMap} instances per {@link Thread} is good enough for us and the number of * {@link Thread}s are quite limited anyway. * * See <a href="https://github.com/netty/netty/issues/2289">#2289</a>. */ Class<?> clazz = getClass(); Map<Class<?>, Boolean> cache = InternalThreadLocalMap.get().handlerSharableCache(); Boolean sharable = cache.get(clazz); if (sharable == null) { sharable = clazz.isAnnotationPresent(Sharable.class); cache.put(clazz, sharable); } return sharable; }
[ "public", "boolean", "isSharable", "(", ")", "{", "/**\n * Cache the result of {@link Sharable} annotation detection to workaround a condition. We use a\n * {@link ThreadLocal} and {@link WeakHashMap} to eliminate the volatile write/reads. Using different\n * {@link WeakHashMap} ...
Return {@code true} if the implementation is {@link Sharable} and so can be added to different {@link ChannelPipeline}s.
[ "Return", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/transport/src/main/java/io/netty/channel/ChannelHandlerAdapter.java#L45-L62
train
Returns true if the class is a Sharable class.
[ 30522, 2270, 22017, 20898, 26354, 11077, 3468, 1006, 1007, 1063, 1013, 1008, 1008, 1008, 17053, 1996, 2765, 1997, 1063, 1030, 4957, 21146, 16670, 1065, 5754, 17287, 3508, 10788, 2000, 2147, 24490, 1037, 4650, 1012, 2057, 2224, 1037, 1008, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlFunctionUtils.java
SqlFunctionUtils.replace
public static String replace(String str, String oldStr, String replacement) { return str.replace(oldStr, replacement); }
java
public static String replace(String str, String oldStr, String replacement) { return str.replace(oldStr, replacement); }
[ "public", "static", "String", "replace", "(", "String", "str", ",", "String", "oldStr", ",", "String", "replacement", ")", "{", "return", "str", ".", "replace", "(", "oldStr", ",", "replacement", ")", ";", "}" ]
Replaces all the old strings with the replacement string.
[ "Replaces", "all", "the", "old", "strings", "with", "the", "replacement", "string", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlFunctionUtils.java#L298-L300
train
replace the old string with replacement
[ 30522, 2270, 10763, 5164, 5672, 1006, 5164, 2358, 2099, 1010, 5164, 19457, 16344, 1010, 5164, 6110, 1007, 1063, 2709, 2358, 2099, 1012, 5672, 1006, 19457, 16344, 1010, 6110, 1007, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
instance/core/src/main/java/com/alibaba/otter/canal/instance/core/AbstractCanalInstance.java
AbstractCanalInstance.afterStartEventParser
protected void afterStartEventParser(CanalEventParser eventParser) { // 读取一下历史订阅的filter信息 List<ClientIdentity> clientIdentitys = metaManager.listAllSubscribeInfo(destination); for (ClientIdentity clientIdentity : clientIdentitys) { subscribeChange(clientIdentity); } }
java
protected void afterStartEventParser(CanalEventParser eventParser) { // 读取一下历史订阅的filter信息 List<ClientIdentity> clientIdentitys = metaManager.listAllSubscribeInfo(destination); for (ClientIdentity clientIdentity : clientIdentitys) { subscribeChange(clientIdentity); } }
[ "protected", "void", "afterStartEventParser", "(", "CanalEventParser", "eventParser", ")", "{", "// 读取一下历史订阅的filter信息", "List", "<", "ClientIdentity", ">", "clientIdentitys", "=", "metaManager", ".", "listAllSubscribeInfo", "(", "destination", ")", ";", "for", "(", "C...
around event parser, default impl
[ "around", "event", "parser", "default", "impl" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/instance/core/src/main/java/com/alibaba/otter/canal/instance/core/AbstractCanalInstance.java#L143-L149
train
Called after start event parser.
[ 30522, 5123, 11675, 2044, 14117, 2618, 15338, 19362, 8043, 1006, 5033, 18697, 3372, 19362, 8043, 2724, 19362, 8043, 1007, 1063, 1013, 1013, 100, 100, 1740, 1743, 100, 1790, 100, 100, 1916, 11307, 1767, 100, 2862, 1026, 7396, 5178, 16778, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolImpl.java
SlotPoolImpl.releaseTaskManager
@Override public boolean releaseTaskManager(final ResourceID resourceId, final Exception cause) { componentMainThreadExecutor.assertRunningInMainThread(); if (registeredTaskManagers.remove(resourceId)) { releaseTaskManagerInternal(resourceId, cause); return true; } else { return false; } }
java
@Override public boolean releaseTaskManager(final ResourceID resourceId, final Exception cause) { componentMainThreadExecutor.assertRunningInMainThread(); if (registeredTaskManagers.remove(resourceId)) { releaseTaskManagerInternal(resourceId, cause); return true; } else { return false; } }
[ "@", "Override", "public", "boolean", "releaseTaskManager", "(", "final", "ResourceID", "resourceId", ",", "final", "Exception", "cause", ")", "{", "componentMainThreadExecutor", ".", "assertRunningInMainThread", "(", ")", ";", "if", "(", "registeredTaskManagers", "."...
Unregister TaskManager from this pool, all the related slots will be released and tasks be canceled. Called when we find some TaskManager becomes "dead" or "abnormal", and we decide to not using slots from it anymore. @param resourceId The id of the TaskManager @param cause for the releasing of the TaskManager
[ "Unregister", "TaskManager", "from", "this", "pool", "all", "the", "related", "slots", "will", "be", "released", "and", "tasks", "be", "canceled", ".", "Called", "when", "we", "find", "some", "TaskManager", "becomes", "dead", "or", "abnormal", "and", "we", "...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolImpl.java#L705-L716
train
Release a task manager.
[ 30522, 1030, 2058, 15637, 2270, 22017, 20898, 2713, 10230, 22287, 5162, 4590, 1006, 2345, 7692, 3593, 7692, 3593, 1010, 2345, 6453, 3426, 1007, 1063, 6922, 24238, 2705, 16416, 3207, 2595, 8586, 16161, 2099, 1012, 20865, 15532, 5582, 2378, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
common/src/main/java/io/netty/util/CharsetUtil.java
CharsetUtil.encoder
public static CharsetEncoder encoder(Charset charset) { checkNotNull(charset, "charset"); Map<Charset, CharsetEncoder> map = InternalThreadLocalMap.get().charsetEncoderCache(); CharsetEncoder e = map.get(charset); if (e != null) { e.reset().onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE); return e; } e = encoder(charset, CodingErrorAction.REPLACE, CodingErrorAction.REPLACE); map.put(charset, e); return e; }
java
public static CharsetEncoder encoder(Charset charset) { checkNotNull(charset, "charset"); Map<Charset, CharsetEncoder> map = InternalThreadLocalMap.get().charsetEncoderCache(); CharsetEncoder e = map.get(charset); if (e != null) { e.reset().onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE); return e; } e = encoder(charset, CodingErrorAction.REPLACE, CodingErrorAction.REPLACE); map.put(charset, e); return e; }
[ "public", "static", "CharsetEncoder", "encoder", "(", "Charset", "charset", ")", "{", "checkNotNull", "(", "charset", ",", "\"charset\"", ")", ";", "Map", "<", "Charset", ",", "CharsetEncoder", ">", "map", "=", "InternalThreadLocalMap", ".", "get", "(", ")", ...
Returns a cached thread-local {@link CharsetEncoder} for the specified {@link Charset}. @param charset The specified charset @return The encoder for the specified {@code charset}
[ "Returns", "a", "cached", "thread", "-", "local", "{", "@link", "CharsetEncoder", "}", "for", "the", "specified", "{", "@link", "Charset", "}", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/common/src/main/java/io/netty/util/CharsetUtil.java#L113-L126
train
Get a CharsetEncoder for the specified charset.
[ 30522, 2270, 10763, 25869, 13462, 2368, 16044, 2099, 4372, 16044, 2099, 1006, 25869, 13462, 25869, 13462, 1007, 1063, 4638, 17048, 11231, 3363, 1006, 25869, 13462, 1010, 1000, 25869, 13462, 1000, 1007, 1025, 4949, 1026, 25869, 13462, 1010, 25...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
transport-native-epoll/src/main/java/io/netty/channel/epoll/EpollEventLoop.java
EpollEventLoop.add
void add(AbstractEpollChannel ch) throws IOException { assert inEventLoop(); int fd = ch.socket.intValue(); Native.epollCtlAdd(epollFd.intValue(), fd, ch.flags); channels.put(fd, ch); }
java
void add(AbstractEpollChannel ch) throws IOException { assert inEventLoop(); int fd = ch.socket.intValue(); Native.epollCtlAdd(epollFd.intValue(), fd, ch.flags); channels.put(fd, ch); }
[ "void", "add", "(", "AbstractEpollChannel", "ch", ")", "throws", "IOException", "{", "assert", "inEventLoop", "(", ")", ";", "int", "fd", "=", "ch", ".", "socket", ".", "intValue", "(", ")", ";", "Native", ".", "epollCtlAdd", "(", "epollFd", ".", "intVal...
Register the given epoll with this {@link EventLoop}.
[ "Register", "the", "given", "epoll", "with", "this", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/transport-native-epoll/src/main/java/io/netty/channel/epoll/EpollEventLoop.java#L174-L179
train
Add a new EpollChannel to the set of channels.
[ 30522, 11675, 5587, 1006, 10061, 13699, 14511, 26058, 10381, 1007, 11618, 22834, 10288, 24422, 1063, 20865, 1999, 18697, 3372, 4135, 7361, 1006, 1007, 1025, 20014, 1042, 2094, 1027, 10381, 1012, 22278, 1012, 20014, 10175, 5657, 1006, 1007, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
LinkedOptionalMap.hasAbsentKeysOrValues
public boolean hasAbsentKeysOrValues() { for (Entry<String, KeyValue<K, V>> entry : underlyingMap.entrySet()) { if (keyOrValueIsAbsent(entry)) { return true; } } return false; }
java
public boolean hasAbsentKeysOrValues() { for (Entry<String, KeyValue<K, V>> entry : underlyingMap.entrySet()) { if (keyOrValueIsAbsent(entry)) { return true; } } return false; }
[ "public", "boolean", "hasAbsentKeysOrValues", "(", ")", "{", "for", "(", "Entry", "<", "String", ",", "KeyValue", "<", "K", ",", "V", ">", ">", "entry", ":", "underlyingMap", ".", "entrySet", "(", ")", ")", "{", "if", "(", "keyOrValueIsAbsent", "(", "e...
Checks whether there are entries with absent keys or values.
[ "Checks", "whether", "there", "are", "entries", "with", "absent", "keys", "or", "values", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java#L143-L150
train
Returns true if any of the keys or values in the underlying map are absent.
[ 30522, 2270, 22017, 20898, 2038, 7875, 5054, 2102, 14839, 21748, 10175, 15808, 1006, 1007, 1063, 2005, 1006, 4443, 1026, 5164, 1010, 3145, 10175, 5657, 1026, 1047, 1010, 1058, 1028, 1028, 4443, 1024, 30524, 4443, 1007, 1007, 1063, 2709, 299...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
codec-http2/src/main/java/io/netty/handler/codec/http2/Http2FrameCodecBuilder.java
Http2FrameCodecBuilder.build
@Override public Http2FrameCodec build() { Http2FrameWriter frameWriter = this.frameWriter; if (frameWriter != null) { // This is to support our tests and will never be executed by the user as frameWriter(...) // is package-private. DefaultHttp2Connection connection = new DefaultHttp2Connection(isServer(), maxReservedStreams()); Long maxHeaderListSize = initialSettings().maxHeaderListSize(); Http2FrameReader frameReader = new DefaultHttp2FrameReader(maxHeaderListSize == null ? new DefaultHttp2HeadersDecoder(true) : new DefaultHttp2HeadersDecoder(true, maxHeaderListSize)); if (frameLogger() != null) { frameWriter = new Http2OutboundFrameLogger(frameWriter, frameLogger()); frameReader = new Http2InboundFrameLogger(frameReader, frameLogger()); } Http2ConnectionEncoder encoder = new DefaultHttp2ConnectionEncoder(connection, frameWriter); if (encoderEnforceMaxConcurrentStreams()) { encoder = new StreamBufferingEncoder(encoder); } Http2ConnectionDecoder decoder = new DefaultHttp2ConnectionDecoder(connection, encoder, frameReader, promisedRequestVerifier(), isAutoAckSettingsFrame()); return build(decoder, encoder, initialSettings()); } return super.build(); }
java
@Override public Http2FrameCodec build() { Http2FrameWriter frameWriter = this.frameWriter; if (frameWriter != null) { // This is to support our tests and will never be executed by the user as frameWriter(...) // is package-private. DefaultHttp2Connection connection = new DefaultHttp2Connection(isServer(), maxReservedStreams()); Long maxHeaderListSize = initialSettings().maxHeaderListSize(); Http2FrameReader frameReader = new DefaultHttp2FrameReader(maxHeaderListSize == null ? new DefaultHttp2HeadersDecoder(true) : new DefaultHttp2HeadersDecoder(true, maxHeaderListSize)); if (frameLogger() != null) { frameWriter = new Http2OutboundFrameLogger(frameWriter, frameLogger()); frameReader = new Http2InboundFrameLogger(frameReader, frameLogger()); } Http2ConnectionEncoder encoder = new DefaultHttp2ConnectionEncoder(connection, frameWriter); if (encoderEnforceMaxConcurrentStreams()) { encoder = new StreamBufferingEncoder(encoder); } Http2ConnectionDecoder decoder = new DefaultHttp2ConnectionDecoder(connection, encoder, frameReader, promisedRequestVerifier(), isAutoAckSettingsFrame()); return build(decoder, encoder, initialSettings()); } return super.build(); }
[ "@", "Override", "public", "Http2FrameCodec", "build", "(", ")", "{", "Http2FrameWriter", "frameWriter", "=", "this", ".", "frameWriter", ";", "if", "(", "frameWriter", "!=", "null", ")", "{", "// This is to support our tests and will never be executed by the user as fram...
Build a {@link Http2FrameCodec} object.
[ "Build", "a", "{" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/codec-http2/src/main/java/io/netty/handler/codec/http2/Http2FrameCodecBuilder.java#L152-L178
train
Build the Http2FrameCodec.
[ 30522, 1030, 2058, 15637, 2270, 8299, 2475, 15643, 16044, 2278, 3857, 1006, 1007, 1063, 8299, 2475, 15643, 15994, 4853, 15994, 1027, 2023, 1012, 4853, 15994, 1025, 2065, 1006, 4853, 15994, 999, 1027, 19701, 1007, 1063, 1013, 1013, 2023, 200...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
alibaba/canal
deployer/src/main/java/com/alibaba/otter/canal/deployer/monitor/remote/DbRemoteConfigLoader.java
DbRemoteConfigLoader.loadRemoteInstanceConfigs
@Override public void loadRemoteInstanceConfigs() { try { // 加载远程instance配置 loadModifiedInstanceConfigs(); } catch (Exception e) { logger.error(e.getMessage(), e); } }
java
@Override public void loadRemoteInstanceConfigs() { try { // 加载远程instance配置 loadModifiedInstanceConfigs(); } catch (Exception e) { logger.error(e.getMessage(), e); } }
[ "@", "Override", "public", "void", "loadRemoteInstanceConfigs", "(", ")", "{", "try", "{", "// 加载远程instance配置", "loadModifiedInstanceConfigs", "(", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "logger", ".", "error", "(", "e", ".", "getMessage", ...
加载远程的instance配置
[ "加载远程的instance配置" ]
8f088cddc0755f4350c5aaae95c6e4002d90a40f
https://github.com/alibaba/canal/blob/8f088cddc0755f4350c5aaae95c6e4002d90a40f/deployer/src/main/java/com/alibaba/otter/canal/deployer/monitor/remote/DbRemoteConfigLoader.java#L139-L147
train
Load the instance configs from the remote server.
[ 30522, 1030, 2058, 15637, 2270, 11675, 7170, 28578, 12184, 7076, 26897, 8663, 8873, 5620, 1006, 1007, 1063, 3046, 1063, 1013, 1013, 1779, 100, 100, 100, 6013, 100, 100, 7170, 5302, 4305, 10451, 7076, 26897, 8663, 8873, 5620, 1006, 1007, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/transformations/StreamTransformation.java
StreamTransformation.setParallelism
public void setParallelism(int parallelism) { Preconditions.checkArgument( parallelism > 0 || parallelism == ExecutionConfig.PARALLELISM_DEFAULT, "The parallelism must be at least one, or ExecutionConfig.PARALLELISM_DEFAULT (use system default)."); this.parallelism = parallelism; }
java
public void setParallelism(int parallelism) { Preconditions.checkArgument( parallelism > 0 || parallelism == ExecutionConfig.PARALLELISM_DEFAULT, "The parallelism must be at least one, or ExecutionConfig.PARALLELISM_DEFAULT (use system default)."); this.parallelism = parallelism; }
[ "public", "void", "setParallelism", "(", "int", "parallelism", ")", "{", "Preconditions", ".", "checkArgument", "(", "parallelism", ">", "0", "||", "parallelism", "==", "ExecutionConfig", ".", "PARALLELISM_DEFAULT", ",", "\"The parallelism must be at least one, or Executi...
Sets the parallelism of this {@code StreamTransformation}. @param parallelism The new parallelism to set on this {@code StreamTransformation}.
[ "Sets", "the", "parallelism", "of", "this", "{", "@code", "StreamTransformation", "}", "." ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/transformations/StreamTransformation.java#L207-L212
train
Sets the parallelism of the task.
[ 30522, 2270, 11675, 2275, 28689, 6216, 28235, 1006, 20014, 5903, 2964, 1007, 1063, 3653, 8663, 20562, 2015, 1012, 4638, 2906, 22850, 4765, 1006, 5903, 2964, 1028, 1014, 1064, 1064, 5903, 2964, 1027, 1027, 7781, 8663, 8873, 2290, 1012, 5903,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/operators/sort/AbstractMergeOuterJoinIterator.java
AbstractMergeOuterJoinIterator.callWithNextKey
@Override public boolean callWithNextKey(final FlatJoinFunction<T1, T2, O> joinFunction, final Collector<O> collector) throws Exception { if (!initialized) { //first run, set iterators to first elements it1Empty = !this.iterator1.nextKey(); it2Empty = !this.iterator2.nextKey(); initialized = true; } if (it1Empty && it2Empty) { return false; } else if (it2Empty) { if (outerJoinType == OuterJoinType.LEFT || outerJoinType == OuterJoinType.FULL) { joinLeftKeyValuesWithNull(iterator1.getValues(), joinFunction, collector); it1Empty = !iterator1.nextKey(); return true; } else { //consume rest of left side while (iterator1.nextKey()) { } it1Empty = true; return false; } } else if (it1Empty) { if (outerJoinType == OuterJoinType.RIGHT || outerJoinType == OuterJoinType.FULL) { joinRightKeyValuesWithNull(iterator2.getValues(), joinFunction, collector); it2Empty = !iterator2.nextKey(); return true; } else { //consume rest of right side while (iterator2.nextKey()) { } it2Empty = true; return false; } } else { final TypePairComparator<T1, T2> comparator = super.pairComparator; comparator.setReference(this.iterator1.getCurrent()); T2 current2 = this.iterator2.getCurrent(); // zig zag while (true) { // determine the relation between the (possibly composite) keys final int comp = comparator.compareToReference(current2); if (comp == 0) { break; } if (comp < 0) { //right key < left key if (outerJoinType == OuterJoinType.RIGHT || outerJoinType == OuterJoinType.FULL) { //join right key values with null in case of right or full outer join joinRightKeyValuesWithNull(iterator2.getValues(), joinFunction, collector); it2Empty = !iterator2.nextKey(); return true; } else { //skip this right key if it is a left outer join if (!this.iterator2.nextKey()) { //if right side is empty, join current left key values with null joinLeftKeyValuesWithNull(iterator1.getValues(), joinFunction, collector); it1Empty = !iterator1.nextKey(); it2Empty = true; return true; } current2 = this.iterator2.getCurrent(); 
} } else { //right key > left key if (outerJoinType == OuterJoinType.LEFT || outerJoinType == OuterJoinType.FULL) { //join left key values with null in case of left or full outer join joinLeftKeyValuesWithNull(iterator1.getValues(), joinFunction, collector); it1Empty = !iterator1.nextKey(); return true; } else { //skip this left key if it is a right outer join if (!this.iterator1.nextKey()) { //if right side is empty, join current right key values with null joinRightKeyValuesWithNull(iterator2.getValues(), joinFunction, collector); it1Empty = true; it2Empty = !iterator2.nextKey(); return true; } comparator.setReference(this.iterator1.getCurrent()); } } } // here, we have a common key! call the join function with the cross product of the // values final Iterator<T1> values1 = this.iterator1.getValues(); final Iterator<T2> values2 = this.iterator2.getValues(); crossMatchingGroup(values1, values2, joinFunction, collector); it1Empty = !iterator1.nextKey(); it2Empty = !iterator2.nextKey(); return true; } }
java
@Override public boolean callWithNextKey(final FlatJoinFunction<T1, T2, O> joinFunction, final Collector<O> collector) throws Exception { if (!initialized) { //first run, set iterators to first elements it1Empty = !this.iterator1.nextKey(); it2Empty = !this.iterator2.nextKey(); initialized = true; } if (it1Empty && it2Empty) { return false; } else if (it2Empty) { if (outerJoinType == OuterJoinType.LEFT || outerJoinType == OuterJoinType.FULL) { joinLeftKeyValuesWithNull(iterator1.getValues(), joinFunction, collector); it1Empty = !iterator1.nextKey(); return true; } else { //consume rest of left side while (iterator1.nextKey()) { } it1Empty = true; return false; } } else if (it1Empty) { if (outerJoinType == OuterJoinType.RIGHT || outerJoinType == OuterJoinType.FULL) { joinRightKeyValuesWithNull(iterator2.getValues(), joinFunction, collector); it2Empty = !iterator2.nextKey(); return true; } else { //consume rest of right side while (iterator2.nextKey()) { } it2Empty = true; return false; } } else { final TypePairComparator<T1, T2> comparator = super.pairComparator; comparator.setReference(this.iterator1.getCurrent()); T2 current2 = this.iterator2.getCurrent(); // zig zag while (true) { // determine the relation between the (possibly composite) keys final int comp = comparator.compareToReference(current2); if (comp == 0) { break; } if (comp < 0) { //right key < left key if (outerJoinType == OuterJoinType.RIGHT || outerJoinType == OuterJoinType.FULL) { //join right key values with null in case of right or full outer join joinRightKeyValuesWithNull(iterator2.getValues(), joinFunction, collector); it2Empty = !iterator2.nextKey(); return true; } else { //skip this right key if it is a left outer join if (!this.iterator2.nextKey()) { //if right side is empty, join current left key values with null joinLeftKeyValuesWithNull(iterator1.getValues(), joinFunction, collector); it1Empty = !iterator1.nextKey(); it2Empty = true; return true; } current2 = this.iterator2.getCurrent(); 
} } else { //right key > left key if (outerJoinType == OuterJoinType.LEFT || outerJoinType == OuterJoinType.FULL) { //join left key values with null in case of left or full outer join joinLeftKeyValuesWithNull(iterator1.getValues(), joinFunction, collector); it1Empty = !iterator1.nextKey(); return true; } else { //skip this left key if it is a right outer join if (!this.iterator1.nextKey()) { //if right side is empty, join current right key values with null joinRightKeyValuesWithNull(iterator2.getValues(), joinFunction, collector); it1Empty = true; it2Empty = !iterator2.nextKey(); return true; } comparator.setReference(this.iterator1.getCurrent()); } } } // here, we have a common key! call the join function with the cross product of the // values final Iterator<T1> values1 = this.iterator1.getValues(); final Iterator<T2> values2 = this.iterator2.getValues(); crossMatchingGroup(values1, values2, joinFunction, collector); it1Empty = !iterator1.nextKey(); it2Empty = !iterator2.nextKey(); return true; } }
[ "@", "Override", "public", "boolean", "callWithNextKey", "(", "final", "FlatJoinFunction", "<", "T1", ",", "T2", ",", "O", ">", "joinFunction", ",", "final", "Collector", "<", "O", ">", "collector", ")", "throws", "Exception", "{", "if", "(", "!", "initial...
Calls the <code>JoinFunction#join()</code> method for all two key-value pairs that share the same key and come from different inputs. Furthermore, depending on the outer join type (LEFT, RIGHT, FULL), all key-value pairs where no matching partner from the other input exists are joined with null. The output of the <code>join()</code> method is forwarded. @throws Exception Forwards all exceptions from the user code and the I/O system. @see org.apache.flink.runtime.operators.util.JoinTaskIterator#callWithNextKey(org.apache.flink.api.common.functions.FlatJoinFunction, org.apache.flink.util.Collector)
[ "Calls", "the", "<code", ">", "JoinFunction#join", "()", "<", "/", "code", ">", "method", "for", "all", "two", "key", "-", "value", "pairs", "that", "share", "the", "same", "key", "and", "come", "from", "different", "inputs", ".", "Furthermore", "depending...
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/operators/sort/AbstractMergeOuterJoinIterator.java#L74-L172
train
Call with next key.
[ 30522, 1030, 2058, 15637, 2270, 22017, 20898, 2655, 24415, 2638, 18413, 14839, 1006, 2345, 4257, 5558, 2378, 11263, 27989, 1026, 1056, 2487, 1010, 1056, 2475, 1010, 1051, 1028, 3693, 11263, 27989, 1010, 2345, 10018, 1026, 1051, 1028, 10018, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/incubator-shardingsphere
sharding-core/sharding-core-execute/src/main/java/org/apache/shardingsphere/core/execute/sql/execute/result/AggregationDistinctQueryMetaData.java
AggregationDistinctQueryMetaData.getAggregationDistinctColumnIndex
public int getAggregationDistinctColumnIndex(final String distinctColumnLabel) { for (Entry<Integer, String> entry : aggregationDistinctColumnIndexAndLabels.entrySet()) { if (entry.getValue().equals(distinctColumnLabel)) { return entry.getKey(); } } throw new ShardingException("Can not get aggregation distinct column index."); }
java
public int getAggregationDistinctColumnIndex(final String distinctColumnLabel) { for (Entry<Integer, String> entry : aggregationDistinctColumnIndexAndLabels.entrySet()) { if (entry.getValue().equals(distinctColumnLabel)) { return entry.getKey(); } } throw new ShardingException("Can not get aggregation distinct column index."); }
[ "public", "int", "getAggregationDistinctColumnIndex", "(", "final", "String", "distinctColumnLabel", ")", "{", "for", "(", "Entry", "<", "Integer", ",", "String", ">", "entry", ":", "aggregationDistinctColumnIndexAndLabels", ".", "entrySet", "(", ")", ")", "{", "i...
Get aggregation distinct column index. @param distinctColumnLabel aggregation distinct column label @return aggregation distinct column index
[ "Get", "aggregation", "distinct", "column", "index", "." ]
f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d
https://github.com/apache/incubator-shardingsphere/blob/f88fd29fc345dfb31fdce12e9e96cbfa0fd2402d/sharding-core/sharding-core-execute/src/main/java/org/apache/shardingsphere/core/execute/sql/execute/result/AggregationDistinctQueryMetaData.java#L191-L198
train
Get aggregation distinct column index.
[ 30522, 2270, 20014, 2131, 8490, 17603, 12540, 10521, 7629, 6593, 25778, 2819, 11483, 3207, 2595, 1006, 2345, 5164, 5664, 25778, 2819, 20554, 16336, 2140, 1007, 1063, 2005, 1006, 4443, 1026, 16109, 1010, 5164, 1028, 4443, 1024, 28041, 10521, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/metrics/UpTimeGauge.java
UpTimeGauge.getValue
@Override public Long getValue() { final JobStatus status = eg.getState(); if (status == JobStatus.RUNNING) { // running right now - report the uptime final long runningTimestamp = eg.getStatusTimestamp(JobStatus.RUNNING); // we use 'Math.max' here to avoid negative timestamps when clocks change return Math.max(System.currentTimeMillis() - runningTimestamp, 0); } else if (status.isTerminalState()) { // not running any more -> finished or not on leader return NO_LONGER_RUNNING; } else { // not yet running or not up at the moment return 0L; } }
java
@Override public Long getValue() { final JobStatus status = eg.getState(); if (status == JobStatus.RUNNING) { // running right now - report the uptime final long runningTimestamp = eg.getStatusTimestamp(JobStatus.RUNNING); // we use 'Math.max' here to avoid negative timestamps when clocks change return Math.max(System.currentTimeMillis() - runningTimestamp, 0); } else if (status.isTerminalState()) { // not running any more -> finished or not on leader return NO_LONGER_RUNNING; } else { // not yet running or not up at the moment return 0L; } }
[ "@", "Override", "public", "Long", "getValue", "(", ")", "{", "final", "JobStatus", "status", "=", "eg", ".", "getState", "(", ")", ";", "if", "(", "status", "==", "JobStatus", ".", "RUNNING", ")", "{", "// running right now - report the uptime", "final", "l...
------------------------------------------------------------------------
[ "------------------------------------------------------------------------" ]
b62db93bf63cb3bb34dd03d611a779d9e3fc61ac
https://github.com/apache/flink/blob/b62db93bf63cb3bb34dd03d611a779d9e3fc61ac/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/metrics/UpTimeGauge.java#L48-L66
train
Returns the value of the ClusterStatus.
[ 30522, 1030, 2058, 15637, 2270, 2146, 2131, 10175, 5657, 1006, 1007, 1063, 2345, 5841, 29336, 2271, 3570, 1027, 1041, 2290, 1012, 4152, 12259, 1006, 1007, 1025, 2065, 1006, 3570, 1027, 1027, 5841, 29336, 2271, 1012, 2770, 1007, 1063, 1013, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
apache/spark
launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
LauncherServer.unregister
void unregister(AbstractAppHandle handle) { for (Map.Entry<String, AbstractAppHandle> e : secretToPendingApps.entrySet()) { if (e.getValue().equals(handle)) { String secret = e.getKey(); secretToPendingApps.remove(secret); break; } } unref(); }
java
void unregister(AbstractAppHandle handle) { for (Map.Entry<String, AbstractAppHandle> e : secretToPendingApps.entrySet()) { if (e.getValue().equals(handle)) { String secret = e.getKey(); secretToPendingApps.remove(secret); break; } } unref(); }
[ "void", "unregister", "(", "AbstractAppHandle", "handle", ")", "{", "for", "(", "Map", ".", "Entry", "<", "String", ",", "AbstractAppHandle", ">", "e", ":", "secretToPendingApps", ".", "entrySet", "(", ")", ")", "{", "if", "(", "e", ".", "getValue", "(",...
Removes the client handle from the pending list (in case it's still there), and unrefs the server.
[ "Removes", "the", "client", "handle", "from", "the", "pending", "list", "(", "in", "case", "it", "s", "still", "there", ")", "and", "unrefs", "the", "server", "." ]
25ee0474f47d9c30d6f553a7892d9549f91071cf
https://github.com/apache/spark/blob/25ee0474f47d9c30d6f553a7892d9549f91071cf/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java#L212-L222
train
Unregister an application handle.
[ 30522, 11675, 4895, 2890, 24063, 2121, 1006, 10061, 29098, 11774, 2571, 5047, 1007, 1063, 2005, 1006, 4949, 1012, 4443, 1026, 5164, 1010, 10061, 29098, 11774, 2571, 1028, 1041, 1024, 3595, 14399, 18537, 29098, 2015, 1012, 4443, 13462, 1006, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
handler/src/main/java/io/netty/handler/ssl/SslHandler.java
SslHandler.unwrap
private int unwrap( ChannelHandlerContext ctx, ByteBuf packet, int offset, int length) throws SSLException { final int originalLength = length; boolean wrapLater = false; boolean notifyClosure = false; int overflowReadableBytes = -1; ByteBuf decodeOut = allocate(ctx, length); try { // Only continue to loop if the handler was not removed in the meantime. // See https://github.com/netty/netty/issues/5860 unwrapLoop: while (!ctx.isRemoved()) { final SSLEngineResult result = engineType.unwrap(this, packet, offset, length, decodeOut); final Status status = result.getStatus(); final HandshakeStatus handshakeStatus = result.getHandshakeStatus(); final int produced = result.bytesProduced(); final int consumed = result.bytesConsumed(); // Update indexes for the next iteration offset += consumed; length -= consumed; switch (status) { case BUFFER_OVERFLOW: final int readableBytes = decodeOut.readableBytes(); final int previousOverflowReadableBytes = overflowReadableBytes; overflowReadableBytes = readableBytes; int bufferSize = engine.getSession().getApplicationBufferSize() - readableBytes; if (readableBytes > 0) { firedChannelRead = true; ctx.fireChannelRead(decodeOut); // This buffer was handled, null it out. decodeOut = null; if (bufferSize <= 0) { // It may happen that readableBytes >= engine.getSession().getApplicationBufferSize() // while there is still more to unwrap, in this case we will just allocate a new buffer // with the capacity of engine.getSession().getApplicationBufferSize() and call unwrap // again. bufferSize = engine.getSession().getApplicationBufferSize(); } } else { // This buffer was handled, null it out. 
decodeOut.release(); decodeOut = null; } if (readableBytes == 0 && previousOverflowReadableBytes == 0) { // If there is two consecutive loops where we overflow and are not able to consume any data, // assume the amount of data exceeds the maximum amount for the engine and bail throw new IllegalStateException("Two consecutive overflows but no content was consumed. " + SSLSession.class.getSimpleName() + " getApplicationBufferSize: " + engine.getSession().getApplicationBufferSize() + " maybe too small."); } // Allocate a new buffer which can hold all the rest data and loop again. // TODO: We may want to reconsider how we calculate the length here as we may // have more then one ssl message to decode. decodeOut = allocate(ctx, engineType.calculatePendingData(this, bufferSize)); continue; case CLOSED: // notify about the CLOSED state of the SSLEngine. See #137 notifyClosure = true; overflowReadableBytes = -1; break; default: overflowReadableBytes = -1; break; } switch (handshakeStatus) { case NEED_UNWRAP: break; case NEED_WRAP: // If the wrap operation transitions the status to NOT_HANDSHAKING and there is no more data to // unwrap then the next call to unwrap will not produce any data. We can avoid the potentially // costly unwrap operation and break out of the loop. if (wrapNonAppData(ctx, true) && length == 0) { break unwrapLoop; } break; case NEED_TASK: if (!runDelegatedTasks(true)) { // We scheduled a task on the delegatingTaskExecutor, so stop processing as we will // resume once the task completes. // // We break out of the loop only and do NOT return here as we still may need to notify // about the closure of the SSLEngine. // wrapLater = false; break unwrapLoop; } break; case FINISHED: setHandshakeSuccess(); wrapLater = true; // We 'break' here and NOT 'continue' as android API version 21 has a bug where they consume // data from the buffer but NOT correctly set the SSLEngineResult.bytesConsumed(). 
// Because of this it will raise an exception on the next iteration of the for loop on android // API version 21. Just doing a break will work here as produced and consumed will both be 0 // and so we break out of the complete for (;;) loop and so call decode(...) again later on. // On other platforms this will have no negative effect as we will just continue with the // for (;;) loop if something was either consumed or produced. // // See: // - https://github.com/netty/netty/issues/4116 // - https://code.google.com/p/android/issues/detail?id=198639&thanks=198639&ts=1452501203 break; case NOT_HANDSHAKING: if (setHandshakeSuccessIfStillHandshaking()) { wrapLater = true; continue; } // If we are not handshaking and there is no more data to unwrap then the next call to unwrap // will not produce any data. We can avoid the potentially costly unwrap operation and break // out of the loop. if (length == 0) { break unwrapLoop; } break; default: throw new IllegalStateException("unknown handshake status: " + handshakeStatus); } if (status == Status.BUFFER_UNDERFLOW || // If we processed NEED_TASK we should try again even we did not consume or produce anything. handshakeStatus != HandshakeStatus.NEED_TASK && consumed == 0 && produced == 0) { if (handshakeStatus == HandshakeStatus.NEED_UNWRAP) { // The underlying engine is starving so we need to feed it with more data. // See https://github.com/netty/netty/pull/5039 readIfNeeded(ctx); } break; } } if (flushedBeforeHandshake && handshakePromise.isDone()) { // We need to call wrap(...) in case there was a flush done before the handshake completed to ensure // we do not stale. 
// // See https://github.com/netty/netty/pull/2437 flushedBeforeHandshake = false; wrapLater = true; } if (wrapLater) { wrap(ctx, true); } if (notifyClosure) { notifyClosePromise(null); } } finally { if (decodeOut != null) { if (decodeOut.isReadable()) { firedChannelRead = true; ctx.fireChannelRead(decodeOut); } else { decodeOut.release(); } } } return originalLength - length; }
java
private int unwrap( ChannelHandlerContext ctx, ByteBuf packet, int offset, int length) throws SSLException { final int originalLength = length; boolean wrapLater = false; boolean notifyClosure = false; int overflowReadableBytes = -1; ByteBuf decodeOut = allocate(ctx, length); try { // Only continue to loop if the handler was not removed in the meantime. // See https://github.com/netty/netty/issues/5860 unwrapLoop: while (!ctx.isRemoved()) { final SSLEngineResult result = engineType.unwrap(this, packet, offset, length, decodeOut); final Status status = result.getStatus(); final HandshakeStatus handshakeStatus = result.getHandshakeStatus(); final int produced = result.bytesProduced(); final int consumed = result.bytesConsumed(); // Update indexes for the next iteration offset += consumed; length -= consumed; switch (status) { case BUFFER_OVERFLOW: final int readableBytes = decodeOut.readableBytes(); final int previousOverflowReadableBytes = overflowReadableBytes; overflowReadableBytes = readableBytes; int bufferSize = engine.getSession().getApplicationBufferSize() - readableBytes; if (readableBytes > 0) { firedChannelRead = true; ctx.fireChannelRead(decodeOut); // This buffer was handled, null it out. decodeOut = null; if (bufferSize <= 0) { // It may happen that readableBytes >= engine.getSession().getApplicationBufferSize() // while there is still more to unwrap, in this case we will just allocate a new buffer // with the capacity of engine.getSession().getApplicationBufferSize() and call unwrap // again. bufferSize = engine.getSession().getApplicationBufferSize(); } } else { // This buffer was handled, null it out. 
decodeOut.release(); decodeOut = null; } if (readableBytes == 0 && previousOverflowReadableBytes == 0) { // If there is two consecutive loops where we overflow and are not able to consume any data, // assume the amount of data exceeds the maximum amount for the engine and bail throw new IllegalStateException("Two consecutive overflows but no content was consumed. " + SSLSession.class.getSimpleName() + " getApplicationBufferSize: " + engine.getSession().getApplicationBufferSize() + " maybe too small."); } // Allocate a new buffer which can hold all the rest data and loop again. // TODO: We may want to reconsider how we calculate the length here as we may // have more then one ssl message to decode. decodeOut = allocate(ctx, engineType.calculatePendingData(this, bufferSize)); continue; case CLOSED: // notify about the CLOSED state of the SSLEngine. See #137 notifyClosure = true; overflowReadableBytes = -1; break; default: overflowReadableBytes = -1; break; } switch (handshakeStatus) { case NEED_UNWRAP: break; case NEED_WRAP: // If the wrap operation transitions the status to NOT_HANDSHAKING and there is no more data to // unwrap then the next call to unwrap will not produce any data. We can avoid the potentially // costly unwrap operation and break out of the loop. if (wrapNonAppData(ctx, true) && length == 0) { break unwrapLoop; } break; case NEED_TASK: if (!runDelegatedTasks(true)) { // We scheduled a task on the delegatingTaskExecutor, so stop processing as we will // resume once the task completes. // // We break out of the loop only and do NOT return here as we still may need to notify // about the closure of the SSLEngine. // wrapLater = false; break unwrapLoop; } break; case FINISHED: setHandshakeSuccess(); wrapLater = true; // We 'break' here and NOT 'continue' as android API version 21 has a bug where they consume // data from the buffer but NOT correctly set the SSLEngineResult.bytesConsumed(). 
// Because of this it will raise an exception on the next iteration of the for loop on android // API version 21. Just doing a break will work here as produced and consumed will both be 0 // and so we break out of the complete for (;;) loop and so call decode(...) again later on. // On other platforms this will have no negative effect as we will just continue with the // for (;;) loop if something was either consumed or produced. // // See: // - https://github.com/netty/netty/issues/4116 // - https://code.google.com/p/android/issues/detail?id=198639&thanks=198639&ts=1452501203 break; case NOT_HANDSHAKING: if (setHandshakeSuccessIfStillHandshaking()) { wrapLater = true; continue; } // If we are not handshaking and there is no more data to unwrap then the next call to unwrap // will not produce any data. We can avoid the potentially costly unwrap operation and break // out of the loop. if (length == 0) { break unwrapLoop; } break; default: throw new IllegalStateException("unknown handshake status: " + handshakeStatus); } if (status == Status.BUFFER_UNDERFLOW || // If we processed NEED_TASK we should try again even we did not consume or produce anything. handshakeStatus != HandshakeStatus.NEED_TASK && consumed == 0 && produced == 0) { if (handshakeStatus == HandshakeStatus.NEED_UNWRAP) { // The underlying engine is starving so we need to feed it with more data. // See https://github.com/netty/netty/pull/5039 readIfNeeded(ctx); } break; } } if (flushedBeforeHandshake && handshakePromise.isDone()) { // We need to call wrap(...) in case there was a flush done before the handshake completed to ensure // we do not stale. 
// // See https://github.com/netty/netty/pull/2437 flushedBeforeHandshake = false; wrapLater = true; } if (wrapLater) { wrap(ctx, true); } if (notifyClosure) { notifyClosePromise(null); } } finally { if (decodeOut != null) { if (decodeOut.isReadable()) { firedChannelRead = true; ctx.fireChannelRead(decodeOut); } else { decodeOut.release(); } } } return originalLength - length; }
[ "private", "int", "unwrap", "(", "ChannelHandlerContext", "ctx", ",", "ByteBuf", "packet", ",", "int", "offset", ",", "int", "length", ")", "throws", "SSLException", "{", "final", "int", "originalLength", "=", "length", ";", "boolean", "wrapLater", "=", "false...
Unwraps inbound SSL records.
[ "Unwraps", "inbound", "SSL", "records", "." ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/handler/src/main/java/io/netty/handler/ssl/SslHandler.java#L1321-L1485
train
Unwrap the given packet.
[ 30522, 2797, 20014, 4895, 13088, 9331, 1006, 3149, 11774, 3917, 8663, 18209, 14931, 2595, 1010, 24880, 8569, 2546, 14771, 1010, 20014, 16396, 1010, 20014, 3091, 1007, 11618, 7020, 2571, 2595, 24422, 1063, 2345, 20014, 2434, 7770, 13512, 2232,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
netty/netty
buffer/src/main/java/io/netty/buffer/CompositeByteBuf.java
CompositeByteBuf.consolidate
public CompositeByteBuf consolidate(int cIndex, int numComponents) { checkComponentIndex(cIndex, numComponents); if (numComponents <= 1) { return this; } final int endCIndex = cIndex + numComponents; final Component last = components[endCIndex - 1]; final int capacity = last.endOffset - components[cIndex].offset; final ByteBuf consolidated = allocBuffer(capacity); for (int i = cIndex; i < endCIndex; i ++) { components[i].transferTo(consolidated); } lastAccessed = null; removeCompRange(cIndex + 1, endCIndex); components[cIndex] = new Component(consolidated, 0, 0, capacity, consolidated); updateComponentOffsets(cIndex); return this; }
java
public CompositeByteBuf consolidate(int cIndex, int numComponents) { checkComponentIndex(cIndex, numComponents); if (numComponents <= 1) { return this; } final int endCIndex = cIndex + numComponents; final Component last = components[endCIndex - 1]; final int capacity = last.endOffset - components[cIndex].offset; final ByteBuf consolidated = allocBuffer(capacity); for (int i = cIndex; i < endCIndex; i ++) { components[i].transferTo(consolidated); } lastAccessed = null; removeCompRange(cIndex + 1, endCIndex); components[cIndex] = new Component(consolidated, 0, 0, capacity, consolidated); updateComponentOffsets(cIndex); return this; }
[ "public", "CompositeByteBuf", "consolidate", "(", "int", "cIndex", ",", "int", "numComponents", ")", "{", "checkComponentIndex", "(", "cIndex", ",", "numComponents", ")", ";", "if", "(", "numComponents", "<=", "1", ")", "{", "return", "this", ";", "}", "fina...
Consolidate the composed {@link ByteBuf}s @param cIndex the index on which to start to compose @param numComponents the number of components to compose
[ "Consolidate", "the", "composed", "{", "@link", "ByteBuf", "}", "s" ]
ba06eafa1c1824bd154f1a380019e7ea2edf3c4c
https://github.com/netty/netty/blob/ba06eafa1c1824bd154f1a380019e7ea2edf3c4c/buffer/src/main/java/io/netty/buffer/CompositeByteBuf.java#L1709-L1728
train
Consolidates the specified number of components from this CompositeByteBuf.
[ 30522, 2270, 12490, 3762, 2618, 8569, 2546, 24939, 1006, 20014, 25022, 13629, 2595, 1010, 20014, 16371, 12458, 25377, 5643, 7666, 1007, 1063, 4638, 30524, 12458, 25377, 5643, 7666, 1007, 1025, 2065, 1006, 16371, 12458, 25377, 5643, 7666, 1026...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...