language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassEnhancer.java | {
"start": 5825,
"end": 6338
} | class ____ is: %s",
configClass.getName(), enhancedClass.getName()));
}
return enhancedClass;
}
catch (CodeGenerationException ex) {
throw new BeanDefinitionStoreException("Could not enhance configuration class [" + configClass.getName() +
"]. Consider declaring @Configuration(proxyBeanMethods=false) without inter-bean references " +
"between @Bean methods on the configuration class, avoiding the need for CGLIB enhancement.", ex);
}
}
/**
* Checks whether the given config | name |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/authentication/dao/AbstractUserDetailsAuthenticationProvider.java | {
"start": 15531,
"end": 16729
} | class ____ implements UserDetailsChecker {
@Override
public void check(UserDetails user) {
if (!user.isAccountNonLocked()) {
AbstractUserDetailsAuthenticationProvider.this.logger
.debug("Failed to authenticate since user account is locked");
throw new LockedException(AbstractUserDetailsAuthenticationProvider.this.messages
.getMessage("AbstractUserDetailsAuthenticationProvider.locked", "User account is locked"));
}
if (!user.isEnabled()) {
AbstractUserDetailsAuthenticationProvider.this.logger
.debug("Failed to authenticate since user account is disabled");
throw new DisabledException(AbstractUserDetailsAuthenticationProvider.this.messages
.getMessage("AbstractUserDetailsAuthenticationProvider.disabled", "User is disabled"));
}
if (!user.isAccountNonExpired()) {
AbstractUserDetailsAuthenticationProvider.this.logger
.debug("Failed to authenticate since user account has expired");
throw new AccountExpiredException(AbstractUserDetailsAuthenticationProvider.this.messages
.getMessage("AbstractUserDetailsAuthenticationProvider.expired", "User account has expired"));
}
}
}
private | DefaultPreAuthenticationChecks |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/Schema.java | {
"start": 37268,
"end": 39701
} | class ____ extends Schema {
private final List<Schema> types;
private final Map<String, Integer> indexByName;
public UnionSchema(LockableArrayList<Schema> types) {
super(Type.UNION);
this.indexByName = new HashMap<>(Math.multiplyExact(2, types.size()));
this.types = types.lock();
int index = 0;
for (Schema type : types) {
if (type.getType() == Type.UNION) {
throw new AvroRuntimeException("Nested union: " + this);
}
String name = type.getFullName();
if (name == null) {
throw new AvroRuntimeException("Nameless in union:" + this);
}
if (indexByName.put(name, index++) != null) {
throw new AvroRuntimeException("Duplicate in union:" + name);
}
}
}
/**
* Checks if a JSON value matches the schema.
*
* @param jsonValue a value to check against the schema
* @return true if the value is valid according to this schema
*/
public boolean isValidDefault(JsonNode jsonValue) {
return this.types.stream().anyMatch((Schema s) -> s.isValidDefault(jsonValue));
}
@Override
public List<Schema> getTypes() {
return types;
}
@Override
public Integer getIndexNamed(String name) {
return indexByName.get(name);
}
@Override
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof UnionSchema))
return false;
UnionSchema that = (UnionSchema) o;
return equalCachedHash(that) && types.equals(that.types) && propsEqual(that);
}
@Override
int computeHash() {
int hash = super.computeHash();
for (Schema type : types)
hash += type.computeHash();
return hash;
}
@Override
public void addProp(String name, String value) {
throw new AvroRuntimeException("Can't set properties on a union: " + this);
}
@Override
@Deprecated
void toJson(Set<String> knownNames, String currentNamespace, JsonGenerator gen) throws IOException {
gen.writeStartArray();
for (Schema type : types)
type.toJson(knownNames, currentNamespace, gen);
gen.writeEndArray();
}
@Override
public String getName() {
return super.getName()
+ this.getTypes().stream().map(Schema::getName).collect(Collectors.joining(", ", "[", "]"));
}
}
private static | UnionSchema |
java | micronaut-projects__micronaut-core | http-client/src/main/java/io/micronaut/http/client/netty/DefaultHttpClient.java | {
"start": 105792,
"end": 105906
} | class ____ {
byte[] data;
String id;
String name;
Duration retry;
}
}
| CurrentEvent |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IdentityBinaryExpressionTest.java | {
"start": 3311,
"end": 4619
} | class ____ {
void f(int a) {
// BUG: Diagnostic contains: equivalent to `1`
int r = a / a;
// BUG: Diagnostic contains: equivalent to `0`
r = a - a;
// BUG: Diagnostic contains: equivalent to `0`
r = a % a;
// BUG: Diagnostic contains: equivalent to `true`
boolean b = a >= a;
// BUG: Diagnostic contains: equivalent to `true`
b = a == a;
// BUG: Diagnostic contains: equivalent to `true`
b = a <= a;
// BUG: Diagnostic contains: equivalent to `false`
b = a > a;
// BUG: Diagnostic contains: equivalent to `false`
b = a < a;
// BUG: Diagnostic contains: equivalent to `false`
b = a != a;
// BUG: Diagnostic contains: equivalent to `false`
b = b ^ b;
}
}
""")
.doTest();
}
@Test
public void negativeAssert() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
| Test |
java | junit-team__junit5 | junit-jupiter-params/src/main/java/org/junit/jupiter/params/support/ParameterDeclarations.java | {
"start": 2487,
"end": 2784
} | class ____, if field injection is used.
*/
AnnotatedElement getSourceElement();
/**
* {@return a human-readable description of the source element}
*
* <p>This may, for example, be used in error messages.
*
* @see #getSourceElement()
*/
String getSourceElementDescription();
}
| itself |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/various/OneOneGeneratedValueTest.java | {
"start": 1820,
"end": 2235
} | class ____ {
private Long aId;
private EntityA a;
private Long b;
@Id
public Long getAId() {
return aId;
}
public void setAId(Long aId) {
this.aId = aId;
}
@OneToOne
@PrimaryKeyJoinColumn
public EntityA getA() {
return a;
}
public void setA(EntityA a) {
this.a = a;
}
public Long getB() {
return b;
}
public void setB(Long b) {
this.b = b;
}
}
}
| EntityB |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/pathparams/HttpPathParamLimitWithProgrammaticRoutes500Test.java | {
"start": 502,
"end": 2769
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.binder.http-client.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.http-server.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.vertx.enabled", "true")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false")
.withApplicationRoot((jar) -> jar
.addClasses(Util.class,
Resource.class));
@Inject
MeterRegistry registry;
public static final int COUNT = 101;
@Test
void testWithProgrammaticRoutes500() throws InterruptedException {
registry.clear();
// Verify OK response
for (int i = 0; i < COUNT; i++) {
RestAssured.get("/programmatic").then().statusCode(200);
RestAssured.get("/programmatic/foo-" + i).then().statusCode(200);
}
// Verify metrics
Util.waitForMeters(registry.find("http.server.requests").timers(), COUNT);
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/programmatic").timers().iterator().next().count());
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/programmatic/{message}").timers().iterator().next().count());
// Verify 405 responses
for (int i = 0; i < COUNT; i++) {
RestAssured.get("/bad").then().statusCode(500);
RestAssured.get("/bad/foo-" + i).then().statusCode(500);
}
Util.waitForMeters(registry.find("http.server.requests").timers(), COUNT * 2);
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/bad").tag("method", "GET").timers().iterator().next().count());
Assertions.assertEquals(4, registry.find("http.server.requests")
.tag("method", "GET").timers().size());
}
@Singleton
public static | HttpPathParamLimitWithProgrammaticRoutes500Test |
java | google__gson | gson/src/main/java/com/google/gson/internal/sql/SqlDateTypeAdapter.java | {
"start": 1360,
"end": 1457
} | class ____ to synchronize its read and write methods.
*/
@SuppressWarnings("JavaUtilDate")
final | has |
java | apache__camel | components/camel-xmlsecurity/src/main/java/org/apache/camel/component/xmlsecurity/api/XmlSignatureProperties.java | {
"start": 1610,
"end": 3750
} | interface ____ {
/** Input message for reading header data */
Message getMessage();
/**
* The message body as DOM node. If the message body is plain text then the node will be a text node. If the
* message body is a XML document, then the node is the root element.
*/
Node getMessageBodyNode();
/**
* Returns the parent node of the signature element in the case of enveloped or detached XML signature, or the
* empty result document in the case of enveloping XML signature.
*
* @return parent node, cannot be <code>null</code>
*/
Node getParent();
/** Key info. */
KeyInfo getKeyInfo();
/**
* XML signature factory which can be used to create Reference and XMLObject instances.
*
* @return factory
*/
XMLSignatureFactory getSignatureFactory();
/**
* Signature algorithm. Example: "http://www.w3.org/2000/09/xmldsig#dsa-sha1".
*/
String getSignatureAlgorithm();
/**
* Digest algorithm which is used for the digest calculation of the message body.
*/
String getContentDigestAlgorithm();
/**
* Signature Id. Can be <code>null</code>, then no signature Id attribute is generated.
*/
String getSignatureId();
/**
* Reference URI which points to the content of the original XML document to be signed. For enveloped signature
* it is typically the empty string. For detached signature, it is an ID attribute value preceded by '#'. For
* enveloping signature, it is <code>null</code>.
*/
String getContentReferenceUri();
/**
* Retruns the signature type.
*
* @return signature type
*/
SignatureType getSignatureType();
/**
* Returns the prefix for the XML Signature namespace ("http://www.w3.org/2000/09/xmldsig#"). Can be null or
* empty.
*/
String getPrefixForXmlSignatureNamespace();
}
| Input |
java | processing__processing4 | app/src/processing/app/syntax/JEditTextArea.java | {
"start": 37119,
"end": 61428
} | enum ____ {
Word,
Whitespace,
Other
}
private CharacterKinds CharacterKind( char ch, String noWordSep )
{
if ( Character.isLetterOrDigit(ch) || ch=='_' || noWordSep.indexOf(ch) != -1 )
return CharacterKinds.Word;
else if ( Character.isWhitespace(ch) )
return CharacterKinds.Whitespace;
else
return CharacterKinds.Other;
}
/*
static float getFontCharWidth(char c, FontMetrics fm) {
return getFontCharsWidth(new char[] { c }, 0, 1, fm);
}
static final char[] spaceChar = new char[] { ' ' };
static float getFontCharsWidth(char[] data, int offset, int len,
FontMetrics fm) {
if (len == 0) {
return 0;
}
// doesn't seem to do anything fractional
float wi = fm.charsWidth(data, offset, len);
if (wi != ((int) wi)) {
System.out.println("extra: " + wi);
}
int spaceWidth = fm.charsWidth(spaceChar, 0, 1);
//return fm.charsWidth(data, offset, len);
return len * spaceWidth;
}
*/
/**
* Hacked up version of the function with the same name from
* javax.swing.text.Utilities.
*
* In spite of being a fixed width font, Source Code Pro (the default
* font starting in Processing 3) returns slightly different widths
* depending on the number of characters shown. Using the getBounds()
* method on text won't even give us these metrics for individual
* characters, which returns a float but never with any fractional.
*
* This function forces the width of each character to stay the same,
* just as we're doing by drawing individual characters in the
* TextAreaPainter class.
*
* <a href="https://github.com/processing/processing4/issues/447">#447</a>,
* <a href="https://github.com/processing/processing4/issues/226">#226</a>,
* <a href="https://github.com/processing/processing4/issues/194">#194</a>,
* and <a href="https://github.com/sampottinger/processing/issues/103">Sam's 103</a>
*/
static int getTabbedTextWidth(Segment s,
FontMetrics metrics, int x,
TabExpander e, int startOffset) {
int nextX = x;
char[] txt = s.array;
int txtOffset = s.offset;
int n = s.offset + s.count;
for (int i = txtOffset; i < n; i++) {
if (txt[i] == '\t' && e != null) {
nextX = (int) e.nextTabStop(nextX, startOffset + i - txtOffset);
continue;
}
nextX += metrics.charWidth(txt[i]);
}
return nextX - x;
}
/*
static int getTabbedTextWidth(Segment s,
FontMetrics metrics, int x,
TabExpander e, int startOffset) {
int nextX = x;
char[] txt = s.array;
int txtOffset = s.offset;
int n = s.offset + s.count;
int charCount = 0;
// int spaceAddon = 0;
int spaceWidth = metrics.charWidth(' ');
for (int i = txtOffset; i < n; i++) {
if (txt[i] == '\t') {
//nextX += metrics.charsWidth(txt, i-charCount, charCount);
nextX += charCount * spaceWidth;
charCount = 0;
if (txt[i] == '\t') {
if (e != null) {
nextX = (int) e.nextTabStop(nextX, startOffset + i - txtOffset);
} else {
// if no tab expander, just return the size of a space
//nextX += getFontCharWidth(' ', metrics);
nextX += spaceWidth;
}
} else if (txt[i] == ' ') {
//float spaceWidth = getFontCharWidth(' ', metrics);
//nextX += spaceWidth + spaceAddon;
nextX += spaceWidth;
}
} else if (txt[i] == '\n') {
// Ignore newlines, they take up space, and shouldn't be counted.
//nextX += getFontCharsWidth(txt, i - charCount, charCount, metrics);
nextX += charCount * spaceWidth;
// But this doesn't make any sense: why are we adding horizontally,
// shouldn't nextX be *reset* here? Guessing that segments never
// include a new line, so we never run into this. [fry 220129]
charCount = 0;
} else {
charCount++;
}
}
//nextX += getFontCharsWidth(txt, n - charCount, charCount, metrics);
nextX += charCount * spaceWidth;
// int amt = (int) (nextX - x);
// float spc = getFontCharWidth(' ', metrics);
// System.out.println(amt + " % " + spc + " = " + (amt % spc));
// return (int) (nextX - x); // nextX was a float, this was returning a float [fry 220128]
return nextX - x;
}
*/
protected void setNewSelectionWord( int line, int offset )
{
if (getLineLength(line) == 0) {
newSelectionStart = getLineStartOffset(line);
newSelectionEnd = newSelectionStart;
return;
}
String noWordSep = (String)document.getProperty("noWordSep");
if(noWordSep == null)
noWordSep = "";
String lineText = getLineText(line);
int wordStart = 0;
int wordEnd = lineText.length();
int charPos = PApplet.constrain(offset - 1, 0, lineText.length() - 1);
char ch = lineText.charAt(charPos);
CharacterKinds thisWord = CharacterKind(ch,noWordSep);
for(int i = offset - 1; i >= 0; i--) {
ch = lineText.charAt(i);
if(CharacterKind(ch,noWordSep) != thisWord) {
wordStart = i + 1;
break;
}
}
for(int i = offset; i < lineText.length(); i++) {
ch = lineText.charAt(i);
if(CharacterKind(ch,noWordSep) != thisWord) {
wordEnd = i;
break;
}
}
int lineStart = getLineStartOffset(line);
newSelectionStart = lineStart + wordStart;
newSelectionEnd = lineStart + wordEnd;
}
/**
* Returns the selected text, or null if no selection is active.
*/
public final String getSelectedText()
{
if (selectionStart == selectionEnd) {
return null;
} else {
return getText(selectionStart, selectionEnd - selectionStart);
}
}
/**
* Replaces the selection with the specified text.
* @param selectedText The replacement text for the selection
*/
public void setSelectedText(String selectedText) {
setSelectedText(selectedText, false);
}
/**
* Replaces the selection with the specified text.
* @param selectedText The replacement text for the selection
* @param recordCompoundEdit Whether the replacement should be
* recorded as a compound edit
*/
public void setSelectedText(String selectedText, boolean recordCompoundEdit) {
if (!editable) {
throw new InternalError("Text component read only");
}
if (recordCompoundEdit) {
document.beginCompoundEdit();
}
try {
document.remove(selectionStart, selectionEnd - selectionStart);
if (selectedText != null) {
document.insertString(selectionStart, selectedText,null);
}
} catch (BadLocationException bl) {
bl.printStackTrace();
throw new InternalError("Cannot replace selection");
} finally {
// No matter what happens... stops us from leaving document in a bad state
// (provided this has to be recorded as a compound edit, of course...)
if (recordCompoundEdit) {
document.endCompoundEdit();
}
}
setCaretPosition(selectionEnd);
}
/**
* Returns true if this text area is editable, false otherwise.
*/
public final boolean isEditable() {
return editable;
}
/**
* Sets if this component is editable.
* @param editable True if this text area should be editable,
* false otherwise
*/
public final void setEditable(boolean editable) {
this.editable = editable;
}
/**
* Returns the right click popup menu.
*/
public final JPopupMenu getRightClickPopup() {
return popup;
}
/**
* Sets the right click popup menu.
* @param popup The popup
*/
public final void setRightClickPopup(JPopupMenu popup) {
this.popup = popup;
}
/**
* Returns the 'magic' caret position. This can be used to preserve
* the column position when moving up and down lines.
*/
public final int getMagicCaretPosition() {
return magicCaret;
}
/**
* Sets the 'magic' caret position. This can be used to preserve
* the column position when moving up and down lines.
* @param magicCaret The magic caret position
*/
public final void setMagicCaretPosition(int magicCaret) {
this.magicCaret = magicCaret;
}
/**
* Similar to <code>setSelectedText()</code>, but overstrikes the
* appropriate number of characters if overwrite mode is enabled.
* @param str The string
* @see #setSelectedText(String)
* @see #isOverwriteEnabled()
*/
public void overwriteSetSelectedText(String str)
{
// Don't overstrike if there is a selection
if(!overwrite || selectionStart != selectionEnd)
{
// record the whole operation as a compound edit if
// selected text is being replaced
boolean isSelectAndReplaceOp = (selectionStart != selectionEnd);
setSelectedText(str, isSelectAndReplaceOp);
return;
}
// Don't overstrike if we're on the end of
// the line
int caret = getCaretPosition();
int caretLineEnd = getLineStopOffset(getCaretLine());
if(caretLineEnd - caret <= str.length())
{
setSelectedText(str, false);
return;
}
try
{
document.remove(caret,str.length());
document.insertString(caret,str,null);
}
catch(BadLocationException bl)
{
bl.printStackTrace();
}
}
/**
* Returns true if overwrite mode is enabled, false otherwise.
*/
public final boolean isOverwriteEnabled()
{
return overwrite;
}
/**
* Sets if overwrite mode should be enabled.
* @param overwrite True if overwrite mode should be enabled,
* false otherwise.
*/
public final void setOverwriteEnabled(boolean overwrite)
{
this.overwrite = overwrite;
painter.invalidateSelectedLines();
}
/**
* Returns the position of the highlighted bracket (the bracket
* matching the one before the caret)
*/
public final int getBracketPosition()
{
return bracketPosition;
}
/**
* Returns the line of the highlighted bracket (the bracket
* matching the one before the caret)
*/
public final int getBracketLine()
{
return bracketLine;
}
/**
* Adds a caret change listener to this text area.
* @param listener The listener
*/
public final void addCaretListener(CaretListener listener)
{
eventListenerList.add(CaretListener.class,listener);
}
/**
* Removes a caret change listener from this text area.
* @param listener The listener
*/
public final void removeCaretListener(CaretListener listener)
{
eventListenerList.remove(CaretListener.class,listener);
}
/**
* Deletes the selected text from the text area and places it
* into the clipboard.
* If no selection is made, the whole line with caret will be selectd.
*/
public void cut() {
if (editable) {
copy();
setSelectedText("");
}
}
/**
* Places the selected text into the clipboard.
* If no selection is made, the whole line with caret will be selectd.
*/
public void copy() {
if (selectionStart == selectionEnd) {
selectLine(getCaretLine());
}
Clipboard clipboard = getToolkit().getSystemClipboard();
String selection = getSelectedText();
if (selection != null) {
int repeatCount = inputHandler.getRepeatCount();
StringBuilder sb = new StringBuilder();
for(int i = 0; i < repeatCount; i++) {
sb.append(selection);
}
clipboard.setContents(new StringSelection(sb.toString()), null);
}
}
/**
* Copy the current selection as HTML, formerly "Format for Discourse".
* <p/>
* Original code by <A HREF="http://usuarios.iponet.es/imoreta">owd</A>.
* <p/>
* Revised and updated for revision 0108 by Ben Fry (10 March 2006).
* <p/>
* Updated for 0122 to simply copy the code directly to the clipboard,
* rather than opening a new window.
* <p/>
* Updated for 0144 to only format the selected lines.
* <p/>
* Updated for 0185 to incorporate the HTML changes from the Arduino project,
* and set the formatter to always use HTML (disabling, but not removing the
* YaBB version of the code) and also fixing it for the Tools API.
* <p/>
* Updated for 0190 to simply be part of JEditTextArea, removed YaBB code.
* Simplest and most sensible to have it live here, since it's no longer
* specific to any language or version of the PDE.
*/
public void copyAsHTML() {
HtmlSelection formatted = new HtmlSelection("<html><body><pre>\n"
+ getTextAsHtml(null) + "\n</pre></body></html>");
Clipboard clipboard = processing.app.ui.Toolkit.getSystemClipboard();
clipboard.setContents(formatted, (clipboard1, contents) -> {
// I don't care about ownership
});
}
/**
* Guts of copyAsHTML, minus the pre, body, and html blocks surrounding.
* @param doc If null, read only the selection if any, and use the active
* document. Otherwise, the whole of doc is used.
*/
public String getTextAsHtml(SyntaxDocument doc) {
StringBuilder cf = new StringBuilder();
int selStart = getSelectionStart();
int selStop = getSelectionStop();
int startLine = getSelectionStartLine();
int stopLine = getSelectionStopLine();
if (doc != null) {
startLine = 0;
stopLine = doc.getDefaultRootElement().getElementCount() - 1;
}
// If no selection, convert all the lines
else if (selStart == selStop) {
startLine = 0;
stopLine = getLineCount() - 1;
} else {
// Make sure the selection doesn't end at the beginning of the last line
if (getLineStartOffset(stopLine) == selStop) {
stopLine--;
}
}
if (doc == null) {
doc = getDocument();
}
// Read the code line by line
for (int i = startLine; i <= stopLine; i++) {
emitAsHTML(cf, i, doc);
}
return cf.toString();
}
private void emitAsHTML(StringBuilder cf, int line, SyntaxDocument doc) {
// Almost static; only needs the painter for a color scheme.
Segment segment = new Segment();
try {
Element element = doc.getDefaultRootElement().getElement(line);
int start = element.getStartOffset();
int stop = element.getEndOffset();
doc.getText(start, stop - start - 1, segment);
} catch (BadLocationException e) { return; }
char[] segmentArray = segment.array;
int limit = segment.getEndIndex();
int segmentOffset = segment.offset;
int segmentCount = segment.count;
TokenMarkerState tokenMarker = doc.getTokenMarker();
// If syntax coloring is disabled, do simple translation
if (tokenMarker == null) {
for (int j = 0; j < segmentCount; j++) {
char c = segmentArray[j + segmentOffset];
appendAsHTML(cf, c);
}
} else {
// If syntax coloring is enabled, we have to do this
// because tokens can vary in width
Token tokens = tokenMarker.markTokens(segment, line);
int offset = 0;
SyntaxStyle[] styles = painter.getStyles();
for (;;) {
byte id = tokens.id;
if (id == Token.END) {
if (segmentOffset + offset < limit) {
appendAsHTML(cf, segmentArray[segmentOffset + offset]);
} else {
cf.append('\n');
}
return; // cf.toString();
}
if (id != Token.NULL) {
cf.append("<span style=\"color: #");
cf.append(PApplet.hex(styles[id].getColor().getRGB() & 0xFFFFFF, 6));
cf.append(";\">");
if (styles[id].isBold())
cf.append("<b>");
}
int length = tokens.length;
for (int j = 0; j < length; j++) {
char c = segmentArray[segmentOffset + offset + j];
if (offset == 0 && c == ' ') {
// Force spaces at the beginning of the line
cf.append(" ");
} else {
appendAsHTML(cf, c);
}
// Place close tags [/]
if (j == (length - 1) && id != Token.NULL && styles[id].isBold())
cf.append("</b>");
if (j == (length - 1) && id != Token.NULL)
cf.append("</span>");
}
offset += length;
tokens = tokens.next;
}
}
}
/**
* Handle encoding HTML entities for lt, gt, and anything non-ASCII.
*/
private void appendAsHTML(StringBuilder buffer, char c) {
if (c == '<') {
buffer.append("<");
} else if (c == '>') {
buffer.append(">");
} else if (c == '&') {
buffer.append("&");
} else if (c == '\'') {
buffer.append("'");
} else if (c == '"') {
buffer.append(""");
} else if (c > 127) {
buffer.append("&#"); // use unicode entity
buffer.append((int) c); // use unicode entity
buffer.append(';'); // use unicode entity
} else {
buffer.append(c); // normal character
}
}
/**
* Inserts the clipboard contents into the text.
*/
public void paste() {
// System.out.println("focus owner is: " + isFocusOwner());
if (editable) {
Clipboard clipboard = getToolkit().getSystemClipboard();
try {
String selection =
((String) clipboard.getContents(this).getTransferData(DataFlavor.stringFlavor));
if (selection.contains("\r\n")) {
selection = selection.replaceAll("\r\n", "\n");
} else if (selection.contains("\r")) {
// The Mac OS MRJ doesn't convert \r to \n, so do it here
selection = selection.replace('\r','\n');
}
// Remove tabs and replace with spaces
// https://github.com/processing/processing/issues/108
if (selection.contains("\t")) {
int tabSize = Preferences.getInteger("editor.tabs.size");
char[] c = new char[tabSize];
Arrays.fill(c, ' ');
String tabString = new String(c);
selection = selection.replaceAll("\t", tabString);
}
// Replace unicode x00A0 (non-breaking space) with just a plain space.
// Seen often on Mac OS X when pasting from Safari. [fry 030929]
selection = selection.replace('\u00A0', ' ');
// Remove ASCII NUL characters. Reported when pasting from
// Acrobat Reader and PDF documents. [fry 130719]
// https://github.com/processing/processing/issues/1973
if (selection.indexOf('\0') != -1) {
//System.out.println("found NUL charaacters");
//int before = selection.length();
selection = selection.replaceAll("\0", "");
//int after = selection.length();
//System.out.println(before + " " + after);
}
int repeatCount = inputHandler.getRepeatCount();
selection = selection.repeat(Math.max(0, repeatCount));
setSelectedText(selection);
} catch (Exception e) {
getToolkit().beep();
System.err.println("Clipboard does not contain a string");
DataFlavor[] flavors = clipboard.getAvailableDataFlavors();
for (DataFlavor f : flavors) {
try {
Object o = clipboard.getContents(this).getTransferData(f);
System.out.println(f + " = " + o);
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
}
}
/**
* Called by the AWT when this component is removed from it's parent.
* This stops clears the currently focused component.
*/
public void removeNotify() {
super.removeNotify();
// if(focusedComponent == this)
// focusedComponent = null;
if (!DISABLE_CARET) {
caretTimer.stop();
}
}
// /**
// * The component that tracks the current line number.
// */
// public EditorLineStatus editorLineStatus;
/*
public void processKeyEvent(KeyEvent evt) {
// this had to be added in Processing 007X, because the menu key
// events weren't making it up to the frame.
super.processKeyEvent(evt);
//System.out.println("jedittextarea: " + evt);
//System.out.println();
if (inputHandler == null) return;
switch(evt.getID()) {
case KeyEvent.KEY_TYPED:
if ((editorListener == null) || !editorListener.keyTyped(evt)) {
inputHandler.keyTyped(evt);
}
break;
case KeyEvent.KEY_PRESSED:
if ((editorListener == null) || !editorListener.keyPressed(evt)) {
inputHandler.keyPressed(evt);
}
break;
case KeyEvent.KEY_RELEASED:
inputHandler.keyReleased(evt);
break;
}
}
*/
public void processKeyEvent(KeyEvent event) {
// this had to be added in Processing 007X, because the menu key
// events weren't making it up to the frame.
super.processKeyEvent(event);
if (inputHandler != null) {
switch (event.getID()) {
case KeyEvent.KEY_TYPED:
inputHandler.keyTyped(event);
break;
case KeyEvent.KEY_PRESSED:
inputHandler.keyPressed(event);
break;
case KeyEvent.KEY_RELEASED:
inputHandler.keyReleased(event);
break;
}
}
}
// protected members
protected static String CENTER = "center";
protected static String RIGHT = "right";
protected static String BOTTOM = "bottom";
protected Timer caretTimer;
static private final boolean DISABLE_CARET = false;
protected TextAreaPainter painter;
protected JPopupMenu popup;
protected EventListenerList eventListenerList;
protected MutableCaretEvent caretEvent;
protected boolean caretBlinks;
protected boolean caretVisible;
protected boolean blink;
protected boolean editable = true;
protected int firstLine;
protected int visibleLines;
protected int electricScroll;
protected int horizontalOffset;
protected JScrollBar vertical;
protected JScrollBar horizontal;
protected boolean scrollBarsInitialized;
protected InputHandler inputHandler;
protected SyntaxDocument document;
protected DocumentHandler documentHandler;
protected Segment lineSegment;
protected int selectionStart;
protected int selectionStartLine;
protected int selectionEnd;
protected int selectionEndLine;
protected boolean biasLeft;
protected int newSelectionStart; // hack to get around lack of multiple returns in Java
protected int newSelectionEnd;
protected boolean selectWord;
protected boolean selectLine;
protected int selectionAncorStart;
protected int selectionAncorEnd;
protected int bracketPosition;
protected int bracketLine;
protected int magicCaret;
protected boolean overwrite;
protected void fireCaretEvent()
{
Object[] listeners = eventListenerList.getListenerList();
for(int i = listeners.length - 2; i >= 0; i--)
{
if(listeners[i] == CaretListener.class)
{
((CaretListener)listeners[i+1]).caretUpdate(caretEvent);
}
}
}
protected void updateBracketHighlight(int newCaretPosition)
{
if(newCaretPosition == 0)
{
bracketPosition = bracketLine = -1;
return;
}
try
{
int offset = bracketHelper.findMatchingBracket(document.getText(0,
document.getLength()), newCaretPosition - 1);
if(offset != -1)
{
bracketLine = getLineOfOffset(offset);
bracketPosition = offset - getLineStartOffset(bracketLine);
return;
}
}
catch(BadLocationException bl)
{
bl.printStackTrace();
}
bracketLine = bracketPosition = -1;
}
protected void documentChanged(DocumentEvent evt)
{
bracketHelper.invalidate();
DocumentEvent.ElementChange ch =
evt.getChange(document.getDefaultRootElement());
int count;
if(ch == null)
count = 0;
else
count = ch.getChildrenAdded().length -
ch.getChildrenRemoved().length;
int line = getLineOfOffset(evt.getOffset());
if(count == 0)
{
painter.invalidateLine(line);
}
// do magic stuff
else if(line < firstLine)
{
setFirstLine(line);
}
// end of magic stuff
else
{
painter.invalidateLineRange(line,firstLine + visibleLines);
updateScrollBars();
}
}
/*
| CharacterKinds |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/caching/mocked/ReadWriteCacheTest.java | {
"start": 9024,
"end": 9420
} | class ____ implements Consumer<SessionFactoryBuilder> {
@Override
public void accept(SessionFactoryBuilder sessionFactoryBuilder) {
TransactionInterceptor transactionInterceptor = new TransactionInterceptor();
sessionFactoryBuilder.addSessionFactoryObservers( transactionInterceptor );
sessionFactoryBuilder.applyInterceptor( transactionInterceptor );
}
}
private static | Configurer |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/spr12233/Spr12233Tests.java | {
"start": 2299,
"end": 2402
} | class ____ {
@Value("${idontexist}")
private String property;
}
}
| ConditionWithPropertyValueInjection |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/AddMountTableEntriesRequestPBImpl.java | {
"start": 1800,
"end": 3424
} | class ____
extends AddMountTableEntriesRequest implements PBRecord {
private final FederationProtocolPBTranslator<AddMountTableEntriesRequestProto,
AddMountTableEntriesRequestProto.Builder,
AddMountTableEntriesRequestProtoOrBuilder> translator =
new FederationProtocolPBTranslator<>(AddMountTableEntriesRequestProto.class);
public AddMountTableEntriesRequestPBImpl() {
}
public AddMountTableEntriesRequestPBImpl(AddMountTableEntriesRequestProto proto) {
this.translator.setProto(proto);
}
@Override
public AddMountTableEntriesRequestProto getProto() {
return this.translator.build();
}
@Override
public void setProto(Message proto) {
this.translator.setProto(proto);
}
@Override
public void readInstance(String base64String) throws IOException {
this.translator.readInstance(base64String);
}
@Override
public List<MountTable> getEntries() {
List<MountTableRecordProto> entryProto = this.translator.getProtoOrBuilder().getEntryList();
if (entryProto == null) {
return null;
}
List<MountTable> mountTables = new ArrayList<>();
entryProto.forEach(e -> mountTables.add(new MountTablePBImpl(e)));
return mountTables;
}
@Override
public void setEntries(List<MountTable> mountTables) {
for (MountTable mountTable : mountTables) {
if (mountTable instanceof MountTablePBImpl) {
MountTablePBImpl mountPB = (MountTablePBImpl) mountTable;
MountTableRecordProto mountProto = mountPB.getProto();
translator.getBuilder().addEntry(mountProto);
}
}
}
} | AddMountTableEntriesRequestPBImpl |
java | apache__camel | components/camel-servlet/src/test/java/org/apache/camel/component/servlet/rest/RestServletQueryParamTest.java | {
"start": 1233,
"end": 3192
} | class ____ extends ServletCamelRouterTestSupport {
@BindToRegistry("myBinding")
private ServletRestHttpBinding restHttpBinding = new ServletRestHttpBinding();
@Test
public void testQueryTrue() throws Exception {
WebRequest req = new GetMethodWebRequest(contextUrl + "/services/users/");
req.setParameter("auth", "secret");
WebResponse response = query(req, false);
assertEquals(200, response.getResponseCode());
assertEquals("secret;Donald Duck", response.getText());
}
@Test
public void testQueryFalse() throws Exception {
WebRequest req = new GetMethodWebRequest(contextUrl + "/services/users/");
WebResponse response = query(req, false);
assertEquals(400, response.getResponseCode());
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
// configure to use servlet on localhost
restConfiguration().component("servlet").host("localhost").endpointProperty("httpBinding", "#myBinding")
.clientRequestValidation(true);
// use the rest DSL to define the rest services
rest()
.get("/users/")
.param()
.name("auth")
.type(RestParamType.query)
.required(true)
.endParam()
.to("direct:users");
from("direct:users")
.to("mock:input").process(exchange -> {
String auth = exchange.getIn().getHeader("auth", String.class);
exchange.getMessage().setBody(auth + ";Donald Duck");
});
}
};
}
}
| RestServletQueryParamTest |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/core/annotation/UniqueSecurityAnnotationScannerTests.java | {
"start": 18454,
"end": 18650
} | class ____
implements AnnotationOnInterface, AlsoAnnotationOnInterface {
@Override
public String method() {
return "ok";
}
}
private static | ClassOverridingMultipleInterfaceInheritance |
java | apache__camel | components/camel-chunk/src/test/java/org/apache/camel/component/chunk/ChunkComponentLayersTest.java | {
"start": 1265,
"end": 2421
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:endSimple")
protected MockEndpoint endSimpleMock;
@Produce("direct:startSimple")
protected ProducerTemplate startSimpleProducerTemplate;
/**
* Test using themeLayer parameter without Resource URI header defined
*/
@Test
void testChunkLayer() throws Exception {
// Prepare
Exchange exchange = createExchangeWithBody("The Body");
exchange.getIn().setHeader("name", "Andrew");
endSimpleMock.expectedMessageCount(1);
endSimpleMock.expectedBodiesReceived("<div>\nEarth to Andrew. Come in, Andrew.\n</div>\n");
// Act
startSimpleProducerTemplate.send(exchange);
// Verify
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:startSimple")
.to("chunk:example?themeLayer=example_1")
.to("mock:endSimple");
}
};
}
}
| ChunkComponentLayersTest |
java | grpc__grpc-java | api/src/test/java/io/grpc/ServiceProvidersTest.java | {
"start": 11024,
"end": 11284
} | class ____ extends ServiceProvidersTestAbstractProvider {
@Override
public boolean isAvailable() {
throw new AvailableException();
}
@Override
public int priority() {
return 0;
}
public static final | FailAtAvailableProvider |
java | micronaut-projects__micronaut-core | aop/src/main/java/io/micronaut/aop/Adapter.java | {
"start": 2117,
"end": 2394
} | class ____ delegate to the method then a compilation error should occur.</p>
*
* @author graemerocher
* @since 1.0
*/
@Documented
@Retention(RUNTIME)
@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD})
@DefaultScope(Singleton.class)
@Executable
@Inherited
public @ | cannot |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesProducer.java | {
"start": 38277,
"end": 39947
} | class ____ extends SortedDocValues implements BlockLoader.OptionalColumnAtATimeReader {
final SortedEntry entry;
final TermsEnum termsEnum;
BaseSortedDocValues(SortedEntry entry) throws IOException {
this.entry = entry;
this.termsEnum = termsEnum();
}
@Override
public int getValueCount() {
return Math.toIntExact(entry.termsDictEntry.termsDictSize);
}
@Override
public BytesRef lookupOrd(int ord) throws IOException {
termsEnum.seekExact(ord);
return termsEnum.term();
}
@Override
public int lookupTerm(BytesRef key) throws IOException {
TermsEnum.SeekStatus status = termsEnum.seekCeil(key);
return switch (status) {
case FOUND -> Math.toIntExact(termsEnum.ord());
default -> Math.toIntExact(-1L - termsEnum.ord());
};
}
@Override
public TermsEnum termsEnum() throws IOException {
return new TermsDict(entry.termsDictEntry, data, merging);
}
@Override
public BlockLoader.Block tryRead(
BlockLoader.BlockFactory factory,
BlockLoader.Docs docs,
int offset,
boolean nullsFiltered,
BlockDocValuesReader.ToDouble toDouble,
boolean toInt
) throws IOException {
return null;
}
BlockLoader.Block tryReadAHead(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException {
return null;
}
}
abstract static | BaseSortedDocValues |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/method/annotation/ErrorsMethodArgumentResolver.java | {
"start": 1674,
"end": 2760
} | class ____ implements HandlerMethodArgumentResolver {
@Override
public boolean supportsParameter(MethodParameter parameter) {
Class<?> paramType = parameter.getParameterType();
return Errors.class.isAssignableFrom(paramType);
}
@Override
public @Nullable Object resolveArgument(MethodParameter parameter,
@Nullable ModelAndViewContainer mavContainer, NativeWebRequest webRequest,
@Nullable WebDataBinderFactory binderFactory) throws Exception {
Assert.state(mavContainer != null,
"Errors/BindingResult argument only supported on regular handler methods");
ModelMap model = mavContainer.getModel();
String lastKey = CollectionUtils.lastElement(model.keySet());
if (lastKey != null && lastKey.startsWith(BindingResult.MODEL_KEY_PREFIX)) {
return model.get(lastKey);
}
throw new IllegalStateException(
"An Errors/BindingResult argument is expected to be declared immediately after " +
"the model attribute, the @RequestBody or the @RequestPart arguments " +
"to which they apply: " + parameter.getMethod());
}
}
| ErrorsMethodArgumentResolver |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/CommentsTest.java | {
"start": 14582,
"end": 15179
} | class ____ {
abstract Object target(Object param);
void test(Object param) {
// BUG: Diagnostic contains: [[] param [1]]
target(param); // 1
/* 2 */ int i;
}
}
""")
.doTest();
}
@Test
public void
findCommentsForArguments_attachesCommentToSecondArgument_whenFollowedByTreeContainingComma() {
CompilationTestHelper.newInstance(PrintCommentsForArguments.class, getClass())
.addSourceLines(
"Test.java",
"""
abstract | Test |
java | elastic__elasticsearch | libs/native/src/test/java/org/elasticsearch/nativeaccess/SystemCallFilterTests.java | {
"start": 760,
"end": 1488
} | class ____ extends ESTestCase {
/** command to try to run in tests */
static final String EXECUTABLE = Constants.WINDOWS ? "calc" : "ls";
@Override
public void setUp() throws Exception {
super.setUp();
assumeTrue(
"requires system call filter installation",
NativeAccess.instance().getExecSandboxState() != NativeAccess.ExecSandboxState.NONE
);
// otherwise security manager will block the execution, no fun
assumeTrue("cannot test with security manager enabled", System.getSecurityManager() == null);
// otherwise, since we don't have TSYNC support, rules are not applied to the test thread
// (randomizedrunner | SystemCallFilterTests |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/batch/compact/BatchCompactOperator.java | {
"start": 2689,
"end": 6857
} | class ____<T> extends AbstractStreamOperator<CompactOutput>
implements OneInputStreamOperator<CoordinatorOutput, CompactOutput>, BoundedOneInput {
private static final long serialVersionUID = 1L;
public static final String UNCOMPACTED_PREFIX = "uncompacted-";
public static final String COMPACTED_PREFIX = "compacted-";
public static final String ATTEMPT_PREFIX = "attempt-";
private final SupplierWithException<FileSystem, IOException> fsFactory;
private final CompactReader.Factory<T> readerFactory;
private final CompactWriter.Factory<T> writerFactory;
private transient FileSystem fileSystem;
private transient Map<String, List<Path>> compactedFiles;
public BatchCompactOperator(
SupplierWithException<FileSystem, IOException> fsFactory,
CompactReader.Factory<T> readerFactory,
CompactWriter.Factory<T> writerFactory) {
this.fsFactory = fsFactory;
this.readerFactory = readerFactory;
this.writerFactory = writerFactory;
}
@Override
public void open() throws Exception {
fileSystem = fsFactory.get();
compactedFiles = new HashMap<>();
}
@Override
public void processElement(StreamRecord<CoordinatorOutput> element) throws Exception {
CoordinatorOutput value = element.getValue();
if (value instanceof CompactionUnit) {
CompactionUnit unit = (CompactionUnit) value;
String partition = unit.getPartition();
// these files should be merged to one file
List<Path> paths = unit.getPaths();
Configuration config =
getContainingTask().getEnvironment().getTaskManagerInfo().getConfiguration();
Path path = null;
if (paths.size() == 1) {
// for single file, we only need to move it to corresponding partition instead of
// compacting. we make the downstream commit operator to do the moving
path = paths.get(0);
} else if (paths.size() > 1) {
Path targetPath =
createCompactedFile(
paths, getRuntimeContext().getTaskInfo().getAttemptNumber());
path =
CompactFileUtils.doCompact(
fileSystem,
partition,
paths,
targetPath,
config,
readerFactory,
writerFactory);
}
if (path != null) {
compactedFiles.computeIfAbsent(partition, k -> new ArrayList<>()).add(path);
}
} else {
throw new UnsupportedOperationException("Unsupported input message: " + value);
}
}
@Override
public void endInput() throws Exception {
// emit the compacted files to downstream
output.collect(new StreamRecord<>(new CompactOutput(compactedFiles)));
}
@Override
public void close() throws Exception {
compactedFiles.clear();
}
private static Path createCompactedFile(List<Path> uncompactedFiles, int attemptNumber) {
Path path = convertFromUncompacted(uncompactedFiles.get(0));
// different attempt will have different target paths to avoid different attempts will
// write same path
return new Path(
path.getParent(), convertToCompactWithAttempt(attemptNumber, path.getName()));
}
public static Path convertFromUncompacted(Path path) {
Preconditions.checkArgument(
path.getName().startsWith(UNCOMPACTED_PREFIX),
"This should be uncompacted file: " + path);
return new Path(path.getParent(), path.getName().substring(UNCOMPACTED_PREFIX.length()));
}
private static String convertToCompactWithAttempt(int attemptNumber, String fileName) {
return String.format(
"%s%s%d-%s", COMPACTED_PREFIX, ATTEMPT_PREFIX, attemptNumber, fileName);
}
}
| BatchCompactOperator |
java | spring-projects__spring-boot | module/spring-boot-pulsar/src/main/java/org/springframework/boot/pulsar/autoconfigure/PulsarProperties.java | {
"start": 23037,
"end": 23587
} | class ____ {
/**
* Number of threads to be used for handling connections to brokers.
*/
private @Nullable Integer io;
/**
* Number of threads to be used for message listeners.
*/
private @Nullable Integer listener;
public @Nullable Integer getIo() {
return this.io;
}
public void setIo(@Nullable Integer io) {
this.io = io;
}
public @Nullable Integer getListener() {
return this.listener;
}
public void setListener(@Nullable Integer listener) {
this.listener = listener;
}
}
public static | Threads |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/Lifecycle.java | {
"start": 6081,
"end": 6449
} | enum ____ {
BEFORE,
AFTER
}
/**
* Returns the kind of link (BEFORE or AFTER).
*
* @return the link kind
*/
Kind kind();
/**
* Returns the pointer to the target phase.
*
* @return the phase pointer
*/
Pointer pointer();
}
| Kind |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/datastream/SingleOutputStreamOperator.java | {
"start": 12394,
"end": 12640
} | class ____ the returned data type.
* @return This operator with the type information corresponding to the given type class.
*/
public SingleOutputStreamOperator<T> returns(Class<T> typeClass) {
requireNonNull(typeClass, "type | of |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedThrottlingExceptionRoutePolicyTest.java | {
"start": 4571,
"end": 6475
} | class
____ threshold = proxy.getFailureThreshold();
assertEquals(10, threshold.intValue());
Long window = proxy.getFailureWindow();
assertEquals(1000L, window.longValue());
Long halfOpenAfter = proxy.getHalfOpenAfter();
assertEquals(5000L, halfOpenAfter.longValue());
// change value
proxy.setHalfOpenAfter(10000L);
halfOpenAfter = proxy.getHalfOpenAfter();
assertEquals(10000L, halfOpenAfter.longValue());
try {
getMockEndpoint("mock:result").expectedMessageCount(0);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
} catch (Exception e) {
// expected
}
// state should be closed w/ no failures
myState = proxy.currentState();
assertTrue(myState.contains("State closed, failures 1, last failure"));
// the route has 1 failure
val = proxy.getCurrentFailures();
assertEquals(1, val.intValue());
Thread.sleep(200);
// the route has 1 failure X mills ago
lastFail = proxy.getLastFailure();
assertTrue(lastFail.longValue() > 0);
}
@Override
protected RouteBuilder createRouteBuilder() {
ThrottlingExceptionRoutePolicy policy = new ThrottlingExceptionRoutePolicy(
10, 1000, 5000,
List.of(IOException.class, UnsupportedOperationException.class));
policy.setHalfOpenHandler(new DummyHandler());
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").routeId("testRoute")
.routePolicy(policy)
.to("log:foo")
.process(new BoomProcess())
.to("mock:result");
}
};
}
static | Integer |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/service/internal/PostCommitEventListenerGroupImpl.java | {
"start": 930,
"end": 2460
} | class ____<T> extends EventListenerGroupImpl<T> {
private final Class<?> extendedListenerContract;
public PostCommitEventListenerGroupImpl(
EventType<T> eventType,
CallbackRegistry callbackRegistry,
boolean isJpaBootstrap) {
super( eventType, callbackRegistry, isJpaBootstrap );
if ( eventType == EventType.POST_COMMIT_DELETE ) {
extendedListenerContract = PostCommitDeleteEventListener.class;
}
else if ( eventType == EventType.POST_COMMIT_INSERT ) {
extendedListenerContract = PostCommitInsertEventListener.class;
}
else if ( eventType == EventType.POST_COMMIT_UPDATE ) {
extendedListenerContract = PostCommitUpdateEventListener.class;
}
else {
throw new IllegalStateException( "Unexpected usage of PostCommitEventListenerGroupImpl" );
}
}
@Override
public void appendListener(T listener) {
checkAgainstExtendedContract( listener );
super.appendListener( listener );
}
private void checkAgainstExtendedContract(T listener) {
if ( !extendedListenerContract.isInstance( listener ) ) {
EVENT_LISTENER_LOGGER.warnf(
"Encountered event listener [%s] for post-commit event [%s] "
+ "which did not implement the corresponding extended "
+ "listener contract [%s]",
listener.getClass().getName(),
getEventType().eventName(),
extendedListenerContract.getName()
);
}
}
@Override
public void prependListener(T listener) {
checkAgainstExtendedContract( listener );
super.prependListener( listener );
}
}
| PostCommitEventListenerGroupImpl |
java | quarkusio__quarkus | devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/dsl/RemovedResource.java | {
"start": 101,
"end": 590
} | class ____ {
private String artifact;
private List<String> removedResources = new ArrayList<>(1);
public RemovedResource(String artifact) {
this.artifact = artifact;
}
public RemovedResource resource(String resource) {
removedResources.add(resource);
return this;
}
public String getArtifactName() {
return artifact;
}
public List<String> getRemovedResources() {
return removedResources;
}
}
| RemovedResource |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/ContainerServiceRecordProcessor.java | {
"start": 6991,
"end": 8054
} | class ____
extends ContainerRecordDescriptor<InetAddress> {
/**
* Creates a container A record descriptor.
* @param path registry path for service record
* @param record service record
* @throws Exception
*/
public AContainerRecordDescriptor(String path,
ServiceRecord record) throws Exception {
super(path, record);
}
/**
* Initializes the descriptor parameters.
* @param serviceRecord the service record.
*/
@Override protected void init(ServiceRecord serviceRecord) {
String ip = serviceRecord.get(YarnRegistryAttributes.YARN_IP);
if (ip == null) {
throw new IllegalArgumentException("No IP specified");
}
try {
this.setTarget(InetAddress.getByName(ip));
this.setNames(new Name[] {getContainerName(), getContainerIDName(),
getComponentName()});
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
/**
* A container AAAA record descriptor.
*/
| AContainerRecordDescriptor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DuplicateBranchesTest.java | {
"start": 3325,
"end": 3703
} | class ____ {
String g(boolean a, String b, String c) {
if (a) {
return c;
} else if (a) {
return b;
} else {
return b;
}
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | google__dagger | javatests/dagger/internal/codegen/XExecutableTypesTest.java | {
"start": 11153,
"end": 12080
} | class ____ {",
" <T> List<T> toList(Collection<T> c) { throw new RuntimeException(); }",
"}");
CompilerTests.invocationCompiler(foo, bar)
.compile(
invocation -> {
XTypeElement fooType = invocation.getProcessingEnv().requireTypeElement("test.Foo");
XMethodElement m1 = fooType.getDeclaredMethods().get(0);
XTypeElement barType = invocation.getProcessingEnv().requireTypeElement("test.Bar");
XMethodElement m2 = barType.getDeclaredMethods().get(0);
assertThat(XExecutableTypes.isSubsignature(m2, m1)).isTrue();
assertThat(XExecutableTypes.isSubsignature(m1, m2)).isTrue();
});
}
@Test
public void subsignatureWildcards() {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"import java.util.*;",
" | Bar |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/secondarytable/SecondaryRowTest.java | {
"start": 615,
"end": 3040
} | class ____ {
@Test
void testSecondaryTableOptionality(SessionFactoryScope scope) {
scope.inSession( (session) -> {
verifySecondaryRows( "Optional", 0, session );
verifySecondaryRows( "NonOptional", 0, session );
} );
final Record created = scope.fromTransaction( (session) -> {
Record record = new Record();
record.enabled = true;
record.text = "Hello World!";
session.persist( record );
return record;
} );
scope.inSession( (session) -> {
verifySecondaryRows( "Optional", 0, session );
verifySecondaryRows( "NonOptional", 1, session );
} );
created.comment = "I was here";
final Record merged = scope.fromTransaction( (session) -> session.merge( created ) );
scope.inSession( (session) -> {
verifySecondaryRows( "Optional", 1, session );
verifySecondaryRows( "NonOptional", 1, session );
} );
merged.comment = null;
scope.inTransaction( (session) -> session.merge( merged ) );
scope.inSession( (session) -> {
verifySecondaryRows( "Optional", 0, session );
verifySecondaryRows( "NonOptional", 1, session );
} );
}
@Test
public void testOwnedSecondaryTable(SessionFactoryScope scope) {
final String View_name = scope.getSessionFactory().getJdbcServices().getDialect().quote( "`View`" );
verifySecondaryRows( View_name, 0, scope );
final SpecialRecord created = scope.fromTransaction( (session) -> {
final SpecialRecord record = new SpecialRecord();
record.enabled = true;
record.text = "Hello World!";
session.persist( record );
return record;
} );
verifySecondaryRows( View_name, 0, scope );
created.timestamp = Instant.now();
final SpecialRecord merged = scope.fromTransaction( (session) -> session.merge( created ) );
verifySecondaryRows( View_name, 0, scope );
}
@AfterEach
void cleanUpTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
private static void verifySecondaryRows(String table, int expectedCount, SessionFactoryScope sfScope) {
sfScope.inTransaction( (session) -> verifySecondaryRows( table, expectedCount, session ) );
}
private static void verifySecondaryRows(String table, int expectedCount, SessionImplementor session) {
final String sql = "select count(1) from " + table;
final int count = (int) session.createNativeQuery( sql, Integer.class ).getSingleResult();
assertThat( count ).isEqualTo( expectedCount );
}
}
| SecondaryRowTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/pubsub/api/sync/RedisPubSubCommands.java | {
"start": 305,
"end": 1551
} | interface ____<K, V> extends RedisCommands<K, V> {
/**
* Listen for messages published to channels matching the given patterns.
*
* @param patterns the patterns
*/
void psubscribe(K... patterns);
/**
* Stop listening for messages posted to channels matching the given patterns.
*
* @param patterns the patterns
*/
void punsubscribe(K... patterns);
/**
* Listen for messages published to the given channels.
*
* @param channels the channels
*/
void subscribe(K... channels);
/**
* Stop listening for messages posted to the given channels.
*
* @param channels the channels
*/
void unsubscribe(K... channels);
/**
* Listen for messages published to the given shard channels.
*
* @param shardChannels the channels
* @since 6.4
*/
void ssubscribe(K... shardChannels);
/**
* Stop listening for messages posted to the given channels.
*
* @param shardChannels the channels
* @since 6.4
*/
void sunsubscribe(K... shardChannels);
/**
* @return the underlying connection.
*/
StatefulRedisPubSubConnection<K, V> getStatefulConnection();
}
| RedisPubSubCommands |
java | qos-ch__slf4j | slf4j-api/src/main/java/org/slf4j/helpers/Util.java | {
"start": 4241,
"end": 4654
} | class ____ alternative.
*
* @deprecated replaced by the {@link Reporter#error(String, Throwable)} method.
* @param msg message to print
* @param t throwable to print
*/
static final public void report(String msg, Throwable t) {
System.err.println(msg);
System.err.println("Reported exception:");
t.printStackTrace();
}
/**
* See {@link Reporter} | for |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/DevBeanInfos.java | {
"start": 229,
"end": 4656
} | class ____ {
private final List<DevBeanInfo> beans;
private final List<DevBeanInfo> removedBeans;
private final List<DevObserverInfo> observers;
private final List<DevInterceptorInfo> interceptors;
private final List<DevInterceptorInfo> removedInterceptors;
private final List<DevDecoratorInfo> decorators;
private final List<DevDecoratorInfo> removedDecorators;
private final Map<String, DependencyGraph> dependencyGraphs;
public DevBeanInfos() {
beans = new ArrayList<>();
removedBeans = new ArrayList<>();
observers = new ArrayList<>();
interceptors = new ArrayList<>();
removedInterceptors = new ArrayList<>();
decorators = new ArrayList<>();
removedDecorators = new ArrayList<>();
dependencyGraphs = new HashMap<>();
}
public List<DevBeanInfo> getRemovedBeans() {
return removedBeans;
}
public List<DevBeanInfo> getBeans() {
return beans;
}
public List<DevObserverInfo> getObservers() {
return observers;
}
public List<DevInterceptorInfo> getInterceptors() {
return interceptors;
}
public List<DevDecoratorInfo> getDecorators() {
return decorators;
}
public List<DevInterceptorInfo> getRemovedInterceptors() {
return removedInterceptors;
}
public List<DevDecoratorInfo> getRemovedDecorators() {
return removedDecorators;
}
public Map<String, DependencyGraph> getDependencyGraphs() {
return dependencyGraphs;
}
public String getBeanDescription() {
return DevConsoleManager.getGlobal(BEAN_DESCRIPTION);
}
public int getMaxDependencyLevel() {
Integer val = DevConsoleManager.getGlobal(MAX_DEPENDENCY_LEVEL);
return val != null ? val : DEFAULT_MAX_DEPENDENCY_LEVEL;
}
public DevBeanInfo getBean(String id) {
for (DevBeanInfo bean : beans) {
if (bean.getId().equals(id)) {
return bean;
}
}
return null;
}
public DevInterceptorInfo getInterceptor(String id) {
for (DevInterceptorInfo interceptor : interceptors) {
if (interceptor.getId().equals(id)) {
return interceptor;
}
}
return null;
}
public DependencyGraph getDependencyGraph(String beanId) {
// Note that MAX_DEPENDENCY_LEVEL is not implemented in UI yet
Integer maxLevel = DevConsoleManager.getGlobal(MAX_DEPENDENCY_LEVEL);
if (maxLevel == null) {
maxLevel = DEFAULT_MAX_DEPENDENCY_LEVEL;
}
if (dependencyGraphs.isEmpty()) {
return DependencyGraph.EMPTY;
}
DependencyGraph graph = dependencyGraphs.get(beanId);
return graph.maxLevel <= maxLevel ? graph : graph.forLevel(maxLevel);
}
public int getRemovedComponents() {
return removedBeans.size() + removedInterceptors.size() + removedDecorators.size();
}
void addBean(DevBeanInfo beanInfo) {
beans.add(beanInfo);
}
void addRemovedBean(DevBeanInfo beanInfo) {
removedBeans.add(beanInfo);
}
void addObserver(DevObserverInfo observer) {
observers.add(observer);
}
void addInterceptor(DevInterceptorInfo interceptor) {
interceptors.add(interceptor);
}
void addRemovedInterceptor(DevInterceptorInfo interceptor) {
removedInterceptors.add(interceptor);
}
void addDecorator(DevDecoratorInfo decorator) {
decorators.add(decorator);
}
void addRemovedDecorator(DevDecoratorInfo decorator) {
removedDecorators.add(decorator);
}
void addDependencyGraph(String beanId, DependencyGraph graph) {
dependencyGraphs.put(beanId, graph);
}
void sort() {
Collections.sort(beans);
Collections.sort(removedBeans);
Collections.sort(observers);
Collections.sort(interceptors);
Collections.sort(decorators);
Collections.sort(removedDecorators);
Collections.sort(removedInterceptors);
}
static final String BEAN_DESCRIPTION = "io.quarkus.arc.beanDescription";
static final String MAX_DEPENDENCY_LEVEL = "io.quarkus.arc.maxDependencyLevel";
public static final String BEAN_DEPENDENCIES = "io.quarkus.arc.beanDependencies";
static final int DEFAULT_MAX_DEPENDENCY_LEVEL = 10;
}
| DevBeanInfos |
java | apache__maven | compat/maven-embedder/src/main/java/org/fusesource/jansi/Ansi.java | {
"start": 5365,
"end": 23645
} | class ____ extends Ansi {
NoAnsi() {
super();
}
NoAnsi(int size) {
super(size);
}
NoAnsi(StringBuilder builder) {
super(builder);
}
@Override
public Ansi fg(Color color) {
return this;
}
@Override
public Ansi bg(Color color) {
return this;
}
@Override
public Ansi fgBright(Color color) {
return this;
}
@Override
public Ansi bgBright(Color color) {
return this;
}
@Override
public Ansi fg(int color) {
return this;
}
@Override
public Ansi fgRgb(int r, int g, int b) {
return this;
}
@Override
public Ansi bg(int color) {
return this;
}
@Override
public Ansi bgRgb(int r, int g, int b) {
return this;
}
@Override
public Ansi a(Attribute attribute) {
return this;
}
@Override
public Ansi cursor(int row, int column) {
return this;
}
@Override
public Ansi cursorToColumn(int x) {
return this;
}
@Override
public Ansi cursorUp(int y) {
return this;
}
@Override
public Ansi cursorRight(int x) {
return this;
}
@Override
public Ansi cursorDown(int y) {
return this;
}
@Override
public Ansi cursorLeft(int x) {
return this;
}
@Override
public Ansi cursorDownLine() {
return this;
}
@Override
public Ansi cursorDownLine(final int n) {
return this;
}
@Override
public Ansi cursorUpLine() {
return this;
}
@Override
public Ansi cursorUpLine(final int n) {
return this;
}
@Override
public Ansi eraseScreen() {
return this;
}
@Override
public Ansi eraseScreen(Erase kind) {
return this;
}
@Override
public Ansi eraseLine() {
return this;
}
@Override
public Ansi eraseLine(Erase kind) {
return this;
}
@Override
public Ansi scrollUp(int rows) {
return this;
}
@Override
public Ansi scrollDown(int rows) {
return this;
}
@Override
public Ansi saveCursorPosition() {
return this;
}
@Override
@Deprecated
public Ansi restorCursorPosition() {
return this;
}
@Override
public Ansi restoreCursorPosition() {
return this;
}
@Override
public Ansi reset() {
return this;
}
}
private final StringBuilder builder;
private final ArrayList<Integer> attributeOptions = new ArrayList<>(5);
public Ansi() {
this(new StringBuilder(80));
}
public Ansi(Ansi parent) {
this(new StringBuilder(parent.builder));
attributeOptions.addAll(parent.attributeOptions);
}
public Ansi(int size) {
this(new StringBuilder(size));
}
public Ansi(StringBuilder builder) {
this.builder = builder;
}
public Ansi fg(Color color) {
attributeOptions.add(color.fg());
return this;
}
public Ansi fg(int color) {
attributeOptions.add(38);
attributeOptions.add(5);
attributeOptions.add(color & 0xff);
return this;
}
public Ansi fgRgb(int color) {
return fgRgb(color >> 16, color >> 8, color);
}
public Ansi fgRgb(int r, int g, int b) {
attributeOptions.add(38);
attributeOptions.add(2);
attributeOptions.add(r & 0xff);
attributeOptions.add(g & 0xff);
attributeOptions.add(b & 0xff);
return this;
}
public Ansi fgBlack() {
return this.fg(Color.BLACK);
}
public Ansi fgBlue() {
return this.fg(Color.BLUE);
}
public Ansi fgCyan() {
return this.fg(Color.CYAN);
}
public Ansi fgDefault() {
return this.fg(Color.DEFAULT);
}
public Ansi fgGreen() {
return this.fg(Color.GREEN);
}
public Ansi fgMagenta() {
return this.fg(Color.MAGENTA);
}
public Ansi fgRed() {
return this.fg(Color.RED);
}
public Ansi fgYellow() {
return this.fg(Color.YELLOW);
}
    /** Queues the standard background color attribute for the given color. */
    public Ansi bg(Color color) {
        attributeOptions.add(color.bg());
        return this;
    }
    /**
     * Queues an 8-bit (256-palette) background color: SGR parameters "48;5;n".
     *
     * @param color palette index; only the low 8 bits are used
     */
    public Ansi bg(int color) {
        attributeOptions.add(48);
        attributeOptions.add(5);
        attributeOptions.add(color & 0xff);
        return this;
    }
    /**
     * Queues a 24-bit background color packed as 0xRRGGBB.
     *
     * @param color packed RGB value
     */
    public Ansi bgRgb(int color) {
        return bgRgb(color >> 16, color >> 8, color);
    }
    /**
     * Queues a 24-bit background color: SGR parameters "48;2;r;g;b".
     * Only the low 8 bits of each channel are used.
     */
    public Ansi bgRgb(int r, int g, int b) {
        attributeOptions.add(48);
        attributeOptions.add(2);
        attributeOptions.add(r & 0xff);
        attributeOptions.add(g & 0xff);
        attributeOptions.add(b & 0xff);
        return this;
    }
    // Convenience shorthands for the standard background colors; each delegates to bg(Color).
    public Ansi bgCyan() {
        return this.bg(Color.CYAN);
    }
    public Ansi bgDefault() {
        return this.bg(Color.DEFAULT);
    }
    public Ansi bgGreen() {
        return this.bg(Color.GREEN);
    }
    public Ansi bgMagenta() {
        return this.bg(Color.MAGENTA);
    }
    public Ansi bgRed() {
        return this.bg(Color.RED);
    }
    public Ansi bgYellow() {
        return this.bg(Color.YELLOW);
    }
    /** Queues the bright (high-intensity) foreground color attribute for the given color. */
    public Ansi fgBright(Color color) {
        attributeOptions.add(color.fgBright());
        return this;
    }
    // Convenience shorthands for the bright foreground colors.
    public Ansi fgBrightBlack() {
        return this.fgBright(Color.BLACK);
    }
    public Ansi fgBrightBlue() {
        return this.fgBright(Color.BLUE);
    }
    public Ansi fgBrightCyan() {
        return this.fgBright(Color.CYAN);
    }
    public Ansi fgBrightDefault() {
        return this.fgBright(Color.DEFAULT);
    }
    public Ansi fgBrightGreen() {
        return this.fgBright(Color.GREEN);
    }
    public Ansi fgBrightMagenta() {
        return this.fgBright(Color.MAGENTA);
    }
    public Ansi fgBrightRed() {
        return this.fgBright(Color.RED);
    }
    public Ansi fgBrightYellow() {
        return this.fgBright(Color.YELLOW);
    }
    /** Queues the bright (high-intensity) background color attribute for the given color. */
    public Ansi bgBright(Color color) {
        attributeOptions.add(color.bgBright());
        return this;
    }
    // Convenience shorthands for the bright background colors.
    public Ansi bgBrightCyan() {
        return this.bgBright(Color.CYAN);
    }
    public Ansi bgBrightDefault() {
        return this.bgBright(Color.DEFAULT);
    }
    public Ansi bgBrightGreen() {
        return this.bgBright(Color.GREEN);
    }
    public Ansi bgBrightMagenta() {
        return this.bgBright(Color.MAGENTA);
    }
    public Ansi bgBrightRed() {
        return this.bgBright(Color.RED);
    }
    public Ansi bgBrightYellow() {
        return this.bgBright(Color.YELLOW);
    }
    /** Queues an arbitrary SGR attribute; it is emitted on the next flushAttributes(). */
    public Ansi a(Attribute attribute) {
        attributeOptions.add(attribute.value());
        return this;
    }
/**
* Moves the cursor to row n, column m. The values are 1-based.
* Any values less than 1 are mapped to 1.
*
* @param row row (1-based) from top
* @param column column (1 based) from left
* @return this Ansi instance
*/
public Ansi cursor(final int row, final int column) {
return appendEscapeSequence('H', Math.max(1, row), Math.max(1, column));
}
    /**
     * Moves the cursor to column n (CSI "G"). The parameter n is 1-based.
     * If n is less than 1 it is moved to the first column.
     *
     * @param x the index (1-based) of the column to move to
     * @return this Ansi instance
     */
    public Ansi cursorToColumn(final int x) {
        return appendEscapeSequence('G', Math.max(1, x));
    }
/**
* Moves the cursor up. If the parameter y is negative it moves the cursor down.
*
* @param y the number of lines to move up
* @return this Ansi instance
*/
public Ansi cursorUp(final int y) {
return y > 0 ? appendEscapeSequence('A', y) : y < 0 ? cursorDown(-y) : this;
}
/**
* Moves the cursor down. If the parameter y is negative it moves the cursor up.
*
* @param y the number of lines to move down
* @return this Ansi instance
*/
public Ansi cursorDown(final int y) {
return y > 0 ? appendEscapeSequence('B', y) : y < 0 ? cursorUp(-y) : this;
}
/**
* Moves the cursor right. If the parameter x is negative it moves the cursor left.
*
* @param x the number of characters to move right
* @return this Ansi instance
*/
public Ansi cursorRight(final int x) {
return x > 0 ? appendEscapeSequence('C', x) : x < 0 ? cursorLeft(-x) : this;
}
/**
* Moves the cursor left. If the parameter x is negative it moves the cursor right.
*
* @param x the number of characters to move left
* @return this Ansi instance
*/
public Ansi cursorLeft(final int x) {
return x > 0 ? appendEscapeSequence('D', x) : x < 0 ? cursorRight(-x) : this;
}
    /**
     * Moves the cursor relative to the current position. The cursor is moved right if x is
     * positive, left if negative and down if y is positive and up if negative.
     *
     * @param x the number of characters to move horizontally
     * @param y the number of lines to move vertically
     * @return this Ansi instance
     * @since 2.2
     */
    public Ansi cursorMove(final int x, final int y) {
        // Composed from the two single-axis moves; either may be a no-op when 0.
        return cursorRight(x).cursorDown(y);
    }
    /**
     * Moves the cursor to the beginning of the line below (CSI "E").
     *
     * @return this Ansi instance
     */
    public Ansi cursorDownLine() {
        return appendEscapeSequence('E');
    }
/**
* Moves the cursor to the beginning of the n-th line below. If the parameter n is negative it
* moves the cursor to the beginning of the n-th line above.
*
* @param n the number of lines to move the cursor
* @return this Ansi instance
*/
public Ansi cursorDownLine(final int n) {
return n < 0 ? cursorUpLine(-n) : appendEscapeSequence('E', n);
}
    /**
     * Moves the cursor to the beginning of the line above (CSI "F").
     *
     * @return this Ansi instance
     */
    public Ansi cursorUpLine() {
        return appendEscapeSequence('F');
    }
/**
* Moves the cursor to the beginning of the n-th line above. If the parameter n is negative it
* moves the cursor to the beginning of the n-th line below.
*
* @param n the number of lines to move the cursor
* @return this Ansi instance
*/
public Ansi cursorUpLine(final int n) {
return n < 0 ? cursorDownLine(-n) : appendEscapeSequence('F', n);
}
    /** Erases the whole screen (CSI "J" with Erase.ALL). */
    public Ansi eraseScreen() {
        return appendEscapeSequence('J', Erase.ALL.value());
    }
    /** Erases part of the screen (CSI "J") according to the given kind. */
    public Ansi eraseScreen(final Erase kind) {
        return appendEscapeSequence('J', kind.value());
    }
    /** Erases from the cursor to the end of the current line (CSI "K", default mode). */
    public Ansi eraseLine() {
        return appendEscapeSequence('K');
    }
    /** Erases part of the current line (CSI "K") according to the given kind. */
    public Ansi eraseLine(final Erase kind) {
        return appendEscapeSequence('K', kind.value());
    }
    /**
     * Scrolls the page up (CSI "S"); a negative count scrolls down instead.
     * Integer.MIN_VALUE is clamped because its negation overflows back to itself.
     */
    public Ansi scrollUp(final int rows) {
        if (rows == Integer.MIN_VALUE) {
            return scrollDown(Integer.MAX_VALUE);
        }
        return rows > 0 ? appendEscapeSequence('S', rows) : rows < 0 ? scrollDown(-rows) : this;
    }
    /**
     * Scrolls the page down (CSI "T"); a negative count scrolls up instead.
     * Integer.MIN_VALUE is clamped because its negation overflows back to itself.
     */
    public Ansi scrollDown(final int rows) {
        if (rows == Integer.MIN_VALUE) {
            return scrollUp(Integer.MAX_VALUE);
        }
        return rows > 0 ? appendEscapeSequence('T', rows) : rows < 0 ? scrollUp(-rows) : this;
    }
    /**
     * @deprecated misspelled name kept for backward compatibility;
     *             use {@link #restoreCursorPosition()} instead.
     */
    @Deprecated
    public Ansi restorCursorPosition() {
        return restoreCursorPosition();
    }
    /** Saves the cursor position using both the SCO (CSI "s") and DEC (ESC 7) sequences. */
    public Ansi saveCursorPosition() {
        saveCursorPositionSCO();
        return saveCursorPositionDEC();
    }
    // SCO command
    public Ansi saveCursorPositionSCO() {
        return appendEscapeSequence('s');
    }
    // DEC command (ESC 7 — note: no CSI '[' second byte)
    public Ansi saveCursorPositionDEC() {
        builder.append(FIRST_ESC_CHAR);
        builder.append('7');
        return this;
    }
    /** Restores the cursor position using both the SCO (CSI "u") and DEC (ESC 8) sequences. */
    public Ansi restoreCursorPosition() {
        restoreCursorPositionSCO();
        return restoreCursorPositionDEC();
    }
    // SCO command
    public Ansi restoreCursorPositionSCO() {
        return appendEscapeSequence('u');
    }
    // DEC command (ESC 8 — note: no CSI '[' second byte)
    public Ansi restoreCursorPositionDEC() {
        builder.append(FIRST_ESC_CHAR);
        builder.append('8');
        return this;
    }
    /** Queues the SGR reset attribute, clearing colors and styles. */
    public Ansi reset() {
        return a(Attribute.RESET);
    }
    /** Queues the bold (high-intensity) attribute. */
    public Ansi bold() {
        return a(Attribute.INTENSITY_BOLD);
    }
    /** Queues the attribute that turns bold off. */
    public Ansi boldOff() {
        return a(Attribute.INTENSITY_BOLD_OFF);
    }
    // Value-append overloads. Each one first flushes any queued SGR attributes
    // (so pending colors/styles take effect before the value) and then appends
    // the value's StringBuilder representation.
    public Ansi a(String value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(boolean value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(char value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(char[] value, int offset, int len) {
        flushAttributes();
        builder.append(value, offset, len);
        return this;
    }
    public Ansi a(char[] value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(CharSequence value, int start, int end) {
        flushAttributes();
        builder.append(value, start, end);
        return this;
    }
    public Ansi a(CharSequence value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(double value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(float value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(int value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(long value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(Object value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    public Ansi a(StringBuffer value) {
        flushAttributes();
        builder.append(value);
        return this;
    }
    /** Flushes pending attributes and appends the platform line separator. */
    public Ansi newline() {
        flushAttributes();
        builder.append(System.lineSeparator());
        return this;
    }
    /**
     * Flushes pending attributes and appends {@code String.format(pattern, args)}.
     *
     * @param pattern a {@link String#format} pattern
     * @param args arguments for the pattern
     * @return this Ansi instance
     */
    public Ansi format(String pattern, Object... args) {
        flushAttributes();
        builder.append(String.format(pattern, args));
        return this;
    }
    /**
     * Applies another function to this Ansi instance.
     *
     * <p>Note: {@code Consumer} here exposes {@code apply(Ansi)} — presumably the
     * library's own functional interface rather than {@code java.util.function.Consumer}
     * (whose method is {@code accept}).</p>
     *
     * @param fun the function to apply
     * @return this Ansi instance
     * @since 2.2
     */
    public Ansi apply(Consumer fun) {
        fun.apply(this);
        return this;
    }
    /**
     * Uses the {@link org.jline.jansi.AnsiRenderer}
     * to generate the ANSI escape sequences for the supplied text.
     *
     * <p>NOTE(review): this delegates by constructing a new {@code org.jline.jansi.Ansi}
     * and calling its {@code render}. If this class itself IS
     * {@code org.jline.jansi.Ansi} (rather than a repackaged copy), that call is
     * unbounded recursion — confirm the delegate resolves to a different class.</p>
     *
     * @param text text
     * @return this
     * @since 2.2
     */
    public Ansi render(final String text) {
        a(new org.jline.jansi.Ansi().render(text).toString());
        return this;
    }
    /**
     * String formats and renders the supplied arguments. Uses the {@link org.jline.jansi.AnsiRenderer}
     * to generate the ANSI escape sequences.
     *
     * <p>NOTE(review): rendering happens BEFORE formatting here — the pattern is
     * rendered, then {@code String.format} is applied to the result. Same recursion
     * caveat as {@link #render(String)} applies.</p>
     *
     * @param text format
     * @param args arguments
     * @return this
     * @since 2.2
     */
    public Ansi render(final String text, Object... args) {
        a(String.format(new org.jline.jansi.Ansi().render(text).toString(), args));
        return this;
    }
    /**
     * Returns the accumulated output. Note the side effect: any queued SGR
     * attributes are flushed into the buffer before it is rendered.
     */
    @Override
    public String toString() {
        flushAttributes();
        return builder.toString();
    }
    ///////////////////////////////////////////////////////////////////
    // Private Helper Methods
    ///////////////////////////////////////////////////////////////////
    /** Emits a parameterless CSI sequence: ESC '[' command. */
    private Ansi appendEscapeSequence(char command) {
        flushAttributes();
        builder.append(FIRST_ESC_CHAR);
        builder.append(SECOND_ESC_CHAR);
        builder.append(command);
        return this;
    }
    /** Emits a single-parameter CSI sequence: ESC '[' option command. */
    private Ansi appendEscapeSequence(char command, int option) {
        flushAttributes();
        builder.append(FIRST_ESC_CHAR);
        builder.append(SECOND_ESC_CHAR);
        builder.append(option);
        builder.append(command);
        return this;
    }
    /** Emits a multi-parameter CSI sequence with ';'-separated options. */
    private Ansi appendEscapeSequence(char command, Object... options) {
        flushAttributes();
        return doAppendEscapeSequence(command, options);
    }
private void flushAttributes() {
if (attributeOptions.isEmpty()) {
return;
}
if (attributeOptions.size() == 1 && attributeOptions.get(0) == 0) {
builder.append(FIRST_ESC_CHAR);
builder.append(SECOND_ESC_CHAR);
builder.append('m');
} else {
doAppendEscapeSequence('m', attributeOptions.toArray());
}
attributeOptions.clear();
}
private Ansi doAppendEscapeSequence(char command, Object... options) {
builder.append(FIRST_ESC_CHAR);
builder.append(SECOND_ESC_CHAR);
int size = options.length;
for (int i = 0; i < size; i++) {
if (i != 0) {
builder.append(';');
}
if (options[i] != null) {
builder.append(options[i]);
}
}
builder.append(command);
return this;
}
    // Appendable implementation. Unlike the a(...) overloads above, these raw
    // appends do NOT flush queued attributes first.
    @Override
    public Ansi append(CharSequence csq) {
        builder.append(csq);
        return this;
    }
    @Override
    public Ansi append(CharSequence csq, int start, int end) {
        builder.append(csq, start, end);
        return this;
    }
    @Override
    public Ansi append(char c) {
        builder.append(c);
        return this;
    }
}
| NoAnsi |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java | {
"start": 2841,
"end": 3336
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final EvalOperator.ExpressionEvaluator.Factory field;
public Factory(EvalOperator.ExpressionEvaluator.Factory field) {
this.field = field;
}
@Override
public MvFirstBooleanEvaluator get(DriverContext context) {
return new MvFirstBooleanEvaluator(field.get(context), context);
}
@Override
public String toString() {
return "MvFirst[field=" + field + "]";
}
}
}
| Factory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/project/ProjectStateRegistry.java | {
"start": 13292,
"end": 20931
} | class ____ {
private final ImmutableOpenMap.Builder<ProjectId, Entry> projectsEntries;
private final Set<ProjectId> projectsMarkedForDeletion;
private final long projectsMarkedForDeletionGeneration;
private boolean newProjectMarkedForDeletion = false;
private Builder() {
this.projectsEntries = ImmutableOpenMap.builder();
projectsMarkedForDeletion = new HashSet<>();
projectsMarkedForDeletionGeneration = 0;
}
private Builder(ProjectStateRegistry original) {
this.projectsEntries = ImmutableOpenMap.builder(original.projectsEntries);
this.projectsMarkedForDeletion = new HashSet<>(original.projectsMarkedForDeletion);
this.projectsMarkedForDeletionGeneration = original.projectsMarkedForDeletionGeneration;
}
private void updateEntry(ProjectId projectId, UnaryOperator<Entry> modifier) {
Entry entry = projectsEntries.get(projectId);
if (entry == null) {
entry = new Entry();
}
entry = modifier.apply(entry);
projectsEntries.put(projectId, entry);
}
public Builder putProjectSettings(ProjectId projectId, Settings settings) {
updateEntry(projectId, entry -> entry.withSettings(settings));
return this;
}
public Builder putReservedStateMetadata(ProjectId projectId, ReservedStateMetadata reservedStateMetadata) {
updateEntry(projectId, entry -> entry.withReservedStateMetadata(reservedStateMetadata));
return this;
}
public Builder markProjectForDeletion(ProjectId projectId) {
if (projectsMarkedForDeletion.add(projectId)) {
newProjectMarkedForDeletion = true;
}
return this;
}
public Builder removeProject(ProjectId projectId) {
projectsEntries.remove(projectId);
projectsMarkedForDeletion.remove(projectId);
return this;
}
public ProjectStateRegistry build() {
final var unknownButUnderDeletion = Sets.difference(projectsMarkedForDeletion, projectsEntries.keys());
if (unknownButUnderDeletion.isEmpty() == false) {
throw new IllegalArgumentException(
"Cannot mark projects for deletion that are not in the registry: " + unknownButUnderDeletion
);
}
return new ProjectStateRegistry(
projectsEntries.build(),
Collections.unmodifiableSet(projectsMarkedForDeletion),
newProjectMarkedForDeletion ? projectsMarkedForDeletionGeneration + 1 : projectsMarkedForDeletionGeneration
);
}
}
private record Entry(Settings settings, ImmutableOpenMap<String, ReservedStateMetadata> reservedStateMetadata)
implements
ToXContentFragment,
Writeable,
Diffable<Entry> {
Entry() {
this(Settings.EMPTY, ImmutableOpenMap.of());
}
public static Entry readFrom(StreamInput in) throws IOException {
Settings settings = Settings.readSettingsFromStream(in);
ImmutableOpenMap<String, ReservedStateMetadata> reservedStateMetadata;
if (in.getTransportVersion().supports(PROJECT_RESERVED_STATE_MOVE_TO_REGISTRY)) {
int reservedStateSize = in.readVInt();
ImmutableOpenMap.Builder<String, ReservedStateMetadata> builder = ImmutableOpenMap.builder(reservedStateSize);
for (int i = 0; i < reservedStateSize; i++) {
ReservedStateMetadata r = ReservedStateMetadata.readFrom(in);
builder.put(r.namespace(), r);
}
reservedStateMetadata = builder.build();
} else {
reservedStateMetadata = ImmutableOpenMap.of();
}
return new Entry(settings, reservedStateMetadata);
}
public Entry withSettings(Settings settings) {
return new Entry(settings, reservedStateMetadata);
}
public Entry withReservedStateMetadata(ReservedStateMetadata reservedStateMetadata) {
ImmutableOpenMap<String, ReservedStateMetadata> reservedStateMetadataMap = ImmutableOpenMap.builder(this.reservedStateMetadata)
.fPut(reservedStateMetadata.namespace(), reservedStateMetadata)
.build();
return new Entry(settings, reservedStateMetadataMap);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeWriteable(settings);
if (out.getTransportVersion().supports(PROJECT_RESERVED_STATE_MOVE_TO_REGISTRY)) {
out.writeCollection(reservedStateMetadata.values());
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject("settings");
settings.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true")));
builder.endObject();
builder.startObject("reserved_state");
for (ReservedStateMetadata reservedStateMetadata : reservedStateMetadata.values()) {
reservedStateMetadata.toXContent(builder, params);
}
builder.endObject();
return builder;
}
@Override
public Diff<Entry> diff(Entry previousState) {
if (this == previousState) {
return SimpleDiffable.empty();
}
return new EntryDiff(
settings.diff(previousState.settings),
DiffableUtils.diff(previousState.reservedStateMetadata, reservedStateMetadata, DiffableUtils.getStringKeySerializer())
);
}
private record EntryDiff(
Diff<Settings> settingsDiff,
DiffableUtils.MapDiff<String, ReservedStateMetadata, ImmutableOpenMap<String, ReservedStateMetadata>> reservedStateMetadata
) implements Diff<Entry> {
public static EntryDiff readFrom(StreamInput in) throws IOException {
Diff<Settings> settingsDiff = Settings.readSettingsDiffFromStream(in);
DiffableUtils.MapDiff<String, ReservedStateMetadata, ImmutableOpenMap<String, ReservedStateMetadata>> reservedStateMetadata;
if (in.getTransportVersion().supports(PROJECT_RESERVED_STATE_MOVE_TO_REGISTRY)) {
reservedStateMetadata = DiffableUtils.readImmutableOpenMapDiff(
in,
DiffableUtils.getStringKeySerializer(),
RESERVED_DIFF_VALUE_READER
);
} else {
reservedStateMetadata = DiffableUtils.emptyDiff();
}
return new EntryDiff(settingsDiff, reservedStateMetadata);
}
@Override
public Entry apply(Entry part) {
return new Entry(settingsDiff.apply(part.settings), reservedStateMetadata.apply(part.reservedStateMetadata));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeWriteable(settingsDiff);
if (out.getTransportVersion().supports(PROJECT_RESERVED_STATE_MOVE_TO_REGISTRY)) {
reservedStateMetadata.writeTo(out);
}
}
}
}
}
| Builder |
java | apache__camel | dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/TimerNegativeDelayTest.java | {
"start": 989,
"end": 1718
} | class ____ extends BaseEndpointDslTest {
@Test
public void testNegativeDelay() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(10);
context.getRouteController().startAllRoutes();
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new EndpointRouteBuilder() {
@Override
public void configure() throws Exception {
from(timer("foo").delay(-1).period(0).repeatCount(10))
.noAutoStartup()
.to(mock("result"));
}
};
}
}
| TimerNegativeDelayTest |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/inject/qualifiers/named/VehicleSpec.java | {
"start": 880,
"end": 1316
} | class ____ {
@Test
void testStartVehicle() {
// tag::start[]
final ApplicationContext context = ApplicationContext.run(Map.of("spec.name", "VehicleQualifiersNamedSpec"));
Vehicle vehicle = context.getBean(Vehicle.class);
DefaultGroovyMethods.println(this, vehicle.start());
// end::start[]
assertEquals("Starting V8", vehicle.start());
context.close();
}
}
| VehicleSpec |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/PagerUtilsTest_Limit_SQLServer_2.java | {
"start": 163,
"end": 693
} | class ____ extends TestCase {
public void test_db2_union() throws Exception {
String sql = "select * from t1 union select * from t2";
String result = PagerUtils.limit(sql, JdbcConstants.SQL_SERVER, 0, 10);
assertEquals("SELECT TOP 10 XX.*\n" +
"FROM (\n" +
"\tSELECT *\n" +
"\tFROM t1\n" +
"\tUNION\n" +
"\tSELECT *\n" +
"\tFROM t2\n" +
") XX", result);
}
}
| PagerUtilsTest_Limit_SQLServer_2 |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/DiscoveryTests.java | {
"start": 21812,
"end": 21958
} | class ____ {
@Test
@Tag("|")
void test() {
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@DisplayName("")
static | InvalidTagsTestCase |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/SinkManyReplayProcessorTest.java | {
"start": 1452,
"end": 1568
} | class ____ the deprecated ReplayProcessor. Will be removed with it in 3.5.
@SuppressWarnings("deprecation")
public | tests |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/RuntimeBeanDefinition.java | {
"start": 2325,
"end": 7081
} | interface ____<T> extends BeanDefinitionReference<T>, InstantiatableBeanDefinition<T>, BeanContextConditional {
@Override
@NonNull
default AnnotationMetadata getAnnotationMetadata() {
return AnnotationMetadata.EMPTY_METADATA;
}
@Override
default boolean isEnabled(@NonNull BeanContext context, BeanResolutionContext resolutionContext) {
return true;
}
@Override
default List<Argument<?>> getTypeArguments(Class<?> type) {
Class<T> beanType = getBeanType();
if (type != null && type.isAssignableFrom(beanType)) {
if (type.isInterface()) {
return Arrays.stream(GenericTypeUtils.resolveInterfaceTypeArguments(beanType, type))
.map(Argument::of)
.collect(Collectors.toList());
} else {
return Arrays.stream(GenericTypeUtils.resolveSuperTypeGenericArguments(beanType, type))
.map(Argument::of)
.collect(Collectors.toList());
}
} else {
return Collections.emptyList();
}
}
@Override
default boolean isContextScope() {
return getAnnotationMetadata().hasDeclaredAnnotation(Context.class);
}
@Override
default boolean isConfigurationProperties() {
return BeanDefinitionReference.super.isConfigurationProperties();
}
@Override
default BeanDefinition<T> load() {
return this;
}
@Override
default String getBeanDefinitionName() {
return DefaultRuntimeBeanDefinition.generateBeanName(getBeanType());
}
@Override
default BeanDefinition<T> load(BeanContext context) {
return this;
}
@Override
default boolean isPresent() {
return true;
}
@Override
default boolean isSingleton() {
return BeanDefinitionReference.super.isSingleton();
}
/**
* Creates a new effectively singleton bean definition that references the given bean.
*
* @param bean The bean
* @return The {@link BeanDefinitionReference}
* @param <B> The bean type
* @since 3.6.0
*/
@NonNull
static <B> RuntimeBeanDefinition<B> of(@NonNull B bean) {
Objects.requireNonNull(bean, "Bean cannot be null");
@SuppressWarnings("unchecked") Class<B> t = (Class<B>) bean.getClass();
return builder(t, () -> bean).singleton(true).build();
}
/**
* Creates a new bean definition that will resolve the bean from the given supplier.
*
* <p>The bean is by default not singleton and the supplier will be invoked for each injection point.</p>
* @param beanType The bean type
* @param beanSupplier The bean supplier
* @return The {@link BeanDefinitionReference}
* @param <B> The bean type
* @since 3.6.0
*/
@NonNull
static <B> RuntimeBeanDefinition<B> of(
@NonNull Class<B> beanType,
@NonNull Supplier<B> beanSupplier) {
return builder(beanType, beanSupplier).build();
}
/**
* A new builder for constructing and configuring runtime created beans.
* @param bean The bean to use
* @return The builder
* @param <B> The bean type
*/
@NonNull
static <B> Builder<B> builder(@NonNull B bean) {
Objects.requireNonNull(bean, "Bean cannot be null");
@SuppressWarnings("unchecked")
Argument<B> beanType = (Argument<B>) Argument.of(bean.getClass());
return new DefaultRuntimeBeanDefinition.RuntimeBeanBuilder<>(
beanType,
() -> bean
).singleton(true);
}
/**
* A new builder for constructing and configuring runtime created beans.
* @param beanType The bean type
* @param beanSupplier The bean supplier
* @return The builder
* @param <B> The bean type
*/
@NonNull
static <B> Builder<B> builder(@NonNull Class<B> beanType, @NonNull Supplier<B> beanSupplier) {
return new DefaultRuntimeBeanDefinition.RuntimeBeanBuilder<>(
Argument.of(beanType),
beanSupplier
);
}
/**
* A new builder for constructing and configuring runtime created beans.
* @param beanType The bean type
* @param beanSupplier The bean supplier
* @return The builder
* @param <B> The bean type
*/
@NonNull
static <B> Builder<B> builder(@NonNull Argument<B> beanType, @NonNull Supplier<B> beanSupplier) {
return new DefaultRuntimeBeanDefinition.RuntimeBeanBuilder<>(
beanType,
beanSupplier
);
}
/**
* A builder for constructing {@link RuntimeBeanDefinition} instances.
* @param <B> The bean type
*/
| RuntimeBeanDefinition |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/remote/request/RequestMetaTest.java | {
"start": 1229,
"end": 5342
} | class ____ {
private RequestMeta requestMeta;
@BeforeEach
void setUp() {
requestMeta = new RequestMeta();
requestMeta.setClientIp("127.0.0.1");
requestMeta.setClientVersion("1.0.0");
requestMeta.setConnectionId("test-connection-id");
Map<String, String> labels = new HashMap<>();
labels.put("env", "dev");
requestMeta.setLabels(labels);
}
@Test
void testGetClientIp() {
assertEquals("127.0.0.1", requestMeta.getClientIp());
}
@Test
void testGetClientVersion() {
assertEquals("1.0.0", requestMeta.getClientVersion());
}
@Test
void testGetConnectionId() {
assertEquals("test-connection-id", requestMeta.getConnectionId());
}
@Test
void testGetLabels() {
Map<String, String> labels = requestMeta.getLabels();
assertNotNull(labels);
assertEquals(1, labels.size());
assertEquals("dev", labels.get("env"));
}
@Test
void testToString() {
String expected = "RequestMeta{connectionId='test-connection-id', clientIp='127.0.0.1', clientVersion='1.0.0', labels={env=dev}}";
assertEquals(expected, requestMeta.toString());
}
@Test
void testGetConnectionAbilityForNonExist() {
assertEquals(AbilityStatus.UNKNOWN, requestMeta.getConnectionAbility(AbilityKey.SERVER_FUZZY_WATCH));
requestMeta.setAbilityTable(Collections.emptyMap());
assertEquals(AbilityStatus.UNKNOWN, requestMeta.getConnectionAbility(AbilityKey.SERVER_FUZZY_WATCH));
}
@Test
void testGetConnectionAbilityForExist() {
requestMeta.setAbilityTable(Collections.singletonMap(AbilityKey.SERVER_FUZZY_WATCH.getName(), Boolean.FALSE));
assertEquals(AbilityStatus.NOT_SUPPORTED, requestMeta.getConnectionAbility(AbilityKey.SERVER_FUZZY_WATCH));
requestMeta.setAbilityTable(Collections.singletonMap(AbilityKey.SERVER_FUZZY_WATCH.getName(), Boolean.TRUE));
assertEquals(AbilityStatus.SUPPORTED, requestMeta.getConnectionAbility(AbilityKey.SERVER_FUZZY_WATCH));
}
@Test
void testExtractAppLabels() {
Map<String, String> labels = new HashMap<>(requestMeta.getLabels());
labels.put(Constants.APP_CONN_PREFIX + "testKey", "testValue");
labels.put(Constants.APP_CONN_PREFIX + "anotherKey", "anotherValue");
labels.put("no_app_prefix_key", "aaa");
requestMeta.setLabels(labels);
Map<String, String> appLabels = requestMeta.getAppLabels();
assertNotNull(appLabels);
assertEquals(5, appLabels.size()); // appname, client_version_key, client_ip + 2 custom keys
assertEquals("testValue", appLabels.get("testKey"));
assertEquals("anotherValue", appLabels.get("anotherKey"));
}
@Test
void testExtractAppLabelsEmptyAndBlankValues() {
Map<String, String> labels = new HashMap<>(requestMeta.getLabels());
// This should not be included - no value after prefix
labels.put(Constants.APP_CONN_PREFIX, "value");
// This should not be included - blank value
labels.put(Constants.APP_CONN_PREFIX + "blankValue", " ");
// This should not be included - empty value
labels.put(Constants.APP_CONN_PREFIX + "emptyValue", "");
// This should be included
labels.put(Constants.APP_CONN_PREFIX + "validKey", "validValue");
requestMeta.setLabels(labels);
Map<String, String> appLabels = requestMeta.getAppLabels();
assertNotNull(appLabels);
assertEquals(4, appLabels.size()); // appname, client_version_key, client_ip + 1 valid custom key
assertEquals("validValue", appLabels.get("validKey"));
assertTrue(appLabels.containsKey("validKey"));
// Keys with blank or empty values should not be included
assertFalse(appLabels.containsKey("blankValue"));
assertFalse(appLabels.containsKey("emptyValue"));
assertFalse(appLabels.containsKey("")); // key with just prefix
}
}
| RequestMetaTest |
java | google__dagger | javatests/dagger/internal/codegen/bindinggraphvalidation/NullableBindingValidationTest.java | {
"start": 1196,
"end": 1724
} | class ____ {
@Parameters(name = "{0}")
public static ImmutableList<Object[]> parameters() {
return CompilerMode.TEST_PARAMETERS;
}
private final CompilerMode compilerMode;
public NullableBindingValidationTest(CompilerMode compilerMode) {
this.compilerMode = compilerMode;
}
private static final Source NULLABLE =
CompilerTests.javaSource(
"test.Nullable", // force one-string-per-line format
"package test;",
"",
"public @ | NullableBindingValidationTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/field/vectors/BFloat16BinaryDenseVectorDocValuesField.java | {
"start": 797,
"end": 1360
} | class ____ extends BinaryDenseVectorDocValuesField {
public BFloat16BinaryDenseVectorDocValuesField(
BinaryDocValues input,
String name,
DenseVectorFieldMapper.ElementType elementType,
int dims,
IndexVersion indexVersion
) {
super(input, name, elementType, dims, indexVersion);
}
@Override
void decodeDenseVector(IndexVersion indexVersion, BytesRef vectorBR, float[] vector) {
VectorEncoderDecoder.decodeBFloat16DenseVector(vectorBR, vector);
}
}
| BFloat16BinaryDenseVectorDocValuesField |
java | apache__camel | components/camel-ai/camel-djl/src/main/java/org/apache/camel/component/djl/model/cv/ZooImageGenerationPredictor.java | {
"start": 1526,
"end": 3125
} | class ____ extends AbstractPredictor {
protected ZooModel<int[], Image[]> model;
public ZooImageGenerationPredictor(DJLEndpoint endpoint) throws ModelNotFoundException, MalformedModelException,
IOException {
super(endpoint);
Criteria.Builder<int[], Image[]> builder = Criteria.builder()
.optApplication(Application.CV.IMAGE_GENERATION)
.setTypes(int[].class, Image[].class)
.optArtifactId(endpoint.getArtifactId());
if (endpoint.isShowProgress()) {
builder.optProgress(new ProgressBar());
}
Criteria<int[], Image[]> criteria = builder.build();
this.model = ModelZoo.loadModel(criteria);
}
@Override
public void process(Exchange exchange) {
if (exchange.getIn().getBody() instanceof int[]) {
int[] seed = exchange.getIn().getBody(int[].class);
Image[] result = predict(exchange, seed);
exchange.getIn().setBody(result);
} else {
throw new RuntimeCamelException("Data type is not supported. Body should be int[]");
}
}
protected Image[] predict(Exchange exchange, int[] seed) {
exchange.getIn().setHeader(DJLConstants.INPUT, seed);
try (Predictor<int[], Image[]> predictor = model.newPredictor()) {
return predictor.predict(seed);
} catch (TranslateException e) {
throw new RuntimeCamelException("Could not process input or output", e);
}
}
}
| ZooImageGenerationPredictor |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotMetrics.java | {
"start": 1582,
"end": 7087
} | class ____ {
private static final long seed = 0;
private static final short REPLICATION = 3;
private static final String NN_METRICS = "NameNodeActivity";
private static final String NS_METRICS = "FSNamesystem";
private final Path dir = new Path("/TestSnapshot");
private final Path sub1 = new Path(dir, "sub1");
private final Path file1 = new Path(sub1, "file1");
private final Path file2 = new Path(sub1, "file2");
private Configuration conf;
private MiniDFSCluster cluster;
private DistributedFileSystem hdfs;
@BeforeEach
public void setUp() throws Exception {
conf = new Configuration();
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(REPLICATION)
.build();
cluster.waitActive();
hdfs = cluster.getFileSystem();
DFSTestUtil.createFile(hdfs, file1, 1024, REPLICATION, seed);
DFSTestUtil.createFile(hdfs, file2, 1024, REPLICATION, seed);
}
@AfterEach
public void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
/**
* Test the metric SnapshottableDirectories, AllowSnapshotOps,
* DisallowSnapshotOps, and listSnapshottableDirOps
*/
@Test
public void testSnapshottableDirs() throws Exception {
cluster.getNamesystem().getSnapshotManager().setAllowNestedSnapshots(true);
assertGauge("SnapshottableDirectories", 0, getMetrics(NS_METRICS));
assertCounter("AllowSnapshotOps", 0L, getMetrics(NN_METRICS));
assertCounter("DisallowSnapshotOps", 0L, getMetrics(NN_METRICS));
// Allow snapshots for directories, and check the metrics
hdfs.allowSnapshot(sub1);
assertGauge("SnapshottableDirectories", 1, getMetrics(NS_METRICS));
assertCounter("AllowSnapshotOps", 1L, getMetrics(NN_METRICS));
Path sub2 = new Path(dir, "sub2");
Path file = new Path(sub2, "file");
DFSTestUtil.createFile(hdfs, file, 1024, REPLICATION, seed);
hdfs.allowSnapshot(sub2);
assertGauge("SnapshottableDirectories", 2, getMetrics(NS_METRICS));
assertCounter("AllowSnapshotOps", 2L, getMetrics(NN_METRICS));
Path subsub1 = new Path(sub1, "sub1sub1");
Path subfile = new Path(subsub1, "file");
DFSTestUtil.createFile(hdfs, subfile, 1024, REPLICATION, seed);
hdfs.allowSnapshot(subsub1);
assertGauge("SnapshottableDirectories", 3, getMetrics(NS_METRICS));
assertCounter("AllowSnapshotOps", 3L, getMetrics(NN_METRICS));
// Set an already snapshottable directory to snapshottable, should not
// change the metrics
hdfs.allowSnapshot(sub1);
assertGauge("SnapshottableDirectories", 3, getMetrics(NS_METRICS));
// But the number of allowSnapshot operations still increases
assertCounter("AllowSnapshotOps", 4L, getMetrics(NN_METRICS));
// Disallow the snapshot for snapshottable directories, then check the
// metrics again
hdfs.disallowSnapshot(sub1);
assertGauge("SnapshottableDirectories", 2, getMetrics(NS_METRICS));
assertCounter("DisallowSnapshotOps", 1L, getMetrics(NN_METRICS));
// delete subsub1, snapshottable directories should be 1
hdfs.delete(subsub1, true);
assertGauge("SnapshottableDirectories", 1, getMetrics(NS_METRICS));
// list all the snapshottable directories
SnapshottableDirectoryStatus[] status = hdfs.getSnapshottableDirListing();
assertEquals(1, status.length);
assertCounter("ListSnapshottableDirOps", 1L, getMetrics(NN_METRICS));
}
/**
* Test the metrics Snapshots, CreateSnapshotOps, DeleteSnapshotOps,
* RenameSnapshotOps
*/
@Test
public void testSnapshots() throws Exception {
cluster.getNamesystem().getSnapshotManager().setAllowNestedSnapshots(true);
assertGauge("Snapshots", 0, getMetrics(NS_METRICS));
assertCounter("CreateSnapshotOps", 0L, getMetrics(NN_METRICS));
// Create a snapshot for a non-snapshottable directory, thus should not
// change the metrics
try {
hdfs.createSnapshot(sub1, "s1");
} catch (Exception e) {}
assertGauge("Snapshots", 0, getMetrics(NS_METRICS));
assertCounter("CreateSnapshotOps", 1L, getMetrics(NN_METRICS));
// Create snapshot for sub1
hdfs.allowSnapshot(sub1);
hdfs.createSnapshot(sub1, "s1");
assertGauge("Snapshots", 1, getMetrics(NS_METRICS));
assertCounter("CreateSnapshotOps", 2L, getMetrics(NN_METRICS));
hdfs.createSnapshot(sub1, "s2");
assertGauge("Snapshots", 2, getMetrics(NS_METRICS));
assertCounter("CreateSnapshotOps", 3L, getMetrics(NN_METRICS));
hdfs.getSnapshotDiffReport(sub1, "s1", "s2");
assertCounter("SnapshotDiffReportOps", 1L, getMetrics(NN_METRICS));
// Create snapshot for a directory under sub1
Path subsub1 = new Path(sub1, "sub1sub1");
Path subfile = new Path(subsub1, "file");
DFSTestUtil.createFile(hdfs, subfile, 1024, REPLICATION, seed);
hdfs.allowSnapshot(subsub1);
hdfs.createSnapshot(subsub1, "s11");
assertGauge("Snapshots", 3, getMetrics(NS_METRICS));
assertCounter("CreateSnapshotOps", 4L, getMetrics(NN_METRICS));
// delete snapshot
hdfs.deleteSnapshot(sub1, "s2");
assertGauge("Snapshots", 2, getMetrics(NS_METRICS));
assertCounter("DeleteSnapshotOps", 1L, getMetrics(NN_METRICS));
// rename snapshot
hdfs.renameSnapshot(sub1, "s1", "NewS1");
assertGauge("Snapshots", 2, getMetrics(NS_METRICS));
assertCounter("RenameSnapshotOps", 1L, getMetrics(NN_METRICS));
}
} | TestSnapshotMetrics |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java | {
"start": 2493,
"end": 4917
} | class ____ extends FileSystemContractBaseTest {
private static final Configuration conf = new Configuration();
private static final MiniDFSCluster cluster;
private String defaultWorkingDirectory;
private UserGroupInformation ugi;
static {
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
cluster.waitActive();
//change root permission to 777
cluster.getFileSystem().setPermission(
new Path("/"), new FsPermission((short)0777));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@BeforeEach
public void setUp() throws Exception {
//get file system as a non-superuser
final UserGroupInformation current = UserGroupInformation.getCurrentUser();
ugi = UserGroupInformation.createUserForTesting(
current.getShortUserName() + "x", new String[]{"user"});
fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf, WebHdfsConstants.WEBHDFS_SCHEME);
defaultWorkingDirectory = fs.getWorkingDirectory().toUri().getPath();
}
@Override
protected String getDefaultWorkingDirectory() {
return defaultWorkingDirectory;
}
@Override
protected int getGlobalTimeout() {
return 60 * 1000;
}
/** HDFS throws AccessControlException
* when calling exist(..) on a path /foo/bar/file
* but /foo/bar is indeed a file in HDFS.
*/
@Test
public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
Path testDir = path("/test/hadoop");
assertFalse(fs.exists(testDir));
assertTrue(fs.mkdirs(testDir));
assertTrue(fs.exists(testDir));
createFile(path("/test/hadoop/file"));
Path testSubDir = path("/test/hadoop/file/subdir");
try {
fs.mkdirs(testSubDir);
fail("Should throw IOException.");
} catch (IOException e) {
// expected
}
try {
assertFalse(fs.exists(testSubDir));
} catch(AccessControlException e) {
// also okay for HDFS.
}
Path testDeepSubDir = path("/test/hadoop/file/deep/sub/dir");
try {
fs.mkdirs(testDeepSubDir);
fail("Should throw IOException.");
} catch (IOException e) {
// expected
}
try {
assertFalse(fs.exists(testDeepSubDir));
} catch(AccessControlException e) {
// also okay for HDFS.
}
}
//the following are new tests (i.e. not over-riding the super | TestWebHdfsFileSystemContract |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/TaskState.java | {
"start": 5166,
"end": 6879
} | class ____ actually deprecated, just return the state size.
return getStateSize();
}
@Override
public long getStateSize() {
return streamSubtaskState().mapToLong(StateObject::getStateSize).sum();
}
@Override
public void collectSizeStats(StateObjectSizeStatsCollector collector) {
streamSubtaskState().forEach(state -> state.collectSizeStats(collector));
}
private Stream<SubtaskState> streamSubtaskState() {
return subtaskStates.values().stream().filter(Objects::nonNull);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof TaskState) {
TaskState other = (TaskState) obj;
return jobVertexID.equals(other.jobVertexID)
&& parallelism == other.parallelism
&& subtaskStates.equals(other.subtaskStates);
} else {
return false;
}
}
@Override
public int hashCode() {
return parallelism + 31 * Objects.hash(jobVertexID, subtaskStates);
}
public Map<Integer, SubtaskState> getSubtaskStates() {
return Collections.unmodifiableMap(subtaskStates);
}
@Override
public String toString() {
// KvStates are always null in 1.1. Don't print this as it might
// confuse users that don't care about how we store it internally.
return "TaskState("
+ "jobVertexID: "
+ jobVertexID
+ ", parallelism: "
+ parallelism
+ ", sub task states: "
+ subtaskStates.size()
+ ", total size (bytes): "
+ getStateSize()
+ ')';
}
}
| is |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/AbstractItemSetMapReducer.java | {
"start": 1228,
"end": 1708
} | interface ____ makes it easier to implement aggregations in map-reduce style.
*
* The main concepts of a map-reduce framework:
*
* - mapper: takes single key/value pairs and consumes them
* - reducer: takes the output individual consumed outputs and creates the final result
* - combiner: a "local" reducer that takes an incomplete set of consumed outputs, usually
* used as optimization/compression before sending data over the wire.
*
* Apart from that the | that |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/placement/UserPlacementRule.java | {
"start": 1697,
"end": 2892
} | class ____ extends FSPlacementRule {
private static final Logger LOG =
LoggerFactory.getLogger(UserPlacementRule.class);
@Override
public ApplicationPlacementContext getPlacementForApp(
ApplicationSubmissionContext asc, String user) throws YarnException {
String queueName;
String cleanUser = cleanName(user);
PlacementRule parentRule = getParentRule();
if (parentRule != null) {
LOG.debug("User rule: parent rule found: {}", parentRule.getName());
ApplicationPlacementContext parent =
parentRule.getPlacementForApp(asc, user);
if (parent == null || getQueueManager().
getQueue(parent.getQueue()) instanceof FSLeafQueue) {
LOG.debug("User rule: parent rule failed");
return null;
}
LOG.debug("User rule: parent rule result: {}", parent.getQueue());
queueName = parent.getQueue() + DOT + cleanUser;
} else {
queueName = assureRoot(cleanUser);
}
// If we can create the queue in the rule or the queue exists return it
if (createQueue || configuredQueue(queueName)) {
return new ApplicationPlacementContext(queueName);
}
return null;
}
}
| UserPlacementRule |
java | apache__logging-log4j2 | log4j-iostreams/src/test/java/org/apache/logging/log4j/io/LoggerBufferedInputStreamTest.java | {
"start": 916,
"end": 1260
} | class ____ extends LoggerInputStreamTest {
@Override
protected InputStream createInputStream() {
return IoBuilder.forLogger(getExtendedLogger())
.filter(this.wrapped)
.setLevel(Level.ERROR)
.setBuffered(true)
.buildInputStream();
}
}
| LoggerBufferedInputStreamTest |
java | google__auto | common/src/test/java/com/google/auto/common/AnnotationMirrorsTest.java | {
"start": 5740,
"end": 5790
} | class ____ {}
@Stringy("foo")
static | StringyUnset |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1300/Issue1367.java | {
"start": 4377,
"end": 4478
} | class ____<ID extends Serializable> {
public abstract ID getId();
}
static | GenericEntity |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnNotWebApplicationTests.java | {
"start": 1339,
"end": 2177
} | class ____ {
@Test
void testNotWebApplicationWithServletContext() {
new WebApplicationContextRunner().withUserConfiguration(NotWebApplicationConfiguration.class)
.run((context) -> assertThat(context).doesNotHaveBean(String.class));
}
@Test
void testNotWebApplicationWithReactiveContext() {
new ReactiveWebApplicationContextRunner().withUserConfiguration(NotWebApplicationConfiguration.class)
.run((context) -> assertThat(context).doesNotHaveBean(String.class));
}
@Test
void testNotWebApplication() {
new ApplicationContextRunner().withUserConfiguration(NotWebApplicationConfiguration.class)
.run((context) -> assertThat(context).getBeans(String.class).containsExactly(entry("none", "none")));
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnNotWebApplication
static | ConditionalOnNotWebApplicationTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateSequenceTest2.java | {
"start": 913,
"end": 3913
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"CREATE GROUP SEQUENCE seq1;";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql);
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(DbType.mysql);
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(0, visitor.getTables().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("cdc.en_complaint_ipr_stat_fdt0")));
assertEquals(0, visitor.getColumns().size());
assertEquals("CREATE GROUP SEQUENCE seq1;", stmt.toString());
assertEquals("create group sequence seq1;", stmt.toLowerCaseString());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "*")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "YEAR")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "order_mode")));
}
public void test_1() throws Exception {
String sql = //
"CREATE SIMPLE SEQUENCE seq1;";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.mysql);
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(DbType.mysql);
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(0, visitor.getTables().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("cdc.en_complaint_ipr_stat_fdt0")));
assertEquals(0, visitor.getColumns().size());
assertEquals("CREATE SIMPLE SEQUENCE seq1;", stmt.toString());
assertEquals("create simple sequence seq1;", stmt.toLowerCaseString());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "*")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "YEAR")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "order_mode")));
}
}
| MySqlCreateSequenceTest2 |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/collection/spi/MapSemantics.java | {
"start": 354,
"end": 614
} | interface ____<MKV extends Map<K,V>,K,V> extends CollectionSemantics<MKV,V> {
Iterator<K> getKeyIterator(MKV rawMap);
void visitKeys(MKV rawMap, Consumer<? super K> action);
void visitEntries(MKV rawMap, BiConsumer<? super K,? super V> action);
}
| MapSemantics |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/WrapAwareProcessor.java | {
"start": 957,
"end": 1078
} | interface ____ {
/**
* Gets the wrapped {@link Processor}
*/
Processor getWrapped();
}
| WrapAwareProcessor |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/cluster/lookup/LookupFactory.java | {
"start": 4956,
"end": 6108
} | enum ____ {
/**
* File addressing mode.
*/
FILE_CONFIG(1, "file"),
/**
* Address server addressing mode.
*/
ADDRESS_SERVER(2, "address-server");
private final int code;
private final String name;
LookupType(int code, String name) {
this.code = code;
this.name = name;
}
/**
* find one {@link LookupType} by name, if not found, return null.
*
* @param name name
* @return {@link LookupType}
*/
public static LookupType sourceOf(String name) {
for (LookupType type : values()) {
if (Objects.equals(type.name, name)) {
return type;
}
}
return null;
}
public int getCode() {
return code;
}
public String getName() {
return name;
}
@Override
public String toString() {
return name;
}
}
}
| LookupType |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_describedAs_printed_to_console_Test.java | {
"start": 1351,
"end": 5715
} | class ____ {
private static ByteArrayOutputStream systemOutContent;
private final static PrintStream originalSystemOut = System.out;
private final static boolean originalIsPrintAssertionsDescriptionEnabled = assertJConfig().printAssertionsDescription();
private final static Consumer<Description> originalDescriptionConsumer = assertJConfig().descriptionConsumer();
private static Configuration assertJConfig() {
return ConfigurationProvider.CONFIGURATION_PROVIDER.configuration();
}
@BeforeEach
void beforeAllTests() {
Assertions.setPrintAssertionsDescription(true);
systemOutContent = new ByteArrayOutputStream();
System.setOut(new PrintStream(systemOutContent));
}
@AfterEach
void restoreOriginalState() {
System.setOut(originalSystemOut);
Assertions.setPrintAssertionsDescription(originalIsPrintAssertionsDescriptionEnabled);
Assertions.setDescriptionConsumer(originalDescriptionConsumer);
}
@Test
void should_print_successful_assertions_description_to_console_with_new_line() {
// GIVEN
String description = RandomStringUtils.secure().nextAlphanumeric(20);
// WHEN
assertThat("abc").as(description + "1")
.startsWith("a")
.as(description + "2")
.contains("b")
.as(" ")
.endsWith("c");
// THEN
then(systemOutContent).hasToString("%s%n%s%n %n".formatted(description + "1", description + "2"));
}
@Test
void should_print_successful_assertions_description_to_console_with_new_line_until_first_failed_assertion_included() {
// GIVEN
String description = RandomStringUtils.secure().nextAlphanumeric(20);
// WHEN
catchThrowable(() -> assertThat("abc").as(description + "1")
.startsWith("a")
.as(description + "2")
.startsWith("b")
.as("not printed as previous assertion failed")
.endsWith("a"));
// THEN
then(systemOutContent).hasToString("%s%n%s%n".formatted(description + "1", description + "2"));
}
@Test
void should_print_all_soft_assertions_failed_or_successful() {
// GIVEN
String description = RandomStringUtils.secure().nextAlphanumeric(20);
SoftAssertions softly = new SoftAssertions();
// WHEN
softly.assertThat("abc").as("1" + description)
.startsWith("a")
.as("2" + description)
.startsWith("b")
.as("") // description not printed as it is empty
.startsWith("c")
.as("3" + description)
.endsWith("a");
// THEN
then(systemOutContent).hasToString("%s%n%s%n%s%n".formatted("1" + description, "2" + description, "3" + description));
// we don't care about the assertions result, we just want to check the description
}
@Test
void should_be_printed_and_consumed_by_configured_description_consumer() {
final StringBuffer consumedDescription = new StringBuffer("");
Assertions.setDescriptionConsumer(description -> consumedDescription.append(description.toString()));
String description = RandomStringUtils.secure().nextAlphanumeric(20);
// WHEN
assertThat("abc").as("1" + description)
.startsWith("a")
.as("2" + description)
.contains("b")
.as(" ")
.endsWith("c");
// THEN
then(consumedDescription).hasToString("1" + description + "2" + description + " ");
then(systemOutContent).hasToString("%s%n%s%n %n".formatted("1" + description, "2" + description));
}
@Test
void should_not_print_assertions_description_to_console_by_default() {
// GIVEN
Assertions.setPrintAssertionsDescription(originalIsPrintAssertionsDescriptionEnabled);
String description = RandomStringUtils.secure().nextAlphanumeric(20);
// WHEN
assertThat("abc").as(description + "1")
.startsWith("a")
.as(description + "2")
.contains("b")
.as(" ")
.endsWith("c");
// THEN
then(systemOutContent.toString()).isEmpty();
}
}
| AbstractAssert_describedAs_printed_to_console_Test |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/orm/hibernate/HibernateSessionFlushingTests.java | {
"start": 1971,
"end": 5028
} | class ____ {
private static final String SAM = "Sam";
private static final String JUERGEN = "Juergen";
JdbcTemplate jdbcTemplate;
@Autowired
SessionFactory sessionFactory;
@Autowired
PersonService personService;
@Autowired
void setDataSource(DataSource dataSource) {
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
@BeforeEach
void setup() {
assertThatTransaction().isActive();
assertThat(personService).as("PersonService should have been autowired.").isNotNull();
assertThat(sessionFactory).as("SessionFactory should have been autowired.").isNotNull();
}
@Test
void findSam() {
Person sam = personService.findByName(SAM);
assertThat(sam).as("Should be able to find Sam").isNotNull();
DriversLicense driversLicense = sam.getDriversLicense();
assertThat(driversLicense).as("Sam's driver's license should not be null").isNotNull();
assertThat(driversLicense.getNumber()).as("Verifying Sam's driver's license number").isEqualTo(Long.valueOf(1234));
}
@Test // SPR-16956
@Transactional(readOnly = true)
void findSamWithReadOnlySession() {
Person sam = personService.findByName(SAM);
sam.setName("Vlad");
// By setting setDefaultReadOnly(true), the user can no longer modify any entity...
Session session = sessionFactory.getCurrentSession();
session.flush();
session.refresh(sam);
assertThat(sam.getName()).isEqualTo("Sam");
}
@Test
void saveJuergenWithDriversLicense() {
DriversLicense driversLicense = new DriversLicense(2L, 2222L);
Person juergen = new Person(JUERGEN, driversLicense);
int numRows = countRowsInTable("person");
personService.save(juergen);
assertThat(countRowsInTable("person")).as("Verifying number of rows in the 'person' table.").isEqualTo((numRows + 1));
assertThat(personService.findByName(JUERGEN)).as("Should be able to save and retrieve Juergen").isNotNull();
assertThat(juergen.getId()).as("Juergen's ID should have been set").isNotNull();
}
@Test
void saveJuergenWithNullDriversLicense() {
assertThatExceptionOfType(ConstraintViolationException.class)
.isThrownBy(() -> personService.save(new Person(JUERGEN)));
}
@Test
// no expected exception!
void updateSamWithNullDriversLicenseWithoutSessionFlush() {
updateSamWithNullDriversLicense();
// False positive, since an exception will be thrown once the session is
// finally flushed (i.e., in production code)
}
@Test
void updateSamWithNullDriversLicenseWithSessionFlush() {
updateSamWithNullDriversLicense();
// Manual flush is required to avoid false positive in test
assertThatExceptionOfType(ConstraintViolationException.class).isThrownBy(sessionFactory.getCurrentSession()::flush);
}
private void updateSamWithNullDriversLicense() {
Person sam = personService.findByName(SAM);
assertThat(sam).as("Should be able to find Sam").isNotNull();
sam.setDriversLicense(null);
personService.save(sam);
}
private int countRowsInTable(String tableName) {
return JdbcTestUtils.countRowsInTable(this.jdbcTemplate, tableName);
}
}
| HibernateSessionFlushingTests |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/DecodeJob.java | {
"start": 23834,
"end": 24353
} | class ____<Z> implements DecodePath.DecodeCallback<Z> {
private final DataSource dataSource;
@Synthetic
DecodeCallback(DataSource dataSource) {
this.dataSource = dataSource;
}
@NonNull
@Override
public Resource<Z> onResourceDecoded(@NonNull Resource<Z> decoded) {
return DecodeJob.this.onResourceDecoded(dataSource, decoded);
}
}
/**
* Responsible for indicating when it is safe for the job to be cleared and returned to the pool.
*/
private static | DecodeCallback |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/custom/parameterized/DefaultableListImpl.java | {
"start": 323,
"end": 747
} | class ____ extends ArrayList implements DefaultableList {
private String defaultValue;
public DefaultableListImpl() {
}
public DefaultableListImpl(int anticipatedSize) {
super( anticipatedSize + ( int ) Math.ceil( anticipatedSize * .75f ) );
}
public String getDefaultValue() {
return defaultValue;
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
}
| DefaultableListImpl |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassVisitor.java | {
"start": 7779,
"end": 7936
} | class ____ the class (see {@link
* Type#getInternalName()}).
* @param name the name of the method that contains the class, or {@literal null} if the | of |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/DebugConfig.java | {
"start": 567,
"end": 1502
} | interface ____ {
/**
* If set to true, writes a list of all reflective classes to META-INF
*/
@WithDefault("false")
boolean reflection();
/**
* If set to a directory, all generated classes will be written into that directory
*/
Optional<String> generatedClassesDir();
/**
* If set to a directory, all transformed classes (e.g. Panache entities) will be written into that directory
*/
Optional<String> transformedClassesDir();
/**
* If set to a directory, ZIG files for generated code will be written into that directory.
* <p>
* A ZIG file is a textual representation of the generated code that is referenced in the stacktraces.
*/
Optional<String> generatedSourcesDir();
/**
* If set to true then dump the build metrics to a JSON file in the build directory.
*/
@WithDefault("false")
boolean dumpBuildMetrics();
}
| DebugConfig |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java | {
"start": 532,
"end": 789
} | class ____ extends AbstractNumericMetricTestCase<StatsAggregationBuilder> {
@Override
protected StatsAggregationBuilder doCreateTestAggregatorFactory() {
return new StatsAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
}
}
| StatsTests |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorTests.java | {
"start": 78108,
"end": 78185
} | class ____ {
abstract DependingFoo createFoo(BarArgument bar);
}
| FooFactory |
java | apache__camel | components/camel-activemq/src/generated/java/org/apache/camel/component/activemq/ActiveMQEndpointConfigurer.java | {
"start": 727,
"end": 2459
} | class ____ extends JmsEndpointConfigurer implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
ActiveMQEndpoint target = (ActiveMQEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "destinationoptions":
case "destinationOptions": target.setDestinationOptions(property(camelContext, java.util.Map.class, value)); return true;
default: return super.configure(camelContext, obj, name, value, ignoreCase);
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "destinationoptions":
case "destinationOptions": return java.util.Map.class;
default: return super.getOptionType(name, ignoreCase);
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
ActiveMQEndpoint target = (ActiveMQEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "destinationoptions":
case "destinationOptions": return target.getDestinationOptions();
default: return super.getOptionValue(obj, name, ignoreCase);
}
}
@Override
public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "destinationoptions":
case "destinationOptions": return java.lang.String.class;
default: return super.getCollectionValueType(target, name, ignoreCase);
}
}
}
| ActiveMQEndpointConfigurer |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/builder/ExpressionBuilder.java | {
"start": 7618,
"end": 12579
} | class ____
* @return an expression object which will return the header value
*/
public static Expression headerExpression(final Expression headerName, final Expression typeName) {
return new ExpressionAdapter() {
private ClassResolver classResolver;
@Override
public Object evaluate(Exchange exchange) {
Class<?> type;
try {
String text = typeName.evaluate(exchange, String.class);
type = classResolver.resolveMandatoryClass(text);
} catch (ClassNotFoundException e) {
throw CamelExecutionException.wrapCamelExecutionException(exchange, e);
}
String text = headerName.evaluate(exchange, String.class);
Object header = exchange.getIn().getHeader(text, type);
if (header == null) {
// fall back on a property
header = exchange.getProperty(text, type);
}
return header;
}
@Override
public void init(CamelContext context) {
super.init(context);
headerName.init(context);
typeName.init(context);
classResolver = context.getClassResolver();
}
@Override
public String toString() {
return "headerAs(" + headerName + ", " + typeName + ")";
}
};
}
/**
* Returns an expression for the variable with the given name
*
* @param variableName the name of the variable the expression will return
* @return an expression object which will return the variable value
*/
public static Expression variableExpression(final String variableName) {
return variableExpression(simpleExpression(variableName));
}
/**
* Returns an expression for the variable with the given name
*
* @param variableName the name of the variable the expression will return
* @param mandatory whether the variable is mandatory and if not present an exception is thrown
* @return an expression object which will return the variable value
*/
public static Expression variableExpression(final String variableName, boolean mandatory) {
return variableExpression(simpleExpression(variableName), mandatory);
}
/**
* Returns an expression for the variable with the given name
*
* @param variableName the name of the variable the expression will return
* @return an expression object which will return the variable value
*/
public static Expression variableExpression(final Expression variableName) {
return variableExpression(variableName, false);
}
/**
* Returns an expression for the variable with the given name
*
* @param variableName the name of the variable the expression will return
* @param mandatory whether the variable is mandatory and if not present an exception is thrown
* @return an expression object which will return the variable value
*/
public static Expression variableExpression(final Expression variableName, final boolean mandatory) {
return new ExpressionAdapter() {
@Override
public Object evaluate(Exchange exchange) {
String key = variableName.evaluate(exchange, String.class);
Object answer = ExchangeHelper.getVariable(exchange, key);
if (mandatory && answer == null) {
throw RuntimeCamelException.wrapRuntimeCamelException(new NoSuchVariableException(exchange, key));
}
return answer;
}
@Override
public void init(CamelContext context) {
super.init(context);
variableName.init(context);
}
@Override
public String toString() {
return "variable(" + variableName + ")";
}
};
}
/**
* Returns an expression for the variable with the given name converted to the given type
*
* @param variableName the name of the variable the expression will return
* @param type the type to convert to
* @return an expression object which will return the variable value
*/
public static <T> Expression variableExpression(final String variableName, final Class<T> type) {
return variableExpression(simpleExpression(variableName), constantExpression(type.getName()));
}
/**
* Returns an expression for the variable with the given name converted to the given type
*
* @param variableName the name of the variable the expression will return
* @param typeName the type to convert to as a FQN | name |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ClassificationFeatureImportance.java | {
"start": 3121,
"end": 4423
} | class ____ here
return Math.abs(classImportance.get(0).getImportance());
}
return classImportance.stream().mapToDouble(ClassImportance::getImportance).map(Math::abs).sum();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(featureName);
out.writeCollection(classImportance);
}
@Override
public Map<String, Object> toMap() {
Map<String, Object> map = new LinkedHashMap<>();
map.put(FEATURE_NAME, featureName);
if (classImportance.isEmpty() == false) {
map.put(CLASSES, classImportance.stream().map(ClassImportance::toMap).collect(Collectors.toList()));
}
return map;
}
@Override
public boolean equals(Object object) {
if (object == this) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
ClassificationFeatureImportance that = (ClassificationFeatureImportance) object;
return Objects.equals(featureName, that.featureName) && Objects.equals(classImportance, that.classImportance);
}
@Override
public int hashCode() {
return Objects.hash(featureName, classImportance);
}
public static | importance |
java | google__guava | android/guava-tests/test/com/google/common/reflect/MutableTypeToInstanceMapTest.java | {
"start": 1544,
"end": 8256
} | class ____ extends TestCase {
@AndroidIncompatible // problem with suite builders on Android
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTestSuite(MutableTypeToInstanceMapTest.class);
suite.addTest(
MapTestSuiteBuilder.using(
new TestTypeToInstanceMapGenerator() {
// Other tests will verify what real, warning-free usage looks like
// but here we have to do some serious fudging
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Map<TypeToken, Object> create(Object... elements) {
MutableTypeToInstanceMap<Object> map = new MutableTypeToInstanceMap<>();
for (Object object : elements) {
Entry<TypeToken, Object> entry = (Entry<TypeToken, Object>) object;
map.putInstance(entry.getKey(), entry.getValue());
}
return (Map) map;
}
})
.named("MutableTypeToInstanceMap")
.withFeatures(
MapFeature.SUPPORTS_REMOVE,
MapFeature.RESTRICTS_KEYS,
MapFeature.ALLOWS_NULL_VALUES,
CollectionFeature.SUPPORTS_ITERATOR_REMOVE,
CollectionSize.ANY,
MapFeature.ALLOWS_ANY_NULL_QUERIES)
.createTestSuite());
return suite;
}
private TypeToInstanceMap<Object> map;
@Override
protected void setUp() throws Exception {
map = new MutableTypeToInstanceMap<>();
}
public void testPutThrows() {
assertThrows(
UnsupportedOperationException.class,
() -> map.put(TypeToken.of(Integer.class), Integer.valueOf(5)));
}
public void testPutAllThrows() {
assertThrows(
UnsupportedOperationException.class,
() -> map.putAll(ImmutableMap.of(TypeToken.of(Integer.class), Integer.valueOf(5))));
}
public void testEntrySetMutationThrows() {
map.putInstance(String.class, "test");
assertEquals(TypeToken.of(String.class), map.entrySet().iterator().next().getKey());
assertEquals("test", map.entrySet().iterator().next().getValue());
assertThrows(
UnsupportedOperationException.class, () -> map.entrySet().iterator().next().setValue(1));
}
public void testEntrySetToArrayMutationThrows() {
map.putInstance(String.class, "test");
@SuppressWarnings("unchecked") // Should get a CCE later if cast is wrong
Entry<?, Object> entry = (Entry<?, Object>) map.entrySet().toArray()[0];
assertEquals(TypeToken.of(String.class), entry.getKey());
assertEquals("test", entry.getValue());
assertThrows(UnsupportedOperationException.class, () -> entry.setValue(1));
}
public void testEntrySetToTypedArrayMutationThrows() {
map.putInstance(String.class, "test");
@SuppressWarnings("unchecked") // Should get a CCE later if cast is wrong
Entry<?, Object> entry = (Entry<?, Object>) map.entrySet().toArray(new Entry<?, ?>[0])[0];
assertEquals(TypeToken.of(String.class), entry.getKey());
assertEquals("test", entry.getValue());
assertThrows(UnsupportedOperationException.class, () -> entry.setValue(1));
}
public void testPutAndGetInstance() {
assertThat(map.putInstance(Integer.class, Integer.valueOf(5))).isNull();
Integer oldValue = map.putInstance(Integer.class, Integer.valueOf(7));
assertEquals(5, (int) oldValue);
Integer newValue = map.getInstance(Integer.class);
assertEquals(7, (int) newValue);
assertEquals(7, (int) map.getInstance(TypeToken.of(Integer.class)));
// Won't compile: map.putInstance(Double.class, new Long(42));
}
public void testNull() {
assertThrows(
NullPointerException.class,
() -> map.putInstance((TypeToken<Integer>) null, Integer.valueOf(1)));
map.putInstance(Integer.class, null);
assertTrue(map.containsKey(TypeToken.of(Integer.class)));
assertTrue(map.entrySet().contains(immutableEntry(TypeToken.of(Integer.class), null)));
assertThat(map.get(TypeToken.of(Integer.class))).isNull();
assertThat(map.getInstance(Integer.class)).isNull();
map.putInstance(Long.class, null);
assertTrue(map.containsKey(TypeToken.of(Long.class)));
assertTrue(map.entrySet().contains(immutableEntry(TypeToken.of(Long.class), null)));
assertThat(map.get(TypeToken.of(Long.class))).isNull();
assertThat(map.getInstance(Long.class)).isNull();
}
public void testPrimitiveAndWrapper() {
assertThat(map.getInstance(int.class)).isNull();
assertThat(map.getInstance(Integer.class)).isNull();
assertThat(map.putInstance(int.class, 0)).isNull();
assertThat(map.putInstance(Integer.class, 1)).isNull();
assertEquals(2, map.size());
assertEquals(0, (int) map.getInstance(int.class));
assertEquals(1, (int) map.getInstance(Integer.class));
assertEquals(0, (int) map.putInstance(int.class, null));
assertEquals(1, (int) map.putInstance(Integer.class, null));
assertThat(map.getInstance(int.class)).isNull();
assertThat(map.getInstance(Integer.class)).isNull();
assertEquals(2, map.size());
}
public void testParameterizedType() {
TypeToken<ImmutableList<Integer>> type = new TypeToken<ImmutableList<Integer>>() {};
map.putInstance(type, ImmutableList.of(1));
assertEquals(1, map.size());
assertEquals(ImmutableList.of(1), map.getInstance(type));
}
public void testGenericArrayType() {
@SuppressWarnings("unchecked") // Trying to test generic array
ImmutableList<Integer>[] array =
(ImmutableList<Integer>[]) new ImmutableList<?>[] {ImmutableList.of(1)};
TypeToken<ImmutableList<Integer>[]> type = new TypeToken<ImmutableList<Integer>[]>() {};
map.putInstance(type, array);
assertEquals(1, map.size());
assertThat(map.getInstance(type)).asList().containsExactly(array[0]);
}
public void testWildcardType() {
TypeToken<ImmutableList<?>> type = new TypeToken<ImmutableList<?>>() {};
map.putInstance(type, ImmutableList.of(1));
assertEquals(1, map.size());
assertEquals(ImmutableList.of(1), map.getInstance(type));
}
public void testGetInstance_withTypeVariable() {
assertThrows(
IllegalArgumentException.class, () -> map.getInstance(this.<Number>anyIterableType()));
}
public void testPutInstance_withTypeVariable() {
assertThrows(
IllegalArgumentException.class,
() -> map.putInstance(this.<Integer>anyIterableType(), ImmutableList.of(1)));
}
private <T> TypeToken<Iterable<T>> anyIterableType() {
return new TypeToken<Iterable<T>>() {};
}
}
| MutableTypeToInstanceMapTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java | {
"start": 1509,
"end": 3773
} | class ____ extends TransportMasterNodeAction<PutDatafeedAction.Request, PutDatafeedAction.Response> {
private final XPackLicenseState licenseState;
private final SecurityContext securityContext;
private final DatafeedManager datafeedManager;
private final ProjectResolver projectResolver;
@Inject
public TransportPutDatafeedAction(
Settings settings,
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
XPackLicenseState licenseState,
ActionFilters actionFilters,
DatafeedManager datafeedManager,
ProjectResolver projectResolver
) {
super(
PutDatafeedAction.NAME,
transportService,
clusterService,
threadPool,
actionFilters,
PutDatafeedAction.Request::new,
PutDatafeedAction.Response::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.licenseState = licenseState;
this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings)
? new SecurityContext(settings, threadPool.getThreadContext())
: null;
this.datafeedManager = datafeedManager;
this.projectResolver = projectResolver;
}
@Override
protected void masterOperation(
Task task,
PutDatafeedAction.Request request,
ClusterState state,
ActionListener<PutDatafeedAction.Response> listener
) {
datafeedManager.putDatafeed(request, state, securityContext, threadPool, listener);
}
@Override
protected ClusterBlockException checkBlock(PutDatafeedAction.Request request, ClusterState state) {
return state.blocks().globalBlockedException(projectResolver.getProjectId(), ClusterBlockLevel.METADATA_WRITE);
}
@Override
protected void doExecute(Task task, PutDatafeedAction.Request request, ActionListener<PutDatafeedAction.Response> listener) {
if (MachineLearningField.ML_API_FEATURE.check(licenseState)) {
super.doExecute(task, request, listener);
} else {
listener.onFailure(LicenseUtils.newComplianceException(XPackField.MACHINE_LEARNING));
}
}
}
| TransportPutDatafeedAction |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing-opentelemetry/src/main/java/org/springframework/boot/micrometer/tracing/opentelemetry/autoconfigure/SdkTracerProviderBuilderCustomizer.java | {
"start": 838,
"end": 1063
} | interface ____ can be used to customize the {@link SdkTracerProviderBuilder}
* that is used to create the auto-configured {@link SdkTracerProvider}.
*
* @author Yanming Zhou
* @since 4.0.0
*/
@FunctionalInterface
public | that |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/inheritance/single/ChildAuditing.java | {
"start": 701,
"end": 2458
} | class ____ {
private Integer id1;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Rev 1
scope.inTransaction( em -> {
ChildEntity ce = new ChildEntity( "x", 1l );
em.persist( ce );
id1 = ce.getId();
} );
// Rev 2
scope.inTransaction( em -> {
ChildEntity ce = em.find( ChildEntity.class, id1 );
ce.setData( "y" );
ce.setNumVal( 2l );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2 ), auditReader.getRevisions( ChildEntity.class, id1 ) );
} );
}
@Test
public void testHistoryOfChildId1(EntityManagerFactoryScope scope) {
ChildEntity ver1 = new ChildEntity( id1, "x", 1l );
ChildEntity ver2 = new ChildEntity( id1, "y", 2l );
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( ver1, auditReader.find( ChildEntity.class, id1, 1 ) );
assertEquals( ver2, auditReader.find( ChildEntity.class, id1, 2 ) );
assertEquals( ver1, auditReader.find( ParentEntity.class, id1, 1 ) );
assertEquals( ver2, auditReader.find( ParentEntity.class, id1, 2 ) );
} );
}
@Test
public void testPolymorphicQuery(EntityManagerFactoryScope scope) {
ChildEntity childVer1 = new ChildEntity( id1, "x", 1l );
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( childVer1,
auditReader.createQuery().forEntitiesAtRevision( ChildEntity.class, 1 ).getSingleResult() );
assertEquals( childVer1,
auditReader.createQuery().forEntitiesAtRevision( ParentEntity.class, 1 ).getSingleResult() );
} );
}
}
| ChildAuditing |
java | quarkusio__quarkus | integration-tests/resteasy-mutiny/src/main/java/io/quarkus/it/resteasy/mutiny/MyRestService.java | {
"start": 268,
"end": 400
} | interface ____ {
@GET
@Path("/hello")
Uni<String> hello();
@GET
@Path("/pet")
Uni<Pet> pet();
}
| MyRestService |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/ExceptionHandlerExceptionResolver.java | {
"start": 21177,
"end": 22201
} | class ____ of interface-based proxy.
if (Proxy.isProxyClass(handlerType)) {
handlerType = AopUtils.getTargetClass(handlerMethod.getBean());
}
}
for (Map.Entry<ControllerAdviceBean, ExceptionHandlerMethodResolver> entry : this.exceptionHandlerAdviceCache.entrySet()) {
ControllerAdviceBean advice = entry.getKey();
if (advice.isApplicableToBeanType(handlerType)) {
ExceptionHandlerMethodResolver resolver = entry.getValue();
for (MediaType mediaType : acceptedMediaTypes) {
ExceptionHandlerMappingInfo mappingInfo = resolver.resolveExceptionMapping(exception, mediaType);
if (mappingInfo != null) {
if (!mappingInfo.getProducibleTypes().isEmpty()) {
webRequest.setAttribute(HandlerMapping.PRODUCIBLE_MEDIA_TYPES_ATTRIBUTE, mappingInfo.getProducibleTypes(), RequestAttributes.SCOPE_REQUEST);
}
return new ServletInvocableHandlerMethod(advice.resolveBean(), mappingInfo.getHandlerMethod(), this.applicationContext);
}
}
}
}
return null;
}
}
| instead |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/UnnecessaryBreakInSwitch.java | {
"start": 1577,
"end": 2984
} | class ____ extends BugChecker implements BugChecker.CaseTreeMatcher {
@Override
public Description matchCase(CaseTree tree, VisitorState state) {
if (!tree.getCaseKind().equals(CaseKind.RULE)) {
return NO_MATCH;
}
Tree body = tree.getBody();
ImmutableList<BreakTree> unnecessaryBreaks = unnecessaryBreaks(body);
if (unnecessaryBreaks.isEmpty()) {
return NO_MATCH;
}
unnecessaryBreaks.forEach(
unnecessaryBreak ->
state.reportMatch(
describeMatch(unnecessaryBreak, SuggestedFix.delete(unnecessaryBreak))));
return NO_MATCH;
}
private ImmutableList<BreakTree> unnecessaryBreaks(Tree tree) {
ImmutableList.Builder<BreakTree> result = ImmutableList.builder();
new SimpleTreeVisitor<Void, Void>() {
@Override
public Void visitBreak(BreakTree node, Void unused) {
if (node.getLabel() == null) {
result.add(node);
}
return null;
}
@Override
public Void visitBlock(BlockTree node, Void unused) {
visit(getLast(node.getStatements(), null), null);
return null;
}
@Override
public Void visitIf(IfTree node, Void unused) {
visit(node.getThenStatement(), null);
visit(node.getElseStatement(), null);
return null;
}
}.visit(tree, null);
return result.build();
}
}
| UnnecessaryBreakInSwitch |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/starrocks/parser/StarRocksCreateTableParser.java | {
"start": 542,
"end": 8455
} | class ____ extends SQLCreateTableParser {
public StarRocksCreateTableParser(Lexer lexer) {
super(new StarRocksExprParser(lexer));
}
public StarRocksCreateTableParser(String sql) {
super(new StarRocksExprParser(sql));
}
public StarRocksCreateTableParser(SQLExprParser exprParser) {
super(exprParser);
}
@Override
public StarRocksExprParser getExprParser() {
return (StarRocksExprParser) exprParser;
}
protected void createTableBefore(SQLCreateTableStatement createTable) {
if (lexer.nextIfIdentifier(FnvHash.Constants.EXTERNAL)) {
createTable.setExternal(true);
}
if (lexer.nextIfIdentifier(FnvHash.Constants.TEMPORARY)) {
createTable.setTemporary(true);
}
}
public void parseUniqueKey(SQLCreateTableStatement stmt) {
SQLUnique sqlUnique;
if (lexer.nextIfIdentifier(FnvHash.Constants.DUPLICATE)) {
sqlUnique = new StarRocksDuplicateKey();
} else if (lexer.nextIfIdentifier(FnvHash.Constants.AGGREGATE)) {
sqlUnique = new StarRocksAggregateKey();
} else if (lexer.nextIf(Token.PRIMARY)) {
sqlUnique = new SQLPrimaryKeyImpl();
} else if (lexer.nextIf(Token.UNIQUE)) {
sqlUnique = new SQLUnique();
} else {
return;
}
accept(Token.KEY);
accept(Token.LPAREN);
this.exprParser.orderBy(sqlUnique.getColumns(), sqlUnique);
accept(Token.RPAREN);
stmt.setUnique(sqlUnique);
}
protected void parseIndex(SQLCreateTableStatement createTable) {
if (lexer.token() == Token.INDEX) {
StarRocksIndexDefinition index = new StarRocksIndexDefinition();
lexer.nextToken();
index.setIndexName(this.exprParser.name());
accept(Token.LPAREN);
for (; ; ) {
index.getColumns().add(this.exprParser.name());
if (!(lexer.token() == (Token.COMMA))) {
break;
} else {
lexer.nextToken();
}
}
accept(Token.RPAREN);
if (lexer.token() == Token.USING) {
lexer.nextToken();
if (lexer.token() == Token.BITMAP) {
lexer.nextToken();
index.setIndexType("BITMAP");
} else if (lexer.token() == Token.NGRAMBF) {
lexer.nextToken();
index.setIndexType("NGRAMBF");
if (lexer.token() == Token.LPAREN) {
accept(Token.LPAREN);
parseAssignItems(index.getIndexOption(), createTable, false);
accept(Token.RPAREN);
}
} else if (lexer.token() == Token.INVERTED) {
lexer.nextToken();
index.setIndexType("INVERTED");
}
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
index.setComment(this.exprParser.expr());
}
index.setParent(createTable);
createTable.getTableElementList().add(index);
}
}
public void parseCreateTableRest(SQLCreateTableStatement stmt) {
StarRocksCreateTableStatement srStmt = (StarRocksCreateTableStatement) stmt;
if (lexer.nextIfIdentifier(FnvHash.Constants.ENGINE)) {
accept(Token.EQ);
srStmt.setEngine(
this.exprParser.expr()
);
}
parseUniqueKey(stmt);
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
srStmt.setComment(comment);
}
if (lexer.nextIfIdentifier("AUTO")) {
SQLPartitionBy partitionBy = this.parsePartitionBy();
if (partitionBy != null) {
partitionBy.setAuto(true);
}
stmt.setPartitionBy(partitionBy);
} else {
stmt.setPartitionBy(this.parsePartitionBy());
}
// Distributed by.
if (lexer.nextIfIdentifier(FnvHash.Constants.DISTRIBUTED)) {
accept(Token.BY);
if (lexer.nextIfIdentifier(FnvHash.Constants.HASH)) {
srStmt.setDistributedByType(DistributedByType.Hash);
accept(Token.LPAREN);
this.exprParser.orderBy(srStmt.getDistributedBy(), srStmt);
accept(Token.RPAREN);
} else if (lexer.nextIfIdentifier(FnvHash.Constants.RANDOM)) {
srStmt.setDistributedByType(DistributedByType.Random);
}
}
if (lexer.nextIfIdentifier(FnvHash.Constants.BUCKETS)) {
if (lexer.token() == Token.LITERAL_INT) {
stmt.setBuckets(lexer.integerValue().intValue());
lexer.nextToken();
} else if (lexer.nextIfIdentifier("AUTO")) {
stmt.setAutoBucket(true);
}
}
if (lexer.token() == Token.ORDER) {
SQLOrderBy orderBy = this.exprParser.parseOrderBy();
srStmt.setOrderBy(orderBy);
}
if (lexer.nextIfIdentifier(FnvHash.Constants.PROPERTIES)) {
accept(Token.LPAREN);
parseAssignItems(srStmt.getTableOptions(), srStmt, false);
accept(Token.RPAREN);
}
if (lexer.nextIfIdentifier(FnvHash.Constants.BROKER)) {
acceptIdentifier(FnvHash.Constants.PROPERTIES);
accept(Token.LPAREN);
parseAssignItems(srStmt.getBrokerProperties(), srStmt, false);
accept(Token.RPAREN);
}
}
/**
* PARTITION BY RANGE (col1[,col2])
* PARTITION BY LIST (col1[,col2])
* PARTITION BY (col1[,col2])
* PARTITION BY FUNC(param1[,param2])
*
* @return
*/
public SQLPartitionBy parsePartitionBy() {
if (lexer.nextIf(Token.PARTITION)) {
accept(Token.BY);
SQLPartitionBy partitionClause;
boolean hasLparen = false;
if (lexer.nextIfIdentifier(FnvHash.Constants.RANGE)) {
partitionClause = new SQLPartitionByRange();
accept(Token.LPAREN);
hasLparen = true;
} else if (lexer.nextIfIdentifier(FnvHash.Constants.LIST)) {
partitionClause = new SQLPartitionByList();
((SQLPartitionByList) partitionClause).setType(SQLPartitionByList.PartitionByListType.LIST_EXPRESSION);
accept(Token.LPAREN);
hasLparen = true;
} else if (lexer.nextIf(Token.LPAREN)) {
partitionClause = new SQLPartitionByValue();
hasLparen = true;
} else {
partitionClause = new SQLPartitionByValue();
}
for (; ; ) {
partitionClause.addColumn(this.exprParser.expr());
if (lexer.nextIf(Token.COMMA)) {
continue;
}
break;
}
if (hasLparen) {
accept(Token.RPAREN);
}
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() == Token.RPAREN) {
break;
}
partitionClause.addPartition(this.getExprParser().parsePartition());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
return partitionClause;
}
return null;
}
protected StarRocksCreateTableStatement newCreateStatement() {
return new StarRocksCreateTableStatement();
}
}
| StarRocksCreateTableParser |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/SqmTreeTransformationLogger.java | {
"start": 245,
"end": 408
} | interface ____ {
String LOGGER_NAME = QueryLogging.subLoggerName( "sqm.transform" );
Logger LOGGER = Logger.getLogger( LOGGER_NAME );
}
| SqmTreeTransformationLogger |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/server/response/StatusControllerSpec.java | {
"start": 1138,
"end": 2471
} | class ____ {
private static EmbeddedServer server;
private static HttpClient client;
@BeforeAll
static void setupServer() {
server = ApplicationContext.run(EmbeddedServer.class, Collections.singletonMap("spec.name", "httpstatus"));
client = server
.getApplicationContext()
.createBean(HttpClient.class, server.getURL());
}
@AfterAll
static void stopServer() {
if (server != null) {
server.stop();
}
if (client != null) {
client.stop();
}
}
@Test
void testStatus() {
HttpResponse<String> response = client.toBlocking().exchange(HttpRequest.GET("/status"), String.class);
Optional<String> body = response.getBody();
assertEquals(HttpStatus.CREATED, response.getStatus());
assertEquals("success", body.get());
response = client.toBlocking().exchange(HttpRequest.GET("/status/http-response"), String.class);
body = response.getBody();
assertEquals(HttpStatus.CREATED, response.getStatus());
assertEquals("success", body.get());
response = client.toBlocking().exchange(HttpRequest.GET("/status/http-status"), String.class);
assertEquals(HttpStatus.CREATED, response.getStatus());
}
}
| StatusControllerSpec |
java | square__retrofit | retrofit-adapters/rxjava2/src/main/java/retrofit2/adapter/rxjava2/RxJava2CallAdapterFactory.java | {
"start": 1234,
"end": 1421
} | class ____ {@link Retrofit} allows you to return an {@link Observable}, {@link
* Flowable}, {@link Single}, {@link Completable} or {@link Maybe} from service methods.
*
* <pre><code>
* | to |
java | spring-projects__spring-framework | spring-aop/src/testFixtures/java/org/springframework/aop/testfixture/mixin/Lockable.java | {
"start": 758,
"end": 833
} | interface ____ {
void lock();
void unlock();
boolean locked();
}
| Lockable |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ServiceProviderBuildItem.java | {
"start": 3556,
"end": 3847
} | class ____ resources
*/
public static ServiceProviderBuildItem allProvidersFromClassPath(final String serviceInterfaceClassName) {
if (serviceInterfaceClassName == null || serviceInterfaceClassName.trim().isEmpty()) {
throw new IllegalArgumentException("service | path |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/convert/converters/MultiValuesConverterFactory.java | {
"start": 10403,
"end": 15857
} | class ____<T>
implements FormattingTypeConverter<ConvertibleMultiValues, T, Format> {
protected ConversionService conversionService;
AbstractConverterFromMultiValues(ConversionService conversionService) {
this.conversionService = conversionService;
}
/**
* Implemented convert function that checks which Format is specified inside the {@link Format} annotation
* If one is specified, it calls one of the corresponding abstract methods. Otherwise, empty optional is returned
*/
@Override
public Optional<T> convert(
ConvertibleMultiValues object, Class<T> targetType, ConversionContext conversionContext
) {
if (!(conversionContext instanceof ArgumentConversionContext)) {
return Optional.empty();
}
ConvertibleMultiValues<String> parameters = (ConvertibleMultiValues<String>) object;
ArgumentConversionContext<T> context = (ArgumentConversionContext<T>) conversionContext;
String format = conversionContext.getAnnotationMetadata()
.stringValue(Format.class).orElse(null);
if (format == null) {
return Optional.empty();
}
String name = conversionContext.getAnnotationMetadata().stringValue(Bindable.class)
.orElse(context.getArgument().getName());
String defaultValue = conversionContext.getAnnotationMetadata()
.stringValue(Bindable.class, "defaultValue")
.orElse(null);
switch (normalizeFormatName(format)) {
case FORMAT_CSV:
return retrieveSeparatedValue(context, name, parameters, defaultValue, CSV_DELIMITER);
case FORMAT_SSV:
return retrieveSeparatedValue(context, name, parameters, defaultValue, SSV_DELIMITER);
case FORMAT_PIPES:
return retrieveSeparatedValue(context, name, parameters, defaultValue, PIPES_DELIMITER);
case FORMAT_MULTI:
return retrieveMultiValue(context, name, parameters);
case FORMAT_DEEP_OBJECT:
return retrieveDeepObjectValue(context, name, parameters);
default:
return Optional.empty();
}
}
/**
* Method to retrieve the values from a separated parameter and return the parameter in desired type.
*
* @param conversionContext the conversion context of the value to which conversion is done
* (including type and annotations)
* @param name the name of the parameter
* @param parameters all the parameters from which the parameter of given name needs to be retrieved
* @param defaultValue default value
* @param delimiter the delimiter of the values in the parameter String
* @return the converted value if conversion was successful
*/
protected abstract Optional<T> retrieveSeparatedValue(ArgumentConversionContext<T> conversionContext,
String name,
ConvertibleMultiValues<String> parameters,
@Nullable String defaultValue,
Character delimiter);
/**
* Method to retrieve the values from a parameter in MULTI format and return in desired type.
*
* @param conversionContext the conversion context of the value to which conversion is done
* (including type and annotations)
* @param name the name of the parameter
* @param parameters all the parameters from which the parameter of given name needs to be retrieved
* @return the converted value if conversion was successful
*/
protected abstract Optional<T> retrieveMultiValue(ArgumentConversionContext<T> conversionContext,
String name,
ConvertibleMultiValues<String> parameters);
/**
* Method to retrieve the values from a parameter in DEEP_OBJECT format and return in desired type.
*
* @param conversionContext the conversion context of the value to which conversion is done
* (including type and annotations)
* @param name the name of the parameter
* @param parameters all the parameters from which the parameter of given name needs to be retrieved
* @return the converted value if conversion was successful*/
protected abstract Optional<T> retrieveDeepObjectValue(ArgumentConversionContext<T> conversionContext,
String name,
ConvertibleMultiValues<String> parameters);
@Override
public Class<Format> annotationType() {
return Format.class;
}
}
/**
* A converter to convert from {@link ConvertibleMultiValues} to an {@link Iterable}.
*/
public static | AbstractConverterFromMultiValues |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeMapAssert_contains_Test.java | {
"start": 1264,
"end": 2967
} | class ____ extends RangeMapAssertBaseTest {
@Test
void should_pass_if_actual_contains_given_entries() {
assertThat(actual).contains(entry(400, "violet"));
assertThat(actual).contains(entry(420, "violet"), entry(595, "orange"));
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
var error = expectAssertionError(() -> assertThat(actual).contains(entry(400, "violet")));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_entries_to_look_for_are_null() {
// GIVEN
MapEntry<Integer, String>[] entries = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).contains(entries));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("The entries to look for should not be null");
}
@Test
void should_fail_if_entries_to_look_for_are_empty() {
// WHEN
MapEntry<Integer, String>[] entries = array();
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).contains(entries));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("The entries to look for should not be empty");
}
@Test
void should_fail_if_actual_does_not_contain_all_given_entries() {
// GIVEN
MapEntry<Integer, String>[] entries = array(entry(400, "violet"), entry(100, "violet"), entry(500, "pink"));
// WHEN
var error = expectAssertionError(() -> assertThat(actual).contains(entries));
// THEN
then(error).hasMessage(shouldContain(actual, entries, asList(entry(100, "violet"), entry(500, "pink"))).create());
}
}
| RangeMapAssert_contains_Test |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/Beans.java | {
"start": 32451,
"end": 39877
} | class ____ part of "
+ "the Jandex index. Classes that are not subject to discovery can be registered via "
+ "AdditionalBeanBuildItem and non-qualifier annotations can use QualifierRegistrarBuildItem");
}
for (AnnotationValue val : requiredQualifier.valuesWithDefaults(beanDeployment.getBeanArchiveIndex())) {
if (!requiredClazz.method(val.name()).hasAnnotation(DotNames.NONBINDING)
&& !nonBindingFields.contains(val.name())) {
values.add(val);
}
}
}
for (AnnotationValue value : values) {
if (!value.equals(qualifier.valueWithDefault(beanDeployment.getBeanArchiveIndex(), value.name()))) {
matches = false;
break;
}
}
if (matches) {
return true;
}
}
}
return false;
}
static Set<AnnotationInstance> addImplicitQualifiers(Set<AnnotationInstance> qualifiers) {
if (qualifiers.isEmpty()) {
return BuiltinQualifier.DEFAULT_QUALIFIERS;
}
if ((qualifiers.size() <= 2 && qualifiers.stream()
.allMatch(a -> DotNames.NAMED.equals(a.name()) || DotNames.ANY.equals(a.name())))) {
qualifiers.add(BuiltinQualifier.DEFAULT.getInstance());
}
qualifiers.add(BuiltinQualifier.ANY.getInstance());
return qualifiers;
}
static List<MethodInfo> getCallbacks(ClassInfo beanClass, DotName annotation, IndexView index) {
InterceptionType interceptionType = null;
if (DotNames.POST_CONSTRUCT.equals(annotation)) {
interceptionType = InterceptionType.POST_CONSTRUCT;
} else if (DotNames.PRE_DESTROY.equals(annotation)) {
interceptionType = InterceptionType.PRE_DESTROY;
} else {
throw new IllegalArgumentException("Unexpected callback annotation: " + annotation);
}
List<MethodInfo> callbacks = new ArrayList<>();
collectCallbacks(beanClass, callbacks, annotation, index, new HashSet<>(), interceptionType);
Collections.reverse(callbacks);
return callbacks;
}
static List<MethodInfo> getAroundInvokes(ClassInfo beanClass, BeanDeployment deployment) {
AnnotationStore store = deployment.getAnnotationStore();
List<MethodInfo> methods = new ArrayList<>();
List<MethodInfo> allMethods = new ArrayList<>();
ClassInfo aClass = beanClass;
while (aClass != null) {
int aroundInvokesFound = 0;
for (MethodInfo method : aClass.methods()) {
if (Modifier.isStatic(method.flags())) {
continue;
}
if (store.hasAnnotation(method, DotNames.AROUND_INVOKE)) {
InterceptorInfo.addInterceptorMethod(allMethods, methods, method, InterceptionType.AROUND_INVOKE,
InterceptorPlacement.TARGET_CLASS);
if (++aroundInvokesFound > 1) {
throw new DefinitionException(
"Multiple @AroundInvoke interceptor methods declared on class: " + aClass);
}
}
allMethods.add(method);
}
DotName superTypeName = aClass.superName();
aClass = superTypeName == null || DotNames.OBJECT.equals(superTypeName) ? null
: getClassByName(deployment.getBeanArchiveIndex(), superTypeName);
}
Collections.reverse(methods);
return methods.isEmpty() ? List.of() : List.copyOf(methods);
}
static void analyzeType(Type type, BeanDeployment beanDeployment) {
if (type.kind() == Type.Kind.PARAMETERIZED_TYPE) {
for (Type argument : type.asParameterizedType().arguments()) {
fetchType(argument, beanDeployment);
}
} else if (type.kind() == Type.Kind.TYPE_VARIABLE) {
for (Type bound : type.asTypeVariable().bounds()) {
fetchType(bound, beanDeployment);
}
} else if (type.kind() == Type.Kind.WILDCARD_TYPE) {
fetchType(type.asWildcardType().extendsBound(), beanDeployment);
fetchType(type.asWildcardType().superBound(), beanDeployment);
}
}
    /**
     * Validates an interceptor or decorator bean and registers any bytecode
     * transformations it needs.
     * <p>
     * For class beans (when {@code transformPrivateInjectedFields} is enabled), every
     * private injected field is scheduled for a bytecode transformation that widens it
     * to package-private, so generated code can access it. For decorators, an injection
     * point of type {@code Bean<T>} qualified with {@code @Decorated} must use the
     * decorator's delegate type as its sole type argument.
     *
     * @param bean the bean to validate (class bean, interceptor or decorator)
     * @param errors collector for validation problems; not used in this method body —
     *        violations found here throw {@link DefinitionException} directly
     * @param bytecodeTransformerConsumer sink for required bytecode transformations
     */
    static void validateInterceptorDecorator(BeanInfo bean, List<Throwable> errors,
            Consumer<BytecodeTransformer> bytecodeTransformerConsumer) {
        // transform any private injected fields into package private
        if (bean.isClassBean() && bean.getDeployment().transformPrivateInjectedFields) {
            for (Injection injection : bean.getInjections()) {
                if (injection.isField() && Modifier.isPrivate(injection.getTarget().asField().flags())) {
                    bytecodeTransformerConsumer
                            .accept(new BytecodeTransformer(bean.getTarget().get().asClass().name().toString(),
                                    new PrivateInjectedFieldTransformFunction(injection.getTarget().asField())));
                }
            }
        }
        if (bean.isDecorator()) {
            DecoratorInfo decorator = (DecoratorInfo) bean;
            for (InjectionPointInfo injectionPointInfo : bean.getAllInjectionPoints()) {
                // the injection point is a field, an initializer method parameter or a bean constructor of a decorator,
                // with qualifier @Decorated, then the type parameter of the injected Bean must be the same as the delegate type
                if (injectionPointInfo.getRequiredType().name().equals(DotNames.BEAN)
                        && injectionPointInfo.getRequiredQualifier(DotNames.DECORATED) != null
                        && injectionPointInfo.getRequiredType().kind() == Type.Kind.PARAMETERIZED_TYPE) {
                    ParameterizedType parameterizedType = injectionPointInfo.getRequiredType().asParameterizedType();
                    // Bean<X> with @Decorated must be exactly Bean<delegate type>
                    if (parameterizedType.arguments().size() != 1
                            || !parameterizedType.arguments().get(0).equals(decorator.getDelegateType())) {
                        throw new DefinitionException(
                                "Injected @Decorated Bean<> has to use the delegate type as its type parameter. " +
                                        "Problematic injection point: " + injectionPointInfo.getTargetInfo());
                    }
                }
            }
        }
    }
static void validateBean(BeanInfo bean, List<Throwable> errors, Consumer<BytecodeTransformer> bytecodeTransformerConsumer,
Set<DotName> classesReceivingNoArgsCtor, boolean failIfNotProxyable) {
if (bean.isClassBean()) {
ClassInfo beanClass = bean.getTarget().get().asClass();
String classifier = bean.getScope().isNormal() ? "Normal scoped" : null;
if (classifier == null && bean.isSubclassRequired()) {
classifier = "Intercepted";
failIfNotProxyable = true;
}
if (beanClass.isFinal() && classifier != null) {
            // Client proxies and subclasses require a non-final class
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/primitive/PrimitiveWrapperProducerTest.java | {
"start": 2892,
"end": 3420
} | class ____ {
        // Producer method returning a null Boolean; the members below alternate
        // producer methods and producer fields for each primitive wrapper type,
        // all yielding null. Presumably the enclosing test verifies how null
        // wrapper producers are handled — confirm against the test methods.
        @Produces
        Boolean bool() {
            return null;
        }
        // null Byte producer field
        @Produces
        Byte b = null;
        // null Short producer method
        @Produces
        Short s() {
            return null;
        }
        // null Integer producer field
        @Produces
        Integer i = null;
        // null Long producer method
        @Produces
        Long l() {
            return null;
        }
        // null Float producer field
        @Produces
        Float f = null;
        // null Double producer method
        @Produces
        Double d() {
            return null;
        }
        // null Character producer field
        @Produces
        Character c = null;
}
@Dependent
static | Producers |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/router/TestLoadBasedRouterPolicy.java | {
"start": 2473,
"end": 8710
} | class ____ extends BaseRouterPoliciesTest {
  // Configures a LoadBasedRouterPolicy over 20 simulated active sub-clusters.
  // Sub-cluster sc05 is planted as the winner: it always gets weight 1.0 and
  // (via generateClusterMetricsInfo) the maximum available memory.
  @BeforeEach
  public void setUp() throws Exception {
    setPolicy(new LoadBasedRouterPolicy());
    setPolicyInfo(new WeightedPolicyInfo());
    Map<SubClusterIdInfo, Float> routerWeights = new HashMap<>();
    Map<SubClusterIdInfo, Float> amrmWeights = new HashMap<>();
    long now = Time.now();
    // simulate 20 active subclusters
    for (int i = 0; i < 20; i++) {
      SubClusterIdInfo sc = new SubClusterIdInfo(String.format("sc%02d", i));
      SubClusterInfo federationSubClusterInfo = SubClusterInfo.newInstance(
          sc.toId(), "dns1:80", "dns1:81", "dns1:82", "dns1:83",
          now - 1000, SubClusterState.SC_RUNNING, now - 2000, generateClusterMetricsInfo(i));
      getActiveSubclusters().put(sc.toId(), federationSubClusterInfo);
      // random weight of 0 or 1, except sc05 which is always 1.0
      float weight = getRand().nextInt(2);
      if (i == 5) {
        weight = 1.0f;
      }
      // 5% chance we omit one of the weights
      if (i <= 5 || getRand().nextFloat() > 0.05f) {
        routerWeights.put(sc, weight);
        amrmWeights.put(sc, weight);
      }
    }
    getPolicyInfo().setRouterPolicyWeights(routerWeights);
    getPolicyInfo().setAMRMPolicyWeights(amrmWeights);
    // initialize policy with context
    setupContext();
  }
public String generateClusterMetricsInfo(int id) {
long mem = 1024 * getRand().nextInt(277 * 100 - 1);
// plant a best cluster
if (id == 5) {
mem = 1024 * 277 * 100;
}
String clusterMetrics =
"{\"clusterMetrics\":{\"appsSubmitted\":65," + "\"appsCompleted\":64,"
+ "\"appsPending\":0,\"appsRunning\":0,\"appsFailed\":0,"
+ "\"appsKilled\":1,\"reservedMB\":0,\"availableMB\":" + mem + ","
+ "\"allocatedMB\":0,\"reservedVirtualCores\":0,"
+ "\"availableVirtualCores\":2216,\"allocatedVirtualCores\":0,"
+ "\"containersAllocated\":0,\"containersReserved\":0,"
+ "\"containersPending\":0,\"totalMB\":28364800,"
+ "\"totalVirtualCores\":2216,\"totalNodes\":278,\"lostNodes\":1,"
+ "\"unhealthyNodes\":0,\"decommissionedNodes\":0,"
+ "\"rebootedNodes\":0,\"activeNodes\":277}}\n";
return clusterMetrics;
}
@Test
public void testLoadIsRespected() throws YarnException {
SubClusterId chosen = ((FederationRouterPolicy) getPolicy())
.getHomeSubcluster(getApplicationSubmissionContext(), null);
// check the "planted" best cluster is chosen
assertEquals("sc05", chosen.getId());
}
  // With a single active sub-cluster whose weight is 0, routing must fail
  // with a YarnException rather than pick an ineligible sub-cluster.
  @Test
  public void testIfNoSubclustersWithWeightOne() throws Exception {
    setPolicy(new LoadBasedRouterPolicy());
    setPolicyInfo(new WeightedPolicyInfo());
    Map<SubClusterIdInfo, Float> routerWeights = new HashMap<>();
    Map<SubClusterIdInfo, Float> amrmWeights = new HashMap<>();
    // update subcluster with weight 0
    SubClusterIdInfo sc = new SubClusterIdInfo(String.format("sc%02d", 0));
    SubClusterInfo federationSubClusterInfo = SubClusterInfo.newInstance(
        sc.toId(), null, null, null, null, -1, SubClusterState.SC_RUNNING, -1,
        generateClusterMetricsInfo(0));
    // replace the sub-clusters registered by setUp() with just this one
    getActiveSubclusters().clear();
    getActiveSubclusters().put(sc.toId(), federationSubClusterInfo);
    routerWeights.put(sc, 0.0f);
    amrmWeights.put(sc, 0.0f);
    getPolicyInfo().setRouterPolicyWeights(routerWeights);
    getPolicyInfo().setAMRMPolicyWeights(amrmWeights);
    ConfigurableFederationPolicy policy = getPolicy();
    FederationPoliciesTestUtil.initializePolicyContext(policy,
        getPolicyInfo(), getActiveSubclusters());
    // routing must be rejected because no sub-cluster has weight 1
    LambdaTestUtils.intercept(YarnException.class, "Zero Active Subcluster with weight 1.",
        () -> ((FederationRouterPolicy) policy).
        getHomeSubcluster(getApplicationSubmissionContext(), null));
  }
@Test
public void testUpdateReservation() throws YarnException {
long now = Time.now();
ReservationSubmissionRequest resReq = getReservationSubmissionRequest();
when(resReq.getQueue()).thenReturn("queue1");
when(resReq.getReservationId()).thenReturn(ReservationId.newInstance(now, 1));
// first we invoke a reservation placement
FederationRouterPolicy routerPolicy = (FederationRouterPolicy) getPolicy();
SubClusterId chosen = routerPolicy.getReservationHomeSubcluster(resReq);
// add this to the store
FederationStateStoreFacade facade =
getFederationPolicyContext().getFederationStateStoreFacade();
ReservationHomeSubCluster subCluster =
ReservationHomeSubCluster.newInstance(resReq.getReservationId(), chosen);
facade.addReservationHomeSubCluster(subCluster);
// get all activeSubClusters
Map<SubClusterId, SubClusterInfo> activeSubClusters = getActiveSubclusters();
// Update ReservationHomeSubCluster
// Cannot be randomly selected, SubCluster with Weight >= 1.0 needs to be selected
WeightedPolicyInfo weightedPolicyInfo = this.getPolicyInfo();
Map<SubClusterIdInfo, Float> routerPolicyWeights = weightedPolicyInfo.getRouterPolicyWeights();
List<SubClusterId> subClusterIds = new ArrayList<>();
for (Map.Entry<SubClusterIdInfo, Float> entry : routerPolicyWeights.entrySet()) {
SubClusterIdInfo subClusterIdInfo = entry.getKey();
Float subClusterWeight = entry.getValue();
if (subClusterWeight >= 1.0) {
subClusterIds.add(subClusterIdInfo.toId());
}
}
SubClusterId chosen2 = subClusterIds.get(this.getRand().nextInt(subClusterIds.size()));
ReservationHomeSubCluster subCluster2 =
ReservationHomeSubCluster.newInstance(resReq.getReservationId(), chosen2);
facade.updateReservationHomeSubCluster(subCluster2);
// route an application that uses this app
ApplicationSubmissionContext applicationSubmissionContext =
ApplicationSubmissionContext.newInstance(
ApplicationId.newInstance(now, 1), "app1", "queue1", Priority.newInstance(1),
null, false, false, 1, null, null, false);
applicationSubmissionContext.setReservationID(resReq.getReservationId());
SubClusterId chosen3 = routerPolicy.getHomeSubcluster(
applicationSubmissionContext, new ArrayList<>());
assertEquals(chosen2, chosen3);
}
}
| TestLoadBasedRouterPolicy |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAclsEndToEnd.java | {
"start": 2895,
"end": 53709
} | class ____ {
  private static final Logger LOG =
      LoggerFactory.getLogger(TestAclsEndToEnd.class.getName());
  // Payload written into test files and compared back after reads.
  private static final String TEXT =
      "The blue zone is for loading and unloading only. "
      + "Please park in the red zone.";
  // Encryption zone roots used by the tests.
  private static final Path ZONE1 = new Path("/tmp/BLUEZONE");
  private static final Path ZONE2 = new Path("/tmp/REDZONE");
  private static final Path ZONE3 = new Path("/tmp/LOADINGZONE");
  private static final Path ZONE4 = new Path("/tmp/UNLOADINGZONE");
  // Test files inside the zones above.
  private static final Path FILE1 = new Path(ZONE1, "file1");
  private static final Path FILE1A = new Path(ZONE1, "file1a");
  private static final Path FILE2 = new Path(ZONE2, "file2");
  private static final Path FILE3 = new Path(ZONE3, "file3");
  private static final Path FILE4 = new Path(ZONE4, "file4");
  // Names of encryption keys created and deleted by the tests.
  private static final String KEY1 = "key1";
  private static final String KEY2 = "key2";
  private static final String KEY3 = "key3";
  // Real (pre-test) identity, captured in captureUser() and restored in teardown().
  private static UserGroupInformation realUgi;
  private static String realUser;
  // Per-test mini services: the KMS, its config directory, and the DFS cluster.
  private MiniKMS miniKMS;
  private File kmsDir;
  private MiniDFSCluster cluster;
  private DistributedFileSystem fs;
@BeforeAll
public static void captureUser() throws IOException {
realUgi = UserGroupInformation.getCurrentUser();
realUser = System.getProperty("user.name");
}
/**
* Extract the URI for the miniKMS.
*
* @return the URI for the miniKMS
*/
private String getKeyProviderURI() {
return KMSClientProvider.SCHEME_NAME + "://" +
miniKMS.getKMSUrl().toExternalForm().replace("://", "@");
}
/**
* Write out the config files needed by the miniKMS. The miniKMS doesn't
* provide a way to set the configs directly, so the only way to pass config
* parameters is to write them out into config files.
*
* @param confDir the directory into which to write the configs
* @param conf the config to write.
* @throws IOException
*/
private void writeConf(File confDir, Configuration conf)
throws IOException {
URI keystore = new Path(kmsDir.getAbsolutePath(), "kms.keystore").toUri();
conf.set(KMSConfiguration.KEY_PROVIDER_URI, "jceks://file@" + keystore);
conf.set("hadoop.kms.authentication.type", "simple");
Writer writer =
new FileWriter(new File(confDir, KMSConfiguration.KMS_SITE_XML));
conf.writeXml(writer);
writer.close();
writer = new FileWriter(new File(confDir, KMSConfiguration.KMS_ACLS_XML));
conf.writeXml(writer);
writer.close();
//create empty core-site.xml
writer = new FileWriter(new File(confDir, "core-site.xml"));
new Configuration(false).writeXml(writer);
writer.close();
}
/**
* Setup a fresh miniKMS and miniDFS.
*
* @param conf the configuration to use for both the miniKMS and miniDFS
* @throws Exception thrown if setup fails
*/
private void setup(Configuration conf) throws Exception {
setup(conf, true, true);
}
/**
* Setup a fresh miniDFS and a miniKMS. The resetKms parameter controls
* whether the miniKMS will start fresh or reuse the existing data.
*
* @param conf the configuration to use for both the miniKMS and miniDFS
* @param resetKms whether to start a fresh miniKMS
* @throws Exception thrown if setup fails
*/
private void setup(Configuration conf, boolean resetKms) throws Exception {
setup(conf, resetKms, true);
}
  /**
   * Setup a miniDFS and miniKMS. The resetKms and resetDfs parameters control
   * whether the services will start fresh or reuse the existing data.
   *
   * @param conf the configuration to use for both the miniKMS and miniDFS
   * @param resetKms whether to start a fresh miniKMS
   * @param resetDfs whether to start a fresh miniDFS
   * @throws Exception thrown if setup fails
   */
  private void setup(Configuration conf, boolean resetKms, boolean resetDfs)
      throws Exception {
    if (resetKms) {
      // allocate a new scratch directory for the KMS keystore and configs
      FileSystemTestHelper fsHelper = new FileSystemTestHelper();
      kmsDir = new File(fsHelper.getTestRootDir()).getAbsoluteFile();
      assertTrue(kmsDir.mkdirs());
    }
    // the caller's conf (with its ACLs) only reaches the KMS via these files
    writeConf(kmsDir, conf);
    MiniKMS.Builder miniKMSBuilder = new MiniKMS.Builder();
    miniKMS = miniKMSBuilder.setKmsConfDir(kmsDir).build();
    miniKMS.start();
    // NOTE: conf is deliberately replaced here — the DFS gets a fresh
    // HdfsConfiguration, not the KMS ACL conf passed in
    conf = new HdfsConfiguration();
    // Set up java key store
    conf.set(ProxyUsers.CONF_HADOOP_PROXYUSER + "." + realUser + ".users",
        "keyadmin,hdfs,user");
    conf.set(ProxyUsers.CONF_HADOOP_PROXYUSER + "." + realUser + ".hosts",
        "*");
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
        getKeyProviderURI());
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY,
        true);
    MiniDFSCluster.Builder clusterBuilder = new MiniDFSCluster.Builder(conf);
    cluster = clusterBuilder.numDataNodes(1).format(resetDfs).build();
    fs = cluster.getFileSystem();
  }
/**
* Stop the miniKMS and miniDFS.
*/
private void teardown() {
// Restore login user
UserGroupInformation.setLoginUser(realUgi);
if (cluster != null) {
cluster.shutdown();
}
miniKMS.stop();
}
/**
* Return a new {@link Configuration} with KMS ACLs appropriate to pass the
* full ACL test in {@link #doFullAclTest()} set.
*
* @param hdfsUgi the hdfs user
* @param keyadminUgi the keyadmin user
* @return the configuration
*/
private static Configuration getBaseConf(UserGroupInformation hdfsUgi,
UserGroupInformation keyadminUgi) {
Configuration conf = new Configuration();
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
keyadminUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DELETE",
keyadminUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.ROLLOVER",
keyadminUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET", " ");
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_KEYS",
keyadminUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
hdfsUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.SET_KEY_MATERIAL", " ");
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
hdfsUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK", "*");
return conf;
}
/**
* Set the recommended blacklists.
*
* @param hdfsUgi the hdfs user
*/
private static void setBlacklistAcls(Configuration conf,
UserGroupInformation hdfsUgi) {
conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.CREATE",
hdfsUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.DELETE",
hdfsUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.ROLLOVER",
hdfsUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.GET", "*");
conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.SET_KEY_MATERIAL",
"*");
conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.DECRYPT_EEK",
hdfsUgi.getUserName());
}
/**
* Set the key ACLs appropriate to pass the full ACL test in
* {@link #doFullAclTest()} using the specified prefix. The prefix should
* either be "whitelist.key.acl." or "key.acl.key1.".
*
* @param conf the configuration
* @param prefix the ACL prefix
* @param hdfsUgi the hdfs user
* @param keyadminUgi the keyadmin user
* @param userUgi the normal user
*/
private static void setKeyAcls(Configuration conf, String prefix,
UserGroupInformation hdfsUgi,
UserGroupInformation keyadminUgi,
UserGroupInformation userUgi) {
conf.set(prefix + "MANAGEMENT", keyadminUgi.getUserName());
conf.set(prefix + "READ", hdfsUgi.getUserName());
conf.set(prefix + "GENERATE_EEK", hdfsUgi.getUserName());
conf.set(KeyAuthorizationKeyProvider.KEY_ACL + KEY1 + ".DECRYPT_EEK",
userUgi.getUserName());
}
  /**
   * Test the full life cycle of a key using a config with whitelist key ACLs.
   * The configuration used is the correct configuration to pass the full ACL
   * test in {@link #doFullAclTest()}.
   *
   * @throws Exception thrown on test failure
   */
  @Test
  public void testGoodWithWhitelist() throws Exception {
    // Three proxy users play the hdfs, keyadmin, and plain-user roles.
    UserGroupInformation hdfsUgi =
        UserGroupInformation.createProxyUserForTesting("hdfs",
            realUgi, new String[] {"supergroup"});
    UserGroupInformation keyadminUgi =
        UserGroupInformation.createProxyUserForTesting("keyadmin",
            realUgi, new String[] {"keyadmin"});
    UserGroupInformation userUgi =
        UserGroupInformation.createProxyUserForTesting("user",
            realUgi, new String[] {"staff"});
    // base KMS ACLs + blacklist + whitelist key ACLs
    Configuration conf = getBaseConf(hdfsUgi, keyadminUgi);
    setBlacklistAcls(conf, hdfsUgi);
    setKeyAcls(conf, KMSConfiguration.WHITELIST_KEY_ACL_PREFIX,
        hdfsUgi, keyadminUgi, userUgi);
    doFullAclTest(conf, hdfsUgi, keyadminUgi, userUgi);
  }
  /**
   * Test the full life cycle of a key using a config with per-key ACLs.
   * The configuration used is the correct configuration to pass the full ACL
   * test in {@link #doFullAclTest()}.
   *
   * @throws Exception thrown on test failure
   */
  @Test
  public void testGoodWithKeyAcls() throws Exception {
    // Three proxy users play the hdfs, keyadmin, and plain-user roles.
    UserGroupInformation hdfsUgi =
        UserGroupInformation.createProxyUserForTesting("hdfs",
            realUgi, new String[] {"supergroup"});
    UserGroupInformation keyadminUgi =
        UserGroupInformation.createProxyUserForTesting("keyadmin",
            realUgi, new String[] {"keyadmin"});
    UserGroupInformation userUgi =
        UserGroupInformation.createProxyUserForTesting("user",
            realUgi, new String[] {"staff"});
    // base KMS ACLs + blacklist + ACLs scoped to KEY1 specifically
    Configuration conf = getBaseConf(hdfsUgi, keyadminUgi);
    setBlacklistAcls(conf, hdfsUgi);
    setKeyAcls(conf, KeyAuthorizationKeyProvider.KEY_ACL + KEY1 + ".",
        hdfsUgi, keyadminUgi, userUgi);
    doFullAclTest(conf, hdfsUgi, keyadminUgi, userUgi);
  }
  /**
   * Test the full life cycle of a key using a config with whitelist key ACLs
   * and without blacklist ACLs. The configuration used is the correct
   * configuration to pass the full ACL test in {@link #doFullAclTest()}.
   *
   * @throws Exception thrown on test failure
   */
  @Test
  public void testGoodWithWhitelistWithoutBlacklist() throws Exception {
    // Three proxy users play the hdfs, keyadmin, and plain-user roles.
    UserGroupInformation hdfsUgi =
        UserGroupInformation.createProxyUserForTesting("hdfs",
            realUgi, new String[] {"supergroup"});
    UserGroupInformation keyadminUgi =
        UserGroupInformation.createProxyUserForTesting("keyadmin",
            realUgi, new String[] {"keyadmin"});
    UserGroupInformation userUgi =
        UserGroupInformation.createProxyUserForTesting("user",
            realUgi, new String[] {"staff"});
    // same as testGoodWithWhitelist, but deliberately no blacklist ACLs
    Configuration conf = getBaseConf(hdfsUgi, keyadminUgi);
    setKeyAcls(conf, KMSConfiguration.WHITELIST_KEY_ACL_PREFIX,
        hdfsUgi, keyadminUgi, userUgi);
    doFullAclTest(conf, hdfsUgi, keyadminUgi, userUgi);
  }
  /**
   * Test the full life cycle of a key using a config with per-key ACLs
   * and without blacklist ACLs. The configuration used is the correct
   * configuration to pass the full ACL test in {@link #doFullAclTest()}.
   *
   * @throws Exception thrown on test failure
   */
  @Test
  public void testGoodWithKeyAclsWithoutBlacklist() throws Exception {
    // Three proxy users play the hdfs, keyadmin, and plain-user roles.
    UserGroupInformation hdfsUgi =
        UserGroupInformation.createProxyUserForTesting("hdfs",
            realUgi, new String[] {"supergroup"});
    UserGroupInformation keyadminUgi =
        UserGroupInformation.createProxyUserForTesting("keyadmin",
            realUgi, new String[] {"keyadmin"});
    UserGroupInformation userUgi =
        UserGroupInformation.createProxyUserForTesting("user",
            realUgi, new String[] {"staff"});
    // same as testGoodWithKeyAcls, but deliberately no blacklist ACLs
    Configuration conf = getBaseConf(hdfsUgi, keyadminUgi);
    setKeyAcls(conf, KeyAuthorizationKeyProvider.KEY_ACL + KEY1 + ".",
        hdfsUgi, keyadminUgi, userUgi);
    doFullAclTest(conf, hdfsUgi, keyadminUgi, userUgi);
  }
/**
* Run a full key life cycle test using the provided configuration and users.
*
* @param conf the configuration
* @param hdfs the user to use as the hdfs user
* @param keyadmin the user to use as the keyadmin user
* @param user the user to use as the normal user
* @throws Exception thrown if there is a test failure
*/
private void doFullAclTest(final Configuration conf,
final UserGroupInformation hdfsUgi,
final UserGroupInformation keyadminUgi,
final UserGroupInformation userUgi) throws Exception {
try {
setup(conf);
// Create a test key
assertTrue(createKey(keyadminUgi, KEY1, conf),
"Exception during creation of key " + KEY1 + " by " + keyadminUgi.getUserName());
// Fail to create a test key
assertFalse(createKey(hdfsUgi, KEY2, conf),
"Allowed creation of key " + KEY2 + " by " + hdfsUgi.getUserName());
assertFalse(createKey(userUgi, KEY2, conf),
"Allowed creation of key " + KEY2 + " by " + userUgi.getUserName());
// Create a directory and chown it to the normal user.
fs.mkdirs(ZONE1);
fs.setOwner(ZONE1, userUgi.getUserName(),
userUgi.getPrimaryGroupName());
// Create an EZ
assertTrue(createEncryptionZone(hdfsUgi, KEY1, ZONE1),
"Exception during creation of EZ " + ZONE1 + " by "
+ hdfsUgi.getUserName() + " using key " + KEY1);
// Fail to create an EZ
assertFalse(createEncryptionZone(keyadminUgi, KEY1, ZONE2),
"Allowed creation of EZ " + ZONE2 + " by "
+ keyadminUgi.getUserName() + " using key " + KEY1);
assertFalse(createEncryptionZone(userUgi, KEY1, ZONE2),
"Allowed creation of EZ " + ZONE2 + " by "
+ userUgi.getUserName() + " using key " + KEY1);
// Create a file in the zone
assertTrue(createFile(userUgi, FILE1, TEXT),
"Exception during creation of file " + FILE1 + " by "
+ userUgi.getUserName());
// Fail to create a file in the zone
assertFalse(createFile(hdfsUgi, FILE1A, TEXT),
"Allowed creation of file " + FILE1A + " by "
+ hdfsUgi.getUserName());
assertFalse(createFile(keyadminUgi, FILE1A, TEXT),
"Allowed creation of file " + FILE1A + " by "
+ keyadminUgi.getUserName());
// Read a file in the zone
assertTrue(compareFile(userUgi, FILE1, TEXT),
"Exception while reading file " + FILE1 + " by "
+ userUgi.getUserName());
// Fail to read a file in the zone
assertFalse(compareFile(hdfsUgi, FILE1, TEXT),
"Allowed reading of file " + FILE1 + " by "
+ hdfsUgi.getUserName());
assertFalse(compareFile(keyadminUgi, FILE1, TEXT),
"Allowed reading of file " + FILE1 + " by "
+ keyadminUgi.getUserName());
// Remove the zone
fs.delete(ZONE1, true);
// Fail to remove the key
assertFalse(deleteKey(hdfsUgi, KEY1), "Allowed deletion of file " + FILE1 + " by "
+ hdfsUgi.getUserName());
assertFalse(deleteKey(userUgi, KEY1), "Allowed deletion of file " + FILE1 + " by "
+ userUgi.getUserName());
// Remove
assertTrue(deleteKey(keyadminUgi, KEY1),
"Exception during deletion of file " + FILE1 + " by "
+ keyadminUgi.getUserName());
} finally {
fs.delete(ZONE1, true);
fs.delete(ZONE2, true);
teardown();
}
}
/**
* Test that key creation is correctly governed by ACLs.
* @throws Exception thrown if setup fails
*/
@Test
public void testCreateKey() throws Exception {
Configuration conf = new Configuration();
// Correct config with whitelist ACL
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
realUgi.getUserName());
conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
realUgi.getUserName());
try {
setup(conf);
assertTrue(createKey(realUgi, KEY1, conf),
"Exception during key creation with correct config using whitelist key ACLs");
} finally {
teardown();
}
conf = new Configuration();
// Correct config with default ACL
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
realUgi.getUserName());
conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "MANAGEMENT",
realUgi.getUserName());
try {
setup(conf);
assertTrue(createKey(realUgi, KEY2, conf),
"Exception during key creation with correct config using default key ACLs");
} finally {
teardown();
}
conf = new Configuration();
// Denied because of blacklist
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
realUgi.getUserName());
conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.CREATE",
realUgi.getUserName());
conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
realUgi.getUserName());
try {
setup(conf);
assertFalse(createKey(realUgi, KEY3, conf),
"Allowed key creation with blacklist for CREATE");
} finally {
teardown();
}
conf = new Configuration();
// Missing KMS ACL
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE", " ");
conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
realUgi.getUserName());
try {
setup(conf);
assertFalse(createKey(realUgi, KEY3, conf),
"Allowed key creation without CREATE KMS ACL");
} finally {
teardown();
}
conf = new Configuration();
// Missing key ACL
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
realUgi.getUserName());
try {
setup(conf);
assertFalse(createKey(realUgi, KEY3, conf),
"Allowed key creation without MANAGMENT key ACL");
} finally {
teardown();
}
conf = new Configuration();
// Denied because the key ACL set ignores the default ACL set for key3
conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
realUgi.getUserName());
conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "MANAGEMENT",
realUgi.getUserName());
conf.set(KeyAuthorizationKeyProvider.KEY_ACL + KEY3 + ".DECRYPT_EEK",
realUgi.getUserName());
try {
setup(conf);
assertFalse(createKey(realUgi, KEY3, conf),
"Allowed key creation when default key ACL should have been"
+ " overridden by key ACL");
} finally {
teardown();
}
conf = new Configuration();
// Allowed because the default setting for KMS ACLs is fully permissive
conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
realUgi.getUserName());
try {
setup(conf);
assertTrue(createKey(realUgi, KEY3, conf),
"Exception during key creation with default KMS ACLs");
} finally {
teardown();
}
}
  /**
   * Test that zone creation is correctly governed by ACLs. Zone creation
   * needs the GET_METADATA and GENERATE_EEK KMS ACLs plus READ and
   * GENERATE_EEK key ACLs; each case below removes or blacklists one
   * requirement at a time.
   * @throws Exception thrown if setup fails
   */
  @Test
  public void testCreateEncryptionZone() throws Exception {
    Configuration conf = new Configuration();
    // Create a test key
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
        realUgi.getUserName());
    try {
      setup(conf);
      assertTrue(createKey(realUgi, KEY1, conf),
          "Exception during key creation");
    } finally {
      teardown();
    }
    // We tear everything down and then restart it with the ACLs we want to
    // test so that there's no contamination from the ACLs needed for setup.
    // To make that work, we have to tell the setup() method not to create a
    // new KMS directory.
    conf = new Configuration();
    // Correct config with whitelist ACL
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE1);
      assertTrue(createEncryptionZone(realUgi, KEY1, ZONE1),
          "Exception during zone creation with correct config using"
          + " whitelist key ACLs");
    } finally {
      fs.delete(ZONE1, true);
      teardown();
    }
    conf = new Configuration();
    // Correct config with default ACL
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE2);
      assertTrue(createEncryptionZone(realUgi, KEY1, ZONE2),
          "Exception during zone creation with correct config using"
          + " default key ACLs");
    } finally {
      fs.delete(ZONE2, true);
      teardown();
    }
    conf = new Configuration();
    // Denied because the key ACL set ignores the default ACL set for key1
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + KEY1 + ".DECRYPT_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE3);
      assertFalse(createEncryptionZone(realUgi, KEY1, ZONE3),
          "Allowed creation of zone when default key ACLs should have"
          + " been overridden by key ACL");
    } finally {
      fs.delete(ZONE3, true);
      teardown();
    }
    conf = new Configuration();
    // Correct config except GET_METADATA is blacklisted
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE3);
      assertFalse(createEncryptionZone(realUgi, KEY1, ZONE3),
          "Allowed zone creation of zone with blacklisted GET_METADATA");
    } finally {
      fs.delete(ZONE3, true);
      teardown();
    }
    conf = new Configuration();
    // Correct config except GENERATE_EEK is blacklisted
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE3);
      assertFalse(createEncryptionZone(realUgi, KEY1, ZONE3),
          "Allowed zone creation of zone with blacklisted GENERATE_EEK");
    } finally {
      fs.delete(ZONE3, true);
      teardown();
    }
    conf = new Configuration();
    // Missing KMS ACL but works because defaults for KMS ACLs are fully
    // permissive
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE3);
      assertTrue(createEncryptionZone(realUgi, KEY1, ZONE3),
          "Exception during zone creation with default KMS ACLs");
    } finally {
      fs.delete(ZONE3, true);
      teardown();
    }
    conf = new Configuration();
    // Missing GET_METADATA KMS ACL (blank value grants nobody)
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA", " ");
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE4);
      assertFalse(createEncryptionZone(realUgi, KEY1, ZONE4),
          "Allowed zone creation without GET_METADATA KMS ACL");
    } finally {
      fs.delete(ZONE4, true);
      teardown();
    }
    conf = new Configuration();
    // Missing GENERATE_EEK KMS ACL (blank value grants nobody)
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK", " ");
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE4);
      assertFalse(createEncryptionZone(realUgi, KEY1, ZONE4),
          "Allowed zone creation without GENERATE_EEK KMS ACL");
    } finally {
      fs.delete(ZONE4, true);
      teardown();
    }
    conf = new Configuration();
    // Missing READ key ACL
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE4);
      assertFalse(createEncryptionZone(realUgi, KEY1, ZONE4),
          "Allowed zone creation without READ ACL");
    } finally {
      fs.delete(ZONE4, true);
      teardown();
    }
    conf = new Configuration();
    // Missing GENERATE_EEK key ACL
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
        realUgi.getUserName());
    conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
        realUgi.getUserName());
    conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
        realUgi.getUserName());
    try {
      setup(conf, false);
      fs.mkdirs(ZONE4);
      assertFalse(createEncryptionZone(realUgi, KEY1, ZONE4),
          "Allowed zone creation without GENERATE_EEK ACL");
    } finally {
      fs.delete(ZONE4, true);
      teardown();
    }
  }
/**
 * Test that in-zone file creation is correctly governed by KMS ACLs,
 * key ACLs (whitelist, default, and per-key), and blacklists. Each
 * scenario restarts the KMS with only the ACLs under test.
 * @throws Exception thrown if setup fails
 */
@Test
public void testCreateFileInEncryptionZone() throws Exception {
  Configuration conf = new Configuration();
  // Create a test key
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  // Sanity check: the key store must start empty so KEY1 is really new.
  assertTrue(new File(kmsDir, "kms.keystore").length() == 0);
  try {
    setup(conf);
    assertTrue(createKey(realUgi, KEY1, conf),
        "Exception during key creation");
    fs.mkdirs(ZONE1);
    assertTrue(createEncryptionZone(realUgi, KEY1, ZONE1),
        "Exception during zone creation");
    fs.mkdirs(ZONE2);
    assertTrue(createEncryptionZone(realUgi, KEY1, ZONE2),
        "Exception during zone creation");
    fs.mkdirs(ZONE3);
    assertTrue(createEncryptionZone(realUgi, KEY1, ZONE3),
        "Exception during zone creation");
    fs.mkdirs(ZONE4);
    assertTrue(createEncryptionZone(realUgi, KEY1, ZONE4),
        "Exception during zone creation");
  } catch (Throwable ex) {
    // Best-effort cleanup of all zones before propagating the failure.
    fs.delete(ZONE1, true);
    fs.delete(ZONE2, true);
    fs.delete(ZONE3, true);
    fs.delete(ZONE4, true);
    throw ex;
  } finally {
    teardown();
  }
  // We tear everything down and then restart it with the ACLs we want to
  // test so that there's no contamination from the ACLs needed for setup.
  // To make that work, we have to tell the setup() method not to create a
  // new KMS directory or DFS directory.
  conf = new Configuration();
  // Correct config with whitelist ACLs
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertTrue(createFile(realUgi, FILE1, TEXT),
        "Exception during file creation with correct config" + " using whitelist ACL");
  } finally {
    fs.delete(ZONE1, true);
    teardown();
  }
  conf = new Configuration();
  // Correct config with default ACLs
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    // Fixed message: this scenario exercises the default key ACLs, not
    // the whitelist (the previous text was copy-pasted from the case above).
    assertTrue(createFile(realUgi, FILE2, TEXT),
        "Exception during file creation with correct config using default key ACLs");
  } finally {
    fs.delete(ZONE2, true);
    teardown();
  }
  conf = new Configuration();
  // Denied because the key ACL set ignores the default ACL set for key1
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + KEY1 + ".READ",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(createFile(realUgi, FILE3, TEXT),
        "Allowed file creation when default key ACLs should have been overridden by key ACL");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied by blacklist
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(createFile(realUgi, FILE3, TEXT),
        "Allowed file creation with blacklist for GENERATE_EEK");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied by blacklist
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(createFile(realUgi, FILE3, TEXT),
        "Allowed file creation with blacklist for DECRYPT_EEK");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Allowed because default KMS ACLs are fully permissive
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertTrue(createFile(realUgi, FILE3, TEXT),
        "Exception during file creation with default KMS ACLs");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because of missing GENERATE_EEK KMS ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK", " ");
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(createFile(realUgi, FILE4, TEXT),
        "Allowed file creation without GENERATE_EEK KMS ACL");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because of missing DECRYPT_EEK KMS ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK", " ");
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(createFile(realUgi, FILE3, TEXT),
        "Allowed file creation without DECRYPT_EEK KMS ACL");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because of missing GENERATE_EEK key ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(createFile(realUgi, FILE3, TEXT),
        "Allowed file creation without GENERATE_EEK key ACL");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because of missing DECRYPT_EEK key ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(createFile(realUgi, FILE3, TEXT),
        "Allowed file creation without DECRYPT_EEK key ACL");
  } catch (Exception ex) {
    fs.delete(ZONE3, true);
    throw ex;
  } finally {
    teardown();
  }
}
/**
 * Test that in-zone file read is correctly governed by ACLs.
 * @throws Exception thrown if setup fails
 */
@Test
public void testReadFileInEncryptionZone() throws Exception {
  Configuration conf = new Configuration();
  // Create a test key
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GET_METADATA",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "READ",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "GENERATE_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  // Sanity check: the key store must start empty so KEY1 is really new.
  assertTrue(new File(kmsDir, "kms.keystore").length() == 0);
  try {
    setup(conf);
    assertTrue(createKey(realUgi, KEY1, conf),
        "Exception during key creation");
    fs.mkdirs(ZONE1);
    assertTrue(createEncryptionZone(realUgi, KEY1, ZONE1),
        "Exception during zone creation");
    assertTrue(createFile(realUgi, FILE1, TEXT),
        "Exception during file creation");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
  // We tear everything down and then restart it with the ACLs we want to
  // test so that there's no contamination from the ACLs needed for setup.
  // To make that work, we have to tell the setup() method not to create a
  // new KMS directory or DFS directory.
  conf = new Configuration();
  // Correct config with whitelist ACLs
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertTrue(compareFile(realUgi, FILE1, TEXT),
        "Exception while reading file with correct config with whitelist ACLs");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Correct config with default ACLs
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertTrue(compareFile(realUgi, FILE1, TEXT),
        "Exception while reading file with correct config with default ACLs");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because the key ACL set ignores the default ACL set for key1
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + KEY1 + ".READ",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(compareFile(realUgi, FILE1, TEXT),
        "Allowed file read when default key ACLs should have been overridden by key ACL");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied by blacklist
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.DECRYPT_EEK",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(compareFile(realUgi, FILE1, TEXT),
        "Allowed file read with blacklist for DECRYPT_EEK");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Allowed because default KMS ACLs are fully permissive
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertTrue(compareFile(realUgi, FILE1, TEXT),
        "Exception while reading file with default KMS ACLs");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because of missing DECRYPT_EEK KMS ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DECRYPT_EEK", " ");
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false, false);
    assertFalse(compareFile(realUgi, FILE1, TEXT),
        "Allowed file read without DECRYPT_EEK KMS ACL");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
  // Denied because of missing DECRYPT_EEK key ACL
  // (an empty Configuration carries no key ACLs at all)
  conf = new Configuration();
  try {
    setup(conf, false, false);
    assertFalse(compareFile(realUgi, FILE1, TEXT),
        "Allowed file read without DECRYPT_EEK key ACL");
  } catch (Throwable ex) {
    fs.delete(ZONE1, true);
    throw ex;
  } finally {
    teardown();
  }
}
/**
 * Test that key deletion is correctly governed by KMS ACLs, key ACLs
 * (whitelist and default), and blacklists. Each scenario restarts the
 * KMS with only the ACLs under test.
 * @throws Exception thrown if setup fails
 */
@Test
public void testDeleteKey() throws Exception {
  Configuration conf = new Configuration();
  // Create a test key
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.CREATE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  try {
    setup(conf);
    assertTrue(createKey(realUgi, KEY1, conf),
        "Exception during key creation");
    assertTrue(createKey(realUgi, KEY2, conf),
        "Exception during key creation");
    assertTrue(createKey(realUgi, KEY3, conf),
        "Exception during key creation");
  } finally {
    teardown();
  }
  // We tear everything down and then restart it with the ACLs we want to
  // test so that there's no contamination from the ACLs needed for setup.
  // To make that work, we have to tell the setup() method not to create a
  // new KMS directory.
  conf = new Configuration();
  // Correct config with whitelist ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DELETE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  try {
    setup(conf, false);
    assertTrue(deleteKey(realUgi, KEY1), "Exception during key deletion with correct config"
        + " using whitelist key ACLs");
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Correct config with default ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DELETE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  try {
    setup(conf, false);
    assertTrue(deleteKey(realUgi, KEY2), "Exception during key deletion with correct config"
        + " using default key ACLs");
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because of blacklist
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DELETE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.CONFIG_PREFIX + "blacklist.DELETE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  try {
    setup(conf, false);
    assertFalse(deleteKey(realUgi, KEY3),
        "Allowed key deletion with blacklist for DELETE");
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Missing KMS ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DELETE", " ");
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  try {
    setup(conf, false);
    assertFalse(deleteKey(realUgi, KEY3),
        "Allowed key deletion without DELETE KMS ACL");
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Missing key ACL
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DELETE",
      realUgi.getUserName());
  try {
    setup(conf, false);
    // Fixed typo in the assertion message: "MANAGMENT" -> "MANAGEMENT".
    assertFalse(deleteKey(realUgi, KEY3),
        "Allowed key deletion without MANAGEMENT key ACL");
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Denied because the key ACL set ignores the default ACL set for key3
  conf.set(KMSConfiguration.CONFIG_PREFIX + "acl.DELETE",
      realUgi.getUserName());
  conf.set(KMSConfiguration.DEFAULT_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + KEY3 + ".DECRYPT_EEK",
      realUgi.getUserName());
  try {
    setup(conf, false);
    assertFalse(deleteKey(realUgi, KEY3),
        "Allowed key deletion when default key ACL should have been overridden by key ACL");
  } finally {
    teardown();
  }
  conf = new Configuration();
  // Allowed because the default setting for KMS ACLs is fully permissive
  conf.set(KMSConfiguration.WHITELIST_KEY_ACL_PREFIX + "MANAGEMENT",
      realUgi.getUserName());
  try {
    setup(conf, false);
    assertTrue(deleteKey(realUgi, KEY3),
        "Exception during key deletion with default KMS ACLs");
  } finally {
    teardown();
  }
}
/**
 * Create the named key as the given user, acting through a doAs() block.
 *
 * @param ugi the target user
 * @param key the target key
 * @param conf the configuration
 * @return whether the key creation succeeded
 */
private boolean createKey(UserGroupInformation ugi, final String key,
    final Configuration conf) {
  // NoSuchAlgorithmException is checked but not part of UserOp's contract,
  // so it is rewrapped as an IOException.
  final UserOp createKeyOp = new UserOp() {
    @Override
    public void execute() throws IOException {
      try {
        DFSTestUtil.createKey(key, cluster, conf);
      } catch (NoSuchAlgorithmException ex) {
        throw new IOException(ex);
      }
    }
  };
  return doUserOp(ugi, createKeyOp);
}
/**
 * Create an encryption zone at {@code zone}, keyed with {@code key},
 * acting as the given user.
 *
 * @param ugi the target user
 * @param key the target key
 * @param zone the target zone
 * @return whether the zone creation succeeded
 */
private boolean createEncryptionZone(UserGroupInformation ugi,
    final String key, final Path zone) {
  final UserOp createZoneOp = new UserOp() {
    @Override
    public void execute() throws IOException {
      cluster.getFileSystem().createEncryptionZone(zone, key);
    }
  };
  return doUserOp(ugi, createZoneOp);
}
/**
 * Create a file as the specified user, writing {@code text} plus a
 * trailing newline as its contents.
 *
 * @param ugi the target user
 * @param file the target file
 * @param text the target file contents
 * @return whether the file creation succeeded
 */
private boolean createFile(UserGroupInformation ugi,
    final Path file, final String text) {
  return doUserOp(ugi, new UserOp() {
    @Override
    public void execute() throws IOException {
      // try-with-resources guarantees the writer (and the underlying
      // FSDataOutputStream) is closed even if println fails; the
      // original leaked the stream on any exception before close().
      try (PrintWriter out = new PrintWriter(
          new OutputStreamWriter(cluster.getFileSystem().create(file)))) {
        out.println(text);
      }
    }
  });
}
/**
 * Read a file as the specified user and compare the first line of its
 * contents to expectations.
 *
 * @param ugi the target user
 * @param file the target file
 * @param text the expected file contents
 * @return true if the file read succeeded and the contents were as expected
 */
private boolean compareFile(UserGroupInformation ugi,
    final Path file, final String text) {
  return doUserOp(ugi, new UserOp() {
    @Override
    public void execute() throws IOException {
      // try-with-resources guarantees the reader (and the underlying
      // FSDataInputStream) is closed even when the assertion below
      // throws; the original never closed the stream at all.
      try (BufferedReader in = new BufferedReader(
          new InputStreamReader(cluster.getFileSystem().open(file)))) {
        assertEquals(text, in.readLine(),
            "The text read does not match the text written");
      }
    }
  });
}
/**
 * Delete the named key as the specified user, going through the
 * NameNode's key provider.
 *
 * @param ugi the target user
 * @param key the target key
 * @return whether the key deletion succeeded
 */
private boolean deleteKey(UserGroupInformation ugi, final String key)
    throws IOException, InterruptedException {
  final UserOp deleteOp = new UserOp() {
    @Override
    public void execute() throws IOException {
      cluster.getNameNode().getNamesystem().getProvider().deleteKey(key);
    }
  };
  return doUserOp(ugi, deleteOp);
}
/**
 * Perform an operation as the given user. This method requires setting the
 * login user. This method does not restore the login user to the setting
 * from prior to the method call.
 *
 * @param ugi the target user
 * @param op the operation to perform
 * @return true if the operation succeeded without throwing an exception
 */
private boolean doUserOp(UserGroupInformation ugi, final UserOp op) {
  UserGroupInformation.setLoginUser(ugi);
  // Run the operation inside a doAs() so it executes with ugi's credentials.
  // An IOException is treated as "operation denied" and reported as false.
  return ugi.doAs(new PrivilegedAction<Boolean>() {
    @Override
    public Boolean run() {
      try {
        op.execute();
        return true;
      } catch (IOException ex) {
        LOG.error("IOException thrown during doAs() operation", ex);
        return false;
      }
    }
  });
}
/**
* Simple | TestAclsEndToEnd |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/hamlet2/TestHamlet.java | {
"start": 1480,
"end": 3334
} | class ____ {
@Test
void testHamlet() {
  // Build a small document exercising nested elements, implicit element
  // closing (__()) and the "#id.class" selector shorthand.
  Hamlet h = newHamlet().
      title("test").
      h1("heading 1").
      p("#id.class").
        b("hello").
        em("world!").__().
      div("#footer").
        __("Brought to you by").
        a("https://hostname/", "Somebody").__();
  PrintWriter out = h.getWriter();
  out.flush();
  // All opened elements must have been closed again.
  assertEquals(0, h.nestLevel);
  verify(out).print("<title");
  verify(out).print("test");
  verify(out).print("</title>");
  verify(out).print("<h1");
  verify(out).print("heading 1");
  verify(out).print("</h1>");
  verify(out).print("<p");
  verify(out).print(" id=\"id\"");
  verify(out).print(" class=\"class\"");
  verify(out).print("<b");
  verify(out).print("hello");
  verify(out).print("</b>");
  verify(out).print("<em");
  verify(out).print("world!");
  verify(out).print("</em>");
  verify(out).print("<div");
  verify(out).print(" id=\"footer\"");
  verify(out).print("Brought to you by");
  verify(out).print("<a");
  verify(out).print(" href=\"https://hostname/\"");
  verify(out).print("Somebody");
  verify(out).print("</a>");
  verify(out).print("</div>");
  // <p> is an optional-end-tag element, so no closing tag is emitted.
  verify(out, never()).print("</p>");
}
@Test
void testTable() {
  Hamlet h = newHamlet().
      title("test table").
      link("style.css");
  // Three identical two-cell rows are enough to prove that row and cell
  // end tags are emitted for every iteration.
  TABLE t = h.table("#id");
  for (int i = 0; i < 3; ++i) {
    t.tr().td("1").td("2").__();
  }
  t.__();
  PrintWriter out = h.getWriter();
  out.flush();
  assertEquals(0, h.nestLevel);
  verify(out).print("<table");
  verify(out).print("</table>");
  verify(out, atLeast(1)).print("</td>");
  verify(out, atLeast(1)).print("</tr>");
}
@Test
void testEnumAttrs() {
Hamlet h = newHamlet().
meta_http("Content-type", "text/html; charset=utf-8").
title("test | TestHamlet |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/network/NetworkReceiveTest.java | {
"start": 1309,
"end": 4959
class ____ {

    /**
     * Verifies that bytesRead() accumulates across partial reads: first the
     * 4-byte size header, then the payload in two 64-byte chunks, and that
     * the receive only reports complete once all 128 payload bytes arrive.
     */
    @Test
    public void testBytesRead() throws IOException {
        NetworkReceive receive = new NetworkReceive(128, "0");
        assertEquals(0, receive.bytesRead());
        ScatteringByteChannel channel = Mockito.mock(ScatteringByteChannel.class);
        ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
        // First read supplies only the 4-byte size prefix (value 128).
        Mockito.when(channel.read(bufferCaptor.capture())).thenAnswer(invocation -> {
            bufferCaptor.getValue().putInt(128);
            return 4;
        }).thenReturn(0);
        assertEquals(4, receive.readFrom(channel));
        assertEquals(4, receive.bytesRead());
        assertFalse(receive.complete());
        // Second read supplies the first half of the payload.
        Mockito.reset(channel);
        Mockito.when(channel.read(bufferCaptor.capture())).thenAnswer(invocation -> {
            bufferCaptor.getValue().put(TestUtils.randomBytes(64));
            return 64;
        });
        assertEquals(64, receive.readFrom(channel));
        assertEquals(68, receive.bytesRead());
        assertFalse(receive.complete());
        // Third read supplies the second half; the receive is now complete.
        Mockito.reset(channel);
        Mockito.when(channel.read(bufferCaptor.capture())).thenAnswer(invocation -> {
            bufferCaptor.getValue().put(TestUtils.randomBytes(64));
            return 64;
        });
        assertEquals(64, receive.readFrom(channel));
        assertEquals(132, receive.bytesRead());
        assertTrue(receive.complete());
    }

    /** Before any bytes are read, the required memory amount is unknown. */
    @Test
    public void testRequiredMemoryAmountKnownWhenNotSet() {
        NetworkReceive receive = new NetworkReceive("0");
        assertFalse(receive.requiredMemoryAmountKnown(), "Memory amount should not be known before read.");
    }

    /** Reading the 4-byte size prefix makes the required memory amount known. */
    @Test
    public void testRequiredMemoryAmountKnownWhenSet() throws IOException {
        NetworkReceive receive = new NetworkReceive(128, "0");
        ScatteringByteChannel channel = Mockito.mock(ScatteringByteChannel.class);
        ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
        Mockito.when(channel.read(bufferCaptor.capture())).thenAnswer(invocation -> {
            bufferCaptor.getValue().putInt(64);
            return 4;
        });
        receive.readFrom(channel);
        assertTrue(receive.requiredMemoryAmountKnown(), "Memory amount should be known after read.");
    }

    /** size() of a receive built around a predefined buffer includes the 4-byte header. */
    @Test
    public void testSizeWithPredefineBuffer() {
        int payloadSize = 8;
        int expectedTotalSize = 4 + payloadSize; // 4 bytes for size buffer + payload size
        ByteBuffer payloadBuffer = ByteBuffer.allocate(payloadSize);
        IntStream.range(0, payloadSize).forEach(i -> payloadBuffer.put((byte) i));
        NetworkReceive networkReceive = new NetworkReceive("0", payloadBuffer);
        assertEquals(expectedTotalSize, networkReceive.size(), "The total size should be the sum of the size buffer and payload.");
    }

    /** size() after reading the size header reflects header plus declared payload. */
    @Test
    public void testSizeAfterRead() throws IOException {
        int payloadSize = 32;
        int expectedTotalSize = 4 + payloadSize; // 4 bytes for size buffer + payload size
        NetworkReceive receive = new NetworkReceive(128, "0");
        ScatteringByteChannel channel = Mockito.mock(ScatteringByteChannel.class);
        ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
        Mockito.when(channel.read(bufferCaptor.capture())).thenAnswer(invocation -> {
            bufferCaptor.getValue().putInt(payloadSize);
            return 4;
        });
        receive.readFrom(channel);
        assertEquals(expectedTotalSize, receive.size(), "The total size should be the sum of the size buffer and receive size.");
    }
}
| NetworkReceiveTest |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/service/repository/embedded/EmbeddedConfigInfoBetaPersistServiceImpl.java | {
"start": 3266,
"end": 16080
} | class ____ implements ConfigInfoBetaPersistService {
private DataSourceService dataSourceService;
private final DatabaseOperate databaseOperate;
private MapperManager mapperManager;
/**
 * The constructor sets the dependency injection order.
 *
 * @param databaseOperate the embedded database operator used for all SQL execution.
 */
public EmbeddedConfigInfoBetaPersistServiceImpl(DatabaseOperate databaseOperate) {
    this.databaseOperate = databaseOperate;
    this.dataSourceService = DynamicDataSource.getInstance().getDataSource();
    // Datasource SQL logging is opt-in via configuration; defaults to off.
    Boolean isDataSourceLogEnable = EnvUtil.getProperty(CommonConstant.NACOS_PLUGIN_DATASOURCE_LOG, Boolean.class,
            false);
    this.mapperManager = MapperManager.instance(isDataSourceLogEnable);
    // Register the Derby import event on the shared publisher so other
    // components can observe embedded-storage imports.
    NotifyCenter.registerToSharePublisher(DerbyImportEvent.class);
}
/**
 * Builds a pagination helper backed by the embedded database operator.
 *
 * @param <E> the row type handled by the helper
 * @return a fresh {@code EmbeddedPaginationHelperImpl}
 */
@Override
public <E> PaginationHelper<E> createPaginationHelper() {
    return new EmbeddedPaginationHelperImpl<E>(databaseOperate);
}
/**
 * Looks up the lightweight state (id and last-modified time) of a beta
 * configuration row; returns null when no row matches.
 */
@Override
public ConfigInfoStateWrapper findConfigInfo4BetaState(final String dataId, final String group,
        final String tenant) {
    ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO_BETA);
    // A blank tenant is normalized to the empty string, matching how rows
    // are stored on insert.
    String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    final String sql = configInfoBetaMapper.select(
            Arrays.asList("id", "data_id", "group_id", "tenant_id", "gmt_modified"),
            Arrays.asList("data_id", "group_id", "tenant_id"));
    return databaseOperate.queryOne(sql, new Object[] {dataId, group, tenantTmp},
            CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER);
}
/**
 * Converts the current beta row state into an operate result: a failed
 * result when no row exists, otherwise one carrying the row's id and
 * last-modified timestamp.
 */
private ConfigOperateResult getBetaOperateResult(String dataId, String group, String tenant) {
    final String normalizedTenant = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    ConfigInfoStateWrapper state = findConfigInfo4BetaState(dataId, group, normalizedTenant);
    return state == null
            ? new ConfigOperateResult(false)
            : new ConfigOperateResult(state.getId(), state.getLastModified());
}
/**
 * Inserts a new beta configuration row and returns the resulting row
 * state. Blank app name, tenant, and encrypted data key are normalized to
 * the empty string before insert.
 */
@Override
public ConfigOperateResult addConfigInfo4Beta(ConfigInfo configInfo, String betaIps, String srcIp, String srcUser) {
    String appNameTmp = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
    String tenantTmp = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
    String encryptedDataKey = StringUtils.defaultEmptyIfBlank(configInfo.getEncryptedDataKey());
    configInfo.setTenant(tenantTmp);
    try {
        String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE);
        ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
                TableConstant.CONFIG_INFO_BETA);
        // The "@NOW()" suffix marks columns filled by the database clock
        // rather than by a bound parameter — presumably expanded by the
        // mapper; TODO confirm against the mapper implementation.
        final String sql = configInfoBetaMapper.insert(
                Arrays.asList("data_id", "group_id", "tenant_id", "app_name", "content", "md5", "beta_ips",
                        "src_ip", "src_user", "gmt_create@NOW()", "gmt_modified@NOW()", "encrypted_data_key"));
        final Object[] args = new Object[]{configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp,
                configInfo.getContent(), md5, betaIps, srcIp, srcUser, encryptedDataKey};
        Timestamp time = new Timestamp(System.currentTimeMillis());
        // Record the change event before executing, so the embedded-storage
        // context carries the modification metadata.
        EmbeddedStorageContextUtils.onModifyConfigBetaInfo(configInfo, betaIps, srcIp, time);
        EmbeddedStorageContextHolder.addSqlContext(sql, args);
        databaseOperate.blockUpdate();
        return getBetaOperateResult(configInfo.getDataId(), configInfo.getGroup(), tenantTmp);
    } finally {
        // Always clear the thread-bound SQL context, even on failure.
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
/**
 * Upserts a beta configuration: inserts a new row when none exists for the
 * (dataId, group, tenant) triple, otherwise updates the existing row.
 */
@Override
public ConfigOperateResult insertOrUpdateBeta(final ConfigInfo configInfo, final String betaIps, final String srcIp,
        final String srcUser) {
    ConfigInfoStateWrapper existing = findConfigInfo4BetaState(configInfo.getDataId(), configInfo.getGroup(),
            configInfo.getTenant());
    return existing == null
            ? addConfigInfo4Beta(configInfo, betaIps, srcIp, srcUser)
            : updateConfigInfo4Beta(configInfo, betaIps, srcIp, srcUser);
}
/**
 * Upserts a beta configuration with compare-and-swap semantics on the
 * update path: inserts when no row exists, otherwise performs a CAS update
 * guarded by the caller-supplied MD5.
 */
@Override
public ConfigOperateResult insertOrUpdateBetaCas(final ConfigInfo configInfo, final String betaIps,
        final String srcIp, final String srcUser) {
    ConfigInfoStateWrapper existing = findConfigInfo4BetaState(configInfo.getDataId(), configInfo.getGroup(),
            configInfo.getTenant());
    return existing == null
            ? addConfigInfo4Beta(configInfo, betaIps, srcIp, srcUser)
            : updateConfigInfo4BetaCas(configInfo, betaIps, srcIp, srcUser);
}
/**
 * Deletes the beta configuration row for the given (dataId, group, tenant)
 * triple, if one exists. A no-op when no matching row is found.
 *
 * @throws NacosConfigException when the delete statement fails to apply.
 */
@Override
public void removeConfigInfo4Beta(final String dataId, final String group, final String tenant) {
    final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    ConfigInfoStateWrapper configInfo = findConfigInfo4BetaState(dataId, group, tenant);
    if (configInfo != null) {
        try {
            ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(
                    dataSourceService.getDataSourceType(), TableConstant.CONFIG_INFO_BETA);
            final String sql = configInfoBetaMapper.delete(Arrays.asList("data_id", "group_id", "tenant_id"));
            final Object[] args = new Object[] {dataId, group, tenantTmp};
            EmbeddedStorageContextUtils.onDeleteConfigBetaInfo(tenantTmp, group, dataId,
                    System.currentTimeMillis());
            EmbeddedStorageContextHolder.addSqlContext(sql, args);
            boolean result = databaseOperate.update(EmbeddedStorageContextHolder.getCurrentSqlContext());
            if (!result) {
                // Fixed copy-paste in the message: this service handles beta
                // configurations, not tag configurations.
                throw new NacosConfigException("[Beta] Configuration deletion failed");
            }
        } finally {
            // Always clear the thread-bound SQL context, even on failure.
            EmbeddedStorageContextHolder.cleanAllContext();
        }
    }
}
/**
 * Unconditionally updates the existing beta configuration row and returns
 * the resulting row state. Blank app name, tenant, and encrypted data key
 * are normalized to the empty string first.
 */
@Override
public ConfigOperateResult updateConfigInfo4Beta(ConfigInfo configInfo, String betaIps, String srcIp,
        String srcUser) {
    String appNameTmp = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
    String tenantTmp = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
    String encryptedDataKey = StringUtils.defaultEmptyIfBlank(configInfo.getEncryptedDataKey());
    configInfo.setTenant(tenantTmp);
    try {
        String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE);
        ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
                TableConstant.CONFIG_INFO_BETA);
        // "gmt_modified@NOW()" is filled by the database clock and consumes
        // no bound parameter, so args align with the remaining SET columns
        // followed by the WHERE columns.
        final String sql = configInfoBetaMapper.update(
                Arrays.asList("content", "md5", "beta_ips", "src_ip", "src_user", "gmt_modified@NOW()",
                        "app_name", "encrypted_data_key"), Arrays.asList("data_id", "group_id", "tenant_id"));
        final Object[] args = new Object[]{configInfo.getContent(), md5, betaIps, srcIp, srcUser, appNameTmp,
                encryptedDataKey, configInfo.getDataId(), configInfo.getGroup(), tenantTmp};
        Timestamp time = new Timestamp(System.currentTimeMillis());
        EmbeddedStorageContextUtils.onModifyConfigBetaInfo(configInfo, betaIps, srcIp, time);
        EmbeddedStorageContextHolder.addSqlContext(sql, args);
        databaseOperate.blockUpdate();
        return getBetaOperateResult(configInfo.getDataId(), configInfo.getGroup(), tenantTmp);
    } finally {
        // Always clear the thread-bound SQL context, even on failure.
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
/**
 * Compare-and-swap update of the beta configuration row: the update only
 * applies when the stored MD5 still matches {@code configInfo.getMd5()}.
 * Returns a failed result when the CAS does not apply.
 */
@Override
public ConfigOperateResult updateConfigInfo4BetaCas(ConfigInfo configInfo, String betaIps, String srcIp,
        String srcUser) {
    String appNameTmp = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
    String tenantTmp = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
    configInfo.setTenant(tenantTmp);
    try {
        String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE);
        ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
                TableConstant.CONFIG_INFO_BETA);
        // NOTE(review): unlike updateConfigInfo4Beta, no encrypted_data_key
        // parameter is passed here — verify whether the CAS mapper updates
        // that column or intentionally leaves it untouched.
        MapperContext context = new MapperContext();
        context.putUpdateParameter(FieldConstant.CONTENT, configInfo.getContent());
        context.putUpdateParameter(FieldConstant.MD5, md5);
        context.putUpdateParameter(FieldConstant.BETA_IPS, betaIps);
        context.putUpdateParameter(FieldConstant.SRC_IP, srcIp);
        context.putUpdateParameter(FieldConstant.SRC_USER, srcUser);
        context.putUpdateParameter(FieldConstant.APP_NAME, appNameTmp);
        context.putWhereParameter(FieldConstant.DATA_ID, configInfo.getDataId());
        context.putWhereParameter(FieldConstant.GROUP_ID, configInfo.getGroup());
        context.putWhereParameter(FieldConstant.TENANT_ID, tenantTmp);
        // The caller-supplied MD5 is the CAS guard in the WHERE clause.
        context.putWhereParameter(FieldConstant.MD5, configInfo.getMd5());
        MapperResult mapperResult = configInfoBetaMapper.updateConfigInfo4BetaCas(context);
        final String sql = mapperResult.getSql();
        List<Object> paramList = mapperResult.getParamList();
        final Object[] args = paramList.toArray();
        Timestamp time = new Timestamp(System.currentTimeMillis());
        EmbeddedStorageContextUtils.onModifyConfigBetaInfo(configInfo, betaIps, srcIp, time);
        EmbeddedStorageContextHolder.addSqlContext(sql, args);
        boolean success = databaseOperate.blockUpdate();
        if (success) {
            return getBetaOperateResult(configInfo.getDataId(), configInfo.getGroup(), tenantTmp);
        } else {
            return new ConfigOperateResult(false);
        }
    } finally {
        // Always clear the thread-bound SQL context, even on failure.
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
@Override
public ConfigInfoBetaWrapper findConfigInfo4Beta(final String dataId, final String group, final String tenant) {
String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO_BETA);
final String sql = configInfoBetaMapper.select(
Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content", "beta_ips",
"encrypted_data_key", "gmt_modified"), Arrays.asList("data_id", "group_id", "tenant_id"));
return databaseOperate.queryOne(sql, new Object[] {dataId, group, tenantTmp},
CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER);
}
@Override
public int configInfoBetaCount() {
ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO_BETA);
String sql = configInfoBetaMapper.count(null);
Integer result = databaseOperate.queryOne(sql, Integer.class);
if (result == null) {
throw new IllegalArgumentException("configInfoBetaCount error");
}
return result;
}
@Override
public Page<ConfigInfoBetaWrapper> findAllConfigInfoBetaForDumpAll(final int pageNo, final int pageSize) {
final int startRow = (pageNo - 1) * pageSize;
ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO_BETA);
String sqlCountRows = configInfoBetaMapper.count(null);
MapperContext context = new MapperContext();
context.setStartRow(startRow);
context.setPageSize(pageSize);
MapperResult mapperResult = configInfoBetaMapper.findAllConfigInfoBetaForDumpAllFetchRows(context);
String sqlFetchRows = mapperResult.getSql();
PaginationHelper<ConfigInfoBetaWrapper> helper = createPaginationHelper();
return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {}, pageNo, pageSize,
CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER);
}
}
| EmbeddedConfigInfoBetaPersistServiceImpl |
java | micronaut-projects__micronaut-core | router/src/main/java/io/micronaut/web/router/version/DefaultVersionProvider.java | {
"start": 790,
"end": 942
} | interface ____ {
/**
*
* @return Resolves a default Version
*/
@NonNull
String resolveDefaultVersion();
}
| DefaultVersionProvider |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/StateToProcessWriterHelper.java | {
"start": 488,
"end": 1387
} | class ____ {
private StateToProcessWriterHelper() {}
public static void writeStateToStream(BytesReference source, OutputStream stream) throws IOException {
// The source bytes are already UTF-8. The C++ process wants UTF-8, so we
// can avoid converting to a Java String only to convert back again.
int length = source.length();
// There's a complication that the source can already have trailing 0 bytes
while (length > 0 && source.get(length - 1) == 0) {
--length;
}
source.slice(0, length).writeTo(stream);
// This is dictated by the JSON parser on the C++ side; it treats a '\0' as the character
// that separates distinct JSON documents, and this is what we need because we're
// sending multiple JSON documents via the same named pipe.
stream.write(0);
}
}
| StateToProcessWriterHelper |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceRegistryUtils.java | {
"start": 1387,
"end": 3904
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(ServiceRegistryUtils.class);
public static final String SVC_USERS = "/services/yarn/users";
/**
* Get the registry path for an instance under the user's home node
* @param instanceName application instance
* @return a path to the registry location for this application instance.
*/
public static String registryPathForInstance(String instanceName) {
return RegistryUtils.servicePath(
RegistryUtils.currentUser(), YarnServiceConstants.APP_TYPE, instanceName
);
}
/**
* Build the path to a service folder
* @param username user name
* @param serviceName service name
* @return the home path to the service
*/
public static String mkServiceHomePath(String username, String serviceName) {
return mkUserHomePath(username) + "/" + serviceName;
}
/**
* Build the path to a user home folder;
*/
public static String mkUserHomePath(String username) {
return SVC_USERS + "/" + username;
}
/**
* Determine whether a DNS lookup exists for a given name. If a DNS server
* address is provided, the lookup will be performed against this DNS
* server. This option is provided because it may be desirable to perform
* the lookup against Registry DNS directly to avoid caching of negative
* responses that may be performed by other DNS servers, thereby allowing the
* lookup to succeed sooner.
*
* @param addr host:port dns address, or null
* @param name name to look up
* @return true if a lookup succeeds for the specified name
*/
public static boolean registryDNSLookupExists(String addr, String
name) {
if (addr == null) {
try {
InetAddress.getByName(name);
return true;
} catch (UnknownHostException e) {
return false;
}
}
String dnsURI = String.format("dns://%s", addr);
Hashtable<String, Object> env = new Hashtable<>();
env.put(Context.INITIAL_CONTEXT_FACTORY,
"com.sun.jndi.dns.DnsContextFactory");
env.put(Context.PROVIDER_URL, dnsURI);
try {
DirContext ictx = new InitialDirContext(env);
Attributes attrs = ictx.getAttributes(name, new String[]{"A"});
if (attrs.size() > 0) {
return true;
}
} catch (NameNotFoundException e) {
// this doesn't need to be logged
} catch (NamingException e) {
LOG.error("Got exception when performing DNS lookup", e);
}
return false;
}
}
| ServiceRegistryUtils |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/netty/TestingTieredStorageNettyService.java | {
"start": 2802,
"end": 4291
} | class ____ {
private BiConsumer<TieredStoragePartitionId, NettyServiceProducer>
registerProducerConsumer = (partitionId, nettyServiceProducer) -> {};
private BiFunction<
TieredStoragePartitionId,
TieredStorageSubpartitionId,
CompletableFuture<NettyConnectionReader>>
registerConsumerFunction = (tieredStoragePartitionId, subpartitionId) -> null;
public Builder() {}
public TestingTieredStorageNettyService.Builder setRegisterProducerConsumer(
BiConsumer<TieredStoragePartitionId, NettyServiceProducer>
registerProducerConsumer) {
this.registerProducerConsumer = registerProducerConsumer;
return this;
}
public TestingTieredStorageNettyService.Builder setRegisterConsumerFunction(
BiFunction<
TieredStoragePartitionId,
TieredStorageSubpartitionId,
CompletableFuture<NettyConnectionReader>>
registerConsumerFunction) {
this.registerConsumerFunction = registerConsumerFunction;
return this;
}
public TestingTieredStorageNettyService build() {
return new TestingTieredStorageNettyService(
registerProducerConsumer, registerConsumerFunction);
}
}
}
| Builder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.