language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java | {
"start": 2865,
"end": 6037
} | class ____
extends AccessTokenProvider {
public static final String OAUTH_REFRESH_TOKEN_KEY
= "dfs.webhdfs.oauth2.refresh.token";
public static final String OAUTH_REFRESH_TOKEN_EXPIRES_KEY
= "dfs.webhdfs.oauth2.refresh.token.expires.ms.since.epoch";
private AccessTokenTimer accessTokenTimer;
private String accessToken;
private String refreshToken;
private String clientId;
private String refreshURL;
public ConfRefreshTokenBasedAccessTokenProvider() {
this.accessTokenTimer = new AccessTokenTimer();
}
public ConfRefreshTokenBasedAccessTokenProvider(Timer timer) {
this.accessTokenTimer = new AccessTokenTimer(timer);
}
@Override
public void setConf(Configuration conf) {
super.setConf(conf);
refreshToken = notNull(conf, (OAUTH_REFRESH_TOKEN_KEY));
accessTokenTimer.setExpiresInMSSinceEpoch(
notNull(conf, OAUTH_REFRESH_TOKEN_EXPIRES_KEY));
clientId = notNull(conf, OAUTH_CLIENT_ID_KEY);
refreshURL = notNull(conf, OAUTH_REFRESH_URL_KEY);
}
@Override
public synchronized String getAccessToken() throws IOException {
if(accessTokenTimer.shouldRefresh()) {
refresh();
}
return accessToken;
}
void refresh() throws IOException {
final List<NameValuePair> pairs = new ArrayList<>();
pairs.add(new BasicNameValuePair(GRANT_TYPE, REFRESH_TOKEN));
pairs.add(new BasicNameValuePair(REFRESH_TOKEN, refreshToken));
pairs.add(new BasicNameValuePair(CLIENT_ID, clientId));
final RequestConfig config = RequestConfig.custom()
.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
.setConnectionRequestTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
.setSocketTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
.build();
try (CloseableHttpClient client =
HttpClientBuilder.create().setDefaultRequestConfig(config).build()) {
final HttpPost httpPost = new HttpPost(refreshURL);
httpPost.setEntity(new UrlEncodedFormEntity(pairs, StandardCharsets.UTF_8));
httpPost.setHeader(HttpHeaders.CONTENT_TYPE, URLENCODED);
try (CloseableHttpResponse response = client.execute(httpPost)) {
final int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != HttpStatus.SC_OK) {
throw new IllegalArgumentException(
"Received invalid http response: " + statusCode + ", text = " +
EntityUtils.toString(response.getEntity()));
}
Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(
EntityUtils.toString(response.getEntity()));
String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
accessTokenTimer.setExpiresIn(newExpiresIn);
accessToken = responseBody.get(ACCESS_TOKEN).toString();
}
} catch (RuntimeException e) {
throw new IOException("Exception while refreshing access token", e);
} catch (Exception e) {
throw new IOException("Exception while refreshing access token", e);
}
}
public String getRefreshToken() {
return refreshToken;
}
}
| ConfRefreshTokenBasedAccessTokenProvider |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/enums/EnumAltIdTest.java | {
"start": 900,
"end": 1086
} | enum ____ {
A, B, C;
private LowerCaseEnum() { }
@Override
public String toString() { return name().toLowerCase(); }
}
protected static | LowerCaseEnum |
java | processing__processing4 | app/src/processing/app/syntax/im/InputMethodSupport.java | {
"start": 1616,
"end": 8920
} | interface ____ {
public void onCommitted(char c);
}
private Callback callback;
*/
static private final Attribute[] CUSTOM_IM_ATTRIBUTES = {
TextAttribute.INPUT_METHOD_HIGHLIGHT,
};
private JEditTextArea textArea;
private int committedCount = 0;
private AttributedString composedTextString;
public InputMethodSupport(JEditTextArea textArea) {
this.textArea = textArea;
textArea.enableInputMethods(true);
textArea.addInputMethodListener(this);
}
/*
public void setCallback(Callback callback) {
this.callback = callback;
}
*/
/////////////////////////////////////////////////////////////////////////////
// InputMethodRequest
/////////////////////////////////////////////////////////////////////////////
@Override
public Rectangle getTextLocation(TextHitInfo offset) {
Messages.log("#Called getTextLocation:" + offset);
int line = textArea.getCaretLine();
int offsetX = textArea.getCaretPosition() - textArea.getLineStartOffset(line);
// '+1' mean textArea.lineToY(line) + textArea.getPainter().getFontMetrics().getHeight().
// TextLayout#draw method need at least one height of font.
Rectangle rectangle = new Rectangle(textArea.offsetToX(line, offsetX), textArea.lineToY(line + 1), 0, 0);
Point location = textArea.getPainter().getLocationOnScreen();
rectangle.translate(location.x, location.y);
return rectangle;
}
@Override
public TextHitInfo getLocationOffset(int x, int y) {
return null;
}
@Override
public int getInsertPositionOffset() {
return -textArea.getCaretPosition();
}
@Override
public AttributedCharacterIterator getCommittedText(int beginIndex,
int endIndex, AttributedCharacterIterator.Attribute[] attributes) {
int length = endIndex - beginIndex;
String textAreaString = textArea.getText(beginIndex, length);
return new AttributedString(textAreaString).getIterator();
}
@Override
public int getCommittedTextLength() {
return committedCount;
}
@Override
public AttributedCharacterIterator cancelLatestCommittedText(
AttributedCharacterIterator.Attribute[] attributes) {
return null;
}
@Override
public AttributedCharacterIterator getSelectedText(
AttributedCharacterIterator.Attribute[] attributes) {
return null;
}
/////////////////////////////////////////////////////////////////////////////
// InputMethodListener
/////////////////////////////////////////////////////////////////////////////
/**
* Handles events from InputMethod.
*
* @param event event from Input Method.
*/
@Override
public void inputMethodTextChanged(InputMethodEvent event) {
if (Base.DEBUG) {
StringBuilder sb = new StringBuilder();
sb.append("#Called inputMethodTextChanged");
sb.append("\t ID: " + event.getID());
sb.append("\t timestamp: " + new java.util.Date(event.getWhen()));
sb.append("\t parmString: " + event.paramString());
Messages.log(sb.toString());
}
AttributedCharacterIterator text = event.getText(); // text = composedText + commitedText
committedCount = event.getCommittedCharacterCount();
// The caret for Input Method. If you type a character by a input method,
// original caret position will be incorrect. JEditTextArea is not
// implemented using AttributedString and TextLayout.
textArea.setCaretVisible(false);
// Japanese : if the enter key pressed, event.getText is null.
// Japanese : if first space key pressed, event.getText is null.
// Chinese (pinin) : if a space key pressed, event.getText is null.
// Taiwan (bopomofo): ?
// Korean : ?
// Korean Input Method
if (text != null && text.getEndIndex() - (text.getBeginIndex() + committedCount) <= 0) {
textArea.setCaretVisible(true);
}
// Japanese Input Method
if (text == null) {
textArea.setCaretVisible(true);
}
if (text != null) {
if (committedCount > 0) {
char[] insertion = new char[committedCount];
char c = text.first();
for (int i = 0; i < committedCount; i++) {
insertion[i] = c;
c = text.next();
}
// Insert this as a compound edit
textArea.setSelectedText(new String(insertion), true);
textArea.getInputHandler().handleInputMethodCommit();
}
CompositionTextPainter compositionPainter = textArea.getPainter().getCompositionTextPainter();
Messages.log("textArea.getCaretPosition() + committed_count: " + (textArea.getCaretPosition() + committedCount));
compositionPainter.setComposedTextLayout(getTextLayout(text, committedCount), textArea.getCaretPosition() + committedCount);
compositionPainter.setCaret(event.getCaret());
} else { // otherwise hide the input method
CompositionTextPainter compositionPainter = textArea.getPainter().getCompositionTextPainter();
compositionPainter.setComposedTextLayout(null, 0);
compositionPainter.setCaret(null);
}
event.consume();
textArea.repaint();
}
private TextLayout getTextLayout(AttributedCharacterIterator text, int committedCount) {
boolean antialias = Preferences.getBoolean("editor.smooth");
TextAreaPainter painter = textArea.getPainter();
// create attributed string with font info.
if (text.getEndIndex() - (text.getBeginIndex() + committedCount) > 0) {
composedTextString = new AttributedString(text, committedCount, text.getEndIndex(), CUSTOM_IM_ATTRIBUTES);
Font font = painter.getFontMetrics().getFont();
TextAreaDefaults defaults = textArea.getDefaults();
Color bgColor = defaults.lineHighlight ?
defaults.lineHighlightColor : defaults.bgcolor;
composedTextString.addAttribute(TextAttribute.FONT, font);
composedTextString.addAttribute(TextAttribute.FOREGROUND, defaults.fgcolor);
composedTextString.addAttribute(TextAttribute.BACKGROUND, bgColor);
} else {
composedTextString = new AttributedString("");
return null;
}
// set hint of antialiasing to render target.
Graphics2D g2d = (Graphics2D)painter.getGraphics();
g2d.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING,
antialias ?
RenderingHints.VALUE_TEXT_ANTIALIAS_ON :
RenderingHints.VALUE_TEXT_ANTIALIAS_OFF);
FontRenderContext frc = g2d.getFontRenderContext();
Messages.log("debug: FontRenderContext is Antialiased = " + frc.getAntiAliasingHint());
return new TextLayout(composedTextString.getIterator(), frc);
}
@Override
public void caretPositionChanged(InputMethodEvent event) {
event.consume();
}
/*
private void insertCharacter(char c) {
if (Base.DEBUG) {
Messages.log("debug: insertCharacter(char c) textArea.getCaretPosition()=" + textArea.getCaretPosition());
}
try {
textArea.getDocument().insertString(textArea.getCaretPosition(), Character.toString(c), null);
if (Base.DEBUG) {
Messages.log("debug: \t after:insertCharacter(char c) textArea.getCaretPosition()=" + textArea.getCaretPosition());
}
} catch (BadLocationException e) {
e.printStackTrace();
}
}
*/
}
| Callback |
java | google__dagger | javatests/dagger/functional/binds/subpackage/NotExposedInjectsMembers.java | {
"start": 684,
"end": 820
} | class ____ implements ExposedInjectsMembers {
@Inject Exposed exposed;
@Inject NotExposedInjectsMembers() {}
}
| NotExposedInjectsMembers |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/spi/BasicCollectionJavaType.java | {
"start": 2147,
"end": 16054
} | class ____<C extends Collection<E>, E>
extends AbstractJavaType<C>
implements BasicPluralJavaType<E> {
private final CollectionSemantics<C, E> semantics;
private final JavaType<E> componentJavaType;
public BasicCollectionJavaType(ParameterizedType type, JavaType<E> componentJavaType, CollectionSemantics<C, E> semantics) {
super( type, new CollectionMutabilityPlan<>( componentJavaType, semantics ) );
this.semantics = semantics;
this.componentJavaType = componentJavaType;
}
@Override
public JavaType<E> getElementJavaType() {
return componentJavaType;
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators indicators) {
if ( componentJavaType instanceof UnknownBasicJavaType ) {
throw new MappingException("Basic collection has element type '"
+ componentJavaType.getTypeName()
+ "' which is not a known basic type"
+ " (attribute is not annotated '@ElementCollection', '@OneToMany', or '@ManyToMany')");
}
// Always determine the recommended type to make sure this is a valid basic java type
// (even though we only use this inside the if block, we want it to throw here if something wrong)
final var recommendedComponentJdbcType = componentJavaType.getRecommendedJdbcType( indicators );
final var typeConfiguration = indicators.getTypeConfiguration();
return typeConfiguration.getJdbcTypeRegistry()
.resolveTypeConstructorDescriptor(
indicators.getPreferredSqlTypeCodeForArray( recommendedComponentJdbcType.getDefaultSqlTypeCode() ),
typeConfiguration.getBasicTypeRegistry().resolve( componentJavaType, recommendedComponentJdbcType ),
ColumnTypeInformation.EMPTY
);
}
public CollectionSemantics<C, E> getSemantics() {
return semantics;
}
@Override
public boolean isWider(JavaType<?> javaType) {
// Support binding single element value
return this == javaType || componentJavaType == javaType;
}
@Override
public BasicType<?> resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
BasicType<E> elementType,
ColumnTypeInformation columnTypeInformation,
JdbcTypeIndicators stdIndicators) {
final var elementJavaType = elementType.getJavaTypeDescriptor();
final var elementJavaTypeClass = elementJavaType.getJavaTypeClass();
if ( elementType instanceof BasicPluralType<?, ?>
|| elementJavaTypeClass != null && elementJavaTypeClass.isArray() ) {
return null;
}
final var collectionJavaType = collectionJavaType( typeConfiguration, elementJavaType );
final int elementSqlTypeCode = elementType.getJdbcType().getDefaultSqlTypeCode();
final int arrayTypeCode = stdIndicators.getPreferredSqlTypeCodeForArray( elementSqlTypeCode );
final var arrayJdbcType =
typeConfiguration.getJdbcTypeRegistry()
.resolveTypeConstructorDescriptor( arrayTypeCode, elementType, columnTypeInformation );
final var valueConverter = elementType.getValueConverter();
if ( valueConverter == null ) {
return typeConfiguration.getBasicTypeRegistry()
.resolve( collectionJavaType, arrayJdbcType,
() -> new BasicCollectionType<>( elementType, arrayJdbcType, collectionJavaType ) );
}
else {
return convertedCollectionType(
typeConfiguration,
elementType,
arrayJdbcType,
collectionJavaType,
valueConverter
);
}
}
private static <C extends Collection<E>, E, R> ConvertedBasicCollectionType<C, E> convertedCollectionType(
TypeConfiguration typeConfiguration,
BasicType<E> elementType,
JdbcType arrayJdbcType,
BasicCollectionJavaType<C, E> collectionJavaType,
BasicValueConverter<E, R> elementValueConverter) {
final var elementRelationalJavaType = elementValueConverter.getRelationalJavaType();
final var relationalJavaType =
typeConfiguration.getJavaTypeRegistry()
.resolveArrayDescriptor( elementRelationalJavaType.getJavaTypeClass() );
return new ConvertedBasicCollectionType<>(
elementType,
arrayJdbcType,
collectionJavaType,
new CollectionConverter<>( elementValueConverter, collectionJavaType, relationalJavaType )
);
}
private BasicCollectionJavaType<C, E> collectionJavaType(
TypeConfiguration typeConfiguration,
JavaType<E> elementJavaType) {
if ( componentJavaType == elementJavaType ) {
return this;
}
else {
final var parameterizedType = (ParameterizedType) getJavaType();
final var collectionJavaType =
new BasicCollectionJavaType<>( parameterizedType, elementJavaType, semantics );
// Register the collection type as that will be resolved in the next step
typeConfiguration.getJavaTypeRegistry().addDescriptor( collectionJavaType );
return collectionJavaType;
}
}
@Override
public String extractLoggableRepresentation(C value) {
if ( value == null ) {
return "null";
}
final var iterator = value.iterator();
if ( !iterator.hasNext() ) {
return "[]";
}
final var string = new StringBuilder();
string.append( '[' );
do {
final E element = iterator.next();
string.append( componentJavaType.extractLoggableRepresentation( element ) );
if ( !iterator.hasNext() ) {
return string.append( ']' ).toString();
}
string.append( ", " );
} while ( true );
}
@Override
public boolean areEqual(C one, C another) {
if ( one == null && another == null ) {
return true;
}
if ( one == null || another == null ) {
return false;
}
if ( one.size() != another.size() ) {
return false;
}
switch ( semantics.getCollectionClassification() ) {
case ARRAY:
case LIST:
case ORDERED_SET:
case SORTED_SET:
final var iterator1 = one.iterator();
final var iterator2 = another.iterator();
while ( iterator1.hasNext() ) {
if ( !componentJavaType.areEqual( iterator1.next(), iterator2.next() ) ) {
return false;
}
}
default: {
OUTER: for ( E e1 : one ) {
for ( E e2 : another ) {
if ( componentJavaType.areEqual( e1, e2 ) ) {
continue OUTER;
}
}
return false;
}
}
}
return true;
}
@Override
public int extractHashCode(C value) {
int result = 0;
if ( value != null && !value.isEmpty() ) {
for ( E element : value ) {
if ( element != null ) {
result += componentJavaType.extractHashCode( element );
}
}
}
return result;
}
@Override
public String toString(C value) {
if ( value == null ) {
return null;
}
final var string = new StringBuilder();
string.append( '{' );
String glue = "";
for ( E v : value ) {
string.append( glue );
if ( v == null ) {
string.append( "null" );
glue = ",";
continue;
}
string.append( '"' );
String valstr = componentJavaType.toString( v );
// using replaceAll is a shorter, but much slower way to do this
for (int i = 0, len = valstr.length(); i < len; i ++ ) {
char c = valstr.charAt( i );
// Surrogate pairs. This is how they're done.
if (c == '\\' || c == '"') {
string.append( '\\' );
}
string.append( c );
}
string.append( '"' );
glue = ",";
}
string.append( '}' );
return string.toString();
}
@Override
public C fromString(CharSequence charSequence) {
if ( charSequence == null ) {
return null;
}
java.util.ArrayList<String> list = new java.util.ArrayList<>();
StringBuilder sb = null;
char lastChar = charSequence.charAt( charSequence.length() - 1 );
char firstChar = charSequence.charAt( 0 );
if ( firstChar != '{' || lastChar != '}' ) {
throw new IllegalArgumentException( "Cannot parse given string into array of strings. First and last character must be { and }" );
}
int len = charSequence.length();
boolean inquote = false;
for ( int i = 1; i < len; i ++ ) {
char c = charSequence.charAt( i );
if ( c == '"' ) {
if (inquote) {
list.add( sb.toString() );
}
else {
sb = new StringBuilder();
}
inquote = !inquote;
continue;
}
else if ( !inquote ) {
if ( Character.isWhitespace( c ) ) {
continue;
}
else if ( c == ',' ) {
// treat no-value between commas to mean null
if ( sb == null ) {
list.add( null );
}
else {
sb = null;
}
continue;
}
else {
// i + 4, because there has to be a comma or closing brace after null
if ( i + 4 < len
&& charSequence.charAt( i ) == 'n'
&& charSequence.charAt( i + 1 ) == 'u'
&& charSequence.charAt( i + 2 ) == 'l'
&& charSequence.charAt( i + 3 ) == 'l') {
list.add( null );
i += 4;
continue;
}
if (i + 1 == len) {
break;
}
throw new IllegalArgumentException( "Cannot parse given string into array of strings."
+ " Outside of quote, but neither whitespace, comma, array end, nor null found." );
}
}
else if ( c == '\\' && i + 2 < len && (charSequence.charAt( i + 1 ) == '\\'
|| charSequence.charAt( i + 1 ) == '"') ) {
c = charSequence.charAt( ++i );
}
// If there is ever a null-pointer here, the if-else logic before is incomplete
sb.append( c );
}
final C result = semantics.instantiateRaw( list.size(), null );
for ( int i = 0; i < list.size(); i ++ ) {
if ( list.get( i ) != null ) {
result.add( componentJavaType.fromString( list.get( i ) ) );
}
}
return result;
}
@Override
public <X> X unwrap(C value, Class<X> type, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( type.isInstance( value ) ) {
//noinspection unchecked
return (X) value;
}
else if ( type == byte[].class ) {
// byte[] can only be requested if the value should be serialized
return (X) SerializationHelper.serialize( asArrayList( value ) );
}
else if ( type == BinaryStream.class ) {
// BinaryStream can only be requested if the value should be serialized
//noinspection unchecked
return (X) new ArrayBackedBinaryStream( SerializationHelper.serialize( asArrayList( value ) ) );
}
else if ( type == Object[].class ) {
//noinspection unchecked
return (X) value.toArray();
}
else if ( Object[].class.isAssignableFrom( type ) ) {
final var preferredJavaTypeClass = type.getComponentType();
final Object[] unwrapped = (Object[]) newInstance( preferredJavaTypeClass, value.size() );
int i = 0;
for ( E element : value ) {
unwrapped[i] = componentJavaType.unwrap( element, preferredJavaTypeClass, options );
i++;
}
//noinspection unchecked
return (X) unwrapped;
}
else if ( type.isArray() ) {
final var preferredJavaTypeClass = type.getComponentType();
//noinspection unchecked
final X unwrapped = (X) newInstance( preferredJavaTypeClass, value.size() );
int i = 0;
for ( E element : value ) {
set( unwrapped, i, componentJavaType.unwrap( element, preferredJavaTypeClass, options ) );
i++;
}
return unwrapped;
}
throw unknownUnwrap( type );
}
@Override
public <X> C wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( value instanceof java.sql.Array array ) {
try {
//noinspection unchecked
value = (X) array.getArray();
}
catch ( SQLException ex ) {
// This basically shouldn't happen unless you've lost connection to the database.
throw new HibernateException( ex );
}
}
if ( value instanceof Object[] raw ) {
final C wrapped = semantics.instantiateRaw( raw.length, null );
if ( componentJavaType.getJavaTypeClass()
.isAssignableFrom( value.getClass().getComponentType() ) ) {
for ( Object o : raw ) {
//noinspection unchecked
wrapped.add( (E) o );
}
}
else {
for ( Object o : raw ) {
wrapped.add( componentJavaType.wrap( o, options ) );
}
}
return wrapped;
}
else if ( value instanceof byte[] bytes ) {
// When the value is a byte[], this is a deserialization request
//noinspection unchecked
return fromCollection( (ArrayList<E>) SerializationHelper.deserialize( bytes ), options );
}
else if ( value instanceof BinaryStream stream ) {
// When the value is a BinaryStream, this is a deserialization request
//noinspection unchecked
return fromCollection( (ArrayList<E>) SerializationHelper.deserialize( stream.getBytes() ), options );
}
else if ( value instanceof Collection<?> ) {
//noinspection unchecked
return fromCollection( (Collection<E>) value, options );
}
else if ( value.getClass().isArray() ) {
final int length = getLength( value );
final C wrapped = semantics.instantiateRaw( length, null );
for ( int i = 0; i < length; i++ ) {
wrapped.add( componentJavaType.wrap( get( value, i ), options ) );
}
return wrapped;
}
else if ( getElementJavaType().isInstance( value ) ) {
// Support binding a single element as parameter value
final C wrapped = semantics.instantiateRaw( 1, null );
//noinspection unchecked
wrapped.add( (E) value );
return wrapped;
}
throw unknownWrap( value.getClass() );
}
private ArrayList<E> asArrayList(C value) {
if ( value instanceof ArrayList ) {
//noinspection unchecked
return (ArrayList<E>) value;
}
return new ArrayList<>( value );
}
private C fromCollection(Collection<E> value, WrapperOptions options) {
final C collection;
switch ( semantics.getCollectionClassification() ) {
case SET:
// Keep consistent with CollectionMutabilityPlan::deepCopy
//noinspection unchecked
collection = (C) new LinkedHashSet<>( value.size() );
break;
case LIST:
case BAG:
if ( value instanceof ArrayList<E> arrayList ) {
arrayList.replaceAll( e -> componentJavaType.wrap( e, options ) );
//noinspection unchecked
return (C) value;
}
default:
collection = semantics.instantiateRaw( value.size(), null );
break;
}
for ( E e : value ) {
collection.add( componentJavaType.wrap( e, options ) );
}
return collection;
}
private static | BasicCollectionJavaType |
java | apache__camel | components/camel-aws/camel-aws2-redshift/src/main/java/org/apache/camel/component/aws2/redshift/data/RedshiftData2Endpoint.java | {
"start": 1863,
"end": 4058
} | class ____ extends DefaultEndpoint implements EndpointServiceLocation {
private RedshiftDataClient awsRedshiftDataClient;
@UriParam
private RedshiftData2Configuration configuration;
public RedshiftData2Endpoint(String uri, Component component, RedshiftData2Configuration configuration) {
super(uri, component);
this.configuration = configuration;
}
@Override
public RedshiftData2Component getComponent() {
return (RedshiftData2Component) super.getComponent();
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("You cannot receive messages from this endpoint");
}
@Override
public Producer createProducer() throws Exception {
return new RedshiftData2Producer(this);
}
@Override
public void doStart() throws Exception {
super.doStart();
awsRedshiftDataClient = configuration.getAwsRedshiftDataClient() != null
? configuration.getAwsRedshiftDataClient()
: RedshiftData2ClientFactory.getRedshiftDataClient(configuration).getRedshiftDataClient();
}
@Override
public void doStop() throws Exception {
if (ObjectHelper.isEmpty(configuration.getAwsRedshiftDataClient())) {
if (awsRedshiftDataClient != null) {
awsRedshiftDataClient.close();
}
}
super.doStop();
}
public RedshiftData2Configuration getConfiguration() {
return configuration;
}
public RedshiftDataClient getAwsRedshiftDataClient() {
return awsRedshiftDataClient;
}
@Override
public String getServiceUrl() {
if (!configuration.isOverrideEndpoint()) {
if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
return configuration.getRegion();
}
} else if (ObjectHelper.isNotEmpty(configuration.getUriEndpointOverride())) {
return configuration.getUriEndpointOverride();
}
return null;
}
@Override
public String getServiceProtocol() {
return "redshift";
}
}
| RedshiftData2Endpoint |
java | apache__maven | api/maven-api-plugin/src/test/java/org/apache/maven/api/plugin/descriptor/another/ExtendedPluginDescriptorTest.java | {
"start": 1106,
"end": 1268
} | class ____ {
/**
* A subclass of the generated class {@link PluginDescriptor} that adds an additional field.
*/
static | ExtendedPluginDescriptorTest |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/test-module-dependency/app/src/main/java/org/acme/quarkus/HelloResource.java | {
"start": 597,
"end": 628
} | class ____ not visible";
}
}
| is |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareCondition.java | {
"start": 776,
"end": 11698
} | class ____ extends AbstractCompareCondition {
public static final String TYPE = "array_compare";
private final String arrayPath;
private final String path;
private final Op op;
private final Object value;
private final Quantifier quantifier;
ArrayCompareCondition(String arrayPath, String path, Op op, Object value, Quantifier quantifier, Clock clock) {
super(TYPE, clock);
this.arrayPath = arrayPath;
this.path = path;
this.op = op;
this.value = value;
this.quantifier = quantifier;
}
public String getArrayPath() {
return arrayPath;
}
public String getPath() {
return path;
}
public ArrayCompareCondition.Op getOp() {
return op;
}
public Object getValue() {
return value;
}
public ArrayCompareCondition.Quantifier getQuantifier() {
return quantifier;
}
public static ArrayCompareCondition parse(Clock clock, String watchId, XContentParser parser) throws IOException {
if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. expected an object but found [{}] " + "instead",
TYPE,
watchId,
parser.currentToken()
);
}
String arrayPath = null;
String path = null;
Op op = null;
Object value = null;
Quantifier quantifier = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
arrayPath = parser.currentName();
} else if (arrayPath == null) {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. expected a field indicating the "
+ "compared path, but found [{}] instead",
TYPE,
watchId,
token
);
} else if (token == XContentParser.Token.START_OBJECT) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
if (parser.currentName().equals("path")) {
parser.nextToken();
path = parser.text();
} else {
try {
op = Op.resolve(parser.currentName());
} catch (IllegalArgumentException iae) {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. unknown comparison " + "operator [{}]",
TYPE,
watchId,
parser.currentName(),
iae
);
}
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
if (parser.currentName().equals("value")) {
token = parser.nextToken();
if (op.supportsStructures() == false
&& token.isValue() == false
&& token != XContentParser.Token.VALUE_NULL) {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. "
+ "compared value for [{}] with operation [{}] must either be a numeric, string, "
+ "boolean or null value, but found [{}] instead",
TYPE,
watchId,
path,
op.name().toLowerCase(Locale.ROOT),
token
);
}
value = XContentUtils.readValue(parser, token);
} else if (parser.currentName().equals("quantifier")) {
parser.nextToken();
try {
quantifier = Quantifier.resolve(parser.text());
} catch (IllegalArgumentException iae) {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. "
+ "unknown comparison quantifier [{}]",
TYPE,
watchId,
parser.text(),
iae
);
}
} else {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. "
+ "expected a field indicating the comparison value or comparison quantifier, but found"
+ " [{}] instead",
TYPE,
watchId,
parser.currentName()
);
}
} else {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. expected a "
+ "field indicating the comparison value or comparison quantifier, but found [{}] instead",
TYPE,
watchId,
token
);
}
}
} else {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. expected an object "
+ "for field [{}] but found [{}] instead",
TYPE,
watchId,
op.id(),
token
);
}
}
} else {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. expected a field indicating"
+ " the compared path or a comparison operator, but found [{}] instead",
TYPE,
watchId,
token
);
}
}
} else {
throw new ElasticsearchParseException(
"could not parse [{}] condition for watch [{}]. expected an object for field [{}] " + "but found [{}] instead",
TYPE,
watchId,
path,
token
);
}
}
if (path == null) {
path = "";
}
if (quantifier == null) {
quantifier = Quantifier.SOME;
}
return new ArrayCompareCondition(arrayPath, path, op, value, quantifier, clock);
}
public Result doExecute(Map<String, Object> model, Map<String, Object> resolvedValues) {
Object configuredValue = resolveConfiguredValue(resolvedValues, model, value);
Object object = ObjectPath.eval(arrayPath, model);
if (object != null && (object instanceof List) == false) {
throw new IllegalStateException("array path " + arrayPath + " did not evaluate to array, was " + object);
}
@SuppressWarnings("unchecked")
List<Object> resolvedArray = object != null ? (List<Object>) object : Collections.emptyList();
List<Object> resolvedValue = new ArrayList<>(resolvedArray.size());
for (int i = 0; i < resolvedArray.size(); i++) {
resolvedValue.add(ObjectPath.eval(path, resolvedArray.get(i)));
}
resolvedValues.put(arrayPath, resolvedArray);
return new Result(resolvedValues, TYPE, quantifier.eval(resolvedValue, configuredValue, op));
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ArrayCompareCondition that = (ArrayCompareCondition) o;
return Objects.equals(getArrayPath(), that.getArrayPath())
&& Objects.equals(getPath(), that.getPath())
&& Objects.equals(getOp(), that.getOp())
&& Objects.equals(getValue(), that.getValue())
&& Objects.equals(getQuantifier(), that.getQuantifier());
}
@Override
public int hashCode() {
return Objects.hash(arrayPath, path, op, value, quantifier);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject()
.startObject(arrayPath)
.field("path", path)
.startObject(op.id())
.field("value", value)
.field("quantifier", quantifier.id())
.endObject()
.endObject()
.endObject();
}
public | ArrayCompareCondition |
java | apache__avro | lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java | {
"start": 5198,
"end": 6214
} | class ____ extends Reducer<Text, IntWritable, AvroKey<TextStats>, NullWritable> {
private AvroKey<TextStats> mStats;
private AvroMultipleOutputs amos;
@Override
protected void setup(Context context) {
mStats = new AvroKey<>(null);
amos = new AvroMultipleOutputs(context);
}
@Override
protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
throws IOException, InterruptedException {
TextStats record = new TextStats();
record.setCount(0);
for (IntWritable count : counts) {
record.setCount(record.getCount() + count.get());
}
record.setName(line.toString());
mStats.datum(record);
context.write(mStats, NullWritable.get());
amos.sync("myavro3", "myavro3");
amos.write("myavro3", mStats, NullWritable.get());
}
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
amos.close();
}
}
private static | SpecificStatsReducer |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/DataNamespaceValidationTest.java | {
"start": 404,
"end": 1403
} | class ____ {
private static final String ITEM_NAME = "Test Name";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Item.class, OtherItem.class, Globals.class)
.addAsResource(new StringAsset(
"{@io.quarkus.qute.deployment.typesafe.Item item}\n" +
"{#for item in item.otherItems}\n" +
" {data:item.name}\n" +
"{/for}\n"),
"templates/item.html"));
@Inject
Template item;
@Test
public void testCorrectParamDeclarationIsAssumed() {
// succeed as global item declaration is overridden
assertEquals(
ITEM_NAME,
item.data("item", new Item(ITEM_NAME, new OtherItem())).render().trim());
}
public static | DataNamespaceValidationTest |
java | quarkusio__quarkus | extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/exc/ExceptionInterceptor.java | {
"start": 483,
"end": 1002
} | class ____ implements ServerInterceptor, Prioritized {
@Inject
ExceptionHandlerProvider provider;
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(
ServerCall<ReqT, RespT> call,
Metadata headers,
ServerCallHandler<ReqT, RespT> next) {
return provider.createHandler(next.startCall(call, headers), call, headers);
}
@Override
public int getPriority() {
return Interceptors.EXCEPTION_HANDLER;
}
}
| ExceptionInterceptor |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/google/SortedMapGenerators.java | {
"start": 2373,
"end": 3764
} | class ____
implements TestListGenerator<Entry<String, Integer>> {
@Override
public SampleElements<Entry<String, Integer>> samples() {
return new SampleElements<>(
mapEntry("foo", 5),
mapEntry("bar", 3),
mapEntry("baz", 17),
mapEntry("quux", 1),
mapEntry("toaster", -2));
}
@SuppressWarnings("unchecked")
@Override
public Entry<String, Integer>[] createArray(int length) {
return (Entry<String, Integer>[]) new Entry<?, ?>[length];
}
@Override
public Iterable<Entry<String, Integer>> order(List<Entry<String, Integer>> insertionOrder) {
return new Ordering<Entry<String, Integer>>() {
@Override
public int compare(Entry<String, Integer> left, Entry<String, Integer> right) {
return left.getKey().compareTo(right.getKey());
}
}.sortedCopy(insertionOrder);
}
@Override
public List<Entry<String, Integer>> create(Object... elements) {
ImmutableSortedMap.Builder<String, Integer> builder = ImmutableSortedMap.naturalOrder();
for (Object o : elements) {
@SuppressWarnings("unchecked")
Entry<String, Integer> entry = (Entry<String, Integer>) checkNotNull(o);
builder.put(entry);
}
return builder.build().entrySet().asList();
}
}
public static | ImmutableSortedMapEntryListGenerator |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/plugins/spi/SPIClassIterator.java | {
"start": 5713,
"end": 6730
} | class ____ from URL: " + url, ioe);
}
if (lines.isEmpty() == false) {
this.linesIterator = lines.iterator();
return true;
}
}
return false;
}
@Override
public boolean hasNext() {
return linesIterator.hasNext() || loadNextProfile();
}
@Override
public Class<? extends S> next() {
// hasNext() implicitely loads the next profile, so it is essential to call this here!
if (hasNext() == false) {
throw new NoSuchElementException();
}
assert linesIterator.hasNext();
final String c = linesIterator.next();
try {
// don't initialize the class (pass false as 2nd parameter):
return Class.forName(c, false, loader).asSubclass(clazz);
} catch (ClassNotFoundException cnfe) {
throw new ServiceConfigurationError(
String.format(
Locale.ROOT,
"An SPI | list |
java | ReactiveX__RxJava | src/jmh/java/io/reactivex/rxjava3/core/JustAsyncPerf.java | {
"start": 1079,
"end": 5164
} | class ____ {
Flowable<Integer> subscribeOnFlowable;
Flowable<Integer> observeOnFlowable;
Flowable<Integer> pipelineFlowable;
Observable<Integer> subscribeOnObservable;
Observable<Integer> observeOnObservable;
Observable<Integer> pipelineObservable;
Single<Integer> observeOnSingle;
Single<Integer> subscribeOnSingle;
Single<Integer> pipelineSingle;
Completable observeOnCompletable;
Completable subscribeOnCompletable;
Completable pipelineCompletable;
Maybe<Integer> observeOnMaybe;
Maybe<Integer> subscribeOnMaybe;
Maybe<Integer> pipelineMaybe;
@Setup
public void setup() {
Scheduler s = Schedulers.single();
Scheduler s2 = new SingleScheduler();
subscribeOnFlowable = Flowable.just(1).subscribeOn(s);
observeOnFlowable = Flowable.just(1).observeOn(s);
pipelineFlowable = Flowable.just(1).subscribeOn(s).observeOn(s2);
// ----
subscribeOnObservable = Observable.just(1).subscribeOn(s);
observeOnObservable = Observable.just(1).observeOn(s);
pipelineObservable = Observable.just(1).subscribeOn(s).observeOn(s2);
// ----
observeOnSingle = Single.just(1).observeOn(s);
subscribeOnSingle = Single.just(1).subscribeOn(s);
pipelineSingle = Single.just(1).subscribeOn(s).observeOn(s2);
// ----
observeOnCompletable = Completable.complete().observeOn(s);
subscribeOnCompletable = Completable.complete().subscribeOn(s);
pipelineCompletable = Completable.complete().subscribeOn(s).observeOn(s2);
// ----
observeOnMaybe = Maybe.just(1).observeOn(s);
subscribeOnMaybe = Maybe.just(1).subscribeOn(s);
pipelineMaybe = Maybe.just(1).subscribeOn(s).observeOn(s2);
}
@Benchmark
public void subscribeOnFlowable(Blackhole bh) {
subscribeOnFlowable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void observeOnFlowable(Blackhole bh) {
observeOnFlowable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void pipelineFlowable(Blackhole bh) {
pipelineFlowable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void subscribeOnObservable(Blackhole bh) {
subscribeOnObservable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void observeOnObservable(Blackhole bh) {
observeOnObservable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void pipelineObservable(Blackhole bh) {
pipelineObservable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void observeOnSingle(Blackhole bh) {
observeOnSingle.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void subscribeOnSingle(Blackhole bh) {
subscribeOnSingle.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void pipelineSingle(Blackhole bh) {
pipelineSingle.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void observeOnCompletable(Blackhole bh) {
observeOnCompletable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void subscribeOnCompletable(Blackhole bh) {
subscribeOnCompletable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void pipelineCompletable(Blackhole bh) {
pipelineCompletable.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void observeOnMaybe(Blackhole bh) {
observeOnMaybe.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void subscribeOnMaybe(Blackhole bh) {
subscribeOnMaybe.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
@Benchmark
public void pipelineMaybe(Blackhole bh) {
pipelineMaybe.subscribeWith(new PerfAsyncConsumer(bh)).await(1);
}
}
| JustAsyncPerf |
java | apache__camel | components/camel-aws/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/integration/S3CreateDownloadLinkWithProvidedPresignerOperationIT.java | {
"start": 1669,
"end": 4039
} | class ____ extends Aws2S3Base {
@BindToRegistry("amazonS3Presigner")
S3Presigner presigner
= S3Presigner.builder()
.credentialsProvider(StaticCredentialsProvider.create(AwsBasicCredentials.create("xxx", "yyy")))
.region(Region.of(Region.EU_WEST_1.toString())).build();
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@SuppressWarnings("unchecked")
@Test
public void sendIn() throws Exception {
result.expectedMessageCount(1);
template.send("direct:listBucket", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.listBuckets);
}
});
template.send("direct:addObject", ExchangePattern.InOnly, new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.KEY, "CamelUnitTest2");
exchange.getIn().setBody("This is my bucket content.");
exchange.getIn().removeHeader(AWS2S3Constants.S3_OPERATION);
}
});
Exchange ex1 = template.request("direct:createDownloadLink", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.KEY, "CamelUnitTest2");
exchange.getIn().setHeader(AWS2S3Constants.BUCKET_NAME, name.get());
exchange.getIn().setHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.createDownloadLink);
}
});
assertNotNull(ex1.getMessage().getBody());
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String awsEndpoint = "aws2-s3://" + name.get() + "?autoCreateBucket=true";
from("direct:listBucket").to(awsEndpoint);
from("direct:addObject").to(awsEndpoint);
from("direct:createDownloadLink").to(awsEndpoint)
.to("mock:result");
}
};
}
}
| S3CreateDownloadLinkWithProvidedPresignerOperationIT |
java | alibaba__druid | druid-spring-boot-3-starter/src/main/java/com/alibaba/druid/spring/boot3/autoconfigure/properties/DruidStatProperties.java | {
"start": 1667,
"end": 3318
} | class ____ {
/**
* Enable StatViewServlet, default false.
*/
private boolean enabled;
private String urlPattern;
private String allow;
private String deny;
private String loginUsername;
private String loginPassword;
private String resetEnable;
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public String getUrlPattern() {
return urlPattern;
}
public void setUrlPattern(String urlPattern) {
this.urlPattern = urlPattern;
}
public String getAllow() {
return allow;
}
public void setAllow(String allow) {
this.allow = allow;
}
public String getDeny() {
return deny;
}
public void setDeny(String deny) {
this.deny = deny;
}
public String getLoginUsername() {
return loginUsername;
}
public void setLoginUsername(String loginUsername) {
this.loginUsername = loginUsername;
}
public String getLoginPassword() {
return loginPassword;
}
public void setLoginPassword(String loginPassword) {
this.loginPassword = loginPassword;
}
public String getResetEnable() {
return resetEnable;
}
public void setResetEnable(String resetEnable) {
this.resetEnable = resetEnable;
}
}
public static | StatViewServlet |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java | {
"start": 3250,
"end": 4331
} | class ____ extends ReflectData {
private static final AllowNull INSTANCE = new AllowNull();
/** Return the singleton instance. */
public static AllowNull get() {
return INSTANCE;
}
@Override
protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
Schema schema = super.createFieldSchema(field, names);
if (field.getType().isPrimitive()) {
// for primitive values, such as int, a null will result in a
// NullPointerException at read time
return schema;
}
return makeNullable(schema);
}
}
private static final ReflectData INSTANCE = new ReflectData();
static {
addLogicalTypeConversions(INSTANCE);
}
/** For subclasses. Applications normally use {@link ReflectData#get()}. */
public ReflectData() {
}
/** Construct with a particular classloader. */
public ReflectData(ClassLoader classLoader) {
super(classLoader);
}
/** Return the singleton instance. */
public static ReflectData get() {
return INSTANCE;
}
/**
* Cause a | AllowNull |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JGroupsEndpointBuilderFactory.java | {
"start": 13101,
"end": 13997
} | interface ____
extends
JGroupsEndpointConsumerBuilder,
JGroupsEndpointProducerBuilder {
default AdvancedJGroupsEndpointBuilder advanced() {
return (AdvancedJGroupsEndpointBuilder) this;
}
/**
* Specifies configuration properties of the JChannel used by the
* endpoint.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param channelProperties the value to set
* @return the dsl builder
*/
default JGroupsEndpointBuilder channelProperties(String channelProperties) {
doSetProperty("channelProperties", channelProperties);
return this;
}
}
/**
* Advanced builder for endpoint for the JGroups component.
*/
public | JGroupsEndpointBuilder |
java | apache__camel | components/camel-google/camel-google-storage/src/test/java/org/apache/camel/component/google/storage/localstorage/FakeStorageRpc.java | {
"start": 2582,
"end": 2865
} | class ____ been extended from
//https://github.com/googleapis/java-storage-nio/blob/master/google-cloud-nio/src/main/java/com/google/cloud/storage/contrib/nio/testing/FakeStorageRpc.java
/**
* A bare-bones in-memory implementation of StorageRpc, meant for testing.
*
* <p>
* This | has |
java | quarkusio__quarkus | extensions/spring-web/resteasy-reactive/tests/src/test/java/io/quarkus/spring/web/resteasy/reactive/test/SomeClass.java | {
"start": 62,
"end": 375
} | class ____ {
private String message;
public SomeClass() {
}
public SomeClass(final String message) {
this.message = message;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
| SomeClass |
java | apache__flink | flink-table/flink-sql-client/src/test/java/org/apache/flink/table/client/cli/CliUtilsTest.java | {
"start": 1129,
"end": 2311
} | class ____ {
@TempDir private Path realFolder;
@TempDir private Path linkFolder;
@Test
void testCreateFileRealDir() {
Path realDirHistoryFile = Paths.get(realFolder.toFile().getPath(), "history.file");
CliUtils.createFile(realDirHistoryFile);
assertThat(Files.exists(realDirHistoryFile)).isTrue();
}
@Test
void testCreateFileLinkDir() throws IOException {
Path link = Paths.get(linkFolder.toFile().getPath(), "link");
Files.createSymbolicLink(link, realFolder);
Path linkDirHistoryFile = Paths.get(link.toAbsolutePath().toString(), "history.file");
Path realLinkDirHistoryFile = Paths.get(realFolder.toFile().getPath(), "history.file");
CliUtils.createFile(linkDirHistoryFile);
assertThat(Files.exists(linkDirHistoryFile)).isTrue();
assertThat(Files.exists(realLinkDirHistoryFile)).isTrue();
}
@Test
void testCreateFileSubDir() {
Path subDirHistoryFile = Paths.get(realFolder.toFile().getPath(), "subdir", "history.file");
CliUtils.createFile(subDirHistoryFile);
assertThat(Files.exists(subDirHistoryFile)).isTrue();
}
}
| CliUtilsTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java | {
"start": 24348,
"end": 24654
} | class ____ key that implements the <code>org.apache.kafka.common.serialization.Serializer</code> interface.";
/** <code>value.serializer</code> */
public static final String VALUE_SERIALIZER_CLASS_CONFIG = "value.serializer";
public static final String VALUE_SERIALIZER_CLASS_DOC = "Serializer | for |
java | apache__camel | dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/update/CamelUpdateMixin.java | {
"start": 1123,
"end": 4502
} | class ____ {
@CommandLine.Parameters(description = "The version to which the Camel project should be updated.", arity = "1")
String version;
@CommandLine.Option(names = { "--openRewriteVersion" },
description = "The version of OpenRewrite to use during the update process.",
defaultValue = "6.0.4")
String openRewriteVersion;
@CommandLine.Option(names = { "--camelArtifact" },
description = "The Maven artifact coordinates for the Camel upgrade recipes.",
defaultValue = "camel-upgrade-recipes")
String camelArtifactCoordinates;
@CommandLine.Option(names = { "--camelSpringBootArtifact" },
description = "The Maven artifact coordinates for the Camel Spring Boot upgrade recipes.",
defaultValue = "camel-spring-boot-upgrade-recipes")
String camelSpringBootArtifactCoordinates;
@CommandLine.Option(names = { "--debug" },
defaultValue = "false",
description = "Enables debug logging if set to true.")
boolean debug;
@CommandLine.Option(names = { "--quarkusMavenPluginVersion" },
description = "The version of the Quarkus Maven plugin to use.",
defaultValue = RuntimeType.QUARKUS_VERSION)
String quarkusMavenPluginVersion;
@CommandLine.Option(names = { "--quarkusMavenPluginGroupId" },
description = "The group ID of the Quarkus Maven plugin.",
defaultValue = "io.quarkus")
String quarkusMavenPluginGroupId;
@CommandLine.Option(names = { "--dryRun" },
description = "If set to true, performs a dry run of the update process without making any changes.",
defaultValue = "false")
boolean dryRun;
@CommandLine.Option(names = { "--runtime" },
completionCandidates = RuntimeCompletionCandidates.class,
defaultValue = "camel-main",
converter = RuntimeTypeConverter.class,
description = "Runtime (${COMPLETION-CANDIDATES})")
RuntimeType runtime = RuntimeType.main;
@CommandLine.Option(names = { "--repo", "--repos" },
description = "Additional maven repositories for download on-demand (Use commas to separate multiple repositories)")
String repos;
@CommandLine.Option(names = { "--extraActiveRecipes" },
description = "Comma separated list of recipes to be executed after the Camel one, " +
"make sure the artifact containing the recipes is added via extraRecipeArtifactCoordinates")
List<String> extraActiveRecipes;
@CommandLine.Option(names = { "--extraRecipeArtifactCoordinates" },
description = "Comma separated list of artifact coordinates containing extraActiveRecipes, " +
"ex.my.org:recipes:1.0.0")
List<String> extraRecipeArtifactCoordinates;
@CommandLine.Option(names = { "--upgradeTimeout" },
description = "Time to wait, in seconds, before shutting down the upgrade process",
defaultValue = "240")
int upgradeTimeout;
}
| CamelUpdateMixin |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/javassist/User.java | {
"start": 729,
"end": 1174
} | class ____ {
private Integer id;
private String name;
private List<Group> groups;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<Group> getGroups() {
return groups;
}
public void setGroups(List<Group> groups) {
this.groups = groups;
}
}
| User |
java | apache__camel | components/camel-reactive-streams/src/main/java/org/apache/camel/component/reactive/streams/api/CamelReactiveStreamsServiceFactory.java | {
"start": 1001,
"end": 1444
} | interface ____ {
/**
* Creates a new instance of the {@link ReactiveStreamsEngineConfiguration}
*
* @param context the Camel context
* @param configuration the ReactiveStreams engine configuration
* @return the ReactiveStreams service
*/
CamelReactiveStreamsService newInstance(CamelContext context, ReactiveStreamsEngineConfiguration configuration);
}
| CamelReactiveStreamsServiceFactory |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java | {
"start": 114830,
"end": 129825
} | class ____ globals -> generated type; non-application globals only
final Map<String, String> globals = new HashMap<>();
final Set<String> allGlobals = new HashSet<>();
boolean contains(MethodInfo extensionMethod) {
return identifiersToGeneratedClass
.containsKey(toKey(extensionMethod));
}
String getGeneratedGlobalClass(DotName declaringClassName) {
return globals.get(declaringClassName.toString());
}
String getGeneratedClass(MethodInfo extensionMethod) {
return identifiersToGeneratedClass.get(toKey(extensionMethod));
}
void add(MethodInfo extensionMethod, String className, Predicate<DotName> applicationClassPredicate) {
if (!applicationClassPredicate.test(extensionMethod.declaringClass().name())) {
identifiersToGeneratedClass.put(toKey(extensionMethod), className);
}
}
void addGlobal(DotName declaringClassName, String generatedClassName, Predicate<DotName> applicationClassPredicate) {
if (allGlobals.add(generatedClassName.toString()) && !applicationClassPredicate.test(declaringClassName)) {
globals.put(declaringClassName.toString(), generatedClassName);
}
}
private String toKey(MethodInfo extensionMethod) {
return extensionMethod.declaringClass().toString() + "#" + extensionMethod.toString();
}
}
@BuildStep
void collectTemplates(ApplicationArchivesBuildItem applicationArchives,
CurateOutcomeBuildItem curateOutcome,
List<TemplatePathExcludeBuildItem> templatePathExcludes,
BuildProducer<HotDeploymentWatchedFileBuildItem> watchedPaths,
BuildProducer<TemplatePathBuildItem> templatePaths,
BuildProducer<NativeImageResourceBuildItem> nativeImageResources,
QuteConfig config,
TemplateRootsBuildItem templateRoots,
LaunchModeBuildItem launchMode)
throws IOException {
// Make sure the new templates are watched as well
watchedPaths.produce(HotDeploymentWatchedFileBuildItem.builder().setLocationPredicate(new Predicate<String>() {
@Override
public boolean test(String path) {
for (String rootPath : templateRoots) {
if (path.startsWith(rootPath)) {
return true;
}
}
return false;
}
}).build());
List<Pattern> excludePatterns = new ArrayList<>(templatePathExcludes.size() + 1);
excludePatterns.add(config.templatePathExclude());
for (TemplatePathExcludeBuildItem exclude : templatePathExcludes) {
excludePatterns.add(Pattern.compile(exclude.getRegexPattern()));
}
final boolean tryLocateSource = launchMode.getLaunchMode().isDev()
&& DevModeType.LOCAL == launchMode.getDevModeType().orElse(null);
final Set<ApplicationArchive> allApplicationArchives = applicationArchives.getAllApplicationArchives();
final Set<ArtifactKey> appArtifactKeys = new HashSet<>(allApplicationArchives.size());
for (var archive : allApplicationArchives) {
appArtifactKeys.add(archive.getKey());
}
ApplicationModel applicationModel = curateOutcome.getApplicationModel();
for (ResolvedDependency artifact : applicationModel.getDependencies(DependencyFlags.RUNTIME_EXTENSION_ARTIFACT)) {
// Skip extension archives that are also application archives
if (!appArtifactKeys.contains(artifact.getKey())) {
scanPathTree(artifact.getContentTree(), templateRoots, watchedPaths, templatePaths, nativeImageResources,
config, excludePatterns, TemplatePathBuildItem.APP_ARCHIVE_PRIORITY, null, tryLocateSource);
}
}
for (ApplicationArchive archive : applicationArchives.getApplicationArchives()) {
archive.accept(
tree -> scanPathTree(tree, templateRoots, watchedPaths, templatePaths, nativeImageResources, config,
excludePatterns, TemplatePathBuildItem.APP_ARCHIVE_PRIORITY, null, tryLocateSource));
}
WorkspaceModule appModule;
if (tryLocateSource) {
appModule = applicationModel.getApplicationModule();
} else {
appModule = null;
}
applicationArchives.getRootArchive().accept(
tree -> scanPathTree(tree, templateRoots, watchedPaths, templatePaths, nativeImageResources, config,
excludePatterns, TemplatePathBuildItem.ROOT_ARCHIVE_PRIORITY, appModule, tryLocateSource));
}
private void scanPathTree(PathTree pathTree, TemplateRootsBuildItem templateRoots,
BuildProducer<HotDeploymentWatchedFileBuildItem> watchedPaths,
BuildProducer<TemplatePathBuildItem> templatePaths,
BuildProducer<NativeImageResourceBuildItem> nativeImageResources,
QuteConfig config, List<Pattern> excludePatterns,
int templatePriority, WorkspaceModule module, boolean tryLocateSource) {
for (String templateRoot : templateRoots) {
if (PathTreeUtils.containsCaseSensitivePath(pathTree, templateRoot)) {
pathTree.walkIfContains(templateRoot, visit -> {
Path path = visit.getPath();
if (Files.isRegularFile(path)) {
if (!Identifiers.isValid(path.getFileName().toString())) {
LOGGER.warnf("Invalid file name detected [%s] - template is ignored", visit.getPath());
return;
}
LOGGER.debugf("Found template: %s", path);
// remove templateRoot + /
final String relativePath = visit.getRelativePath();
String templatePath = relativePath.substring(templateRoot.length() + 1);
for (Pattern p : excludePatterns) {
if (p.matcher(templatePath).matches()) {
LOGGER.debugf("Template file excluded: %s", path);
return;
}
}
// Try to find source
URI source = null;
if (module != null) {
for (SourceDir resources : module.getMainSources().getResourceDirs()) {
Path sourcePath = resources.getDir().resolve(visit.getRelativePath());
if (Files.isRegularFile(sourcePath)) {
LOGGER.debugf("Source file found for template %s: %s", templatePath, sourcePath);
source = sourcePath.toUri();
}
}
} else if (tryLocateSource) {
try {
source = visit.getUrl().toURI();
LOGGER.debugf("Source file found for template %s: %s", templatePath, source);
} catch (Exception e) {
LOGGER.warnf("Unable to locate source for %s: %s", templatePath, e.toString());
}
}
produceTemplateBuildItems(templatePaths, watchedPaths, nativeImageResources,
relativePath, templatePath, path, config, templatePriority, source);
}
});
}
}
}
@BuildStep
TemplateFilePathsBuildItem collectTemplateFilePaths(QuteConfig config,
EffectiveTemplatePathsBuildItem effectiveTemplatePaths) {
Set<String> filePaths = new HashSet<String>();
for (TemplatePathBuildItem templatePath : effectiveTemplatePaths.getTemplatePaths()) {
String path = templatePath.getPath();
filePaths.add(path);
// Also add version without suffix from the path
// For example for "items.html" also add "items"
for (String suffix : config.suffixes()) {
if (path.endsWith(suffix)) {
filePaths.add(path.substring(0, path.length() - (suffix.length() + 1)));
}
}
}
return new TemplateFilePathsBuildItem(filePaths);
}
@BuildStep
void validateTemplateInjectionPoints(TemplateFilePathsBuildItem filePaths,
EffectiveTemplatePathsBuildItem effectiveTemplatePaths,
ValidationPhaseBuildItem validationPhase, BuildProducer<ValidationErrorBuildItem> validationErrors,
CustomTemplateLocatorPatternsBuildItem locatorPatternsBuildItem) {
for (InjectionPointInfo injectionPoint : validationPhase.getContext().getInjectionPoints()) {
if (injectionPoint.getRequiredType().name().equals(Names.TEMPLATE)) {
AnnotationInstance location = injectionPoint.getRequiredQualifier(Names.LOCATION);
String name;
if (location != null) {
name = location.value().asString();
} else if (injectionPoint.hasDefaultedQualifier()) {
name = getName(injectionPoint);
} else {
name = null;
}
if (name != null) {
// For "@Inject Template items" we try to match "items"
// For "@Location("github/pulls") Template pulls" we try to match "github/pulls"
// For "@Location("foo/bar/baz.txt") Template baz" we try to match "foo/bar/baz.txt"
if (!filePaths.contains(name)
&& isNotLocatedByCustomTemplateLocator(locatorPatternsBuildItem.getLocationPatterns(),
name)) {
validationErrors.produce(new ValidationErrorBuildItem(
new TemplateException(
String.format(
"No template found for path [%s] defined at %s\n\t- available templates: %s",
name, injectionPoint.getTargetInfo(),
effectiveTemplatePaths.getTemplatePaths().stream()
.map(TemplatePathBuildItem::getPath)
.collect(Collectors.toList())))));
}
}
}
}
}
@BuildStep
CustomTemplateLocatorPatternsBuildItem validateAndCollectCustomTemplateLocatorLocations(
BeanArchiveIndexBuildItem beanArchiveIndex,
BuildProducer<ValidationErrorBuildItem> validationErrors) {
Collection<Pattern> locationPatterns = new ArrayList<>();
// Collect TemplateLocators annotated with io.quarkus.qute.Locate
for (AnnotationInstance locate : beanArchiveIndex.getIndex().getAnnotations(Names.LOCATE)) {
AnnotationTarget locateTarget = locate.target();
if (locateTarget.kind() == Kind.CLASS) {
if (Types.isImplementorOf(locateTarget.asClass(), Names.TEMPLATE_LOCATOR, beanArchiveIndex.getIndex())) {
addLocationRegExToLocators(locationPatterns, locate.value(), locateTarget, validationErrors);
} else {
reportFoundInvalidTarget(validationErrors, locateTarget);
}
}
}
// Collect TemplateLocators annotated with multiple 'io.quarkus.qute.Locate'
for (AnnotationInstance locates : beanArchiveIndex.getIndex().getAnnotations(Names.LOCATES)) {
AnnotationTarget locatesTarget = locates.target();
if (locatesTarget.kind() == Kind.CLASS) {
if (Types.isImplementorOf(locatesTarget.asClass(), Names.TEMPLATE_LOCATOR, beanArchiveIndex.getIndex())) {
// locates.value() is array of 'io.quarkus.qute.Locate'
for (AnnotationInstance locate : locates.value().asNestedArray()) {
addLocationRegExToLocators(locationPatterns, locate.value(), locatesTarget, validationErrors);
}
} else {
reportFoundInvalidTarget(validationErrors, locatesTarget);
}
}
}
return new CustomTemplateLocatorPatternsBuildItem(locationPatterns);
}
@BuildStep
void collectEngineConfigurations(
BeanArchiveIndexBuildItem beanArchiveIndex,
BuildProducer<EngineConfigurationsBuildItem> engineConfig,
BuildProducer<ValidationErrorBuildItem> validationErrors) {
Collection<AnnotationInstance> engineConfigAnnotations = beanArchiveIndex.getIndex()
.getAnnotations(Names.ENGINE_CONFIGURATION);
if (engineConfigAnnotations.isEmpty()) {
return;
}
List<ClassInfo> engineConfigClasses = new ArrayList<>();
IndexView index = beanArchiveIndex.getIndex();
for (AnnotationInstance annotation : engineConfigAnnotations) {
AnnotationTarget target = annotation.target();
if (target.kind() == Kind.CLASS) {
ClassInfo clazz = target.asClass();
if (clazz.isAbstract()
|| clazz.isInterface()
|| (clazz.nestingType() != NestingType.TOP_LEVEL
&& (clazz.nestingType() != NestingType.INNER || !Modifier.isStatic(clazz.flags())))) {
validationErrors.produce(
new ValidationErrorBuildItem(
new TemplateException(String.format(
"Only non-abstract, top-level or static nested classes may be annotated with @%s: %s",
EngineConfiguration.class.getSimpleName(), clazz.name()))));
} else if (Types.isImplementorOf(clazz, Names.SECTION_HELPER_FACTORY, index)
|| Types.isImplementorOf(clazz, Names.PARSER_HOOK, index)) {
if (clazz.hasNoArgsConstructor()
&& Modifier.isPublic(clazz.flags())) {
engineConfigClasses.add(clazz);
} else {
validationErrors.produce(
new ValidationErrorBuildItem(
new TemplateException(String.format(
"A | declaring |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/tools/picocli/CommandLine.java | {
"start": 277639,
"end": 288551
} | class ____ implements Cloneable {
private final int maxLength;
private int from;
private int length;
private StringBuilder plain = new StringBuilder();
private List<StyledSection> sections = new ArrayList<>();
/** Constructs a Text with the specified max length (for use in a TextTable Column).
* @param maxLength max length of this text */
public Text(final int maxLength) {
this.maxLength = maxLength;
}
/**
* Constructs a Text with the specified String, which may contain markup like
* {@code @|bg(red),white,underline some text|@}.
* @param input the string with markup to parse
*/
public Text(final String input) {
maxLength = -1;
plain.setLength(0);
int i = 0;
while (true) {
int j = input.indexOf("@|", i);
if (j == -1) {
if (i == 0) {
plain.append(input);
length = plain.length();
return;
}
plain.append(input.substring(i, input.length()));
length = plain.length();
return;
}
plain.append(input.substring(i, j));
final int k = input.indexOf("|@", j);
if (k == -1) {
plain.append(input);
length = plain.length();
return;
}
j += 2;
final String spec = input.substring(j, k);
final String[] items = spec.split(" ", 2);
if (items.length == 1) {
plain.append(input);
length = plain.length();
return;
}
final IStyle[] styles = Style.parse(items[0]);
addStyledSection(
plain.length(),
items[1].length(),
Style.on(styles),
Style.off(reverse(styles)) + Style.reset.off());
plain.append(items[1]);
i = k + 2;
}
}
private void addStyledSection(
final int start, final int length, final String startStyle, final String endStyle) {
sections.add(new StyledSection(start, length, startStyle, endStyle));
}
@Override
public Object clone() {
try {
return super.clone();
} catch (final CloneNotSupportedException e) {
throw new IllegalStateException(e);
}
}
public Text[] splitLines() {
final List<Text> result = new ArrayList<>();
boolean trailingEmptyString = false;
int start = 0, end = 0;
for (int i = 0; i < plain.length(); i++, end = i) {
final char c = plain.charAt(i);
boolean eol = c == '\n';
eol |= (c == '\r' && i + 1 < plain.length() && plain.charAt(i + 1) == '\n' && ++i > 0); // \r\n
eol |= c == '\r';
if (eol) {
result.add(this.substring(start, end));
trailingEmptyString = i == plain.length() - 1;
start = i + 1;
}
}
if (start < plain.length() || trailingEmptyString) {
result.add(this.substring(start, plain.length()));
}
return result.toArray(new Text[result.size()]);
}
/** Returns a new {@code Text} instance that is a substring of this Text. Does not modify this instance!
* @param start index in the plain text where to start the substring
* @return a new Text instance that is a substring of this Text */
public Text substring(final int start) {
return substring(start, length);
}
/** Returns a new {@code Text} instance that is a substring of this Text. Does not modify this instance!
* @param start index in the plain text where to start the substring
* @param end index in the plain text where to end the substring
* @return a new Text instance that is a substring of this Text */
public Text substring(final int start, final int end) {
final Text result = (Text) clone();
result.from = from + start;
result.length = end - start;
return result;
}
/** Returns a new {@code Text} instance with the specified text appended. Does not modify this instance!
* @param string the text to append
* @return a new Text instance */
public Text append(final String string) {
return append(new Text(string));
}
/** Returns a new {@code Text} instance with the specified text appended. Does not modify this instance!
* @param other the text to append
* @return a new Text instance */
public Text append(final Text other) {
final Text result = (Text) clone();
result.plain = new StringBuilder(plain.toString().substring(from, from + length));
result.from = 0;
result.sections = new ArrayList<>();
for (final StyledSection section : sections) {
result.sections.add(section.withStartIndex(section.startIndex - from));
}
result.plain.append(other.plain.toString().substring(other.from, other.from + other.length));
for (final StyledSection section : other.sections) {
final int index = result.length + section.startIndex - other.from;
result.sections.add(section.withStartIndex(index));
}
result.length = result.plain.length();
return result;
}
/**
* Copies the specified substring of this Text into the specified destination, preserving the markup.
* @param from start of the substring
* @param length length of the substring
* @param destination destination Text to modify
* @param offset indentation (padding)
*/
public void getStyledChars(final int from, final int length, final Text destination, final int offset) {
if (destination.length < offset) {
for (int i = destination.length; i < offset; i++) {
destination.plain.append(' ');
}
destination.length = offset;
}
for (final StyledSection section : sections) {
destination.sections.add(
section.withStartIndex(section.startIndex - from + destination.length));
}
destination.plain.append(plain.toString().substring(from, from + length));
destination.length = destination.plain.length();
}
/** Returns the plain text without any formatting.
* @return the plain text without any formatting */
public String plainString() {
return plain.toString().substring(from, from + length);
}
@Override
public boolean equals(final Object obj) {
return toString().equals(String.valueOf(obj));
}
@Override
public int hashCode() {
return toString().hashCode();
}
/** Returns a String representation of the text with ANSI escape codes embedded, unless ANSI is
* {@linkplain Ansi#enabled()} not enabled}, in which case the plain text is returned.
* @return a String representation of the text with ANSI escape codes embedded (if enabled) */
@Override
public String toString() {
if (!Ansi.this.enabled()) {
return plain.toString().substring(from, from + length);
}
if (length == 0) {
return "";
}
final StringBuilder sb = new StringBuilder(plain.length() + 20 * sections.size());
StyledSection current = null;
final int end = Math.min(from + length, plain.length());
for (int i = from; i < end; i++) {
final StyledSection section = findSectionContaining(i);
if (section != current) {
if (current != null) {
sb.append(current.endStyles);
}
if (section != null) {
sb.append(section.startStyles);
}
current = section;
}
sb.append(plain.charAt(i));
}
if (current != null) {
sb.append(current.endStyles);
}
return sb.toString();
}
private StyledSection findSectionContaining(final int index) {
for (final StyledSection section : sections) {
if (index >= section.startIndex && index < section.startIndex + section.length) {
return section;
}
}
return null;
}
}
}
}
/**
* Utility | Text |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java | {
"start": 2030,
"end": 5102
} | class ____ implements TransportClientBootstrap {
private static final SparkLogger LOG = SparkLoggerFactory.getLogger(AuthClientBootstrap.class);
private final TransportConf conf;
private final String appId;
private final SecretKeyHolder secretKeyHolder;
public AuthClientBootstrap(
TransportConf conf,
String appId,
SecretKeyHolder secretKeyHolder) {
this.conf = conf;
// TODO: right now this behaves like the SASL backend, because when executors start up
// they don't necessarily know the app ID. So they send a hardcoded "user" that is defined
// in the SecurityManager, which will also always return the same secret (regardless of the
// user name). All that's needed here is for this "user" to match on both sides, since that's
// required by the protocol. At some point, though, it would be better for the actual app ID
// to be provided here.
this.appId = appId;
this.secretKeyHolder = secretKeyHolder;
}
@Override
public void doBootstrap(TransportClient client, Channel channel) {
if (!conf.encryptionEnabled()) {
LOG.debug("AES encryption disabled, using old auth protocol.");
doSaslAuth(client, channel);
return;
}
try {
doSparkAuth(client, channel);
client.setClientId(appId);
} catch (GeneralSecurityException | IOException e) {
throw new RuntimeException(e);
} catch (RuntimeException e) {
// There isn't a good exception that can be caught here to know whether it's really
// OK to switch back to SASL (because the server doesn't speak the new protocol). So
// try it anyway, unless it's a timeout, which is locally fatal. In the worst case
// things will fail again.
if (!conf.saslFallback() || e.getCause() instanceof TimeoutException) {
throw e;
}
if (LOG.isDebugEnabled()) {
Throwable cause = e.getCause() != null ? e.getCause() : e;
LOG.debug("New auth protocol failed, trying SASL.", cause);
} else {
LOG.info("New auth protocol failed, trying SASL.");
}
doSaslAuth(client, channel);
}
}
private void doSparkAuth(TransportClient client, Channel channel)
throws GeneralSecurityException, IOException {
String secretKey = secretKeyHolder.getSecretKey(appId);
try (AuthEngine engine = new AuthEngine(appId, secretKey, conf)) {
AuthMessage challenge = engine.challenge();
ByteBuf challengeData = Unpooled.buffer(challenge.encodedLength());
challenge.encode(challengeData);
ByteBuffer responseData =
client.sendRpcSync(challengeData.nioBuffer(), conf.authRTTimeoutMs());
AuthMessage response = AuthMessage.decodeMessage(responseData);
engine.deriveSessionCipher(challenge, response);
engine.sessionCipher().addToChannel(channel);
}
}
private void doSaslAuth(TransportClient client, Channel channel) {
SaslClientBootstrap sasl = new SaslClientBootstrap(conf, appId, secretKeyHolder);
sasl.doBootstrap(client, channel);
}
}
| AuthClientBootstrap |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/chunks/MemoryIndexChunkScorer.java | {
"start": 1175,
"end": 4178
} | class ____ {
private static final String CONTENT_FIELD = "content";
private final StandardAnalyzer analyzer;
public MemoryIndexChunkScorer() {
// TODO: Allow analyzer to be customizable and/or read from the field mapping
this.analyzer = new StandardAnalyzer();
}
/**
* Creates an in-memory index of chunks, or chunks, returns ordered, scored list.
*
* @param chunks the list of text chunks to score
* @param inferenceText the query text to compare against
* @param maxResults maximum number of results to return
* @return list of scored chunks ordered by relevance
* @throws IOException on failure scoring chunks
*/
public List<ScoredChunk> scoreChunks(List<String> chunks, String inferenceText, int maxResults) throws IOException {
if (chunks == null || chunks.isEmpty() || inferenceText == null || inferenceText.trim().isEmpty()) {
return new ArrayList<>();
}
try (Directory directory = new ByteBuffersDirectory()) {
IndexWriterConfig config = new IndexWriterConfig(analyzer);
try (IndexWriter writer = new IndexWriter(directory, config)) {
for (String chunk : chunks) {
Document doc = new Document();
doc.add(new TextField(CONTENT_FIELD, chunk, Field.Store.YES));
writer.addDocument(doc);
}
writer.commit();
}
try (DirectoryReader reader = DirectoryReader.open(directory)) {
IndexSearcher searcher = new IndexSearcher(reader);
org.apache.lucene.util.QueryBuilder qb = new QueryBuilder(analyzer);
Query query = qb.createBooleanQuery(CONTENT_FIELD, inferenceText, BooleanClause.Occur.SHOULD);
int numResults = Math.min(maxResults, chunks.size());
TopDocs topDocs = searcher.search(query, numResults);
List<ScoredChunk> scoredChunks = new ArrayList<>();
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
Document doc = reader.storedFields().document(scoreDoc.doc);
String content = doc.get(CONTENT_FIELD);
scoredChunks.add(new ScoredChunk(content, scoreDoc.score));
}
// It's possible that no chunks were scorable (for example, a semantic match that does not have a lexical match).
// In this case, we'll return the first N chunks with a score of 0.
// TODO: consider parameterizing this
return scoredChunks.isEmpty() == false
? scoredChunks
: chunks.subList(0, Math.min(maxResults, chunks.size())).stream().map(c -> new ScoredChunk(c, 0.0f)).toList();
}
}
}
/**
* Represents a chunk with its relevance score.
*/
public record ScoredChunk(String content, float score) {}
}
| MemoryIndexChunkScorer |
java | apache__spark | sql/hive-thriftserver/src/main/java/org/apache/hive/service/BreakableService.java | {
"start": 1314,
"end": 3119
} | class ____ extends AbstractService {
private boolean failOnInit;
private boolean failOnStart;
private boolean failOnStop;
private final int[] counts = new int[4];
public BreakableService() {
this(false, false, false);
}
public BreakableService(boolean failOnInit,
boolean failOnStart,
boolean failOnStop) {
super("BreakableService");
this.failOnInit = failOnInit;
this.failOnStart = failOnStart;
this.failOnStop = failOnStop;
inc(STATE.NOTINITED);
}
private int convert(STATE state) {
switch (state) {
case NOTINITED: return 0;
case INITED: return 1;
case STARTED: return 2;
case STOPPED: return 3;
default: return 0;
}
}
private void inc(STATE state) {
int index = convert(state);
counts[index] ++;
}
public int getCount(STATE state) {
return counts[convert(state)];
}
private void maybeFail(boolean fail, String action) {
if (fail) {
throw new BrokenLifecycleEvent(action);
}
}
@Override
public void init(HiveConf conf) {
inc(STATE.INITED);
maybeFail(failOnInit, "init");
super.init(conf);
}
@Override
public void start() {
inc(STATE.STARTED);
maybeFail(failOnStart, "start");
super.start();
}
@Override
public void stop() {
inc(STATE.STOPPED);
maybeFail(failOnStop, "stop");
super.stop();
}
public void setFailOnInit(boolean failOnInit) {
this.failOnInit = failOnInit;
}
public void setFailOnStart(boolean failOnStart) {
this.failOnStart = failOnStart;
}
public void setFailOnStop(boolean failOnStop) {
this.failOnStop = failOnStop;
}
/**
* The exception explicitly raised on a failure
*/
public static | BreakableService |
java | junit-team__junit5 | junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/extension/AutoCloseExtension.java | {
"start": 1695,
"end": 5021
} | class ____ implements TestInstancePreDestroyCallback, AfterAllCallback {
private static final Logger logger = LoggerFactory.getLogger(AutoCloseExtension.class);
@Override
public void preDestroyTestInstance(ExtensionContext context) {
ThrowableCollector throwableCollector = new ThrowableCollector(__ -> false);
TestInstancePreDestroyCallback.preDestroyTestInstances(context,
testInstance -> closeFields(testInstance.getClass(), testInstance, throwableCollector));
throwableCollector.assertEmpty();
}
@Override
public void afterAll(ExtensionContext context) {
ThrowableCollector throwableCollector = new ThrowableCollector(__ -> false);
closeFields(context.getRequiredTestClass(), null, throwableCollector);
throwableCollector.assertEmpty();
}
private static void closeFields(Class<?> testClass, @Nullable Object testInstance,
ThrowableCollector throwableCollector) {
Predicate<Field> predicate = (testInstance == null ? ModifierSupport::isStatic : ModifierSupport::isNotStatic);
AnnotationSupport.findAnnotatedFields(testClass, AutoClose.class, predicate, BOTTOM_UP)//
.forEach(field -> throwableCollector.execute(() -> closeField(field, testInstance)));
}
private static void closeField(Field field, @Nullable Object testInstance) throws Exception {
String methodName = AnnotationSupport.findAnnotation(field, AutoClose.class).orElseThrow().value();
Class<?> fieldType = field.getType();
checkCondition(StringUtils.isNotBlank(methodName), "@AutoClose on field %s must specify a method name.", field);
checkCondition(!fieldType.isPrimitive(), "@AutoClose is not supported on primitive field %s.", field);
checkCondition(!fieldType.isArray(), "@AutoClose is not supported on array field %s.", field);
Object fieldValue = ReflectionSupport.tryToReadFieldValue(field, testInstance).get();
if (fieldValue == null) {
logger.warn(() -> "Cannot @AutoClose field %s because it is null.".formatted(getQualifiedName(field)));
}
else {
invokeCloseMethod(field, fieldValue, methodName.strip());
}
}
private static void invokeCloseMethod(Field field, Object target, String methodName) throws Exception {
// Avoid reflection if we can directly invoke close() via AutoCloseable.
if (target instanceof @SuppressWarnings("resource") AutoCloseable closeable && "close".equals(methodName)) {
closeable.close();
return;
}
Class<?> targetType = target.getClass();
Method closeMethod = ReflectionSupport.findMethod(targetType, methodName).orElseThrow(
() -> new ExtensionConfigurationException(
"Cannot @AutoClose field %s because %s does not define method %s()."//
.formatted(getQualifiedName(field), targetType.getName(), methodName)));
closeMethod = ReflectionUtils.getInterfaceMethodIfPossible(closeMethod, targetType);
ReflectionSupport.invokeMethod(closeMethod, target);
}
private static void checkCondition(boolean condition, String messageFormat, Field field) {
Preconditions.condition(condition, () -> messageFormat.formatted(getQualifiedName(field)));
}
private static String getQualifiedName(Field field) {
String typeName = field.getDeclaringClass().getCanonicalName();
if (typeName == null) {
typeName = field.getDeclaringClass().getTypeName();
}
return typeName + "." + field.getName();
}
}
| AutoCloseExtension |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/DestroyMethodInferenceTests.java | {
"start": 6493,
"end": 6646
} | class ____ implements Closeable {
boolean closed = false;
@Override
public void close() {
closed = true;
}
}
static | WithInheritedCloseMethod |
java | apache__kafka | connect/mirror/src/main/java/org/apache/kafka/connect/mirror/MirrorUtils.java | {
"start": 2279,
"end": 2627
} | class ____ {
public static final String SOURCE_CLUSTER_KEY = "cluster";
public static final String TOPIC_KEY = "topic";
public static final String PARTITION_KEY = "partition";
public static final String OFFSET_KEY = "offset";
private static final Logger log = LoggerFactory.getLogger(MirrorUtils.class);
// utility | MirrorUtils |
java | quarkusio__quarkus | integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/HibernateSearchTenancyReindexFunctionalityTest.java | {
"start": 802,
"end": 966
} | class ____ {
public static final TypeRef<List<Book>> BOOK_LIST_TYPE_REF = new TypeRef<>() {
};
public static | HibernateSearchTenancyReindexFunctionalityTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/PropertySourceAnnotationTests.java | {
"start": 15683,
"end": 15821
} | class ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@PropertySource(value = {}, factory = MyCustomFactory.class)
@ | WithCustomFactoryAsMeta |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/features/FeatureUtil.java | {
"start": 9255,
"end": 9305
} | class ____ method.
*
* @param classOrMethod a | or |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/exc/ExceptionSerializationTest.java | {
"start": 917,
"end": 2888
} | class ____ {
private String strVal;
public String getVal() { return strVal; }
public NoSerdeConstructor( String strVal ) {
this.strVal = strVal;
}
}
/*
/**********************************************************************
/* Test methods
/**********************************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testSimple() throws Exception
{
String TEST = "test exception";
Map<String,Object> result = writeAndMap(MAPPER, new Exception(TEST));
// JDK 7 has introduced a new property 'suppressed' to Throwable
Object ob = result.get("suppressed");
if (ob != null) {
assertEquals(5, result.size());
} else {
assertEquals(4, result.size());
}
assertEquals(TEST, result.get("message"));
assertNull(result.get("cause"));
assertEquals(TEST, result.get("localizedMessage"));
// hmmh. what should we get for stack traces?
Object traces = result.get("stackTrace");
if (!(traces instanceof List<?>)) {
fail("Expected a List for exception member 'stackTrace', got: "+traces);
}
}
// to double-check [databind#1413]
@Test
public void testSimpleOther() throws Exception
{
JsonParser p = MAPPER.createParser("{ }");
InvalidFormatException exc = InvalidFormatException.from(p, "Test", getClass(), String.class);
String json = MAPPER.writeValueAsString(exc);
p.close();
assertNotNull(json);
}
// for [databind#877]
@SuppressWarnings("unchecked")
@Test
public void testIgnorals() throws Exception
{
ExceptionWithIgnoral input = new ExceptionWithIgnoral("foobar");
input.initCause(new IOException("surprise!"));
// First, should ignore anything with | NoSerdeConstructor |
java | elastic__elasticsearch | x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextInferenceFieldsIT.java | {
"start": 2405,
"end": 13524
} | class ____ extends ESIntegTestCase {
private final String indexName = randomIdentifier();
private final Map<String, TaskType> inferenceIds = new HashMap<>();
private static final Map<String, Object> SPARSE_EMBEDDING_SERVICE_SETTINGS = Map.of("model", "my_model", "api_key", "my_api_key");
private static final Map<String, Object> TEXT_EMBEDDING_SERVICE_SETTINGS = Map.of(
"model",
"my_model",
"dimensions",
256,
"similarity",
"cosine",
"api_key",
"my_api_key"
);
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder().put(LicenseSettings.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial").build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(LocalStateInferencePlugin.class, TestInferenceServicePlugin.class, ReindexPlugin.class, FakeMlPlugin.class);
}
@Override
protected boolean forbidPrivateIndexSettings() {
return false;
}
@After
public void cleanUp() {
IntegrationTestUtils.deleteIndex(client(), indexName);
for (var entry : inferenceIds.entrySet()) {
IntegrationTestUtils.deleteInferenceEndpoint(client(), entry.getValue(), entry.getKey());
}
}
public void testExcludeInferenceFieldsFromSource() throws Exception {
excludeInferenceFieldsFromSourceTestCase(IndexVersion.current(), IndexVersion.current(), 10);
}
public void testExcludeInferenceFieldsFromSourceOldIndexVersions() throws Exception {
excludeInferenceFieldsFromSourceTestCase(
IndexVersions.SEMANTIC_TEXT_FIELD_TYPE,
IndexVersionUtils.getPreviousVersion(IndexVersion.current()),
40
);
}
private void excludeInferenceFieldsFromSourceTestCase(IndexVersion minIndexVersion, IndexVersion maxIndexVersion, int iterations)
throws Exception {
final String sparseEmbeddingInferenceId = randomIdentifier();
final String textEmbeddingInferenceId = randomIdentifier();
createInferenceEndpoint(TaskType.SPARSE_EMBEDDING, sparseEmbeddingInferenceId, SPARSE_EMBEDDING_SERVICE_SETTINGS);
createInferenceEndpoint(TaskType.TEXT_EMBEDDING, textEmbeddingInferenceId, TEXT_EMBEDDING_SERVICE_SETTINGS);
final String sparseEmbeddingField = randomIdentifier();
final String textEmbeddingField = randomIdentifier();
for (int i = 0; i < iterations; i++) {
final IndexVersion indexVersion = IndexVersionUtils.randomVersionBetween(random(), minIndexVersion, maxIndexVersion);
final Settings indexSettings = generateIndexSettings(indexVersion);
XContentBuilder mappings = IntegrationTestUtils.generateSemanticTextMapping(
Map.of(sparseEmbeddingField, sparseEmbeddingInferenceId, textEmbeddingField, textEmbeddingInferenceId)
);
assertAcked(prepareCreate(indexName).setSettings(indexSettings).setMapping(mappings));
final int docCount = randomIntBetween(10, 50);
indexDocuments(sparseEmbeddingField, docCount);
indexDocuments(textEmbeddingField, docCount);
QueryBuilder sparseEmbeddingFieldQuery = new SemanticQueryBuilder(sparseEmbeddingField, randomAlphaOfLength(10));
assertSearchResponse(sparseEmbeddingFieldQuery, indexSettings, docCount, request -> {
request.source().fetchSource(generateRandomFetchSourceContext()).fetchField(sparseEmbeddingField);
}, response -> {
for (SearchHit hit : response.getHits()) {
Map<String, DocumentField> documentFields = hit.getDocumentFields();
assertThat(documentFields.size(), is(1));
assertThat(documentFields.containsKey(sparseEmbeddingField), is(true));
}
});
QueryBuilder textEmbeddingFieldQuery = new SemanticQueryBuilder(textEmbeddingField, randomAlphaOfLength(10));
assertSearchResponse(textEmbeddingFieldQuery, indexSettings, docCount, request -> {
request.source().fetchSource(generateRandomFetchSourceContext()).fetchField(textEmbeddingField);
}, response -> {
for (SearchHit hit : response.getHits()) {
Map<String, DocumentField> documentFields = hit.getDocumentFields();
assertThat(documentFields.size(), is(1));
assertThat(documentFields.containsKey(textEmbeddingField), is(true));
}
});
IntegrationTestUtils.deleteIndex(client(), indexName);
}
}
private void createInferenceEndpoint(TaskType taskType, String inferenceId, Map<String, Object> serviceSettings) throws IOException {
IntegrationTestUtils.createInferenceEndpoint(client(), taskType, inferenceId, serviceSettings);
inferenceIds.put(inferenceId, taskType);
}
private Settings generateIndexSettings(IndexVersion indexVersion) {
int numDataNodes = internalCluster().numDataNodes();
return Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, indexVersion)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numDataNodes)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.build();
}
private void indexDocuments(String field, int count) {
for (int i = 0; i < count; i++) {
Map<String, Object> source = Map.of(field, randomAlphaOfLength(10));
DocWriteResponse response = client().prepareIndex(indexName).setSource(source).get(TEST_REQUEST_TIMEOUT);
assertThat(response.getResult(), is(DocWriteResponse.Result.CREATED));
}
client().admin().indices().prepareRefresh(indexName).get();
}
private void assertSearchResponse(
QueryBuilder queryBuilder,
Settings indexSettings,
int expectedHits,
@Nullable Consumer<SearchRequest> searchRequestModifier,
@Nullable Consumer<SearchResponse> searchResponseValidator
) throws Exception {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(queryBuilder).size(expectedHits);
SearchRequest searchRequest = new SearchRequest(new String[] { indexName }, searchSourceBuilder);
if (searchRequestModifier != null) {
searchRequestModifier.accept(searchRequest);
}
ExpectedSource expectedSource = getExpectedSource(indexSettings, searchRequest.source().fetchSource());
assertResponse(client().search(searchRequest), response -> {
assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards()));
assertThat(response.getHits().getTotalHits().value(), equalTo((long) expectedHits));
for (SearchHit hit : response.getHits()) {
switch (expectedSource) {
case NONE -> assertThat(hit.getSourceAsMap(), nullValue());
case INFERENCE_FIELDS_EXCLUDED -> {
Map<String, Object> sourceAsMap = hit.getSourceAsMap();
assertThat(sourceAsMap, notNullValue());
assertThat(sourceAsMap.containsKey(InferenceMetadataFieldsMapper.NAME), is(false));
}
case INFERENCE_FIELDS_INCLUDED -> {
Map<String, Object> sourceAsMap = hit.getSourceAsMap();
assertThat(sourceAsMap, notNullValue());
assertThat(sourceAsMap.containsKey(InferenceMetadataFieldsMapper.NAME), is(true));
}
}
}
if (searchResponseValidator != null) {
searchResponseValidator.accept(response);
}
});
}
private static ExpectedSource getExpectedSource(Settings indexSettings, FetchSourceContext fetchSourceContext) {
if (fetchSourceContext != null && fetchSourceContext.fetchSource() == false) {
return ExpectedSource.NONE;
} else if (InferenceMetadataFieldsMapper.isEnabled(indexSettings) == false) {
return ExpectedSource.INFERENCE_FIELDS_EXCLUDED;
}
if (fetchSourceContext != null) {
SourceFilter filter = fetchSourceContext.filter();
if (filter != null) {
if (Arrays.asList(filter.getExcludes()).contains(InferenceMetadataFieldsMapper.NAME)) {
return ExpectedSource.INFERENCE_FIELDS_EXCLUDED;
} else if (filter.getIncludes().length > 0) {
return Arrays.asList(filter.getIncludes()).contains(InferenceMetadataFieldsMapper.NAME)
? ExpectedSource.INFERENCE_FIELDS_INCLUDED
: ExpectedSource.INFERENCE_FIELDS_EXCLUDED;
}
}
Boolean excludeInferenceFieldsExplicit = fetchSourceContext.excludeInferenceFields();
if (excludeInferenceFieldsExplicit != null) {
return excludeInferenceFieldsExplicit ? ExpectedSource.INFERENCE_FIELDS_EXCLUDED : ExpectedSource.INFERENCE_FIELDS_INCLUDED;
}
}
return ExpectedSource.INFERENCE_FIELDS_EXCLUDED;
}
private static FetchSourceContext generateRandomFetchSourceContext() {
FetchSourceContext fetchSourceContext = switch (randomIntBetween(0, 4)) {
case 0 -> FetchSourceContext.FETCH_SOURCE;
case 1 -> FetchSourceContext.FETCH_ALL_SOURCE;
case 2 -> FetchSourceContext.FETCH_ALL_SOURCE_EXCLUDE_INFERENCE_FIELDS;
case 3 -> FetchSourceContext.DO_NOT_FETCH_SOURCE;
case 4 -> null;
default -> throw new IllegalStateException("Unhandled randomized case");
};
if (fetchSourceContext != null && fetchSourceContext.fetchSource()) {
String[] includes = null;
String[] excludes = null;
if (randomBoolean()) {
// Randomly include a non-existent field to test explicit inclusion handling
String field = randomBoolean() ? InferenceMetadataFieldsMapper.NAME : randomIdentifier();
includes = new String[] { field };
}
if (randomBoolean()) {
// Randomly exclude a non-existent field to test implicit inclusion handling
String field = randomBoolean() ? InferenceMetadataFieldsMapper.NAME : randomIdentifier();
excludes = new String[] { field };
}
if (includes != null || excludes != null) {
fetchSourceContext = FetchSourceContext.of(
fetchSourceContext.fetchSource(),
fetchSourceContext.excludeVectors(),
fetchSourceContext.excludeInferenceFields(),
includes,
excludes
);
}
}
return fetchSourceContext;
}
private | SemanticTextInferenceFieldsIT |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/restriction/RestrictionTest.java | {
"start": 14052,
"end": 14118
} | class ____{
@Id String name;
@Version int version;
}
}
| Publisher |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/http/client/HttpClientRequestInterceptor.java | {
"start": 1007,
"end": 1580
} | interface ____ {
/**
* is intercept.
*
* @param uri uri
* @param httpMethod http method
* @param requestHttpEntity request entity
* @return boolean
*/
boolean isIntercept(URI uri, String httpMethod, RequestHttpEntity requestHttpEntity);
/**
* if isIntercept method is true Intercept the given request, and return a response Otherwise,
* the {@link HttpClientRequest} will be used for execution.
*
* @return HttpClientResponse
*/
HttpClientResponse intercept();
}
| HttpClientRequestInterceptor |
java | netty__netty | transport-native-epoll/src/test/java/io/netty/channel/epoll/EpollSocketFixedLengthEchoTest.java | {
"start": 909,
"end": 1186
} | class ____ extends SocketFixedLengthEchoTest {
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return EpollSocketTestPermutation.INSTANCE.socketWithFastOpen();
}
}
| EpollSocketFixedLengthEchoTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RxReturnValueIgnoredTest.java | {
"start": 10708,
"end": 11187
} | class ____ {
Single getSingle() {
return null;
}
void f() {
// BUG: Diagnostic contains: Rx objects must be checked.
getSingle();
}
}
""")
.doTest();
}
@Test
public void rx2Completable() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import io.reactivex.Completable;
| Test |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/view/xml/JacksonXmlView.java | {
"start": 2021,
"end": 4205
} | class ____ extends AbstractJacksonView {
/**
* Default content type: {@value}.
* <p>Overridable through {@link #setContentType(String)}.
*/
public static final String DEFAULT_CONTENT_TYPE = "application/xml";
private @Nullable String modelKey;
/**
* Construct a new instance with an {@link XmlMapper} customized with
* the {@link tools.jackson.databind.JacksonModule}s found by
* {@link MapperBuilder#findModules(ClassLoader)} and setting
* the content type to {@code application/xml}.
*/
public JacksonXmlView() {
super(XmlMapper.builder(), DEFAULT_CONTENT_TYPE);
}
/**
* Construct a new instance using the provided {@link XmlMapper.Builder}
* customized with the {@link tools.jackson.databind.JacksonModule}s
* found by {@link MapperBuilder#findModules(ClassLoader)} and setting
* the content type to {@code application/xml}.
* @see XmlMapper#builder()
*/
public JacksonXmlView(XmlMapper.Builder builder) {
super(builder, DEFAULT_CONTENT_TYPE);
}
/**
* Construct a new instance using the provided {@link XmlMapper}
* and setting the content type to {@code application/xml}.
* @see XmlMapper#builder()
*/
public JacksonXmlView(XmlMapper mapper) {
super(mapper, DEFAULT_CONTENT_TYPE);
}
@Override
public void setModelKey(String modelKey) {
this.modelKey = modelKey;
}
@Override
protected Object filterModel(Map<String, Object> model, HttpServletRequest request) {
Object value = null;
if (this.modelKey != null) {
value = model.get(this.modelKey);
if (value == null) {
throw new IllegalStateException(
"Model contains no object with key [" + this.modelKey + "]");
}
}
else {
for (Map.Entry<String, Object> entry : model.entrySet()) {
if (!(entry.getValue() instanceof BindingResult) &&
!entry.getKey().equals(JSON_VIEW_HINT) &&
!entry.getKey().equals(FILTER_PROVIDER_HINT)) {
if (value != null) {
throw new IllegalStateException("Model contains more than one object to render, only one is supported");
}
value = entry.getValue();
}
}
}
Assert.state(value != null, "Model contains no object to render");
return value;
}
}
| JacksonXmlView |
java | spring-projects__spring-boot | core/spring-boot-testcontainers/src/dockerTest/java/org/springframework/boot/testcontainers/LoadTimeWeaverAwareConsumerImportTestcontainersTests.java | {
"start": 2023,
"end": 2549
} | class ____ {
@Bean
DataSource dataSource() {
EmbeddedDatabaseFactory embeddedDatabaseFactory = new EmbeddedDatabaseFactory();
embeddedDatabaseFactory.setGenerateUniqueDatabaseName(true);
embeddedDatabaseFactory.setDatabaseType(EmbeddedDatabaseType.H2);
return embeddedDatabaseFactory.getDatabase();
}
@Bean
LoadTimeWeaverAwareConsumer loadTimeWeaverAwareConsumer(DatabaseConnectionDetails connectionDetails) {
return new LoadTimeWeaverAwareConsumer(connectionDetails);
}
}
static | TestConfiguration |
java | apache__camel | components/camel-undertow/src/test/java/org/apache/camel/component/undertow/spi/ProviderWithServletTest.java | {
"start": 1350,
"end": 1437
} | class ____ extends AbstractProviderServletTest {
public static | ProviderWithServletTest |
java | spring-projects__spring-security | buildSrc/src/test/java/io/spring/gradle/convention/RepositoryConventionPluginTests.java | {
"start": 1183,
"end": 6107
} | class ____ {
private Project project = ProjectBuilder.builder().build();
@BeforeEach
public void setUp() {
this.project.getProperties().clear();
}
@Test
public void applyWhenIsReleaseThenShouldIncludeReleaseRepo() {
this.project.setVersion("1.0.0.RELEASE");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertReleaseRepository(repositories);
}
@Test
public void applyWhenIsMilestoneThenShouldIncludeMilestoneRepo() {
this.project.setVersion("1.0.0.M1");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertMilestoneRepository(repositories); // milestone
}
@Test
public void applyWhenIsSnapshotThenShouldIncludeSnapshotRepo() {
this.project.setVersion("1.0.0.BUILD-SNAPSHOT");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertSnapshotRepository(repositories);
}
@Test
public void applyWhenIsSnapshotWithForceReleaseThenShouldOnlyIncludeReleaseRepo() {
this.project.getExtensions().getByType(ExtraPropertiesExtension.class)
.set("forceMavenRepositories", "release");
this.project.setVersion("1.0.0.RELEASE");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertReleaseRepository(repositories);
}
@Test
public void applyWhenIsReleaseWithForceMilestoneThenShouldIncludeMilestoneRepo() {
this.project.getExtensions().getByType(ExtraPropertiesExtension.class)
.set("forceMavenRepositories", "milestone");
this.project.setVersion("1.0.0.RELEASE");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertMilestoneRepository(repositories);
}
@Test
public void applyWhenIsReleaseWithForceSnapshotThenShouldIncludeSnapshotRepo() {
this.project.getExtensions().getByType(ExtraPropertiesExtension.class)
.set("forceMavenRepositories", "snapshot");
this.project.setVersion("1.0.0.RELEASE");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertSnapshotRepository(repositories);
}
@Test
public void applyWhenIsReleaseWithForceLocalThenShouldIncludeReleaseAndLocalRepos() {
this.project.getExtensions().getByType(ExtraPropertiesExtension.class)
.set("forceMavenRepositories", "local");
this.project.setVersion("1.0.0.RELEASE");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertThat(repositories).hasSize(4);
assertThat((repositories.get(0)).getName()).isEqualTo("MavenLocal");
}
@Test
public void applyWhenIsReleaseWithForceMilestoneAndLocalThenShouldIncludeMilestoneAndLocalRepos() {
this.project.getExtensions().getByType(ExtraPropertiesExtension.class)
.set("forceMavenRepositories", "milestone,local");
this.project.setVersion("1.0.0.RELEASE");
this.project.getPluginManager().apply(RepositoryConventionPlugin.class);
RepositoryHandler repositories = this.project.getRepositories();
assertThat(repositories).hasSize(5);
assertThat((repositories.get(0)).getName()).isEqualTo("MavenLocal");
}
private void assertSnapshotRepository(RepositoryHandler repositories) {
assertThat(repositories).extracting(ArtifactRepository::getName).hasSize(5);
assertThat(((MavenArtifactRepository) repositories.get(1)).getUrl().toString())
.isEqualTo("https://repo.maven.apache.org/maven2/");
assertThat(((MavenArtifactRepository) repositories.get(2)).getUrl().toString())
.isEqualTo("https://repo.spring.io/snapshot/");
assertThat(((MavenArtifactRepository) repositories.get(3)).getUrl().toString())
.isEqualTo("https://repo.spring.io/milestone/");
}
private void assertMilestoneRepository(RepositoryHandler repositories) {
assertThat(repositories).extracting(ArtifactRepository::getName).hasSize(4);
assertThat(((MavenArtifactRepository) repositories.get(1)).getUrl().toString())
.isEqualTo("https://repo.maven.apache.org/maven2/");
assertThat(((MavenArtifactRepository) repositories.get(2)).getUrl().toString())
.isEqualTo("https://repo.spring.io/milestone/");
}
private void assertReleaseRepository(RepositoryHandler repositories) {
assertThat(repositories).extracting(ArtifactRepository::getName).hasSize(3);
assertThat(((MavenArtifactRepository) repositories.get(1)).getUrl().toString())
.isEqualTo("https://repo.maven.apache.org/maven2/");
assertThat(((MavenArtifactRepository) repositories.get(2)).getUrl().toString())
.isEqualTo("https://repo.spring.io/release/");
}
}
| RepositoryConventionPluginTests |
java | grpc__grpc-java | benchmarks/src/generated/main/grpc/io/grpc/benchmarks/proto/BenchmarkServiceGrpc.java | {
"start": 16268,
"end": 19580
} | class ____
extends io.grpc.stub.AbstractAsyncStub<BenchmarkServiceStub> {
private BenchmarkServiceStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected BenchmarkServiceStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new BenchmarkServiceStub(channel, callOptions);
}
/**
* <pre>
* One request followed by one response.
* The server returns the client payload as-is.
* </pre>
*/
public void unaryCall(io.grpc.benchmarks.proto.Messages.SimpleRequest request,
io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleResponse> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getUnaryCallMethod(), getCallOptions()), request, responseObserver);
}
/**
* <pre>
* Repeated sequence of one request followed by one response.
* Should be called streaming ping-pong
* The server returns the client payload as-is on each response
* </pre>
*/
public io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleRequest> streamingCall(
io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleResponse> responseObserver) {
return io.grpc.stub.ClientCalls.asyncBidiStreamingCall(
getChannel().newCall(getStreamingCallMethod(), getCallOptions()), responseObserver);
}
/**
* <pre>
* Single-sided unbounded streaming from client to server
* The server returns the client payload as-is once the client does WritesDone
* </pre>
*/
public io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleRequest> streamingFromClient(
io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleResponse> responseObserver) {
return io.grpc.stub.ClientCalls.asyncClientStreamingCall(
getChannel().newCall(getStreamingFromClientMethod(), getCallOptions()), responseObserver);
}
/**
* <pre>
* Single-sided unbounded streaming from server to client
* The server repeatedly returns the client payload as-is
* </pre>
*/
public void streamingFromServer(io.grpc.benchmarks.proto.Messages.SimpleRequest request,
io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleResponse> responseObserver) {
io.grpc.stub.ClientCalls.asyncServerStreamingCall(
getChannel().newCall(getStreamingFromServerMethod(), getCallOptions()), request, responseObserver);
}
/**
* <pre>
* Two-sided unbounded streaming between server to client
* Both sides send the content of their own choice to the other
* </pre>
*/
public io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleRequest> streamingBothWays(
io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Messages.SimpleResponse> responseObserver) {
return io.grpc.stub.ClientCalls.asyncBidiStreamingCall(
getChannel().newCall(getStreamingBothWaysMethod(), getCallOptions()), responseObserver);
}
}
/**
* A stub to allow clients to do synchronous rpc calls to service BenchmarkService.
*/
public static final | BenchmarkServiceStub |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/FormAuthConfig.java | {
"start": 198,
"end": 316
} | interface ____ {
/**
* SameSite attribute values for the session and location cookies.
*/
| FormAuthConfig |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/NamedNode.java | {
"start": 941,
"end": 3302
} | interface ____ extends LineNumberAware {
/**
* Gets the value of the id property.
*/
String getId();
/**
* Gets the node prefix id.
*/
String getNodePrefixId();
/**
* Returns a short name for this node which can be useful for ID generation or referring to related resources like
* images
*
* @return defaults to "node" but derived nodes should overload this to provide a unique name
*/
String getShortName();
/**
* Returns a label to describe this node such as the expression if some kind of expression node
*/
String getLabel();
/**
* Returns the description text or null if there is no description text associated with this node
*/
String getDescriptionText();
/**
* Returns the parent
*/
NamedNode getParent();
/**
* Whether this node can accept debugging the current exchange. This allows flexibility for some EIPs that need to
* compute whether to accept debugging or not
*
* @param exchange the current exchange
* @return true to accept debugging this node, or false to skip
*/
default boolean acceptDebugger(Exchange exchange) {
return true;
}
/**
* Processor Level in the route tree
*/
default int getLevel() {
NamedNode node = this;
int level = 0;
while (node != null && node.getParent() != null) {
boolean shallow = "when".equals(node.getShortName()) || "otherwise".equals(node.getShortName());
node = node.getParent();
if (!shallow) {
level++;
}
}
return level;
}
default String getParentId() {
NamedNode node = this;
while (node != null && node.getParent() != null) {
boolean shallow = "when".equals(node.getShortName()) || "otherwise".equals(node.getShortName());
node = node.getParent();
if (!shallow) {
return node.getId();
}
}
return null;
}
/**
* Special methods for Choice EIP
*/
default NamedNode findMatchingWhen(String id) {
return null;
}
/**
* Special methods for Choice EIP
*/
default NamedNode findMatchingOtherwise(String id) {
return null;
}
}
| NamedNode |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/xml/CollectionsWithDefaultTypesTests.java | {
"start": 1065,
"end": 3154
} | class ____ {
private final DefaultListableBeanFactory beanFactory;
public CollectionsWithDefaultTypesTests() {
this.beanFactory = new DefaultListableBeanFactory();
new XmlBeanDefinitionReader(this.beanFactory).loadBeanDefinitions(
new ClassPathResource("collectionsWithDefaultTypes.xml", getClass()));
}
@Test
void testListHasDefaultType() {
TestBean bean = (TestBean) this.beanFactory.getBean("testBean");
for (Object o : bean.getSomeList()) {
assertThat(o.getClass()).as("Value type is incorrect").isEqualTo(Integer.class);
}
}
@Test
void testSetHasDefaultType() {
TestBean bean = (TestBean) this.beanFactory.getBean("testBean");
for (Object o : bean.getSomeSet()) {
assertThat(o.getClass()).as("Value type is incorrect").isEqualTo(Integer.class);
}
}
@Test
void testMapHasDefaultKeyAndValueType() {
TestBean bean = (TestBean) this.beanFactory.getBean("testBean");
assertMap(bean.getSomeMap());
}
@Test
void testMapWithNestedElementsHasDefaultKeyAndValueType() {
TestBean bean = (TestBean) this.beanFactory.getBean("testBean2");
assertMap(bean.getSomeMap());
}
@SuppressWarnings("rawtypes")
private void assertMap(Map<?,?> map) {
for (Map.Entry entry : map.entrySet()) {
assertThat(entry.getKey().getClass()).as("Key type is incorrect").isEqualTo(Integer.class);
assertThat(entry.getValue().getClass()).as("Value type is incorrect").isEqualTo(Boolean.class);
}
}
@Test
@SuppressWarnings("rawtypes")
public void testBuildCollectionFromMixtureOfReferencesAndValues() {
MixedCollectionBean jumble = (MixedCollectionBean) this.beanFactory.getBean("jumble");
assertThat(jumble.getJumble()).as("Expected 3 elements, not " + jumble.getJumble().size()).hasSize(3);
List l = (List) jumble.getJumble();
assertThat(l.get(0).equals("literal")).isTrue();
Integer[] array1 = (Integer[]) l.get(1);
assertThat(array1[0]).isEqualTo(2);
assertThat(array1[1]).isEqualTo(4);
int[] array2 = (int[]) l.get(2);
assertThat(array2[0]).isEqualTo(3);
assertThat(array2[1]).isEqualTo(5);
}
}
| CollectionsWithDefaultTypesTests |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/layout/Rfc5424Layout.java | {
"start": 33880,
"end": 34881
} | class ____ {
private final Map<String, List<PatternFormatter>> delegateMap;
private final boolean discardIfEmpty;
public FieldFormatter(final Map<String, List<PatternFormatter>> fieldMap, final boolean discardIfEmpty) {
this.discardIfEmpty = discardIfEmpty;
this.delegateMap = fieldMap;
}
public StructuredDataElement format(final LogEvent event) {
final Map<String, String> map = new HashMap<>(delegateMap.size());
for (final Map.Entry<String, List<PatternFormatter>> entry : delegateMap.entrySet()) {
final StringBuilder buffer = new StringBuilder();
for (final PatternFormatter formatter : entry.getValue()) {
formatter.format(event, buffer);
}
map.put(entry.getKey(), buffer.toString());
}
return new StructuredDataElement(map, eventPrefix, discardIfEmpty);
}
}
private | FieldFormatter |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/internal/PrivateMaxEntriesMap.java | {
"start": 19988,
"end": 20610
} | class ____ implements Runnable {
final Node<K, V> node;
final int weight;
AddTask(Node<K, V> node, int weight) {
this.weight = weight;
this.node = node;
}
@Override
//@GuardedBy("evictionLock")
public void run() {
weightedSize.lazySet(weightedSize.get() + weight);
// ignore out-of-order write operations
if (node.get().isAlive()) {
evictionDeque.add(node);
evict();
}
}
}
/** Removes a node from the page replacement policy. */
final | AddTask |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/env/MapPropertySource.java | {
"start": 1126,
"end": 1899
} | class ____ extends EnumerablePropertySource<Map<String, Object>> {
/**
* Create a new {@code MapPropertySource} with the given name and {@code Map}.
* @param name the associated name
* @param source the Map source (without {@code null} values in order to get
* consistent {@link #getProperty} and {@link #containsProperty} behavior)
*/
public MapPropertySource(String name, Map<String, Object> source) {
super(name, source);
}
@Override
public @Nullable Object getProperty(String name) {
return this.source.get(name);
}
@Override
public boolean containsProperty(String name) {
return this.source.containsKey(name);
}
@Override
public String[] getPropertyNames() {
return StringUtils.toStringArray(this.source.keySet());
}
}
| MapPropertySource |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NonOverridingEqualsTest.java | {
"start": 1285,
"end": 1784
} | class ____ {
// BUG: Diagnostic contains: Did you mean '@Override'
public boolean equals(Test other) {
return false;
}
}
""")
.doTest();
}
// The following two tests are really to help debug the construction of the suggested fixes.
@Test
public void flagsComplicatedCovariantEqualsMethod() {
compilationHelper
.addSourceLines(
"Test.java",
"""
public | Test |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/BeanProperty.java | {
"start": 6310,
"end": 10532
} | class ____ implements BeanProperty
{
protected final PropertyName _name;
protected final JavaType _type;
protected final PropertyName _wrapperName;
protected final PropertyMetadata _metadata;
/**
* Physical entity (field, method or constructor argument) that
* is used to access value of property (or in case of constructor
* property, just placeholder)
*/
protected final AnnotatedMember _member;
public Std(PropertyName name, JavaType type, PropertyName wrapperName,
AnnotatedMember member, PropertyMetadata metadata)
{
_name = name;
_type = type;
_wrapperName = wrapperName;
_metadata = metadata;
_member = member;
}
public Std(Std base, JavaType newType) {
this(base._name, newType, base._wrapperName, base._member, base._metadata);
}
public Std withType(JavaType type) {
return new Std(this, type);
}
@Override
public <A extends Annotation> A getAnnotation(Class<A> acls) {
return (_member == null) ? null : _member.getAnnotation(acls);
}
@Override
public <A extends Annotation> A getContextAnnotation(Class<A> acls) {
return null;
}
@Override
public JsonFormat.Value findFormatOverrides(MapperConfig<?> config) {
return null;
}
@Override
public JsonFormat.Value findPropertyFormat(MapperConfig<?> config, Class<?> baseType) {
JsonFormat.Value v0 = config.getDefaultPropertyFormat(baseType);
AnnotationIntrospector intr = config.getAnnotationIntrospector();
if ((intr == null) || (_member == null)) {
return v0;
}
JsonFormat.Value v = intr.findFormat(config, _member);
if (v == null) {
return v0;
}
return v0.withOverrides(v);
}
@Override
public JsonInclude.Value findPropertyInclusion(MapperConfig<?> config, Class<?> baseType)
{
JsonInclude.Value v0 = config.getDefaultInclusion(baseType, _type.getRawClass());
AnnotationIntrospector intr = config.getAnnotationIntrospector();
if ((intr == null) || (_member == null)) {
return v0;
}
JsonInclude.Value v = intr.findPropertyInclusion(config, _member);
if (v == null) {
return v0;
}
return v0.withOverrides(v);
}
@Override
public List<PropertyName> findAliases(MapperConfig<?> config) {
// 26-Feb-2017, tatu: Do we really need to allow actual definition?
// For now, let's not.
return Collections.emptyList();
}
@Override public String getName() { return _name.getSimpleName(); }
@Override public PropertyName getFullName() { return _name; }
@Override public JavaType getType() { return _type; }
@Override public PropertyName getWrapperName() { return _wrapperName; }
@Override public boolean isRequired() { return _metadata.isRequired(); }
@Override public PropertyMetadata getMetadata() { return _metadata; }
@Override public AnnotatedMember getMember() { return _member; }
@Override
public boolean isVirtual() { return false; }
/**
* Implementation of this method throws
* {@link UnsupportedOperationException}, since instances of this
* implementation should not be used as part of actual structure
* visited. Rather, other implementations should handle it.
*/
@Override
public void depositSchemaProperty(JsonObjectFormatVisitor objectVisitor,
SerializationContext provider) {
throw new UnsupportedOperationException("Instances of "+getClass().getName()+" should not get visited");
}
}
/**
* Alternative "Null" implementation that can be used in cases where a non-null
* {@link BeanProperty} is needed
*/
public static | Std |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecGroupTableAggregate.java | {
"start": 3054,
"end": 9128
} | class ____ extends ExecNodeBase<RowData>
implements StreamExecNode<RowData>, SingleTransformationTranslator<RowData> {
private static final Logger LOG = LoggerFactory.getLogger(StreamExecGroupTableAggregate.class);
private static final String GROUP_TABLE_AGGREGATE_TRANSFORMATION = "group-table-aggregate";
private final int[] grouping;
private final AggregateCall[] aggCalls;
/** Each element indicates whether the corresponding agg call needs `retract` method. */
private final boolean[] aggCallNeedRetractions;
/** Whether this node will generate UPDATE_BEFORE messages. */
private final boolean generateUpdateBefore;
/** Whether this node consumes retraction messages. */
private final boolean needRetraction;
public StreamExecGroupTableAggregate(
ReadableConfig tableConfig,
int[] grouping,
AggregateCall[] aggCalls,
boolean[] aggCallNeedRetractions,
boolean generateUpdateBefore,
boolean needRetraction,
InputProperty inputProperty,
RowType outputType,
String description) {
super(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecGroupTableAggregate.class),
ExecNodeContext.newPersistedConfig(
StreamExecGroupTableAggregate.class, tableConfig),
Collections.singletonList(inputProperty),
outputType,
description);
Preconditions.checkArgument(aggCalls.length == aggCallNeedRetractions.length);
this.grouping = grouping;
this.aggCalls = aggCalls;
this.aggCallNeedRetractions = aggCallNeedRetractions;
this.generateUpdateBefore = generateUpdateBefore;
this.needRetraction = needRetraction;
}
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
if (grouping.length > 0 && config.getStateRetentionTime() < 0) {
LOG.warn(
"No state retention interval configured for a query which accumulates state. "
+ "Please provide a query configuration with valid retention interval to prevent excessive "
+ "state size. You may specify a retention time of 0 to not clean up the state.");
}
final ExecEdge inputEdge = getInputEdges().get(0);
final Transformation<RowData> inputTransform =
(Transformation<RowData>) inputEdge.translateToPlan(planner);
final RowType inputRowType = (RowType) inputEdge.getOutputType();
final AggsHandlerCodeGenerator generator =
new AggsHandlerCodeGenerator(
new CodeGeneratorContext(
config, planner.getFlinkContext().getClassLoader()),
planner.createRelBuilder(),
JavaScalaConversionUtil.toScala(inputRowType.getChildren()),
// TODO: heap state backend do not copy key currently,
// we have to copy input field
// TODO: copy is not need when state backend is rocksdb,
// improve this in future
// TODO: but other operators do not copy this input field.....
true)
.needAccumulate();
if (needRetraction) {
generator.needRetract();
}
final AggregateInfoList aggInfoList =
AggregateUtil.transformToStreamAggregateInfoList(
planner.getTypeFactory(),
inputRowType,
JavaScalaConversionUtil.toScala(Arrays.asList(aggCalls)),
aggCallNeedRetractions,
needRetraction,
true, // isStateBackendDataViews
true); // needDistinctInfo
final GeneratedTableAggsHandleFunction aggsHandler =
generator.generateTableAggsHandler("GroupTableAggHandler", aggInfoList);
final LogicalType[] accTypes =
Arrays.stream(aggInfoList.getAccTypes())
.map(LogicalTypeDataTypeConverter::fromDataTypeToLogicalType)
.toArray(LogicalType[]::new);
final int inputCountIndex = aggInfoList.getIndexOfCountStar();
final GroupTableAggFunction aggFunction =
new GroupTableAggFunction(
aggsHandler,
accTypes,
inputCountIndex,
generateUpdateBefore,
generator.isIncrementalUpdate(),
config.getStateRetentionTime());
final OneInputStreamOperator<RowData, RowData> operator =
new KeyedProcessOperator<>(aggFunction);
// partitioned aggregation
final OneInputTransformation<RowData, RowData> transform =
ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationMeta(GROUP_TABLE_AGGREGATE_TRANSFORMATION, config),
operator,
InternalTypeInfo.of(getOutputType()),
inputTransform.getParallelism(),
false);
// set KeyType and Selector for state
final RowDataKeySelector selector =
KeySelectorUtil.getRowDataSelector(
planner.getFlinkContext().getClassLoader(),
grouping,
InternalTypeInfo.of(inputRowType));
transform.setStateKeySelector(selector);
transform.setStateKeyType(selector.getProducedType());
return transform;
}
}
| StreamExecGroupTableAggregate |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/query/internals/InternalQueryResultUtil.java | {
"start": 997,
"end": 1125
} | class ____ support operations the Kafka Streams framework needs to
* perform on {@link QueryResult}s.
*/
@Unstable
public final | to |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java | {
"start": 9093,
"end": 9571
} | interface ____ {
static boolean match(Task task, String expectedId) {
if (task instanceof TaskMatcher) {
if (Strings.isAllOrWildcard(expectedId)) {
return true;
}
String expectedDescription = MlTasks.DATA_FRAME_ANALYTICS_TASK_ID_PREFIX + expectedId;
return expectedDescription.equals(task.getDescription());
}
return false;
}
}
}
| TaskMatcher |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1148/Issue1148Mapper.java | {
"start": 369,
"end": 1817
} | interface ____ {
Issue1148Mapper INSTANCE = Mappers.getMapper( Issue1148Mapper.class );
@Mappings({
@Mapping(target = "sender.nestedClient.id", source = "senderId"),
@Mapping(target = "recipient.nestedClient.id", source = "recipientId"),
@Mapping(target = "client.nestedClient.id", source = "sameLevel.client.id"),
@Mapping(target = "client2.nestedClient.id", source = "sameLevel2.client.id"),
@Mapping(target = "nested.id", source = "level.client.id"),
@Mapping(target = "nested2.id", source = "level2.client.id"),
@Mapping(target = "id", source = "nestedDto.id"),
@Mapping(target = "id2", source = "nestedDto2.id")
})
Entity toEntity(Entity.Dto dto);
@Mappings({
@Mapping(target = "sender.nestedClient.id", source = "dto2.senderId"),
@Mapping(target = "recipient.nestedClient.id", source = "dto1.recipientId"),
@Mapping(target = "client.nestedClient.id", source = "dto1.sameLevel.client.id"),
@Mapping(target = "client2.nestedClient.id", source = "dto2.sameLevel2.client.id"),
@Mapping(target = "nested.id", source = "dto1.level.client.id"),
@Mapping(target = "nested2.id", source = "dto2.level2.client.id"),
@Mapping(target = "id", source = "dto1.nestedDto.id"),
@Mapping(target = "id2", source = "dto2.nestedDto2.id")
})
Entity toEntity(Entity.Dto dto1, Entity.Dto dto2);
}
| Issue1148Mapper |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/enum_/EnumTest.java | {
"start": 316,
"end": 1409
} | enum ____ {
Big, Medium, Small
}
public void test_enum() throws Exception {
Assert.assertEquals("0", JSON.toJSONStringZ(Type.Big, SerializeConfig.getGlobalInstance())); // 0
Assert.assertEquals("1", JSON.toJSONStringZ(Type.Medium, SerializeConfig.getGlobalInstance())); // 1
Assert.assertEquals("2", JSON.toJSONStringZ(Type.Small, SerializeConfig.getGlobalInstance())); // 2
Assert.assertEquals("\"Big\"", JSON.toJSONString(Type.Big, SerializerFeature.WriteEnumUsingToString)); // "Big"
Assert.assertEquals("\"Medium\"", JSON.toJSONString(Type.Medium, SerializerFeature.WriteEnumUsingToString)); // "Medium"
Assert.assertEquals("\"Small\"", JSON.toJSONString(Type.Small, SerializerFeature.WriteEnumUsingToString)); // "Small"
Assert.assertEquals("'Small'", JSON.toJSONString(Type.Small, SerializerFeature.UseSingleQuotes)); // "Small"
}
public void test_empty() throws Exception {
Model model = JSON.parseObject("{\"type\":\"\"}", Model.class);
assertNull(model.type);
}
public static | Type |
java | apache__camel | components/camel-test/camel-test-spring-junit5/src/main/java/org/apache/camel/test/spring/junit5/CamelSpringTestSupport.java | {
"start": 2413,
"end": 2614
} | class ____ classic Spring application such as standalone, web applications. Do <tt>not</tt> use this class
* for Spring Boot testing, instead use <code>@CamelSpringBootTest</code>.
*/
public abstract | for |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/foreignkeyjoin/SubscriptionWrapperSerde.java | {
"start": 2355,
"end": 7102
} | class ____<KLeft>
implements Serializer<SubscriptionWrapper<KLeft>>, WrappingNullableSerializer<SubscriptionWrapper<KLeft>, KLeft, Void> {
private final Supplier<String> primaryKeySerializationPseudoTopicSupplier;
private String primaryKeySerializationPseudoTopic = null;
private Serializer<KLeft> primaryKeySerializer;
private boolean upgradeFromV0 = false;
SubscriptionWrapperSerializer(final Supplier<String> primaryKeySerializationPseudoTopicSupplier,
final Serializer<KLeft> primaryKeySerializer) {
this.primaryKeySerializationPseudoTopicSupplier = primaryKeySerializationPseudoTopicSupplier;
this.primaryKeySerializer = primaryKeySerializer;
}
@SuppressWarnings({"unchecked", "resource"})
@Override
public void setIfUnset(final SerdeGetter getter) {
if (primaryKeySerializer == null) {
primaryKeySerializer = (Serializer<KLeft>) getter.keySerde().serializer();
}
}
@Override
public void configure(final Map<String, ?> configs, final boolean isKey) {
this.upgradeFromV0 = upgradeFromV0(configs);
}
private static boolean upgradeFromV0(final Map<String, ?> configs) {
final Object upgradeFrom = configs.get(StreamsConfig.UPGRADE_FROM_CONFIG);
if (upgradeFrom == null) {
return false;
}
switch (UpgradeFromValues.fromString((String) upgradeFrom)) {
case UPGRADE_FROM_24:
case UPGRADE_FROM_25:
case UPGRADE_FROM_26:
case UPGRADE_FROM_27:
case UPGRADE_FROM_28:
case UPGRADE_FROM_30:
case UPGRADE_FROM_31:
case UPGRADE_FROM_32:
case UPGRADE_FROM_33:
// there is no need to add new versions here
return true;
default:
return false;
}
}
@Override
public byte[] serialize(final String ignored, final SubscriptionWrapper<KLeft> data) {
//{1-bit-isHashNull}{7-bits-version}{1-byte-instruction}{Optional-16-byte-Hash}{PK-serialized}{4-bytes-primaryPartition}
if (data.version() < 0) {
throw new UnsupportedVersionException("SubscriptionWrapper version cannot be negative");
}
final int version = data.version();
if (upgradeFromV0 || version == 0) {
return serializeV0(data);
} else if (version == 1) {
return serializeV1(data);
} else {
throw new UnsupportedVersionException("Unsupported SubscriptionWrapper version " + data.version());
}
}
private byte[] serializePrimaryKey(final SubscriptionWrapper<KLeft> data) {
if (primaryKeySerializationPseudoTopic == null) {
primaryKeySerializationPseudoTopic = primaryKeySerializationPseudoTopicSupplier.get();
}
return primaryKeySerializer.serialize(
primaryKeySerializationPseudoTopic,
data.primaryKey()
);
}
private ByteBuffer serializeCommon(final SubscriptionWrapper<KLeft> data, final byte version, final int extraLength) {
final byte[] primaryKeySerializedData = serializePrimaryKey(data);
final ByteBuffer buf;
int dataLength = 2 + primaryKeySerializedData.length + extraLength;
if (data.hash() != null) {
dataLength += 2 * Long.BYTES;
buf = ByteBuffer.allocate(dataLength);
buf.put(version);
} else {
//Don't store hash as it's null.
buf = ByteBuffer.allocate(dataLength);
buf.put((byte) (version | (byte) 0x80));
}
buf.put(data.instruction().value());
final long[] elem = data.hash();
if (data.hash() != null) {
buf.putLong(elem[0]);
buf.putLong(elem[1]);
}
buf.put(primaryKeySerializedData);
return buf;
}
private byte[] serializeV0(final SubscriptionWrapper<KLeft> data) {
return serializeCommon(data, (byte) 0, 0).array();
}
private byte[] serializeV1(final SubscriptionWrapper<KLeft> data) {
final ByteBuffer buf = serializeCommon(data, data.version(), Integer.BYTES);
buf.putInt(data.primaryPartition());
return buf.array();
}
}
private static | SubscriptionWrapperSerializer |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/parser/deserializer/EnumCreatorDeserializer.java | {
"start": 273,
"end": 838
} | class ____ implements ObjectDeserializer {
private final Method creator;
private final Class paramType;
public EnumCreatorDeserializer(Method creator) {
this.creator = creator;
paramType = creator.getParameterTypes()[0];
}
public <T> T deserialze(DefaultJSONParser parser, Type type, Object fieldName) {
Object arg = parser.parseObject(paramType);
try {
return (T) creator.invoke(null, arg);
} catch (IllegalAccessException e) {
throw new JSONException("parse | EnumCreatorDeserializer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/sqm/mutation/BasicMutationQueryTests.java | {
"start": 927,
"end": 3243
} | class ____ {
@Test
void basicHqlDeleteTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createMutationQuery( "delete Contact" ).executeUpdate();
} );
}
@Test
void basicNativeDeleteTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createNativeMutationQuery( "delete from contacts" ).executeUpdate();
} );
}
@Test
void basicNamedHqlDeleteTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createNamedMutationQuery( "valid-hql" ).executeUpdate();
} );
}
@Test
void basicNamedNativeDeleteTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createNamedMutationQuery( "valid-native" ).executeUpdate();
} );
}
@Test
@ExpectedException( IllegalMutationQueryException.class )
void basicNonMutationQueryTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createMutationQuery( "select c from Contact c" );
} );
}
@Test
@ExpectedException( IllegalMutationQueryException.class )
void basicInvalidNamedHqlDeleteTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createNamedMutationQuery( "invalid-hql" );
} );
}
@Test
@ExpectedException( IllegalMutationQueryException.class )
void basicInvalidNamedNativeDeleteTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createNamedMutationQuery( "invalid-native" ).executeUpdate();
} );
}
@Test
@ExpectedException( IllegalMutationQueryException.class )
void basicUnequivocallyInvalidNamedNativeDeleteTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createNamedMutationQuery( "invalid-native-result" );
} );
}
@Entity( name = "SillyEntity" )
@Table( name = "SillyEntity" )
@NamedQuery(
name = "valid-hql",
query = "delete Contact"
)
@NamedQuery(
name = "invalid-hql",
query = "select c from Contact c"
)
@NamedNativeQuery(
name = "valid-native",
query = "delete from contacts"
)
@NamedNativeQuery(
name = "invalid-native",
query = "select * from contacts"
)
@NamedNativeQuery(
name = "invalid-native-result",
query = "select * from contacts",
resultClass = BasicMutationQueryTests.SillyEntity.class
)
public static | BasicMutationQueryTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/FullSnapshotAsyncWriter.java | {
"start": 1805,
"end": 10991
} | class ____<K>
implements SnapshotStrategy.SnapshotResultSupplier<KeyedStateHandle> {
/** Supplier for the stream into which we write the snapshot. */
@Nonnull
private final SupplierWithException<CheckpointStreamWithResultProvider, Exception>
checkpointStreamSupplier;
@Nonnull private final FullSnapshotResources<K> snapshotResources;
@Nonnull private final SnapshotType snapshotType;
public FullSnapshotAsyncWriter(
@Nonnull SnapshotType snapshotType,
@Nonnull
SupplierWithException<CheckpointStreamWithResultProvider, Exception>
checkpointStreamSupplier,
@Nonnull FullSnapshotResources<K> snapshotResources) {
this.checkpointStreamSupplier = checkpointStreamSupplier;
this.snapshotResources = snapshotResources;
this.snapshotType = snapshotType;
}
@Override
public SnapshotResult<KeyedStateHandle> get(CloseableRegistry snapshotCloseableRegistry)
throws Exception {
final KeyGroupRangeOffsets keyGroupRangeOffsets =
new KeyGroupRangeOffsets(snapshotResources.getKeyGroupRange());
final CheckpointStreamWithResultProvider checkpointStreamWithResultProvider =
checkpointStreamSupplier.get();
snapshotCloseableRegistry.registerCloseable(checkpointStreamWithResultProvider);
writeSnapshotToOutputStream(checkpointStreamWithResultProvider, keyGroupRangeOffsets);
if (snapshotCloseableRegistry.unregisterCloseable(checkpointStreamWithResultProvider)) {
final CheckpointStreamWithResultProvider.KeyedStateHandleFactory stateHandleFactory;
if (snapshotType.isSavepoint()) {
stateHandleFactory = KeyGroupsSavepointStateHandle::new;
} else {
stateHandleFactory = KeyGroupsStateHandle::new;
}
return CheckpointStreamWithResultProvider.toKeyedStateHandleSnapshotResult(
checkpointStreamWithResultProvider.closeAndFinalizeCheckpointStreamResult(),
keyGroupRangeOffsets,
stateHandleFactory);
} else {
throw new IOException("Stream is already unregistered/closed.");
}
}
private void writeSnapshotToOutputStream(
@Nonnull CheckpointStreamWithResultProvider checkpointStreamWithResultProvider,
@Nonnull KeyGroupRangeOffsets keyGroupRangeOffsets)
throws IOException, InterruptedException {
final DataOutputView outputView =
new DataOutputViewStreamWrapper(
checkpointStreamWithResultProvider.getCheckpointOutputStream());
writeKVStateMetaData(outputView);
try (KeyValueStateIterator kvStateIterator = snapshotResources.createKVStateIterator()) {
writeKVStateData(
kvStateIterator, checkpointStreamWithResultProvider, keyGroupRangeOffsets);
}
}
private void writeKVStateMetaData(final DataOutputView outputView) throws IOException {
KeyedBackendSerializationProxy<K> serializationProxy =
new KeyedBackendSerializationProxy<>(
// TODO: this code assumes that writing a serializer is threadsafe, we
// should support to
// get a serialized form already at state registration time in the
// future
snapshotResources.getKeySerializer(),
snapshotResources.getMetaInfoSnapshots(),
!Objects.equals(
UncompressedStreamCompressionDecorator.INSTANCE,
snapshotResources.getStreamCompressionDecorator()));
serializationProxy.write(outputView);
}
private void writeKVStateData(
final KeyValueStateIterator mergeIterator,
final CheckpointStreamWithResultProvider checkpointStreamWithResultProvider,
final KeyGroupRangeOffsets keyGroupRangeOffsets)
throws IOException, InterruptedException {
byte[] previousKey = null;
byte[] previousValue = null;
DataOutputView kgOutView = null;
OutputStream kgOutStream = null;
CheckpointStateOutputStream checkpointOutputStream =
checkpointStreamWithResultProvider.getCheckpointOutputStream();
try {
// preamble: setup with first key-group as our lookahead
if (mergeIterator.isValid()) {
// begin first key-group by recording the offset
keyGroupRangeOffsets.setKeyGroupOffset(
mergeIterator.keyGroup(), checkpointOutputStream.getPos());
// write the k/v-state id as metadata
kgOutStream =
snapshotResources
.getStreamCompressionDecorator()
.decorateWithCompression(checkpointOutputStream);
kgOutView = new DataOutputViewStreamWrapper(kgOutStream);
// TODO this could be aware of keyGroupPrefixBytes and write only one byte
// if possible
kgOutView.writeShort(mergeIterator.kvStateId());
previousKey = mergeIterator.key();
previousValue = mergeIterator.value();
mergeIterator.next();
}
// main loop: write k/v pairs ordered by (key-group, kv-state), thereby tracking
// key-group offsets.
while (mergeIterator.isValid()) {
assert (!hasMetaDataFollowsFlag(previousKey));
// set signal in first key byte that meta data will follow in the stream
// after this k/v pair
if (mergeIterator.isNewKeyGroup() || mergeIterator.isNewKeyValueState()) {
// be cooperative and check for interruption from time to time in the
// hot loop
checkInterrupted();
setMetaDataFollowsFlagInKey(previousKey);
}
writeKeyValuePair(previousKey, previousValue, kgOutView);
// write meta data if we have to
if (mergeIterator.isNewKeyGroup()) {
// TODO this could be aware of keyGroupPrefixBytes and write only one
// byte if possible
kgOutView.writeShort(END_OF_KEY_GROUP_MARK);
// this will just close the outer stream
kgOutStream.close();
// begin new key-group
keyGroupRangeOffsets.setKeyGroupOffset(
mergeIterator.keyGroup(), checkpointOutputStream.getPos());
// write the kev-state
// TODO this could be aware of keyGroupPrefixBytes and write only one
// byte if possible
kgOutStream =
snapshotResources
.getStreamCompressionDecorator()
.decorateWithCompression(checkpointOutputStream);
kgOutView = new DataOutputViewStreamWrapper(kgOutStream);
kgOutView.writeShort(mergeIterator.kvStateId());
} else if (mergeIterator.isNewKeyValueState()) {
// write the k/v-state
// TODO this could be aware of keyGroupPrefixBytes and write only one
// byte if possible
kgOutView.writeShort(mergeIterator.kvStateId());
}
// request next k/v pair
previousKey = mergeIterator.key();
previousValue = mergeIterator.value();
mergeIterator.next();
}
// epilogue: write last key-group
if (previousKey != null) {
assert (!hasMetaDataFollowsFlag(previousKey));
setMetaDataFollowsFlagInKey(previousKey);
writeKeyValuePair(previousKey, previousValue, kgOutView);
// TODO this could be aware of keyGroupPrefixBytes and write only one byte if
// possible
kgOutView.writeShort(END_OF_KEY_GROUP_MARK);
// this will just close the outer stream
kgOutStream.close();
kgOutStream = null;
}
} finally {
// this will just close the outer stream
IOUtils.closeQuietly(kgOutStream);
}
}
private void writeKeyValuePair(byte[] key, byte[] value, DataOutputView out)
throws IOException {
BytePrimitiveArraySerializer.INSTANCE.serialize(key, out);
BytePrimitiveArraySerializer.INSTANCE.serialize(value, out);
}
private void checkInterrupted() throws InterruptedException {
if (Thread.currentThread().isInterrupted()) {
throw new InterruptedException("RocksDB snapshot interrupted.");
}
}
}
| FullSnapshotAsyncWriter |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/convert/support/MapToMapConverter.java | {
"start": 4582,
"end": 4887
} | class ____ {
private final @Nullable Object key;
private final @Nullable Object value;
public MapEntry(@Nullable Object key, @Nullable Object value) {
this.key = key;
this.value = value;
}
public void addToMap(Map<Object, Object> map) {
map.put(this.key, this.value);
}
}
}
| MapEntry |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStoragePolicyPermissionSettings.java | {
"start": 1698,
"end": 5898
} | class ____ {
private static final short REPL = 1;
private static final int SIZE = 128;
private static Configuration conf;
private static MiniDFSCluster cluster;
private static DistributedFileSystem fs;
private static BlockStoragePolicySuite suite;
private static BlockStoragePolicy cold;
private static UserGroupInformation nonAdmin;
private static UserGroupInformation admin;
@BeforeAll
public static void clusterSetUp() throws IOException {
conf = new HdfsConfiguration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(REPL).build();
cluster.waitActive();
fs = cluster.getFileSystem();
suite = BlockStoragePolicySuite.createDefaultSuite();
cold = suite.getPolicy("COLD");
nonAdmin = UserGroupInformation.createUserForTesting(
"user1", new String[] {"test"});
admin = UserGroupInformation.createUserForTesting("user2",
new String[]{"supergroup"});
}
@AfterAll
public static void clusterShutdown() throws IOException {
if (fs != null) {
fs.close();
fs = null;
}
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
private void setStoragePolicyPermissions(boolean isStoragePolicyEnabled,
boolean isStoragePolicySuperuserOnly)
throws ReflectiveOperationException {
ReflectionUtils.setFinalField(
FSNamesystem.class, cluster.getNamesystem(),
"isStoragePolicyEnabled", isStoragePolicyEnabled);
ReflectionUtils.setFinalField(
FSNamesystem.class, cluster.getNamesystem(),
"isStoragePolicySuperuserOnly", isStoragePolicySuperuserOnly);
}
@Test
public void testStoragePolicyPermissionDefault() throws Exception {
Path foo = new Path("/foo");
DFSTestUtil.createFile(fs, foo, SIZE, REPL, 0);
setStoragePolicyPermissions(true, false);
// Test default user fails
final FileSystem fileSystemNonAdmin =
DFSTestUtil.getFileSystemAs(nonAdmin, conf);
LambdaTestUtils.intercept(AccessControlException.class,
"Permission denied: user=user1",
"Only super user can set storage policy.",
() -> fileSystemNonAdmin.setStoragePolicy(foo, cold.getName()));
// widen privilege
fs.setPermission(foo, new FsPermission("777"));
assertNotEquals(fs.getStoragePolicy(foo), cold);
LambdaTestUtils.eval(
() -> fileSystemNonAdmin.setStoragePolicy(foo, cold.getName()));
assertEquals(fs.getStoragePolicy(foo), cold);
}
@Test
public void testStoragePolicyPermissionAdmins() throws Exception {
Path foo = new Path("/foo");
DFSTestUtil.createFile(fs, foo, SIZE, REPL, 0);
fs.setPermission(foo, new FsPermission("777"));
// Test only super can set storage policies
setStoragePolicyPermissions(true, true);
final FileSystem fileSystemNonAdmin =
DFSTestUtil.getFileSystemAs(nonAdmin, conf);
LambdaTestUtils.intercept(AccessControlException.class,
"Access denied for user user1. Superuser privilege is required",
"Only super user can set storage policy.",
() -> fileSystemNonAdmin.setStoragePolicy(foo, cold.getName()));
final FileSystem fileSystemAdmin =
DFSTestUtil.getFileSystemAs(admin, conf);
assertNotEquals(fs.getStoragePolicy(foo), cold);
LambdaTestUtils.eval(
() -> fileSystemAdmin.setStoragePolicy(foo, cold.getName()));
assertEquals(fs.getStoragePolicy(foo), cold);
}
@Test
public void testStoragePolicyPermissionDisabled() throws Exception {
Path foo = new Path("/foo");
DFSTestUtil.createFile(fs, foo, SIZE, REPL, 0);
fs.setPermission(foo, new FsPermission("777"));
setStoragePolicyPermissions(false, false);
final FileSystem fileSystemAdmin =
DFSTestUtil.getFileSystemAs(admin, conf);
LambdaTestUtils.intercept(IOException.class,
"Failed to set storage policy " +
"since dfs.storage.policy.enabled is set to false.",
"Storage policy settings are disabled.",
() -> fileSystemAdmin.setStoragePolicy(foo, cold.getName()));
assertEquals(suite.getDefaultPolicy(), fs.getStoragePolicy(foo));
}
}
| TestStoragePolicyPermissionSettings |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/records/RecordJsonValue3063Test.java | {
"start": 346,
"end": 1043
} | class ____ extends DatabindTestUtil
{
// [databind#3063]
record GetLocations3063(@JsonValue Map<String, String> nameToLocation)
{
@JsonCreator
public GetLocations3063(Map<String, String> nameToLocation)
{
this.nameToLocation = nameToLocation;
}
}
private final ObjectMapper MAPPER = newJsonMapper();
// [databind#3063]
@Test
public void testRecordWithJsonValue3063() throws Exception
{
Map<String, String> locations = Collections.singletonMap("a", "locationA");
String json = MAPPER.writeValueAsString(new GetLocations3063(locations));
assertNotNull(json);
}
}
| RecordJsonValue3063Test |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/join/stream/multijoin/StreamingTwoWayNonEquiJoinOperatorTest.java | {
"start": 2312,
"end": 12103
} | class ____ extends StreamingMultiJoinOperatorTestBase {
private static final GeneratedJoinCondition EqualIdAndGreaterAmountCondition =
createAndCondition(
createJoinCondition(1, 0), // equi-join on user_id
createFieldLongGreaterThanCondition(
1, 1) // non-equi: users.amount > orders.amount
);
private static final List<GeneratedJoinCondition> customJoinConditions =
Arrays.asList(null, EqualIdAndGreaterAmountCondition);
private static final Map<Integer, List<ConditionAttributeRef>> customAttributeMap =
new HashMap<>();
static {
customAttributeMap.put(1, Collections.singletonList(new ConditionAttributeRef(0, 0, 1, 0)));
}
public StreamingTwoWayNonEquiJoinOperatorTest(StateBackendMode stateBackendMode) {
super(
stateBackendMode,
2, // numInputs
List.of(FlinkJoinType.INNER, FlinkJoinType.INNER), // joinTypes
customJoinConditions,
customAttributeMap,
false // isFullOuterJoin
);
}
/**
* SELECT u.*, o.* FROM Users u INNER JOIN Orders o ON u.user_id = o.user_id AND u.amount >
* o.amount.
*/
@TestTemplate
void testInnerJoinWithNonEquiCondition() throws Exception {
/* -------- Basic Insertions and Matches ----------- */
// User with amount 100
insertUser("1", 100L, "Gus");
emitsNothing();
// Order with amount 50 (100 > 50 -> match)
insertOrder("1", 50L, "order_1");
emits(INSERT, r("1", 100L, "Gus", "1", 50L, "order_1"));
// Order with amount 150 (100 > 150 -> no match)
insertOrder("1", 150L, "order_2");
emitsNothing();
// Order with amount 100 (100 > 100 -> no match)
insertOrder("1", 100L, "order_3");
emitsNothing();
/* -------- User Delete/Re-insert with Different Amount ----------- */
// Delete the original user
deleteUser("1", 100L, "Gus");
emits(DELETE, r("1", 100L, "Gus", "1", 50L, "order_1"));
// Re-insert user with a larger amount
insertUser("1", 200L, "Gus Updated");
// Should join with order_1 (200 > 50), order_2 (200 > 150), order_3 (200 > 100)
emits(
INSERT, r("1", 200L, "Gus Updated", "1", 50L, "order_1"),
INSERT, r("1", 200L, "Gus Updated", "1", 100L, "order_3"),
INSERT, r("1", 200L, "Gus Updated", "1", 150L, "order_2"));
// Delete order_1
deleteOrder("1", 50L, "order_1");
emits(DELETE, r("1", 200L, "Gus Updated", "1", 50L, "order_1"));
}
/**
* -- Test inner join with non-equi condition (u.amount > o.amount) focusing on user updates.
*
* <p>SQL: SELECT u.*, o.* FROM Users u INNER JOIN Orders o ON u.user_id = o.user_id AND
* u.amount > o.amount
*
* <p>Schema: Users(user_id PRIMARY KEY, amount, name) Orders(user_id, order_id PRIMARY KEY,
* amount)
*/
@TestTemplate
void testInnerJoinWithNonEquiConditionUserUpdates() throws Exception {
/* -------- Setup Initial User and Order ----------- */
// User with amount 100
insertUser("1", 100L, "Gus");
emitsNothing();
/* -------- User Update ----------- */
// Update User: amount from 100 to 40 (40 > 50 -> no match)
updateBeforeUser("1", 100L, "Gus");
emitsNothing(); // No new match
updateAfterUser("1", 200L, "Gus Updated");
emitsNothing();
/* -------- Insert order (creates match) ----------- */
// Insert another order that will also match
insertOrder("1", 150L, "order_1"); // 200 > 150
emits(INSERT, r("1", 200L, "Gus Updated", "1", 150L, "order_1"));
/* -------- User Update: Amount Increase and decreases (creates and removes Match) ----------- */
updateBeforeUser("1", 200L, "Gus Updated");
emits(UPDATE_BEFORE, r("1", 200L, "Gus Updated", "1", 150L, "order_1"));
updateAfterUser("1", 200L, "Gus Updated Again");
emits(UPDATE_AFTER, r("1", 200L, "Gus Updated Again", "1", 150L, "order_1"));
updateBeforeUser("1", 200L, "Gus Updated Again");
emits(UPDATE_BEFORE, r("1", 200L, "Gus Updated Again", "1", 150L, "order_1"));
updateAfterUser("1", 50L, "Gus Updated Again to 50");
emitsNothing();
updateBeforeUser("1", 50L, "Gus Updated Again to 50");
emitsNothing();
updateAfterUser("1", 200L, "Gus Updated Again");
emits(UPDATE_AFTER, r("1", 200L, "Gus Updated Again", "1", 150L, "order_1"));
/* -------- Insert Additional Order and Test Subsequent User Update ----------- */
// Insert another order that will also match
insertOrder("1", 150L, "order_2"); // 200 > 150
emits(INSERT, r("1", 200L, "Gus Updated Again", "1", 150L, "order_2"));
// Update User: amount from 200 to 100.
// (100 > 50 -> match with order_1)
// (100 > 150 -> no match with order_2)
updateBeforeUser("1", 200L, "Gus Updated Again");
emits(
UPDATE_BEFORE, r("1", 200L, "Gus Updated Again", "1", 150L, "order_1"),
UPDATE_BEFORE, r("1", 200L, "Gus Updated Again", "1", 150L, "order_2"));
updateAfterUser("1", 500L, "Gus Final");
emits(
UPDATE_AFTER, r("1", 500L, "Gus Final", "1", 150L, "order_1"),
UPDATE_AFTER, r("1", 500L, "Gus Final", "1", 150L, "order_2"));
}
/**
* -- Test inner join with non-equi condition (u.amount > o.amount) focusing on order updates.
*
* <p>SQL: SELECT u.*, o.* FROM Users u INNER JOIN Orders o ON u.user_id = o.user_id AND
* u.amount > o.amount
*
* <p>Schema: Users(user_id PRIMARY KEY, amount, name) Orders(user_id, order_id PRIMARY KEY,
* amount)
*/
@TestTemplate
void testInnerJoinWithNonEquiConditionOrderUpdates() throws Exception {
/* -------- Setup Initial User and Non-Matching Order ----------- */
// User with amount 100
insertUser("1", 100L, "Gus");
emitsNothing();
// Order with amount 150 (100 > 150 -> no match)
insertOrder("1", 150L, "order_1");
emitsNothing();
/* -------- Order Update: Amount Decrease (Creates Match) ----------- */
// Update Order: amount from 150 to 50 (100 > 50 -> match)
updateBeforeOrder("1", 150L, "order_1");
emitsNothing();
updateAfterOrder("1", 50L, "order_1");
emits(RowKind.UPDATE_AFTER, r("1", 100L, "Gus", "1", 50L, "order_1"));
/* -------- Order Update: Amount Increase (Breaks Match) ----------- */
// Update Order: amount from 50 to 100 (100 > 100 -> no match)
updateBeforeOrder("1", 50L, "order_1");
emits(RowKind.UPDATE_BEFORE, r("1", 100L, "Gus", "1", 50L, "order_1"));
updateAfterOrder("1", 100L, "order_1");
emitsNothing(); // No new match
/* -------- Insert New User and Test Subsequent Order Update ----------- */
// Insert another user who will match the current order (amount 100)
insertUser("1", 200L, "Bob"); // 200 > 100 -> match
emits(INSERT, r("1", 200L, "Bob", "1", 100L, "order_1"));
// Update Order: amount from 100 to 250.
updateBeforeOrder("1", 100L, "order_1");
emits(UPDATE_BEFORE, r("1", 200L, "Bob", "1", 100L, "order_1"));
updateAfterOrder("1", 250L, "order_1_too_high");
emitsNothing();
}
// Override input types to include an amount field
@Override
protected RowType createInputTypeInfo(int inputIndex) {
if (inputIndex == 0) { // Users: user_id (VARCHAR), amount (BIGINT), name (VARCHAR)
return RowType.of(
new LogicalType[] {
VarCharType.STRING_TYPE, new BigIntType(), VarCharType.STRING_TYPE
},
new String[] {"user_id_0", "amount_0", "name_0"});
} else { // Orders: order_user_id (VARCHAR), amount (BIGINT), order_id (VARCHAR)
return RowType.of(
new LogicalType[] {
VarCharType.STRING_TYPE, new BigIntType(), VarCharType.STRING_TYPE
},
new String[] {"user_id_1", "amount_1", "order_id_1"});
}
}
// Override unique key type (assuming amount is not part of UK)
@Override
protected InternalTypeInfo<RowData> createUniqueKeyType(int inputIndex) {
if (inputIndex == 0) { // Users: user_id
return InternalTypeInfo.of(
RowType.of(
new LogicalType[] {VarCharType.STRING_TYPE},
new String[] {"user_id_0"}));
} else { // Orders: order_id
return InternalTypeInfo.of(
RowType.of(
new LogicalType[] {VarCharType.STRING_TYPE},
new String[] {"order_id_1"}));
}
}
// Override key selector for unique key (field 0 for Users, field 2 for Orders)
@Override
protected RowDataKeySelector createKeySelector(int inputIndex) {
return HandwrittenSelectorUtil.getRowDataSelector(
new int[] {inputIndex == 0 ? 0 : 2}, // UK is user_id[0] or order_id[2]
inputTypeInfos.get(inputIndex).getChildren().toArray(new LogicalType[0]));
}
/**
* Implements the non-equi join condition: inputs[indexToCompare].amount > inputs[index].amount.
* Assumes amount is at field index 1.
*/
private static | StreamingTwoWayNonEquiJoinOperatorTest |
java | google__error-prone | core/src/main/java/com/google/errorprone/refaster/UTemplater.java | {
"start": 16053,
"end": 36486
} | class ____ be invoked with "
+ "an explicit type parameter; for example, 'Refaster.<T>isInstance(o)'.");
}
return Iterables.getOnlyElement(tree.getTypeArguments());
}
static <T, U extends Unifiable<? super T>> boolean anyMatch(
U toUnify, T target, Unifier unifier) {
return toUnify.unify(target, unifier).findFirst().isPresent();
}
@Override
public UExpression visitMethodInvocation(MethodInvocationTree tree, Void v) {
if (anyMatch(ANY_OF, tree.getMethodSelect(), new Unifier(context))) {
return UAnyOf.create(templateExpressions(tree.getArguments()));
} else if (anyMatch(IS_INSTANCE, tree.getMethodSelect(), new Unifier(context))) {
return UInstanceOf.create(
template(Iterables.getOnlyElement(tree.getArguments())),
templateType(getSingleExplicitTypeArgument(tree)));
} else if (anyMatch(CLAZZ, tree.getMethodSelect(), new Unifier(context))) {
Tree typeArg = getSingleExplicitTypeArgument(tree);
return UMemberSelect.create(
templateType(typeArg),
"class",
UClassType.create("java.lang.Class", template(((JCTree) typeArg).type)));
} else if (anyMatch(NEW_ARRAY, tree.getMethodSelect(), new Unifier(context))) {
Tree typeArg = getSingleExplicitTypeArgument(tree);
ExpressionTree lengthArg = Iterables.getOnlyElement(tree.getArguments());
return UNewArray.create(templateType(typeArg), ImmutableList.of(template(lengthArg)), null);
} else if (anyMatch(ENUM_VALUE_OF, tree.getMethodSelect(), new Unifier(context))) {
Tree typeArg = getSingleExplicitTypeArgument(tree);
ExpressionTree strArg = Iterables.getOnlyElement(tree.getArguments());
return UMethodInvocation.create(
UMemberSelect.create(
templateType(typeArg),
"valueOf",
UMethodType.create(
template(((JCTree) typeArg).type), UClassType.create("java.lang.String"))),
template(strArg));
} else if (anyMatch(AS_VARARGS, tree.getMethodSelect(), new Unifier(context))) {
ExpressionTree arg = Iterables.getOnlyElement(tree.getArguments());
checkArgument(
hasAnnotation(ASTHelpers.getSymbol(arg), REPEATED_ANNOTATION, new VisitorState(context)));
return template(arg);
}
Map<MethodSymbol, PlaceholderMethod> placeholderMethods =
context.get(RefasterRuleBuilderScanner.PLACEHOLDER_METHODS_KEY);
if (placeholderMethods != null && placeholderMethods.containsKey(ASTHelpers.getSymbol(tree))) {
return UPlaceholderExpression.create(
placeholderMethods.get(ASTHelpers.getSymbol(tree)),
templateExpressions(tree.getArguments()));
} else {
return UMethodInvocation.create(
templateTypeExpressions(tree.getTypeArguments()),
template(tree.getMethodSelect()),
templateExpressions(tree.getArguments()));
}
}
@Override
public UBinary visitBinary(BinaryTree tree, Void v) {
return UBinary.create(
tree.getKind(), template(tree.getLeftOperand()), template(tree.getRightOperand()));
}
@Override
public UAssignOp visitCompoundAssignment(CompoundAssignmentTree tree, Void v) {
return UAssignOp.create(
template(tree.getVariable()), tree.getKind(), template(tree.getExpression()));
}
@Override
public UUnary visitUnary(UnaryTree tree, Void v) {
return UUnary.create(tree.getKind(), template(tree.getExpression()));
}
@Override
public UExpression visitConditionalExpression(ConditionalExpressionTree tree, Void v) {
return UConditional.create(
template(tree.getCondition()),
template(tree.getTrueExpression()),
template(tree.getFalseExpression()));
}
@Override
public UNewArray visitNewArray(NewArrayTree tree, Void v) {
return UNewArray.create(
(UExpression) template(tree.getType()),
templateExpressions(tree.getDimensions()),
templateExpressions(tree.getInitializers()));
}
@Override
public UNewClass visitNewClass(NewClassTree tree, Void v) {
return UNewClass.create(
tree.getEnclosingExpression() == null ? null : template(tree.getEnclosingExpression()),
templateTypeExpressions(tree.getTypeArguments()),
template(tree.getIdentifier()),
templateExpressions(tree.getArguments()),
(tree.getClassBody() == null) ? null : visitClass(tree.getClassBody(), null));
}
@Override
public UClassDecl visitClass(ClassTree tree, Void v) {
ImmutableList.Builder<UMethodDecl> decls = ImmutableList.builder();
for (MethodTree decl : Iterables.filter(tree.getMembers(), MethodTree.class)) {
if (ASTHelpers.isGeneratedConstructor(decl)) {
// skip synthetic constructors
continue;
}
decls.add(visitMethod(decl, null));
}
return UClassDecl.create(decls.build());
}
@Override
public UArrayTypeTree visitArrayType(ArrayTypeTree tree, Void v) {
return UArrayTypeTree.create(templateType(tree.getType()));
}
@Override
public UTypeApply visitParameterizedType(ParameterizedTypeTree tree, Void v) {
return UTypeApply.create(
templateType(tree.getType()), templateTypeExpressions(tree.getTypeArguments()));
}
@Override
public UUnionType visitUnionType(UnionTypeTree tree, Void v) {
return UUnionType.create(templateTypeExpressions(tree.getTypeAlternatives()));
}
@Override
public UWildcard visitWildcard(WildcardTree tree, Void v) {
return UWildcard.create(
tree.getKind(), (tree.getBound() == null) ? null : templateType(tree.getBound()));
}
@Override
public UIntersectionType visitIntersectionType(IntersectionTypeTree tree, Void v) {
return UIntersectionType.create(templateTypeExpressions(tree.getBounds()));
}
@Override
public UTypeParameter visitTypeParameter(TypeParameterTree tree, Void v) {
return UTypeParameter.create(
tree.getName(),
templateTypeExpressions(tree.getBounds()),
cast(templateExpressions(tree.getAnnotations()), UAnnotation.class));
}
@Override
public UTypeCast visitTypeCast(TypeCastTree tree, Void v) {
return UTypeCast.create(templateType(tree.getType()), template(tree.getExpression()));
}
@Override
public ULambda visitLambdaExpression(LambdaExpressionTree tree, Void v) {
return ULambda.create(
((JCLambda) tree).paramKind,
cast(templateStatements(tree.getParameters()), UVariableDecl.class),
(UTree<?>) template(tree.getBody()));
}
@Override
public UMemberReference visitMemberReference(MemberReferenceTree tree, Void v) {
return UMemberReference.create(
tree.getMode(),
template(tree.getQualifierExpression()),
tree.getName(),
(tree.getTypeArguments() == null) ? null : templateExpressions(tree.getTypeArguments()));
}
@Override
public UExpression visitIdentifier(IdentifierTree tree, Void v) {
Symbol sym = ASTHelpers.getSymbol(tree);
if (sym instanceof ClassSymbol classSymbol) {
return UClassIdent.create(classSymbol);
} else if (sym != null && isStatic(sym)) {
return staticMember(sym);
} else if (freeVariables.containsKey(tree.getName().toString())) {
VarSymbol symbol = freeVariables.get(tree.getName().toString());
checkState(symbol == sym);
UExpression ident = UFreeIdent.create(tree.getName());
Matches matches = ASTHelpers.getAnnotation(symbol, Matches.class);
if (matches != null) {
ident = UMatches.create(getValue(matches), /* positive= */ true, ident);
}
NotMatches notMatches = ASTHelpers.getAnnotation(symbol, NotMatches.class);
if (notMatches != null) {
ident = UMatches.create(getValue(notMatches), /* positive= */ false, ident);
}
OfKind hasKind = ASTHelpers.getAnnotation(symbol, OfKind.class);
if (hasKind != null) {
EnumSet<Kind> allowed = EnumSet.copyOf(Arrays.asList(hasKind.value()));
ident = UOfKind.create(ident, ImmutableSet.copyOf(allowed));
}
// @Repeated annotations need to be checked last.
Repeated repeated = ASTHelpers.getAnnotation(symbol, Repeated.class);
if (repeated != null) {
ident = URepeated.create(tree.getName(), ident);
}
return ident;
}
if (sym == null) {
return UTypeVarIdent.create(tree.getName());
}
return switch (sym.getKind()) {
case TYPE_PARAMETER -> UTypeVarIdent.create(tree.getName());
case METHOD -> method(sym);
default -> ULocalVarIdent.create(tree.getName());
};
}
/**
* Returns the {@link Class} instance for the {@link Matcher} associated with the provided {@link
* Matches} annotation. This roundabout solution is recommended and explained by {@link
* Element#getAnnotation(Class)}.
*/
static Class<? extends Matcher<? super ExpressionTree>> getValue(Matches matches) {
String name;
try {
var unused = matches.value();
throw new RuntimeException("unreachable");
} catch (MirroredTypeException e) {
DeclaredType type = (DeclaredType) e.getTypeMirror();
name = ((TypeElement) type.asElement()).getQualifiedName().toString();
}
try {
return asSubclass(Class.forName(name), new TypeToken<Matcher<? super ExpressionTree>>() {});
} catch (ClassNotFoundException | ClassCastException e) {
throw new RuntimeException(e);
}
}
/**
* Returns the {@link Class} instance for the {@link Matcher} associated with the provided {@link
* NotMatches} annotation. This roundabout solution is recommended and explained by {@link
* Element#getAnnotation(Class)}.
*/
static Class<? extends Matcher<? super ExpressionTree>> getValue(NotMatches matches) {
String name;
try {
var unused = matches.value();
throw new RuntimeException("unreachable");
} catch (MirroredTypeException e) {
DeclaredType type = (DeclaredType) e.getTypeMirror();
name = ((TypeElement) type.asElement()).getQualifiedName().toString();
}
try {
return asSubclass(Class.forName(name), new TypeToken<Matcher<? super ExpressionTree>>() {});
} catch (ClassNotFoundException | ClassCastException e) {
throw new RuntimeException(e);
}
}
/**
* Similar to {@link Class#asSubclass(Class)}, but it accepts a {@link TypeToken} so it handles
* generics better.
*/
@SuppressWarnings("unchecked")
private static <T> Class<? extends T> asSubclass(Class<?> klass, TypeToken<T> token) {
if (!token.isSupertypeOf(klass)) {
throw new ClassCastException(klass + " is not assignable to " + token);
}
return (Class<? extends T>) klass;
}
public UStatement template(StatementTree tree) {
return (UStatement) tree.accept(this, null);
}
private @Nullable ImmutableList<UStatement> templateStatements(
@Nullable List<? extends StatementTree> statements) {
if (statements == null) {
return null;
}
ImmutableList.Builder<UStatement> builder = ImmutableList.builder();
for (StatementTree statement : statements) {
builder.add(template(statement));
}
return builder.build();
}
@Override
public UTry visitTry(TryTree tree, Void v) {
@SuppressWarnings({"unchecked", "rawtypes"})
ImmutableList<UTree<?>> resources =
cast(templateTrees(tree.getResources()), (Class<UTree<?>>) (Class) UTree.class);
UBlock block = visitBlock(tree.getBlock(), null);
ImmutableList.Builder<UCatch> catchesBuilder = ImmutableList.builder();
for (CatchTree catchTree : tree.getCatches()) {
catchesBuilder.add(visitCatch(catchTree, null));
}
UBlock finallyBlock =
(tree.getFinallyBlock() == null) ? null : visitBlock(tree.getFinallyBlock(), null);
return UTry.create(resources, block, catchesBuilder.build(), finallyBlock);
}
@Override
public UCatch visitCatch(CatchTree tree, Void v) {
return UCatch.create(
visitVariable(tree.getParameter(), null), visitBlock(tree.getBlock(), null));
}
private @Nullable PlaceholderMethod placeholder(@Nullable ExpressionTree expr) {
Map<MethodSymbol, PlaceholderMethod> placeholderMethods =
context.get(RefasterRuleBuilderScanner.PLACEHOLDER_METHODS_KEY);
return (placeholderMethods != null && expr != null)
? placeholderMethods.get(ASTHelpers.getSymbol(expr))
: null;
}
@Override
public UStatement visitExpressionStatement(ExpressionStatementTree tree, Void v) {
PlaceholderMethod placeholderMethod = placeholder(tree.getExpression());
if (placeholderMethod != null && placeholderMethod.returnType().equals(UPrimitiveType.VOID)) {
MethodInvocationTree invocation = (MethodInvocationTree) tree.getExpression();
return UPlaceholderStatement.create(
placeholderMethod,
templateExpressions(invocation.getArguments()),
ControlFlowVisitor.Result.NEVER_EXITS);
}
return UExpressionStatement.create(template(tree.getExpression()));
}
@Override
public UStatement visitReturn(ReturnTree tree, Void v) {
PlaceholderMethod placeholderMethod = placeholder(tree.getExpression());
if (placeholderMethod != null) {
MethodInvocationTree invocation = (MethodInvocationTree) tree.getExpression();
return UPlaceholderStatement.create(
placeholderMethod,
templateExpressions(invocation.getArguments()),
ControlFlowVisitor.Result.ALWAYS_RETURNS);
}
return UReturn.create((tree.getExpression() == null) ? null : template(tree.getExpression()));
}
@Override
public UWhileLoop visitWhileLoop(WhileLoopTree tree, Void v) {
return UWhileLoop.create(template(tree.getCondition()), template(tree.getStatement()));
}
@Override
public UVariableDecl visitVariable(VariableTree tree, Void v) {
return UVariableDecl.create(
tree.getName(),
templateType(tree.getType()),
(tree.getInitializer() == null) ? null : template(tree.getInitializer()));
}
@Override
public USkip visitEmptyStatement(EmptyStatementTree tree, Void v) {
return USkip.INSTANCE;
}
@Override
public UForLoop visitForLoop(ForLoopTree tree, Void v) {
return UForLoop.create(
templateStatements(tree.getInitializer()),
(tree.getCondition() == null) ? null : template(tree.getCondition()),
cast(templateStatements(tree.getUpdate()), UExpressionStatement.class),
template(tree.getStatement()));
}
@Override
public ULabeledStatement visitLabeledStatement(LabeledStatementTree tree, Void v) {
return ULabeledStatement.create(tree.getLabel(), template(tree.getStatement()));
}
@Override
public UBreak visitBreak(BreakTree tree, Void v) {
return UBreak.create(tree.getLabel());
}
@Override
public UContinue visitContinue(ContinueTree tree, Void v) {
return UContinue.create(tree.getLabel());
}
@Override
public UBlock visitBlock(BlockTree tree, Void v) {
return UBlock.create(templateStatements(tree.getStatements()));
}
@Override
public UThrow visitThrow(ThrowTree tree, Void v) {
return UThrow.create(template(tree.getExpression()));
}
@Override
public UDoWhileLoop visitDoWhileLoop(DoWhileLoopTree tree, Void v) {
return UDoWhileLoop.create(template(tree.getStatement()), template(tree.getCondition()));
}
@Override
public UEnhancedForLoop visitEnhancedForLoop(EnhancedForLoopTree tree, Void v) {
return UEnhancedForLoop.create(
visitVariable(tree.getVariable(), null),
template(tree.getExpression()),
template(tree.getStatement()));
}
@Override
public USynchronized visitSynchronized(SynchronizedTree tree, Void v) {
return USynchronized.create(template(tree.getExpression()), visitBlock(tree.getBlock(), null));
}
@Override
public UIf visitIf(IfTree tree, Void v) {
return UIf.create(
template(tree.getCondition()),
template(tree.getThenStatement()),
(tree.getElseStatement() == null) ? null : template(tree.getElseStatement()));
}
@Override
public UAssert visitAssert(AssertTree tree, Void v) {
return UAssert.create(
template(tree.getCondition()),
(tree.getDetail() == null) ? null : template(tree.getDetail()));
}
@Override
protected UTree<?> defaultAction(Tree tree, Void v) {
throw new IllegalArgumentException(
"Refaster does not currently support syntax " + tree.getClass());
}
public UType template(Type type) {
return type.accept(typeTemplater, null);
}
List<UType> templateTypes(Iterable<? extends Type> types) {
ImmutableList.Builder<UType> builder = ImmutableList.builder();
for (Type ty : types) {
builder.add(template(ty));
}
return builder.build();
}
private final Type.Visitor<UType, Void> typeTemplater =
new Types.SimpleVisitor<UType, Void>() {
private final Map<TypeSymbol, UTypeVar> typeVariables = new HashMap<>();
@Override
public UType visitType(Type type, Void v) {
if (UPrimitiveType.isDeFactoPrimitive(type.getKind())) {
return UPrimitiveType.create(type.getKind());
} else {
throw new IllegalArgumentException(
"Refaster does not currently support syntax " + type.getKind());
}
}
@Override
public UArrayType visitArrayType(ArrayType type, Void v) {
return UArrayType.create(type.getComponentType().accept(this, null));
}
@Override
public UMethodType visitMethodType(MethodType type, Void v) {
return UMethodType.create(
type.getReturnType().accept(this, null), templateTypes(type.getParameterTypes()));
}
@Override
public UType visitClassType(ClassType type, Void v) {
if (type instanceof IntersectionClassType intersectionClassType) {
return UIntersectionClassType.create(
templateTypes(intersectionClassType.getComponents()));
}
return UClassType.create(
type.tsym.getQualifiedName().toString(), templateTypes(type.getTypeArguments()));
}
@Override
public UWildcardType visitWildcardType(WildcardType type, Void v) {
return UWildcardType.create(type.kind, type.type.accept(this, null));
}
@Override
public UTypeVar visitTypeVar(TypeVar type, Void v) {
/*
* In order to handle recursively bounded type variables without a stack overflow, we first
* cache a type var with no bounds, then we template the bounds.
*/
TypeSymbol tsym = type.asElement();
if (typeVariables.containsKey(tsym)) {
return typeVariables.get(tsym);
}
UTypeVar var = UTypeVar.create(tsym.getSimpleName().toString());
typeVariables.put(
tsym, var); // so the type variable can be used recursively in the bounds
var.setLowerBound(type.getLowerBound().accept(this, null));
var.setUpperBound(type.getUpperBound().accept(this, null));
return var;
}
@Override
public UForAll visitForAll(ForAll type, Void v) {
ImmutableList<UTypeVar> vars =
cast(templateTypes(type.getTypeVariables()), UTypeVar.class);
return UForAll.create(vars, type.qtype.accept(this, null));
}
};
@SuppressWarnings("unchecked")
public static ImmutableClassToInstanceMap<Annotation> annotationMap(Symbol symbol) {
ImmutableClassToInstanceMap.Builder<Annotation> builder = ImmutableClassToInstanceMap.builder();
for (Compound compound : symbol.getAnnotationMirrors()) {
String annotationClassName =
classNameFrom((TypeElement) compound.getAnnotationType().asElement());
try {
Class<? extends Annotation> annotationClazz =
Class.forName(annotationClassName).asSubclass(Annotation.class);
builder.put(
(Class) annotationClazz,
AnnotationProxyMaker.generateAnnotation(compound, annotationClazz));
} catch (ClassNotFoundException e) {
String friendlyMessage =
"Tried to instantiate an instance of the annotation "
+ annotationClassName
+ " while processing "
+ symbol.getSimpleName()
+ ", but the annotation | must |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/ParameterizedConstructorInjectionTestMethodScopedExtensionContextNestedTests.java | {
"start": 2114,
"end": 2761
} | class ____ {
final String beanName;
final String foo;
final ApplicationContext context;
ParameterizedConstructorInjectionTestMethodScopedExtensionContextNestedTests(
String beanName, TestInfo testInfo, @Autowired String foo, ApplicationContext context) {
this.context = context;
this.beanName = beanName;
this.foo = foo;
}
@Test
void topLevelTest() {
assertThat(foo).isEqualTo("foo");
if (beanName.equals("foo")) {
assertThat(context.getBean(beanName, String.class)).isEqualTo(beanName);
}
}
@Nested
@SpringJUnitConfig(NestedConfig.class)
| ParameterizedConstructorInjectionTestMethodScopedExtensionContextNestedTests |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/HandlerMethodAnnotationDetectionTests.java | {
"start": 15879,
"end": 15950
} | interface ____ not contain type information.
*/
@Controller
static | does |
java | apache__rocketmq | openmessaging/src/main/java/io/openmessaging/rocketmq/domain/BytesMessageImpl.java | {
"start": 1105,
"end": 3151
} | class ____ implements BytesMessage {
private KeyValue sysHeaders;
private KeyValue userHeaders;
private byte[] body;
public BytesMessageImpl() {
this.sysHeaders = OMS.newKeyValue();
this.userHeaders = OMS.newKeyValue();
}
@Override
public <T> T getBody(Class<T> type) throws OMSMessageFormatException {
if (type == byte[].class) {
return (T)body;
}
throw new OMSMessageFormatException("", "Cannot assign byte[] to " + type.getName());
}
@Override
public BytesMessage setBody(final byte[] body) {
this.body = body;
return this;
}
@Override
public KeyValue sysHeaders() {
return sysHeaders;
}
@Override
public KeyValue userHeaders() {
return userHeaders;
}
@Override
public Message putSysHeaders(String key, int value) {
sysHeaders.put(key, value);
return this;
}
@Override
public Message putSysHeaders(String key, long value) {
sysHeaders.put(key, value);
return this;
}
@Override
public Message putSysHeaders(String key, double value) {
sysHeaders.put(key, value);
return this;
}
@Override
public Message putSysHeaders(String key, String value) {
sysHeaders.put(key, value);
return this;
}
@Override
public Message putUserHeaders(String key, int value) {
userHeaders.put(key, value);
return this;
}
@Override
public Message putUserHeaders(String key, long value) {
userHeaders.put(key, value);
return this;
}
@Override
public Message putUserHeaders(String key, double value) {
userHeaders.put(key, value);
return this;
}
@Override
public Message putUserHeaders(String key, String value) {
userHeaders.put(key, value);
return this;
}
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this);
}
}
| BytesMessageImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/Post.java | {
"start": 505,
"end": 846
} | class ____ extends Comment{
protected List<Comment> comments = new ArrayList<>();
@OneToMany(mappedBy = "post", cascade = CascadeType.ALL , fetch = FetchType.LAZY)
@OrderColumn(name = "idx")
public List<Comment> getComments() {
return comments;
}
public void setComments(List<Comment> comments) {
this.comments = comments;
}
}
| Post |
java | quarkusio__quarkus | extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/DefaultPemKeyStoreTest.java | {
"start": 845,
"end": 2119
} | class ____ {
private static final String configuration = """
quarkus.tls.key-store.pem.foo.cert=target/certs/test-formats.crt
quarkus.tls.key-store.pem.foo.key=target/certs/test-formats.key
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.add(new StringAsset(configuration), "application.properties"));
@Inject
TlsConfigurationRegistry certificates;
@Test
void test() throws KeyStoreException, CertificateParsingException {
TlsConfiguration def = certificates.getDefault().orElseThrow();
assertThat(def.getKeyStoreOptions()).isNotNull();
assertThat(def.getKeyStore()).isNotNull();
// dummy-entry-x is the alias of the certificate in the keystore generated by Vert.x.
X509Certificate certificate = (X509Certificate) def.getKeyStore().getCertificate("dummy-entry-0");
assertThat(certificate).isNotNull();
assertThat(certificate.getSubjectAlternativeNames()).anySatisfy(l -> {
assertThat(l.get(0)).isEqualTo(2);
assertThat(l.get(1)).isEqualTo("localhost");
});
}
}
| DefaultPemKeyStoreTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/ContainerRequestContextTest.java | {
"start": 1660,
"end": 1879
} | class ____ implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext context) throws IOException {
context.setProperty("name", "foo");
}
}
}
| TestFilter |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerPollStrategyTest.java | {
"start": 1389,
"end": 2625
} | class ____ extends ContextTestSupport {
private static int counter;
private static String event = "";
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myPoll", new MyPollStrategy());
return jndi;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(fileUri("?pollStrategy=#myPoll&noop=true&initialDelay=0&delay=10")).convertBodyTo(String.class)
.to("mock:result");
}
};
}
@Test
public void testFirstPollRollbackThenCommit() throws Exception {
template.sendBodyAndHeader(fileUri(), "Hello World", Exchange.FILE_NAME, "hello.txt");
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
assertMockEndpointsSatisfied();
oneExchangeDone.matchesWaitTime();
// give the file consumer a bit of time
Awaitility.await().atMost(Duration.ofSeconds(1)).untilAsserted(() -> assertTrue(event.startsWith("rollbackcommit")));
}
private static | FileConsumerPollStrategyTest |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/server/FormPostServerRedirectStrategy.java | {
"start": 1788,
"end": 4852
} | class ____ implements ServerRedirectStrategy {
private static final String CONTENT_SECURITY_POLICY_HEADER = "Content-Security-Policy";
private static final String REDIRECT_PAGE_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="description" content="">
<meta name="author" content="">
<title>Redirect</title>
</head>
<body>
<form id="redirect-form" method="POST" action="{{action}}">
{{params}}
<noscript>
<p>JavaScript is not enabled for this page.</p>
<button type="submit">Click to continue</button>
</noscript>
</form>
<script nonce="{{nonce}}">
document.getElementById("redirect-form").submit();
</script>
</body>
</html>
""";
private static final String HIDDEN_INPUT_TEMPLATE = """
<input name="{{name}}" type="hidden" value="{{value}}" />
""";
private static final StringKeyGenerator DEFAULT_NONCE_GENERATOR = new Base64StringKeyGenerator(
Base64.getUrlEncoder().withoutPadding(), 96);
@Override
public Mono<Void> sendRedirect(ServerWebExchange exchange, URI location) {
final UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.fromUri(location);
final StringBuilder hiddenInputsHtmlBuilder = new StringBuilder();
for (final Map.Entry<String, List<String>> entry : uriComponentsBuilder.build().getQueryParams().entrySet()) {
final String name = entry.getKey();
for (final String value : entry.getValue()) {
// @formatter:off
final String hiddenInput = HIDDEN_INPUT_TEMPLATE
.replace("{{name}}", HtmlUtils.htmlEscape(name))
.replace("{{value}}", HtmlUtils.htmlEscape(value));
// @formatter:on
hiddenInputsHtmlBuilder.append(hiddenInput.trim());
}
}
// Create the script-src policy directive for the Content-Security-Policy header
final String nonce = DEFAULT_NONCE_GENERATOR.generateKey();
final String policyDirective = "script-src 'nonce-%s'".formatted(nonce);
// @formatter:off
final String html = REDIRECT_PAGE_TEMPLATE
// Clear the query string as we don't want that to be part of the form action URL
.replace("{{action}}", HtmlUtils.htmlEscape(uriComponentsBuilder.replaceQuery(null).build().toUriString()))
.replace("{{params}}", hiddenInputsHtmlBuilder.toString())
.replace("{{nonce}}", HtmlUtils.htmlEscape(nonce));
// @formatter:on
final ServerHttpResponse response = exchange.getResponse();
response.setStatusCode(HttpStatus.OK);
response.getHeaders().setContentType(MediaType.TEXT_HTML);
response.getHeaders().set(CONTENT_SECURITY_POLICY_HEADER, policyDirective);
final DataBufferFactory bufferFactory = response.bufferFactory();
final DataBuffer buffer = bufferFactory.wrap(html.getBytes(StandardCharsets.UTF_8));
return response.writeWith(Mono.just(buffer)).doOnError((error) -> DataBufferUtils.release(buffer));
}
}
| FormPostServerRedirectStrategy |
java | apache__camel | components/camel-telemetry/src/main/java/org/apache/camel/telemetry/propagation/CamelJMSHeadersSpanContextPropagationExtractor.java | {
"start": 1096,
"end": 2569
} | class ____ implements SpanContextPropagationExtractor {
private final Map<String, Object> map = new CaseInsensitiveMap();
public CamelJMSHeadersSpanContextPropagationExtractor(final Map<String, Object> map) {
map.entrySet().stream().filter(e -> e.getValue() instanceof String || e.getValue() instanceof byte[]).forEach(e -> {
if (e.getValue() instanceof byte[] bytes) {
this.map.put(decodeDash(e.getKey()), new String(bytes, StandardCharsets.UTF_8));
} else {
this.map.put(decodeDash(e.getKey()), e.getValue());
}
});
}
@Override
public Iterator<Map.Entry<String, Object>> iterator() {
return map.entrySet().iterator();
}
@Override
public Object get(String key) {
return this.map.get(key);
}
@Override
public Set<String> keys() {
return map.keySet();
}
/**
* Decode dashes (encoded in {@link CamelJMSHeadersSpanContextPropagationInjector} Dash encoding and decoding is
* required by JMS. This is implemented here rather than specifically to JMS so that other Camel messaging endpoints
* can take part in traces where the peer is using JMS.
*/
private String decodeDash(String key) {
if (key != null) {
return key.replace(CamelJMSHeadersSpanContextPropagationInjector.JMS_DASH, "-");
}
return key;
}
}
| CamelJMSHeadersSpanContextPropagationExtractor |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/vectors/diskbbq/ES920DiskBBQBFloat16VectorsFormatTests.java | {
"start": 1606,
"end": 4516
} | class ____ extends BaseBFloat16KnnVectorsFormatTestCase {
static {
LogConfigurator.loadLog4jPlugins();
LogConfigurator.configureESLogging(); // native access requires logging to be initialized
}
private KnnVectorsFormat format;
@Before
@Override
public void setUp() throws Exception {
if (rarely()) {
format = new ES920DiskBBQVectorsFormat(
random().nextInt(2 * MIN_VECTORS_PER_CLUSTER, ES920DiskBBQVectorsFormat.MAX_VECTORS_PER_CLUSTER),
random().nextInt(8, ES920DiskBBQVectorsFormat.MAX_CENTROIDS_PER_PARENT_CLUSTER),
DenseVectorFieldMapper.ElementType.BFLOAT16,
random().nextBoolean()
);
} else {
// run with low numbers to force many clusters with parents
format = new ES920DiskBBQVectorsFormat(
random().nextInt(MIN_VECTORS_PER_CLUSTER, 2 * MIN_VECTORS_PER_CLUSTER),
random().nextInt(MIN_CENTROIDS_PER_PARENT_CLUSTER, 8),
DenseVectorFieldMapper.ElementType.BFLOAT16,
random().nextBoolean()
);
}
super.setUp();
}
@Override
protected Codec getCodec() {
return TestUtil.alwaysKnnVectorsFormat(format);
}
@Override
protected VectorSimilarityFunction randomSimilarity() {
return RandomPicks.randomFrom(
random(),
List.of(
VectorSimilarityFunction.DOT_PRODUCT,
VectorSimilarityFunction.EUCLIDEAN,
VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT
)
);
}
@Override
public void testSearchWithVisitedLimit() {
throw new AssumptionViolatedException("ivf doesn't enforce visitation limit");
}
@Override
public void testAdvance() throws Exception {
// TODO re-enable with hierarchical IVF, clustering as it is is flaky
}
@Override
protected void assertOffHeapByteSize(LeafReader r, String fieldName) throws IOException {
var fieldInfo = r.getFieldInfos().fieldInfo(fieldName);
if (r instanceof CodecReader codecReader) {
KnnVectorsReader knnVectorsReader = codecReader.getVectorReader();
if (knnVectorsReader instanceof PerFieldKnnVectorsFormat.FieldsReader fieldsReader) {
knnVectorsReader = fieldsReader.getFieldReader(fieldName);
}
var offHeap = knnVectorsReader.getOffHeapByteSize(fieldInfo);
long totalByteSize = offHeap.values().stream().mapToLong(Long::longValue).sum();
assertThat(offHeap.size(), equalTo(3));
assertThat(totalByteSize, equalTo(offHeap.values().stream().mapToLong(Long::longValue).sum()));
} else {
throw new AssertionError("unexpected:" + r.getClass());
}
}
}
| ES920DiskBBQBFloat16VectorsFormatTests |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java | {
"start": 44139,
"end": 44202
} | enum ____ {
ENUM_1, ENUM_2
}
@AvroDoc("DocTest | DocTestEnum |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/immutable/CaptionConverter.java | {
"start": 247,
"end": 531
} | class ____ implements AttributeConverter<Caption, String> {
@Override
public String convertToDatabaseColumn(Caption attribute) {
return attribute.getText();
}
@Override
public Caption convertToEntityAttribute(String dbData) {
return new Caption( dbData );
}
}
| CaptionConverter |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java | {
"start": 1574,
"end": 1668
} | class ____ some convenience methods for defining and retrieving such settings.
*/
public | provides |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/filters/AfterFilterTest2.java | {
"start": 257,
"end": 1104
} | class ____ extends TestCase {
public void test_afterFilter() throws Exception {
AfterFilter filter = new AfterFilter() {
@Override
public void writeAfter(Object object) {
this.writeKeyValue("id", 123);
}
};
Assert.assertEquals("{\"id\":123}",JSON.toJSONString( new VO(), filter));
}
public void test_afterFilter2() throws Exception {
AfterFilter filter = new AfterFilter() {
@Override
public void writeAfter(Object object) {
this.writeKeyValue("id", 123);
this.writeKeyValue("name", "wenshao");
}
};
Assert.assertEquals("{\"id\":123,\"name\":\"wenshao\"}", JSON.toJSONString(new VO(), filter));
}
public static | AfterFilterTest2 |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/resource/LiteWebJarsResourceResolverTests.java | {
"start": 1471,
"end": 5484
} | class ____ {
private static final Duration TIMEOUT = Duration.ofSeconds(1);
private List<Resource> locations = List.of(new ClassPathResource("/META-INF/resources/webjars"));
// for this to work, an actual WebJar must be on the test classpath
private LiteWebJarsResourceResolver resolver = new LiteWebJarsResourceResolver();
private ResourceResolverChain chain = mock();
private ServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get(""));
@Test
void resolveUrlExisting() {
String file = "/foo/2.3/foo.txt";
given(this.chain.resolveUrlPath(file, this.locations)).willReturn(Mono.just(file));
String actual = this.resolver.resolveUrlPath(file, this.locations, this.chain).block(TIMEOUT);
assertThat(actual).isEqualTo(file);
verify(this.chain, times(1)).resolveUrlPath(file, this.locations);
}
@Test
void resolveUrlExistingNotInJarFile() {
String file = "foo/foo.txt";
given(this.chain.resolveUrlPath(file, this.locations)).willReturn(Mono.empty());
String actual = this.resolver.resolveUrlPath(file, this.locations, this.chain).block(TIMEOUT);
assertThat(actual).isNull();
verify(this.chain, times(1)).resolveUrlPath(file, this.locations);
verify(this.chain, never()).resolveUrlPath("foo/2.3/foo.txt", this.locations);
}
@Test
void resolveUrlWebJarResource() {
String file = "underscorejs/underscore.js";
String expected = "underscorejs/1.8.3/underscore.js";
given(this.chain.resolveUrlPath(file, this.locations)).willReturn(Mono.empty());
given(this.chain.resolveUrlPath(expected, this.locations)).willReturn(Mono.just(expected));
String actual = this.resolver.resolveUrlPath(file, this.locations, this.chain).block(TIMEOUT);
assertThat(actual).isEqualTo(expected);
verify(this.chain, times(1)).resolveUrlPath(file, this.locations);
verify(this.chain, times(1)).resolveUrlPath(expected, this.locations);
}
@Test
void resolveUrlWebJarResourceNotFound() {
String file = "something/something.js";
given(this.chain.resolveUrlPath(file, this.locations)).willReturn(Mono.empty());
String actual = this.resolver.resolveUrlPath(file, this.locations, this.chain).block(TIMEOUT);
assertThat(actual).isNull();
verify(this.chain, times(1)).resolveUrlPath(file, this.locations);
verify(this.chain, never()).resolveUrlPath(null, this.locations);
}
@Test
void resolveResourceExisting() {
Resource expected = mock();
String file = "foo/2.3/foo.txt";
given(this.chain.resolveResource(this.exchange, file, this.locations)).willReturn(Mono.just(expected));
Resource actual = this.resolver
.resolveResource(this.exchange, file, this.locations, this.chain)
.block(TIMEOUT);
assertThat(actual).isEqualTo(expected);
verify(this.chain, times(1)).resolveResource(this.exchange, file, this.locations);
}
@Test
void resolveResourceNotFound() {
String file = "something/something.js";
given(this.chain.resolveResource(this.exchange, file, this.locations)).willReturn(Mono.empty());
Resource actual = this.resolver
.resolveResource(this.exchange, file, this.locations, this.chain)
.block(TIMEOUT);
assertThat(actual).isNull();
verify(this.chain, times(1)).resolveResource(this.exchange, file, this.locations);
verify(this.chain, never()).resolveResource(this.exchange, null, this.locations);
}
@Test
void resolveResourceWebJar() {
String file = "underscorejs/underscore.js";
given(this.chain.resolveResource(this.exchange, file, this.locations)).willReturn(Mono.empty());
Resource expected = mock();
String expectedPath = "underscorejs/1.8.3/underscore.js";
given(this.chain.resolveResource(this.exchange, expectedPath, this.locations))
.willReturn(Mono.just(expected));
Resource actual = this.resolver
.resolveResource(this.exchange, file, this.locations, this.chain)
.block(TIMEOUT);
assertThat(actual).isEqualTo(expected);
verify(this.chain, times(1)).resolveResource(this.exchange, file, this.locations);
}
}
| LiteWebJarsResourceResolverTests |
java | apache__camel | components/camel-aws/camel-aws2-athena/src/generated/java/org/apache/camel/component/aws2/athena/Athena2EndpointUriFactory.java | {
"start": 521,
"end": 3424
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":label";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(33);
props.add("accessKey");
props.add("amazonAthenaClient");
props.add("clientRequestToken");
props.add("database");
props.add("delay");
props.add("encryptionOption");
props.add("includeTrace");
props.add("initialDelay");
props.add("kmsKey");
props.add("label");
props.add("lazyStartProducer");
props.add("maxAttempts");
props.add("maxResults");
props.add("nextToken");
props.add("operation");
props.add("outputLocation");
props.add("outputType");
props.add("profileCredentialsName");
props.add("proxyHost");
props.add("proxyPort");
props.add("proxyProtocol");
props.add("queryExecutionId");
props.add("queryString");
props.add("region");
props.add("resetWaitTimeoutOnRetry");
props.add("retry");
props.add("secretKey");
props.add("sessionToken");
props.add("useDefaultCredentialsProvider");
props.add("useProfileCredentialsProvider");
props.add("useSessionCredentials");
props.add("waitTimeout");
props.add("workGroup");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(3);
secretProps.add("accessKey");
secretProps.add("secretKey");
secretProps.add("sessionToken");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "aws2-athena".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "label", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| Athena2EndpointUriFactory |
java | netty__netty | common/src/main/java/io/netty/util/concurrent/DefaultEventExecutorChooserFactory.java | {
"start": 884,
"end": 1548
} | class ____ implements EventExecutorChooserFactory {
public static final DefaultEventExecutorChooserFactory INSTANCE = new DefaultEventExecutorChooserFactory();
private DefaultEventExecutorChooserFactory() { }
@Override
public EventExecutorChooser newChooser(EventExecutor[] executors) {
if (isPowerOfTwo(executors.length)) {
return new PowerOfTwoEventExecutorChooser(executors);
} else {
return new GenericEventExecutorChooser(executors);
}
}
private static boolean isPowerOfTwo(int val) {
return (val & -val) == val;
}
private static final | DefaultEventExecutorChooserFactory |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/PushExpressionsToFieldLoad.java | {
"start": 2322,
"end": 7171
} | class ____ {
private final Map<Attribute.IdIgnoringWrapper, Attribute> addedAttrs = new HashMap<>();
private final LocalLogicalOptimizerContext context;
private final LogicalPlan plan;
/**
* The primary indices, lazily initialized.
*/
private List<EsRelation> primaries;
private boolean planWasTransformed = false;
private Rule(LocalLogicalOptimizerContext context, LogicalPlan plan) {
this.context = context;
this.plan = plan;
}
private LogicalPlan doRule(LogicalPlan plan) {
planWasTransformed = false;
if (plan instanceof Eval || plan instanceof Filter || plan instanceof Aggregate) {
LogicalPlan transformedPlan = plan.transformExpressionsOnly(Expression.class, e -> {
if (e instanceof BlockLoaderExpression ble) {
return transformExpression(e, ble);
}
return e;
});
// TODO rebuild everything one time rather than after each find.
if (planWasTransformed == false) {
return plan;
}
List<Attribute> previousAttrs = transformedPlan.output();
// Transforms EsRelation to extract the new attributes
List<Attribute> addedAttrsList = addedAttrs.values().stream().toList();
transformedPlan = transformedPlan.transformDown(EsRelation.class, esRelation -> {
AttributeSet updatedOutput = esRelation.outputSet().combine(AttributeSet.of(addedAttrsList));
return esRelation.withAttributes(updatedOutput.stream().toList());
});
// Transforms Projects so the new attribute is not discarded
transformedPlan = transformedPlan.transformDown(EsqlProject.class, esProject -> {
List<NamedExpression> projections = new ArrayList<>(esProject.projections());
projections.addAll(addedAttrsList);
return esProject.withProjections(projections);
});
return new EsqlProject(Source.EMPTY, transformedPlan, previousAttrs);
}
return plan;
}
private Expression transformExpression(Expression e, BlockLoaderExpression ble) {
BlockLoaderExpression.PushedBlockLoaderExpression fuse = ble.tryPushToFieldLoading(context.searchStats());
if (fuse == null) {
return e;
}
if (anyPrimaryContains(fuse.field()) == false) {
return e;
}
var preference = context.configuration().pragmas().fieldExtractPreference();
if (context.searchStats().supportsLoaderConfig(fuse.field().fieldName(), fuse.config(), preference) == false) {
return e;
}
planWasTransformed = true;
return replaceFieldsForFieldTransformations(e, fuse);
}
private Expression replaceFieldsForFieldTransformations(Expression e, BlockLoaderExpression.PushedBlockLoaderExpression fuse) {
// Change the expression to a reference of the pushed down function on the field
FunctionEsField functionEsField = new FunctionEsField(fuse.field().field(), e.dataType(), fuse.config());
var name = rawTemporaryName(fuse.field().name(), fuse.config().function().toString(), String.valueOf(fuse.config().hashCode()));
var newFunctionAttr = new FieldAttribute(
fuse.field().source(),
fuse.field().parentName(),
fuse.field().qualifier(),
name,
functionEsField,
fuse.field().nullable(),
new NameId(),
true
);
Attribute.IdIgnoringWrapper key = newFunctionAttr.ignoreId();
if (addedAttrs.containsKey(key)) {
return addedAttrs.get(key);
}
addedAttrs.put(key, newFunctionAttr);
return newFunctionAttr;
}
private List<EsRelation> primaries() {
if (primaries == null) {
primaries = new ArrayList<>(2);
plan.forEachUp(EsRelation.class, r -> {
if (r.indexMode() != IndexMode.LOOKUP) {
primaries.add(r);
}
});
}
return primaries;
}
private boolean anyPrimaryContains(FieldAttribute attr) {
for (EsRelation primary : primaries()) {
if (primary.outputSet().contains(attr)) {
return true;
}
}
return false;
}
}
}
| Rule |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TryWithResourcesVariableTest.java | {
"start": 1203,
"end": 1528
} | class ____ {
void f(AutoCloseable r1) {
try (AutoCloseable r2 = r1) {
System.err.println(r2);
} catch (Exception e) {
}
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/cluster/ProfilingFileMessageParameters.java | {
"start": 1359,
"end": 1850
} | class ____ extends MessageParameters {
public final ProfilingFileNamePathParameter profilingFileNamePathParameter =
new ProfilingFileNamePathParameter();
@Override
public Collection<MessagePathParameter<?>> getPathParameters() {
return Collections.singleton(profilingFileNamePathParameter);
}
@Override
public Collection<MessageQueryParameter<?>> getQueryParameters() {
return Collections.emptyList();
}
}
| ProfilingFileMessageParameters |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/odps/ast/OdpsAddUserStatement.java | {
"start": 917,
"end": 1564
} | class ____ extends SQLStatementImpl {
private SQLName user;
public OdpsAddUserStatement() {
super(DbType.odps);
}
@Override
protected void accept0(SQLASTVisitor visitor) {
accept0((OdpsASTVisitor) visitor);
}
public void accept0(OdpsASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, user);
}
visitor.endVisit(this);
}
public SQLName getUser() {
return user;
}
public void setUser(SQLName user) {
if (user != null) {
user.setParent(this);
}
this.user = user;
}
}
| OdpsAddUserStatement |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/StartupShutdownSpringCamelContextFactoryBeanOrderTest.java | {
"start": 1296,
"end": 1823
} | class ____ {
@Bean
CamelContextFactoryBean camelContext() {
final CamelContextFactoryBean factory = new CamelContextFactoryBean();
factory.setId("camelContext");
return factory;
}
}
@Override
ConfigurableApplicationContext createContext() {
final ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(
CamelContextConfiguration.class, Beans.class);
return context;
}
}
| CamelContextConfiguration |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/AnalyzeTableUtil.java | {
"start": 2805,
"end": 20713
} | class ____ {
private AnalyzeTableUtil() {}
public static TableResultInternal analyzeTable(
TableEnvironmentImpl tableEnv, AnalyzeTableOperation operation)
throws TableNotExistException, PartitionNotExistException, TablePartitionedException {
List<Column> columns = operation.getColumns();
// the TableIdentifier has be validated before
Catalog catalog =
tableEnv.getCatalogManager()
.getCatalog(operation.getTableIdentifier().getCatalogName())
.orElseThrow(() -> new TableException("This should not happen."));
ObjectPath objectPath = operation.getTableIdentifier().toObjectPath();
if (operation.getPartitionSpecs().isPresent()) {
List<CatalogPartitionSpec> targetPartitions = operation.getPartitionSpecs().get();
if (targetPartitions.isEmpty()) {
return TableResultImpl.TABLE_RESULT_OK;
}
String statSql =
generateAnalyzeSqlForMultiParts(
operation.getTableIdentifier(), targetPartitions, columns);
int partitionCount = targetPartitions.size();
Map<Integer, StatisticsWrapper> results =
executeSqlAndGenerateStatisticsForMultiParts(
tableEnv, columns, statSql, partitionCount);
for (int i = 0; i < partitionCount; ++i) {
StatisticsWrapper result = results.get(i);
CatalogPartitionSpec partitionSpec = targetPartitions.get(i);
catalog.alterPartitionStatistics(
objectPath, partitionSpec, result.tableStat, false);
CatalogColumnStatistics newColumnStat = result.columnStat;
if (newColumnStat != null) {
CatalogColumnStatistics oldColumnStat =
catalog.getPartitionColumnStatistics(objectPath, partitionSpec);
// merge stats
CatalogColumnStatistics mergedColumnStatistics =
mergeColumnStatistics(oldColumnStat, newColumnStat);
catalog.alterPartitionColumnStatistics(
objectPath, partitionSpec, mergedColumnStatistics, false);
}
}
} else {
String statSql = generateAnalyzeSql(operation.getTableIdentifier(), null, columns, -1);
StatisticsWrapper result = executeSqlAndGenerateStatistics(tableEnv, columns, statSql);
catalog.alterTableStatistics(objectPath, result.tableStat, false);
CatalogColumnStatistics newColumnStat = result.columnStat;
if (newColumnStat != null) {
CatalogColumnStatistics oldColumnStat =
catalog.getTableColumnStatistics(objectPath);
// merge stats.
CatalogColumnStatistics mergedColumnStatistics =
mergeColumnStatistics(oldColumnStat, newColumnStat);
catalog.alterTableColumnStatistics(objectPath, mergedColumnStatistics, false);
}
}
return TableResultImpl.TABLE_RESULT_OK;
}
private static CatalogColumnStatistics mergeColumnStatistics(
CatalogColumnStatistics oldColumnStatistics,
CatalogColumnStatistics newColumnStatistics) {
CatalogColumnStatistics columnStatistics = oldColumnStatistics.copy();
columnStatistics
.getColumnStatisticsData()
.putAll(newColumnStatistics.getColumnStatisticsData());
return columnStatistics;
}
private static StatisticsWrapper executeSqlAndGenerateStatistics(
TableEnvironmentImpl tableEnv, List<Column> columns, String statSql) {
TableResult tableResult = tableEnv.executeSql(statSql);
List<Row> result = CollectionUtil.iteratorToList(tableResult.collect());
Preconditions.checkArgument(result.size() == 1);
Row row = result.get(0);
CatalogTableStatistics tableStat = convertToTableStatistics(row);
CatalogColumnStatistics columnStat = null;
if (!columns.isEmpty()) {
columnStat = convertToColumnStatistics(row, columns);
}
return new StatisticsWrapper(tableStat, columnStat);
}
private static Map<Integer, StatisticsWrapper> executeSqlAndGenerateStatisticsForMultiParts(
TableEnvironmentImpl tableEnv,
List<Column> columns,
String statSql,
int partitionCount) {
TableResult tableResult = tableEnv.executeSql(statSql);
List<Row> result = CollectionUtil.iteratorToList(tableResult.collect());
Preconditions.checkArgument(result.size() == partitionCount);
Map<Integer, StatisticsWrapper> map = new HashMap<>();
for (Row row : result) {
CatalogTableStatistics tableStat = convertToTableStatistics(row);
CatalogColumnStatistics columnStat = null;
if (!columns.isEmpty()) {
columnStat = convertToColumnStatistics(row, columns);
}
int index = row.getFieldAs(getPartitionIdxColumn());
map.put(index, new StatisticsWrapper(tableStat, columnStat));
}
return map;
}
private static String generateAnalyzeSqlForMultiParts(
ObjectIdentifier tableIdentifier,
List<CatalogPartitionSpec> partitionSpecs,
List<Column> columns) {
List<String> sqlList = new ArrayList<>();
for (int i = 0; i < partitionSpecs.size(); ++i) {
sqlList.add(generateAnalyzeSql(tableIdentifier, partitionSpecs.get(i), columns, i));
}
return String.join("\n UNION ALL \n", sqlList);
}
private static String generateAnalyzeSql(
ObjectIdentifier tableIdentifier,
@Nullable CatalogPartitionSpec partitionSpec,
List<Column> columns,
int index) {
String partitionFilter;
if (partitionSpec != null) {
partitionFilter =
" WHERE "
+ partitionSpec.getPartitionSpec().entrySet().stream()
.map(e -> e.getKey() + "=" + e.getValue())
.collect(Collectors.joining(" AND "));
} else {
partitionFilter = "";
}
final String columnStatsSelects;
if (columns.isEmpty()) {
columnStatsSelects = "";
} else {
columnStatsSelects = ", " + getColumnStatsSelects(columns);
}
return String.format(
"SELECT COUNT(1) AS %s %s %s FROM %s %s",
getRowCountColumn(),
columnStatsSelects,
index >= 0 ? String.format(", %s as %s", index, getPartitionIdxColumn()) : "",
tableIdentifier,
partitionFilter);
}
private static String getColumnStatsSelects(List<Column> columns) {
return columns.stream()
.flatMap(
f -> {
String c = f.getName();
List<String> columnStatSelect = new ArrayList<>();
String computeNullCount =
String.format(
"(COUNT(1) - COUNT(`%s`)) AS %s",
c, getNullCountColumn(c));
columnStatSelect.add(computeNullCount);
String computeNdv =
String.format(
"APPROX_COUNT_DISTINCT(`%s`) AS %s",
c, getNdvColumn(c));
switch (f.getDataType().getLogicalType().getTypeRoot()) {
case BOOLEAN:
columnStatSelect.add(
String.format(
"COUNT(`%s`) FILTER (WHERE `%s` IS TRUE) AS %s",
c, c, getTrueCountColumn(c)));
columnStatSelect.add(
String.format(
"COUNT(`%s`) FILTER (WHERE `%s` IS FALSE) AS %s",
c, c, getFalseCountColumn(c)));
break;
case TINYINT:
case SMALLINT:
case INTEGER:
case FLOAT:
case DATE:
case TIME_WITHOUT_TIME_ZONE:
case BIGINT:
case DOUBLE:
case DECIMAL:
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
case TIMESTAMP_WITHOUT_TIME_ZONE:
columnStatSelect.add(computeNdv);
columnStatSelect.add(
String.format("MAX(`%s`) AS %s", c, getMaxColumn(c)));
columnStatSelect.add(
String.format("MIN(`%s`) AS %s", c, getMinColumn(c)));
break;
case CHAR:
case VARCHAR:
columnStatSelect.add(computeNdv);
columnStatSelect.add(
String.format(
"AVG(CAST(CHAR_LENGTH(`%s`) AS DOUBLE)) AS %s",
c, getAvgLenColumn(c)));
columnStatSelect.add(
String.format(
"MAX(CAST(CHAR_LENGTH(`%s`) AS BIGINT)) AS %s",
c, getMaxLenColumn(c)));
break;
default:
break;
}
return columnStatSelect.stream();
})
.collect(Collectors.joining(", "));
}
private static CatalogTableStatistics convertToTableStatistics(Row row) {
Long rowCount = row.getFieldAs(getRowCountColumn());
return new CatalogTableStatistics(rowCount, -1, -1, -1);
}
private static CatalogColumnStatistics convertToColumnStatistics(
Row row, List<Column> columns) {
Preconditions.checkArgument(!columns.isEmpty());
Map<String, CatalogColumnStatisticsDataBase> columnStatMap = new HashMap<>();
for (Column column : columns) {
CatalogColumnStatisticsDataBase columnStat = convertToColumnStatisticsData(row, column);
if (columnStat != null) {
columnStatMap.put(column.getName(), columnStat);
}
}
return new CatalogColumnStatistics(columnStatMap);
}
private static CatalogColumnStatisticsDataBase convertToColumnStatisticsData(
Row row, Column column) {
String c = column.getName();
Long nullCount = row.getFieldAs(getNullCountColumn(c));
switch (column.getDataType().getLogicalType().getTypeRoot()) {
case BOOLEAN:
Long trueCount = row.getFieldAs(getTrueCountColumn(c));
Long falseCount = row.getFieldAs(getFalseCountColumn(c));
return new CatalogColumnStatisticsDataBoolean(trueCount, falseCount, nullCount);
case TINYINT:
Byte maxByte = row.getFieldAs(getMaxColumn(c));
Byte minByte = row.getFieldAs(getMinColumn(c));
Long ndvByte = row.getFieldAs(getNdvColumn(c));
return new CatalogColumnStatisticsDataLong(
minByte != null ? minByte.longValue() : null,
maxByte != null ? maxByte.longValue() : null,
ndvByte,
nullCount);
case SMALLINT:
Short maxShort = row.getFieldAs(getMaxColumn(c));
Short minShort = row.getFieldAs(getMinColumn(c));
Long ndvShort = row.getFieldAs(getNdvColumn(c));
return new CatalogColumnStatisticsDataLong(
minShort != null ? minShort.longValue() : null,
maxShort != null ? maxShort.longValue() : null,
ndvShort,
nullCount);
case INTEGER:
Integer maxInt = row.getFieldAs(getMaxColumn(c));
Integer minInt = row.getFieldAs(getMinColumn(c));
Long ndvInt = row.getFieldAs(getNdvColumn(c));
return new CatalogColumnStatisticsDataLong(
minInt != null ? minInt.longValue() : null,
maxInt != null ? maxInt.longValue() : null,
ndvInt,
nullCount);
case BIGINT:
Long ndvLong = row.getFieldAs(getNdvColumn(c));
Long maxLong = row.getFieldAs(getMaxColumn(c));
Long minLong = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataLong(minLong, maxLong, ndvLong, nullCount);
case TIMESTAMP_WITHOUT_TIME_ZONE:
Long ndvTs = row.getFieldAs(getNdvColumn(c));
LocalDateTime maxTs = row.getFieldAs(getMaxColumn(c));
LocalDateTime minTs = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataLong(
minTs != null ? minTs.toEpochSecond(ZoneOffset.UTC) : null,
maxTs != null ? maxTs.toEpochSecond(ZoneOffset.UTC) : null,
ndvTs,
nullCount);
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
Long ndvTsLtz = row.getFieldAs(getNdvColumn(c));
Instant maxTsLtz = row.getFieldAs(getMaxColumn(c));
Instant minTsLtz = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataLong(
minTsLtz != null ? minTsLtz.toEpochMilli() : null,
maxTsLtz != null ? maxTsLtz.toEpochMilli() : null,
ndvTsLtz,
nullCount);
case FLOAT:
Long ndvFloat = row.getFieldAs(getNdvColumn(c));
Float maxFloat = row.getFieldAs(getMaxColumn(c));
Float minFloat = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataDouble(
minFloat != null ? minFloat.doubleValue() : null,
maxFloat != null ? maxFloat.doubleValue() : null,
ndvFloat,
nullCount);
case DOUBLE:
Long ndvDouble = row.getFieldAs(getNdvColumn(c));
Double maxDouble = row.getFieldAs(getMaxColumn(c));
Double minDouble = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataDouble(
minDouble, maxDouble, ndvDouble, nullCount);
case DECIMAL:
Long ndvDecimal = row.getFieldAs(getNdvColumn(c));
BigDecimal maxDecimal = row.getFieldAs(getMaxColumn(c));
BigDecimal minDecimal = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataDouble(
minDecimal != null ? minDecimal.doubleValue() : null,
maxDecimal != null ? maxDecimal.doubleValue() : null,
ndvDecimal,
nullCount);
case DATE:
Long ndvDate = row.getFieldAs(getNdvColumn(c));
LocalDate maxDate = row.getFieldAs(getMaxColumn(c));
LocalDate minDate = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataDate(
minDate != null ? new Date(minDate.toEpochDay()) : null,
maxDate != null ? new Date(maxDate.toEpochDay()) : null,
ndvDate,
nullCount);
case TIME_WITHOUT_TIME_ZONE:
Long ndvTime = row.getFieldAs(getNdvColumn(c));
LocalTime maxTime = row.getFieldAs(getMaxColumn(c));
LocalTime minTime = row.getFieldAs(getMinColumn(c));
return new CatalogColumnStatisticsDataLong(
minTime != null ? minTime.toNanoOfDay() : null,
maxTime != null ? maxTime.toNanoOfDay() : null,
ndvTime,
nullCount);
case CHAR:
case VARCHAR:
Long ndvString = row.getFieldAs(getNdvColumn(c));
Double avgLen = row.getFieldAs(getAvgLenColumn(c));
Long maxLen = row.getFieldAs(getMaxLenColumn(c));
return new CatalogColumnStatisticsDataString(maxLen, avgLen, ndvString, nullCount);
case BINARY:
case VARBINARY:
return new CatalogColumnStatisticsDataBinary(null, null, nullCount);
default:
return null;
}
}
private static | AnalyzeTableUtil |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/ReadFrom.java | {
"start": 1170,
"end": 9034
} | class ____ {
/**
* Setting to read from the upstream only.
*/
public static final ReadFrom MASTER = new ReadFromImpl.ReadFromUpstream();
/**
* Setting to read preferred from the upstream and fall back to a replica if the master is not available.
*/
public static final ReadFrom MASTER_PREFERRED = new ReadFromImpl.ReadFromUpstreamPreferred();
/**
* Setting to read from the upstream only.
*
* @since 6.0
*/
public static final ReadFrom UPSTREAM = new ReadFromImpl.ReadFromUpstream();
/**
* Setting to read preferred from the upstream and fall back to a replica if the upstream is not available.
*
* @since 6.0
*/
public static final ReadFrom UPSTREAM_PREFERRED = new ReadFromImpl.ReadFromUpstreamPreferred();
/**
* Setting to read preferred from replica and fall back to upstream if no replica is available.
*
* @since 5.2
*/
public static final ReadFrom REPLICA_PREFERRED = new ReadFromImpl.ReadFromReplicaPreferred();
/**
* Setting to read preferred from replicas and fall back to upstream if no replica is available.
*
* @since 4.4
* @deprecated Renamed to {@link #REPLICA_PREFERRED}.
*/
@Deprecated
public static final ReadFrom SLAVE_PREFERRED = REPLICA_PREFERRED;
/**
* Setting to read from the replica only.
*
* @since 5.2
*/
public static final ReadFrom REPLICA = new ReadFromImpl.ReadFromReplica();
/**
* Setting to read from the replica only.
*
* @deprecated renamed to {@link #REPLICA}.
*/
@Deprecated
public static final ReadFrom SLAVE = REPLICA;
/**
* Setting to read from the node with the lowest latency during topology discovery. Note that latency measurements are
* momentary snapshots that can change in rapid succession. Requires dynamic refresh sources to obtain topologies and
* latencies from all nodes in the cluster.
*
* @since 6.1.7
*/
public static final ReadFrom LOWEST_LATENCY = new ReadFromImpl.ReadFromLowestCommandLatency();
/**
* Setting to read from the node with the lowest latency during topology discovery. Note that latency measurements are
* momentary snapshots that can change in rapid succession. Requires dynamic refresh sources to obtain topologies and
* latencies from all nodes in the cluster.
*
* @deprecated since 6.1.7 as we're renaming this setting to {@link #LOWEST_LATENCY} for more clarity what this setting
* actually represents.
*/
@Deprecated
public static final ReadFrom NEAREST = LOWEST_LATENCY;
/**
* Setting to read from any node.
*
* @since 5.2
*/
public static final ReadFrom ANY = new ReadFromImpl.ReadFromAnyNode();
/**
* Setting to read from any replica node.
*
* @since 6.0.1
*/
public static final ReadFrom ANY_REPLICA = new ReadFromImpl.ReadFromAnyReplica();
/**
* Setting to read from any node in the subnets.
*
* @param cidrNotations CIDR-block notation strings, e.g., "192.168.0.0/16", "2001:db8:abcd:0000::/52". Must not be
* {@code null}.
* @return an instance of {@link ReadFromImpl.ReadFromSubnet}.
* @since 6.1
*/
public static ReadFrom subnet(String... cidrNotations) {
return new ReadFromImpl.ReadFromSubnet(cidrNotations);
}
/**
* Read from any node that has {@link RedisURI} matching with the given pattern.
*
* @param pattern regex pattern, e.g., {@code Pattern.compile(".*region-1.*")}. Must not be {@code null}.
* @return an instance of {@link ReadFromImpl.ReadFromRegex}.
* @since 6.1
*/
public static ReadFrom regex(Pattern pattern) {
return regex(pattern, false);
}
/**
* Read from any node that has {@link RedisURI} matching with the given pattern.
*
* @param pattern regex pattern, e.g., {@code Pattern.compile(".*region-1.*")}. Must not be {@code null}.
* @param orderSensitive {@code true} to attempt reads in the order of hosts returned by {@link ReadFrom#select(Nodes)};
* {@code false} to apply randomization.
* @return an instance of {@link ReadFromImpl.ReadFromRegex}.
* @since 6.1
*/
public static ReadFrom regex(Pattern pattern, boolean orderSensitive) {
return new ReadFromImpl.ReadFromRegex(pattern, orderSensitive);
}
/**
* Chooses the nodes from the matching Redis nodes that match this read selector.
*
* @param nodes set of nodes that are suitable for reading
* @return List of {@link RedisNodeDescription}s that are selected for reading
*/
public abstract List<RedisNodeDescription> select(Nodes nodes);
/**
* Returns whether this {@link ReadFrom} requires ordering of the resulting {@link RedisNodeDescription nodes}.
*
* @return {@code true} if code using {@link ReadFrom} should retain ordering or {@code false} to allow reordering of
* {@link RedisNodeDescription nodes}.
* @since 5.2
*/
protected boolean isOrderSensitive() {
return false;
}
/**
* Retrieve the {@link ReadFrom} preset by name. For complex types like {@code subnet} or {@code regex}, the following
* syntax could be used {@code subnet:192.168.0.0/16,2001:db8:abcd:0000::/52} and {@code regex:.*region-1.*} respectively.
*
* @param name the case-insensitive name of the read from setting
* @return the {@link ReadFrom} preset
* @throws IllegalArgumentException if {@code name} is empty, {@code null} or the {@link ReadFrom} preset is unknown.
*/
public static ReadFrom valueOf(String name) {
if (LettuceStrings.isEmpty(name)) {
throw new IllegalArgumentException("Name must not be empty");
}
int index = name.indexOf(':');
if (index != -1) {
String type = name.substring(0, index);
String value = name.substring(index + 1);
if (LettuceStrings.isEmpty(value)) {
throw new IllegalArgumentException("Value must not be empty for the type '" + type + "'");
}
if (type.equalsIgnoreCase("subnet")) {
return subnet(value.split(","));
}
if (type.equalsIgnoreCase("regex")) {
try {
return regex(Pattern.compile(value));
} catch (PatternSyntaxException ex) {
throw new IllegalArgumentException("Value '" + value + "' is not a valid regular expression", ex);
}
}
}
if (name.equalsIgnoreCase("master")) {
return UPSTREAM;
}
if (name.equalsIgnoreCase("masterPreferred")) {
return UPSTREAM_PREFERRED;
}
if (name.equalsIgnoreCase("upstream")) {
return UPSTREAM;
}
if (name.equalsIgnoreCase("upstreamPreferred")) {
return UPSTREAM_PREFERRED;
}
if (name.equalsIgnoreCase("slave") || name.equalsIgnoreCase("replica")) {
return REPLICA;
}
if (name.equalsIgnoreCase("slavePreferred") || name.equalsIgnoreCase("replicaPreferred")) {
return REPLICA_PREFERRED;
}
if (name.equalsIgnoreCase("nearest") || name.equalsIgnoreCase("lowestLatency")) {
return LOWEST_LATENCY;
}
if (name.equalsIgnoreCase("any")) {
return ANY;
}
if (name.equalsIgnoreCase("anyReplica")) {
return ANY_REPLICA;
}
throw new IllegalArgumentException("ReadFrom " + name + " not supported");
}
/**
* Descriptor of nodes that are available for the current read operation.
*/
public | ReadFrom |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java | {
"start": 1099,
"end": 5965
} | class ____ extends AllocationDecider {
public static final String NAME = "snapshot_in_progress";
/**
* Returns a {@link Decision} whether the given shard routing can be
* re-balanced to the given allocation. The default is
* {@link Decision#ALWAYS}.
*/
@Override
public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
return canMove(shardRouting, allocation);
}
/**
* Returns a {@link Decision} whether the given shard routing can be
* allocated on the given node. The default is {@link Decision#ALWAYS}.
*/
@Override
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return canMove(shardRouting, allocation);
}
@Override
public Decision canForceAllocateDuringReplace(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return canAllocate(shardRouting, node, allocation);
}
private static final Decision YES_NOT_RUNNING = Decision.single(Decision.Type.YES, NAME, "no snapshots are currently running");
private static final Decision YES_NOT_SNAPSHOTTED = Decision.single(Decision.Type.YES, NAME, "the shard is not being snapshotted");
private static Decision canMove(ShardRouting shardRouting, RoutingAllocation allocation) {
if (allocation.isSimulating()) {
return allocation.decision(Decision.YES, NAME, "allocation is always enabled when simulating");
}
if (shardRouting.primary() == false) {
// Only primary shards are snapshotted
return YES_NOT_SNAPSHOTTED;
}
SnapshotsInProgress snapshotsInProgress = SnapshotsInProgress.get(allocation.getClusterState());
if (snapshotsInProgress.isEmpty()) {
// Snapshots are not running
return YES_NOT_RUNNING;
}
if (shardRouting.currentNodeId() == null) {
// Shard is not assigned to a node
return YES_NOT_SNAPSHOTTED;
}
@FixForMultiProject(description = "replace with entriesByRepo(ProjectId), see also ES-12195")
final var entriesByRepoIterable = snapshotsInProgress.entriesByRepo();
for (final var entriesByRepo : entriesByRepoIterable) {
for (final var entry : entriesByRepo) {
if (entry.isClone()) {
// clones do not run on data nodes
continue;
}
if (entry.hasShardsInInitState() == false) {
// this snapshot has no running shard snapshots
// (NB this means we let ABORTED shards move without waiting for them to complete)
continue;
}
final var shardSnapshotStatus = entry.shards().get(shardRouting.shardId());
if (shardSnapshotStatus == null) {
// this snapshot is not snapshotting the shard to allocate
continue;
}
if (shardSnapshotStatus.state().completed()) {
// this shard snapshot is complete
continue;
}
if (Objects.equals(shardRouting.currentNodeId(), shardSnapshotStatus.nodeId()) == false) {
// this shard snapshot is allocated to a different node
continue;
}
if (shardSnapshotStatus.state() == SnapshotsInProgress.ShardState.PAUSED_FOR_NODE_REMOVAL) {
// this shard snapshot is paused pending the removal of its assigned node
final var nodeShutdown = allocation.metadata().nodeShutdowns().get(shardRouting.currentNodeId());
if (nodeShutdown != null && nodeShutdown.getType() != SingleNodeShutdownMetadata.Type.RESTART) {
// NB we check metadata().nodeShutdowns() too because if the node was marked for removal and then that mark was
// removed then the shard can still be PAUSED_FOR_NODE_REMOVAL while there are other shards on the node which
// haven't finished pausing yet. In that case the shard is about to go back into INIT state again, so we should keep
// it where it is.
continue;
}
}
return allocation.decision(
Decision.THROTTLE,
NAME,
"waiting for snapshot [%s] of shard [%s] to complete on node [%s]",
entry.snapshot(),
shardRouting.shardId(),
shardRouting.currentNodeId()
);
}
}
return YES_NOT_SNAPSHOTTED;
}
}
| SnapshotInProgressAllocationDecider |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/pojo/instance/BeatInfoInstanceBuilder.java | {
"start": 1046,
"end": 3006
} | class ____ {
private final InstanceBuilder actualBuilder;
private final Collection<InstanceExtensionHandler> handlers;
private BeatInfoInstanceBuilder() {
this.actualBuilder = InstanceBuilder.newBuilder();
this.handlers = NacosServiceLoader.newServiceInstances(InstanceExtensionHandler.class);
}
public static BeatInfoInstanceBuilder newBuilder() {
return new BeatInfoInstanceBuilder();
}
/**
* Build a new {@link Instance} and chain handled by {@link InstanceExtensionHandler}.
*
* @return new instance
*/
public Instance build() {
Instance result = actualBuilder.build();
for (InstanceExtensionHandler each : handlers) {
each.handleExtensionInfo(result);
}
setInstanceId(result);
return result;
}
public BeatInfoInstanceBuilder setRequest(HttpServletRequest request) {
for (InstanceExtensionHandler each : handlers) {
each.configExtensionInfoFromRequest(request);
}
return this;
}
public BeatInfoInstanceBuilder setServiceName(String serviceName) {
actualBuilder.setServiceName(serviceName);
return this;
}
public BeatInfoInstanceBuilder setBeatInfo(RsInfo beatInfo) {
setAttributesToBuilder(beatInfo);
return this;
}
private void setAttributesToBuilder(RsInfo beatInfo) {
actualBuilder.setPort(beatInfo.getPort());
actualBuilder.setIp(beatInfo.getIp());
actualBuilder.setWeight(beatInfo.getWeight());
actualBuilder.setMetadata(beatInfo.getMetadata());
actualBuilder.setClusterName(beatInfo.getCluster());
actualBuilder.setEphemeral(beatInfo.isEphemeral());
}
private void setInstanceId(Instance instance) {
instance.setInstanceId(InstanceIdGeneratorManager.generateInstanceId(instance));
}
}
| BeatInfoInstanceBuilder |
java | google__guice | extensions/grapher/src/com/google/inject/grapher/ProviderAliasCreator.java | {
"start": 1136,
"end": 1632
} | class ____ implements AliasCreator {
@Override
public Iterable<Alias> createAliases(Iterable<Binding<?>> bindings) {
List<Alias> aliases = Lists.newArrayList();
for (Binding<?> binding : bindings) {
if (binding instanceof ProviderBinding) {
aliases.add(
new Alias(
NodeId.newTypeId(binding.getKey()),
NodeId.newTypeId(((ProviderBinding<?>) binding).getProvidedKey())));
}
}
return aliases;
}
}
| ProviderAliasCreator |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/view/freemarker/FreeMarkerConfigurerTests.java | {
"start": 1491,
"end": 3704
} | class ____ {
private final FreeMarkerConfigurer freeMarkerConfigurer = new FreeMarkerConfigurer();
@Test
void freeMarkerConfigurerWithConfigLocation() {
freeMarkerConfigurer.setConfigLocation(new FileSystemResource("myprops.properties"));
Properties props = new Properties();
props.setProperty("myprop", "/mydir");
freeMarkerConfigurer.setFreemarkerSettings(props);
assertThatIOException().isThrownBy(freeMarkerConfigurer::afterPropertiesSet);
}
@Test
void freeMarkerConfigurerWithResourceLoaderPath() throws Exception {
freeMarkerConfigurer.setTemplateLoaderPath("file:/mydir");
freeMarkerConfigurer.afterPropertiesSet();
Configuration cfg = freeMarkerConfigurer.getConfiguration();
assertThat(cfg.getTemplateLoader()).isInstanceOf(MultiTemplateLoader.class);
MultiTemplateLoader multiTemplateLoader = (MultiTemplateLoader)cfg.getTemplateLoader();
assertThat(multiTemplateLoader.getTemplateLoader(0)).isInstanceOf(SpringTemplateLoader.class);
assertThat(multiTemplateLoader.getTemplateLoader(1)).isInstanceOf(ClassTemplateLoader.class);
}
@Test
@SuppressWarnings("rawtypes")
public void freeMarkerConfigurerWithNonFileResourceLoaderPath() throws Exception {
freeMarkerConfigurer.setTemplateLoaderPath("file:/mydir");
Properties settings = new Properties();
settings.setProperty("localized_lookup", "false");
freeMarkerConfigurer.setFreemarkerSettings(settings);
freeMarkerConfigurer.setResourceLoader(new ResourceLoader() {
@Override
public Resource getResource(String location) {
if (!("file:/mydir".equals(location) || "file:/mydir/test".equals(location))) {
throw new IllegalArgumentException(location);
}
return new ByteArrayResource("test".getBytes(), "test");
}
@Override
public ClassLoader getClassLoader() {
return getClass().getClassLoader();
}
});
freeMarkerConfigurer.afterPropertiesSet();
assertThat(freeMarkerConfigurer.getConfiguration()).isInstanceOf(Configuration.class);
Configuration fc = freeMarkerConfigurer.getConfiguration();
Template ft = fc.getTemplate("test");
assertThat(FreeMarkerTemplateUtils.processTemplateIntoString(ft, new HashMap())).isEqualTo("test");
}
}
| FreeMarkerConfigurerTests |
java | apache__camel | components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTest.java | {
"start": 1310,
"end": 3465
// Unit tests for KafkaConsumer construction/configuration using Mockito mocks.
// NOTE: the stubbing order matters — when(endpoint.getConfiguration().getXxx())
// relies on endpoint.getConfiguration() having been stubbed on the line above.
class ____ {
// Mocked collaborators shared by both tests; each test stubs only what it needs.
private final KafkaConfiguration configuration = mock(KafkaConfiguration.class);
private final KafkaClientFactory clientFactory = mock(KafkaClientFactory.class);
private final KafkaComponent component = mock(KafkaComponent.class);
private final KafkaEndpoint endpoint = mock(KafkaEndpoint.class);
private final Processor processor = mock(Processor.class);
private final CamelContext context = mock(CamelContext.class);
private final ExtendedCamelContext ecc = mock(ExtendedCamelContext.class);
private final ExchangeFactory ef = mock(ExchangeFactory.class);
@Test
public void consumerRequiresBootstrapServers() {
// Wire up the minimal CamelContext/ExchangeFactory chain the consumer touches.
when(endpoint.getCamelContext()).thenReturn(context);
when(context.getCamelContextExtension()).thenReturn(ecc);
when(ecc.getExchangeFactory()).thenReturn(ef);
when(ef.newExchangeFactory(any())).thenReturn(ef);
when(endpoint.getComponent()).thenReturn(component);
when(endpoint.getConfiguration()).thenReturn(configuration);
when(endpoint.getConfiguration().getGroupId()).thenReturn("groupOne");
when(endpoint.getKafkaClientFactory()).thenReturn(clientFactory);
when(component.getKafkaClientFactory()).thenReturn(clientFactory);
// No brokers configured: the client factory rejects broker resolution, and that
// exception must surface from getProps().
when(clientFactory.getBrokers(any())).thenThrow(new IllegalArgumentException());
final KafkaConsumer kafkaConsumer = new KafkaConsumer(endpoint, processor);
assertThrows(IllegalArgumentException.class, () -> kafkaConsumer.getProps());
}
@Test
public void consumerOnlyRequiresBootstrapServers() {
when(endpoint.getCamelContext()).thenReturn(context);
when(context.getCamelContextExtension()).thenReturn(ecc);
when(ecc.getExchangeFactory()).thenReturn(ef);
when(ef.newExchangeFactory(any())).thenReturn(ef);
when(endpoint.getComponent()).thenReturn(component);
when(endpoint.getConfiguration()).thenReturn(configuration);
// With brokers present, construction alone must succeed — no other config needed.
when(endpoint.getConfiguration().getBrokers()).thenReturn("localhost:2181");
assertDoesNotThrow(() -> new KafkaConsumer(endpoint, processor));
}
}
| KafkaConsumerTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.