language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/builder/off/SimpleNotRealyImmutableBuilderTest.java
|
{
"start": 729,
"end": 1868
}
|
class ____ {
@RegisterExtension
final GeneratedSource generatedSource = new GeneratedSource();
@ProcessorTest
@WithClasses({ SimpleMapper.class })
public void testSimpleImmutableBuilderHappyPath() {
SimpleMapper mapper = Mappers.getMapper( SimpleMapper.class );
SimpleMutablePerson source = new SimpleMutablePerson();
source.setFullName( "Bob" );
SimpleNotRealyImmutablePerson targetObject = mapper.toNotRealyImmutable( source );
assertThat( targetObject.getName() ).isEqualTo( "Bob" );
}
@ProcessorTest
@WithClasses({ SimpleWithBuilderMapper.class })
@ProcessorOption( name = "mapstruct.disableBuilders", value = "true")
public void builderGloballyDisabled() {
SimpleWithBuilderMapper mapper = Mappers.getMapper( SimpleWithBuilderMapper.class );
SimpleMutablePerson source = new SimpleMutablePerson();
source.setFullName( "Bob" );
SimpleNotRealyImmutablePerson targetObject = mapper.toNotRealyImmutable( source );
assertThat( targetObject.getName() ).isEqualTo( "Bob" );
}
}
|
SimpleNotRealyImmutableBuilderTest
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/runtime/logging/InheritableLevel.java
|
{
"start": 2346,
"end": 2683
}
|
class ____ implements ObjectSubstitution<InheritableLevel, String> {
@Override
public String serialize(InheritableLevel obj) {
return obj.toString();
}
@Override
public InheritableLevel deserialize(String obj) {
return InheritableLevel.of(obj);
}
}
}
|
Substitution
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/ErrorProneJavacPluginTest.java
|
{
"start": 2634,
"end": 3100
}
|
class ____ {
@Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();
@Test
public void hello() throws IOException {
FileSystem fileSystem = Jimfs.newFileSystem(Configuration.unix());
Path source = fileSystem.getPath("Test.java");
Files.write(
source,
ImmutableList.of(
"package test;",
"import java.util.HashSet;",
"import java.util.Set;",
"
|
ErrorProneJavacPluginTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java
|
{
"start": 3874,
"end": 4455
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory val;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) {
this.source = source;
this.val = val;
}
@Override
public SignumDoubleEvaluator get(DriverContext context) {
return new SignumDoubleEvaluator(source, val.get(context), context);
}
@Override
public String toString() {
return "SignumDoubleEvaluator[" + "val=" + val + "]";
}
}
}
|
Factory
|
java
|
quarkusio__quarkus
|
extensions/amazon-lambda/deployment/src/test/java/io/quarkus/amazon/lambda/deployment/RequestHandlerJandexUtilTest.java
|
{
"start": 20489,
"end": 20770
}
|
class ____ implements RequestHandler<Object, Object> {
@Override
public Object handleRequest(Object input, Context context) {
return input;
}
}
// Abstract parent with concrete child override
public static abstract
|
ResolvedObjectHandler
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java
|
{
"start": 510,
"end": 777
}
|
class ____ extends LegacyActionRequest {
protected EsqlQueryRequest() {}
protected EsqlQueryRequest(StreamInput in) throws IOException {
super(in);
}
public abstract String query();
public abstract QueryBuilder filter();
}
|
EsqlQueryRequest
|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultNodeIdFactory.java
|
{
"start": 1079,
"end": 1973
}
|
class ____ implements NodeIdFactory {
protected static final Map<String, AtomicInteger> NODE_COUNTERS = new ConcurrentHashMap<>();
@Override
public String createId(NamedNode definition) {
String key = definition.getShortName();
return key + getNodeCounter(key).incrementAndGet();
}
/**
* Returns the counter for the given node key, lazily creating one if necessary
*/
protected static AtomicInteger getNodeCounter(String key) {
return NODE_COUNTERS.computeIfAbsent(key, k -> new AtomicInteger());
}
/**
* Helper method for test purposes that allows tests to start clean (made protected to ensure that it is not called
* accidentally)
*/
protected static void resetAllCounters() {
for (AtomicInteger counter : NODE_COUNTERS.values()) {
counter.set(0);
}
}
}
|
DefaultNodeIdFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java
|
{
"start": 1847,
"end": 17690
}
|
class ____ extends ESTestCase {
public void testRemovePreviousAutoconfiguration() throws Exception {
final List<String> file1 = List.of(
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"some.setting1: some.value",
"some.setting2: some.value",
"some.setting3: some.value",
"some.setting4: some.value",
"# commented out line",
"# commented out line",
"# commented out line"
);
final List<String> file2 = List.of(
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"some.setting1: some.value",
"some.setting2: some.value",
"some.setting3: some.value",
"some.setting4: some.value",
"# commented out line",
"# commented out line",
"# commented out line",
AutoConfigureNode.AUTO_CONFIGURATION_START_MARKER,
"cluster.initial_master_nodes: [\"node1\"]",
"http.host: [_site_]",
"xpack.security.enabled: true",
"xpack.security.enrollment.enabled: true",
"xpack.security.http.ssl.enabled: true",
"xpack.security.http.ssl.keystore.path: /path/to/the/file",
"xpack.security.transport.ssl.keystore.path: /path/to/the/file",
"xpack.security.transport.ssl.truststore.path: /path/to/the/file",
AutoConfigureNode.AUTO_CONFIGURATION_END_MARKER
);
assertEquals(file1, removePreviousAutoconfiguration(file2));
}
public void testRemovePreviousAutoconfigurationRetainsUserAdded() throws Exception {
final List<String> file1 = List.of(
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"some.setting1: some.value",
"some.setting2: some.value",
"some.setting3: some.value",
"some.setting4: some.value",
"# commented out line",
"# commented out line",
"# commented out line",
"some.extra.added.setting: value"
);
final List<String> file2 = List.of(
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"# commented out line",
"some.setting1: some.value",
"some.setting2: some.value",
"some.setting3: some.value",
"some.setting4: some.value",
"# commented out line",
"# commented out line",
"# commented out line",
AutoConfigureNode.AUTO_CONFIGURATION_START_MARKER,
"cluster.initial_master_nodes: [\"node1\"]",
"http.host: [_site_]",
"xpack.security.enabled: true",
"xpack.security.enrollment.enabled: true",
"xpack.security.http.ssl.enabled: true",
"some.extra.added.setting: value",
"xpack.security.http.ssl.keystore.path: /path/to/the/file",
"xpack.security.transport.ssl.keystore.path: /path/to/the/file",
"xpack.security.transport.ssl.truststore.path: /path/to/the/file",
"",
AutoConfigureNode.AUTO_CONFIGURATION_END_MARKER
);
assertEquals(file1, removePreviousAutoconfiguration(file2));
}
public void testSubjectAndIssuerForGeneratedCertificates() throws Exception {
// test no publish settings
Path tempDir = createTempDir();
try {
Files.createDirectory(tempDir.resolve("config"));
// empty yml file, it just has to exist
Files.write(tempDir.resolve("config").resolve("elasticsearch.yml"), List.of(), CREATE_NEW);
Tuple<X509Certificate, X509Certificate> generatedCerts = runAutoConfigAndReturnCertificates(tempDir, Settings.EMPTY);
assertThat(checkSubjectAndIssuerDN(generatedCerts.v1(), "CN=dummy.test.hostname", AUTO_CONFIG_HTTP_ALT_DN), is(true));
assertThat(checkSubjectAndIssuerDN(generatedCerts.v2(), "CN=dummy.test.hostname", AUTO_CONFIG_TRANSPORT_ALT_DN), is(true));
} finally {
deleteDirectory(tempDir);
}
}
public void testGeneratedHTTPCertificateSANsAndKeyUsage() throws Exception {
// test no publish settings
Path tempDir = createTempDir();
try {
Files.createDirectory(tempDir.resolve("config"));
// empty yml file, it just has to exist
Files.write(tempDir.resolve("config").resolve("elasticsearch.yml"), List.of(), CREATE_NEW);
X509Certificate httpCertificate = runAutoConfigAndReturnHTTPCertificate(tempDir, Settings.EMPTY);
assertThat(checkGeneralNameSan(httpCertificate, "dummy.test.hostname", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "localhost", GeneralName.dNSName), is(true));
} finally {
deleteDirectory(tempDir);
}
// test network publish settings
tempDir = createTempDir();
try {
Files.createDirectory(tempDir.resolve("config"));
// empty yml file, it just has to exist
Files.write(tempDir.resolve("config").resolve("elasticsearch.yml"), List.of(), CREATE_NEW);
X509Certificate httpCertificate = runAutoConfigAndReturnHTTPCertificate(
tempDir,
Settings.builder()
.put(NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.getKey(), "172.168.1.100")
.put(HttpTransportSettings.SETTING_HTTP_HOST.getKey(), "10.10.10.100")
.build()
);
assertThat(checkGeneralNameSan(httpCertificate, "dummy.test.hostname", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "localhost", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "172.168.1.100", GeneralName.iPAddress), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "10.10.10.100", GeneralName.iPAddress), is(false));
verifyKeyUsageAndExtendedKeyUsage(httpCertificate);
} finally {
deleteDirectory(tempDir);
}
// test http publish settings
tempDir = createTempDir();
try {
Files.createDirectory(tempDir.resolve("config"));
// empty yml file, it just has to exist
Files.write(tempDir.resolve("config").resolve("elasticsearch.yml"), List.of(), CREATE_NEW);
X509Certificate httpCertificate = runAutoConfigAndReturnHTTPCertificate(
tempDir,
Settings.builder()
.put(NetworkService.GLOBAL_NETWORK_HOST_SETTING.getKey(), "172.168.1.100")
.put(HttpTransportSettings.SETTING_HTTP_PUBLISH_HOST.getKey(), "10.10.10.100")
.build()
);
assertThat(checkGeneralNameSan(httpCertificate, "dummy.test.hostname", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "localhost", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "172.168.1.100", GeneralName.iPAddress), is(false));
assertThat(checkGeneralNameSan(httpCertificate, "10.10.10.100", GeneralName.iPAddress), is(true));
verifyKeyUsageAndExtendedKeyUsage(httpCertificate);
} finally {
deleteDirectory(tempDir);
}
// test network AND http publish settings
tempDir = createTempDir();
try {
Files.createDirectory(tempDir.resolve("config"));
// empty yml file, it just has to exist
Files.write(tempDir.resolve("config").resolve("elasticsearch.yml"), List.of(), CREATE_NEW);
X509Certificate httpCertificate = runAutoConfigAndReturnHTTPCertificate(
tempDir,
Settings.builder()
.put(NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.getKey(), "gypsy.hill")
.put(NetworkService.GLOBAL_NETWORK_HOST_SETTING.getKey(), "172.168.1.100")
.put(HttpTransportSettings.SETTING_HTTP_PUBLISH_HOST.getKey(), "balkan.beast")
.put(HttpTransportSettings.SETTING_HTTP_HOST.getKey(), "10.10.10.100")
.build()
);
assertThat(checkGeneralNameSan(httpCertificate, "dummy.test.hostname", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "localhost", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "gypsy.hill", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "balkan.beast", GeneralName.dNSName), is(true));
assertThat(checkGeneralNameSan(httpCertificate, "172.168.1.100", GeneralName.iPAddress), is(false));
assertThat(checkGeneralNameSan(httpCertificate, "10.10.10.100", GeneralName.iPAddress), is(false));
verifyKeyUsageAndExtendedKeyUsage(httpCertificate);
} finally {
deleteDirectory(tempDir);
}
}
public void testAnyRemoteHostNodeAddress() throws Exception {
List<String> remoteAddresses = List.of("192.168.0.1:9300", "127.0.0.1:9300");
InetAddress[] localAddresses = new InetAddress[] { InetAddress.getByName("192.168.0.1"), InetAddress.getByName("127.0.0.1") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(false));
remoteAddresses = List.of("192.168.0.1:9300", "127.0.0.1:9300", "[::1]:9300");
localAddresses = new InetAddress[] { InetAddress.getByName("192.168.0.1"), InetAddress.getByName("127.0.0.1") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(false));
remoteAddresses = List.of("192.168.0.1:9300", "127.0.0.1:9300", "[::1]:9300");
localAddresses = new InetAddress[] {
InetAddress.getByName("192.168.0.1"),
InetAddress.getByName("127.0.0.1"),
InetAddress.getByName("10.0.0.1") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(false));
remoteAddresses = List.of("192.168.0.1:9300", "127.0.0.1:9300", "[::1]:9300", "10.0.0.1:9301");
localAddresses = new InetAddress[] { InetAddress.getByName("192.168.0.1"), InetAddress.getByName("127.0.0.1") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(true));
remoteAddresses = List.of("127.0.0.1:9300", "[::1]:9300");
localAddresses = new InetAddress[] { InetAddress.getByName("[::1]"), InetAddress.getByName("127.0.0.1") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(false));
remoteAddresses = List.of("127.0.0.1:9300", "[::1]:9300");
localAddresses = new InetAddress[] { InetAddress.getByName("192.168.2.3") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(false));
remoteAddresses = List.of("1.2.3.4:9300");
localAddresses = new InetAddress[] { InetAddress.getByName("[::1]"), InetAddress.getByName("127.0.0.1") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(true));
remoteAddresses = List.of();
localAddresses = new InetAddress[] { InetAddress.getByName("192.168.0.1"), InetAddress.getByName("127.0.0.1") };
assertThat(anyRemoteHostNodeAddress(remoteAddresses, localAddresses), equalTo(false));
}
private boolean checkGeneralNameSan(X509Certificate certificate, String generalName, int generalNameTag) throws Exception {
for (List<?> san : certificate.getSubjectAlternativeNames()) {
if (san.get(0).equals(generalNameTag) && san.get(1).equals(generalName)) {
return true;
}
}
return false;
}
private boolean checkSubjectAndIssuerDN(X509Certificate certificate, String subjectName, String issuerName) throws Exception {
if (certificate.getSubjectX500Principal().getName().equals(subjectName)
&& certificate.getIssuerX500Principal().getName().equals(issuerName)) {
return true;
}
return false;
}
private void verifyKeyUsageAndExtendedKeyUsage(X509Certificate httpCertificate) throws Exception {
List<String> extendedKeyUsage = httpCertificate.getExtendedKeyUsage();
assertEquals("Only one extended key usage expected for HTTP certificate.", 1, extendedKeyUsage.size());
String expectedServerAuthUsage = KeyPurposeId.id_kp_serverAuth.toASN1Primitive().toString();
assertEquals("Expected serverAuth extended key usage.", expectedServerAuthUsage, extendedKeyUsage.get(0));
assertExpectedKeyUsage(httpCertificate, HttpCertificateCommand.DEFAULT_CERT_KEY_USAGE);
}
private X509Certificate runAutoConfigAndReturnHTTPCertificate(Path configDir, Settings settings) throws Exception {
Tuple<X509Certificate, X509Certificate> generatedCertificates = runAutoConfigAndReturnCertificates(configDir, settings);
return generatedCertificates.v1();
}
private Tuple<X509Certificate, X509Certificate> runAutoConfigAndReturnCertificates(Path configDir, Settings settings) throws Exception {
final Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", configDir).put(settings).build());
// runs the command to auto-generate the config files and the keystore
new AutoConfigureNode(false).execute(MockTerminal.create(), new OptionParser().parse(), env, null);
KeyStoreWrapper nodeKeystore = KeyStoreWrapper.load(configDir.resolve("config"));
nodeKeystore.decrypt(new char[0]); // the keystore is always bootstrapped with an empty password
SecureString httpKeystorePassword = nodeKeystore.getString("xpack.security.http.ssl.keystore.secure_password");
SecureString transportKeystorePassword = nodeKeystore.getString("xpack.security.transport.ssl.keystore.secure_password");
final Settings newSettings = Settings.builder().loadFromPath(env.configDir().resolve("elasticsearch.yml")).build();
final String httpKeystorePath = newSettings.get("xpack.security.http.ssl.keystore.path");
final String transportKeystorePath = newSettings.get("xpack.security.transport.ssl.keystore.path");
KeyStore httpKeystore = KeyStoreUtil.readKeyStore(
configDir.resolve("config").resolve(httpKeystorePath),
"PKCS12",
httpKeystorePassword.getChars()
);
KeyStore transportKeystore = KeyStoreUtil.readKeyStore(
configDir.resolve("config").resolve(transportKeystorePath),
"PKCS12",
transportKeystorePassword.getChars()
);
X509Certificate httpCertificate = (X509Certificate) httpKeystore.getCertificate("http");
X509Certificate transportCertificate = (X509Certificate) transportKeystore.getCertificate("transport");
return new Tuple<>(httpCertificate, transportCertificate);
}
private void deleteDirectory(Path directory) throws IOException {
IOUtils.rm(directory);
}
}
|
AutoConfigureNodeTests
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/jsonFormatVisitors/JsonNullFormatVisitor.java
|
{
"start": 328,
"end": 378
}
|
class ____ implements JsonNullFormatVisitor { }
}
|
Base
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleFlatMapMaybe.java
|
{
"start": 1009,
"end": 1568
}
|
class ____<T, R> extends Maybe<R> {
final SingleSource<? extends T> source;
final Function<? super T, ? extends MaybeSource<? extends R>> mapper;
public SingleFlatMapMaybe(SingleSource<? extends T> source, Function<? super T, ? extends MaybeSource<? extends R>> mapper) {
this.mapper = mapper;
this.source = source;
}
@Override
protected void subscribeActual(MaybeObserver<? super R> downstream) {
source.subscribe(new FlatMapSingleObserver<T, R>(downstream, mapper));
}
static final
|
SingleFlatMapMaybe
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/transformer/TransformerContractTest.java
|
{
"start": 5080,
"end": 5970
}
|
class ____ extends DataFormatDefinition {
public MyDataFormatDefinition() {
super(new DefaultDataFormat() {
@Override
public void marshal(Exchange exchange, Object graph, OutputStream stream) {
assertEquals(B.class, graph.getClass());
PrintWriter pw = new PrintWriter(new OutputStreamWriter(stream));
pw.print("<fooResponse/>");
pw.close();
}
@Override
public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(stream));
assertEquals("<foo/>", br.readLine());
return new A();
}
});
}
}
public static
|
MyDataFormatDefinition
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java
|
{
"start": 2563,
"end": 3637
}
|
class ____ extends KnnVectorsWriter {
private final FlatVectorsWriter writer;
ES813FlatVectorWriter(FlatVectorsWriter writer) {
super();
this.writer = writer;
}
@Override
public KnnFieldVectorsWriter<?> addField(FieldInfo fieldInfo) throws IOException {
return writer.addField(fieldInfo);
}
@Override
public void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException {
writer.flush(maxDoc, sortMap);
}
@Override
public void finish() throws IOException {
writer.finish();
}
@Override
public void close() throws IOException {
writer.close();
}
@Override
public long ramBytesUsed() {
return writer.ramBytesUsed();
}
@Override
public void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException {
writer.mergeOneField(fieldInfo, mergeState);
}
}
static
|
ES813FlatVectorWriter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/DefaultGeneratedValueIdentityTest.java
|
{
"start": 2256,
"end": 5501
}
|
class ____ {
@Test
@JiraKey( "HHH-12671" )
public void testGenerationWithIdentityInsert(SessionFactoryScope scope) {
final TheEntity theEntity = new TheEntity();
scope.inTransaction( (session) -> {
assertNull( theEntity.createdDate );
assertNull( theEntity.alwaysDate );
assertNull( theEntity.vmCreatedDate );
assertNull( theEntity.vmCreatedSqlDate );
assertNull( theEntity.vmCreatedSqlTime );
assertNull( theEntity.vmCreatedSqlTimestamp );
assertNull( theEntity.vmCreatedSqlLocalDate );
assertNull( theEntity.vmCreatedSqlLocalTime );
assertNull( theEntity.vmCreatedSqlLocalDateTime );
assertNull( theEntity.vmCreatedSqlMonthDay );
assertNull( theEntity.vmCreatedSqlOffsetDateTime );
assertNull( theEntity.vmCreatedSqlOffsetTime );
assertNull( theEntity.vmCreatedSqlYear );
assertNull( theEntity.vmCreatedSqlYearMonth );
assertNull( theEntity.vmCreatedSqlZonedDateTime );
assertNull( theEntity.dbCreatedDate );
assertNull( theEntity.name );
session.persist( theEntity );
assertNotNull( theEntity.createdDate );
assertNotNull( theEntity.alwaysDate );
assertNotNull( theEntity.vmCreatedDate );
assertNotNull( theEntity.vmCreatedSqlDate );
assertNotNull( theEntity.vmCreatedSqlTime );
assertNotNull( theEntity.vmCreatedSqlTimestamp );
assertNotNull( theEntity.vmCreatedSqlLocalDate );
assertNotNull( theEntity.vmCreatedSqlLocalTime );
assertNotNull( theEntity.vmCreatedSqlLocalDateTime );
assertNotNull( theEntity.vmCreatedSqlMonthDay );
assertNotNull( theEntity.vmCreatedSqlOffsetDateTime );
assertNotNull( theEntity.vmCreatedSqlOffsetTime );
assertNotNull( theEntity.vmCreatedSqlYear );
assertNotNull( theEntity.vmCreatedSqlYearMonth );
assertNotNull( theEntity.vmCreatedSqlZonedDateTime );
assertNotNull( theEntity.dbCreatedDate );
assertNotNull( theEntity.name );
} );
assertNotNull( theEntity.createdDate );
assertNotNull( theEntity.alwaysDate );
assertEquals( "Bob", theEntity.name );
scope.inTransaction( (session) -> {
TheEntity _theEntity = session.find( TheEntity.class, theEntity.id );
assertNotNull( _theEntity.createdDate );
assertNotNull( _theEntity.alwaysDate );
assertNotNull( _theEntity.vmCreatedDate );
assertNotNull( _theEntity.vmCreatedSqlDate );
assertNotNull( _theEntity.vmCreatedSqlTime );
assertNotNull( _theEntity.vmCreatedSqlTimestamp );
assertNotNull( _theEntity.vmCreatedSqlLocalDate );
assertNotNull( _theEntity.vmCreatedSqlLocalTime );
assertNotNull( _theEntity.vmCreatedSqlLocalDateTime );
assertNotNull( _theEntity.vmCreatedSqlMonthDay );
assertNotNull( _theEntity.vmCreatedSqlOffsetDateTime );
assertNotNull( _theEntity.vmCreatedSqlOffsetTime );
assertNotNull( _theEntity.vmCreatedSqlYear );
assertNotNull( _theEntity.vmCreatedSqlYearMonth );
assertNotNull( _theEntity.vmCreatedSqlZonedDateTime );
assertNotNull( _theEntity.dbCreatedDate );
assertEquals( "Bob", _theEntity.name );
_theEntity.lastName = "Smith";
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Entity( name = "TheEntity" )
public static
|
DefaultGeneratedValueIdentityTest
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/config/AnnotationDrivenBeanDefinitionParserTests.java
|
{
"start": 9313,
"end": 9522
}
|
class ____ implements WebArgumentResolver {
@Override
public Object resolveArgument(MethodParameter methodParameter, NativeWebRequest webRequest) throws Exception {
return null;
}
}
|
TestWebArgumentResolver
|
java
|
quarkusio__quarkus
|
extensions/smallrye-jwt-build/runtime/src/main/java/io/quarkus/smallrye/jwt/build/runtime/graalvm/Substitutions.java
|
{
"start": 336,
"end": 1041
}
|
class ____ {
@Substitute
public Target_org_jose4j_jwk_OctetKeyPairJsonWebKey(java.security.PublicKey publicKey) {
throw new UnsupportedOperationException(
"OctetKeyPairJsonWebKey depends on EdECPrivateKeySpec which is not available in Java < 15");
}
@Substitute
Target_org_jose4j_jwk_OctetKeyPairUtil subtypeKeyUtil() {
throw new UnsupportedOperationException(
"OctetKeyPairJsonWebKey depends on EdECPrivateKeySpec which is not available in Java < 15");
}
}
@TargetClass(className = "org.jose4j.keys.OctetKeyPairUtil", onlyWith = JavaVersionLessThan17andOctetKeyPairOnClasspath.class)
final
|
Target_org_jose4j_jwk_OctetKeyPairJsonWebKey
|
java
|
google__dagger
|
hilt-android/main/java/dagger/hilt/android/internal/managers/ActivityComponentManager.java
|
{
"start": 3142,
"end": 3525
}
|
class ____ in your manifest's "
+ "<application />'s android:name attribute?"
: "Found: " + activity.getApplication().getClass()));
}
return EntryPoints.get(
activityRetainedComponentManager, ActivityComponentBuilderEntryPoint.class)
.activityComponentBuilder()
.activity(activity)
.build();
}
}
|
name
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/SerializationOrderTest.java
|
{
"start": 1387,
"end": 1468
}
|
class ____ { }
@JsonPropertyOrder(value={"a","b","x","z"})
static
|
OrderMixIn
|
java
|
grpc__grpc-java
|
api/src/main/java/io/grpc/ServerStreamTracer.java
|
{
"start": 2632,
"end": 2869
}
|
class ____<ReqT, RespT> {
public abstract MethodDescriptor<ReqT, RespT> getMethodDescriptor();
public abstract Attributes getAttributes();
@Nullable
public abstract String getAuthority();
}
/**
* This
|
ServerCallInfo
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/immutable_constructor/ImmutablePOJOMapper.java
|
{
"start": 762,
"end": 954
}
|
interface ____ {
ImmutablePOJO getImmutablePOJO(@Param("pojoID") Integer pojoID);
ImmutablePOJO getImmutablePOJONoMatchingConstructor(@Param("pojoID") Integer pojoID);
}
|
ImmutablePOJOMapper
|
java
|
apache__kafka
|
trogdor/src/main/java/org/apache/kafka/trogdor/workload/ConfigurableProducerWorker.java
|
{
"start": 5539,
"end": 6225
}
|
class ____ implements Callback {
private final SendRecords sendRecords;
private final long startMs;
SendRecordsCallback(SendRecords sendRecords, long startMs) {
this.sendRecords = sendRecords;
this.startMs = startMs;
}
@Override
public void onCompletion(RecordMetadata metadata, Exception exception) {
long now = Time.SYSTEM.milliseconds();
long durationMs = now - startMs;
sendRecords.recordDuration(durationMs);
if (exception != null) {
log.error("SendRecordsCallback: error", exception);
}
}
}
public
|
SendRecordsCallback
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/support/JpaEntityInformationSupport.java
|
{
"start": 2250,
"end": 2547
}
|
class ____ {@link EntityManager}.
*
* @param domainClass must not be {@literal null}.
* @param em must not be {@literal null}.
* @return
*/
public static <T> JpaEntityInformation<T, ?> getEntityInformation(Class<T> domainClass, EntityManager em) {
Assert.notNull(domainClass, "Domain
|
and
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/ErrorResponse.java
|
{
"start": 12536,
"end": 12952
}
|
interface ____ {
/**
* Handle the given {@code ProblemDetail} that's going to be rendered,
* and the {@code ErrorResponse} it originates from, if applicable.
* @param detail the {@code ProblemDetail} to be rendered
* @param errorResponse the {@code ErrorResponse}, or {@code null} if there isn't one
*/
void handleError(ProblemDetail detail, @Nullable ErrorResponse errorResponse);
}
}
|
Interceptor
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyResultTests.java
|
{
"start": 828,
"end": 2092
}
|
class ____ extends AbstractWireSerializingTestCase<Result> {
public static Result createRandom() {
int numClasses = randomIntBetween(2, 100);
List<String> classNames = Stream.generate(() -> randomAlphaOfLength(10)).limit(numClasses).collect(Collectors.toList());
List<PerClassSingleValue> classes = new ArrayList<>(numClasses);
for (int i = 0; i < numClasses; i++) {
double accuracy = randomDoubleBetween(0.0, 1.0, true);
classes.add(new PerClassSingleValue(classNames.get(i), accuracy));
}
double overallAccuracy = randomDoubleBetween(0.0, 1.0, true);
return new Result(classes, overallAccuracy);
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(MlEvaluationNamedXContentProvider.getNamedWriteables());
}
@Override
protected Result createTestInstance() {
return createRandom();
}
@Override
protected Result mutateInstance(Result instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Writeable.Reader<Result> instanceReader() {
return Result::new;
}
}
|
AccuracyResultTests
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/service/repository/embedded/EmbeddedConfigInfoPersistServiceImpl.java
|
{
"start": 5311,
"end": 58800
}
|
class ____ implements ConfigInfoPersistService {
public static final String SPOT = ".";
private static final String RESOURCE_CONFIG_INFO_ID = "config-info-id";
private static final String RESOURCE_CONFIG_HISTORY_ID = "config-history-id";
private static final String RESOURCE_CONFIG_TAG_RELATION_ID = "config-tag-relation-id";
private static final String RESOURCE_APP_CONFIGDATA_RELATION_SUBS = "app-configdata-relation-subs";
private static final String RESOURCE_CONFIG_BETA_ID = "config-beta-id";
private static final String RESOURCE_NAMESPACE_ID = "namespace-id";
private static final String RESOURCE_USER_ID = "user-id";
private static final String RESOURCE_ROLE_ID = "role-id";
private static final String RESOURCE_PERMISSIONS_ID = "permissions_id";
private static final String DATA_ID = "dataId";
private static final String GROUP = "group";
private static final String APP_NAME = "appName";
private static final String CONTENT = "content";
private static final String TENANT = "tenant_id";
private static final Set<String> SYSTEM_GROUP = Set.of("mcp-server", "mcp-server-versions", "mcp-tools");
private final DatabaseOperate databaseOperate;
private final IdGeneratorManager idGeneratorManager;
MapperManager mapperManager;
private DataSourceService dataSourceService;
private HistoryConfigInfoPersistService historyConfigInfoPersistService;
/**
* The constructor sets the dependency injection order.
*
* @param databaseOperate databaseOperate.
* @param idGeneratorManager {@link IdGeneratorManager}
*/
public EmbeddedConfigInfoPersistServiceImpl(DatabaseOperate databaseOperate, IdGeneratorManager idGeneratorManager,
@Qualifier("embeddedHistoryConfigInfoPersistServiceImpl") HistoryConfigInfoPersistService historyConfigInfoPersistService) {
this.databaseOperate = databaseOperate;
this.idGeneratorManager = idGeneratorManager;
idGeneratorManager.register(RESOURCE_CONFIG_INFO_ID, RESOURCE_CONFIG_HISTORY_ID,
RESOURCE_CONFIG_TAG_RELATION_ID, RESOURCE_APP_CONFIGDATA_RELATION_SUBS, RESOURCE_CONFIG_BETA_ID,
RESOURCE_NAMESPACE_ID, RESOURCE_USER_ID, RESOURCE_ROLE_ID, RESOURCE_PERMISSIONS_ID);
this.dataSourceService = DynamicDataSource.getInstance().getDataSource();
Boolean isDataSourceLogEnable = EnvUtil.getProperty(CommonConstant.NACOS_PLUGIN_DATASOURCE_LOG, Boolean.class,
false);
this.mapperManager = MapperManager.instance(isDataSourceLogEnable);
this.historyConfigInfoPersistService = historyConfigInfoPersistService;
NotifyCenter.registerToSharePublisher(DerbyImportEvent.class);
}
@Override
public <E> PaginationHelper<E> createPaginationHelper() {
    // Pagination is delegated to the embedded-storage implementation backed by databaseOperate.
    final PaginationHelper<E> helper = new EmbeddedPaginationHelperImpl<>(databaseOperate);
    return helper;
}
@Override
public String generateLikeArgument(String s) {
    // Translate the user-facing fuzzy wildcard '*' into the SQL LIKE wildcard '%'.
    // Strings without the wildcard marker are passed through untouched.
    if (!s.contains(PATTERN_STR)) {
        return s;
    }
    // replaceAll takes a regex, so '*' must be escaped to match the literal character.
    return s.replaceAll("\\*", "%");
}
@Override
public ConfigInfoStateWrapper findConfigInfoState(final String dataId, final String group, final String tenant) {
    // A blank tenant is stored as the empty string, so normalize before querying.
    final String normalizedTenant = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // Only the lightweight state columns are selected; content is deliberately excluded.
    final String query = mapper.select(
            Arrays.asList("id", "data_id", "group_id", "tenant_id", "gmt_modified"),
            Arrays.asList("data_id", "group_id", "tenant_id"));
    final Object[] params = new Object[] {dataId, group, normalizedTenant};
    return databaseOperate.queryOne(query, params, CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER);
}
/**
 * Builds an operate result from the current persisted state of a config.
 * Absence of a state row yields a failed result.
 */
private ConfigOperateResult getConfigInfoOperateResult(String dataId, String group, String tenant) {
    final ConfigInfoStateWrapper state = findConfigInfoState(dataId, group, tenant);
    return state == null ? new ConfigOperateResult(false)
            : new ConfigOperateResult(state.getId(), state.getLastModified());
}
@Override
public ConfigOperateResult addConfigInfo(final String srcIp, final String srcUser, final ConfigInfo configInfo,
        final Map<String, Object> configAdvanceInfo) {
    // Delegate to the internal variant without a completion callback.
    final BiConsumer<Boolean, Throwable> noCallback = null;
    return addConfigInfo(srcIp, srcUser, configInfo, configAdvanceInfo, noCallback);
}
/**
 * Queues the INSERT statements for a new config (main row, tag relations and, unless a
 * config migration is in progress, a history record), then executes them as one
 * blocking batch.
 *
 * @param srcIp             ip of the caller publishing the config
 * @param srcUser           user publishing the config
 * @param configInfo        config to persist; a blank tenant is normalized to the empty string
 * @param configAdvanceInfo optional advance attributes (e.g. "config_tags"); may be null
 * @param consumer          optional completion callback passed to the blocking update; may be null
 * @return the operate result, re-read from storage after the batch completes
 */
private ConfigOperateResult addConfigInfo(final String srcIp, final String srcUser, final ConfigInfo configInfo,
        final Map<String, Object> configAdvanceInfo, BiConsumer<Boolean, Throwable> consumer) {
    try {
        final String tenantTmp =
                StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant();
        configInfo.setTenant(tenantTmp);
        // Pre-allocate ids for the config row and its history record.
        long configId = idGeneratorManager.nextId(RESOURCE_CONFIG_INFO_ID);
        long hisId = idGeneratorManager.nextId(RESOURCE_CONFIG_HISTORY_ID);
        addConfigInfoAtomic(configId, srcIp, srcUser, configInfo, configAdvanceInfo);
        String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags");
        addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(),
                configInfo.getTenant());
        Timestamp now = new Timestamp(System.currentTimeMillis());
        // During config migration the "I" (insert) history record is skipped.
        if (!CONFIG_MIGRATE_FLAG.get()) {
            historyConfigInfoPersistService.insertConfigHistoryAtomic(hisId, configInfo, srcIp, srcUser, now, "I",
                    Constants.FORMAL, null,
                    ConfigExtInfoUtil.getExtraInfoFromAdvanceInfoMap(configAdvanceInfo, srcUser));
        }
        EmbeddedStorageContextUtils.onModifyConfigInfo(configInfo, srcIp, now);
        // Executes all SQL queued above in one blocking batch.
        boolean result = databaseOperate.blockUpdate(consumer);
        if (!result) {
            return new ConfigOperateResult(false);
        }
        return getConfigInfoOperateResult(configInfo.getDataId(), configInfo.getGroup(), tenantTmp);
    } finally {
        // Always clear the thread-local SQL context, whether or not the batch ran.
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
/**
 * Updates only the metadata (description and/or tags) of an existing config.
 *
 * <p>The updates are queued into the thread-local SQL context and executed in one
 * blocking batch; a {@code null} parameter means "leave unchanged".
 *
 * @param dataId      data id of the config
 * @param group       group of the config
 * @param tenant      tenant (namespace) of the config
 * @param configTags  comma-separated tags replacing the existing ones; null keeps them
 * @param description new description; null keeps the current one
 * @return the operate result re-read from storage
 * @throws NacosException with {@code NOT_FOUND} if the config does not exist
 */
@Override
public ConfigOperateResult updateConfigInfoMetadata(String dataId, String group, String tenant, String configTags,
        String description) throws NacosException {
    try {
        ConfigInfoWrapper configInfoWrapper = findConfigInfo(dataId, group, tenant);
        if (configInfoWrapper == null) {
            throw new NacosException(NacosException.NOT_FOUND,
                    "config is not found for dataId=" + dataId + ", group=" + group);
        }
        Long configId = configInfoWrapper.getId();
        Timestamp now = new Timestamp(System.currentTimeMillis());
        if (description != null) {
            ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
                    TableConstant.CONFIG_INFO);
            final String sql = configInfoMapper.update(Arrays.asList("gmt_modified@NOW()", "c_desc"),
                    Arrays.asList("id"));
            final Object[] args = new Object[] {description, configId};
            EmbeddedStorageContextHolder.addSqlContext(sql, args);
        }
        if (configTags != null) {
            // Tags are replaced wholesale: delete all existing relations, then recreate.
            removeTagByIdAtomic(configId);
            addConfigTagsRelation(configId, configTags, configInfoWrapper.getDataId(), configInfoWrapper.getGroup(),
                    configInfoWrapper.getTenant());
        }
        EmbeddedStorageContextUtils.onModifyConfigInfo(configInfoWrapper, null, now);
        databaseOperate.blockUpdate();
        return getConfigInfoOperateResult(configInfoWrapper.getDataId(), configInfoWrapper.getGroup(), tenant);
    } finally {
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
@Override
public ConfigOperateResult insertOrUpdate(String srcIp, String srcUser, ConfigInfo configInfo,
        Map<String, Object> configAdvanceInfo) {
    // Upsert: the presence of an existing state row decides between insert and update.
    final ConfigInfoStateWrapper existing = findConfigInfoState(configInfo.getDataId(), configInfo.getGroup(),
            configInfo.getTenant());
    if (existing == null) {
        return addConfigInfo(srcIp, srcUser, configInfo, configAdvanceInfo);
    }
    return updateConfigInfo(configInfo, srcIp, srcUser, configAdvanceInfo);
}
@Override
public ConfigOperateResult insertOrUpdateCas(String srcIp, String srcUser, ConfigInfo configInfo,
        Map<String, Object> configAdvanceInfo) {
    // CAS upsert: insert when absent, otherwise a compare-and-swap update guarded by md5.
    final ConfigInfoStateWrapper existing = findConfigInfoState(configInfo.getDataId(), configInfo.getGroup(),
            configInfo.getTenant());
    if (existing == null) {
        return addConfigInfo(srcIp, srcUser, configInfo, configAdvanceInfo);
    }
    return updateConfigInfoCas(configInfo, srcIp, srcUser, configAdvanceInfo);
}
/**
 * Queues the INSERT for the main config_info row; does not execute it.
 *
 * <p>The caller supplies the pre-allocated primary key and later triggers the batch
 * via the database operate's blocking update.
 *
 * @param id                pre-allocated primary key for the row
 * @param srcIp             publisher ip
 * @param srcUser           publisher user
 * @param configInfo        config to insert; blank tenant/appName are stored as empty strings
 * @param configAdvanceInfo optional advance attributes ("desc", "use", "effect", "type", "schema"); may be null
 * @return the same {@code id} that was passed in
 */
@Override
public long addConfigInfoAtomic(final long id, final String srcIp, final String srcUser,
        final ConfigInfo configInfo, Map<String, Object> configAdvanceInfo) {
    final String appNameTmp = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
    final String tenantTmp = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
    final String desc = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("desc");
    final String use = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("use");
    final String effect = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("effect");
    final String type = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("type");
    final String schema = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("schema");
    // Stored md5 is computed over the content using the persistence encoding.
    final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.PERSIST_ENCODE);
    final String encryptedDataKey =
            configInfo.getEncryptedDataKey() == null ? StringUtils.EMPTY : configInfo.getEncryptedDataKey();
    ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // The two "@NOW()" columns consume no placeholder (args skip them), so the args array
    // must stay aligned with the remaining columns in declaration order.
    final String sql = configInfoMapper.insert(
            Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content", "md5", "src_ip",
                    "src_user", "gmt_create@NOW()", "gmt_modified@NOW()", "c_desc", "c_use", "effect", "type",
                    "c_schema", "encrypted_data_key"));
    final Object[] args = new Object[] {id, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp,
            configInfo.getContent(), md5Tmp, srcIp, srcUser, desc, use, effect, type, schema, encryptedDataKey};
    EmbeddedStorageContextHolder.addSqlContext(sql, args);
    return id;
}
@Override
public void addConfigTagRelationAtomic(long configId, String tagName, String dataId, String group, String tenant) {
    // Queue one INSERT into the tag-relation table; tag_type is stored as the empty string.
    final ConfigTagsRelationMapper mapper = mapperManager.findMapper(
            dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION);
    final String insertSql = mapper.insert(
            Arrays.asList("id", "tag_name", "tag_type", "data_id", "group_id", "tenant_id"));
    EmbeddedStorageContextHolder.addSqlContext(insertSql,
            new Object[] {configId, tagName, StringUtils.EMPTY, dataId, group, tenant});
}
@Override
public void addConfigTagsRelation(long configId, String configTags, String dataId, String group, String tenant) {
    // A blank tag list means there is nothing to persist.
    if (StringUtils.isBlank(configTags)) {
        return;
    }
    // Tags arrive as a comma-separated string; queue one relation row per tag.
    for (String tag : configTags.split(",")) {
        addConfigTagRelationAtomic(configId, tag, dataId, group, tenant);
    }
}
/**
 * Imports a batch of configs, applying {@code policy} when a config already exists.
 *
 * <p>{@code ABORT} stops at the first conflict and reports the rest as skipped,
 * {@code SKIP} records the conflict and continues, and {@code OVERWRITE} updates the
 * existing config instead.
 *
 * @param configInfoList    configs to import; each entry is validated first
 * @param srcUser           importing user, recorded in history
 * @param srcIp             importing ip, recorded in history
 * @param configAdvanceInfo shared advance attributes; may be null (created lazily)
 * @param policy            conflict-resolution policy for already-existing configs
 * @return summary map with "succCount", "skipCount" and, when present, "failData"/"skipData"
 * @throws NacosException if validation fails or an unexpected persistence error occurs
 */
@Override
public Map<String, Object> batchInsertOrUpdate(List<ConfigAllInfo> configInfoList, String srcUser, String srcIp,
        Map<String, Object> configAdvanceInfo, SameConfigPolicy policy) throws NacosException {
    int succCount = 0;
    int skipCount = 0;
    List<Map<String, String>> failData = null;
    List<Map<String, String>> skipData = null;
    // Propagates any asynchronous persistence failure out of the blocking update.
    final BiConsumer<Boolean, Throwable> callFinally = (result, t) -> {
        if (t != null) {
            throw new NacosRuntimeException(0, t);
        }
    };
    for (int i = 0; i < configInfoList.size(); i++) {
        ConfigAllInfo configInfo = configInfoList.get(i);
        try {
            ParamUtils.checkParam(configInfo.getDataId(), configInfo.getGroup(), "datumId",
                    configInfo.getContent());
        } catch (Throwable e) {
            DEFAULT_LOG.error("data verification failed", e);
            throw e;
        }
        ConfigInfo configInfo2Save = new ConfigInfo(configInfo.getDataId(), configInfo.getGroup(),
                configInfo.getTenant(), configInfo.getAppName(), configInfo.getContent());
        configInfo2Save.setEncryptedDataKey(
                configInfo.getEncryptedDataKey() == null ? "" : configInfo.getEncryptedDataKey());
        String type = configInfo.getType();
        if (StringUtils.isBlank(type)) {
            // simple judgment of file type based on suffix
            if (configInfo.getDataId().contains(SPOT)) {
                String extName = configInfo.getDataId().substring(configInfo.getDataId().lastIndexOf(SPOT) + 1);
                FileTypeEnum fileTypeEnum = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(extName);
                type = fileTypeEnum.getFileType();
            } else {
                type = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(null).getFileType();
            }
        }
        // NOTE(review): the advance-info map is shared across iterations once created, so
        // "type"/"desc" are overwritten per config — verify this reuse is intended.
        if (configAdvanceInfo == null) {
            configAdvanceInfo = new HashMap<>(16);
        }
        configAdvanceInfo.put("type", type);
        configAdvanceInfo.put("desc", configInfo.getDesc());
        try {
            // Emulates a unique-key violation so all conflicts funnel into one handler below.
            ConfigInfoStateWrapper foundCfg = findConfigInfoState(configInfo2Save.getDataId(),
                    configInfo2Save.getGroup(), configInfo2Save.getTenant());
            if (foundCfg != null) {
                throw new Throwable("DuplicateKeyException: config already exists, should be overridden");
            }
            addConfigInfo(srcIp, srcUser, configInfo2Save, configAdvanceInfo, callFinally);
            succCount++;
        } catch (Throwable e) {
            // Conflicts are recognized by message text; anything else is a hard failure.
            if (!StringUtils.contains(e.toString(), "DuplicateKeyException")) {
                throw new NacosException(NacosException.SERVER_ERROR, e);
            }
            // uniqueness constraint conflict
            if (SameConfigPolicy.ABORT.equals(policy)) {
                failData = new ArrayList<>();
                skipData = new ArrayList<>();
                Map<String, String> faileditem = new HashMap<>(2);
                faileditem.put("dataId", configInfo2Save.getDataId());
                faileditem.put("group", configInfo2Save.getGroup());
                failData.add(faileditem);
                // Everything after the failing entry is reported as skipped.
                for (int j = (i + 1); j < configInfoList.size(); j++) {
                    ConfigInfo skipConfigInfo = configInfoList.get(j);
                    Map<String, String> skipitem = new HashMap<>(2);
                    skipitem.put("dataId", skipConfigInfo.getDataId());
                    skipitem.put("group", skipConfigInfo.getGroup());
                    skipData.add(skipitem);
                    skipCount++;
                }
                break;
            } else if (SameConfigPolicy.SKIP.equals(policy)) {
                skipCount++;
                if (skipData == null) {
                    skipData = new ArrayList<>();
                }
                Map<String, String> skipitem = new HashMap<>(2);
                skipitem.put("dataId", configInfo2Save.getDataId());
                skipitem.put("group", configInfo2Save.getGroup());
                skipData.add(skipitem);
            } else if (SameConfigPolicy.OVERWRITE.equals(policy)) {
                succCount++;
                updateConfigInfo(configInfo2Save, srcIp, srcUser, configAdvanceInfo);
            }
        }
    }
    Map<String, Object> result = new HashMap<>(4);
    result.put("succCount", succCount);
    result.put("skipCount", skipCount);
    if (failData != null && !failData.isEmpty()) {
        result.put("failData", failData);
    }
    if (skipData != null && !skipData.isEmpty()) {
        result.put("skipData", skipData);
    }
    return result;
}
/**
 * Deletes a config (main row plus tag relations) and records a "D" history entry
 * unless a config migration is in progress. A missing config is a silent no-op.
 *
 * @param dataId  data id of the config
 * @param group   group of the config
 * @param tenant  tenant (namespace); blank is normalized to the empty string
 * @param srcIp   caller ip recorded with the deletion
 * @param srcUser caller user recorded with the deletion
 */
@Override
public void removeConfigInfo(final String dataId, final String group, final String tenant, final String srcIp,
        final String srcUser) {
    final Timestamp time = new Timestamp(System.currentTimeMillis());
    // Snapshot the full row first so history can capture the deleted content.
    ConfigAllInfo oldConfigAllInfo = findConfigAllInfo(dataId, group, tenant);
    if (Objects.nonNull(oldConfigAllInfo)) {
        try {
            String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
            removeConfigInfoAtomic(dataId, group, tenantTmp, srcIp, srcUser);
            removeTagByIdAtomic(oldConfigAllInfo.getId());
            // During config migration the "D" (delete) history record is skipped.
            if (!CONFIG_MIGRATE_FLAG.get()) {
                historyConfigInfoPersistService.insertConfigHistoryAtomic(oldConfigAllInfo.getId(),
                        oldConfigAllInfo, srcIp, srcUser, time, "D", Constants.FORMAL, null,
                        ConfigExtInfoUtil.getExtInfoFromAllInfo(oldConfigAllInfo));
            }
            EmbeddedStorageContextUtils.onDeleteConfigInfo(tenantTmp, group, dataId, srcIp, time);
            // Executes the queued deletes in one batch.
            boolean result = databaseOperate.update(EmbeddedStorageContextHolder.getCurrentSqlContext());
            if (!result) {
                throw new NacosConfigException("config deletion failed");
            }
        } finally {
            EmbeddedStorageContextHolder.cleanAllContext();
        }
    }
}
/**
 * Deletes configs by primary key and returns the deleted rows' full info.
 *
 * @param ids     ids to delete; null entries are dropped, an empty list returns null
 * @param srcIp   caller ip recorded in history
 * @param srcUser caller user recorded in history
 * @return the configs that were found and deleted; may be null/empty when none matched
 */
@Override
public List<ConfigAllInfo> removeConfigInfoByIds(final List<Long> ids, final String srcIp, final String srcUser) {
    if (CollectionUtils.isEmpty(ids)) {
        return null;
    }
    ids.removeAll(Collections.singleton(null));
    final Timestamp time = new Timestamp(System.currentTimeMillis());
    try {
        String idsStr = StringUtils.join(ids, StringUtils.COMMA);
        // Snapshot the rows first so history can capture the deleted content.
        List<ConfigAllInfo> oldConfigAllInfoList = findAllConfigInfo4Export(null, null, null, null, ids);
        if (CollectionUtils.isNotEmpty(oldConfigAllInfoList)) {
            removeConfigInfoByIdsAtomic(idsStr);
            for (ConfigAllInfo configAllInfo : oldConfigAllInfoList) {
                removeTagByIdAtomic(configAllInfo.getId());
                // NOTE(review): unlike removeConfigInfo, this path records history even when
                // CONFIG_MIGRATE_FLAG is set — confirm the migrate guard was intentionally omitted.
                historyConfigInfoPersistService.insertConfigHistoryAtomic(configAllInfo.getId(), configAllInfo,
                        srcIp, srcUser, time, "D", Constants.FORMAL, null,
                        ConfigExtInfoUtil.getExtInfoFromAllInfo(configAllInfo));
            }
        }
        // NOTE(review): oldConfigAllInfoList could be null if the export query returns null —
        // verify onBatchDeleteConfigInfo tolerates that.
        EmbeddedStorageContextUtils.onBatchDeleteConfigInfo(oldConfigAllInfoList);
        boolean result = databaseOperate.update(EmbeddedStorageContextHolder.getCurrentSqlContext());
        if (!result) {
            throw new NacosConfigException("Failed to config batch deletion");
        }
        return oldConfigAllInfoList;
    } finally {
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
@Override
public void removeTagByIdAtomic(long id) {
    // Queue deletion of every tag relation attached to the given config id.
    final ConfigTagsRelationMapper mapper = mapperManager.findMapper(
            dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION);
    final String deleteSql = mapper.delete(Collections.singletonList("id"));
    EmbeddedStorageContextHolder.addSqlContext(deleteSql, new Object[] {id});
}
@Override
public void removeConfigInfoAtomic(final String dataId, final String group, final String tenant, final String srcIp,
        final String srcUser) {
    // A blank tenant is stored as the empty string, so normalize before deleting.
    final String normalizedTenant = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // Queue the DELETE; srcIp/srcUser are accepted for interface parity but not used here.
    final String deleteSql = mapper.delete(Arrays.asList("data_id", "group_id", "tenant_id"));
    EmbeddedStorageContextHolder.addSqlContext(deleteSql, new Object[] {dataId, group, normalizedTenant});
}
@Override
public void removeConfigInfoByIdsAtomic(final String ids) {
    if (StringUtils.isBlank(ids)) {
        return;
    }
    // The id list arrives as a comma-separated string; parse it into longs for the mapper.
    final List<Long> idList = new ArrayList<>();
    for (String rawId : ids.split(",")) {
        idList.add(Long.parseLong(rawId));
    }
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    final MapperContext context = new MapperContext();
    context.putWhereParameter(FieldConstant.IDS, idList);
    final MapperResult result = mapper.removeConfigInfoByIdsAtomic(context);
    EmbeddedStorageContextHolder.addSqlContext(result.getSql(), result.getParamList().toArray());
}
/**
 * Updates an existing config: rewrites the main row, optionally replaces its tags, and
 * records a "U" history entry (skipped during config migration).
 *
 * @param configInfo        new config content/attributes; blank tenant normalized to empty
 * @param srcIp             caller ip
 * @param srcUser           caller user
 * @param configAdvanceInfo optional advance attributes (e.g. "config_tags"); may be null
 * @return the operate result re-read from storage, or a failed result when the config is missing
 */
@Override
public ConfigOperateResult updateConfigInfo(final ConfigInfo configInfo, final String srcIp, final String srcUser,
        final Map<String, Object> configAdvanceInfo) {
    try {
        // Snapshot the current row first: history must capture the OLD content.
        ConfigAllInfo oldConfigAllInfo = findConfigAllInfo(configInfo.getDataId(), configInfo.getGroup(),
                configInfo.getTenant());
        if (oldConfigAllInfo == null) {
            if (LogUtil.FATAL_LOG.isErrorEnabled()) {
                LogUtil.FATAL_LOG.error("expected config info[dataid:{}, group:{}, tenent:{}] but not found.",
                        configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant());
            }
            return new ConfigOperateResult(false);
        }
        final String tenantTmp =
                StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant();
        oldConfigAllInfo.setTenant(tenantTmp);
        String appNameTmp = oldConfigAllInfo.getAppName();
        // If the appName passed by the user is not empty, the appName of the user is persisted;
        // otherwise, the appName of db is used. Empty string is required to clear appName
        if (configInfo.getAppName() == null) {
            configInfo.setAppName(appNameTmp);
        }
        updateConfigInfoAtomic(configInfo, srcIp, srcUser, configAdvanceInfo);
        String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags");
        if (configTags != null) {
            // Delete all tags and recreate them
            removeTagByIdAtomic(oldConfigAllInfo.getId());
            addConfigTagsRelation(oldConfigAllInfo.getId(), configTags, configInfo.getDataId(),
                    configInfo.getGroup(), configInfo.getTenant());
        }
        Timestamp time = new Timestamp(System.currentTimeMillis());
        // During config migration the "U" (update) history record is skipped.
        if (!CONFIG_MIGRATE_FLAG.get()) {
            historyConfigInfoPersistService.insertConfigHistoryAtomic(oldConfigAllInfo.getId(), oldConfigAllInfo,
                    srcIp, srcUser, time, "U", Constants.FORMAL, null,
                    ConfigExtInfoUtil.getExtInfoFromAllInfo(oldConfigAllInfo));
        }
        EmbeddedStorageContextUtils.onModifyConfigInfo(configInfo, srcIp, time);
        databaseOperate.blockUpdate();
        return getConfigInfoOperateResult(configInfo.getDataId(), configInfo.getGroup(), tenantTmp);
    } finally {
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
/**
 * Compare-and-swap variant of {@code updateConfigInfo}: the row is only updated when
 * the stored md5 matches the md5 carried in {@code configInfo}. Tag replacement and
 * the "U" history entry (skipped during migration) mirror the non-CAS path.
 *
 * @param configInfo        new config; its md5 is the CAS guard, blank tenant normalized to empty
 * @param srcIp             caller ip
 * @param srcUser           caller user
 * @param configAdvanceInfo optional advance attributes (e.g. "config_tags"); may be null
 * @return the operate result, or a failed result when the config is missing or the CAS loses
 */
@Override
public ConfigOperateResult updateConfigInfoCas(final ConfigInfo configInfo, final String srcIp,
        final String srcUser, final Map<String, Object> configAdvanceInfo) {
    try {
        // Snapshot the current row first: history must capture the OLD content.
        ConfigAllInfo oldConfigAllInfo = findConfigAllInfo(configInfo.getDataId(), configInfo.getGroup(),
                configInfo.getTenant());
        if (oldConfigAllInfo == null) {
            if (LogUtil.FATAL_LOG.isErrorEnabled()) {
                LogUtil.FATAL_LOG.error("expected config info[dataid:{}, group:{}, tenent:{}] but not found.",
                        configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant());
            }
            return new ConfigOperateResult(false);
        }
        final String tenantTmp =
                StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant();
        oldConfigAllInfo.setTenant(tenantTmp);
        String appNameTmp = oldConfigAllInfo.getAppName();
        // If the appName passed by the user is not empty, the appName of the user is persisted;
        // otherwise, the appName of db is used. Empty string is required to clear appName
        if (configInfo.getAppName() == null) {
            configInfo.setAppName(appNameTmp);
        }
        updateConfigInfoAtomicCas(configInfo, srcIp, srcUser, configAdvanceInfo);
        String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags");
        if (configTags != null) {
            // Delete all tags and recreate them
            removeTagByIdAtomic(oldConfigAllInfo.getId());
            addConfigTagsRelation(oldConfigAllInfo.getId(), configTags, configInfo.getDataId(),
                    configInfo.getGroup(), configInfo.getTenant());
        }
        Timestamp time = new Timestamp(System.currentTimeMillis());
        // During config migration the "U" (update) history record is skipped.
        if (!CONFIG_MIGRATE_FLAG.get()) {
            historyConfigInfoPersistService.insertConfigHistoryAtomic(oldConfigAllInfo.getId(), oldConfigAllInfo,
                    srcIp, srcUser, time, "U", Constants.FORMAL, null,
                    ConfigExtInfoUtil.getExtInfoFromAllInfo(oldConfigAllInfo));
        }
        EmbeddedStorageContextUtils.onModifyConfigInfo(configInfo, srcIp, time);
        // Unlike the non-CAS path, the batch's success flag is checked so a lost CAS fails loudly.
        boolean success = databaseOperate.blockUpdate();
        if (success) {
            return getConfigInfoOperateResult(configInfo.getDataId(), configInfo.getGroup(), tenantTmp);
        } else {
            return new ConfigOperateResult(false);
        }
    } finally {
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
/**
 * Queues a compare-and-swap UPDATE of the config row, guarded by the md5 carried in
 * {@code configInfo}; the statement only runs when the caller later triggers the batch.
 *
 * <p>NOTE(review): the returned operate result is computed from the PRE-update database
 * state (the queued SQL has not executed yet), and the only visible caller ignores it —
 * confirm before relying on this return value.
 */
private ConfigOperateResult updateConfigInfoAtomicCas(final ConfigInfo configInfo, final String srcIp,
        final String srcUser, Map<String, Object> configAdvanceInfo) {
    final String appNameTmp = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
    final String tenantTmp = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
    final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE);
    final String desc = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("desc");
    final String use = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("use");
    final String effect = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("effect");
    final String type = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("type");
    final String schema = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("schema");
    final String encryptedDataKey =
            configInfo.getEncryptedDataKey() == null ? StringUtils.EMPTY : configInfo.getEncryptedDataKey();
    ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    MapperContext context = new MapperContext();
    // New values written by the update.
    context.putUpdateParameter(FieldConstant.CONTENT, configInfo.getContent());
    context.putUpdateParameter(FieldConstant.MD5, md5Tmp);
    context.putUpdateParameter(FieldConstant.SRC_IP, srcIp);
    context.putUpdateParameter(FieldConstant.SRC_USER, srcUser);
    context.putUpdateParameter(FieldConstant.APP_NAME, appNameTmp);
    context.putUpdateParameter(FieldConstant.C_DESC, desc);
    context.putUpdateParameter(FieldConstant.C_USE, use);
    context.putUpdateParameter(FieldConstant.EFFECT, effect);
    context.putUpdateParameter(FieldConstant.TYPE, type);
    context.putUpdateParameter(FieldConstant.C_SCHEMA, schema);
    context.putUpdateParameter(FieldConstant.ENCRYPTED_DATA_KEY, encryptedDataKey);
    // WHERE clause: row identity plus the CAS guard on the caller-supplied md5.
    context.putWhereParameter(FieldConstant.DATA_ID, configInfo.getDataId());
    context.putWhereParameter(FieldConstant.GROUP_ID, configInfo.getGroup());
    context.putWhereParameter(FieldConstant.TENANT_ID, tenantTmp);
    context.putWhereParameter(FieldConstant.MD5, configInfo.getMd5());
    MapperResult mapperResult = configInfoMapper.updateConfigInfoAtomicCas(context);
    EmbeddedStorageContextHolder.addSqlContext(Boolean.TRUE, mapperResult.getSql(),
            mapperResult.getParamList().toArray());
    return getConfigInfoOperateResult(configInfo.getDataId(), configInfo.getGroup(), tenantTmp);
}
/**
 * Queues the unconditional UPDATE for the main config_info row; does not execute it.
 *
 * <p>NOTE(review): the md5 here is computed with {@code Constants.ENCODE} while the
 * insert path uses {@code Constants.PERSIST_ENCODE} — confirm the two encodings agree.
 *
 * @param configInfo        new values; blank tenant/appName are stored as empty strings
 * @param srcIp             caller ip
 * @param srcUser           caller user
 * @param configAdvanceInfo optional advance attributes ("desc", "use", "effect", "type", "schema"); may be null
 */
@Override
public void updateConfigInfoAtomic(final ConfigInfo configInfo, final String srcIp, final String srcUser,
        Map<String, Object> configAdvanceInfo) {
    final String appNameTmp = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
    final String tenantTmp = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
    final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE);
    final String desc = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("desc");
    final String use = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("use");
    final String effect = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("effect");
    final String type = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("type");
    final String schema = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("schema");
    final String encryptedDataKey =
            configInfo.getEncryptedDataKey() == null ? StringUtils.EMPTY : configInfo.getEncryptedDataKey();
    ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // "gmt_modified@NOW()" consumes no placeholder, so args align with the remaining
    // SET columns in order, followed by the three WHERE columns.
    final String sql = configInfoMapper.update(
            Arrays.asList("content", "md5", "src_ip", "src_user", "gmt_modified@NOW()", "app_name", "c_desc",
                    "c_use", "effect", "type", "c_schema", "encrypted_data_key"),
            Arrays.asList("data_id", "group_id", "tenant_id"));
    final Object[] args = new Object[] {configInfo.getContent(), md5Tmp, srcIp, srcUser, appNameTmp, desc, use,
            effect, type, schema, encryptedDataKey, configInfo.getDataId(), configInfo.getGroup(), tenantTmp};
    EmbeddedStorageContextHolder.addSqlContext(sql, args);
}
@Override
public long findConfigMaxId() {
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    final MapperResult maxIdQuery = mapper.findConfigMaxId(null);
    // An empty table yields null from the query; treat that as max id 0.
    final Long maxId = databaseOperate.queryOne(maxIdQuery.getSql(), Long.class);
    return maxId == null ? 0L : maxId;
}
@Override
public ConfigInfo findConfigInfo(long id) {
    // Fetch a config row by primary key; returns null when no row matches.
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    final String query = mapper.select(
            Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content"),
            Collections.singletonList("id"));
    final Object[] params = new Object[] {id};
    return databaseOperate.queryOne(query, params, CONFIG_INFO_ROW_MAPPER);
}
@Override
public ConfigInfoWrapper findConfigInfo(final String dataId, final String group, final String tenant) {
    // A blank tenant is stored as the empty string, so normalize before querying.
    final String normalizedTenant = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    final String query = mapper.select(
            Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content", "md5", "type",
                    "encrypted_data_key", "gmt_modified"), Arrays.asList("data_id", "group_id", "tenant_id"));
    return databaseOperate.queryOne(query, new Object[] {dataId, group, normalizedTenant},
            CONFIG_INFO_WRAPPER_ROW_MAPPER);
}
/**
 * Pages through configs matching the given exact-match filters, decrypting each row's
 * content and attaching its tag list.
 *
 * <p>When a "config_tags" filter is present the query joins through the tag-relation
 * table; otherwise only config_info is queried.
 *
 * @param pageNo            1-based page number
 * @param pageSize          rows per page
 * @param dataId            optional data id filter (blank means "any")
 * @param group             optional group filter
 * @param tenant            tenant (namespace); blank is normalized to the empty string
 * @param configAdvanceInfo optional filters: "appName", "content", "config_tags"; may be null
 * @return the requested page with decrypted content and tags populated
 */
@Override
public Page<ConfigInfo> findConfigInfo4Page(final int pageNo, final int pageSize, final String dataId,
        final String group, final String tenant, final Map<String, Object> configAdvanceInfo) {
    String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName");
    final String content = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("content");
    final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags");
    MapperResult sql;
    MapperResult sqlCount;
    final MapperContext context = new MapperContext();
    context.putWhereParameter(FieldConstant.TENANT_ID, tenantTmp);
    if (StringUtils.isNotBlank(dataId)) {
        context.putWhereParameter(FieldConstant.DATA_ID, dataId);
    }
    if (StringUtils.isNotBlank(group)) {
        context.putWhereParameter(FieldConstant.GROUP_ID, group);
    }
    if (StringUtils.isNotBlank(appName)) {
        context.putWhereParameter(FieldConstant.APP_NAME, appName);
    }
    if (!StringUtils.isBlank(content)) {
        context.putWhereParameter(FieldConstant.CONTENT, content);
    }
    // Translate the 1-based page number into a row offset.
    context.setStartRow((pageNo - 1) * pageSize);
    context.setPageSize(pageSize);
    if (StringUtils.isNotBlank(configTags)) {
        // Tag filter present: query through the tag-relation join.
        String[] tagArr = configTags.split(",");
        context.putWhereParameter(FieldConstant.TAG_ARR, tagArr);
        ConfigTagsRelationMapper configTagsRelationMapper = mapperManager.findMapper(
                dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION);
        sqlCount = configTagsRelationMapper.findConfigInfo4PageCountRows(context);
        sql = configTagsRelationMapper.findConfigInfo4PageFetchRows(context);
    } else {
        ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
                TableConstant.CONFIG_INFO);
        sqlCount = configInfoMapper.findConfigInfo4PageCountRows(context);
        sql = configInfoMapper.findConfigInfo4PageFetchRows(context);
    }
    PaginationHelper<ConfigInfo> helper = createPaginationHelper();
    Page<ConfigInfo> page = helper.fetchPageLimit(sqlCount, sql, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER);
    for (ConfigInfo configInfo : page.getPageItems()) {
        // Decrypt each row's content in place before returning it.
        Pair<String, String> pair = EncryptionHandler.decryptHandler(configInfo.getDataId(),
                configInfo.getEncryptedDataKey(), configInfo.getContent());
        configInfo.setContent(pair.getSecond());
        // Query and attach the tag list for each returned config.
        // NOTE(review): this issues one tag query per row (N+1 on the page size) — confirm acceptable.
        List<String> configTagList = selectTagByConfig(configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant());
        if (CollectionUtils.isNotEmpty(configTagList)) {
            String configTagsStr = String.join(",", configTagList);
            configInfo.setConfigTags(configTagsStr);
        }
    }
    return page;
}
@Override
public int configInfoCount() {
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // A null count means the query itself failed rather than an empty table.
    final Integer total = databaseOperate.queryOne(mapper.count(null), Integer.class);
    if (total == null) {
        throw new IllegalArgumentException("configInfoCount error");
    }
    return total;
}
@Override
public int configInfoCount(String tenant) {
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    final MapperContext context = new MapperContext();
    context.putWhereParameter(FieldConstant.TENANT_ID, tenant);
    final MapperResult countQuery = mapper.configInfoLikeTenantCount(context);
    // A null count means the query itself failed rather than an empty table.
    final Integer total = databaseOperate.queryOne(countQuery.getSql(), countQuery.getParamList().toArray(),
            Integer.class);
    if (total == null) {
        throw new IllegalArgumentException("configInfoCount error");
    }
    return total;
}
@Override
public List<String> getTenantIdList(int page, int pageSize) {
    final PaginationHelper<Map<String, Object>> helper = createPaginationHelper();
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // Translate the 1-based page number into a row offset.
    final int offset = (page - 1) * pageSize;
    final MapperResult query = mapper.getTenantIdList(new MapperContext(offset, pageSize));
    final Page<Map<String, Object>> rows = helper.fetchPageLimit(query.getSql(),
            query.getParamList().toArray(), page, pageSize, MAP_ROW_MAPPER);
    final List<String> tenantIds = new ArrayList<>();
    for (Map<String, Object> row : rows.getPageItems()) {
        tenantIds.add(String.valueOf(row.get("TENANT_ID")));
    }
    return tenantIds;
}
@Override
public List<String> getGroupIdList(int page, int pageSize) {
    final PaginationHelper<Map<String, Object>> helper = createPaginationHelper();
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // Translate the 1-based page number into a row offset.
    final int offset = (page - 1) * pageSize;
    final MapperResult query = mapper.getGroupIdList(new MapperContext(offset, pageSize));
    final Page<Map<String, Object>> rows = helper.fetchPageLimit(query.getSql(),
            query.getParamList().toArray(), page, pageSize, MAP_ROW_MAPPER);
    final List<String> groupIds = new ArrayList<>();
    for (Map<String, Object> row : rows.getPageItems()) {
        groupIds.add(String.valueOf(row.get("GROUP_ID")));
    }
    return groupIds;
}
@Override
public Page<ConfigInfoWrapper> findAllConfigInfoFragment(final long lastMaxId, final int pageSize,
        boolean needContent) {
    final ConfigInfoMapper mapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    // Cursor-style paging: only rows with id greater than lastMaxId are fetched.
    final MapperContext context = new MapperContext(0, pageSize);
    context.putContextParameter(ContextConstant.NEED_CONTENT, String.valueOf(needContent));
    context.putWhereParameter(FieldConstant.ID, lastMaxId);
    final MapperResult query = mapper.findAllConfigInfoFragment(context);
    final PaginationHelper<ConfigInfoWrapper> helper = createPaginationHelper();
    return helper.fetchPageLimit(query.getSql(), query.getParamList().toArray(), 1, pageSize,
            CONFIG_INFO_WRAPPER_ROW_MAPPER);
}
@Override
public Page<ConfigInfo> findConfigInfoLike4Page(final int pageNo, final int pageSize, final String dataId,
final String group, final String tenant, final Map<String, Object> configAdvanceInfo) {
String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName");
final String content = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("content");
final String types = Optional.ofNullable(configAdvanceInfo).map(e -> (String) e.get(ParametersField.TYPES))
.orElse(null);
final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags");
MapperResult sqlCountRows;
MapperResult sqlFetchRows;
MapperContext context = new MapperContext((pageNo - 1) * pageSize, pageSize);
context.putWhereParameter(FieldConstant.TENANT_ID, generateLikeArgument(tenantTmp));
if (!StringUtils.isBlank(dataId)) {
context.putWhereParameter(FieldConstant.DATA_ID, generateLikeArgument(dataId));
}
if (!StringUtils.isBlank(group)) {
context.putWhereParameter(FieldConstant.GROUP_ID, generateLikeArgument(group));
}
if (!StringUtils.isBlank(appName)) {
context.putWhereParameter(FieldConstant.APP_NAME, appName);
}
if (!StringUtils.isBlank(content)) {
context.putWhereParameter(FieldConstant.CONTENT, generateLikeArgument(content));
}
if (StringUtils.isNotBlank(types)) {
String[] typesArr = types.split(Symbols.COMMA);
context.putWhereParameter(FieldConstant.TYPE, typesArr);
}
if (StringUtils.isNotBlank(configTags)) {
String[] tagArr = configTags.split(",");
for (int i = 0; i < tagArr.length; i++) {
tagArr[i] = generateLikeArgument(tagArr[i]);
}
context.putWhereParameter(FieldConstant.TAG_ARR, tagArr);
ConfigTagsRelationMapper configTagsRelationMapper = mapperManager.findMapper(
dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION);
sqlCountRows = configTagsRelationMapper.findConfigInfoLike4PageCountRows(context);
sqlFetchRows = configTagsRelationMapper.findConfigInfoLike4PageFetchRows(context);
} else {
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
sqlCountRows = configInfoMapper.findConfigInfoLike4PageCountRows(context);
sqlFetchRows = configInfoMapper.findConfigInfoLike4PageFetchRows(context);
}
PaginationHelper<ConfigInfo> helper = createPaginationHelper();
Page<ConfigInfo> page = helper.fetchPageLimit(sqlCountRows, sqlFetchRows, pageNo, pageSize,
CONFIG_INFO_ROW_MAPPER);
for (ConfigInfo configInfo : page.getPageItems()) {
Pair<String, String> pair = EncryptionHandler.decryptHandler(configInfo.getDataId(),
configInfo.getEncryptedDataKey(), configInfo.getContent());
configInfo.setContent(pair.getSecond());
// 查询并设置标签信息
List<String> configTagList = selectTagByConfig(configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant());
if (CollectionUtils.isNotEmpty(configTagList)) {
String configTagsStr = String.join(",", configTagList);
configInfo.setConfigTags(configTagsStr);
}
}
return page;
}
@Override
public List<ConfigInfoStateWrapper> findChangeConfig(final Timestamp startTime, long lastMaxId,
final int pageSize) {
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
MapperContext context = new MapperContext();
context.putWhereParameter(FieldConstant.START_TIME, startTime);
context.putWhereParameter(FieldConstant.PAGE_SIZE, pageSize);
context.putWhereParameter(FieldConstant.LAST_MAX_ID, lastMaxId);
MapperResult mapperResult = configInfoMapper.findChangeConfig(context);
return databaseOperate.queryMany(mapperResult.getSql(), mapperResult.getParamList().toArray(),
CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER);
}
@Override
public List<String> selectTagByConfig(String dataId, String group, String tenant) {
ConfigTagsRelationMapper configTagsRelationMapper = mapperManager.findMapper(
dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION);
String sql = configTagsRelationMapper.select(Collections.singletonList("tag_name"),
Arrays.asList("data_id", "group_id", "tenant_id"));
return databaseOperate.queryMany(sql, new Object[] {dataId, group, tenant}, String.class);
}
@Override
public List<ConfigInfo> findConfigInfosByIds(final String ids) {
if (StringUtils.isBlank(ids)) {
return null;
}
List<Long> paramList = new ArrayList<>();
String[] idArr = ids.split(",");
for (int i = 0; i < idArr.length; i++) {
paramList.add(Long.parseLong(idArr[i]));
}
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
MapperContext context = new MapperContext();
context.putWhereParameter(FieldConstant.IDS, paramList);
MapperResult mapperResult = configInfoMapper.findConfigInfosByIds(context);
return databaseOperate.queryMany(mapperResult.getSql(), mapperResult.getParamList().toArray(),
CONFIG_INFO_ROW_MAPPER);
}
@Override
public ConfigAdvanceInfo findConfigAdvanceInfo(final String dataId, final String group, final String tenant) {
final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
List<String> configTagList = this.selectTagByConfig(dataId, group, tenant);
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
ConfigAdvanceInfo configAdvance = databaseOperate.queryOne(configInfoMapper.select(
Arrays.asList("gmt_create", "gmt_modified", "src_user", "src_ip", "c_desc", "c_use", "effect", "type",
"c_schema"), Arrays.asList("data_id", "group_id", "tenant_id")),
new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER);
if (CollectionUtils.isNotEmpty(configTagList)) {
StringBuilder configTagsTmp = new StringBuilder();
for (String configTag : configTagList) {
if (configTagsTmp.length() == 0) {
configTagsTmp.append(configTag);
} else {
configTagsTmp.append(',').append(configTag);
}
}
configAdvance.setConfigTags(configTagsTmp.toString());
}
return configAdvance;
}
@Override
public ConfigAllInfo findConfigAllInfo(final String dataId, final String group, final String tenant) {
final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
final String sql = configInfoMapper.select(
Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content", "md5", "gmt_create",
"gmt_modified", "src_user", "src_ip", "c_desc", "c_use", "effect", "type", "c_schema",
"encrypted_data_key"), Arrays.asList("data_id", "group_id", "tenant_id"));
List<String> configTagList = selectTagByConfig(dataId, group, tenant);
ConfigAllInfo configAdvance = databaseOperate.queryOne(sql, new Object[] {dataId, group, tenantTmp},
CONFIG_ALL_INFO_ROW_MAPPER);
if (configTagList != null && !configTagList.isEmpty()) {
StringBuilder configTagsTmp = new StringBuilder();
for (String configTag : configTagList) {
if (configTagsTmp.length() == 0) {
configTagsTmp.append(configTag);
} else {
configTagsTmp.append(',').append(configTag);
}
}
configAdvance.setConfigTags(configTagsTmp.toString());
}
return configAdvance;
}
@Override
public List<ConfigAllInfo> findAllConfigInfo4Export(final String dataId, final String group, final String tenant,
final String appName, final List<Long> ids) {
String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
MapperContext context = new MapperContext();
if (!CollectionUtils.isEmpty(ids)) {
context.putWhereParameter(FieldConstant.IDS, ids);
} else {
context.putWhereParameter(FieldConstant.TENANT_ID, tenantTmp);
if (!StringUtils.isBlank(dataId)) {
context.putWhereParameter(FieldConstant.DATA_ID, generateLikeArgument(dataId));
}
if (StringUtils.isNotBlank(group)) {
context.putWhereParameter(FieldConstant.GROUP_ID, group);
}
if (StringUtils.isNotBlank(appName)) {
context.putWhereParameter(FieldConstant.APP_NAME, appName);
}
}
MapperResult mapperResult = configInfoMapper.findAllConfigInfo4Export(context);
List<ConfigAllInfo> configAllInfos = databaseOperate.queryMany(mapperResult.getSql(),
mapperResult.getParamList().toArray(), CONFIG_ALL_INFO_ROW_MAPPER);
if (CollectionUtils.isEmpty(configAllInfos)) {
return configAllInfos;
}
for (ConfigAllInfo configAllInfo : configAllInfos) {
List<String> configTagList = selectTagByConfig(configAllInfo.getDataId(), configAllInfo.getGroup(),
configAllInfo.getTenant());
if (CollectionUtils.isNotEmpty(configTagList)) {
StringBuilder configTags = new StringBuilder();
for (String configTag : configTagList) {
if (configTags.length() == 0) {
configTags.append(configTag);
} else {
configTags.append(',').append(configTag);
}
}
configAllInfo.setConfigTags(configTags.toString());
}
}
return configAllInfos;
}
@Override
public List<ConfigInfoWrapper> queryConfigInfoByNamespace(String tenantId) {
if (Objects.isNull(tenantId)) {
throw new IllegalArgumentException("tenantId can not be null");
}
String tenantTmp = StringUtils.isBlank(tenantId) ? StringUtils.EMPTY : tenantId;
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
final String sql = configInfoMapper.select(
Arrays.asList("data_id", "group_id", "tenant_id", "app_name", "type", "gmt_modified"),
Collections.singletonList("tenant_id"));
return databaseOperate.queryMany(sql, new Object[] {tenantTmp}, CONFIG_INFO_WRAPPER_ROW_MAPPER);
}
}
|
EmbeddedConfigInfoPersistServiceImpl
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java
|
{
"start": 8361,
"end": 8619
}
|
class ____<A extends Bar> {",
" @Inject GenericClass(A a, Bar bar) {}",
"}");
Source packagePrivateBar =
CompilerTests.javaSource(
"test.Bar",
"package test;",
"",
"
|
GenericClass
|
java
|
quarkusio__quarkus
|
integration-tests/logging-min-level-set/src/test/java/io/quarkus/it/logging/minlevel/set/NativeLoggingMinLevelBelowIT.java
|
{
"start": 130,
"end": 253
}
|
class ____ extends LoggingMinLevelBelowTest {
// Execute the same tests but in native mode.
}
|
NativeLoggingMinLevelBelowIT
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/pool/xa/JtdsXAConnection.java
|
{
"start": 959,
"end": 2313
}
|
class ____ implements XAConnection {
private Connection connection;
private final XAResource resource;
private final int xaConnectionId;
public JtdsXAConnection(Connection connection) throws SQLException {
this.resource = new JtdsXAResource(this, connection);
this.connection = connection;
this.xaConnectionId = XASupport.xa_open(connection);
}
int getXAConnectionID() {
return this.xaConnectionId;
}
@Override
public Connection getConnection() throws SQLException {
return connection;
}
@Override
public void close() throws SQLException {
try {
XASupport.xa_close(connection, xaConnectionId);
} catch (SQLException e) {
// Ignore close errors
}
JdbcUtils.close(connection);
}
@Override
public void addConnectionEventListener(ConnectionEventListener listener) {
}
@Override
public void removeConnectionEventListener(ConnectionEventListener listener) {
}
@Override
public void addStatementEventListener(StatementEventListener listener) {
}
@Override
public void removeStatementEventListener(StatementEventListener listener) {
}
@Override
public XAResource getXAResource() throws SQLException {
return resource;
}
}
|
JtdsXAConnection
|
java
|
apache__camel
|
components/camel-http/src/test/java/org/apache/camel/component/http/HttpCamelHeadersTest.java
|
{
"start": 3679,
"end": 4617
}
|
class ____ extends HeaderValidationHandler {
private final String expectProtocolVersion;
MyHeaderValidationHandler(String expectedMethod, String protocolVersion,
String responseContent, Map<String, String> expectedHeaders) {
super(expectedMethod, null, null, responseContent, expectedHeaders);
expectProtocolVersion = protocolVersion;
}
@Override
public void handle(
final ClassicHttpRequest request, final ClassicHttpResponse response,
final HttpContext context)
throws HttpException, IOException {
if (!expectProtocolVersion.equals(request.getVersion().toString())) {
response.setCode(HttpStatus.SC_HTTP_VERSION_NOT_SUPPORTED);
return;
}
super.handle(request, response, context);
}
}
}
|
MyHeaderValidationHandler
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/AtAspectJAnnotationBindingTests.java
|
{
"start": 1975,
"end": 2326
}
|
class ____ {
@Around("execution(* *(..)) && @annotation(testAnn)")
public Object doWithAnnotation(ProceedingJoinPoint pjp, TestAnnotation testAnn) throws Throwable {
String annValue = testAnn.value();
Object result = pjp.proceed();
return (result instanceof String ? annValue + " " + result : result);
}
}
|
AtAspectJAnnotationBindingTestAspect
|
java
|
apache__camel
|
test-infra/camel-test-infra-core/src/test/java/org/apache/camel/test/infra/core/AnnotationProcessor.java
|
{
"start": 2785,
"end": 3089
}
|
class ____ may indicate that a field needs to be evaluated
* @param instance the test instance
* @param context
*/
void evalField(
ExtensionContext extensionContext, Class<? extends Annotation> annotationClass, Object instance,
CamelContext context);
}
|
that
|
java
|
jhy__jsoup
|
src/main/java/org/jsoup/select/Evaluator.java
|
{
"start": 25980,
"end": 26693
}
|
class ____ extends Evaluator {
private final Regex pattern;
public Matches(Regex pattern) {
this.pattern = pattern;
}
public Matches(Pattern pattern) {
this(Regex.fromPattern(pattern));
}
@Override
public boolean matches(Element root, Element element) {
return pattern.matcher(element.text()).find();
}
@Override protected int cost() {
return 8;
}
@Override
public String toString() {
return String.format(":matches(%s)", pattern);
}
}
/**
* Evaluator for matching Element's own text with regex
*/
public static final
|
Matches
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/acl/AccessControlEntryData.java
|
{
"start": 997,
"end": 3440
}
|
class ____ {
private final String principal;
private final String host;
private final AclOperation operation;
private final AclPermissionType permissionType;
AccessControlEntryData(String principal, String host, AclOperation operation, AclPermissionType permissionType) {
this.principal = principal;
this.host = host;
this.operation = operation;
this.permissionType = permissionType;
}
String principal() {
return principal;
}
String host() {
return host;
}
AclOperation operation() {
return operation;
}
AclPermissionType permissionType() {
return permissionType;
}
/**
* Returns a string describing an ANY or UNKNOWN field, or null if there is
* no such field.
*/
public String findIndefiniteField() {
if (principal() == null)
return "Principal is NULL";
if (host() == null)
return "Host is NULL";
if (operation() == AclOperation.ANY)
return "Operation is ANY";
if (operation() == AclOperation.UNKNOWN)
return "Operation is UNKNOWN";
if (permissionType() == AclPermissionType.ANY)
return "Permission type is ANY";
if (permissionType() == AclPermissionType.UNKNOWN)
return "Permission type is UNKNOWN";
return null;
}
@Override
public String toString() {
return "(principal=" + (principal == null ? "<any>" : principal) +
", host=" + (host == null ? "<any>" : host) +
", operation=" + operation +
", permissionType=" + permissionType + ")";
}
/**
* Return true if there are any UNKNOWN components.
*/
boolean isUnknown() {
return operation.isUnknown() || permissionType.isUnknown();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof AccessControlEntryData))
return false;
AccessControlEntryData other = (AccessControlEntryData) o;
return Objects.equals(principal, other.principal) &&
Objects.equals(host, other.host) &&
Objects.equals(operation, other.operation) &&
Objects.equals(permissionType, other.permissionType);
}
@Override
public int hashCode() {
return Objects.hash(principal, host, operation, permissionType);
}
}
|
AccessControlEntryData
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/misc/CompositeIterator.java
|
{
"start": 740,
"end": 2245
}
|
class ____<T> implements Iterator<T> {
private Iterator<Iterator<T>> listIterator;
private Iterator<T> currentIterator;
private int limit;
private int counter;
public CompositeIterator(Iterator<Iterator<T>> iterators, int limit) {
listIterator = iterators;
this.limit = limit;
}
@Override
public boolean hasNext() {
if (currentIterator == null || !currentIterator.hasNext()) {
while (listIterator.hasNext()) {
Iterator<T> iterator = listIterator.next();
currentIterator = iterator;
if (iterator.hasNext()) {
if (limit == 0) {
return true;
} else {
return limit >= counter + 1;
}
}
}
return false;
}
if (currentIterator.hasNext()) {
if (limit == 0) {
return true;
} else {
return limit >= counter + 1;
}
}
return false;
}
@Override
public T next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
counter++;
return currentIterator.next();
}
@Override
public void remove() {
if (currentIterator == null) {
throw new IllegalStateException("next() has not yet been called");
}
currentIterator.remove();
}
}
|
CompositeIterator
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/record/Record.java
|
{
"start": 1079,
"end": 4045
}
|
interface ____ {
Header[] EMPTY_HEADERS = new Header[0];
/**
* The offset of this record in the log
* @return the offset
*/
long offset();
/**
* Get the sequence number assigned by the producer.
* @return the sequence number
*/
int sequence();
/**
* Get the size in bytes of this record.
* @return the size of the record in bytes
*/
int sizeInBytes();
/**
* Get the record's timestamp.
* @return the record's timestamp
*/
long timestamp();
/**
* Raise a {@link org.apache.kafka.common.errors.CorruptRecordException} if the record does not have a valid checksum.
*/
void ensureValid();
/**
* Get the size in bytes of the key.
* @return the size of the key, or -1 if there is no key
*/
int keySize();
/**
* Check whether this record has a key
* @return true if there is a key, false otherwise
*/
boolean hasKey();
/**
* Get the record's key.
* @return the key or null if there is none
*/
ByteBuffer key();
/**
* Get the size in bytes of the value.
* @return the size of the value, or -1 if the value is null
*/
int valueSize();
/**
* Check whether a value is present (i.e. if the value is not null)
* @return true if so, false otherwise
*/
boolean hasValue();
/**
* Get the record's value
* @return the (nullable) value
*/
ByteBuffer value();
/**
* Check whether the record has a particular magic. For versions prior to 2, the record contains its own magic,
* so this function can be used to check whether it matches a particular value. For version 2 and above, this
* method returns true if the passed magic is greater than or equal to 2.
*
* @param magic the magic value to check
* @return true if the record has a magic field (versions prior to 2) and the value matches
*/
boolean hasMagic(byte magic);
/**
* For versions prior to 2, check whether the record is compressed (and therefore
* has nested record content). For versions 2 and above, this always returns false.
* @return true if the magic is lower than 2 and the record is compressed
*/
boolean isCompressed();
/**
* For versions prior to 2, the record contained a timestamp type attribute. This method can be
* used to check whether the value of that attribute matches a particular timestamp type. For versions
* 2 and above, this will always be false.
*
* @param timestampType the timestamp type to compare
* @return true if the version is lower than 2 and the timestamp type matches
*/
boolean hasTimestampType(TimestampType timestampType);
/**
* Get the headers. For magic versions 1 and below, this always returns an empty array.
*
* @return the array of headers
*/
Header[] headers();
}
|
Record
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java
|
{
"start": 5684,
"end": 11591
}
|
class ____ {
private final SpatialEvaluatorFactory.SpatialSourceResolution supplier;
private final boolean pointsOnly;
private final boolean supportsGrid;
SpatialTypeResolver(SpatialEvaluatorFactory.SpatialSourceResolution supplier, boolean pointsOnly, boolean supportsGrid) {
this.supplier = supplier;
this.pointsOnly = pointsOnly;
this.supportsGrid = supportsGrid;
}
public Expression left() {
return supplier.left();
}
public Expression right() {
return supplier.right();
}
public String sourceText() {
return supplier.source().text();
}
protected TypeResolution resolveType() {
if (left().foldable() && right().foldable() == false || isNull(left().dataType())) {
// Left is literal, but right is not, check the left field’s type against the right field
return resolveType(right(), left(), SECOND, FIRST);
} else {
// All other cases check the right against the left
return resolveType(left(), right(), FIRST, SECOND);
}
}
protected Expression.TypeResolution isCompatibleSpatial(Expression e, TypeResolutions.ParamOrdinal paramOrd) {
return pointsOnly
? EsqlTypeResolutions.isSpatialPoint(e, sourceText(), paramOrd)
: (supportsGrid
? EsqlTypeResolutions.isSpatialOrGrid(e, sourceText(), paramOrd)
: EsqlTypeResolutions.isSpatial(e, sourceText(), paramOrd));
}
protected Expression.TypeResolution isGeoPoint(Expression e, TypeResolutions.ParamOrdinal paramOrd) {
return isType(e, GEO_POINT::equals, sourceText(), paramOrd, GEO_POINT.typeName());
}
private TypeResolution resolveType(
Expression leftExpression,
Expression rightExpression,
TypeResolutions.ParamOrdinal leftOrdinal,
TypeResolutions.ParamOrdinal rightOrdinal
) {
TypeResolution leftResolution = isCompatibleSpatial(leftExpression, leftOrdinal);
TypeResolution rightResolution = isCompatibleSpatial(rightExpression, rightOrdinal);
if (leftResolution.resolved()) {
return resolveType(leftExpression, rightExpression, rightOrdinal);
} else if (rightResolution.resolved()) {
return resolveType(rightExpression, leftExpression, leftOrdinal);
} else {
return leftResolution;
}
}
protected TypeResolution resolveType(
Expression spatialExpression,
Expression otherExpression,
TypeResolutions.ParamOrdinal otherParamOrdinal
) {
if (isNull(spatialExpression.dataType())) {
return isCompatibleSpatial(otherExpression, otherParamOrdinal);
}
TypeResolution resolution = isSameSpatialType(spatialExpression.dataType(), otherExpression, sourceText(), otherParamOrdinal);
// TODO Remove these grid checks once we support geo_shape relation to geoGrid
// but retain a rule to disallow grid-grid relations
if (resolution.resolved() && DataType.isGeoGrid(spatialExpression.dataType())) {
resolution = isGeoPoint(otherExpression, otherParamOrdinal);
}
if (resolution.resolved() && DataType.isGeoGrid(otherExpression.dataType())) {
resolution = isGeoPoint(spatialExpression, otherParamOrdinal == FIRST ? SECOND : FIRST);
}
if (resolution.unresolved()) {
return resolution;
}
supplier.setCrsType(spatialExpression.dataType());
return TypeResolution.TYPE_RESOLVED;
}
protected TypeResolution isSameSpatialType(
DataType spatialDataType,
Expression expression,
String operationName,
TypeResolutions.ParamOrdinal paramOrd
) {
Predicate<DataType> isSpatialType = pointsOnly
? dt -> dt == spatialDataType
: (supportsGrid
? dt -> DataType.isSpatialOrGrid(dt) && spatialCRSCompatible(spatialDataType, dt)
: dt -> DataType.isSpatial(dt) && spatialCRSCompatible(spatialDataType, dt));
return isType(expression, isSpatialType, operationName, paramOrd, compatibleTypeNames(spatialDataType));
}
}
@Override
public void setCrsType(DataType dataType) {
crsType = SpatialCrsType.fromDataType(dataType);
}
private static final String[] GEO_TYPE_NAMES = new String[] { GEO_POINT.typeName(), GEO_SHAPE.typeName() };
private static final String[] CARTESIAN_TYPE_NAMES = new String[] { CARTESIAN_POINT.typeName(), CARTESIAN_SHAPE.typeName() };
protected static boolean spatialCRSCompatible(DataType spatialDataType, DataType otherDataType) {
return DataType.isSpatialGeo(spatialDataType) && DataType.isSpatialGeo(otherDataType)
|| DataType.isSpatialGeo(spatialDataType) == false && DataType.isSpatialGeo(otherDataType) == false;
}
static String[] compatibleTypeNames(DataType spatialDataType) {
return DataType.isSpatialGeo(spatialDataType) ? GEO_TYPE_NAMES : CARTESIAN_TYPE_NAMES;
}
@Override
public SpatialCrsType crsType() {
if (crsType == null) {
resolveType();
}
return crsType;
}
public boolean leftDocValues() {
return leftDocValues;
}
public boolean rightDocValues() {
return rightDocValues;
}
/**
* For most spatial functions we only need to know if the CRS is geo or cartesian, not whether the type is point or shape.
* This
|
SpatialTypeResolver
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FifoAppComparator.java
|
{
"start": 1226,
"end": 1863
}
|
class ____ implements Comparator<FSAppAttempt>, Serializable {
private static final long serialVersionUID = 3428835083489547918L;
public int compare(FSAppAttempt a1, FSAppAttempt a2) {
int res = a1.getPriority().compareTo(a2.getPriority());
if (res == 0) {
if (a1.getStartTime() < a2.getStartTime()) {
res = -1;
} else {
res = (a1.getStartTime() == a2.getStartTime() ? 0 : 1);
}
}
if (res == 0) {
// If there is a tie, break it by app ID to get a deterministic order
res = a1.getApplicationId().compareTo(a2.getApplicationId());
}
return res;
}
}
|
FifoAppComparator
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
|
{
"start": 22326,
"end": 22864
}
|
class ____
implements EventHandler<TimelineV2PublishEvent> {
@Override
public void handle(TimelineV2PublishEvent event) {
switch (event.getType()) {
case PUBLISH_APPLICATION_FINISHED_ENTITY:
putEntity(event.getEntity(), event.getApplicationId());
((ApplicationFinishPublishEvent) event).getRMAppImpl()
.stopTimelineCollector();
break;
default:
putEntity(event.getEntity(), event.getApplicationId());
break;
}
}
}
private
|
TimelineV2EventHandler
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java
|
{
"start": 2913,
"end": 3001
}
|
class ____ setup the cluster, and get to BlockReader and DataNode for a block.
*/
public
|
to
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialExtentAggregationIT.java
|
{
"start": 540,
"end": 922
}
|
class ____ extends SpatialExtentAggregationTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(SpatialPlugin.class, EsqlPluginWithEnterpriseOrTrialLicense.class);
}
@Override
public void testStExtentAggregationWithShapes() {
assertStExtentFromIndex("index_geo_shape");
}
}
|
SpatialExtentAggregationIT
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/EvalSerializationTests.java
|
{
"start": 538,
"end": 1450
}
|
class ____ extends AbstractLogicalPlanSerializationTests<Eval> {
@Override
protected Eval createTestInstance() {
Source source = randomSource();
LogicalPlan child = randomChild(0);
List<Alias> fields = randomList(0, 10, AliasTests::randomAlias);
return new Eval(source, child, fields);
}
@Override
protected Eval mutateInstance(Eval instance) throws IOException {
LogicalPlan child = instance.child();
List<Alias> fields = instance.fields();
if (randomBoolean()) {
child = randomValueOtherThan(child, () -> randomChild(0));
} else {
fields = randomValueOtherThan(fields, () -> randomList(0, 10, AliasTests::randomAlias));
}
return new Eval(instance.source(), child, fields);
}
@Override
protected boolean alwaysEmptySource() {
return true;
}
}
|
EvalSerializationTests
|
java
|
resilience4j__resilience4j
|
resilience4j-spring/src/test/java/io/github/resilience4j/retry/configure/RxJava3RetryAspectExtTest.java
|
{
"start": 549,
"end": 1445
}
|
class ____ {
@Mock
ProceedingJoinPoint proceedingJoinPoint;
@InjectMocks
RxJava3RetryAspectExt rxJava3RetryAspectExt;
@Test
public void testCheckTypes() {
assertThat(rxJava3RetryAspectExt.canHandleReturnType(Flowable.class)).isTrue();
assertThat(rxJava3RetryAspectExt.canHandleReturnType(Single.class)).isTrue();
}
@Test
public void testReactorTypes() throws Throwable {
Retry retry = Retry.ofDefaults("test");
when(proceedingJoinPoint.proceed()).thenReturn(Single.just("Test"));
assertThat(rxJava3RetryAspectExt.handle(proceedingJoinPoint, retry, "testMethod"))
.isNotNull();
when(proceedingJoinPoint.proceed()).thenReturn(Flowable.just("Test"));
assertThat(rxJava3RetryAspectExt.handle(proceedingJoinPoint, retry, "testMethod"))
.isNotNull();
}
}
|
RxJava3RetryAspectExtTest
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/upgrade/HttpUpgradeCheckPathParamsTest.java
|
{
"start": 1069,
"end": 2326
}
|
class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addClasses(Endpoint.class, UpgradeCheck.class, WSClient.class));
@Inject
Vertx vertx;
@TestHTTPResource("accept")
URI acceptUri;
@TestHTTPResource("reject")
URI rejectUri;
@BeforeEach
public void cleanUp() {
Endpoint.OPENED.set(false);
}
@Test
public void testHttpUpgradeRejected() {
try (WSClient client = new WSClient(vertx)) {
CompletionException ce = assertThrows(CompletionException.class,
() -> client.connect(rejectUri));
Throwable root = ExceptionUtil.getRootCause(ce);
assertInstanceOf(UpgradeRejectedException.class, root);
assertTrue(root.getMessage().contains("404"), root.getMessage());
}
}
@Test
public void testHttpUpgradePermitted() {
try (WSClient client = new WSClient(vertx)) {
client.connect(acceptUri);
Awaitility.await().atMost(Duration.ofSeconds(2)).until(Endpoint.OPENED::get);
}
}
@WebSocket(path = "/{action}")
public static
|
HttpUpgradeCheckPathParamsTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/remote/client/RemoteClientConfiguration.java
|
{
"start": 3627,
"end": 5859
}
|
class ____ implements InitializingBean {
private static final Log logger = LogFactory.getLog(RemoteClientConfiguration.class);
private final DevToolsProperties properties;
@Value("${remoteUrl}")
private @Nullable String remoteUrl;
public RemoteClientConfiguration(DevToolsProperties properties) {
this.properties = properties;
}
@Bean
public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
@Bean
public ClientHttpRequestFactory clientHttpRequestFactory() {
List<ClientHttpRequestInterceptor> interceptors = Collections.singletonList(getSecurityInterceptor());
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
Proxy proxy = this.properties.getRemote().getProxy();
if (proxy.getHost() != null && proxy.getPort() != null) {
requestFactory
.setProxy(new java.net.Proxy(Type.HTTP, new InetSocketAddress(proxy.getHost(), proxy.getPort())));
}
return new InterceptingClientHttpRequestFactory(requestFactory, interceptors);
}
private ClientHttpRequestInterceptor getSecurityInterceptor() {
RemoteDevToolsProperties remoteProperties = this.properties.getRemote();
String secretHeaderName = remoteProperties.getSecretHeaderName();
String secret = remoteProperties.getSecret();
Assert.state(secret != null,
"The environment value 'spring.devtools.remote.secret' is required to secure your connection.");
return new HttpHeaderInterceptor(secretHeaderName, secret);
}
@Override
public void afterPropertiesSet() {
logWarnings();
}
/**
 * Logs configuration warnings after bean initialization: remote restart
 * disabled, and a missing or non-HTTPS remote URL.
 */
private void logWarnings() {
    RemoteDevToolsProperties remoteProperties = this.properties.getRemote();
    if (!remoteProperties.getRestart().isEnabled()) {
        logger.warn("Remote restart is disabled.");
    }
    // Warns for plain http:// as well as a null URL — both leave traffic unprotected.
    if (this.remoteUrl == null || !this.remoteUrl.startsWith("https://")) {
        logger.warn(LogMessage.format(
                "The connection to %s is insecure. You should use a URL starting with 'https://'.",
                this.remoteUrl));
    }
}
/**
* LiveReload configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnBooleanProperty(name = "spring.devtools.livereload.enabled")
static
|
RemoteClientConfiguration
|
java
|
apache__camel
|
components/camel-google/camel-google-calendar/src/main/java/org/apache/camel/component/google/calendar/GoogleCalendarConfiguration.java
|
{
"start": 1352,
"end": 6487
}
|
/**
 * Endpoint/component configuration holder for the Google Calendar component:
 * the API and method to invoke plus the OAuth 2 / service-account credentials.
 */
class ____ {

    // Which Google Calendar API to call (required path option).
    @UriPath
    @Metadata(required = true)
    private GoogleCalendarApiName apiName;
    // Sub-operation on the selected API (required path option).
    @UriPath(enums = "calendarImport,clear,delete,get,insert,instances,list,move,patch,query,quickAdd,stop,update,watch")
    @Metadata(required = true)
    private String methodName;
    @UriParam(defaultValue = CalendarScopes.CALENDAR)
    private String scopes;
    @UriParam
    private String clientId;
    @UriParam
    private String applicationName;
    @UriParam(label = "security", secret = true)
    private String emailAddress;
    @UriParam(label = "security", secret = true)
    private String clientSecret;
    @UriParam(label = "security", secret = true)
    private String accessToken;
    @UriParam(label = "security", secret = true)
    private String refreshToken;
    @UriParam(label = "security", secret = true)
    private String p12FileName;
    @UriParam(label = "security", secret = true)
    private String user;

    /* Service account */
    @UriParam(label = "security")
    private String serviceAccountKey;
    @UriParam
    private String delegate;

    public GoogleCalendarApiName getApiName() {
        return apiName;
    }

    /**
     * What kind of operation to perform
     */
    public void setApiName(GoogleCalendarApiName apiName) {
        this.apiName = apiName;
    }

    public String getMethodName() {
        return methodName;
    }

    /**
     * What sub operation to use for the selected operation
     */
    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }

    public String getClientId() {
        return clientId;
    }

    /**
     * Client ID of the calendar application
     */
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getEmailAddress() {
        return emailAddress;
    }

    /**
     * The emailAddress of the Google Service Account.
     */
    public void setEmailAddress(String emailAddress) {
        this.emailAddress = emailAddress;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    /**
     * Client secret of the calendar application
     */
    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    public String getAccessToken() {
        return accessToken;
    }

    /**
     * OAuth 2 access token. This typically expires after an hour so refreshToken is recommended for long term usage.
     */
    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    public String getRefreshToken() {
        return refreshToken;
    }

    /**
     * OAuth 2 refresh token. Using this, the Google Calendar component can obtain a new accessToken whenever the
     * current one expires - a necessity if the application is long-lived.
     */
    public void setRefreshToken(String refreshToken) {
        this.refreshToken = refreshToken;
    }

    public String getApplicationName() {
        return applicationName;
    }

    /**
     * Google calendar application name. Example would be "camel-google-calendar/1.0"
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    public String getScopes() {
        return scopes;
    }

    public Collection<String> getScopesAsList() {
        // NOTE(review): returns null (not an empty collection) when scopes is
        // unset, and the comma-split does not trim whitespace around entries —
        // confirm callers handle both before changing.
        if (scopes != null) {
            return List.of(scopes.split(","));
        } else {
            return null;
        }
    }

    /**
     * Specifies the level of permissions you want a calendar application to have to a user account. See
     * https://developers.google.com/identity/protocols/googlescopes for more info. Multiple scopes can be separated by
     * comma.
     *
     * @see com.google.api.services.calendar.CalendarScopes
     */
    public void setScopes(String scopes) {
        this.scopes = scopes;
    }

    public String getP12FileName() {
        return p12FileName;
    }

    /**
     * The name of the p12 file which has the private key to use with the Google Service Account.
     */
    public void setP12FileName(String p12FileName) {
        this.p12FileName = p12FileName;
    }

    public String getUser() {
        return user;
    }

    /**
     * The email address of the user the application is trying to impersonate in the service account flow
     */
    public void setUser(String user) {
        this.user = user;
    }

    public String getServiceAccountKey() {
        return serviceAccountKey;
    }

    /**
     * Service account key in json format to authenticate an application as a service account. Accept base64 adding the
     * prefix "base64:"
     *
     * @param serviceAccountKey String file, classpath, base64, or http url
     */
    public void setServiceAccountKey(String serviceAccountKey) {
        this.serviceAccountKey = serviceAccountKey;
    }

    public String getDelegate() {
        return delegate;
    }

    /**
     * Delegate for wide-domain service account
     */
    public void setDelegate(String delegate) {
        this.delegate = delegate;
    }
}
|
GoogleCalendarConfiguration
|
java
|
google__auto
|
factory/src/test/resources/good/Generics.java
|
{
"start": 1359,
"end": 1482
}
|
// Test fixture: a generic Foo implementation whose constructor takes a
// @Provided dependency, exercising generics handling in the factory generator.
class ____<M extends Bar> implements Foo<M> {
    // NOTE(review): the parameter is intentionally unused — the fixture only
    // exercises the generated factory signature, not runtime behavior.
    ExplicitFooImpl(@Provided M unused) {}
}
abstract static
|
ExplicitFooImpl
|
java
|
apache__camel
|
components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpEndpointUriFactoryTest.java
|
{
"start": 1025,
"end": 1523
}
|
// Exercises the generated netty-http endpoint URI factory against its
// required path parameters (protocol://host:port/path).
class ____ extends EndpointUriFactoryTestSupport {

    @Override
    protected String scheme() {
        // Component scheme under test.
        return "netty-http";
    }

    @Override
    protected Map<String, Object> pathParameters() {
        Map<String, Object> params = new HashMap<>();
        params.put("protocol", "http");
        params.put("host", "localhost");
        // A free port per run so parallel test executions do not collide.
        params.put("port", AvailablePortFinder.getNextAvailable());
        params.put("path", "test");
        return params;
    }
}
|
NettyHttpEndpointUriFactoryTest
|
java
|
netty__netty
|
resolver-dns/src/main/java/io/netty/resolver/dns/DefaultDnsCache.java
|
{
"start": 1459,
"end": 6269
}
|
class ____ implements DnsCache {
private final Cache<DefaultDnsCacheEntry> resolveCache = new Cache<DefaultDnsCacheEntry>() {
@Override
protected boolean shouldReplaceAll(DefaultDnsCacheEntry entry) {
return entry.cause() != null;
}
@Override
protected boolean equals(DefaultDnsCacheEntry entry, DefaultDnsCacheEntry otherEntry) {
if (entry.address() != null) {
return entry.address().equals(otherEntry.address());
}
if (otherEntry.address() != null) {
return false;
}
return entry.cause().equals(otherEntry.cause());
}
};
private final int minTtl;
private final int maxTtl;
private final int negativeTtl;
/**
* Create a cache that respects the TTL returned by the DNS server
* and doesn't cache negative responses.
*/
public DefaultDnsCache() {
    // Defaults: no minimum TTL, implementation-maximum cap, negative caching off.
    this(0, Cache.MAX_SUPPORTED_TTL_SECS, 0);
}
/**
* Create a cache.
* @param minTtl the minimum TTL
* @param maxTtl the maximum TTL
* @param negativeTtl the TTL for failed queries
*/
public DefaultDnsCache(int minTtl, int maxTtl, int negativeTtl) {
    // Clamp each value to the implementation ceiling before storing; the
    // ordering check below intentionally uses the raw arguments so an
    // inverted (minTtl > maxTtl) pair still fails even after clamping.
    this.minTtl = Math.min(Cache.MAX_SUPPORTED_TTL_SECS, checkPositiveOrZero(minTtl, "minTtl"));
    this.maxTtl = Math.min(Cache.MAX_SUPPORTED_TTL_SECS, checkPositiveOrZero(maxTtl, "maxTtl"));
    if (minTtl > maxTtl) {
        throw new IllegalArgumentException(
                "minTtl: " + minTtl + ", maxTtl: " + maxTtl + " (expected: 0 <= minTtl <= maxTtl)");
    }
    // A value of 0 disables negative caching entirely.
    this.negativeTtl = Math.min(Cache.MAX_SUPPORTED_TTL_SECS, checkPositiveOrZero(negativeTtl, "negativeTtl"));
}
/**
* Returns the minimum TTL of the cached DNS resource records (in seconds).
*
* @see #maxTtl()
*/
public int minTtl() {
return minTtl;
}
/**
* Returns the maximum TTL of the cached DNS resource records (in seconds).
*
* @see #minTtl()
*/
public int maxTtl() {
return maxTtl;
}
/**
* Returns the TTL of the cache for the failed DNS queries (in seconds). The default value is {@code 0}, which
* disables the cache for negative results.
*/
public int negativeTtl() {
return negativeTtl;
}
@Override
public void clear() {
resolveCache.clear();
}
@Override
public boolean clear(String hostname) {
checkNotNull(hostname, "hostname");
return resolveCache.clear(appendDot(hostname));
}
private static boolean emptyAdditionals(DnsRecord[] additionals) {
return additionals == null || additionals.length == 0;
}
@Override
public List<? extends DnsCacheEntry> get(String hostname, DnsRecord[] additionals) {
    checkNotNull(hostname, "hostname");
    // Entries are only ever cached for queries without additional records,
    // so a query carrying additionals can never hit the cache.
    if (!emptyAdditionals(additionals)) {
        return Collections.<DnsCacheEntry>emptyList();
    }
    // NOTE(review): a cache miss returns null here while the additionals case
    // returns an empty list — callers must handle both representations.
    final List<? extends DnsCacheEntry> entries = resolveCache.get(appendDot(hostname));
    if (entries == null || entries.isEmpty()) {
        return entries;
    }
    return new DnsCacheEntryList(entries);
}
@Override
public DnsCacheEntry cache(String hostname, DnsRecord[] additionals,
InetAddress address, long originalTtl, EventLoop loop) {
checkNotNull(hostname, "hostname");
checkNotNull(address, "address");
checkNotNull(loop, "loop");
DefaultDnsCacheEntry e = new DefaultDnsCacheEntry(hostname, address);
if (maxTtl == 0 || !emptyAdditionals(additionals)) {
return e;
}
resolveCache.cache(appendDot(hostname), e, Math.max(minTtl, (int) Math.min(maxTtl, originalTtl)), loop);
return e;
}
@Override
public DnsCacheEntry cache(String hostname, DnsRecord[] additionals, Throwable cause, EventLoop loop) {
checkNotNull(hostname, "hostname");
checkNotNull(cause, "cause");
checkNotNull(loop, "loop");
DefaultDnsCacheEntry e = new DefaultDnsCacheEntry(hostname, cause);
if (negativeTtl == 0 || !emptyAdditionals(additionals)) {
return e;
}
resolveCache.cache(appendDot(hostname), e, negativeTtl, loop);
return e;
}
@Override
public String toString() {
return new StringBuilder()
.append("DefaultDnsCache(minTtl=")
.append(minTtl).append(", maxTtl=")
.append(maxTtl).append(", negativeTtl=")
.append(negativeTtl).append(", cached resolved hostname=")
.append(resolveCache.size()).append(')')
.toString();
}
private static final
|
DefaultDnsCache
|
java
|
dropwizard__dropwizard
|
dropwizard-auth/src/test/java/io/dropwizard/auth/AuthFilterTest.java
|
{
"start": 1534,
"end": 5128
}
|
/**
 * Minimal {@link ContainerRequestContext} test double: only the security
 * context is functional (a mock reporting {@code isSecure() == true}); every
 * other method throws {@link UnsupportedOperationException} so tests fail
 * loudly if the code under test touches anything unexpected.
 */
class ____ implements ContainerRequestContext {

    private SecurityContext securityContext;

    FakeSecureRequestContext() {
        // Simulates a request arriving over a secure (HTTPS) channel.
        securityContext = mock(SecurityContext.class);
        when(securityContext.isSecure()).thenReturn(true);
    }

    @Override
    public SecurityContext getSecurityContext() {
        return securityContext;
    }

    @Override
    public void setSecurityContext(SecurityContext context) {
        this.securityContext = context;
    }

    // --- Everything below is intentionally unsupported. ---

    @Override
    public Object getProperty(String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Collection<String> getPropertyNames() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setProperty(String name, Object object) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void removeProperty(String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public UriInfo getUriInfo() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setRequestUri(URI requestUri) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setRequestUri(URI baseUri, URI requestUri) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Request getRequest() {
        throw new UnsupportedOperationException();
    }

    @Override
    public String getMethod() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setMethod(String method) {
        throw new UnsupportedOperationException();
    }

    @Override
    public MultivaluedMap<String, String> getHeaders() {
        throw new UnsupportedOperationException();
    }

    @Override
    public String getHeaderString(String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Date getDate() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Locale getLanguage() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getLength() {
        throw new UnsupportedOperationException();
    }

    @Override
    public MediaType getMediaType() {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<MediaType> getAcceptableMediaTypes() {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<Locale> getAcceptableLanguages() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Map<String, Cookie> getCookies() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean hasEntity() {
        throw new UnsupportedOperationException();
    }

    @Override
    public InputStream getEntityStream() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setEntityStream(InputStream input) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void abortWith(Response response) {
        throw new UnsupportedOperationException();
    }
}
}
|
FakeSecureRequestContext
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java
|
{
"start": 1989,
"end": 6824
}
|
/**
 * Request to stop one or more data frame analytics tasks. Carries the
 * analytics id pattern, an allow-no-match flag, a force flag, and the set of
 * concrete ids the pattern expanded to (used for task matching).
 */
class ____ extends BaseTasksRequest<Request> implements ToXContentObject {

    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");
    public static final ParseField FORCE = new ParseField("force");
    public static final ParseField TIMEOUT = new ParseField("timeout");

    private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

    static {
        PARSER.declareString((request, id) -> request.id = id, DataFrameAnalyticsConfig.ID);
        PARSER.declareString((request, val) -> request.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT);
        PARSER.declareBoolean(Request::setAllowNoMatch, ALLOW_NO_MATCH);
        PARSER.declareBoolean(Request::setForce, FORCE);
    }

    /**
     * Parses a request body, reconciling the id from the URL path with any id
     * present in the body: the body id wins if the path id is absent, and a
     * mismatch between the two is rejected.
     */
    public static Request parseRequest(String id, XContentParser parser) {
        Request request = PARSER.apply(parser, null);
        if (request.getId() == null) {
            request.setId(id);
        } else if (Strings.isNullOrEmpty(id) == false && id.equals(request.getId()) == false) {
            throw new IllegalArgumentException(
                Messages.getMessage(Messages.INCONSISTENT_ID, DataFrameAnalyticsConfig.ID, request.getId(), id)
            );
        }
        return request;
    }

    private String id;
    private boolean allowNoMatch = true;
    private boolean force;
    // Concrete ids the id pattern expanded to; drives match(Task) below.
    private Set<String> expandedIds = Collections.emptySet();

    public Request(String id) {
        this();
        setId(id);
    }

    public Request(StreamInput in) throws IOException {
        super(in);
        id = in.readString();
        allowNoMatch = in.readBoolean();
        force = in.readBoolean();
        expandedIds = new HashSet<>(Arrays.asList(in.readStringArray()));
    }

    public Request() {
        setTimeout(DEFAULT_TIMEOUT);
    }

    public Request setId(String id) {
        this.id = ExceptionsHelper.requireNonNull(id, DataFrameAnalyticsConfig.ID);
        return this;
    }

    public String getId() {
        return id;
    }

    public boolean allowNoMatch() {
        return allowNoMatch;
    }

    public Request setAllowNoMatch(boolean allowNoMatch) {
        this.allowNoMatch = allowNoMatch;
        return this;
    }

    public boolean isForce() {
        return force;
    }

    public Request setForce(boolean force) {
        this.force = force;
        return this;
    }

    @Nullable
    public Set<String> getExpandedIds() {
        return expandedIds;
    }

    public void setExpandedIds(Set<String> expandedIds) {
        this.expandedIds = Objects.requireNonNull(expandedIds);
    }

    @Override
    public boolean match(Task task) {
        // A task matches when it belongs to any of the expanded analytics ids.
        return expandedIds.stream().anyMatch(expandedId -> StartDataFrameAnalyticsAction.TaskMatcher.match(task, expandedId));
    }

    @Override
    public ActionRequestValidationException validate() {
        // No request-level validation beyond what setters already enforce.
        return null;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeString(id);
        out.writeBoolean(allowNoMatch);
        out.writeBoolean(force);
        out.writeStringCollection(expandedIds);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject()
            .field(DataFrameAnalyticsConfig.ID.getPreferredName(), id)
            .field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch)
            .field(FORCE.getPreferredName(), force)
            .endObject();
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, getTimeout(), allowNoMatch, force, expandedIds);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        StopDataFrameAnalyticsAction.Request other = (StopDataFrameAnalyticsAction.Request) obj;
        return Objects.equals(id, other.id)
            && Objects.equals(getTimeout(), other.getTimeout())
            && allowNoMatch == other.allowNoMatch
            && force == other.force
            && Objects.equals(expandedIds, other.expandedIds);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }
}
public static
|
Request
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/joincolumn/embedded/StringToCharArrayInEmbeddedMultipleJoinColumnTest.java
|
{
"start": 3465,
"end": 4117
}
|
// Invoice entity keyed by an embedded id; joins to Vehicle via a composite
// (string-to-char-array, int) foreign key that is read-only on this side.
class ____ {

    @EmbeddedId
    private VehicleInvoiceId id;

    // insertable/updatable = false: the association mirrors columns owned by
    // the embedded id and must not be written through this mapping.
    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "string_col", referencedColumnName = "char_array_col", insertable = false, updatable = false)
    @JoinColumn(name = "int_col", referencedColumnName = "int_col", insertable = false, updatable = false)
    private Vehicle vehicle;

    public VehicleInvoiceId getId() {
        return id;
    }

    public void setId(VehicleInvoiceId id) {
        this.id = id;
    }

    public Vehicle getVehicle() {
        return vehicle;
    }

    public void setVehicle(Vehicle vehicle) {
        this.vehicle = vehicle;
    }
}
@Entity(name = "Vehicle")
public static
|
VehicleInvoice
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/command/topic/TopicStatusSubCommand.java
|
{
"start": 1641,
"end": 4972
}
|
/**
 * Admin sub-command that prints, per message queue of a topic, the min/max
 * offsets and the last update time, plus the topic's put TPS.
 */
class ____ implements SubCommand {

    @Override
    public String commandName() {
        return "topicStatus";
    }

    @Override
    public String commandDesc() {
        return "Examine topic Status info.";
    }

    @Override
    public Options buildCommandlineOptions(Options options) {
        // -t is mandatory; -c optionally scopes the lookup to a cluster (or
        // an LMQ parent topic used to resolve the route).
        Option opt = new Option("t", "topic", true, "topic name");
        opt.setRequired(true);
        options.addOption(opt);
        opt = new Option("c", "cluster", true, "cluster name or lmq parent topic, lmq is used to find the route.");
        opt.setRequired(false);
        options.addOption(opt);
        return options;
    }

    /**
     * Gathers topic stats (cluster-wide when -c is given, otherwise via the
     * default topic lookup), sorts the queues, and prints one row per queue.
     *
     * @throws SubCommandException wrapping any failure from the admin client
     */
    @Override
    public void execute(final CommandLine commandLine, final Options options,
        RPCHook rpcHook) throws SubCommandException {
        DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
        // Unique instance name avoids client-id clashes with other tools.
        defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));

        try {
            TopicStatsTable topicStatsTable = new TopicStatsTable();
            defaultMQAdminExt.start();
            String topic = commandLine.getOptionValue('t').trim();

            if (commandLine.hasOption('c')) {
                // Aggregate stats from every broker that serves the route.
                String cluster = commandLine.getOptionValue('c').trim();
                TopicRouteData topicRouteData = defaultMQAdminExt.examineTopicRouteInfo(cluster);
                for (BrokerData bd : topicRouteData.getBrokerDatas()) {
                    String addr = bd.selectBrokerAddr();
                    if (addr != null) {
                        TopicStatsTable tst = defaultMQAdminExt.examineTopicStats(addr, topic);
                        topicStatsTable.getOffsetTable().putAll(tst.getOffsetTable());
                    }
                }
            } else {
                topicStatsTable = defaultMQAdminExt.examineTopicStats(topic);
            }

            // ArrayList (not LinkedList): the list is copied once, sorted, and
            // iterated — a random-access array list is the right collection and
            // can be built directly from the key set.
            List<MessageQueue> mqList = new java.util.ArrayList<>(topicStatsTable.getOffsetTable().keySet());
            Collections.sort(mqList);

            System.out.printf("%-32s  %-4s  %-20s  %-20s    %s%n",
                "#Broker Name",
                "#QID",
                "#Min Offset",
                "#Max Offset",
                "#Last Updated"
            );

            for (MessageQueue mq : mqList) {
                TopicOffset topicOffset = topicStatsTable.getOffsetTable().get(mq);
                // 0 means "never updated"; render as an empty column.
                String humanTimestamp = "";
                if (topicOffset.getLastUpdateTimestamp() > 0) {
                    humanTimestamp = UtilAll.timeMillisToHumanString2(topicOffset.getLastUpdateTimestamp());
                }
                System.out.printf("%-32s  %-4d  %-20d  %-20d    %s%n",
                    UtilAll.frontStringAtLeast(mq.getBrokerName(), 32),
                    mq.getQueueId(),
                    topicOffset.getMinOffset(),
                    topicOffset.getMaxOffset(),
                    humanTimestamp
                );
            }

            System.out.printf("%n");
            System.out.printf("Topic Put TPS:  %s%n", topicStatsTable.getTopicPutTps());
        } catch (Exception e) {
            throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
        } finally {
            // Always shut the admin client down, even on failure.
            defaultMQAdminExt.shutdown();
        }
    }
}
|
TopicStatusSubCommand
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/jackson2/AnonymousAuthenticationTokenMixin.java
|
{
"start": 1218,
"end": 1565
}
|
class ____ need to register it with
* {@link com.fasterxml.jackson.databind.ObjectMapper} and
* {@link SimpleGrantedAuthorityMixin} because AnonymousAuthenticationToken contains
* SimpleGrantedAuthority. <pre>
* ObjectMapper mapper = new ObjectMapper();
* mapper.registerModule(new CoreJackson2Module());
* </pre>
*
* <i>Note: This
|
you
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/spdy/SpdySession.java
|
{
"start": 7988,
"end": 10707
}
|
/**
 * Per-stream SPDY state: priority, half-close flags, reply tracking, flow
 * control windows, and a queue of writes pending window credit. Window
 * counters are atomic; the close/reply flags are plain booleans.
 */
class ____ {

    private final byte priority;
    private boolean remoteSideClosed;
    private boolean localSideClosed;
    private boolean receivedReply;
    // Flow-control windows, adjusted concurrently via the update* methods.
    private final AtomicInteger sendWindowSize;
    private final AtomicInteger receiveWindowSize;
    private int receiveWindowSizeLowerBound;
    // Frames waiting for send-window credit before they can be flushed.
    private final Queue<PendingWrite> pendingWriteQueue = new ConcurrentLinkedQueue<PendingWrite>();

    StreamState(
        byte priority, boolean remoteSideClosed, boolean localSideClosed,
        int sendWindowSize, int receiveWindowSize) {
        this.priority = priority;
        this.remoteSideClosed = remoteSideClosed;
        this.localSideClosed = localSideClosed;
        this.sendWindowSize = new AtomicInteger(sendWindowSize);
        this.receiveWindowSize = new AtomicInteger(receiveWindowSize);
    }

    byte getPriority() {
        return priority;
    }

    boolean isRemoteSideClosed() {
        return remoteSideClosed;
    }

    void closeRemoteSide() {
        remoteSideClosed = true;
    }

    boolean isLocalSideClosed() {
        return localSideClosed;
    }

    void closeLocalSide() {
        localSideClosed = true;
    }

    boolean hasReceivedReply() {
        return receivedReply;
    }

    void receivedReply() {
        receivedReply = true;
    }

    int getSendWindowSize() {
        return sendWindowSize.get();
    }

    // Returns the new window size after applying the (possibly negative) delta.
    int updateSendWindowSize(int deltaWindowSize) {
        return sendWindowSize.addAndGet(deltaWindowSize);
    }

    int updateReceiveWindowSize(int deltaWindowSize) {
        return receiveWindowSize.addAndGet(deltaWindowSize);
    }

    int getReceiveWindowSizeLowerBound() {
        return receiveWindowSizeLowerBound;
    }

    void setReceiveWindowSizeLowerBound(int receiveWindowSizeLowerBound) {
        this.receiveWindowSizeLowerBound = receiveWindowSizeLowerBound;
    }

    boolean putPendingWrite(PendingWrite msg) {
        return pendingWriteQueue.offer(msg);
    }

    PendingWrite getPendingWrite() {
        return pendingWriteQueue.peek();
    }

    PendingWrite removePendingWrite() {
        return pendingWriteQueue.poll();
    }

    // Drains the queue, failing every pending write with the given cause.
    void clearPendingWrites(Throwable cause) {
        for (;;) {
            PendingWrite pendingWrite = pendingWriteQueue.poll();
            if (pendingWrite == null) {
                break;
            }
            pendingWrite.fail(cause);
        }
    }
}
private final
|
StreamState
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/push/v2/executor/PushExecutorUdpImpl.java
|
{
"start": 1371,
"end": 4251
}
|
/**
 * UDP-based push executor for legacy (1.x) naming clients: rewrites the
 * service name to the grouped form and applies cluster filtering before
 * delegating to {@link UdpPushService}.
 */
class ____ implements PushExecutor {

    private final UdpPushService pushService;

    public PushExecutorUdpImpl(UdpPushService pushService) {
        this.pushService = pushService;
    }

    @Override
    public void doPush(String clientId, Subscriber subscriber, PushDataWrapper data) {
        // Fire-and-forget push; clientId is unused for UDP delivery.
        pushService.pushDataWithoutCallback(subscriber,
                handleClusterData(replaceServiceInfoName(data, subscriber), subscriber));
    }

    @Override
    public void doPushWithCallback(String clientId, Subscriber subscriber, PushDataWrapper data,
            NamingPushCallback callBack) {
        ServiceInfo actualServiceInfo = replaceServiceInfoName(data, subscriber);
        // Record what was actually pushed so the callback can report it.
        callBack.setActualServiceInfo(actualServiceInfo);
        pushService.pushDataWithCallback(subscriber, handleClusterData(actualServiceInfo, subscriber), callBack);
    }

    /**
     * The reason to replace the name is upd push is used in 1.x client. And 1.x client do not identify the group
     * attribute but only identify name attribute. So for supporting 1.x client, replace it with a new {@link
     * ServiceInfo}.
     *
     * <p>
     * Why not setName directly? Because the input {@link ServiceInfo} may be reused by 2.x push execute. And if set
     * name directly will has some effect for 2.x client.
     * </p>
     *
     * @param originalData original service info
     * @return new service info for 1.x
     */
    private ServiceInfo replaceServiceInfoName(PushDataWrapper originalData, Subscriber subscriber) {
        ServiceInfo serviceInfo = ServiceUtil
                .selectInstancesWithHealthyProtection(originalData.getOriginalData(), originalData.getServiceMetadata(),
                        false, true, subscriber);
        ServiceInfo result = new ServiceInfo();
        result.setName(NamingUtils.getGroupedName(serviceInfo.getName(), serviceInfo.getGroupName()));
        result.setClusters(serviceInfo.getClusters());
        result.setHosts(serviceInfo.getHosts());
        result.setLastRefTime(serviceInfo.getLastRefTime());
        result.setCacheMillis(serviceInfo.getCacheMillis());
        return result;
    }

    /**
     * For adapt push cluster feature for v1.x.
     *
     * @param data original data
     * @param subscriber subscriber information
     * @return cluster filtered data
     * @deprecated Will be removed after client can filter cluster
     */
    @Deprecated
    private ServiceInfo handleClusterData(ServiceInfo data, Subscriber subscriber) {
        return StringUtils.isBlank(subscriber.getCluster()) ? data
                : ServiceUtil.selectInstances(data, subscriber.getCluster());
    }

    @Override
    public void doFuzzyWatchNotifyPushWithCallBack(String clientId, AbstractFuzzyWatchNotifyRequest watchNotifyRequest, PushCallBack callBack) {
        // Fuzzy-watch notifications are not supported over UDP; intentional no-op.
    }
}
|
PushExecutorUdpImpl
|
java
|
apache__camel
|
components/camel-quartz/src/test/java/org/apache/camel/routepolicy/quartz/MultiplePoliciesOnRouteTest.java
|
{
"start": 1479,
"end": 3560
}
|
// Verifies that two route policies (a scheduled start policy and an inflight
// throttling policy) can coexist on the same route.
class ____ extends CamelTestSupport {

    private String url = "seda:foo?concurrentConsumers=20";
    private int size = 100;

    @Override
    protected void bindToRegistry(Registry registry) {
        // Policies are referenced by name from routePolicyRef below.
        registry.bind("startPolicy", createRouteStartPolicy());
        registry.bind("throttlePolicy", createThrottlePolicy());
    }

    @Override
    public boolean isUseRouteBuilder() {
        // Routes are added manually inside the test method.
        return false;
    }

    private RoutePolicy createRouteStartPolicy() {
        SimpleScheduledRoutePolicy policy = new SimpleScheduledRoutePolicy();
        // Start the route 3s in the future, retrying once at a 3s interval.
        long startTime = System.currentTimeMillis() + 3000L;
        policy.setRouteStartDate(new Date(startTime));
        policy.setRouteStartRepeatCount(1);
        policy.setRouteStartRepeatInterval(3000);
        return policy;
    }

    private RoutePolicy createThrottlePolicy() {
        ThrottlingInflightRoutePolicy policy = new ThrottlingInflightRoutePolicy();
        policy.setMaxInflightExchanges(10);
        return policy;
    }

    @Test
    public void testMultiplePoliciesOnRoute() throws Exception {
        MockEndpoint success = context.getEndpoint("mock:success", MockEndpoint.class);
        // Allow a small shortfall: up to maxInflightExchanges may be throttled.
        success.expectedMinimumMessageCount(size - 10);
        context.getComponent("quartz", QuartzComponent.class)
                .setPropertiesFile("org/apache/camel/routepolicy/quartz/myquartz.properties");
        context.addRoutes(new RouteBuilder() {
            public void configure() {
                from(url)
                        .routeId("test")
                        .routePolicyRef("startPolicy, throttlePolicy")
                        .to("log:foo?groupSize=10")
                        .to("mock:success");
            }
        });
        context.start();
        assertSame(ServiceStatus.Started, context.getRouteController().getRouteStatus("test"));
        // Trickle the messages in so the throttle policy has work to do.
        executeSlowly(size, 3, TimeUnit.MILLISECONDS, (i) -> template.sendBody(url, "Message " + i));
        context.getComponent("quartz", QuartzComponent.class).stop();
        success.assertIsSatisfied();
    }
}
|
MultiplePoliciesOnRouteTest
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/client/reactive/AbstractClientHttpResponse.java
|
{
"start": 1323,
"end": 2403
}
|
class ____ implements ClientHttpResponse {
private final HttpStatusCode statusCode;
private final HttpHeaders headers;
private final MultiValueMap<String, ResponseCookie> cookies;
private final Flux<DataBuffer> body;
protected AbstractClientHttpResponse(HttpStatusCode statusCode, HttpHeaders headers,
MultiValueMap<String, ResponseCookie> cookies, Flux<DataBuffer> body) {
Assert.notNull(statusCode, "StatusCode must not be null");
Assert.notNull(headers, "Headers must not be null");
Assert.notNull(body, "Body must not be null");
this.statusCode = statusCode;
this.headers = headers;
this.cookies = cookies;
this.body = Flux.from(new SingleSubscriberPublisher<>(body));
}
@Override
public HttpStatusCode getStatusCode() {
return this.statusCode;
}
@Override
public HttpHeaders getHeaders() {
return this.headers;
}
@Override
public MultiValueMap<String, ResponseCookie> getCookies() {
return this.cookies;
}
@Override
public Flux<DataBuffer> getBody() {
return this.body;
}
private static final
|
AbstractClientHttpResponse
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/SpringApplicationTests.java
|
{
"start": 76304,
"end": 76383
}
|
// Test-only exception used to simulate a failure during context refresh.
class ____ extends RuntimeException {
}
abstract static
|
RefreshFailureException
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystemPathHandle.java
|
{
"start": 1131,
"end": 2983
}
|
/**
 * Opaque handle for a local filesystem path, optionally pinned to a
 * modification time so later resolution can detect content changes.
 * Serialized via {@code LocalFileSystemPathHandleProto}.
 */
class ____ implements PathHandle {

    private final String path;
    // Null when the handle should not be invalidated by content changes.
    private final Long mtime;

    public LocalFileSystemPathHandle(String path, Optional<Long> mtime) {
        this.path = path;
        this.mtime = mtime.orElse(null);
    }

    /** Reconstructs a handle from its serialized protobuf form. */
    public LocalFileSystemPathHandle(ByteBuffer bytes) throws IOException {
        if (null == bytes) {
            throw new IOException("Missing PathHandle");
        }
        LocalFileSystemPathHandleProto p =
            LocalFileSystemPathHandleProto.parseFrom(ByteString.copyFrom(bytes));
        path = p.hasPath() ? p.getPath() : null;
        mtime = p.hasMtime() ? p.getMtime() : null;
    }

    public String getPath() {
        return path;
    }

    /**
     * Validates the handle against a freshly-resolved status: fails when the
     * path no longer resolves, or when an mtime was recorded and differs.
     */
    public void verify(FileStatus stat) throws InvalidPathHandleException {
        if (null == stat) {
            throw new InvalidPathHandleException("Could not resolve handle");
        }
        if (mtime != null && mtime != stat.getModificationTime()) {
            throw new InvalidPathHandleException("Content changed");
        }
    }

    @Override
    public ByteBuffer bytes() {
        LocalFileSystemPathHandleProto.Builder b =
            LocalFileSystemPathHandleProto.newBuilder();
        b.setPath(path);
        if (mtime != null) {
            b.setMtime(mtime);
        }
        return b.build().toByteString().asReadOnlyByteBuffer();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        LocalFileSystemPathHandle that = (LocalFileSystemPathHandle) o;
        return Objects.equals(path, that.path) &&
            Objects.equals(mtime, that.mtime);
    }

    @Override
    public int hashCode() {
        return Objects.hash(path, mtime);
    }

    @Override
    public String toString() {
        return "LocalFileSystemPathHandle{" +
            "path='" + path + '\'' +
            ", mtime=" + mtime +
            '}';
    }
}
|
LocalFileSystemPathHandle
|
java
|
spring-projects__spring-boot
|
module/spring-boot-h2console/src/main/java/org/springframework/boot/h2console/autoconfigure/H2ConsoleAutoConfiguration.java
|
{
"start": 2282,
"end": 3563
}
|
class ____ {
private static final Log logger = LogFactory.getLog(H2ConsoleAutoConfiguration.class);
private final H2ConsoleProperties properties;
H2ConsoleAutoConfiguration(H2ConsoleProperties properties) {
this.properties = properties;
}
/**
 * Registers the H2 web console servlet under the configured path, mapping
 * both the exact path and everything beneath it.
 */
@Bean
ServletRegistrationBean<JakartaWebServlet> h2Console() {
    String consolePath = this.properties.getPath();
    String mapping;
    if (consolePath.endsWith("/")) {
        mapping = consolePath + "*";
    }
    else {
        mapping = consolePath + "/*";
    }
    ServletRegistrationBean<JakartaWebServlet> servlet =
            new ServletRegistrationBean<>(new JakartaWebServlet(), mapping);
    configureH2ConsoleSettings(servlet, this.properties.getSettings());
    return servlet;
}
@Bean
H2ConsoleLogger h2ConsoleLogger(ObjectProvider<DataSource> dataSources) {
return new H2ConsoleLogger(dataSources, this.properties.getPath());
}
/**
 * Translates the configured console settings into H2 servlet init-params.
 * Boolean flags use the empty string, which the H2 servlet treats as "set".
 */
private void configureH2ConsoleSettings(ServletRegistrationBean<JakartaWebServlet> registration,
        Settings settings) {
    if (settings.isTrace()) {
        registration.addInitParameter("trace", "");
    }
    if (settings.isWebAllowOthers()) {
        registration.addInitParameter("webAllowOthers", "");
    }
    if (settings.getWebAdminPassword() != null) {
        registration.addInitParameter("webAdminPassword", settings.getWebAdminPassword());
    }
}
static
|
H2ConsoleAutoConfiguration
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetContentSummaryWithPermission.java
|
{
"start": 1794,
"end": 1863
}
|
class ____ get content summary with permission settings.
*/
public
|
tests
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/DoubleSubject.java
|
{
"start": 2986,
"end": 14195
}
|
interface ____ {
void compareAgainst(double other);
}
/**
* Prepares for a check that the actual value is a finite number within the given tolerance of an
* expected value that will be provided in the next call in the fluent chain.
*
* <p>The check will fail if either the actual value or the expected value is {@link
* Double#POSITIVE_INFINITY}, {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}. To check
* for those values, use {@link #isPositiveInfinity}, {@link #isNegativeInfinity}, {@link #isNaN},
* or (with more generality) {@link #isEqualTo}.
*
* <p>The check will pass if both values are zero, even if one is {@code 0.0} and the other is
* {@code -0.0}. Use {@link #isEqualTo} to assert that a value is exactly {@code 0.0} or that it
* is exactly {@code -0.0}.
*
* <p>You can use a tolerance of {@code 0.0} to assert the exact equality of finite doubles, but
* often {@link #isEqualTo} is preferable (note the different behaviours around non-finite values
* and {@code -0.0}). See the documentation on {@link #isEqualTo} for advice on when exact
* equality assertions are appropriate.
*
* @param tolerance an inclusive upper bound on the difference between the actual value and
* expected value allowed by the check, which must be a non-negative finite value, i.e. not
* {@link Double#NaN}, {@link Double#POSITIVE_INFINITY}, or negative, including {@code -0.0}
*/
public TolerantDoubleComparison isWithin(double tolerance) {
return TolerantDoubleComparison.comparing(
other -> {
if (!Double.isFinite(tolerance)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was not"
+ " finite"),
numericFact("expected", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (Double.compare(tolerance, 0.0) < 0) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was negative"),
numericFact("expected", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (!Double.isFinite(other)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because expected value was not"
+ " finite"),
numericFact("expected", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (actual == null || !Double.isFinite(actual)) {
failWithoutActual(
numericFact("expected a finite value near", other),
numericFact("but was", actual),
numericFact("tolerance", tolerance));
} else if (!equalWithinTolerance(actual, other, tolerance)) {
failWithoutActual(
numericFact("expected", other),
numericFact("but was", actual),
numericFact("outside tolerance", tolerance));
}
});
}
/**
* Prepares for a check that the actual value is a finite number not within the given tolerance of
* an expected value that will be provided in the next call in the fluent chain.
*
* <p>The check will fail if either the actual value or the expected value is {@link
* Double#POSITIVE_INFINITY}, {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}. See {@link
* #isFinite}, {@link #isNotNaN}, or {@link #isNotEqualTo} for checks with other behaviours.
*
* <p>The check will fail if both values are zero, even if one is {@code 0.0} and the other is
* {@code -0.0}. Use {@link #isNotEqualTo} for a test which fails for a value of exactly zero with
* one sign but passes for zero with the opposite sign.
*
* <p>You can use a tolerance of {@code 0.0} to assert the exact non-equality of finite doubles,
* but sometimes {@link #isNotEqualTo} is preferable (note the different behaviours around
* non-finite values and {@code -0.0}).
*
* @param tolerance an exclusive lower bound on the difference between the actual value and
* expected value allowed by the check, which must be a non-negative finite value, i.e. not
* {@link Double#NaN}, {@link Double#POSITIVE_INFINITY}, or negative, including {@code -0.0}
*/
public TolerantDoubleComparison isNotWithin(double tolerance) {
return TolerantDoubleComparison.comparing(
other -> {
if (!Double.isFinite(tolerance)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was not"
+ " finite"),
numericFact("expected not to be", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (Double.compare(tolerance, 0.0) < 0) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was negative"),
numericFact("expected not to be", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (!Double.isFinite(other)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because expected value was not"
+ " finite"),
numericFact("expected not to be", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (actual == null || !Double.isFinite(actual)) {
failWithoutActual(
numericFact("expected a finite value that is not near", other),
numericFact("but was", actual),
numericFact("tolerance", tolerance));
} else if (!notEqualWithinTolerance(actual, other, tolerance)) {
failWithoutActual(
numericFact("expected not to be", other),
numericFact("but was", actual),
numericFact("within tolerance", tolerance));
}
});
}
/**
* Asserts that the actual value is exactly equal to the given value, with equality defined as by
* {@link Double#equals}. This method is <i>not</i> recommended when the code under test is doing
* any kind of arithmetic: use {@link #isWithin} with a suitable tolerance in that case. (Remember
* that the exact result of floating point arithmetic is sensitive to apparently trivial changes
* such as replacing {@code (a + b) + c} with {@code a + (b + c)}.) This method is recommended
* when the code under test is specified as either copying a value without modification from its
* input or returning a well-defined literal or constant value.
*
* <p><b>Note:</b> The assertion {@code isEqualTo(0.0)} fails for an input of {@code -0.0}, and
* vice versa. For an assertion that passes for either {@code 0.0} or {@code -0.0}, use {@link
* #isZero}.
*/
@Override
public void isEqualTo(@Nullable Object expected) {
super.isEqualTo(expected);
}
/**
* Asserts that the actual value is not exactly equal to the given value, with equality defined as
* by {link Double#equals}. See {@link #isEqualTo} for advice on when exact equality is
* recommended. Use {@link #isNotWithin} for an assertion with a tolerance.
*
* <p><b>Note:</b> The assertion {@code isNotEqualTo(0.0)} passes for {@code -0.0}, and vice
* versa. For an assertion that fails for either {@code 0.0} or {@code -0.0}, use {@link
* #isNonZero}.
*/
@Override
public void isNotEqualTo(@Nullable Object other) {
super.isNotEqualTo(other);
}
/**
* @deprecated Use {@link #isWithin} or {@link #isEqualTo} instead (see documentation for advice).
*/
@Override
@Deprecated
public void isEquivalentAccordingToCompareTo(@Nullable Double expected) {
super.isEquivalentAccordingToCompareTo(expected);
}
/** Asserts that the actual value is zero (i.e. it is either {@code 0.0} or {@code -0.0}). */
public void isZero() {
if (actual == null || actual != 0.0) {
failWithActual(simpleFact("expected zero"));
}
}
/**
* Asserts that the actual value is a non-null value other than zero (i.e. it is not {@code 0.0},
* {@code -0.0} or {@code null}).
*/
public void isNonZero() {
if (actual == null) {
failWithActual(simpleFact("expected a double other than zero"));
} else if (actual == 0.0) {
failWithActual(simpleFact("expected not to be zero"));
}
}
/** Asserts that the actual value is {@link Double#POSITIVE_INFINITY}. */
public void isPositiveInfinity() {
isEqualTo(POSITIVE_INFINITY);
}
/** Asserts that the actual value is {@link Double#NEGATIVE_INFINITY}. */
public void isNegativeInfinity() {
isEqualTo(NEGATIVE_INFINITY);
}
/** Asserts that the actual value is {@link Double#NaN}. */
public void isNaN() {
isEqualTo(NaN);
}
/**
* Asserts that the actual value is finite, i.e. not {@link Double#POSITIVE_INFINITY}, {@link
* Double#NEGATIVE_INFINITY}, or {@link Double#NaN}.
*/
public void isFinite() {
if (actual == null || actual.isNaN() || actual.isInfinite()) {
failWithActual(simpleFact("expected to be finite"));
}
}
/**
* Asserts that the actual value is a non-null value other than {@link Double#NaN} (but it may be
* {@link Double#POSITIVE_INFINITY} or {@link Double#NEGATIVE_INFINITY}).
*/
public void isNotNaN() {
if (actual == null) {
failWithActual(simpleFact("expected a double other than NaN"));
} else {
isNotEqualTo(NaN);
}
}
/**
* Checks that the actual value is greater than {@code other}.
*
* <p>To check that the actual value is greater than <i>or equal to</i> {@code other}, use {@link
* #isAtLeast}.
*/
public void isGreaterThan(int other) {
isGreaterThan((double) other);
}
/**
* Checks that the actual value is less than {@code other}.
*
* <p>To check that the actual value is less than <i>or equal to</i> {@code other}, use {@link
* #isAtMost} .
*/
public void isLessThan(int other) {
isLessThan((double) other);
}
/**
* Checks that the actual value is less than or equal to {@code other}.
*
* <p>To check that the actual value is <i>strictly</i> less than {@code other}, use {@link
* #isLessThan}.
*/
public void isAtMost(int other) {
isAtMost((double) other);
}
/**
* Checks that the actual value is greater than or equal to {@code other}.
*
* <p>To check that the actual value is <i>strictly</i> greater than {@code other}, use {@link
* #isGreaterThan}.
*/
public void isAtLeast(int other) {
isAtLeast((double) other);
}
static Factory<DoubleSubject, Double> doubles() {
return DoubleSubject::new;
}
}
|
DoubleComparer
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RobotFrameworkEndpointBuilderFactory.java
|
{
"start": 120825,
"end": 121162
}
|
interface ____
extends
AdvancedRobotFrameworkEndpointConsumerBuilder,
AdvancedRobotFrameworkEndpointProducerBuilder {
default RobotFrameworkEndpointBuilder basic() {
return (RobotFrameworkEndpointBuilder) this;
}
}
public
|
AdvancedRobotFrameworkEndpointBuilder
|
java
|
apache__spark
|
examples/src/main/java/org/apache/spark/examples/mllib/JavaRecommendationExample.java
|
{
"start": 1152,
"end": 2977
}
|
class ____ {
public static void main(String[] args) {
// $example on$
SparkConf conf = new SparkConf().setAppName("Java Collaborative Filtering Example");
JavaSparkContext jsc = new JavaSparkContext(conf);
// Load and parse the data
String path = "data/mllib/als/test.data";
JavaRDD<String> data = jsc.textFile(path);
JavaRDD<Rating> ratings = data.map(s -> {
String[] sarray = s.split(",");
return new Rating(Integer.parseInt(sarray[0]),
Integer.parseInt(sarray[1]),
Double.parseDouble(sarray[2]));
});
// Build the recommendation model using ALS
int rank = 10;
int numIterations = 10;
MatrixFactorizationModel model = ALS.train(JavaRDD.toRDD(ratings), rank, numIterations, 0.01);
// Evaluate the model on rating data
JavaRDD<Tuple2<Object, Object>> userProducts =
ratings.map(r -> new Tuple2<>(r.user(), r.product()));
JavaPairRDD<Tuple2<Integer, Integer>, Double> predictions = JavaPairRDD.fromJavaRDD(
model.predict(JavaRDD.toRDD(userProducts)).toJavaRDD()
.map(r -> new Tuple2<>(new Tuple2<>(r.user(), r.product()), r.rating()))
);
JavaRDD<Tuple2<Double, Double>> ratesAndPreds = JavaPairRDD.fromJavaRDD(
ratings.map(r -> new Tuple2<>(new Tuple2<>(r.user(), r.product()), r.rating())))
.join(predictions).values();
double MSE = ratesAndPreds.mapToDouble(pair -> {
double err = pair._1() - pair._2();
return err * err;
}).mean();
System.out.println("Mean Squared Error = " + MSE);
// Save and load model
model.save(jsc.sc(), "target/tmp/myCollaborativeFilter");
MatrixFactorizationModel sameModel = MatrixFactorizationModel.load(jsc.sc(),
"target/tmp/myCollaborativeFilter");
// $example off$
jsc.stop();
}
}
|
JavaRecommendationExample
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/util/BigArrays.java
|
{
"start": 3331,
"end": 4054
}
|
class ____ extends AbstractArray implements BigArray {
static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ByteArrayWrapper.class);
private final Releasable releasable;
private final long size;
AbstractArrayWrapper(BigArrays bigArrays, long size, Releasable releasable, boolean clearOnResize) {
super(bigArrays, clearOnResize);
this.releasable = releasable;
this.size = size;
}
@Override
public final long size() {
return size;
}
@Override
protected final void doClose() {
Releasables.close(releasable);
}
}
private static
|
AbstractArrayWrapper
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/conversion/WrapperToStringConversion.java
|
{
"start": 995,
"end": 4896
}
|
class ____ extends AbstractNumberToStringConversion {
private final Class<?> sourceType;
private final Class<?> primitiveType;
public WrapperToStringConversion(Class<?> sourceType) {
super( NativeTypes.isNumber( sourceType ) );
if ( sourceType.isPrimitive() ) {
throw new IllegalArgumentException( sourceType + " is no wrapper type." );
}
this.sourceType = sourceType;
this.primitiveType = NativeTypes.getPrimitiveType( sourceType );
}
@Override
public String getToExpression(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
StringBuilder sb = new StringBuilder();
appendDecimalFormatter( sb, conversionContext );
sb.append( ".format( <SOURCE> )" );
return sb.toString();
}
else {
return "String.valueOf( <SOURCE> )";
}
}
@Override
public Set<Type> getToConversionImportTypes(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
if ( conversionContext.getLocale() != null ) {
return asSet(
conversionContext.getTypeFactory().getType( DecimalFormat.class ),
conversionContext.getTypeFactory().getType( DecimalFormatSymbols.class ),
conversionContext.getTypeFactory().getType( Locale.class )
);
}
return Collections.singleton( conversionContext.getTypeFactory().getType( DecimalFormat.class ) );
}
return Collections.emptySet();
}
@Override
public String getFromExpression(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
StringBuilder sb = new StringBuilder();
appendDecimalFormatter( sb, conversionContext );
sb.append( ".parse( <SOURCE> )." );
sb.append( primitiveType.getSimpleName() );
sb.append( "Value()" );
return sb.toString();
}
else {
return sourceType.getSimpleName() + ".parse"
+ Strings.capitalize( primitiveType.getSimpleName() ) + "( <SOURCE> )";
}
}
@Override
protected Set<Type> getFromConversionImportTypes(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
if ( conversionContext.getLocale() != null ) {
return asSet(
conversionContext.getTypeFactory().getType( DecimalFormat.class ),
conversionContext.getTypeFactory().getType( DecimalFormatSymbols.class ),
conversionContext.getTypeFactory().getType( Locale.class )
);
}
return Collections.singleton( conversionContext.getTypeFactory().getType( DecimalFormat.class ) );
}
return Collections.emptySet();
}
private void appendDecimalFormatter(StringBuilder sb, ConversionContext conversionContext) {
sb.append( "new " );
sb.append( decimalFormat( conversionContext ) );
sb.append( "( " );
if ( conversionContext.getNumberFormat() != null ) {
sb.append( "\"" );
sb.append( conversionContext.getNumberFormat() );
sb.append( "\"" );
if ( conversionContext.getLocale() != null ) {
sb.append( ", " )
.append( decimalFormatSymbols( conversionContext ) )
.append( ".getInstance( " )
.append( locale( conversionContext ) )
.append( ".forLanguageTag( \"" )
.append( conversionContext.getLocale() )
.append( " \" ) )" );
}
}
sb.append( " )" );
}
}
|
WrapperToStringConversion
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
|
{
"start": 6401,
"end": 19957
}
|
class ____ extends AppenderSkeleton {
private final List<LoggingEvent> logEvents = new CopyOnWriteArrayList<>();
@Override
public boolean requiresLayout() {
return false;
}
@Override
public void close() {
}
@Override
protected void append(LoggingEvent arg0) {
logEvents.add(arg0);
}
private List<LoggingEvent> getLogEvents() {
return logEvents;
}
}
@BeforeAll
public static void setupBeforeClass() {
ResourceUtils.resetResourceTypes(new Configuration());
}
@BeforeEach
public void before() {
TaskAttemptImpl.RESOURCE_REQUEST_CACHE.clear();
}
@AfterEach
public void tearDown() {
ResourceUtils.resetResourceTypes(new Configuration());
}
@Test
public void testMRAppHistoryForMap() throws Exception {
MRApp app = null;
try {
app = new FailingAttemptsMRApp(1, 0);
testMRAppHistory(app);
} finally {
app.close();
}
}
@Test
public void testMRAppHistoryForReduce() throws Exception {
MRApp app = null;
try {
app = new FailingAttemptsMRApp(0, 1);
testMRAppHistory(app);
} finally {
app.close();
}
}
@Test
public void testMRAppHistoryForTAFailedInAssigned() throws Exception {
// test TA_CONTAINER_LAUNCH_FAILED for map
FailingAttemptsDuringAssignedMRApp app = null;
try {
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_CONTAINER_LAUNCH_FAILED for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_CONTAINER_COMPLETED for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_CONTAINER_COMPLETED);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_CONTAINER_COMPLETED for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_CONTAINER_COMPLETED);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_FAILMSG for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_FAILMSG);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_FAILMSG for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_FAILMSG);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_FAILMSG_BY_CLIENT for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_FAILMSG_BY_CLIENT);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_FAILMSG_BY_CLIENT for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_FAILMSG_BY_CLIENT);
testTaskAttemptAssignedFailHistory(app);
app.close();
// test TA_KILL for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_KILL);
testTaskAttemptAssignedKilledHistory(app);
app.close();
// test TA_KILL for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_KILL);
testTaskAttemptAssignedKilledHistory(app);
app.close();
} finally {
app.close();
}
}
@Test
public void testSingleRackRequest() throws Exception {
TaskAttemptImpl.RequestContainerTransition rct =
new TaskAttemptImpl.RequestContainerTransition(false);
EventHandler eventHandler = mock(EventHandler.class);
String[] hosts = new String[3];
hosts[0] = "host1";
hosts[1] = "host2";
hosts[2] = "host3";
TaskSplitMetaInfo splitInfo =
new TaskSplitMetaInfo(hosts, 0, 128 * 1024 * 1024l);
TaskAttemptImpl mockTaskAttempt =
createMapTaskAttemptImplForTest(eventHandler, splitInfo);
TaskAttemptEvent mockTAEvent = mock(TaskAttemptEvent.class);
rct.transition(mockTaskAttempt, mockTAEvent);
ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
verify(eventHandler, times(2)).handle(arg.capture());
if (!(arg.getAllValues().get(1) instanceof ContainerRequestEvent)) {
fail("Second Event not of type ContainerRequestEvent");
}
ContainerRequestEvent cre =
(ContainerRequestEvent) arg.getAllValues().get(1);
String[] requestedRacks = cre.getRacks();
//Only a single occurrence of /DefaultRack
assertEquals(1, requestedRacks.length);
}
@Test
public void testHostResolveAttempt() throws Exception {
TaskAttemptImpl.RequestContainerTransition rct =
new TaskAttemptImpl.RequestContainerTransition(false);
EventHandler eventHandler = mock(EventHandler.class);
String[] hosts = new String[3];
hosts[0] = "192.168.1.1";
hosts[1] = "host2";
hosts[2] = "host3";
TaskSplitMetaInfo splitInfo =
new TaskSplitMetaInfo(hosts, 0, 128 * 1024 * 1024l);
TaskAttemptImpl mockTaskAttempt =
createMapTaskAttemptImplForTest(eventHandler, splitInfo);
TaskAttemptImpl spyTa = spy(mockTaskAttempt);
when(spyTa.resolveHost(hosts[0])).thenReturn("host1");
spyTa.dataLocalHosts = spyTa.resolveHosts(splitInfo.getLocations());
TaskAttemptEvent mockTAEvent = mock(TaskAttemptEvent.class);
rct.transition(spyTa, mockTAEvent);
verify(spyTa).resolveHost(hosts[0]);
ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
verify(eventHandler, times(2)).handle(arg.capture());
if (!(arg.getAllValues().get(1) instanceof ContainerRequestEvent)) {
fail("Second Event not of type ContainerRequestEvent");
}
Map<String, Boolean> expected = new HashMap<String, Boolean>();
expected.put("host1", true);
expected.put("host2", true);
expected.put("host3", true);
ContainerRequestEvent cre =
(ContainerRequestEvent) arg.getAllValues().get(1);
String[] requestedHosts = cre.getHosts();
for (String h : requestedHosts) {
expected.remove(h);
}
assertEquals(0, expected.size());
}
@Test
public void testMillisCountersUpdate() throws Exception {
verifyMillisCounters(Resource.newInstance(1024, 1), 512);
verifyMillisCounters(Resource.newInstance(2048, 4), 1024);
verifyMillisCounters(Resource.newInstance(10240, 8), 2048);
}
public void verifyMillisCounters(Resource containerResource,
int minContainerSize) throws Exception {
Clock actualClock = SystemClock.getInstance();
ControlledClock clock = new ControlledClock(actualClock);
clock.setTime(10);
MRApp app =
new MRApp(1, 1, false, "testSlotMillisCounterUpdate", true, clock);
app.setAllocatedContainerResource(containerResource);
Configuration conf = new Configuration();
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
minContainerSize);
app.setClusterInfo(new ClusterInfo(Resource.newInstance(10240, 1)));
Job job = app.submit(conf);
app.waitForState(job, JobState.RUNNING);
Map<TaskId, Task> tasks = job.getTasks();
assertEquals(2, tasks.size(), "Num tasks is not correct");
Iterator<Task> taskIter = tasks.values().iterator();
Task mTask = taskIter.next();
app.waitForState(mTask, TaskState.RUNNING);
Task rTask = taskIter.next();
app.waitForState(rTask, TaskState.RUNNING);
Map<TaskAttemptId, TaskAttempt> mAttempts = mTask.getAttempts();
assertEquals(1, mAttempts.size(), "Num attempts is not correct");
Map<TaskAttemptId, TaskAttempt> rAttempts = rTask.getAttempts();
assertEquals(1, rAttempts.size(), "Num attempts is not correct");
TaskAttempt mta = mAttempts.values().iterator().next();
TaskAttempt rta = rAttempts.values().iterator().next();
app.waitForState(mta, TaskAttemptState.RUNNING);
app.waitForState(rta, TaskAttemptState.RUNNING);
clock.setTime(11);
app.getContext()
.getEventHandler()
.handle(new TaskAttemptEvent(mta.getID(), TaskAttemptEventType.TA_DONE));
app.getContext()
.getEventHandler()
.handle(new TaskAttemptEvent(rta.getID(), TaskAttemptEventType.TA_DONE));
app.waitForState(job, JobState.SUCCEEDED);
assertThat(mta.getFinishTime()).isEqualTo(11);
assertThat(mta.getLaunchTime()).isEqualTo(10);
assertThat(rta.getFinishTime()).isEqualTo(11);
assertThat(rta.getLaunchTime()).isEqualTo(10);
Counters counters = job.getAllCounters();
int memoryMb = (int) containerResource.getMemorySize();
int vcores = containerResource.getVirtualCores();
assertEquals((int) Math.ceil((float) memoryMb / minContainerSize),
counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue());
assertEquals((int) Math.ceil((float) memoryMb / minContainerSize),
counters.findCounter(JobCounter.SLOTS_MILLIS_REDUCES).getValue());
assertEquals(1,
counters.findCounter(JobCounter.MILLIS_MAPS).getValue());
assertEquals(1,
counters.findCounter(JobCounter.MILLIS_REDUCES).getValue());
assertEquals(memoryMb,
counters.findCounter(JobCounter.MB_MILLIS_MAPS).getValue());
assertEquals(memoryMb,
counters.findCounter(JobCounter.MB_MILLIS_REDUCES).getValue());
assertEquals(vcores,
counters.findCounter(JobCounter.VCORES_MILLIS_MAPS).getValue());
assertEquals(vcores,
counters.findCounter(JobCounter.VCORES_MILLIS_REDUCES).getValue());
}
private TaskAttemptImpl createMapTaskAttemptImplForTest(
EventHandler eventHandler, TaskSplitMetaInfo taskSplitMetaInfo) {
Clock clock = SystemClock.getInstance();
return createMapTaskAttemptImplForTest(eventHandler, taskSplitMetaInfo,
clock, new JobConf());
}
private TaskAttemptImpl createMapTaskAttemptImplForTest(
EventHandler eventHandler, TaskSplitMetaInfo taskSplitMetaInfo,
Clock clock, JobConf jobConf) {
ApplicationId appId = ApplicationId.newInstance(1, 1);
JobId jobId = MRBuilderUtils.newJobId(appId, 1);
TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
TaskAttemptListener taListener = mock(TaskAttemptListener.class);
Path jobFile = mock(Path.class);
TaskAttemptImpl taImpl =
new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
taskSplitMetaInfo, jobConf, taListener, null,
null, clock, null);
return taImpl;
}
private TaskAttemptImpl createReduceTaskAttemptImplForTest(
EventHandler eventHandler, Clock clock, JobConf jobConf) {
ApplicationId appId = ApplicationId.newInstance(1, 1);
JobId jobId = MRBuilderUtils.newJobId(appId, 1);
TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.REDUCE);
TaskAttemptListener taListener = mock(TaskAttemptListener.class);
Path jobFile = mock(Path.class);
TaskAttemptImpl taImpl =
new ReduceTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
1, jobConf, taListener, null,
null, clock, null);
return taImpl;
}
private void testMRAppHistory(MRApp app) throws Exception {
Configuration conf = new Configuration();
Job job = app.submit(conf);
app.waitForState(job, JobState.FAILED);
Map<TaskId, Task> tasks = job.getTasks();
assertEquals(1, tasks.size(), "Num tasks is not correct");
Task task = tasks.values().iterator().next();
assertEquals(TaskState.FAILED, task
.getReport().getTaskState(), "Task state not correct");
Map<TaskAttemptId, TaskAttempt> attempts = tasks.values().iterator().next()
.getAttempts();
assertEquals(4, attempts.size(), "Num attempts is not correct");
Iterator<TaskAttempt> it = attempts.values().iterator();
TaskAttemptReport report = it.next().getReport();
assertEquals(TaskAttemptState.FAILED,
report.getTaskAttemptState(), "Attempt state not correct");
assertEquals("Test Diagnostic Event", report.getDiagnosticInfo(),
"Diagnostic Information is not Correct");
report = it.next().getReport();
assertEquals(TaskAttemptState.FAILED,
report.getTaskAttemptState(), "Attempt state not correct");
}
private void testTaskAttemptAssignedFailHistory
(FailingAttemptsDuringAssignedMRApp app) throws Exception {
Configuration conf = new Configuration();
Job job = app.submit(conf);
app.waitForState(job, JobState.FAILED);
Map<TaskId, Task> tasks = job.getTasks();
assertTrue(app.getTaStartJHEvent(), "No Ta Started JH Event");
assertTrue(app.getTaFailedJHEvent(), "No Ta Failed JH Event");
}
private void testTaskAttemptAssignedKilledHistory
(FailingAttemptsDuringAssignedMRApp app) throws Exception {
Configuration conf = new Configuration();
Job job = app.submit(conf);
app.waitForState(job, JobState.RUNNING);
Map<TaskId, Task> tasks = job.getTasks();
Task task = tasks.values().iterator().next();
app.waitForState(task, TaskState.SCHEDULED);
Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
TaskAttempt attempt = attempts.values().iterator().next();
app.waitForState(attempt, TaskAttemptState.KILLED);
waitFor(app::getTaStartJHEvent, 100, 800);
waitFor(app::getTaKilledJHEvent, 100, 800);
}
static
|
TestAppender
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ProductionComponentProcessorTest.java
|
{
"start": 21996,
"end": 22180
}
|
interface ____ extends FooSuper {}");
Source fooImpl =
CompilerTests.javaSource(
"test.FooImpl",
"package test;",
"",
"final
|
Foo
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsWhoamiTest.java
|
{
"start": 853,
"end": 1329
}
|
class ____ extends TestCase {
public void test_select() throws Exception {
String sql = "WHOAMI";
assertEquals("WHOAMI", SQLUtils.formatOdps(sql));
assertEquals("whoami", SQLUtils.formatOdps(sql, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.ODPS);
SQLStatement stmt = statementList.get(0);
assertEquals("WHOAMI", stmt.toString());
}
}
|
OdpsWhoamiTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_375/Issue375Test.java
|
{
"start": 459,
"end": 647
}
|
class ____ {
@ProcessorTest
@WithClasses( { Issue375Mapper.class, Source.class, Target.class, Int.class, Case.class } )
public void shouldForgeNewMappings() {
}
}
|
Issue375Test
|
java
|
elastic__elasticsearch
|
modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/StorageOperation.java
|
{
"start": 521,
"end": 764
}
|
enum ____ {
INSERT("InsertObject"),
GET("GetObject"),
LIST("ListObjects");
final String key;
public String key() {
return key;
}
StorageOperation(String key) {
this.key = key;
}
}
|
StorageOperation
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java
|
{
"start": 1230,
"end": 7399
}
|
class ____<
Request extends AbstractBulkByScrollRequest<Request>,
Self extends AbstractBulkByScrollRequestBuilder<Request, Self>> extends ActionRequestLazyBuilder<Request, BulkByScrollResponse> {
private final SearchRequestBuilder source;
private Integer maxDocs;
private Boolean abortOnVersionConflict;
private Boolean refresh;
private TimeValue timeout;
private ActiveShardCount waitForActiveShards;
private TimeValue retryBackoffInitialTime;
private Integer maxRetries;
private Float requestsPerSecond;
private Boolean shouldStoreResult;
private Integer slices;
protected AbstractBulkByScrollRequestBuilder(
ElasticsearchClient client,
ActionType<BulkByScrollResponse> action,
SearchRequestBuilder source
) {
super(client, action);
this.source = source;
initSourceSearchRequest();
}
/*
* The following is normally done within the AbstractBulkByScrollRequest constructor. But that constructor is not called until the
* request() method is called once this builder is complete. Doing it there blows away changes made to the source request.
*/
private void initSourceSearchRequest() {
source.request().scroll(DEFAULT_SCROLL_TIMEOUT);
source.request().source(new SearchSourceBuilder());
source.request().source().size(DEFAULT_SCROLL_SIZE);
}
protected abstract Self self();
/**
* The search used to find documents to process.
*/
public SearchRequestBuilder source() {
return source;
}
/**
* Set the source indices.
*/
public Self source(String... indices) {
source.setIndices(indices);
return self();
}
/**
* Set the query that will filter the source. Just a convenience method for
* easy chaining.
*/
public Self filter(QueryBuilder filter) {
source.setQuery(filter);
return self();
}
/**
* Maximum number of processed documents. Defaults to processing all
* documents.
* @deprecated please use maxDocs(int) instead.
*/
@Deprecated
public Self size(int size) {
return maxDocs(size);
}
/**
* Maximum number of processed documents. Defaults to processing all
* documents.
*/
public Self maxDocs(int maxDocs) {
this.maxDocs = maxDocs;
return self();
}
/**
* Set whether or not version conflicts cause the action to abort.
*/
public Self abortOnVersionConflict(boolean abortOnVersionConflict) {
this.abortOnVersionConflict = abortOnVersionConflict;
return self();
}
/**
* Call refresh on the indexes we've written to after the request ends?
*/
public Self refresh(boolean refresh) {
this.refresh = refresh;
return self();
}
/**
* Timeout to wait for the shards on to be available for each bulk request.
*/
public Self timeout(TimeValue timeout) {
this.timeout = timeout;
return self();
}
/**
* The number of shard copies that must be active before proceeding with the write.
* See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details.
*/
public Self waitForActiveShards(ActiveShardCount activeShardCount) {
this.waitForActiveShards = activeShardCount;
return self();
}
/**
* Initial delay after a rejection before retrying a bulk request. With the default maxRetries the total backoff for retrying rejections
* is about one minute per bulk request. Once the entire bulk request is successful the retry counter resets.
*/
public Self setRetryBackoffInitialTime(TimeValue retryBackoffInitialTime) {
this.retryBackoffInitialTime = retryBackoffInitialTime;
return self();
}
/**
* Total number of retries attempted for rejections. There is no way to ask for unlimited retries.
*/
public Self setMaxRetries(int maxRetries) {
this.maxRetries = maxRetries;
return self();
}
/**
* Set the throttle for this request in sub-requests per second. {@link Float#POSITIVE_INFINITY} means set no throttle and that is the
* default. Throttling is done between batches, as we start the next scroll requests. That way we can increase the scroll's timeout to
* make sure that it contains any time that we might wait.
*/
public Self setRequestsPerSecond(float requestsPerSecond) {
this.requestsPerSecond = requestsPerSecond;
return self();
}
/**
* Should this task store its result after it has finished?
*/
public Self setShouldStoreResult(boolean shouldStoreResult) {
this.shouldStoreResult = shouldStoreResult;
return self();
}
/**
* The number of slices this task should be divided into. Defaults to 1 meaning the task isn't sliced into subtasks.
*/
public Self setSlices(int slices) {
this.slices = slices;
return self();
}
protected void apply(Request request) {
if (maxDocs != null) {
request.setMaxDocs(maxDocs);
}
if (abortOnVersionConflict != null) {
request.setAbortOnVersionConflict(abortOnVersionConflict);
}
if (refresh != null) {
request.setRefresh(refresh);
}
if (timeout != null) {
request.setTimeout(timeout);
}
if (waitForActiveShards != null) {
request.setWaitForActiveShards(waitForActiveShards);
}
if (retryBackoffInitialTime != null) {
request.setRetryBackoffInitialTime(retryBackoffInitialTime);
}
if (maxRetries != null) {
request.setMaxRetries(maxRetries);
}
if (requestsPerSecond != null) {
request.setRequestsPerSecond(requestsPerSecond);
}
if (shouldStoreResult != null) {
request.setShouldStoreResult(shouldStoreResult);
}
if (slices != null) {
request.setSlices(slices);
}
}
}
|
AbstractBulkByScrollRequestBuilder
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/TruthConstantAsserts.java
|
{
"start": 2135,
"end": 4775
}
|
class ____ extends BugChecker implements MethodInvocationTreeMatcher {
private static final Matcher<ExpressionTree> ASSERT_THAT =
staticMethod().onClass("com.google.common.truth.Truth").named("assertThat");
private static final Matcher<ExpressionTree> TRUTH_SUBJECT_CALL =
instanceMethod()
.onDescendantOf("com.google.common.truth.Subject")
.namedAnyOf("isEqualTo", "isNotEqualTo")
.withParameters("java.lang.Object");
private final ConstantExpressions constantExpressions;
@Inject
TruthConstantAsserts(ConstantExpressions constantExpressions) {
this.constantExpressions = constantExpressions;
}
@Override
public Description matchMethodInvocation(
MethodInvocationTree methodInvocationTree, VisitorState state) {
if (methodInvocationTree.getArguments().isEmpty()) {
return Description.NO_MATCH;
}
if (!TRUTH_SUBJECT_CALL.matches(methodInvocationTree, state)) {
return Description.NO_MATCH;
}
ExpressionTree rec = ASTHelpers.getReceiver(methodInvocationTree);
if (rec == null) {
return Description.NO_MATCH;
}
if (!ASSERT_THAT.matches(rec, state)) {
return Description.NO_MATCH;
}
ExpressionTree expr = getOnlyElement(((MethodInvocationTree) rec).getArguments());
if (expr == null) {
return Description.NO_MATCH;
}
// check that argument of assertThat is a constant
if (!constantIsh(expr, state)) {
return Description.NO_MATCH;
}
// check that expectation isn't a constant
ExpressionTree expectation = getOnlyElement(methodInvocationTree.getArguments());
if (constantIsh(expectation, state)) {
return Description.NO_MATCH;
}
SuggestedFix fix = SuggestedFix.swap(expr, expectation, state);
return describeMatch(methodInvocationTree, fix);
}
private boolean constantIsh(ExpressionTree tree, VisitorState state) {
var constant = constantExpressions.constantExpression(tree, state).orElse(null);
if (constant == null) {
return false;
}
// Identifiers can be considered constants, but they're exactly what we usually assert on! So
// don't consider them to be constants in this context.
AtomicBoolean involvesIdentifiers = new AtomicBoolean();
constant.accept(
new ConstantExpressionVisitor() {
@Override
public void visitIdentifier(Symbol identifier) {
if (!(identifier instanceof MethodSymbol) && !isStatic(identifier)) {
involvesIdentifiers.set(true);
}
}
});
return !involvesIdentifiers.get();
}
}
|
TruthConstantAsserts
|
java
|
spring-projects__spring-boot
|
buildSrc/src/test/java/org/springframework/boot/build/antora/GenerateAntoraPlaybookTests.java
|
{
"start": 1357,
"end": 3711
}
|
class ____ {
@TempDir
File temp;
@Test
void writePlaybookGeneratesExpectedContent() throws Exception {
writePlaybookYml((task) -> {
task.getAntoraExtensions().getXref().getStubs().addAll("appendix:.*", "api:.*", "reference:.*");
ZipContentsCollector zipContentsCollector = task.getAntoraExtensions().getZipContentsCollector();
zipContentsCollector.getAlwaysInclude().set(List.of(new AlwaysInclude("test", "local-aggregate-content")));
zipContentsCollector.getDependencies().add("test-dependency");
});
String actual = Files.readString(this.temp.toPath()
.resolve("rootproject/project/build/generated/docs/antora-playbook/antora-playbook.yml"));
String expected = Files
.readString(Path.of("src/test/resources/org/springframework/boot/build/antora/expected-playbook.yml"));
assertThat(actual.replace('\\', '/')).isEqualToNormalizingNewlines(expected.replace('\\', '/'));
}
@Test
void writePlaybookWhenHasJavadocExcludeGeneratesExpectedContent() throws Exception {
writePlaybookYml((task) -> {
task.getAntoraExtensions().getXref().getStubs().addAll("appendix:.*", "api:.*", "reference:.*");
ZipContentsCollector zipContentsCollector = task.getAntoraExtensions().getZipContentsCollector();
zipContentsCollector.getAlwaysInclude().set(List.of(new AlwaysInclude("test", "local-aggregate-content")));
zipContentsCollector.getDependencies().add("test-dependency");
task.getAsciidocExtensions().getExcludeJavadocExtension().set(true);
});
String actual = Files.readString(this.temp.toPath()
.resolve("rootproject/project/build/generated/docs/antora-playbook/antora-playbook.yml"));
assertThat(actual).doesNotContain("javadoc-extension");
}
private void writePlaybookYml(ThrowingConsumer<GenerateAntoraPlaybook> customizer) throws Exception {
File rootProjectDir = new File(this.temp, "rootproject").getCanonicalFile();
rootProjectDir.mkdirs();
Project rootProject = ProjectBuilder.builder().withProjectDir(rootProjectDir).build();
File projectDir = new File(rootProjectDir, "project");
projectDir.mkdirs();
Project project = ProjectBuilder.builder().withProjectDir(projectDir).withParent(rootProject).build();
project.getTasks()
.register("generateAntoraPlaybook", GenerateAntoraPlaybook.class, customizer::accept)
.get()
.writePlaybookYml();
}
}
|
GenerateAntoraPlaybookTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/exec/internal/lock/FollowOnLockingAction.java
|
{
"start": 2180,
"end": 11592
}
|
class ____ implements PostAction {
// Used by Hibernate Reactive
protected final LoadedValuesCollectorImpl loadedValuesCollector;
private final LockMode lockMode;
private final Timeout lockTimeout;
// Used by Hibernate Reactive
protected final Locking.Scope lockScope;
// Used by Hibernate Reactive
protected FollowOnLockingAction(
LoadedValuesCollectorImpl loadedValuesCollector,
LockMode lockMode,
Timeout lockTimeout,
Locking.Scope lockScope) {
this.loadedValuesCollector = loadedValuesCollector;
this.lockMode = lockMode;
this.lockTimeout = lockTimeout;
this.lockScope = lockScope;
}
public static void apply(
LockOptions lockOptions,
QuerySpec lockingTarget,
LockingClauseStrategy lockingClauseStrategy,
JdbcSelectWithActionsBuilder jdbcSelectBuilder) {
final var fromClause = lockingTarget.getFromClause();
final var loadedValuesCollector = resolveLoadedValuesCollector( fromClause, lockingClauseStrategy );
// NOTE: we need to set this separately so that it can get incorporated into
// the JdbcValuesSourceProcessingState for proper callbacks
jdbcSelectBuilder.setLoadedValuesCollector( loadedValuesCollector );
// additionally, add a post-action which uses the collected values.
jdbcSelectBuilder.appendPostAction( new FollowOnLockingAction(
loadedValuesCollector,
lockOptions.getLockMode(),
lockOptions.getTimeout(),
lockOptions.getScope()
) );
}
@Override
public void performPostAction(
StatementAccess jdbcStatementAccess,
Connection jdbcConnection,
ExecutionContext executionContext) {
LockingHelper.logLoadedValues( loadedValuesCollector );
final var session = executionContext.getSession();
// NOTE: we deal with effective graphs here to make sure embedded associations are treated as lazy
final var effectiveEntityGraph = session.getLoadQueryInfluencers().getEffectiveEntityGraph();
final var initialGraph = effectiveEntityGraph.getGraph();
final var initialSemantic = effectiveEntityGraph.getSemantic();
try {
// collect registrations by entity type
final var entitySegments = segmentLoadedValues();
final var collectionSegments = segmentLoadedCollections();
// for each entity-type, prepare a locking select statement per table.
// this is based on the attributes for "state array" ordering purposes -
// we match each attribute to the table it is mapped to and add it to
// the select-list for that table-segment.
entitySegments.forEach( (entityMappingType, entityKeys) -> {
// create a table-lock reference for each table for the entity (keyed by name)
final var tableLocks = prepareTableLocks( entityMappingType, entityKeys, session );
// create a cross-reference of information related to an entity based on its identifier,
// we'll use this later when we adjust the state array and inject state into the entity instance.
final var entityDetailsMap = LockingHelper.resolveEntityKeys( entityKeys, executionContext );
// at this point, we have all the individual locking selects ready to go - execute them
final var lockingOptions = buildLockingOptions(
tableLocks,
entityDetailsMap,
entityMappingType,
effectiveEntityGraph,
entityKeys,
collectionSegments,
session,
executionContext );
tableLocks.forEach( (s, tableLock) ->
tableLock.performActions( entityDetailsMap, lockingOptions, session ) );
} );
}
finally {
// reset the effective graph to whatever it was when we started
effectiveEntityGraph.clear();
session.getLoadQueryInfluencers().applyEntityGraph( initialGraph, initialSemantic );
}
}
// Used by Hibernate Reactive
public QueryOptions buildLockingOptions(
Map<String, TableLock> tableLocks,
Map<Object, EntityDetails> entityDetailsMap,
EntityMappingType entityMappingType,
EffectiveEntityGraph effectiveEntityGraph,
List<EntityKey> entityKeys,
Map<EntityMappingType, Map<PluralAttributeMapping, List<CollectionKey>>> collectionSegments,
SharedSessionContractImplementor session,
ExecutionContext executionContext) {
if ( SQL_EXEC_LOGGER.isDebugEnabled() ) {
SQL_EXEC_LOGGER.startingFollowOnLockingProcess( entityMappingType.getEntityName() );
}
// apply an empty "fetch graph" to make sure any embedded associations reachable from
// any of the DomainResults we will create are treated as lazy
final var graph = entityMappingType.createRootGraph( session );
effectiveEntityGraph.clear();
effectiveEntityGraph.applyGraph( graph, GraphSemantic.FETCH );
entityMappingType.forEachAttributeMapping( (index, attributeMapping) -> {
// we need to handle collections specially (which we do below, so skip them here)
if ( !(attributeMapping instanceof PluralAttributeMapping) ) {
final var tableLock = resolveTableLock( attributeMapping, tableLocks, entityMappingType );
if ( tableLock == null ) {
throw new AssertionFailure( String.format(
Locale.ROOT,
"Unable to locate table for attribute `%s`",
attributeMapping.getNavigableRole().getFullPath()
) );
}
// here we apply the selection for the attribute to the corresponding table-lock ref
tableLock.applyAttribute( index, attributeMapping );
}
} );
// now we do process any collections, if asked
if ( lockScope == Locking.Scope.INCLUDE_COLLECTIONS ) {
SqmMutationStrategyHelper.visitCollectionTables( entityMappingType, (attribute) -> {
// we may need to lock the "collection table".
// the conditions are a bit unclear as to directionality, etc., so for now lock each.
LockingHelper.lockCollectionTable(
attribute,
lockMode,
lockTimeout,
entityDetailsMap,
executionContext
);
} );
}
else if ( lockScope == Locking.Scope.INCLUDE_FETCHES
&& loadedValuesCollector.getCollectedCollections() != null
&& !loadedValuesCollector.getCollectedCollections().isEmpty() ) {
final var attributeKeys = collectionSegments.get( entityMappingType );
if ( attributeKeys != null ) {
for ( var entry : attributeKeys.entrySet() ) {
LockingHelper.lockCollectionTable(
entry.getKey(),
lockMode,
lockTimeout,
entry.getValue(),
executionContext
);
}
}
}
// at this point, we have all the individual locking selects ready to go - execute them
return buildLockingOptions( executionContext );
}
private TableLock resolveTableLock(
AttributeMapping attributeMapping,
Map<String, TableLock> tableSegments,
EntityMappingType entityMappingType) {
final Object key =
entityMappingType.getEntityPersister() instanceof UnionSubclassEntityPersister usp
// In the union-subclass strategy, attributes defined on the
// super are reported as contained by the logical super table.
// See also the hacks in TableSegment to deal with this.
// todo (JdbcOperation) : need to allow for secondary-tables
? usp.getMappedTableDetails().getTableName()
: attributeMapping.getContainingTableExpression();
return tableSegments.get( key );
}
private QueryOptions buildLockingOptions(ExecutionContext executionContext) {
final var lockingQueryOptions = new QueryOptionsImpl();
lockingQueryOptions.getLockOptions().setLockMode( lockMode );
lockingQueryOptions.getLockOptions().setTimeout( lockTimeout );
lockingQueryOptions.getLockOptions().setFollowOnStrategy( Locking.FollowOn.DISALLOW );
if ( executionContext.getQueryOptions().isReadOnly() == Boolean.TRUE ) {
lockingQueryOptions.setReadOnly( true );
}
return lockingQueryOptions;
}
// Used by Hibernate Reactive
protected Map<EntityMappingType, List<EntityKey>> segmentLoadedValues() {
final Map<EntityMappingType, List<EntityKey>> map = new IdentityHashMap<>();
LockingHelper.segmentLoadedValues( loadedValuesCollector.getCollectedEntities(), map );
return map;
}
// Used by Hibernate Reactive
protected Map<EntityMappingType, Map<PluralAttributeMapping, List<CollectionKey>>> segmentLoadedCollections() {
if ( lockScope == Locking.Scope.ROOT_ONLY ) {
return emptyMap();
}
final Map<EntityMappingType, Map<PluralAttributeMapping, List<CollectionKey>>> map = new HashMap<>();
LockingHelper.segmentLoadedCollections( loadedValuesCollector.getCollectedCollections(), lockScope, map );
return map;
}
// Used by Hibernate Reactive
protected Map<String, TableLock> prepareTableLocks(
EntityMappingType entityMappingType,
List<EntityKey> entityKeys,
SharedSessionContractImplementor session) {
final Map<String, TableLock> segments = new HashMap<>();
entityMappingType.forEachTableDetails( (tableDetails) -> segments.put(
tableDetails.getTableName(),
createTableLock( tableDetails, entityMappingType, entityKeys, session )
) );
return segments;
}
// Used by Hibernate Reactive
protected TableLock createTableLock(TableDetails tableDetails, EntityMappingType entityMappingType, List<EntityKey> entityKeys, SharedSessionContractImplementor session) {
return new TableLock( tableDetails, entityMappingType, entityKeys, session );
}
// Used by Hibernate Reactive
protected static LoadedValuesCollectorImpl resolveLoadedValuesCollector(
FromClause fromClause,
LockingClauseStrategy lockingClauseStrategy) {
return new LoadedValuesCollectorImpl( lockingClauseStrategy );
}
public static
|
FollowOnLockingAction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/propertyref/inheritence/discrim/Person.java
|
{
"start": 193,
"end": 758
}
|
class ____ {
private Long id;
private String name;
private String personId;
/**
* @return Returns the id.
*/
public Long getId() {
return id;
}
/**
* @param id The id to set.
*/
public void setId(Long id) {
this.id = id;
}
/**
* @return Returns the name.
*/
public String getName() {
return name;
}
/**
* @param name The name to set.
*/
public void setName(String name) {
this.name = name;
}
public String getPersonId() {
return personId;
}
public void setPersonId(String personId) {
this.personId = personId;
}
}
|
Person
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_last_Test.java
|
{
"start": 1291,
"end": 1922
}
|
class ____ {
private final Iterable<String> iterable = asList("Homer", "Marge", "Lisa", "Bart", "Maggie");
@Test
void should_fail_if_iterable_is_empty() {
// GIVEN
Iterable<String> iterable = emptyList();
// WHEN
var assertionError = expectAssertionError(() -> assertThat(iterable).last());
// THEN
then(assertionError).hasMessage(actualIsEmpty());
}
@Test
void should_pass_allowing_object_assertions_if_iterable_contains_at_least_one_element() {
// WHEN
ObjectAssert<String> result = assertThat(iterable).last();
// THEN
result.isEqualTo("Maggie");
}
}
|
IterableAssert_last_Test
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/context/request/async/WebAsyncManagerErrorTests.java
|
{
"start": 11752,
"end": 11862
}
|
class ____ implements Callable<Object> {
@Override
public Object call() {
return 21;
}
}
}
|
StubCallable
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/TransactionalGraphCommandsTest.java
|
{
"start": 780,
"end": 4567
}
|
class ____ extends DatasourceTestBase {
private RedisDataSource blocking;
private ReactiveRedisDataSource reactive;
static final String createQuery = "CREATE (:Rider {name:'Valentino Rossi'})-[:rides]->(:Team {name:'Yamaha'}), (:Rider {name:'Dani Pedrosa'})-[:rides]->(:Team {name:'Honda'}), (:Rider {name:'Andrea Dovizioso'})-[:rides]->(:Team {name:'Ducati'})";
static final String query = "MATCH (r:Rider)-[:rides]->(t:Team) WHERE t.name = 'Yamaha' RETURN r.name, t.name";
@BeforeEach
void initialize() {
blocking = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(60));
reactive = new ReactiveRedisDataSourceImpl(vertx, redis, api);
}
@AfterEach
public void clear() {
blocking.flushall();
}
@Test
public void graphBlocking() {
TransactionResult result = blocking.withTransaction(tx -> {
TransactionalGraphCommands<String> graph = tx.graph();
assertThat(graph.getDataSource()).isEqualTo(tx);
graph.graphQuery("moto", createQuery); // void
graph.graphQuery("moto", query); // result
graph.graphList(); // "moto"
graph.graphDelete("moto"); // void
graph.graphList(); // empty
});
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((List<Map<String, GraphQueryResponseItem>>) result.get(0)).isEmpty();
assertThat((List<Map<String, GraphQueryResponseItem>>) result.get(1)).hasSize(1).allSatisfy(map -> {
GraphQueryResponseItem.ScalarItem driver = map.get("r.name").asScalarItem();
GraphQueryResponseItem.ScalarItem team = map.get("t.name").asScalarItem();
assertThat(driver.asString()).isEqualTo("Valentino Rossi");
assertThat(driver.name()).isEqualTo("r.name");
assertThat(team.asString()).isEqualTo("Yamaha");
assertThat(team.name()).isEqualTo("t.name");
});
assertThat((List<String>) result.get(2)).hasSize(1).containsExactly("moto");
assertThat((Object) result.get(3)).isNull();
assertThat((List<String>) result.get(4)).hasSize(0);
}
@Test
public void graphReactive() {
TransactionResult result = reactive.withTransaction(tx -> {
ReactiveTransactionalGraphCommands<String> graph = tx.graph();
assertThat(graph.getDataSource()).isEqualTo(tx);
return graph.graphQuery("moto", createQuery) // void
.chain(() -> graph.graphQuery("moto", query)) // result
.chain(graph::graphList) // "moto"
.chain(() -> graph.graphDelete("moto")) // void
.chain(graph::graphList); // empty
}).await().indefinitely();
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((List<Map<String, GraphQueryResponseItem>>) result.get(0)).isEmpty();
assertThat((List<Map<String, GraphQueryResponseItem>>) result.get(1)).hasSize(1).allSatisfy(map -> {
GraphQueryResponseItem.ScalarItem driver = map.get("r.name").asScalarItem();
GraphQueryResponseItem.ScalarItem team = map.get("t.name").asScalarItem();
assertThat(driver.asString()).isEqualTo("Valentino Rossi");
assertThat(driver.name()).isEqualTo("r.name");
assertThat(team.asString()).isEqualTo("Yamaha");
assertThat(team.name()).isEqualTo("t.name");
});
assertThat((List<String>) result.get(2)).hasSize(1).containsExactly("moto");
assertThat((Object) result.get(3)).isNull();
assertThat((List<String>) result.get(4)).hasSize(0);
}
}
|
TransactionalGraphCommandsTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsDoubleQuoteTest.java
|
{
"start": 385,
"end": 1526
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "SELECT my_udtf(1,2,3) as (a, b, c) from employee t1 WHERE t1.name = \"aaa\";";
OdpsStatementParser parser = new OdpsStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
assertEquals(1, statementList.size());
OdpsSchemaStatVisitor visitor = new OdpsSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(1, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("employee")));
assertTrue(visitor.getColumns().contains(new Column("employee", "name")));
}
}
|
OdpsDoubleQuoteTest
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/function/FailableTest.java
|
{
"start": 74213,
"end": 74715
}
|
interface ____ properly defined to throw any exception using String and IOExceptions as
* generic test types.
*/
@Test
void testThrows_FailableBooleanSupplier_IOException() {
assertThrows(IOException.class, () -> new FailableBooleanSupplier<IOException>() {
@Override
public boolean getAsBoolean() throws IOException {
throw new IOException("test");
}
}.getAsBoolean());
}
/**
* Tests that our failable
|
is
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/beanparam/BeanFormParamTest.java
|
{
"start": 1312,
"end": 1507
}
|
interface ____ {
@POST
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
String postFormParams(@BeanParam BeanWithFormParams beanParam);
}
public static
|
FormTestClient
|
java
|
grpc__grpc-java
|
gcp-observability/src/test/java/io/grpc/gcp/observability/MetricsTest.java
|
{
"start": 3461,
"end": 6972
}
|
class ____ implements Runnable {
@Override
public void run() {
Sink mockSink = mock(GcpLogSink.class);
ObservabilityConfig mockConfig = mock(ObservabilityConfig.class);
InternalLoggingChannelInterceptor.Factory mockChannelInterceptorFactory =
mock(InternalLoggingChannelInterceptor.Factory.class);
InternalLoggingServerInterceptor.Factory mockServerInterceptorFactory =
mock(InternalLoggingServerInterceptor.Factory.class);
when(mockConfig.isEnableCloudMonitoring()).thenReturn(true);
when(mockConfig.getProjectId()).thenReturn(PROJECT_ID);
try {
GcpObservability observability =
GcpObservability.grpcInit(
mockSink, mockConfig, mockChannelInterceptorFactory, mockServerInterceptorFactory);
observability.registerStackDriverExporter(PROJECT_ID, CUSTOM_TAGS);
Server server =
ServerBuilder.forPort(0)
.addService(new ObservabilityTestHelper.SimpleServiceImpl())
.build()
.start();
int port = cleanupRule.register(server).getPort();
SimpleServiceGrpc.SimpleServiceBlockingStub stub =
SimpleServiceGrpc.newBlockingStub(
cleanupRule.register(
ManagedChannelBuilder.forAddress("localhost", port).usePlaintext().build()));
assertThat(ObservabilityTestHelper.makeUnaryRpcViaClientStub("buddy", stub))
.isEqualTo("Hello buddy");
// Adding sleep to ensure metrics are exported before querying cloud monitoring backend
TimeUnit.SECONDS.sleep(40);
// This checks Cloud monitoring for the new metrics that was just exported.
MetricServiceClient metricServiceClient = MetricServiceClient.create();
// Restrict time to last 1 minute
long startMillis = System.currentTimeMillis() - ((60 * 1) * 1000);
TimeInterval interval =
TimeInterval.newBuilder()
.setStartTime(Timestamps.fromMillis(startMillis))
.setEndTime(Timestamps.fromMillis(System.currentTimeMillis()))
.build();
// Timeseries data
String metricsFilter =
String.format(
"metric.type=\"custom.googleapis.com/opencensus/grpc.io/client/completed_rpcs\""
+ " AND metric.labels.grpc_client_method=\"%s\""
+ " AND metric.labels.%s=%s",
TEST_CLIENT_METHOD, CUSTOM_TAG_KEY, CUSTOM_TAG_VALUE);
ListTimeSeriesRequest metricsRequest =
ListTimeSeriesRequest.newBuilder()
.setName(ProjectName.of(PROJECT_ID).toString())
.setFilter(metricsFilter)
.setInterval(interval)
.build();
ListTimeSeriesPagedResponse response = metricServiceClient.listTimeSeries(metricsRequest);
assertThat(response.iterateAll()).isNotEmpty();
for (TimeSeries ts : response.iterateAll()) {
assertThat(ts.getMetric().getLabelsMap().get("grpc_client_method"))
.isEqualTo(TEST_CLIENT_METHOD);
assertThat(ts.getMetric().getLabelsMap().get("grpc_client_status")).isEqualTo("OK");
assertThat(ts.getPoints(0).getValue().getInt64Value()).isEqualTo(1);
}
observability.close();
} catch (IOException | InterruptedException e) {
throw new AssertionError("Exception while testing metrics", e);
}
}
}
}
|
StaticTestingClassTestMetricsExporter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherParams.java
|
{
"start": 2019,
"end": 3171
}
|
class ____ {
private final ToXContent.Params delegate;
private final Map<String, String> params = new HashMap<>();
private Builder(ToXContent.Params delegate) {
this.delegate = delegate;
}
public Builder hideSecrets(boolean hideSecrets) {
params.put(HIDE_SECRETS_KEY, String.valueOf(hideSecrets));
return this;
}
public Builder hideHeaders(boolean hideHeaders) {
params.put(HIDE_HEADERS, String.valueOf(hideHeaders));
return this;
}
public Builder debug(boolean debug) {
params.put(DEBUG_KEY, String.valueOf(debug));
return this;
}
public Builder includeStatus(boolean includeStatus) {
params.put(Watch.INCLUDE_STATUS_KEY, String.valueOf(includeStatus));
return this;
}
public Builder put(String key, Object value) {
params.put(key, String.valueOf(value));
return this;
}
public WatcherParams build() {
return new WatcherParams(Map.copyOf(params), delegate);
}
}
}
|
Builder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
|
{
"start": 8841,
"end": 9782
}
|
class ____ %d:%d parent %d:%d(.)*$",
ROOT_QDISC_HANDLE, YARN_ROOT_CLASS_ID, ROOT_QDISC_HANDLE,
ROOT_CLASS_ID));
for (String regex : regexes) {
Pattern pattern = Pattern.compile(regex, Pattern.MULTILINE);
if (pattern.matcher(state).find()) {
LOG.debug("Matched regex: {}", regex);
} else {
String logLine = new StringBuilder("Failed to match regex: ")
.append(regex).append(" Current state: ").append(state).toString();
LOG.warn(logLine);
return false;
}
}
LOG.info("Bootstrap check succeeded");
return true;
}
private String readState() throws ResourceHandlerException {
//Sample state output:
// qdisc htb 42: root refcnt 2 r2q 10 default 2 direct_packets_stat 0
// filter parent 42: protocol ip pref 10 cgroup handle 0x1
//
// filter parent 42: protocol ip pref 10 cgroup handle 0x1
//
//
|
htb
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
|
{
"start": 1853,
"end": 2849
}
|
class ____ created. It must be of the type
* {@link Service}</li>
* <li>If it implements
* {@link LaunchableService#bindArgs(Configuration, List)},
* it is given the binding args off the CLI after all general configuration
* arguments have been stripped.</li>
* <li>Its {@link Service#init(Configuration)} and {@link Service#start()}
* methods are called.</li>
* <li>If it implements it, {@link LaunchableService#execute()}
* is called and its return code used as the exit code.</li>
* <li>Otherwise: it waits for the service to stop, assuming that the
* {@link Service#start()} method spawns one or more thread
* to perform work</li>
* <li>If any exception is raised and provides an exit code,
* that is, it implements {@link ExitCodeProvider},
* the return value of {@link ExitCodeProvider#getExitCode()},
* becomes the exit code of the command.</li>
* </ol>
* Error and warning messages are logged to {@code stderr}.
*
* @param <S> service
|
is
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/LimitedConnectionsFileSystem.java
|
{
"start": 24446,
"end": 25317
}
|
interface ____ extends Closeable {
/** Gets the progress tracker for this stream. */
StreamProgressTracker getProgressTracker();
/** Gets the current position in the stream, as in number of bytes read or written. */
long getPos() throws IOException;
/**
* Closes the stream asynchronously with a special exception that indicates closing due to
* lack of progress.
*/
void closeDueToTimeout() throws IOException;
/** Checks whether the stream was closed already. */
boolean isClosed();
}
// ------------------------------------------------------------------------
/**
* A tracker for stream progress. This records the number of bytes read / written together with
* a timestamp when the last check happened.
*/
private static final
|
StreamWithTimeout
|
java
|
apache__kafka
|
tools/tools-api/src/main/java/org/apache/kafka/tools/api/StringDecoder.java
|
{
"start": 972,
"end": 1145
}
|
class ____ implements Decoder<String> {
@Override
public String fromBytes(byte[] bytes) {
return new String(bytes, StandardCharsets.UTF_8);
}
}
|
StringDecoder
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/monitor/os/OsInfo.java
|
{
"start": 932,
"end": 3725
}
|
class ____ implements ReportingService.Info {
private static final TransportVersion DOUBLE_PRECISION_ALLOCATED_PROCESSORS_SUPPORT = TransportVersions.V_8_5_0;
private final long refreshInterval;
private final int availableProcessors;
private final Processors allocatedProcessors;
private final String name;
private final String prettyName;
private final String arch;
private final String version;
public OsInfo(
final long refreshInterval,
final int availableProcessors,
final Processors allocatedProcessors,
final String name,
final String prettyName,
final String arch,
final String version
) {
this.refreshInterval = refreshInterval;
this.availableProcessors = availableProcessors;
this.allocatedProcessors = allocatedProcessors;
this.name = name;
this.prettyName = prettyName;
this.arch = arch;
this.version = version;
}
public OsInfo(StreamInput in) throws IOException {
this.refreshInterval = in.readLong();
this.availableProcessors = in.readInt();
if (in.getTransportVersion().onOrAfter(DOUBLE_PRECISION_ALLOCATED_PROCESSORS_SUPPORT)) {
this.allocatedProcessors = Processors.readFrom(in);
} else {
this.allocatedProcessors = Processors.of((double) in.readInt());
}
this.name = in.readOptionalString();
this.prettyName = in.readOptionalString();
this.arch = in.readOptionalString();
this.version = in.readOptionalString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeLong(refreshInterval);
out.writeInt(availableProcessors);
if (out.getTransportVersion().onOrAfter(DOUBLE_PRECISION_ALLOCATED_PROCESSORS_SUPPORT)) {
allocatedProcessors.writeTo(out);
} else {
out.writeInt(getAllocatedProcessors());
}
out.writeOptionalString(name);
out.writeOptionalString(prettyName);
out.writeOptionalString(arch);
out.writeOptionalString(version);
}
public long getRefreshInterval() {
return this.refreshInterval;
}
public int getAvailableProcessors() {
return this.availableProcessors;
}
public int getAllocatedProcessors() {
return allocatedProcessors.roundUp();
}
public double getFractionalAllocatedProcessors() {
return allocatedProcessors.count();
}
public String getName() {
return name;
}
public String getPrettyName() {
return prettyName;
}
public String getArch() {
return arch;
}
public String getVersion() {
return version;
}
static final
|
OsInfo
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_913/Domain.java
|
{
"start": 305,
"end": 1631
}
|
class ____ {
static final Set<String> DEFAULT_STRINGS = new HashSet<>();
static final Set<Long> DEFAULT_LONGS = new HashSet<>();
private Set<String> strings = DEFAULT_STRINGS;
private Set<Long> longs = DEFAULT_LONGS;
private Set<String> stringsInitialized;
private Set<Long> longsInitialized;
private List<String> stringsWithDefault;
public Set<String> getStrings() {
return strings;
}
public void setStrings(Set<String> strings) {
this.strings = strings;
}
public Set<Long> getLongs() {
return longs;
}
public void setLongs(Set<Long> longs) {
this.longs = longs;
}
public Set<String> getStringsInitialized() {
return stringsInitialized;
}
public void setStringsInitialized(Set<String> stringsInitialized) {
this.stringsInitialized = stringsInitialized;
}
public Set<Long> getLongsInitialized() {
return longsInitialized;
}
public void setLongsInitialized(Set<Long> longsInitialized) {
this.longsInitialized = longsInitialized;
}
public List<String> getStringsWithDefault() {
return stringsWithDefault;
}
public void setStringsWithDefault(List<String> stringsWithDefault) {
this.stringsWithDefault = stringsWithDefault;
}
}
|
Domain
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/util/TagUtilsTests.java
|
{
"start": 4432,
"end": 4471
}
|
class ____ extends TagSupport {
}
}
|
TagC
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnMissingBeanTests.java
|
{
"start": 23798,
"end": 24051
}
|
class ____ {
@Bean
FactoryBean<ExampleBean> exampleBeanFactoryBean(@Value("${theValue}") String value) {
return new ExampleFactoryBean(value);
}
}
@Configuration(proxyBeanMethods = false)
static
|
FactoryBeanWithBeanMethodArgumentsConfiguration
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_assertThat_inHexadecimal_Test.java
|
{
"start": 1069,
"end": 7835
}
|
class ____ {
@Test
void should_assert_byte_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat((byte) 2).inHexadecimal().isEqualTo((byte) 3));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0x02", "0x03"));
}
@Test
void should_assert_signed_byte_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat((byte) -2).inHexadecimal().isEqualTo((byte) 3));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0xFE", "0x03"));
}
@Test
void should_assert_bytes_in_hexadecimal() {
// GIVEN
byte[] actual = { 2, 3 };
byte expected = 1;
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).inHexadecimal().contains(expected));
// THEN
then(assertionError).hasMessage(format("%nExpecting byte[]:%n" +
" [0x02, 0x03]%n" +
"to contain:%n" +
" [0x01]%n" +
"but could not find the following byte(s):%n" +
" [0x01]%n"));
}
@Test
void should_assert_short_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat((short) 2).inHexadecimal().isEqualTo((short) 3));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0x0002", "0x0003"));
}
@Test
void should_assert_signed_short_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat((short) -2).inHexadecimal().isEqualTo((short) 3));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0xFFFE", "0x0003"));
}
@Test
void should_assert_shorts_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(new short[] { (short) 1,
(short) 2 }).inHexadecimal().isEqualTo(new short[] { (short) 3 }));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("[0x0001, 0x0002]", "[0x0003]"));
}
@Test
void should_assert_integer_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(2).inHexadecimal().isEqualTo(3));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0x0000_0002", "0x0000_0003"));
}
@Test
void should_assert_integers_in_hexadecimal() {
// GIVEN
int[] actual = { 1, 2 };
int[] expected = { 2 };
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).inHexadecimal().isEqualTo(expected));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("[0x0000_0001, 0x0000_0002]", "[0x0000_0002]"));
}
@Test
void should_assert_long_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(Long.MAX_VALUE).inHexadecimal()
.isEqualTo(Long.MIN_VALUE));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0x7FFF_FFFF_FFFF_FFFF", "0x8000_0000_0000_0000"));
}
@Test
void should_assert_signed_long_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(-2L).inHexadecimal().isEqualTo(2L));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0xFFFF_FFFF_FFFF_FFFE", "0x0000_0000_0000_0002"));
}
@Test
void should_assert_longs_in_hexadecimal() {
// GIVEN
long[] actual = { -1L, 2L };
long[] expected = { 3L };
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).inHexadecimal().isEqualTo(expected));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("[0xFFFF_FFFF_FFFF_FFFF, 0x0000_0000_0000_0002]",
"[0x0000_0000_0000_0003]"));
}
@Test
void should_assert_float_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(4.3f).inHexadecimal().isEqualTo(2.3f));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0x4089_999A", "0x4013_3333"));
}
@Test
void should_assert_floats_in_hexadecimal() {
// GIVEN
float[] actual = { 4.3f, -2f };
float[] expected = { 4.1f };
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).inHexadecimal().isEqualTo(expected));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("[0x4089_999A, 0xC000_0000]", "[0x4083_3333]"));
}
@Test
void should_assert_double_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(4.3d).inHexadecimal().isEqualTo(2.3d));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("0x4011_3333_3333_3333", "0x4002_6666_6666_6666"));
}
@Test
void should_assert_doubles_in_hexadecimal() {
// GIVEN
double[] actual = { 1d, 2d };
double[] expected = { 3d };
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).inHexadecimal().isEqualTo(expected));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("[0x3FF0_0000_0000_0000, 0x4000_0000_0000_0000]",
"[0x4008_0000_0000_0000]"));
}
@Test
void should_assert_collections_in_hexadecimal() {
// GIVEN
List<Integer> actual = List.of(1, 2);
List<Integer> expected = List.of(3);
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).inHexadecimal().isEqualTo(expected));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("[0x0000_0001, 0x0000_0002]", "[0x0000_0003]"));
}
@Test
void should_assert_Character_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat('a').inHexadecimal().isEqualTo('b'));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("'0x0061'", "'0x0062'"));
}
@Test
void should_assert_String_in_hexadecimal() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat("a6c").inHexadecimal().isEqualTo("abc"));
// THEN
then(assertionError).hasMessage(shouldBeEqualMessage("\"['0x0061', '0x0036', '0x0063']\"",
"\"['0x0061', '0x0062', '0x0063']\""));
}
@Test
public void should_keep_existing_description_set_before_calling_inHexadecimal() {
// GIVEN
String description = "My description";
// WHEN
var assertionError = expectAssertionError(() -> assertThat("ab").as(description).inHexadecimal().isNull());
// THEN
then(assertionError).hasMessageContaining(description);
}
}
|
Assertions_assertThat_inHexadecimal_Test
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/processor/src/main/java/org/jboss/resteasy/reactive/server/processor/util/GeneratedClassOutput.java
|
{
"start": 157,
"end": 470
}
|
class ____ implements ClassOutput {
final List<GeneratedClass> output = new ArrayList<>();
@Override
public void write(String name, byte[] data) {
output.add(new GeneratedClass(name, data));
}
public List<GeneratedClass> getOutput() {
return output;
}
}
|
GeneratedClassOutput
|
java
|
quarkusio__quarkus
|
independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/dto/config/AnalyticsLocalConfig.java
|
{
"start": 49,
"end": 189
}
|
interface ____ {
/**
* @return true if the analytics is enabled
* @return
*/
boolean isDisabled();
}
|
AnalyticsLocalConfig
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ModifiedButNotUsedTest.java
|
{
"start": 1609,
"end": 2225
}
|
class ____ {
void test() {
// BUG: Diagnostic contains:
List<Integer> foo = new ArrayList<>();
foo.add(1);
List<Integer> bar;
// BUG: Diagnostic contains:
bar = new ArrayList<>();
bar.add(1);
}
}
""")
.doTest();
}
@Test
public void sideEffectFreeRefactoring() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.util.ArrayList;
import java.util.List;
|
Test
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogDeletionService.java
|
{
"start": 12913,
"end": 13222
}
|
class ____ extends FilterFileSystem {
MockFileSystem() {
super(mock(FileSystem.class));
}
public void initialize(URI name, Configuration conf) throws IOException {}
@Override
public boolean hasPathCapability(Path path, String capability) {
return true;
}
}
}
|
MockFileSystem
|
java
|
google__guava
|
android/guava/src/com/google/common/graph/AbstractNetwork.java
|
{
"start": 1758,
"end": 1853
}
|
class ____ than implement {@link Network} directly.
*
* <p>The methods implemented in this
|
rather
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KeyVaultEndpointBuilderFactory.java
|
{
"start": 12684,
"end": 13011
}
|
class ____ extends AbstractEndpointBuilder implements KeyVaultEndpointBuilder, AdvancedKeyVaultEndpointBuilder {
public KeyVaultEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new KeyVaultEndpointBuilderImpl(path);
}
}
|
KeyVaultEndpointBuilderImpl
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/array/CharPrimitiveArraySerializerTest.java
|
{
"start": 1053,
"end": 1744
}
|
class ____ extends SerializerTestBase<char[]> {
@Override
protected TypeSerializer<char[]> createSerializer() {
return new CharPrimitiveArraySerializer();
}
@Override
protected Class<char[]> getTypeClass() {
return char[].class;
}
@Override
protected int getLength() {
return -1;
}
@Override
protected char[][] getTestData() {
return new char[][] {
new char[] {
0, 1, 2, 3, '\n', '\t', 'a', 'b', 'c', Character.MAX_VALUE, Character.MIN_VALUE
},
new char[] {},
new char[] {100, 200, 315, 0, 17, 0, 0}
};
}
}
|
CharPrimitiveArraySerializerTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/rest/FromRestGetCorsTest.java
|
{
"start": 1256,
"end": 3264
}
|
class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("dummy-rest", new DummyRestConsumerFactory());
return jndi;
}
@Test
public void testCors() throws Exception {
// the rest becomes routes and the input is a seda endpoint created by
// the DummyRestConsumerFactory
getMockEndpoint("mock:update").expectedMessageCount(1);
Exchange out = template.request("seda:post-say-bye", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody("I was here");
}
});
assertNotNull(out);
assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_ORIGIN,
out.getMessage().getHeader("Access-Control-Allow-Origin"));
assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_METHODS,
out.getMessage().getHeader("Access-Control-Allow-Methods"));
assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_HEADERS,
out.getMessage().getHeader("Access-Control-Allow-Headers"));
assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_MAX_AGE,
out.getMessage().getHeader("Access-Control-Max-Age"));
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
restConfiguration().host("localhost").enableCORS(true);
rest("/say/hello").get().to("direct:hello");
rest("/say/bye").get().consumes("application/json").to("direct:bye").post().to("mock:update");
from("direct:hello").transform().constant("Hello World");
from("direct:bye").transform().constant("Bye World");
}
};
}
}
|
FromRestGetCorsTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.