language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
apache__camel
components/camel-spring-parent/camel-spring-ai/camel-spring-ai-chat/src/test/java/org/apache/camel/component/springai/chat/SpringAiChatComponentTest.java
{ "start": 1521, "end": 2940 }
class ____ extends CamelTestSupport { private ChatModel mockChatModel; @Override protected void doPreSetup() throws Exception { super.doPreSetup(); // Create a mock ChatModel mockChatModel = mock(ChatModel.class); // Mock response AssistantMessage assistantMessage = new AssistantMessage("Hello! I'm a mock AI assistant."); Generation generation = new Generation(assistantMessage); ChatResponse chatResponse = new ChatResponse(java.util.List.of(generation)); when(mockChatModel.call(any(Prompt.class))).thenReturn(chatResponse); } @Test public void testChatComponent() throws Exception { String response = template.requestBody("direct:chat", "Hello, AI!", String.class); assertNotNull(response); assertEquals("Hello! I'm a mock AI assistant.", response); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { SpringAiChatComponent component = new SpringAiChatComponent(); component.setChatModel(mockChatModel); context.addComponent("spring-ai-chat", component); from("direct:chat") .to("spring-ai-chat:test"); } }; } }
SpringAiChatComponentTest
java
apache__camel
core/camel-core-model/src/main/java/org/apache/camel/model/HasExpressionType.java
{ "start": 998, "end": 1251 }
/**
 * Contract for model definitions that carry a single {@link ExpressionDefinition}.
 * Implemented by EIP model classes whose behavior is driven by one expression,
 * allowing generic code to read and replace that expression uniformly.
 */
interface ____ {

    /**
     * Gets the expression definition
     *
     * @return the currently configured expression definition
     */
    ExpressionDefinition getExpressionType();

    /**
     * Sets the expression definition
     *
     * @param expressionType the expression definition to use
     */
    void setExpressionType(ExpressionDefinition expressionType);
}
HasExpressionType
java
apache__camel
components/camel-debezium/camel-debezium-db2/src/generated/java/org/apache/camel/component/debezium/db2/DebeziumDb2EndpointConfigurer.java
{ "start": 739, "end": 43554 }
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { DebeziumDb2Endpoint target = (DebeziumDb2Endpoint) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "additionalproperties": case "additionalProperties": target.getConfiguration().setAdditionalProperties(property(camelContext, java.util.Map.class, value)); return true; case "bridgeerrorhandler": case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true; case "cdcchangetablesschema": case "cdcChangeTablesSchema": target.getConfiguration().setCdcChangeTablesSchema(property(camelContext, java.lang.String.class, value)); return true; case "cdccontrolschema": case "cdcControlSchema": target.getConfiguration().setCdcControlSchema(property(camelContext, java.lang.String.class, value)); return true; case "columnexcludelist": case "columnExcludeList": target.getConfiguration().setColumnExcludeList(property(camelContext, java.lang.String.class, value)); return true; case "columnincludelist": case "columnIncludeList": target.getConfiguration().setColumnIncludeList(property(camelContext, java.lang.String.class, value)); return true; case "columnpropagatesourcetype": case "columnPropagateSourceType": target.getConfiguration().setColumnPropagateSourceType(property(camelContext, java.lang.String.class, value)); return true; case "connectionvalidationtimeoutms": case "connectionValidationTimeoutMs": target.getConfiguration().setConnectionValidationTimeoutMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "converters": target.getConfiguration().setConverters(property(camelContext, java.lang.String.class, value)); return true; case "custommetrictags": case "customMetricTags": target.getConfiguration().setCustomMetricTags(property(camelContext, 
java.lang.String.class, value)); return true; case "databasedbname": case "databaseDbname": target.getConfiguration().setDatabaseDbname(property(camelContext, java.lang.String.class, value)); return true; case "databasehostname": case "databaseHostname": target.getConfiguration().setDatabaseHostname(property(camelContext, java.lang.String.class, value)); return true; case "databasepassword": case "databasePassword": target.getConfiguration().setDatabasePassword(property(camelContext, java.lang.String.class, value)); return true; case "databaseport": case "databasePort": target.getConfiguration().setDatabasePort(property(camelContext, int.class, value)); return true; case "databaseuser": case "databaseUser": target.getConfiguration().setDatabaseUser(property(camelContext, java.lang.String.class, value)); return true; case "datatypepropagatesourcetype": case "datatypePropagateSourceType": target.getConfiguration().setDatatypePropagateSourceType(property(camelContext, java.lang.String.class, value)); return true; case "db2platform": case "db2Platform": target.getConfiguration().setDb2Platform(property(camelContext, java.lang.String.class, value)); return true; case "decimalhandlingmode": case "decimalHandlingMode": target.getConfiguration().setDecimalHandlingMode(property(camelContext, java.lang.String.class, value)); return true; case "errorsmaxretries": case "errorsMaxRetries": target.getConfiguration().setErrorsMaxRetries(property(camelContext, int.class, value)); return true; case "eventprocessingfailurehandlingmode": case "eventProcessingFailureHandlingMode": target.getConfiguration().setEventProcessingFailureHandlingMode(property(camelContext, java.lang.String.class, value)); return true; case "exceptionhandler": case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true; case "exchangepattern": case "exchangePattern": target.setExchangePattern(property(camelContext, 
org.apache.camel.ExchangePattern.class, value)); return true; case "executorshutdowntimeoutms": case "executorShutdownTimeoutMs": target.getConfiguration().setExecutorShutdownTimeoutMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "extendedheadersenabled": case "extendedHeadersEnabled": target.getConfiguration().setExtendedHeadersEnabled(property(camelContext, boolean.class, value)); return true; case "guardrailcollectionslimitaction": case "guardrailCollectionsLimitAction": target.getConfiguration().setGuardrailCollectionsLimitAction(property(camelContext, java.lang.String.class, value)); return true; case "guardrailcollectionsmax": case "guardrailCollectionsMax": target.getConfiguration().setGuardrailCollectionsMax(property(camelContext, int.class, value)); return true; case "heartbeatintervalms": case "heartbeatIntervalMs": target.getConfiguration().setHeartbeatIntervalMs(property(camelContext, int.class, value)); return true; case "heartbeattopicsprefix": case "heartbeatTopicsPrefix": target.getConfiguration().setHeartbeatTopicsPrefix(property(camelContext, java.lang.String.class, value)); return true; case "includeschemachanges": case "includeSchemaChanges": target.getConfiguration().setIncludeSchemaChanges(property(camelContext, boolean.class, value)); return true; case "incrementalsnapshotchunksize": case "incrementalSnapshotChunkSize": target.getConfiguration().setIncrementalSnapshotChunkSize(property(camelContext, int.class, value)); return true; case "incrementalsnapshotwatermarkingstrategy": case "incrementalSnapshotWatermarkingStrategy": target.getConfiguration().setIncrementalSnapshotWatermarkingStrategy(property(camelContext, java.lang.String.class, value)); return true; case "internalkeyconverter": case "internalKeyConverter": target.getConfiguration().setInternalKeyConverter(property(camelContext, java.lang.String.class, value)); return true; case "internalvalueconverter": case "internalValueConverter": 
target.getConfiguration().setInternalValueConverter(property(camelContext, java.lang.String.class, value)); return true; case "maxbatchsize": case "maxBatchSize": target.getConfiguration().setMaxBatchSize(property(camelContext, int.class, value)); return true; case "maxqueuesize": case "maxQueueSize": target.getConfiguration().setMaxQueueSize(property(camelContext, int.class, value)); return true; case "maxqueuesizeinbytes": case "maxQueueSizeInBytes": target.getConfiguration().setMaxQueueSizeInBytes(property(camelContext, long.class, value)); return true; case "messagekeycolumns": case "messageKeyColumns": target.getConfiguration().setMessageKeyColumns(property(camelContext, java.lang.String.class, value)); return true; case "notificationenabledchannels": case "notificationEnabledChannels": target.getConfiguration().setNotificationEnabledChannels(property(camelContext, java.lang.String.class, value)); return true; case "notificationsinktopicname": case "notificationSinkTopicName": target.getConfiguration().setNotificationSinkTopicName(property(camelContext, java.lang.String.class, value)); return true; case "offsetcommitpolicy": case "offsetCommitPolicy": target.getConfiguration().setOffsetCommitPolicy(property(camelContext, java.lang.String.class, value)); return true; case "offsetcommittimeoutms": case "offsetCommitTimeoutMs": target.getConfiguration().setOffsetCommitTimeoutMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "offsetflushintervalms": case "offsetFlushIntervalMs": target.getConfiguration().setOffsetFlushIntervalMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "offsetstorage": case "offsetStorage": target.getConfiguration().setOffsetStorage(property(camelContext, java.lang.String.class, value)); return true; case "offsetstoragefilename": case "offsetStorageFileName": target.getConfiguration().setOffsetStorageFileName(property(camelContext, java.lang.String.class, 
value)); return true; case "offsetstoragepartitions": case "offsetStoragePartitions": target.getConfiguration().setOffsetStoragePartitions(property(camelContext, int.class, value)); return true; case "offsetstoragereplicationfactor": case "offsetStorageReplicationFactor": target.getConfiguration().setOffsetStorageReplicationFactor(property(camelContext, int.class, value)); return true; case "offsetstoragetopic": case "offsetStorageTopic": target.getConfiguration().setOffsetStorageTopic(property(camelContext, java.lang.String.class, value)); return true; case "openlineageintegrationconfigfilepath": case "openlineageIntegrationConfigFilePath": target.getConfiguration().setOpenlineageIntegrationConfigFilePath(property(camelContext, java.lang.String.class, value)); return true; case "openlineageintegrationdatasetkafkabootstrapservers": case "openlineageIntegrationDatasetKafkaBootstrapServers": target.getConfiguration().setOpenlineageIntegrationDatasetKafkaBootstrapServers(property(camelContext, java.lang.String.class, value)); return true; case "openlineageintegrationenabled": case "openlineageIntegrationEnabled": target.getConfiguration().setOpenlineageIntegrationEnabled(property(camelContext, boolean.class, value)); return true; case "openlineageintegrationjobdescription": case "openlineageIntegrationJobDescription": target.getConfiguration().setOpenlineageIntegrationJobDescription(property(camelContext, java.lang.String.class, value)); return true; case "openlineageintegrationjobnamespace": case "openlineageIntegrationJobNamespace": target.getConfiguration().setOpenlineageIntegrationJobNamespace(property(camelContext, java.lang.String.class, value)); return true; case "openlineageintegrationjobowners": case "openlineageIntegrationJobOwners": target.getConfiguration().setOpenlineageIntegrationJobOwners(property(camelContext, java.lang.String.class, value)); return true; case "openlineageintegrationjobtags": case "openlineageIntegrationJobTags": 
target.getConfiguration().setOpenlineageIntegrationJobTags(property(camelContext, java.lang.String.class, value)); return true; case "pollintervalms": case "pollIntervalMs": target.getConfiguration().setPollIntervalMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "postprocessors": case "postProcessors": target.getConfiguration().setPostProcessors(property(camelContext, java.lang.String.class, value)); return true; case "providetransactionmetadata": case "provideTransactionMetadata": target.getConfiguration().setProvideTransactionMetadata(property(camelContext, boolean.class, value)); return true; case "queryfetchsize": case "queryFetchSize": target.getConfiguration().setQueryFetchSize(property(camelContext, int.class, value)); return true; case "retriablerestartconnectorwaitms": case "retriableRestartConnectorWaitMs": target.getConfiguration().setRetriableRestartConnectorWaitMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "schemahistoryinternal": case "schemaHistoryInternal": target.getConfiguration().setSchemaHistoryInternal(property(camelContext, java.lang.String.class, value)); return true; case "schemahistoryinternalfilefilename": case "schemaHistoryInternalFileFilename": target.getConfiguration().setSchemaHistoryInternalFileFilename(property(camelContext, java.lang.String.class, value)); return true; case "schemahistoryinternalskipunparseableddl": case "schemaHistoryInternalSkipUnparseableDdl": target.getConfiguration().setSchemaHistoryInternalSkipUnparseableDdl(property(camelContext, boolean.class, value)); return true; case "schemahistoryinternalstoreonlycaptureddatabasesddl": case "schemaHistoryInternalStoreOnlyCapturedDatabasesDdl": target.getConfiguration().setSchemaHistoryInternalStoreOnlyCapturedDatabasesDdl(property(camelContext, boolean.class, value)); return true; case "schemahistoryinternalstoreonlycapturedtablesddl": case 
"schemaHistoryInternalStoreOnlyCapturedTablesDdl": target.getConfiguration().setSchemaHistoryInternalStoreOnlyCapturedTablesDdl(property(camelContext, boolean.class, value)); return true; case "schemanameadjustmentmode": case "schemaNameAdjustmentMode": target.getConfiguration().setSchemaNameAdjustmentMode(property(camelContext, java.lang.String.class, value)); return true; case "signaldatacollection": case "signalDataCollection": target.getConfiguration().setSignalDataCollection(property(camelContext, java.lang.String.class, value)); return true; case "signalenabledchannels": case "signalEnabledChannels": target.getConfiguration().setSignalEnabledChannels(property(camelContext, java.lang.String.class, value)); return true; case "signalpollintervalms": case "signalPollIntervalMs": target.getConfiguration().setSignalPollIntervalMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "skippedoperations": case "skippedOperations": target.getConfiguration().setSkippedOperations(property(camelContext, java.lang.String.class, value)); return true; case "snapshotdelayms": case "snapshotDelayMs": target.getConfiguration().setSnapshotDelayMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "snapshotfetchsize": case "snapshotFetchSize": target.getConfiguration().setSnapshotFetchSize(property(camelContext, int.class, value)); return true; case "snapshotincludecollectionlist": case "snapshotIncludeCollectionList": target.getConfiguration().setSnapshotIncludeCollectionList(property(camelContext, java.lang.String.class, value)); return true; case "snapshotlocktimeoutms": case "snapshotLockTimeoutMs": target.getConfiguration().setSnapshotLockTimeoutMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "snapshotmode": case "snapshotMode": target.getConfiguration().setSnapshotMode(property(camelContext, java.lang.String.class, value)); return true; case 
"snapshotmodeconfigurationbasedsnapshotdata": case "snapshotModeConfigurationBasedSnapshotData": target.getConfiguration().setSnapshotModeConfigurationBasedSnapshotData(property(camelContext, boolean.class, value)); return true; case "snapshotmodeconfigurationbasedsnapshotondataerror": case "snapshotModeConfigurationBasedSnapshotOnDataError": target.getConfiguration().setSnapshotModeConfigurationBasedSnapshotOnDataError(property(camelContext, boolean.class, value)); return true; case "snapshotmodeconfigurationbasedsnapshotonschemaerror": case "snapshotModeConfigurationBasedSnapshotOnSchemaError": target.getConfiguration().setSnapshotModeConfigurationBasedSnapshotOnSchemaError(property(camelContext, boolean.class, value)); return true; case "snapshotmodeconfigurationbasedsnapshotschema": case "snapshotModeConfigurationBasedSnapshotSchema": target.getConfiguration().setSnapshotModeConfigurationBasedSnapshotSchema(property(camelContext, boolean.class, value)); return true; case "snapshotmodeconfigurationbasedstartstream": case "snapshotModeConfigurationBasedStartStream": target.getConfiguration().setSnapshotModeConfigurationBasedStartStream(property(camelContext, boolean.class, value)); return true; case "snapshotmodecustomname": case "snapshotModeCustomName": target.getConfiguration().setSnapshotModeCustomName(property(camelContext, java.lang.String.class, value)); return true; case "snapshotselectstatementoverrides": case "snapshotSelectStatementOverrides": target.getConfiguration().setSnapshotSelectStatementOverrides(property(camelContext, java.lang.String.class, value)); return true; case "snapshottablesorderbyrowcount": case "snapshotTablesOrderByRowCount": target.getConfiguration().setSnapshotTablesOrderByRowCount(property(camelContext, java.lang.String.class, value)); return true; case "sourceinfostructmaker": case "sourceinfoStructMaker": target.getConfiguration().setSourceinfoStructMaker(property(camelContext, java.lang.String.class, value)); return true; 
case "streamingdelayms": case "streamingDelayMs": target.getConfiguration().setStreamingDelayMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "tableexcludelist": case "tableExcludeList": target.getConfiguration().setTableExcludeList(property(camelContext, java.lang.String.class, value)); return true; case "tableignorebuiltin": case "tableIgnoreBuiltin": target.getConfiguration().setTableIgnoreBuiltin(property(camelContext, boolean.class, value)); return true; case "tableincludelist": case "tableIncludeList": target.getConfiguration().setTableIncludeList(property(camelContext, java.lang.String.class, value)); return true; case "timeprecisionmode": case "timePrecisionMode": target.getConfiguration().setTimePrecisionMode(property(camelContext, java.lang.String.class, value)); return true; case "tombstonesondelete": case "tombstonesOnDelete": target.getConfiguration().setTombstonesOnDelete(property(camelContext, boolean.class, value)); return true; case "topicnamingstrategy": case "topicNamingStrategy": target.getConfiguration().setTopicNamingStrategy(property(camelContext, java.lang.String.class, value)); return true; case "topicprefix": case "topicPrefix": target.getConfiguration().setTopicPrefix(property(camelContext, java.lang.String.class, value)); return true; case "transactionmetadatafactory": case "transactionMetadataFactory": target.getConfiguration().setTransactionMetadataFactory(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? 
name.toLowerCase() : name) { case "additionalproperties": case "additionalProperties": return java.util.Map.class; case "bridgeerrorhandler": case "bridgeErrorHandler": return boolean.class; case "cdcchangetablesschema": case "cdcChangeTablesSchema": return java.lang.String.class; case "cdccontrolschema": case "cdcControlSchema": return java.lang.String.class; case "columnexcludelist": case "columnExcludeList": return java.lang.String.class; case "columnincludelist": case "columnIncludeList": return java.lang.String.class; case "columnpropagatesourcetype": case "columnPropagateSourceType": return java.lang.String.class; case "connectionvalidationtimeoutms": case "connectionValidationTimeoutMs": return long.class; case "converters": return java.lang.String.class; case "custommetrictags": case "customMetricTags": return java.lang.String.class; case "databasedbname": case "databaseDbname": return java.lang.String.class; case "databasehostname": case "databaseHostname": return java.lang.String.class; case "databasepassword": case "databasePassword": return java.lang.String.class; case "databaseport": case "databasePort": return int.class; case "databaseuser": case "databaseUser": return java.lang.String.class; case "datatypepropagatesourcetype": case "datatypePropagateSourceType": return java.lang.String.class; case "db2platform": case "db2Platform": return java.lang.String.class; case "decimalhandlingmode": case "decimalHandlingMode": return java.lang.String.class; case "errorsmaxretries": case "errorsMaxRetries": return int.class; case "eventprocessingfailurehandlingmode": case "eventProcessingFailureHandlingMode": return java.lang.String.class; case "exceptionhandler": case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class; case "exchangepattern": case "exchangePattern": return org.apache.camel.ExchangePattern.class; case "executorshutdowntimeoutms": case "executorShutdownTimeoutMs": return long.class; case "extendedheadersenabled": case 
"extendedHeadersEnabled": return boolean.class; case "guardrailcollectionslimitaction": case "guardrailCollectionsLimitAction": return java.lang.String.class; case "guardrailcollectionsmax": case "guardrailCollectionsMax": return int.class; case "heartbeatintervalms": case "heartbeatIntervalMs": return int.class; case "heartbeattopicsprefix": case "heartbeatTopicsPrefix": return java.lang.String.class; case "includeschemachanges": case "includeSchemaChanges": return boolean.class; case "incrementalsnapshotchunksize": case "incrementalSnapshotChunkSize": return int.class; case "incrementalsnapshotwatermarkingstrategy": case "incrementalSnapshotWatermarkingStrategy": return java.lang.String.class; case "internalkeyconverter": case "internalKeyConverter": return java.lang.String.class; case "internalvalueconverter": case "internalValueConverter": return java.lang.String.class; case "maxbatchsize": case "maxBatchSize": return int.class; case "maxqueuesize": case "maxQueueSize": return int.class; case "maxqueuesizeinbytes": case "maxQueueSizeInBytes": return long.class; case "messagekeycolumns": case "messageKeyColumns": return java.lang.String.class; case "notificationenabledchannels": case "notificationEnabledChannels": return java.lang.String.class; case "notificationsinktopicname": case "notificationSinkTopicName": return java.lang.String.class; case "offsetcommitpolicy": case "offsetCommitPolicy": return java.lang.String.class; case "offsetcommittimeoutms": case "offsetCommitTimeoutMs": return long.class; case "offsetflushintervalms": case "offsetFlushIntervalMs": return long.class; case "offsetstorage": case "offsetStorage": return java.lang.String.class; case "offsetstoragefilename": case "offsetStorageFileName": return java.lang.String.class; case "offsetstoragepartitions": case "offsetStoragePartitions": return int.class; case "offsetstoragereplicationfactor": case "offsetStorageReplicationFactor": return int.class; case "offsetstoragetopic": case 
"offsetStorageTopic": return java.lang.String.class; case "openlineageintegrationconfigfilepath": case "openlineageIntegrationConfigFilePath": return java.lang.String.class; case "openlineageintegrationdatasetkafkabootstrapservers": case "openlineageIntegrationDatasetKafkaBootstrapServers": return java.lang.String.class; case "openlineageintegrationenabled": case "openlineageIntegrationEnabled": return boolean.class; case "openlineageintegrationjobdescription": case "openlineageIntegrationJobDescription": return java.lang.String.class; case "openlineageintegrationjobnamespace": case "openlineageIntegrationJobNamespace": return java.lang.String.class; case "openlineageintegrationjobowners": case "openlineageIntegrationJobOwners": return java.lang.String.class; case "openlineageintegrationjobtags": case "openlineageIntegrationJobTags": return java.lang.String.class; case "pollintervalms": case "pollIntervalMs": return long.class; case "postprocessors": case "postProcessors": return java.lang.String.class; case "providetransactionmetadata": case "provideTransactionMetadata": return boolean.class; case "queryfetchsize": case "queryFetchSize": return int.class; case "retriablerestartconnectorwaitms": case "retriableRestartConnectorWaitMs": return long.class; case "schemahistoryinternal": case "schemaHistoryInternal": return java.lang.String.class; case "schemahistoryinternalfilefilename": case "schemaHistoryInternalFileFilename": return java.lang.String.class; case "schemahistoryinternalskipunparseableddl": case "schemaHistoryInternalSkipUnparseableDdl": return boolean.class; case "schemahistoryinternalstoreonlycaptureddatabasesddl": case "schemaHistoryInternalStoreOnlyCapturedDatabasesDdl": return boolean.class; case "schemahistoryinternalstoreonlycapturedtablesddl": case "schemaHistoryInternalStoreOnlyCapturedTablesDdl": return boolean.class; case "schemanameadjustmentmode": case "schemaNameAdjustmentMode": return java.lang.String.class; case "signaldatacollection": 
case "signalDataCollection": return java.lang.String.class; case "signalenabledchannels": case "signalEnabledChannels": return java.lang.String.class; case "signalpollintervalms": case "signalPollIntervalMs": return long.class; case "skippedoperations": case "skippedOperations": return java.lang.String.class; case "snapshotdelayms": case "snapshotDelayMs": return long.class; case "snapshotfetchsize": case "snapshotFetchSize": return int.class; case "snapshotincludecollectionlist": case "snapshotIncludeCollectionList": return java.lang.String.class; case "snapshotlocktimeoutms": case "snapshotLockTimeoutMs": return long.class; case "snapshotmode": case "snapshotMode": return java.lang.String.class; case "snapshotmodeconfigurationbasedsnapshotdata": case "snapshotModeConfigurationBasedSnapshotData": return boolean.class; case "snapshotmodeconfigurationbasedsnapshotondataerror": case "snapshotModeConfigurationBasedSnapshotOnDataError": return boolean.class; case "snapshotmodeconfigurationbasedsnapshotonschemaerror": case "snapshotModeConfigurationBasedSnapshotOnSchemaError": return boolean.class; case "snapshotmodeconfigurationbasedsnapshotschema": case "snapshotModeConfigurationBasedSnapshotSchema": return boolean.class; case "snapshotmodeconfigurationbasedstartstream": case "snapshotModeConfigurationBasedStartStream": return boolean.class; case "snapshotmodecustomname": case "snapshotModeCustomName": return java.lang.String.class; case "snapshotselectstatementoverrides": case "snapshotSelectStatementOverrides": return java.lang.String.class; case "snapshottablesorderbyrowcount": case "snapshotTablesOrderByRowCount": return java.lang.String.class; case "sourceinfostructmaker": case "sourceinfoStructMaker": return java.lang.String.class; case "streamingdelayms": case "streamingDelayMs": return long.class; case "tableexcludelist": case "tableExcludeList": return java.lang.String.class; case "tableignorebuiltin": case "tableIgnoreBuiltin": return boolean.class; case 
"tableincludelist": case "tableIncludeList": return java.lang.String.class; case "timeprecisionmode": case "timePrecisionMode": return java.lang.String.class; case "tombstonesondelete": case "tombstonesOnDelete": return boolean.class; case "topicnamingstrategy": case "topicNamingStrategy": return java.lang.String.class; case "topicprefix": case "topicPrefix": return java.lang.String.class; case "transactionmetadatafactory": case "transactionMetadataFactory": return java.lang.String.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { DebeziumDb2Endpoint target = (DebeziumDb2Endpoint) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "additionalproperties": case "additionalProperties": return target.getConfiguration().getAdditionalProperties(); case "bridgeerrorhandler": case "bridgeErrorHandler": return target.isBridgeErrorHandler(); case "cdcchangetablesschema": case "cdcChangeTablesSchema": return target.getConfiguration().getCdcChangeTablesSchema(); case "cdccontrolschema": case "cdcControlSchema": return target.getConfiguration().getCdcControlSchema(); case "columnexcludelist": case "columnExcludeList": return target.getConfiguration().getColumnExcludeList(); case "columnincludelist": case "columnIncludeList": return target.getConfiguration().getColumnIncludeList(); case "columnpropagatesourcetype": case "columnPropagateSourceType": return target.getConfiguration().getColumnPropagateSourceType(); case "connectionvalidationtimeoutms": case "connectionValidationTimeoutMs": return target.getConfiguration().getConnectionValidationTimeoutMs(); case "converters": return target.getConfiguration().getConverters(); case "custommetrictags": case "customMetricTags": return target.getConfiguration().getCustomMetricTags(); case "databasedbname": case "databaseDbname": return target.getConfiguration().getDatabaseDbname(); case "databasehostname": case "databaseHostname": return 
target.getConfiguration().getDatabaseHostname(); case "databasepassword": case "databasePassword": return target.getConfiguration().getDatabasePassword(); case "databaseport": case "databasePort": return target.getConfiguration().getDatabasePort(); case "databaseuser": case "databaseUser": return target.getConfiguration().getDatabaseUser(); case "datatypepropagatesourcetype": case "datatypePropagateSourceType": return target.getConfiguration().getDatatypePropagateSourceType(); case "db2platform": case "db2Platform": return target.getConfiguration().getDb2Platform(); case "decimalhandlingmode": case "decimalHandlingMode": return target.getConfiguration().getDecimalHandlingMode(); case "errorsmaxretries": case "errorsMaxRetries": return target.getConfiguration().getErrorsMaxRetries(); case "eventprocessingfailurehandlingmode": case "eventProcessingFailureHandlingMode": return target.getConfiguration().getEventProcessingFailureHandlingMode(); case "exceptionhandler": case "exceptionHandler": return target.getExceptionHandler(); case "exchangepattern": case "exchangePattern": return target.getExchangePattern(); case "executorshutdowntimeoutms": case "executorShutdownTimeoutMs": return target.getConfiguration().getExecutorShutdownTimeoutMs(); case "extendedheadersenabled": case "extendedHeadersEnabled": return target.getConfiguration().isExtendedHeadersEnabled(); case "guardrailcollectionslimitaction": case "guardrailCollectionsLimitAction": return target.getConfiguration().getGuardrailCollectionsLimitAction(); case "guardrailcollectionsmax": case "guardrailCollectionsMax": return target.getConfiguration().getGuardrailCollectionsMax(); case "heartbeatintervalms": case "heartbeatIntervalMs": return target.getConfiguration().getHeartbeatIntervalMs(); case "heartbeattopicsprefix": case "heartbeatTopicsPrefix": return target.getConfiguration().getHeartbeatTopicsPrefix(); case "includeschemachanges": case "includeSchemaChanges": return 
target.getConfiguration().isIncludeSchemaChanges(); case "incrementalsnapshotchunksize": case "incrementalSnapshotChunkSize": return target.getConfiguration().getIncrementalSnapshotChunkSize(); case "incrementalsnapshotwatermarkingstrategy": case "incrementalSnapshotWatermarkingStrategy": return target.getConfiguration().getIncrementalSnapshotWatermarkingStrategy(); case "internalkeyconverter": case "internalKeyConverter": return target.getConfiguration().getInternalKeyConverter(); case "internalvalueconverter": case "internalValueConverter": return target.getConfiguration().getInternalValueConverter(); case "maxbatchsize": case "maxBatchSize": return target.getConfiguration().getMaxBatchSize(); case "maxqueuesize": case "maxQueueSize": return target.getConfiguration().getMaxQueueSize(); case "maxqueuesizeinbytes": case "maxQueueSizeInBytes": return target.getConfiguration().getMaxQueueSizeInBytes(); case "messagekeycolumns": case "messageKeyColumns": return target.getConfiguration().getMessageKeyColumns(); case "notificationenabledchannels": case "notificationEnabledChannels": return target.getConfiguration().getNotificationEnabledChannels(); case "notificationsinktopicname": case "notificationSinkTopicName": return target.getConfiguration().getNotificationSinkTopicName(); case "offsetcommitpolicy": case "offsetCommitPolicy": return target.getConfiguration().getOffsetCommitPolicy(); case "offsetcommittimeoutms": case "offsetCommitTimeoutMs": return target.getConfiguration().getOffsetCommitTimeoutMs(); case "offsetflushintervalms": case "offsetFlushIntervalMs": return target.getConfiguration().getOffsetFlushIntervalMs(); case "offsetstorage": case "offsetStorage": return target.getConfiguration().getOffsetStorage(); case "offsetstoragefilename": case "offsetStorageFileName": return target.getConfiguration().getOffsetStorageFileName(); case "offsetstoragepartitions": case "offsetStoragePartitions": return target.getConfiguration().getOffsetStoragePartitions(); case 
"offsetstoragereplicationfactor": case "offsetStorageReplicationFactor": return target.getConfiguration().getOffsetStorageReplicationFactor(); case "offsetstoragetopic": case "offsetStorageTopic": return target.getConfiguration().getOffsetStorageTopic(); case "openlineageintegrationconfigfilepath": case "openlineageIntegrationConfigFilePath": return target.getConfiguration().getOpenlineageIntegrationConfigFilePath(); case "openlineageintegrationdatasetkafkabootstrapservers": case "openlineageIntegrationDatasetKafkaBootstrapServers": return target.getConfiguration().getOpenlineageIntegrationDatasetKafkaBootstrapServers(); case "openlineageintegrationenabled": case "openlineageIntegrationEnabled": return target.getConfiguration().isOpenlineageIntegrationEnabled(); case "openlineageintegrationjobdescription": case "openlineageIntegrationJobDescription": return target.getConfiguration().getOpenlineageIntegrationJobDescription(); case "openlineageintegrationjobnamespace": case "openlineageIntegrationJobNamespace": return target.getConfiguration().getOpenlineageIntegrationJobNamespace(); case "openlineageintegrationjobowners": case "openlineageIntegrationJobOwners": return target.getConfiguration().getOpenlineageIntegrationJobOwners(); case "openlineageintegrationjobtags": case "openlineageIntegrationJobTags": return target.getConfiguration().getOpenlineageIntegrationJobTags(); case "pollintervalms": case "pollIntervalMs": return target.getConfiguration().getPollIntervalMs(); case "postprocessors": case "postProcessors": return target.getConfiguration().getPostProcessors(); case "providetransactionmetadata": case "provideTransactionMetadata": return target.getConfiguration().isProvideTransactionMetadata(); case "queryfetchsize": case "queryFetchSize": return target.getConfiguration().getQueryFetchSize(); case "retriablerestartconnectorwaitms": case "retriableRestartConnectorWaitMs": return target.getConfiguration().getRetriableRestartConnectorWaitMs(); case 
"schemahistoryinternal": case "schemaHistoryInternal": return target.getConfiguration().getSchemaHistoryInternal(); case "schemahistoryinternalfilefilename": case "schemaHistoryInternalFileFilename": return target.getConfiguration().getSchemaHistoryInternalFileFilename(); case "schemahistoryinternalskipunparseableddl": case "schemaHistoryInternalSkipUnparseableDdl": return target.getConfiguration().isSchemaHistoryInternalSkipUnparseableDdl(); case "schemahistoryinternalstoreonlycaptureddatabasesddl": case "schemaHistoryInternalStoreOnlyCapturedDatabasesDdl": return target.getConfiguration().isSchemaHistoryInternalStoreOnlyCapturedDatabasesDdl(); case "schemahistoryinternalstoreonlycapturedtablesddl": case "schemaHistoryInternalStoreOnlyCapturedTablesDdl": return target.getConfiguration().isSchemaHistoryInternalStoreOnlyCapturedTablesDdl(); case "schemanameadjustmentmode": case "schemaNameAdjustmentMode": return target.getConfiguration().getSchemaNameAdjustmentMode(); case "signaldatacollection": case "signalDataCollection": return target.getConfiguration().getSignalDataCollection(); case "signalenabledchannels": case "signalEnabledChannels": return target.getConfiguration().getSignalEnabledChannels(); case "signalpollintervalms": case "signalPollIntervalMs": return target.getConfiguration().getSignalPollIntervalMs(); case "skippedoperations": case "skippedOperations": return target.getConfiguration().getSkippedOperations(); case "snapshotdelayms": case "snapshotDelayMs": return target.getConfiguration().getSnapshotDelayMs(); case "snapshotfetchsize": case "snapshotFetchSize": return target.getConfiguration().getSnapshotFetchSize(); case "snapshotincludecollectionlist": case "snapshotIncludeCollectionList": return target.getConfiguration().getSnapshotIncludeCollectionList(); case "snapshotlocktimeoutms": case "snapshotLockTimeoutMs": return target.getConfiguration().getSnapshotLockTimeoutMs(); case "snapshotmode": case "snapshotMode": return 
target.getConfiguration().getSnapshotMode(); case "snapshotmodeconfigurationbasedsnapshotdata": case "snapshotModeConfigurationBasedSnapshotData": return target.getConfiguration().isSnapshotModeConfigurationBasedSnapshotData(); case "snapshotmodeconfigurationbasedsnapshotondataerror": case "snapshotModeConfigurationBasedSnapshotOnDataError": return target.getConfiguration().isSnapshotModeConfigurationBasedSnapshotOnDataError(); case "snapshotmodeconfigurationbasedsnapshotonschemaerror": case "snapshotModeConfigurationBasedSnapshotOnSchemaError": return target.getConfiguration().isSnapshotModeConfigurationBasedSnapshotOnSchemaError(); case "snapshotmodeconfigurationbasedsnapshotschema": case "snapshotModeConfigurationBasedSnapshotSchema": return target.getConfiguration().isSnapshotModeConfigurationBasedSnapshotSchema(); case "snapshotmodeconfigurationbasedstartstream": case "snapshotModeConfigurationBasedStartStream": return target.getConfiguration().isSnapshotModeConfigurationBasedStartStream(); case "snapshotmodecustomname": case "snapshotModeCustomName": return target.getConfiguration().getSnapshotModeCustomName(); case "snapshotselectstatementoverrides": case "snapshotSelectStatementOverrides": return target.getConfiguration().getSnapshotSelectStatementOverrides(); case "snapshottablesorderbyrowcount": case "snapshotTablesOrderByRowCount": return target.getConfiguration().getSnapshotTablesOrderByRowCount(); case "sourceinfostructmaker": case "sourceinfoStructMaker": return target.getConfiguration().getSourceinfoStructMaker(); case "streamingdelayms": case "streamingDelayMs": return target.getConfiguration().getStreamingDelayMs(); case "tableexcludelist": case "tableExcludeList": return target.getConfiguration().getTableExcludeList(); case "tableignorebuiltin": case "tableIgnoreBuiltin": return target.getConfiguration().isTableIgnoreBuiltin(); case "tableincludelist": case "tableIncludeList": return target.getConfiguration().getTableIncludeList(); case 
"timeprecisionmode": case "timePrecisionMode": return target.getConfiguration().getTimePrecisionMode(); case "tombstonesondelete": case "tombstonesOnDelete": return target.getConfiguration().isTombstonesOnDelete(); case "topicnamingstrategy": case "topicNamingStrategy": return target.getConfiguration().getTopicNamingStrategy(); case "topicprefix": case "topicPrefix": return target.getConfiguration().getTopicPrefix(); case "transactionmetadatafactory": case "transactionMetadataFactory": return target.getConfiguration().getTransactionMetadataFactory(); default: return null; } } @Override public Object getCollectionValueType(Object target, String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "additionalproperties": case "additionalProperties": return java.lang.Object.class; default: return null; } } }
DebeziumDb2EndpointConfigurer
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/QueueTest.java
{ "start": 977, "end": 1900 }
class ____ { @Test public void test(EntityManagerFactoryScope scope) { scope.inTransaction( entityManager -> { Person person = new Person(1L); person.getPhones().add(new Phone(1L, "landline", "028-234-9876")); person.getPhones().add(new Phone(2L, "mobile", "072-122-9876")); entityManager.persist(person); }); scope.inTransaction( entityManager -> { //tag::collections-custom-collection-example[] Person person = entityManager.find(Person.class, 1L); Queue<Phone> phones = person.getPhones(); Phone head = phones.peek(); assertSame(head, phones.poll()); assertEquals(1, phones.size()); //end::collections-custom-collection-example[] }); scope.inTransaction( entityManager -> { Person person = entityManager.find(Person.class, 1L); person.getPhones().clear(); }); } //tag::collections-custom-collection-mapping-example[] @Entity(name = "Person") public static
QueueTest
java
netty__netty
transport-native-io_uring/src/test/java/io/netty/channel/uring/IoUringRemoteIpTest.java
{ "start": 1456, "end": 4347 }
class ____ { @BeforeAll public static void loadJNI() { Assumptions.assumeTrue(IoUring.isAvailable()); } @Test public void testRemoteAddressIpv4() throws Exception { testRemoteAddress(NetUtil.LOCALHOST4, NetUtil.LOCALHOST4); } @Test public void testRemoteAddressIpv6() throws Exception { testRemoteAddress(NetUtil.LOCALHOST6, NetUtil.LOCALHOST6); } @Test public void testRemoteAddressIpv4AndServerAutoDetect() throws Exception { testRemoteAddress(null, NetUtil.LOCALHOST4); } @Test public void testRemoteAddressIpv6ServerAutoDetect() throws Exception { testRemoteAddress(null, NetUtil.LOCALHOST6); } private static void testRemoteAddress(InetAddress server, InetAddress client) throws Exception { final Promise<SocketAddress> promise = ImmediateEventExecutor.INSTANCE.newPromise(); EventLoopGroup bossGroup = new MultiThreadIoEventLoopGroup(1, IoUringIoHandler.newFactory()); Socket socket = new Socket(); try { ServerBootstrap b = new ServerBootstrap(); b.group(bossGroup) .channel(IoUringServerSocketChannel.class) .childHandler(new ChannelInboundHandlerAdapter() { @Override public void channelActive(ChannelHandlerContext ctx) { promise.setSuccess(ctx.channel().remoteAddress()); ctx.close(); } }); // Start the server. 
ChannelFuture f; InetSocketAddress connectAddress; if (server == null) { f = b.bind(0).sync(); connectAddress = new InetSocketAddress(client, ((InetSocketAddress) f.channel().localAddress()).getPort()); } else { try { f = b.bind(server, 0).sync(); } catch (Throwable cause) { throw new TestAbortedException("Bind failed, address family not supported ?", cause); } connectAddress = (InetSocketAddress) f.channel().localAddress(); } try { socket.bind(new InetSocketAddress(client, 0)); } catch (SocketException e) { throw new TestAbortedException("Bind failed, address family not supported ?", e); } socket.connect(connectAddress); InetSocketAddress addr = (InetSocketAddress) promise.get(); assertEquals(socket.getLocalSocketAddress(), addr); f.channel().close().sync(); } finally { // Shut down all event loops to terminate all threads. bossGroup.shutdownGracefully(); socket.close(); } } }
IoUringRemoteIpTest
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest108_drds.java
{ "start": 325, "end": 1729 }
class ____ extends MysqlTest { public void test_0() throws Exception { String sql = "create table ARCHIVE_ERR_RECORD\n" + "(\n" + "SUBS_ORDER_ID numeric(18,0) not null comment '订单编号',\n" + "ERR_MSG text comment '失败的消息结构',\n" + "ERR_REASON varchar(255) comment '失败原因',\n" + "PART_ID integer not null comment '分区标识(取订单编号中的月份)'\n" + ")\n" + "DBPARTITION BY HASH(SUBS_ORDER_ID)\n" + "TBPARTITION BY UNI_HASH(PART_ID) TBPARTITIONS 12;"; List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.ALIYUN_DRDS); SQLCreateTableStatement stmt = (SQLCreateTableStatement) statementList.get(0); assertEquals(1, statementList.size()); assertEquals(4, stmt.getTableElementList().size()); assertEquals("CREATE TABLE ARCHIVE_ERR_RECORD (\n" + "\tSUBS_ORDER_ID numeric(18, 0) NOT NULL COMMENT '订单编号',\n" + "\tERR_MSG text COMMENT '失败的消息结构',\n" + "\tERR_REASON varchar(255) COMMENT '失败原因',\n" + "\tPART_ID integer NOT NULL COMMENT '分区标识(取订单编号中的月份)'\n" + ")\n" + "DBPARTITION BY HASH(SUBS_ORDER_ID)\n" + "TBPARTITION BY UNI_HASH(PART_ID) TBPARTITIONS 12;", stmt.toString()); } }
MySqlCreateTableTest108_drds
java
netty__netty
codec-compression/src/main/java/io/netty/handler/codec/compression/Bzip2DivSufSort.java
{ "start": 60272, "end": 69719 }
class ____ { int budget; int chance; TRBudget(final int budget, final int chance) { this.budget = budget; this.chance = chance; } boolean update(final int size, final int n) { budget -= n; if (budget <= 0) { if (--chance == 0) { return false; } budget += size; } return true; } } private void trSort(final int isa, final int n, final int depth) { final int[] SA = this.SA; int first = 0, last; int t; if (-n < SA[0]) { TRBudget budget = new TRBudget(n, trLog(n) * 2 / 3 + 1); do { if ((t = SA[first]) < 0) { first -= t; } else { last = SA[isa + t] + 1; if (1 < last - first) { trIntroSort(isa, isa + depth, isa + n, first, last, budget, n); if (budget.chance == 0) { /* Switch to Larsson-Sadakane sorting algorithm */ if (0 < first) { SA[0] = -first; } lsSort(isa, n, depth); break; } } first = last; } } while (first < n); } } /*---------------------------------------------------------------------------*/ private static int BUCKET_B(final int c0, final int c1) { return (c1 << 8) | c0; } private static int BUCKET_BSTAR(final int c0, final int c1) { return (c0 << 8) | c1; } private int sortTypeBstar(final int[] bucketA, final int[] bucketB) { final byte[] T = this.T; final int[] SA = this.SA; final int n = this.n; final int[] tempbuf = new int[256]; int[] buf; int PAb, ISAb, bufoffset; int i, j, k, t, m, bufsize; int c0, c1; int flag; for (i = 1, flag = 1; i < n; ++i) { if (T[i - 1] != T[i]) { if ((T[i - 1] & 0xff) > (T[i] & 0xff)) { flag = 0; } break; } } i = n - 1; m = n; int ti, ti1, t0; if ((ti = T[i] & 0xff) < (t0 = T[0] & 0xff) || (T[i] == T[0] && flag != 0)) { if (flag == 0) { ++bucketB[BUCKET_BSTAR(ti, t0)]; SA[--m] = i; } else { ++bucketB[BUCKET_B(ti, t0)]; } for (--i; 0 <= i && (ti = T[i] & 0xff) <= (ti1 = T[i + 1] & 0xff); --i) { ++bucketB[BUCKET_B(ti, ti1)]; } } while (0 <= i) { do { ++bucketA[T[i] & 0xff]; } while (0 <= --i && (T[i] & 0xff) >= (T[i + 1] & 0xff)); if (0 <= i) { ++bucketB[BUCKET_BSTAR(T[i] & 0xff, T[i + 1] & 0xff)]; SA[--m] = i; for (--i; 0 <= i && 
(ti = T[i] & 0xff) <= (ti1 = T[i + 1] & 0xff); --i) { ++bucketB[BUCKET_B(ti, ti1)]; } } } m = n - m; if (m == 0) { for (i = 0; i < n; ++i) { SA[i] = i; } return 0; } for (c0 = 0, i = -1, j = 0; c0 < 256; ++c0) { t = i + bucketA[c0]; bucketA[c0] = i + j; i = t + bucketB[BUCKET_B(c0, c0)]; for (c1 = c0 + 1; c1 < 256; ++c1) { j += bucketB[BUCKET_BSTAR(c0, c1)]; bucketB[(c0 << 8) | c1] = j; i += bucketB[BUCKET_B(c0, c1)]; } } PAb = n - m; ISAb = m; for (i = m - 2; 0 <= i; --i) { t = SA[PAb + i]; c0 = T[t] & 0xff; c1 = T[t + 1] & 0xff; SA[--bucketB[BUCKET_BSTAR(c0, c1)]] = i; } t = SA[PAb + m - 1]; c0 = T[t] & 0xff; c1 = T[t + 1] & 0xff; SA[--bucketB[BUCKET_BSTAR(c0, c1)]] = m - 1; buf = SA; bufoffset = m; bufsize = n - 2 * m; if (bufsize <= 256) { buf = tempbuf; bufoffset = 0; bufsize = 256; } for (c0 = 255, j = m; 0 < j; --c0) { for (c1 = 255; c0 < c1; j = i, --c1) { i = bucketB[BUCKET_BSTAR(c0, c1)]; if (1 < j - i) { subStringSort(PAb, i, j, buf, bufoffset, bufsize, 2, SA[i] == m - 1, n); } } } for (i = m - 1; 0 <= i; --i) { if (0 <= SA[i]) { j = i; do { SA[ISAb + SA[i]] = i; } while (0 <= --i && 0 <= SA[i]); SA[i + 1] = i - j; if (i <= 0) { break; } } j = i; do { SA[ISAb + (SA[i] = ~SA[i])] = j; } while (SA[--i] < 0); SA[ISAb + SA[i]] = j; } trSort(ISAb, m, 1); i = n - 1; j = m; if ((T[i] & 0xff) < (T[0] & 0xff) || (T[i] == T[0] && flag != 0)) { if (flag == 0) { SA[SA[ISAb + --j]] = i; } for (--i; 0 <= i && (T[i] & 0xff) <= (T[i + 1] & 0xff);) { --i; } } while (0 <= i) { for (--i; 0 <= i && (T[i] & 0xff) >= (T[i + 1] & 0xff);) { --i; } if (0 <= i) { SA[SA[ISAb + --j]] = i; for (--i; 0 <= i && (T[i] & 0xff) <= (T[i + 1] & 0xff);) { --i; } } } for (c0 = 255, i = n - 1, k = m - 1; 0 <= c0; --c0) { for (c1 = 255; c0 < c1; --c1) { t = i - bucketB[BUCKET_B(c0, c1)]; bucketB[BUCKET_B(c0, c1)] = i + 1; for (i = t, j = bucketB[BUCKET_BSTAR(c0, c1)]; j <= k; --i, --k) { SA[i] = SA[k]; } } t = i - bucketB[BUCKET_B(c0, c0)]; bucketB[BUCKET_B(c0, c0)] = i + 1; if (c0 < 255) { 
bucketB[BUCKET_BSTAR(c0, c0 + 1)] = t + 1; } i = bucketA[c0]; } return m; } private int constructBWT(final int[] bucketA, final int[] bucketB) { final byte[] T = this.T; final int[] SA = this.SA; final int n = this.n; int i, j, t = 0; int s, s1; int c0, c1, c2 = 0; int orig = -1; for (c1 = 254; 0 <= c1; --c1) { for (i = bucketB[BUCKET_BSTAR(c1, c1 + 1)], j = bucketA[c1 + 1], t = 0, c2 = -1; i <= j; --j) { if (0 <= (s1 = s = SA[j])) { if (--s < 0) { s = n - 1; } if ((c0 = T[s] & 0xff) <= c1) { SA[j] = ~s1; if (0 < s && (T[s - 1] & 0xff) > c0) { s = ~s; } if (c2 == c0) { SA[--t] = s; } else { if (0 <= c2) { bucketB[BUCKET_B(c2, c1)] = t; } SA[t = bucketB[BUCKET_B(c2 = c0, c1)] - 1] = s; } } } else { SA[j] = ~s; } } } for (i = 0; i < n; ++i) { if (0 <= (s1 = s = SA[i])) { if (--s < 0) { s = n - 1; } if ((c0 = T[s] & 0xff) >= (T[s + 1] & 0xff)) { if (0 < s && (T[s - 1] & 0xff) < c0) { s = ~s; } if (c0 == c2) { SA[++t] = s; } else { if (c2 != -1) { bucketA[c2] = t; // BUGFIX: Original code can write to bucketA[-1] } SA[t = bucketA[c2 = c0] + 1] = s; } } } else { s1 = ~s1; } if (s1 == 0) { SA[i] = T[n - 1]; orig = i; } else { SA[i] = T[s1 - 1]; } } return orig; } /** * Performs a Burrows Wheeler Transform on the input array. * @return the index of the first character of the input array within the output array */ public int bwt() { final int[] SA = this.SA; final byte[] T = this.T; final int n = this.n; final int[] bucketA = new int[BUCKET_A_SIZE]; final int[] bucketB = new int[BUCKET_B_SIZE]; if (n == 0) { return 0; } if (n == 1) { SA[0] = T[0]; return 0; } int m = sortTypeBstar(bucketA, bucketB); if (0 < m) { return constructBWT(bucketA, bucketB); } return 0; } }
TRBudget
java
spring-projects__spring-framework
spring-beans/src/test/java/org/springframework/beans/ExtendedBeanInfoTests.java
{ "start": 32649, "end": 32729 }
interface ____ { Book getBook(); void setBook(Book book); }
BookOperations
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/DatabindContext.java
{ "start": 5872, "end": 9954 }
class ____; so let's first // check if any generics info is added; and only then ask factory // to do translation when necessary if (subClassName.indexOf('<') > 0) { // note: may want to try combining with specialization (esp for EnumMap)? // 17-Aug-2017, tatu: As per [databind#1735] need to ensure assignment // compatibility -- needed later anyway, and not doing so may open // security issues. JavaType t = getTypeFactory().constructFromCanonical(subClassName); if (t.isTypeOrSubTypeOf(baseType.getRawClass())) { return t; } } else { Class<?> cls; try { cls = getTypeFactory().findClass(subClassName); } catch (ClassNotFoundException e) { // let caller handle this problem return null; } catch (Exception e) { throw invalidTypeIdException(baseType, subClassName, String.format( "problem: (%s) %s", e.getClass().getName(), ClassUtil.exceptionMessage(e))); } if (baseType.isTypeOrSuperTypeOf(cls)) { return getTypeFactory().constructSpecializedType(baseType, cls); } } throw invalidTypeIdException(baseType, subClassName, "Not a subtype"); } /** * Lookup method similar to {@link #resolveSubType} but one that also validates * that resulting subtype is valid according to given {@link PolymorphicTypeValidator}. 
*/ public JavaType resolveAndValidateSubType(JavaType baseType, String subClass, PolymorphicTypeValidator ptv) { // Off-line the special case of generic (parameterized) type: final int ltIndex = subClass.indexOf('<'); if (ltIndex > 0) { return _resolveAndValidateGeneric(baseType, subClass, ptv, ltIndex); } PolymorphicTypeValidator.Validity vld = ptv.validateSubClassName(this, baseType, subClass); if (vld == Validity.DENIED) { return _throwSubtypeNameNotAllowed(baseType, subClass, ptv); } final Class<?> cls; try { cls = getTypeFactory().findClass(subClass); } catch (ClassNotFoundException e) { // let caller handle this problem return null; } catch (Exception e) { throw invalidTypeIdException(baseType, subClass, String.format( "problem: (%s) %s", e.getClass().getName(), ClassUtil.exceptionMessage(e))); } if (!baseType.isTypeOrSuperTypeOf(cls)) { return _throwNotASubtype(baseType, subClass); } final JavaType subType = getTypeFactory().constructSpecializedType(baseType, cls); // May skip check if type was allowed by subclass name already if (vld != Validity.ALLOWED) { if (ptv.validateSubType(this, baseType, subType) != Validity.ALLOWED) { return _throwSubtypeClassNotAllowed(baseType, subClass, ptv); } } return subType; } private JavaType _resolveAndValidateGeneric(JavaType baseType, String subClass, PolymorphicTypeValidator ptv, int ltIndex) { // 24-Apr-2019, tatu: Not 100% sure if we should pass name with type parameters // or not, but guessing it's more convenient not to have to worry about it so // strip out PolymorphicTypeValidator.Validity vld = ptv.validateSubClassName(this, baseType, subClass.substring(0, ltIndex)); if (vld == Validity.DENIED) { return _throwSubtypeNameNotAllowed(baseType, subClass, ptv); } JavaType subType = getTypeFactory().constructFromCanonical(subClass); if (!subType.isTypeOrSubTypeOf(baseType.getRawClass())) { return _throwNotASubtype(baseType, subClass); } // Unless we were approved already by name, check that actual sub-
names
java
spring-projects__spring-boot
module/spring-boot-health/src/main/java/org/springframework/boot/health/autoconfigure/actuate/endpoint/AvailabilityProbesHealthEndpointGroupsPostProcessor.java
{ "start": 1206, "end": 1775 }
class ____ implements HealthEndpointGroupsPostProcessor { private final boolean addAdditionalPaths; AvailabilityProbesHealthEndpointGroupsPostProcessor(Environment environment) { this.addAdditionalPaths = "true" .equalsIgnoreCase(environment.getProperty("management.endpoint.health.probes.add-additional-paths")); } @Override public HealthEndpointGroups postProcessHealthEndpointGroups(HealthEndpointGroups groups) { return new AvailabilityProbesHealthEndpointGroups(groups, this.addAdditionalPaths); } }
AvailabilityProbesHealthEndpointGroupsPostProcessor
java
quarkusio__quarkus
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/DelayedAttributes.java
{ "start": 528, "end": 2804 }
class ____ implements Attributes { private static final Logger log = Logger.getLogger(DelayedAttributes.class); private boolean warningLogged = false; private Attributes delegate; /** * Set the actual {@link Attributes} to use as the delegate. * * @param delegate Properly constructed {@link Attributes}. */ public void setAttributesDelegate(Attributes delegate) { this.delegate = delegate; } @Override public <T> T get(AttributeKey<T> attributeKey) { if (delegate == null) { logDelegateNotFound(); return null; } return delegate.get(attributeKey); } @Override public void forEach(BiConsumer<? super AttributeKey<?>, ? super Object> biConsumer) { if (delegate == null) { logDelegateNotFound(); return; } delegate.forEach(biConsumer); } @Override public int size() { if (delegate == null) { logDelegateNotFound(); return 0; } return delegate.size(); } @Override public boolean isEmpty() { if (delegate == null) { logDelegateNotFound(); return true; } return delegate.isEmpty(); } @Override public Map<AttributeKey<?>, Object> asMap() { if (delegate == null) { logDelegateNotFound(); return Collections.emptyMap(); } return delegate.asMap(); } @Override public AttributesBuilder toBuilder() { if (delegate == null) { logDelegateNotFound(); return Attributes.builder(); } return delegate.toBuilder(); } @Override public String toString() { if (delegate == null) { logDelegateNotFound(); return "{}"; } return delegate.toString(); } /** * If we haven't previously logged an error, * log an error about a missing {@code delegate} and set {@code warningLogged=true} */ private void logDelegateNotFound() { if (!warningLogged) { log.warn("No Attributes delegate specified, no action taken."); warningLogged = true; } } }
DelayedAttributes
java
apache__camel
components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutTransferExchangeInflightRepositoryFlushTest.java
{ "start": 1682, "end": 3880 }
class ____ extends AbstractJMSTest { @Order(2) @RegisterExtension public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension(); protected CamelContext context; protected ProducerTemplate template; protected ConsumerTemplate consumer; @Override protected String getComponentName() { return "activemq"; } @Test public void testTransferExchangeInOut() throws Exception { assertEquals(0, context.getInflightRepository().size()); MockEndpoint result = getMockEndpoint("mock:result"); result.expectedMessageCount(1); template.send("direct:start", exchange -> exchange.getIn().setBody(new SerializableRequestDto("Restless Camel"))); MockEndpoint.assertIsSatisfied(context); assertEquals(0, context.getInflightRepository().size()); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("direct:start") .to(ExchangePattern.InOut, "activemq:JmsInOutTransferExchangeInflightRepositoryFlushTest.responseGenerator?transferExchange=true&requestTimeout=5000") .to("mock:result"); from("activemq:JmsInOutTransferExchangeInflightRepositoryFlushTest.responseGenerator?transferExchange=true") .process(exchange -> { // there are 2 inflight (one for both routes) assertEquals(2, exchange.getContext().getInflightRepository().size()); exchange.getIn().setBody(new SerializableResponseDto(true)); }); } }; } @Override public CamelContextExtension getCamelContextExtension() { return camelContextExtension; } @BeforeEach void setUpRequirements() { context = camelContextExtension.getContext(); template = camelContextExtension.getProducerTemplate(); consumer = camelContextExtension.getConsumerTemplate(); } }
JmsInOutTransferExchangeInflightRepositoryFlushTest
java
apache__maven
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng8414ConsumerPomWithNewFeaturesTest.java
{ "start": 1306, "end": 3856 }
class ____ extends AbstractMavenIntegrationTestCase { /** * Verify behavior of the consumer POM when using a feature that require a newer model. */ @Test void testNotPreserving() throws Exception { Path basedir = extractResources("/mng-8414-consumer-pom-with-new-features").toPath(); Verifier verifier = newVerifier(basedir.toString(), null); verifier.addCliArguments("package", "-Dmaven.consumer.pom.flatten=true"); verifier.execute(); verifier.verifyErrorFreeLog(); verifier.verifyTextInLog( "The consumer POM for org.apache.maven.its:mng-8414:jar:1.0.0-SNAPSHOT cannot be downgraded to 4.0.0."); Path consumerPom = basedir.resolve(Paths.get( "target", "project-local-repo", "org.apache.maven.its", "mng-8414", "1.0.0-SNAPSHOT", "mng-8414-1.0.0-SNAPSHOT-consumer.pom")); assertTrue(Files.exists(consumerPom)); Model consumerPomModel; try (Reader r = Files.newBufferedReader(consumerPom)) { consumerPomModel = new MavenStaxReader().read(r); } assertEquals("4.1.0", consumerPomModel.getModelVersion()); } /** * Verify behavior of the consumer POM when using a feature that require a newer model. */ @Test void testPreserving() throws Exception { Path basedir = extractResources("/mng-8414-consumer-pom-with-new-features").toPath(); Verifier verifier = newVerifier(basedir.toString(), null); verifier.setLogFileName("log-preserving.txt"); verifier.addCliArguments("-f", "pom-preserving.xml", "package", "-Dmaven.consumer.pom.flatten=true"); verifier.execute(); verifier.verifyErrorFreeLog(); verifier.verifyTextNotInLog("cannot be downgraded to 4.0.0."); Path consumerPom = basedir.resolve(Paths.get( "target", "project-local-repo", "org.apache.maven.its", "mng-8414-preserving", "1.0.0-SNAPSHOT", "mng-8414-preserving-1.0.0-SNAPSHOT-consumer.pom")); assertTrue(Files.exists(consumerPom)); Model consumerPomModel; try (Reader r = Files.newBufferedReader(consumerPom)) { consumerPomModel = new MavenStaxReader().read(r); } assertEquals("4.1.0", consumerPomModel.getModelVersion()); } }
MavenITmng8414ConsumerPomWithNewFeaturesTest
java
apache__flink
flink-metrics/flink-metrics-core/src/main/java/org/apache/flink/metrics/groups/SinkCommitterMetricGroup.java
{ "start": 1114, "end": 1837 }
interface ____ extends OperatorMetricGroup { /** The total number of committables arrived. */ Counter getNumCommittablesTotalCounter(); /** The total number of committable failures. */ Counter getNumCommittablesFailureCounter(); /** The total number of committable retry. */ Counter getNumCommittablesRetryCounter(); /** The total number of successful committables. */ Counter getNumCommittablesSuccessCounter(); /** The total number of already committed committables. */ Counter getNumCommittablesAlreadyCommittedCounter(); /** The pending committables. */ void setCurrentPendingCommittablesGauge(Gauge<Integer> currentPendingCommittablesGauge); }
SinkCommitterMetricGroup
java
spring-projects__spring-framework
spring-webflux/src/main/java/org/springframework/web/reactive/result/condition/ConsumesRequestCondition.java
{ "start": 1963, "end": 9114 }
class ____ extends AbstractRequestCondition<ConsumesRequestCondition> { private static final ConsumesRequestCondition EMPTY_CONDITION = new ConsumesRequestCondition(); private final List<ConsumeMediaTypeExpression> expressions; private boolean bodyRequired = true; /** * Creates a new instance from 0 or more "consumes" expressions. * @param consumes expressions with the syntax described in * {@link RequestMapping#consumes()}; if 0 expressions are provided, * the condition will match to every request */ public ConsumesRequestCondition(String... consumes) { this(consumes, null); } /** * Creates a new instance with "consumes" and "header" expressions. * "Header" expressions where the header name is not 'Content-Type' or have * no header value defined are ignored. If 0 expressions are provided in * total, the condition will match to every request * @param consumes as described in {@link RequestMapping#consumes()} * @param headers as described in {@link RequestMapping#headers()} */ public ConsumesRequestCondition(String @Nullable [] consumes, String @Nullable [] headers) { this.expressions = parseExpressions(consumes, headers); if (this.expressions.size() > 1) { Collections.sort(this.expressions); } } private static List<ConsumeMediaTypeExpression> parseExpressions(String @Nullable [] consumes, String @Nullable [] headers) { Set<ConsumeMediaTypeExpression> result = null; if (!ObjectUtils.isEmpty(headers)) { for (String header : headers) { HeadersRequestCondition.HeaderExpression expr = new HeadersRequestCondition.HeaderExpression(header); if ("Content-Type".equalsIgnoreCase(expr.name)) { result = (result != null ? result : new LinkedHashSet<>()); for (MediaType mediaType : MediaType.parseMediaTypes(expr.value)) { result.add(new ConsumeMediaTypeExpression(mediaType, expr.isNegated)); } } } } if (!ObjectUtils.isEmpty(consumes)) { result = (result != null ? 
result : new LinkedHashSet<>()); for (String consume : consumes) { result.add(new ConsumeMediaTypeExpression(consume)); } } return (result != null ? new ArrayList<>(result) : Collections.emptyList()); } /** * Private constructor for internal when creating matching conditions. */ private ConsumesRequestCondition(List<ConsumeMediaTypeExpression> expressions) { this.expressions = expressions; } /** * Return the contained MediaType expressions. */ public Set<MediaTypeExpression> getExpressions() { return new LinkedHashSet<>(this.expressions); } /** * Returns the media types for this condition excluding negated expressions. */ public Set<MediaType> getConsumableMediaTypes() { Set<MediaType> result = new LinkedHashSet<>(); for (ConsumeMediaTypeExpression expression : this.expressions) { if (!expression.isNegated()) { result.add(expression.getMediaType()); } } return result; } /** * Whether the condition has any media type expressions. */ @Override public boolean isEmpty() { return this.expressions.isEmpty(); } @Override protected Collection<ConsumeMediaTypeExpression> getContent() { return this.expressions; } @Override protected String getToStringInfix() { return " || "; } /** * Whether this condition should expect requests to have a body. * <p>By default this is set to {@code true} in which case it is assumed a * request body is required and this condition matches to the "Content-Type" * header or falls back on "Content-Type: application/octet-stream". * <p>If set to {@code false}, and the request does not have a body, then this * condition matches automatically, i.e. without checking expressions. * @param bodyRequired whether requests are expected to have a body * @since 5.2 */ public void setBodyRequired(boolean bodyRequired) { this.bodyRequired = bodyRequired; } /** * Return the setting for {@link #setBodyRequired(boolean)}. 
* @since 5.2 */ public boolean isBodyRequired() { return this.bodyRequired; } /** * Returns the "other" instance if it has any expressions; returns "this" * instance otherwise. Practically that means a method-level "consumes" * overrides a type-level "consumes" condition. */ @Override public ConsumesRequestCondition combine(ConsumesRequestCondition other) { return (!other.expressions.isEmpty() ? other : this); } /** * Checks if any of the contained media type expressions match the given * request 'Content-Type' header and returns an instance that is guaranteed * to contain matching expressions only. The match is performed via * {@link MediaType#includes(MediaType)}. * @param exchange the current exchange * @return the same instance if the condition contains no expressions; * or a new condition with matching expressions only; * or {@code null} if no expressions match. */ @Override public @Nullable ConsumesRequestCondition getMatchingCondition(ServerWebExchange exchange) { ServerHttpRequest request = exchange.getRequest(); if (CorsUtils.isPreFlightRequest(request)) { return EMPTY_CONDITION; } if (isEmpty()) { return this; } if (!hasBody(request) && !this.bodyRequired) { return EMPTY_CONDITION; } List<ConsumeMediaTypeExpression> result = getMatchingExpressions(exchange); return !CollectionUtils.isEmpty(result) ? 
new ConsumesRequestCondition(result) : null; } private boolean hasBody(ServerHttpRequest request) { String contentLength = request.getHeaders().getFirst(HttpHeaders.CONTENT_LENGTH); String transferEncoding = request.getHeaders().getFirst(HttpHeaders.TRANSFER_ENCODING); return StringUtils.hasText(transferEncoding) || (StringUtils.hasText(contentLength) && !contentLength.trim().equals("0")); } private @Nullable List<ConsumeMediaTypeExpression> getMatchingExpressions(ServerWebExchange exchange) { List<ConsumeMediaTypeExpression> result = null; for (ConsumeMediaTypeExpression expression : this.expressions) { if (expression.match(exchange)) { result = result != null ? result : new ArrayList<>(); result.add(expression); } } return result; } /** * Returns: * <ul> * <li>0 if the two conditions have the same number of expressions * <li>Less than 0 if "this" has more or more specific media type expressions * <li>Greater than 0 if "other" has more or more specific media type expressions * </ul> * <p>It is assumed that both instances have been obtained via * {@link #getMatchingCondition(ServerWebExchange)} and each instance contains * the matching consumable media type expression only or is otherwise empty. */ @Override public int compareTo(ConsumesRequestCondition other, ServerWebExchange exchange) { if (this.expressions.isEmpty() && other.expressions.isEmpty()) { return 0; } else if (this.expressions.isEmpty()) { return 1; } else if (other.expressions.isEmpty()) { return -1; } else { return this.expressions.get(0).compareTo(other.expressions.get(0)); } } /** * Parses and matches a single media type expression to a request's 'Content-Type' header. */ static
ConsumesRequestCondition
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/stats/CatalogColumnStatisticsDataLong.java
{ "start": 1049, "end": 3055 }
class ____ extends CatalogColumnStatisticsDataBase { /** mim value. */ private final Long min; /** max value. */ private final Long max; /** number of distinct values. */ private final Long ndv; public CatalogColumnStatisticsDataLong(Long min, Long max, Long ndv, Long nullCount) { super(nullCount); this.min = min; this.max = max; this.ndv = ndv; } public CatalogColumnStatisticsDataLong( Long min, Long max, Long ndv, Long nullCount, Map<String, String> properties) { super(nullCount, properties); this.min = min; this.max = max; this.ndv = ndv; } public Long getMin() { return min; } public Long getMax() { return max; } public Long getNdv() { return ndv; } public CatalogColumnStatisticsDataLong copy() { return new CatalogColumnStatisticsDataLong( min, max, ndv, getNullCount(), new HashMap<>(getProperties())); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } CatalogColumnStatisticsDataLong that = (CatalogColumnStatisticsDataLong) o; return Objects.equals(min, that.min) && Objects.equals(max, that.max) && Objects.equals(ndv, that.ndv) && Objects.equals(getNullCount(), that.getNullCount()); } @Override public int hashCode() { return Objects.hash(min, max, ndv, getNullCount()); } @Override public String toString() { return "CatalogColumnStatisticsDataLong{" + "min=" + min + ", max=" + max + ", ndv=" + ndv + ", nullCount=" + getNullCount() + '}'; } }
CatalogColumnStatisticsDataLong
java
spring-projects__spring-framework
spring-jms/src/main/java/org/springframework/jms/support/destination/JmsDestinationAccessor.java
{ "start": 1452, "end": 4980 }
class ____ extends JmsAccessor { /** * Timeout value indicating that a receive operation should * check if a message is immediately available without blocking. * @since 4.3 */ public static final long RECEIVE_TIMEOUT_NO_WAIT = -1; /** * Timeout value indicating a blocking receive without timeout. * @since 4.3 */ public static final long RECEIVE_TIMEOUT_INDEFINITE_WAIT = 0; private DestinationResolver destinationResolver = new SimpleDestinationResolver(); private boolean pubSubDomain = false; /** * Set the {@link DestinationResolver} that is to be used to resolve * {@link jakarta.jms.Destination} references for this accessor. * <p>The default resolver is a SimpleDestinationResolver. Specify a * JndiDestinationResolver for resolving destination names as JNDI locations. * @see org.springframework.jms.support.destination.SimpleDestinationResolver * @see org.springframework.jms.support.destination.JndiDestinationResolver */ public void setDestinationResolver(DestinationResolver destinationResolver) { Assert.notNull(destinationResolver, "DestinationResolver must not be null"); this.destinationResolver = destinationResolver; } /** * Return the DestinationResolver for this accessor (never {@code null}). */ public DestinationResolver getDestinationResolver() { return this.destinationResolver; } /** * Configure the destination accessor with knowledge of the JMS domain used. * Default is Point-to-Point (Queues). * <p>This setting primarily indicates what type of destination to resolve * if dynamic destinations are enabled. * @param pubSubDomain "true" for the Publish/Subscribe domain ({@link jakarta.jms.Topic Topics}), * "false" for the Point-to-Point domain ({@link jakarta.jms.Queue Queues}) * @see #setDestinationResolver */ public void setPubSubDomain(boolean pubSubDomain) { this.pubSubDomain = pubSubDomain; } /** * Return whether the Publish/Subscribe domain ({@link jakarta.jms.Topic Topics}) is used. 
* Otherwise, the Point-to-Point domain ({@link jakarta.jms.Queue Queues}) is used. */ public boolean isPubSubDomain() { return this.pubSubDomain; } /** * Resolve the given destination name into a JMS {@link Destination}, * via this accessor's {@link DestinationResolver}. * @param session the current JMS {@link Session} * @param destinationName the name of the destination * @return the located {@link Destination} * @throws jakarta.jms.JMSException if resolution failed * @see #setDestinationResolver */ protected Destination resolveDestinationName(Session session, String destinationName) throws JMSException { return getDestinationResolver().resolveDestinationName(session, destinationName, isPubSubDomain()); } /** * Actually receive a message from the given consumer. * @param consumer the JMS MessageConsumer to receive with * @param timeout the receive timeout (a negative value indicates * a no-wait receive; 0 indicates an indefinite wait attempt) * @return the JMS Message received, or {@code null} if none * @throws JMSException if thrown by JMS API methods * @since 4.3 * @see #RECEIVE_TIMEOUT_NO_WAIT * @see #RECEIVE_TIMEOUT_INDEFINITE_WAIT */ protected @Nullable Message receiveFromConsumer(MessageConsumer consumer, long timeout) throws JMSException { if (timeout > 0) { return consumer.receive(timeout); } else if (timeout < 0) { return consumer.receiveNoWait(); } else { return consumer.receive(); } } }
JmsDestinationAccessor
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/mapper/blockloader/docvalues/fn/Utf8CodePointsFromOrdsBlockLoader.java
{ "start": 15009, "end": 22010 }
class ____ extends BlockDocValuesReader { private final Warnings warnings; private final SortedSetDocValues ordinals; ImmediateOrdinals(Warnings warnings, SortedSetDocValues ordinals) { this.ordinals = ordinals; this.warnings = warnings; } @Override public Block read(BlockFactory factory, Docs docs, int offset, boolean nullsFiltered) throws IOException { if (docs.count() - offset == 1) { return blockForSingleDoc(factory, docs.get(offset)); } int[] ords = readOrds(ordinals, warnings, factory, docs, offset); int[] sortedOrds = null; int[] counts = null; try { sortedOrds = sortedOrds(factory, ords); int compactedLength = compactSorted(sortedOrds); counts = counts(factory, sortedOrds, compactedLength); try (IntBuilder builder = factory.ints(ords.length)) { for (int ord : ords) { if (ord >= 0) { builder.appendInt(counts[Arrays.binarySearch(sortedOrds, 0, compactedLength, ord)]); } else { builder.appendNull(); } } return builder.build(); } } finally { factory.adjustBreaker(-RamUsageEstimator.shallowSizeOf(ords)); if (sortedOrds != null) { factory.adjustBreaker(-RamUsageEstimator.shallowSizeOf(sortedOrds)); } if (counts != null) { factory.adjustBreaker(-RamUsageEstimator.shallowSizeOf(counts)); } } } @Override public void read(int docId, StoredFields storedFields, Builder builder) throws IOException { read(docId, (IntBuilder) builder); } private void read(int docId, IntBuilder builder) throws IOException { if (ordinals.advanceExact(docId) == false) { builder.appendNull(); return; } if (ordinals.docValueCount() != 1) { registerSingleValueWarning(warnings); builder.appendNull(); return; } builder.appendInt(codePointsAtOrd(ordinals.nextOrd())); } @Override public int docId() { return ordinals.docID(); } @Override public String toString() { return "Utf8CodePointsFromOrds.Immediate"; } private Block blockForSingleDoc(BlockFactory factory, int docId) throws IOException { if (ordinals.advanceExact(docId) == false) { return factory.constantNulls(1); } if (ordinals.docValueCount() 
== 1) { return factory.constantInt(codePointsAtOrd(ordinals.nextOrd()), 1); } registerSingleValueWarning(warnings); return factory.constantNulls(1); } /** * Builds a sorted copy of the loaded ordinals. */ private int[] sortedOrds(BlockFactory factory, int[] ords) { factory.adjustBreaker(RamUsageEstimator.sizeOf(ords)); int[] sortedOrds = ords.clone(); Arrays.sort(sortedOrds); return sortedOrds; } /** * Compacts the array of sorted ordinals into an array of populated ({@code >= 0}), unique ordinals. * @return the length of the unique array */ private int compactSorted(int[] sortedOrds) { int c = 0; int i = 0; while (i < sortedOrds.length && sortedOrds[i] < 0) { i++; } while (i < sortedOrds.length) { if (false == (i > 0 && sortedOrds[i - 1] == sortedOrds[i])) { sortedOrds[c++] = sortedOrds[i]; } i++; } return c; } private int[] counts(BlockFactory factory, int[] compactedSortedOrds, int compactedLength) throws IOException { long size = sizeOfArray(compactedLength); factory.adjustBreaker(size); int[] counts = new int[compactedLength]; for (int i = 0; i < counts.length; i++) { counts[i] = codePointsAtOrd(compactedSortedOrds[i]); } return counts; } /** * Get the count of code points at the ord. * The {@code ord} must be {@code >= 0} or this will fail. */ private int codePointsAtOrd(long ord) throws IOException { return UnicodeUtil.codePointCount(ordinals.lookupOrd(ord)); } } /** * Load an ordinal for each position. 
Three cases: * <ul> * <li>There is a single ordinal at this position - load the ordinals value in to the array</li> * <li>There are no values at this position - load a -1 - we'll skip loading that later</li> * <li>There are <strong>many</strong> values at this position - load a -1 which we'll skip like above - and emit a warning</li> * </ul> */ private static int[] readOrds(SortedSetDocValues ordinals, Warnings warnings, BlockFactory factory, Docs docs, int offset) throws IOException { int count = docs.count() - offset; long size = sizeOfArray(count); factory.adjustBreaker(size); int[] ords = null; try { ords = new int[docs.count() - offset]; for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (ordinals.advanceExact(doc) == false) { ords[i] = -1; continue; } if (ordinals.docValueCount() != 1) { registerSingleValueWarning(warnings); ords[i] = -1; continue; } ords[i] = Math.toIntExact(ordinals.nextOrd()); } int[] result = ords; ords = null; return result; } finally { if (ords != null) { factory.adjustBreaker(-size); } } } private static Block buildFromCache(BlockFactory factory, int[] cache, int[] ords) { try (IntBuilder builder = factory.ints(ords.length)) { for (int ord : ords) { if (ord >= 0) { builder.appendInt(cache[ord]); } else { builder.appendNull(); } } return builder.build(); } } private static long sizeOfArray(int count) { return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * (long) count); } }
ImmediateOrdinals
java
google__guava
android/guava/src/com/google/common/collect/MultimapBuilder.java
{ "start": 17886, "end": 18381 }
class ____< K0 extends @Nullable Object, V0 extends @Nullable Object> extends SetMultimapBuilder<K0, V0> { SortedSetMultimapBuilder() {} @Override public abstract <K extends K0, V extends V0> SortedSetMultimap<K, V> build(); @Override public <K extends K0, V extends V0> SortedSetMultimap<K, V> build( Multimap<? extends K, ? extends V> multimap) { return (SortedSetMultimap<K, V>) super.<K, V>build(multimap); } } }
SortedSetMultimapBuilder
java
apache__camel
components/camel-kamelet/src/test/java/org/apache/camel/component/kamelet/KameletSourceBridgeErrorHandlerTest.java
{ "start": 3194, "end": 3805 }
class ____ extends DefaultEndpoint { public MyEndpoint(String endpointUri, Component component) { super(endpointUri, component); } @Override public Producer createProducer() { return null; } @Override public Consumer createConsumer(Processor processor) throws Exception { Consumer answer = new MyConsumer(this, processor); configureConsumer(answer); return answer; } @Override public boolean isSingleton() { return true; } } public
MyEndpoint
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockChecksumHelper.java
{ "start": 15042, "end": 27938 }
class ____ { private final DatanodeInfo dn; private final Token<BlockTokenIdentifier> token; LiveBlockInfo(DatanodeInfo dn, Token<BlockTokenIdentifier> token) { this.dn = dn; this.token = token; } DatanodeInfo getDn() { return dn; } Token<BlockTokenIdentifier> getToken() { return token; } } @Override void compute() throws IOException { assert datanodes.length == blockIndices.length; Map<Byte, LiveBlockInfo> liveDns = new HashMap<>(datanodes.length); int blkIndxLen = blockIndices.length; int numDataUnits = ecPolicy.getNumDataUnits(); // Prepare live datanode list. Missing data blocks will be reconstructed // and recalculate checksum. for (int idx = 0; idx < blkIndxLen; idx++) { liveDns.put(blockIndices[idx], new LiveBlockInfo(datanodes[idx], blockTokens[idx])); } long checksumLen = 0; for (int idx = 0; idx < numDataUnits && idx < blkIndxLen; idx++) { // Before populating the blockChecksum at this index, record the byte // offset where it will begin. blockChecksumPositions[idx] = blockChecksumBuf.getLength(); ExtendedBlock block = null; try { block = getInternalBlock(numDataUnits, idx); LiveBlockInfo liveBlkInfo = liveDns.get((byte) idx); if (liveBlkInfo == null) { // reconstruct block and calculate checksum for missing node recalculateChecksum(idx, block.getNumBytes()); } else { try { checksumBlock(block, idx, liveBlkInfo.getToken(), liveBlkInfo.getDn()); } catch (IOException ioe) { String msg = String.format("Exception while reading checksum for block %s at index " + "%d in blockGroup %s", block, idx, blockGroup); LOG.warn(msg, ioe); // reconstruct block and calculate checksum for the failed node recalculateChecksum(idx, block.getNumBytes()); } } checksumLen += block.getNumBytes(); if (checksumLen >= requestedNumBytes) { break; // done with the computation, simply return. 
} } catch (IOException e) { LOG.warn("Failed to get the checksum for block {} at index {} " + "in blockGroup {}", block, idx, blockGroup, e); throw e; } } BlockChecksumType type = getBlockChecksumOptions().getBlockChecksumType(); switch (type) { case MD5CRC: MD5Hash md5out = MD5Hash.digest(blockChecksumBuf.getData()); setOutBytes(md5out.getDigest()); break; case COMPOSITE_CRC: byte[] digest = reassembleNonStripedCompositeCrc(checksumLen); setOutBytes(digest); break; default: throw new IOException(String.format( "Unrecognized BlockChecksumType: %s", type)); } } /** * @param checksumLen The sum of bytes associated with the block checksum * data being digested into a block-group level checksum. */ private byte[] reassembleNonStripedCompositeCrc(long checksumLen) throws IOException { int numDataUnits = ecPolicy.getNumDataUnits(); CrcComposer crcComposer = CrcComposer.newCrcComposer( getCrcType(), ecPolicy.getCellSize()); // This should hold all the cell-granularity checksums of blk0 // followed by all cell checksums of blk1, etc. We must unstripe the // cell checksums in order of logical file bytes. Also, note that the // length of this array may not equal the the number of actually valid // bytes in the buffer (blockChecksumBuf.getLength()). byte[] flatBlockChecksumData = blockChecksumBuf.getData(); // Initialize byte-level cursors to where each block's checksum begins // inside the combined flattened buffer. int[] blockChecksumCursors = new int[numDataUnits]; for (int idx = 0; idx < numDataUnits; ++idx) { blockChecksumCursors[idx] = blockChecksumPositions[idx]; } // Reassemble cell-level CRCs in the right order. 
long numFullCells = checksumLen / ecPolicy.getCellSize(); for (long cellIndex = 0; cellIndex < numFullCells; ++cellIndex) { int blockIndex = (int) (cellIndex % numDataUnits); int checksumCursor = blockChecksumCursors[blockIndex]; int cellCrc = CrcUtil.readInt( flatBlockChecksumData, checksumCursor); blockChecksumCursors[blockIndex] += 4; crcComposer.update(cellCrc, ecPolicy.getCellSize()); } if (checksumLen % ecPolicy.getCellSize() != 0) { // Final partial cell. int blockIndex = (int) (numFullCells % numDataUnits); int checksumCursor = blockChecksumCursors[blockIndex]; int cellCrc = CrcUtil.readInt( flatBlockChecksumData, checksumCursor); blockChecksumCursors[blockIndex] += 4; crcComposer.update(cellCrc, checksumLen % ecPolicy.getCellSize()); } byte[] digest = crcComposer.digest(); if (LOG.isDebugEnabled()) { LOG.debug("flatBlockChecksumData.length={}, numDataUnits={}, " + "checksumLen={}, digest={}", flatBlockChecksumData.length, numDataUnits, checksumLen, CrcUtil.toSingleCrcString(digest)); } return digest; } private ExtendedBlock getInternalBlock(int numDataUnits, int idx) { // Sets requested number of bytes in blockGroup which is required to // construct the internal block for computing checksum. long actualNumBytes = blockGroup.getNumBytes(); blockGroup.setNumBytes(requestedNumBytes); ExtendedBlock block = StripedBlockUtil.constructInternalBlock(blockGroup, ecPolicy.getCellSize(), numDataUnits, idx); // Set back actualNumBytes value in blockGroup. 
blockGroup.setNumBytes(actualNumBytes); return block; } private void checksumBlock(ExtendedBlock block, int blockIdx, Token<BlockTokenIdentifier> blockToken, DatanodeInfo targetDatanode) throws IOException { int timeout = getDatanode().getDnConf().getEcChecksumSocketTimeout(); try (IOStreamPair pair = getDatanode().connectToDN(targetDatanode, timeout, block, blockToken)) { LOG.debug("write to {}: {}, block={}", getDatanode(), Op.BLOCK_CHECKSUM, block); // get block checksum // A BlockGroupCheckum of type COMPOSITE_CRC uses underlying // BlockChecksums also of type COMPOSITE_CRC but with // stripeLength == ecPolicy.getCellSize(). BlockChecksumOptions childOptions; BlockChecksumType groupChecksumType = getBlockChecksumOptions().getBlockChecksumType(); switch (groupChecksumType) { case MD5CRC: childOptions = getBlockChecksumOptions(); break; case COMPOSITE_CRC: childOptions = new BlockChecksumOptions( BlockChecksumType.COMPOSITE_CRC, ecPolicy.getCellSize()); break; default: throw new IOException( "Unknown BlockChecksumType: " + groupChecksumType); } createSender(pair).blockChecksum(block, blockToken, childOptions); final DataTransferProtos.BlockOpResponseProto reply = DataTransferProtos.BlockOpResponseProto.parseFrom( PBHelperClient.vintPrefixed(pair.in)); String logInfo = "for block " + block + " from datanode " + targetDatanode; DataTransferProtoUtil.checkBlockOpStatus(reply, logInfo); DataTransferProtos.OpBlockChecksumResponseProto checksumData = reply.getChecksumResponse(); // read crc-type final DataChecksum.Type ct; if (checksumData.hasCrcType()) { ct = PBHelperClient.convert(checksumData.getCrcType()); } else { LOG.debug("Retrieving checksum from an earlier-version DataNode: " + "inferring checksum by reading first byte"); ct = DataChecksum.Type.DEFAULT; } setOrVerifyChecksumProperties(blockIdx, checksumData.getBytesPerCrc(), checksumData.getCrcPerBlock(), ct); switch (groupChecksumType) { case MD5CRC: //read md5 final MD5Hash md5 = new 
MD5Hash(checksumData.getBlockChecksum().toByteArray()); md5.write(blockChecksumBuf); LOG.debug("got reply from datanode:{}, md5={}", targetDatanode, md5); break; case COMPOSITE_CRC: BlockChecksumType returnedType = PBHelperClient.convert( checksumData.getBlockChecksumOptions().getBlockChecksumType()); if (returnedType != BlockChecksumType.COMPOSITE_CRC) { throw new IOException(String.format( "Unexpected blockChecksumType '%s', expecting COMPOSITE_CRC", returnedType)); } byte[] checksumBytes = checksumData.getBlockChecksum().toByteArray(); blockChecksumBuf.write(checksumBytes, 0, checksumBytes.length); if (LOG.isDebugEnabled()) { LOG.debug("got reply from datanode:{} for blockIdx:{}, checksum:{}", targetDatanode, blockIdx, CrcUtil.toMultiCrcString(checksumBytes)); } break; default: throw new IOException( "Unknown BlockChecksumType: " + groupChecksumType); } } } /** * Reconstruct this data block and recalculate checksum. * * @param errBlkIndex * error index to be reconstructed and recalculate checksum. * @param blockLength * number of bytes in the block to compute checksum. * @throws IOException */ private void recalculateChecksum(int errBlkIndex, long blockLength) throws IOException { LOG.debug("Recalculate checksum for the missing/failed block index {}", errBlkIndex); byte[] errIndices = new byte[1]; errIndices[0] = (byte) errBlkIndex; StripedReconstructionInfo stripedReconInfo = new StripedReconstructionInfo( blockGroup, ecPolicy, blockIndices, datanodes, errIndices); BlockChecksumType groupChecksumType = getBlockChecksumOptions().getBlockChecksumType(); try (StripedBlockChecksumReconstructor checksumRecon = groupChecksumType == BlockChecksumType.COMPOSITE_CRC ? 
new StripedBlockChecksumCompositeCrcReconstructor( getDatanode().getErasureCodingWorker(), stripedReconInfo, blockChecksumBuf, blockLength) : new StripedBlockChecksumMd5CrcReconstructor( getDatanode().getErasureCodingWorker(), stripedReconInfo, blockChecksumBuf, blockLength)) { checksumRecon.reconstruct(); DataChecksum checksum = checksumRecon.getChecksum(); long crcPerBlock = checksum.getChecksumSize() <= 0 ? 0 : checksumRecon.getChecksumDataLen() / checksum.getChecksumSize(); setOrVerifyChecksumProperties(errBlkIndex, checksum.getBytesPerChecksum(), crcPerBlock, checksum.getChecksumType()); LOG.debug("Recalculated checksum for the block index:{}, checksum={}", errBlkIndex, checksumRecon.getDigestObject()); } } private void setOrVerifyChecksumProperties(int blockIdx, int bpc, final long cpb, DataChecksum.Type ct) throws IOException { //read byte-per-checksum if (blockIdx == 0) { //first block setBytesPerCRC(bpc); } else if (bpc != getBytesPerCRC()) { throw new IOException("Byte-per-checksum not matched: bpc=" + bpc + " but bytesPerCRC=" + getBytesPerCRC()); } //read crc-per-block if (blockIdx == 0) { setCrcPerBlock(cpb); } if (blockIdx == 0) { // first block setCrcType(ct); } else if (getCrcType() != DataChecksum.Type.MIXED && getCrcType() != ct) { BlockChecksumType groupChecksumType = getBlockChecksumOptions().getBlockChecksumType(); if (groupChecksumType == BlockChecksumType.COMPOSITE_CRC) { throw new IOException(String.format( "BlockChecksumType COMPOSITE_CRC doesn't support MIXED " + "underlying types; previous block was %s, next block is %s", getCrcType(), ct)); } else { setCrcType(DataChecksum.Type.MIXED); } } if (blockIdx == 0) { LOG.debug("set bytesPerCRC={}, crcPerBlock={}", getBytesPerCRC(), getCrcPerBlock()); } } } }
LiveBlockInfo
java
elastic__elasticsearch
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java
{ "start": 42784, "end": 44894 }
class ____ extends IndexInput { private final InputStream inputStream; private final long length; long filePointer = 0L; IndexInputWrapper(InputStream inputStream, long length) { super(""); this.inputStream = inputStream; this.length = length; } @Override public byte readByte() throws IOException { if (isCancelledSupplier.getAsBoolean()) { throw new TaskCancelledException("task cancelled"); } final var read = inputStream.read(); if (read == -1) { throw new EOFException(); } filePointer += 1; blobBytesVerified.incrementAndGet(); return (byte) read; } @Override public void readBytes(byte[] b, int offset, int len) throws IOException { while (len > 0) { if (isCancelledSupplier.getAsBoolean()) { throw new TaskCancelledException("task cancelled"); } final var read = inputStream.read(b, offset, len); if (read == -1) { throw new EOFException(); } filePointer += read; blobBytesVerified.addAndGet(read); len -= read; offset += read; } } @Override public void close() {} @Override public long getFilePointer() { return filePointer; } @Override public void seek(long pos) { if (filePointer != pos) { assert false : "cannot seek"; throw new UnsupportedOperationException("seek"); } } @Override public long length() { return length; } @Override public IndexInput slice(String sliceDescription, long offset, long length) { assert false; throw new UnsupportedOperationException("slice"); } } private static
IndexInputWrapper
java
apache__camel
components/camel-jooq/src/main/java/org/apache/camel/component/jooq/JooqEndpoint.java
{ "start": 1574, "end": 3893 }
class ____ extends ScheduledPollEndpoint { private Expression producerExpression; @UriParam private JooqConfiguration configuration; public JooqEndpoint(String uri, JooqComponent component, JooqConfiguration configuration) { super(uri, component); this.configuration = configuration; } @Override public Producer createProducer() { return new JooqProducer(this, getProducerExpression()); } public JooqConfiguration getConfiguration() { return configuration; } public void setConfiguration(JooqConfiguration configuration) { this.configuration = configuration; } public Expression getProducerExpression() { if (producerExpression == null) { producerExpression = createProducerExpression(); } return producerExpression; } protected Expression createProducerExpression() { final Class<?> type; switch (configuration.getOperation()) { case NONE: type = configuration.getEntityType(); break; case EXECUTE: type = Query.class; break; case FETCH: type = ResultQuery.class; break; default: throw new UnsupportedOperationException("Operation: " + configuration.getOperation()); } return new Expression() { public Object evaluate(Exchange exchange, Class asType) { Object answer = exchange.getIn().getBody(type); if (answer == null) { Object defaultValue = exchange.getIn().getBody(); if (defaultValue != null) { throw RuntimeCamelException .wrapRuntimeCamelException(new NoTypeConversionAvailableException(defaultValue, type)); } answer = exchange.getContext().getInjector().newInstance(type); } return answer; } }; } @Override public Consumer createConsumer(Processor processor) throws Exception { JooqConsumer consumer = new JooqConsumer(this, processor); configureConsumer(consumer); return consumer; } }
JooqEndpoint
java
grpc__grpc-java
xds/src/test/java/io/grpc/xds/XdsNameResolverTest.java
{ "start": 133283, "end": 134000 }
class ____<ReqT, RespT> extends NoopClientCall<ReqT, RespT> { // CallOptions actually received from the channel when the call is created. final CallOptions callOptions; ClientCall.Listener<RespT> listener; TestCall(CallOptions callOptions) { this.callOptions = callOptions; } @Override public void start(ClientCall.Listener<RespT> listener, Metadata headers) { this.listener = listener; } void deliverResponseHeaders() { listener.onHeaders(new Metadata()); } void deliverCompleted() { listener.onClose(Status.OK, new Metadata()); } void deliverErrorStatus() { listener.onClose(Status.UNAVAILABLE, new Metadata()); } } }
TestCall
java
quarkusio__quarkus
extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/ConfigActiveFalseDefaultDatasourceStaticInjectionTest.java
{ "start": 445, "end": 1747 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .overrideConfigKey("quarkus.datasource.active", "false") .assertException(e -> assertThat(e) // Can't use isInstanceOf due to weird classloading in tests .satisfies(t -> assertThat(t.getClass().getName()).isEqualTo(InactiveBeanException.class.getName())) .hasMessageContainingAll("Datasource '<default>' was deactivated through configuration properties.", "To avoid this exception while keeping the bean inactive", // Message from Arc with generic hints "To activate the datasource, set configuration property 'quarkus.datasource.active'" + " to 'true' and configure datasource '<default>'", "Refer to https://quarkus.io/guides/datasource for guidance.", "This bean is injected into", MyBean.class.getName() + "#pool")); @Inject MyBean myBean; @Test public void test() { Assertions.fail("Startup should have failed"); } @ApplicationScoped public static
ConfigActiveFalseDefaultDatasourceStaticInjectionTest
java
apache__camel
components/camel-thrift/src/test/java/org/apache/camel/dataformat/thrift/ThriftMarshalAndUnmarshalSpringTest.java
{ "start": 1521, "end": 3976 }
class ____ extends CamelSpringTestSupport { private static final String WORK_TEST_COMMENT = "This is a test thrift data"; private static final int WORK_TEST_NUM1 = 1; private static final int WORK_TEST_NUM2 = 100; private static final Operation WORK_TEST_OPERATION = Operation.MULTIPLY; @Override protected ClassPathXmlApplicationContext createApplicationContext() { return new ClassPathXmlApplicationContext("org/apache/camel/dataformat/thrift/springDataFormat.xml"); } @Test public void testMarshalAndUnmarshalWithDataFormat() throws Exception { marshalAndUnmarshal("direct:in", "direct:back"); } @Test public void testMarshalAndUnmarshalWithDSL1() throws Exception { marshalAndUnmarshal("direct:marshal", "direct:unmarshalA"); } @Test public void testMarshalAndUnmarshalWithDSL2() throws Exception { marshalAndUnmarshal("direct:marshal", "direct:unmarshalB"); } @Test public void testMarshalAndUnmarshalWithDSL3() { try { context.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:unmarshalC").unmarshal().thrift(new CamelException("wrong instance")).to("mock:reverse"); } }); fail("Expect the exception here"); } catch (Exception ex) { assertTrue(ex instanceof FailedToCreateRouteException, "Expect FailedToCreateRouteException"); } } private void marshalAndUnmarshal(String inURI, String outURI) throws Exception { Work input = new Work(); input.num1 = WORK_TEST_NUM1; input.num2 = WORK_TEST_NUM2; input.op = WORK_TEST_OPERATION; input.comment = WORK_TEST_COMMENT; MockEndpoint mock = getMockEndpoint("mock:reverse"); mock.expectedMessageCount(1); mock.message(0).body().isInstanceOf(Work.class); mock.message(0).body().isEqualTo(input); Object marshalled = template.requestBody(inURI, input); template.sendBody(outURI, marshalled); mock.assertIsSatisfied(); Work output = mock.getReceivedExchanges().get(0).getIn().getBody(Work.class); assertEquals(WORK_TEST_COMMENT, output.getComment()); assertEquals(WORK_TEST_OPERATION, output.getOp()); 
assertEquals(WORK_TEST_NUM2, output.getNum2()); } }
ThriftMarshalAndUnmarshalSpringTest
java
spring-cloud__spring-cloud-gateway
spring-cloud-gateway-server-webmvc/src/test/java/org/springframework/cloud/gateway/server/mvc/config/GatewayMvcPropertiesBeanDefinitionRegistrarTests.java
{ "start": 9476, "end": 9513 }
class ____ { } static abstract
Config
java
google__auto
value/src/it/gwtserializer/src/test/java/com/google/auto/value/client/GwtSerializerTest.java
{ "start": 4708, "end": 4975 }
class ____ { public abstract String message(); public abstract SimpleWithBuilder simple(); public static Builder builder() { return new AutoValue_GwtSerializerTest_NestedWithBuilder.Builder(); } @AutoValue.Builder public
NestedWithBuilder
java
google__guava
android/guava/src/com/google/common/collect/MoreCollectors.java
{ "start": 3636, "end": 5734 }
class ____ { static final int MAX_EXTRAS = 4; @Nullable Object element; List<Object> extras; ToOptionalState() { element = null; extras = emptyList(); } IllegalArgumentException multiples(boolean overflow) { StringBuilder sb = new StringBuilder().append("expected one element but was: <").append(element); for (Object o : extras) { sb.append(", ").append(o); } if (overflow) { sb.append(", ..."); } sb.append('>'); throw new IllegalArgumentException(sb.toString()); } void add(Object o) { checkNotNull(o); if (element == null) { this.element = o; } else if (extras.isEmpty()) { // Replace immutable empty list with mutable list. extras = new ArrayList<>(MAX_EXTRAS); extras.add(o); } else if (extras.size() < MAX_EXTRAS) { extras.add(o); } else { throw multiples(true); } } ToOptionalState combine(ToOptionalState other) { if (element == null) { return other; } else if (other.element == null) { return this; } else { if (extras.isEmpty()) { // Replace immutable empty list with mutable list. extras = new ArrayList<>(); } extras.add(other.element); extras.addAll(other.extras); if (extras.size() > MAX_EXTRAS) { extras.subList(MAX_EXTRAS, extras.size()).clear(); throw multiples(true); } return this; } } @IgnoreJRERequirement // see enclosing class (whose annotation Animal Sniffer ignores here...) Optional<Object> getOptional() { if (extras.isEmpty()) { return Optional.ofNullable(element); } else { throw multiples(false); } } Object getElement() { if (element == null) { throw new NoSuchElementException(); } else if (extras.isEmpty()) { return element; } else { throw multiples(false); } } } private MoreCollectors() {} }
ToOptionalState
java
apache__avro
lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
{ "start": 1426, "end": 3615 }
class ____ implements Tool { static final JsonFactory FACTORY = new JsonFactory(); private JsonGenerator generator; @Override public String getName() { return "trevni_meta"; } @Override public String getShortDescription() { return "Dumps a Trevni file's metadata as JSON."; } @Override public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception { String filename; boolean pretty = false; if (args.size() == 2 && "-pretty".equals(args.get(0))) { pretty = true; filename = args.get(1); } else if (args.size() == 1) { filename = args.get(0); } else { err.println("Usage: [-pretty] input"); return 1; } dump(TrevniUtil.input(filename), out, pretty); return 0; } /** Read a Trevni file and print each row as a JSON object. */ public void dump(Input input, PrintStream out, boolean pretty) throws IOException { this.generator = FACTORY.createGenerator(out, JsonEncoding.UTF8); if (pretty) { generator.useDefaultPrettyPrinter(); } else { // ensure newline separation MinimalPrettyPrinter pp = new MinimalPrettyPrinter(); pp.setRootValueSeparator(System.getProperty("line.separator")); generator.setPrettyPrinter(pp); } ColumnFileReader reader = new ColumnFileReader(input); generator.writeStartObject(); generator.writeNumberField("rowCount", reader.getRowCount()); generator.writeNumberField("columnCount", reader.getColumnCount()); generator.writeFieldName("metadata"); dump(reader.getMetaData()); generator.writeFieldName("columns"); generator.writeStartArray(); for (ColumnMetaData c : reader.getColumnMetaData()) dump(c); generator.writeEndArray(); generator.writeEndObject(); generator.flush(); out.println(); reader.close(); } private void dump(MetaData<?> meta) throws IOException { generator.writeStartObject(); for (Map.Entry<String, byte[]> e : meta.entrySet()) generator.writeStringField(e.getKey(), new String(e.getValue(), StandardCharsets.ISO_8859_1)); generator.writeEndObject(); } }
TrevniMetadataTool
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/manytomany/generic/ManyToManyNonGenericTest.java
{ "start": 2431, "end": 3231 }
class ____ { @Id @GeneratedValue(strategy = GenerationType.UUID) public UUID id; @ManyToOne(optional = false) @JoinColumn(name = "TREE_ID") public NodeTree tree; @ManyToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST, CascadeType.DETACH}) @JoinTable(name = "NODE_CHILDREN", joinColumns = {@JoinColumn(name = "TREE_ID", referencedColumnName = "TREE_ID"), @JoinColumn(name = "NODE_ID", referencedColumnName = "ID")}, inverseJoinColumns = {@JoinColumn(name = "CHILD_ID", referencedColumnName = "ID")} ) private final Set<Node> children = new HashSet<>(); public Set<Node> getChildren() { return children; } @Override public String toString() { return "node [%s] parent of %s".formatted(id, children.stream().map(n -> n.id).toList()); } } }
Node
java
spring-projects__spring-boot
module/spring-boot-webflux/src/main/java/org/springframework/boot/webflux/actuate/endpoint/web/AbstractWebFluxEndpointHandlerMapping.java
{ "start": 12718, "end": 18285 }
class ____ implements ReactiveWebOperation { private static final String PATH_SEPARATOR = AntPathMatcher.DEFAULT_PATH_SEPARATOR; private final WebOperation operation; private final OperationInvoker invoker; private final Supplier<Mono<? extends SecurityContext>> securityContextSupplier; private ReactiveWebOperationAdapter(WebOperation operation) { this.operation = operation; this.invoker = getInvoker(operation); this.securityContextSupplier = getSecurityContextSupplier(); } private OperationInvoker getInvoker(WebOperation operation) { OperationInvoker invoker = operation::invoke; if (operation.isBlocking()) { return new ElasticSchedulerInvoker(invoker); } return new ExceptionCapturingInvoker(invoker); } private Supplier<Mono<? extends SecurityContext>> getSecurityContextSupplier() { if (ClassUtils.isPresent("org.springframework.security.core.context.ReactiveSecurityContextHolder", getClass().getClassLoader())) { return this::springSecurityContext; } return this::emptySecurityContext; } Mono<? 
extends SecurityContext> springSecurityContext() { return ReactiveSecurityContextHolder.getContext() .map((securityContext) -> new ReactiveSecurityContext(securityContext.getAuthentication())) .switchIfEmpty(Mono.just(new ReactiveSecurityContext(null))); } Mono<SecurityContext> emptySecurityContext() { return Mono.just(SecurityContext.NONE); } @Override public Mono<ResponseEntity<Object>> handle(ServerWebExchange exchange, @Nullable Map<String, String> body) { Map<String, Object> arguments = getArguments(exchange, body); OperationArgumentResolver serverNamespaceArgumentResolver = OperationArgumentResolver .of(WebServerNamespace.class, () -> WebServerNamespace .from(WebServerApplicationContext.getServerNamespace(exchange.getApplicationContext()))); return this.securityContextSupplier.get() .map((securityContext) -> new InvocationContext(securityContext, arguments, serverNamespaceArgumentResolver, new ProducibleOperationArgumentResolver( () -> exchange.getRequest().getHeaders().get("Accept")))) .flatMap((invocationContext) -> handleResult((Publisher<?>) this.invoker.invoke(invocationContext), exchange.getRequest().getMethod())); } private Map<String, Object> getArguments(ServerWebExchange exchange, @Nullable Map<String, String> body) { Map<String, Object> arguments = new LinkedHashMap<>(getTemplateVariables(exchange)); String matchAllRemainingPathSegmentsVariable = this.operation.getRequestPredicate() .getMatchAllRemainingPathSegmentsVariable(); if (matchAllRemainingPathSegmentsVariable != null) { arguments.put(matchAllRemainingPathSegmentsVariable, getRemainingPathSegments(exchange)); } if (body != null) { arguments.putAll(body); } exchange.getRequest() .getQueryParams() .forEach((name, values) -> arguments.put(name, (values.size() != 1) ? 
values : values.get(0))); return arguments; } private Object getRemainingPathSegments(ServerWebExchange exchange) { PathPattern pathPattern = exchange.getAttribute(HandlerMapping.BEST_MATCHING_PATTERN_ATTRIBUTE); Assert.state(pathPattern != null, "'pathPattern' must not be null"); if (pathPattern.hasPatternSyntax()) { String remainingSegments = pathPattern .extractPathWithinPattern(exchange.getRequest().getPath().pathWithinApplication()) .value(); return tokenizePathSegments(remainingSegments); } return tokenizePathSegments(pathPattern.toString()); } private String[] tokenizePathSegments(String value) { String[] segments = StringUtils.tokenizeToStringArray(value, PATH_SEPARATOR, false, true); for (int i = 0; i < segments.length; i++) { if (segments[i].contains("%")) { segments[i] = StringUtils.uriDecode(segments[i], StandardCharsets.UTF_8); } } return segments; } private Map<String, String> getTemplateVariables(ServerWebExchange exchange) { Map<String, String> result = exchange.getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE); Assert.state(result != null, "'result' must not be null"); return result; } private Mono<ResponseEntity<Object>> handleResult(@Nullable Publisher<?> result, HttpMethod httpMethod) { if (result instanceof Flux) { result = ((Flux<?>) result).collectList(); } Mono<?> mono = (result != null) ? Mono.from(result) : Mono.empty(); return mono.map(this::toResponseEntity) .onErrorMap(InvalidEndpointRequestException.class, (ex) -> new ResponseStatusException(HttpStatus.BAD_REQUEST, ex.getReason())) .defaultIfEmpty(new ResponseEntity<>( (httpMethod != HttpMethod.GET) ? HttpStatus.NO_CONTENT : HttpStatus.NOT_FOUND)); } private ResponseEntity<Object> toResponseEntity(Object response) { if (!(response instanceof WebEndpointResponse<?> webEndpointResponse)) { return new ResponseEntity<>(response, HttpStatus.OK); } MediaType contentType = (webEndpointResponse.getContentType() != null) ? 
new MediaType(webEndpointResponse.getContentType()) : null; BodyBuilder builder = ResponseEntity.status(webEndpointResponse.getStatus()); if (contentType != null) { builder = builder.contentType(contentType); } return builder.body(webEndpointResponse.getBody()); } @Override public String toString() { return "Actuator web endpoint '" + this.operation.getId() + "'"; } } /** * Handler for a {@link ReactiveWebOperation}. */ private static final
ReactiveWebOperationAdapter
java
alibaba__nacos
auth/src/test/java/com/alibaba/nacos/auth/parser/http/NamingHttpResourceParserTest.java
{ "start": 1588, "end": 6922 }
class ____ { @Mock private HttpServletRequest request; private NamingHttpResourceParser resourceParser; @BeforeEach void setUp() throws Exception { resourceParser = new NamingHttpResourceParser(); } @Test @Secured() void testParseWithFullContext() throws NoSuchMethodException { Secured secured = getMethodSecure(); Mockito.when(request.getParameter(eq(CommonParams.NAMESPACE_ID))).thenReturn("testNs"); Mockito.when(request.getParameter(eq(CommonParams.GROUP_NAME))).thenReturn("testG"); Mockito.when(request.getParameter(eq(CommonParams.SERVICE_NAME))).thenReturn("testS"); Resource actual = resourceParser.parse(request, secured); assertEquals("testNs", actual.getNamespaceId()); assertEquals("testG", actual.getGroup()); assertEquals("testS", actual.getName()); assertEquals(Constants.Naming.NAMING_MODULE, actual.getType()); } @Test @Secured() void testParseWithoutNamespace() throws NoSuchMethodException { Secured secured = getMethodSecure(); Mockito.when(request.getParameter(eq(CommonParams.GROUP_NAME))).thenReturn("testG"); Mockito.when(request.getParameter(eq(CommonParams.SERVICE_NAME))).thenReturn("testS"); Resource actual = resourceParser.parse(request, secured); assertEquals(Constants.DEFAULT_NAMESPACE_ID, actual.getNamespaceId()); assertEquals("testG", actual.getGroup()); assertEquals("testS", actual.getName()); assertEquals(Constants.Naming.NAMING_MODULE, actual.getType()); } @Test @Secured() void testParseWithoutGroup() throws NoSuchMethodException { Secured secured = getMethodSecure(); Mockito.when(request.getParameter(eq(CommonParams.NAMESPACE_ID))).thenReturn("testNs"); Mockito.when(request.getParameter(eq(CommonParams.SERVICE_NAME))).thenReturn("testS"); Resource actual = resourceParser.parse(request, secured); assertEquals("testNs", actual.getNamespaceId()); assertEquals(Constants.DEFAULT_GROUP, actual.getGroup()); assertEquals("testS", actual.getName()); assertEquals(Constants.Naming.NAMING_MODULE, actual.getType()); } @Test @Secured() void 
testParseWithGroupInService() throws NoSuchMethodException { Secured secured = getMethodSecure(); Mockito.when(request.getParameter(eq(CommonParams.NAMESPACE_ID))).thenReturn("testNs"); Mockito.when(request.getParameter(eq(CommonParams.SERVICE_NAME))).thenReturn("testG@@testS"); Resource actual = resourceParser.parse(request, secured); assertEquals("testNs", actual.getNamespaceId()); assertEquals("testG", actual.getGroup()); assertEquals("testS", actual.getName()); assertEquals(Constants.Naming.NAMING_MODULE, actual.getType()); } @Test @Secured() void testParseWithoutService() throws NoSuchMethodException { Secured secured = getMethodSecure(); Mockito.when(request.getParameter(eq(CommonParams.NAMESPACE_ID))).thenReturn("testNs"); Mockito.when(request.getParameter(eq(CommonParams.GROUP_NAME))).thenReturn("testG"); Resource actual = resourceParser.parse(request, secured); assertEquals("testNs", actual.getNamespaceId()); assertEquals("testG", actual.getGroup()); assertEquals(StringUtils.EMPTY, actual.getName()); assertEquals(Constants.Naming.NAMING_MODULE, actual.getType()); } @Test @Secured() void testParseWithoutGroupAndService() throws NoSuchMethodException { Secured secured = getMethodSecure(); Mockito.when(request.getParameter(eq(CommonParams.NAMESPACE_ID))).thenReturn("testNs"); Resource actual = resourceParser.parse(request, secured); assertEquals("testNs", actual.getNamespaceId()); assertEquals(StringUtils.EMPTY, actual.getGroup()); assertEquals(StringUtils.EMPTY, actual.getName()); assertEquals(Constants.Naming.NAMING_MODULE, actual.getType()); } @Test @Secured(tags = {"testTag"}) void testParseWithTags() throws NoSuchMethodException { Secured secured = getMethodSecure(); Mockito.when(request.getParameter(eq(CommonParams.NAMESPACE_ID))).thenReturn("testNs"); Mockito.when(request.getParameter(eq(CommonParams.GROUP_NAME))).thenReturn("testG"); Mockito.when(request.getParameter(eq(CommonParams.SERVICE_NAME))).thenReturn("testS"); Resource actual = 
resourceParser.parse(request, secured); assertEquals("testNs", actual.getNamespaceId()); assertEquals("testG", actual.getGroup()); assertEquals("testS", actual.getName()); assertEquals(Constants.Naming.NAMING_MODULE, actual.getType()); assertTrue(actual.getProperties().containsKey("testTag")); } private Secured getMethodSecure() throws NoSuchMethodException { StackTraceElement[] traces = new Exception().getStackTrace(); StackTraceElement callerElement = traces[1]; String methodName = callerElement.getMethodName(); Method method = this.getClass().getDeclaredMethod(methodName); return method.getAnnotation(Secured.class); } }
NamingHttpResourceParserTest
java
spring-projects__spring-framework
spring-tx/src/main/java/org/springframework/transaction/annotation/ProxyTransactionManagementConfiguration.java
{ "start": 1336, "end": 1767 }
class ____ registers the Spring infrastructure beans * necessary to enable proxy-based annotation-driven transaction management. * * @author Chris Beams * @author Sebastien Deleuze * @since 3.1 * @see EnableTransactionManagement * @see TransactionManagementConfigurationSelector */ @Configuration(proxyBeanMethods = false) @Role(BeanDefinition.ROLE_INFRASTRUCTURE) @ImportRuntimeHints(TransactionRuntimeHints.class) public
that
java
apache__hadoop
hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
{ "start": 1450, "end": 3711 }
class ____ extends AbstractMojo { /** * A value for -Dtest= that runs all native tests. */ private final static String ALL_NATIVE = "allNative"; /** * Location of the binary to run. */ @Parameter(required=true) private File binary; /** * Name of this test. * * Defaults to the basename of the binary. So if your binary is /foo/bar/baz, * this will default to 'baz.' */ @Parameter private String testName; /** * Environment variables to pass to the binary. */ @Parameter private Map<String, String> env; /** * Arguments to pass to the binary. */ @Parameter private List<String> args = new LinkedList<String>(); /** * Number of seconds to wait before declaring the test failed. * */ @Parameter(defaultValue="600") private int timeout; /** * The working directory to use. */ @Parameter private File workingDirectory; /** * Path to results directory. */ @Parameter(defaultValue="native-results") private File results; /** * A list of preconditions which must be true for this test to be run. */ @Parameter private Map<String, String> preconditions = new HashMap<String, String>(); /** * If true, pass over the test without an error if the binary is missing. */ @Parameter(defaultValue="false") private boolean skipIfMissing; /** * What result to expect from the test * * Can be either "success", "failure", or "any". */ @Parameter(defaultValue="success") private String expectedResult; /** * The Maven Session Object. */ @Parameter(defaultValue="${session}", readonly=true, required=true) private MavenSession session; // TODO: support Windows private static void validatePlatform() throws MojoExecutionException { if (System.getProperty("os.name").toLowerCase(Locale.ENGLISH) .startsWith("windows")) { throw new MojoExecutionException("CMakeBuilder does not yet support " + "the Windows platform."); } } /** * The test thread waits for the process to terminate. 
* * Since Process#waitFor doesn't take a timeout argument, we simulate one by * interrupting this thread after a certain amount of time has elapsed. */ private static
TestMojo
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/cascade/circle/Tour.java
{ "start": 199, "end": 1130 }
class ____ { // @Id // @SequenceGenerator(name="TOUR_SEQ", sequenceName="TOUR_SEQ", initialValue=1, allocationSize=1) // @GeneratedValue(strategy=GenerationType.SEQUENCE, generator="TOUR_SEQ") private Long tourID; private long version; private String name; /** A List of nodes contained in this tour. */ // @OneToMany(targetEntity=Node.class, fetch=FetchType.LAZY, cascade={CascadeType.MERGE, CascadeType.REFRESH}, mappedBy="tour") private Set nodes = new HashSet(0); public String getName() { return name; } protected void setTourID(Long tourID) { this.tourID = tourID; } public long getVersion() { return version; } protected void setVersion(long version) { this.version = version; } public void setName(String name) { this.name = name; } public Set getNodes() { return nodes; } public void setNodes(Set nodes) { this.nodes = nodes; } public Long getTourID() { return tourID; } }
Tour
java
elastic__elasticsearch
build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/SkipTests.java
{ "start": 836, "end": 3879 }
class ____ extends TransformTests { @Test public void testAddGlobalSetup() throws Exception { String test_original = "/rest/transform/skip/without_setup_original.yml"; List<ObjectNode> tests = getTests(test_original); String test_transformed = "/rest/transform/skip/without_setup_transformed.yml"; List<ObjectNode> expectedTransformation = getTests(test_transformed); List<ObjectNode> transformedTests = transformTests(tests, Collections.singletonList(new Skip("my reason"))); AssertObjectNodes.areEqual(transformedTests, expectedTransformation); } @Test public void testModifyGlobalSetupWithSkip() throws Exception { String test_original = "/rest/transform/skip/without_setup_original.yml"; List<ObjectNode> tests = getTests(test_original); String test_transformed = "/rest/transform/skip/without_setup_transformed.yml"; List<ObjectNode> expectedTransformation = getTests(test_transformed); List<ObjectNode> transformedTests = transformTests(tests, Collections.singletonList(new Skip("my reason"))); AssertObjectNodes.areEqual(transformedTests, expectedTransformation); } @Test public void testModifyGlobalSetupWithoutSkip() throws Exception { String test_original = "/rest/transform/skip/with_setup_no_skip_original.yml"; List<ObjectNode> tests = getTests(test_original); String test_transformed = "/rest/transform/skip/with_setup_no_skip_transformed.yml"; List<ObjectNode> expectedTransformation = getTests(test_transformed); List<ObjectNode> transformedTests = transformTests(tests, Collections.singletonList(new Skip("my reason"))); AssertObjectNodes.areEqual(transformedTests, expectedTransformation); } @Test public void testModifyGlobalSetupWithFeatures() throws Exception { String test_original = "/rest/transform/skip/with_features_original.yml"; List<ObjectNode> tests = getTests(test_original); String test_transformed = "/rest/transform/skip/with_features_transformed.yml"; List<ObjectNode> expectedTransformation = getTests(test_transformed); List<ObjectNode> transformedTests = 
transformTests(tests, Collections.singletonList(new Skip("my reason"))); AssertObjectNodes.areEqual(transformedTests, expectedTransformation); } @Test public void testModifyPerTestSetup() throws Exception { String test_original = "/rest/transform/skip/per_test_original.yml"; List<ObjectNode> tests = getTests(test_original); String test_transformed = "/rest/transform/skip/per_test_transformed.yml"; List<ObjectNode> expectedTransformation = getTests(test_transformed); List<ObjectNode> transformedTests = transformTests( tests, List.of(new Skip("Two Test", "my reason"), new Skip("Three Test", "another reason")) ); AssertObjectNodes.areEqual(transformedTests, expectedTransformation); } }
SkipTests
java
spring-projects__spring-boot
module/spring-boot-web-server/src/testFixtures/java/org/springframework/boot/web/servlet/context/AbstractServletWebServerMvcIntegrationTests.java
{ "start": 5431, "end": 5563 }
class ____ { @RequestMapping("/hello") @ResponseBody String sayHello() { return "Hello World"; } } }
HelloWorldController
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/cascade/Soldier.java
{ "start": 483, "end": 1307 }
class ____ implements Serializable { private Integer id; private String name; private Troop troop; @Id @GeneratedValue public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.PERSIST) @JoinColumn(name = "troop_fk") public Troop getTroop() { return troop; } public void setTroop(Troop troop) { this.troop = troop; } public boolean equals(Object o) { if ( this == o ) return true; if ( !( o instanceof Soldier ) ) return false; final Soldier soldier = (Soldier) o; if ( !name.equals( soldier.name ) ) return false; return true; } public int hashCode() { return name.hashCode(); } }
Soldier
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/netty/TestingNettyConnectionReader.java
{ "start": 1139, "end": 2059 }
class ____ implements NettyConnectionReader { private final Function<Integer, Buffer> readBufferFunction; private final Supplier<Integer> peekNextBufferSubpartitionIdSupplier; private TestingNettyConnectionReader( Function<Integer, Buffer> readBufferFunction, Supplier<Integer> peekNextBufferSubpartitionIdSupplier) { this.readBufferFunction = readBufferFunction; this.peekNextBufferSubpartitionIdSupplier = peekNextBufferSubpartitionIdSupplier; } @Override public int peekNextBufferSubpartitionId() throws IOException { return peekNextBufferSubpartitionIdSupplier.get(); } @Override public Optional<Buffer> readBuffer(int subpartitionId, int segmentId) { return Optional.of(readBufferFunction.apply(segmentId)); } /** Builder for {@link TestingNettyConnectionReader}. */ public static
TestingNettyConnectionReader
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/nullvaluemapping/source/Car.java
{ "start": 211, "end": 744 }
class ____ { private String make; private int numberOfSeats; public Car() { } public Car(String make, int numberOfSeats) { this.make = make; this.numberOfSeats = numberOfSeats; } public String getMake() { return make; } public void setMake(String make) { this.make = make; } public int getNumberOfSeats() { return numberOfSeats; } public void setNumberOfSeats(int numberOfSeats) { this.numberOfSeats = numberOfSeats; } }
Car
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralAndLiteralEvaluator.java
{ "start": 1128, "end": 3520 }
class ____ implements EvalOperator.ExpressionEvaluator { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohashFromFieldAndLiteralAndLiteralEvaluator.class); private final Source source; private final EvalOperator.ExpressionEvaluator in; private final StGeohash.GeoHashBoundedGrid bounds; private final DriverContext driverContext; private Warnings warnings; public StGeohashFromFieldAndLiteralAndLiteralEvaluator(Source source, EvalOperator.ExpressionEvaluator in, StGeohash.GeoHashBoundedGrid bounds, DriverContext driverContext) { this.source = source; this.in = in; this.bounds = bounds; this.driverContext = driverContext; } @Override public Block eval(Page page) { try (BytesRefBlock inBlock = (BytesRefBlock) in.eval(page)) { return eval(page.getPositionCount(), inBlock); } } @Override public long baseRamBytesUsed() { long baseRamBytesUsed = BASE_RAM_BYTES_USED; baseRamBytesUsed += in.baseRamBytesUsed(); return baseRamBytesUsed; } public LongBlock eval(int positionCount, BytesRefBlock inBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { boolean allBlocksAreNulls = true; if (!inBlock.isNull(p)) { allBlocksAreNulls = false; } if (allBlocksAreNulls) { result.appendNull(); continue position; } try { StGeohash.fromFieldAndLiteralAndLiteral(result, p, inBlock, this.bounds); } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); } } return result.build(); } } @Override public String toString() { return "StGeohashFromFieldAndLiteralAndLiteralEvaluator[" + "in=" + in + "]"; } @Override public void close() { Releasables.closeExpectNoException(in); } private Warnings warnings() { if (warnings == null) { this.warnings = Warnings.createWarnings( driverContext.warningsMode(), source.source().getLineNumber(), source.source().getColumnNumber(), source.text() ); } return warnings; } static
StGeohashFromFieldAndLiteralAndLiteralEvaluator
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/planning/AlignedPlannerWithGreedy.java
{ "start": 1432, "end": 4469 }
class ____ implements ReservationAgent { // Default smoothness factor public static final int DEFAULT_SMOOTHNESS_FACTOR = 10; public static final String SMOOTHNESS_FACTOR = "yarn.resourcemanager.reservation-system.smoothness-factor"; private boolean allocateLeft = false; // Log private static final Logger LOG = LoggerFactory .getLogger(AlignedPlannerWithGreedy.class); // Smoothness factor private ReservationAgent planner; // Constructor public AlignedPlannerWithGreedy() { } @Override public void init(Configuration conf) { int smoothnessFactor = conf.getInt(SMOOTHNESS_FACTOR, DEFAULT_SMOOTHNESS_FACTOR); allocateLeft = conf.getBoolean(FAVOR_EARLY_ALLOCATION, DEFAULT_GREEDY_FAVOR_EARLY_ALLOCATION); // List of algorithms List<ReservationAgent> listAlg = new LinkedList<ReservationAgent>(); // LowCostAligned planning algorithm ReservationAgent algAligned = new IterativePlanner(new StageExecutionIntervalByDemand(), new StageAllocatorLowCostAligned(smoothnessFactor, allocateLeft), allocateLeft); listAlg.add(algAligned); // Greedy planning algorithm ReservationAgent algGreedy = new IterativePlanner(new StageExecutionIntervalUnconstrained(), new StageAllocatorGreedyRLE(allocateLeft), allocateLeft); listAlg.add(algGreedy); // Set planner: // 1. Attempt to execute algAligned // 2. 
If failed, fall back to algGreedy planner = new TryManyReservationAgents(listAlg); } @Override public boolean createReservation(ReservationId reservationId, String user, Plan plan, ReservationDefinition contract) throws PlanningException { LOG.info("placing the following ReservationRequest: " + contract); try { boolean res = planner.createReservation(reservationId, user, plan, contract); if (res) { LOG.info("OUTCOME: SUCCESS, Reservation ID: " + reservationId.toString() + ", Contract: " + contract.toString()); } else { LOG.info("OUTCOME: FAILURE, Reservation ID: " + reservationId.toString() + ", Contract: " + contract.toString()); } return res; } catch (PlanningException e) { LOG.info("OUTCOME: FAILURE, Reservation ID: " + reservationId.toString() + ", Contract: " + contract.toString()); throw e; } } @Override public boolean updateReservation(ReservationId reservationId, String user, Plan plan, ReservationDefinition contract) throws PlanningException { LOG.info("updating the following ReservationRequest: " + contract); return planner.updateReservation(reservationId, user, plan, contract); } @Override public boolean deleteReservation(ReservationId reservationId, String user, Plan plan) throws PlanningException { LOG.info("removing the following ReservationId: " + reservationId); return planner.deleteReservation(reservationId, user, plan); } }
AlignedPlannerWithGreedy
java
apache__flink
flink-metrics/flink-metrics-graphite/src/main/java/org/apache/flink/metrics/graphite/GraphiteReporter.java
{ "start": 1500, "end": 3503 }
enum ____ { TCP, UDP } @Override public ScheduledReporter getReporter(MetricConfig config) { String host = config.getString(ARG_HOST, null); int port = config.getInteger(ARG_PORT, -1); if (host == null || host.length() == 0 || port < 1) { throw new IllegalArgumentException( "Invalid host/port configuration. Host: " + host + " Port: " + port); } String prefix = config.getString(ARG_PREFIX, null); String conversionRate = config.getString(ARG_CONVERSION_RATE, null); String conversionDuration = config.getString(ARG_CONVERSION_DURATION, null); String protocol = config.getString(ARG_PROTOCOL, "TCP"); com.codahale.metrics.graphite.GraphiteReporter.Builder builder = com.codahale.metrics.graphite.GraphiteReporter.forRegistry(registry); if (prefix != null) { builder.prefixedWith(prefix); } if (conversionRate != null) { builder.convertRatesTo(TimeUnit.valueOf(conversionRate)); } if (conversionDuration != null) { builder.convertDurationsTo(TimeUnit.valueOf(conversionDuration)); } Protocol prot; try { prot = Protocol.valueOf(protocol); } catch (IllegalArgumentException iae) { log.warn( "Invalid protocol configuration: " + protocol + " Expected: TCP or UDP, defaulting to TCP."); prot = Protocol.TCP; } log.info( "Configured GraphiteReporter with {host:{}, port:{}, protocol:{}}", host, port, prot); switch (prot) { case UDP: return builder.build(new GraphiteUDP(host, port)); case TCP: default: return builder.build(new Graphite(host, port)); } } }
Protocol
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/ser/AnyGetterOrdering4388Test.java
{ "start": 2547, "end": 2779 }
class ____ { @JsonPropertyOrder(alphabetic = true) @JsonAnyGetter public Map<String, Object> map = new LinkedHashMap<>(); } @JsonPropertyOrder(alphabetic = true) static
AlphabeticOrderOnAnyGetterBean
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/api/AbstractLongAssert.java
{ "start": 944, "end": 1515 }
class ____ all implementations of assertions for {@link Long}s. * * @param <SELF> the "self" type of this assertion class. Please read &quot;<a href="http://bit.ly/1IZIRcY" * target="_blank">Emulating 'self types' using Java Generics to simplify fluent API implementation</a>&quot; * for more details. * * @author Drummond Dawson * @author Yvonne Wang * @author David DIDIER * @author Ansgar Konermann * @author Alex Ruiz * @author Joel Costigliola * @author Mikhail Mazursky * @author Nicolas François * @author Cal027 */ public abstract
for
java
apache__camel
components/camel-csv/src/main/java/org/apache/camel/dataformat/csv/CsvUnmarshaller.java
{ "start": 5175, "end": 5280 }
class ____ the content of the CSV */ @SuppressWarnings("unchecked") private static final
streams
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/expression/SqmXmlElementExpression.java
{ "start": 1173, "end": 4124 }
class ____ extends SelfRenderingSqmFunction<String> implements JpaXmlElementExpression { public SqmXmlElementExpression( SqmFunctionDescriptor descriptor, FunctionRenderer renderer, List<? extends SqmTypedNode<?>> arguments, @Nullable ReturnableType<String> impliedResultType, @Nullable ArgumentsValidator argumentsValidator, FunctionReturnTypeResolver returnTypeResolver, NodeBuilder nodeBuilder, String name) { super( descriptor, renderer, arguments, impliedResultType, argumentsValidator, returnTypeResolver, nodeBuilder, name ); } @Override public SqmXmlElementExpression attribute(String attributeName, Expression<?> expression) { //noinspection unchecked final List<SqmTypedNode<?>> arguments = (List<SqmTypedNode<?>>) getArguments(); if ( arguments.size() > 1 && arguments.get( 1 ) instanceof SqmXmlAttributesExpression attributesExpression ) { attributesExpression.attribute( attributeName, expression ); } else { arguments.add( 1, new SqmXmlAttributesExpression( attributeName, expression ) ); } return this; } @Override public SqmXmlElementExpression content(Expression<?>... expressions) { return content( Arrays.asList(expressions) ); } @Override public SqmXmlElementExpression content(List<? 
extends Expression<?>> expressions) { //noinspection unchecked final List<SqmTypedNode<?>> arguments = (List<SqmTypedNode<?>>) getArguments(); int contentIndex = 1; if ( arguments.size() > contentIndex ) { if ( arguments.get( contentIndex ) instanceof SqmXmlAttributesExpression ) { contentIndex++; } while ( contentIndex < arguments.size() ) { arguments.remove( arguments.size() - 1 ); } } for ( Expression<?> expression : expressions ) { arguments.add( (SqmTypedNode<?>) expression ); } return this; } @Override public SqmXmlElementExpression copy(SqmCopyContext context) { final SqmXmlElementExpression existing = context.getCopy( this ); if ( existing != null ) { return existing; } final List<SqmTypedNode<?>> arguments = new ArrayList<>( getArguments().size() ); for ( SqmTypedNode<?> argument : getArguments() ) { arguments.add( argument.copy( context ) ); } return context.registerCopy( this, new SqmXmlElementExpression( getFunctionDescriptor(), getFunctionRenderer(), arguments, getImpliedResultType(), getArgumentsValidator(), getReturnTypeResolver(), nodeBuilder(), getFunctionName() ) ); } @Override public void appendHqlString(StringBuilder hql, SqmRenderContext context) { final List<? extends SqmTypedNode<?>> arguments = getArguments(); hql.append( "xmlelement(name " ); arguments.get( 0 ).appendHqlString( hql, context ); for ( int i = 1; i < arguments.size(); i++ ) { hql.append( ',' ); arguments.get( i ).appendHqlString( hql, context ); } hql.append( ')' ); } }
SqmXmlElementExpression
java
spring-projects__spring-boot
module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/HikariDataSourcePoolMetadataRuntimeHintsTests.java
{ "start": 1480, "end": 2266 }
class ____ { @Test @SuppressWarnings("rawtypes") void importsRegistrar() { Optional<Class[]> imported = MergedAnnotations.from(HikariPoolDataSourceMetadataProviderConfiguration.class) .get(ImportRuntimeHints.class) .getValue("value", Class[].class); assertThat(imported).hasValue(new Class[] { HikariDataSourcePoolMetadataRuntimeHints.class }); } @Test void registersHints() { RuntimeHints runtimeHints = new RuntimeHints(); new HikariDataSourcePoolMetadataRuntimeHints().registerHints(runtimeHints, getClass().getClassLoader()); assertThat(HikariDataSource.class).hasDeclaredFields("pool"); assertThat(RuntimeHintsPredicates.reflection().onFieldAccess(HikariDataSource.class, "pool")) .accepts(runtimeHints); } }
HikariDataSourcePoolMetadataRuntimeHintsTests
java
grpc__grpc-java
xds/src/test/java/io/grpc/xds/RingHashLoadBalancerProviderTest.java
{ "start": 1454, "end": 12839 }
class ____ { private static final String AUTHORITY = "foo.googleapis.com"; private static final String GRPC_EXPERIMENTAL_RING_HASH_SET_REQUEST_HASH_KEY = "GRPC_EXPERIMENTAL_RING_HASH_SET_REQUEST_HASH_KEY"; private final SynchronizationContext syncContext = new SynchronizationContext( new UncaughtExceptionHandler() { @Override public void uncaughtException(Thread t, Throwable e) { throw new AssertionError(e); } }); private final RingHashLoadBalancerProvider provider = new RingHashLoadBalancerProvider(); @Test public void provided() { for (LoadBalancerProvider current : InternalServiceProviders.getCandidatesViaServiceLoader( LoadBalancerProvider.class, getClass().getClassLoader())) { if (current instanceof RingHashLoadBalancerProvider) { return; } } fail("RingHashLoadBalancerProvider not registered"); } @Test public void providesLoadBalancer() { Helper helper = mock(Helper.class); when(helper.getSynchronizationContext()).thenReturn(syncContext); when(helper.getAuthority()).thenReturn(AUTHORITY); assertThat(provider.newLoadBalancer(helper)) .isInstanceOf(RingHashLoadBalancer.class); } @Test public void parseLoadBalancingConfig_valid() throws IOException { String lbConfig = "{\"minRingSize\" : 10, \"maxRingSize\" : 100}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(10L); assertThat(config.maxRingSize).isEqualTo(100L); assertThat(config.requestHashHeader).isEmpty(); } @Test public void parseLoadBalancingConfig_missingRingSize_useDefaults() throws IOException { String lbConfig = "{}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); 
assertThat(config.minRingSize).isEqualTo(RingHashLoadBalancerProvider.DEFAULT_MIN_RING_SIZE); assertThat(config.maxRingSize).isEqualTo(RingHashLoadBalancerProvider.DEFAULT_MAX_RING_SIZE); assertThat(config.requestHashHeader).isEmpty(); } @Test public void parseLoadBalancingConfig_invalid_negativeSize() throws IOException { String lbConfig = "{\"minRingSize\" : -10}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getError()).isNotNull(); assertThat(configOrError.getError().getCode()).isEqualTo(Code.UNAVAILABLE); assertThat(configOrError.getError().getDescription()) .isEqualTo("Invalid 'minRingSize'/'maxRingSize'"); } @Test public void parseLoadBalancingConfig_invalid_minGreaterThanMax() throws IOException { String lbConfig = "{\"minRingSize\" : 1000, \"maxRingSize\" : 100}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getError()).isNotNull(); assertThat(configOrError.getError().getCode()).isEqualTo(Code.UNAVAILABLE); assertThat(configOrError.getError().getDescription()) .isEqualTo("Invalid 'minRingSize'/'maxRingSize'"); } @Test public void parseLoadBalancingConfig_ringTooLargeUsesCap() throws IOException { long ringSize = RingHashOptions.MAX_RING_SIZE_CAP + 1; String lbConfig = String.format(Locale.US, "{\"minRingSize\" : 10, \"maxRingSize\" : %d}", ringSize); ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(10); assertThat(config.maxRingSize).isEqualTo(RingHashOptions.DEFAULT_RING_SIZE_CAP); assertThat(config.requestHashHeader).isEmpty(); } @Test public void parseLoadBalancingConfig_ringCapCanBeRaised() throws IOException { RingHashOptions.setRingSizeCap(RingHashOptions.MAX_RING_SIZE_CAP); long ringSize = 
RingHashOptions.MAX_RING_SIZE_CAP; String lbConfig = String.format( Locale.US, "{\"minRingSize\" : %d, \"maxRingSize\" : %d}", ringSize, ringSize); ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(RingHashOptions.MAX_RING_SIZE_CAP); assertThat(config.maxRingSize).isEqualTo(RingHashOptions.MAX_RING_SIZE_CAP); assertThat(config.requestHashHeader).isEmpty(); // Reset to avoid affecting subsequent test cases RingHashOptions.setRingSizeCap(RingHashOptions.DEFAULT_RING_SIZE_CAP); } @Test public void parseLoadBalancingConfig_ringCapIsClampedTo8M() throws IOException { RingHashOptions.setRingSizeCap(RingHashOptions.MAX_RING_SIZE_CAP + 1); long ringSize = RingHashOptions.MAX_RING_SIZE_CAP + 1; String lbConfig = String.format( Locale.US, "{\"minRingSize\" : %d, \"maxRingSize\" : %d}", ringSize, ringSize); ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(RingHashOptions.MAX_RING_SIZE_CAP); assertThat(config.maxRingSize).isEqualTo(RingHashOptions.MAX_RING_SIZE_CAP); assertThat(config.requestHashHeader).isEmpty(); // Reset to avoid affecting subsequent test cases RingHashOptions.setRingSizeCap(RingHashOptions.DEFAULT_RING_SIZE_CAP); } @Test public void parseLoadBalancingConfig_ringCapCanBeLowered() throws IOException { RingHashOptions.setRingSizeCap(1); long ringSize = 2; String lbConfig = String.format( Locale.US, "{\"minRingSize\" : %d, \"maxRingSize\" : %d}", ringSize, ringSize); ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = 
(RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(1); assertThat(config.maxRingSize).isEqualTo(1); assertThat(config.requestHashHeader).isEmpty(); // Reset to avoid affecting subsequent test cases RingHashOptions.setRingSizeCap(RingHashOptions.DEFAULT_RING_SIZE_CAP); } @Test public void parseLoadBalancingConfig_ringCapLowerLimitIs1() throws IOException { RingHashOptions.setRingSizeCap(0); long ringSize = 2; String lbConfig = String.format( Locale.US, "{\"minRingSize\" : %d, \"maxRingSize\" : %d}", ringSize, ringSize); ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(1); assertThat(config.maxRingSize).isEqualTo(1); assertThat(config.requestHashHeader).isEmpty(); // Reset to avoid affecting subsequent test cases RingHashOptions.setRingSizeCap(RingHashOptions.DEFAULT_RING_SIZE_CAP); } @Test public void parseLoadBalancingConfig_zeroMinRingSize() throws IOException { String lbConfig = "{\"minRingSize\" : 0, \"maxRingSize\" : 100}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getError()).isNotNull(); assertThat(configOrError.getError().getCode()).isEqualTo(Code.UNAVAILABLE); assertThat(configOrError.getError().getDescription()) .isEqualTo("Invalid 'minRingSize'/'maxRingSize'"); } @Test public void parseLoadBalancingConfig_minRingSizeGreaterThanMaxRingSize() throws IOException { String lbConfig = "{\"minRingSize\" : 100, \"maxRingSize\" : 10}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getError()).isNotNull(); assertThat(configOrError.getError().getCode()).isEqualTo(Code.UNAVAILABLE); assertThat(configOrError.getError().getDescription()) .isEqualTo("Invalid 
'minRingSize'/'maxRingSize'"); } @Test public void parseLoadBalancingConfig_requestHashHeaderIgnoredWhenEnvVarNotSet() throws IOException { String lbConfig = "{\"minRingSize\" : 10, \"maxRingSize\" : 100, \"requestHashHeader\" : \"dummy-hash\"}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(10L); assertThat(config.maxRingSize).isEqualTo(100L); assertThat(config.requestHashHeader).isEmpty(); } @Test public void parseLoadBalancingConfig_requestHashHeaderSetWhenEnvVarSet() throws IOException { System.setProperty(GRPC_EXPERIMENTAL_RING_HASH_SET_REQUEST_HASH_KEY, "true"); try { String lbConfig = "{\"minRingSize\" : 10, \"maxRingSize\" : 100, \"requestHashHeader\" : \"dummy-hash\"}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = (RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(10L); assertThat(config.maxRingSize).isEqualTo(100L); assertThat(config.requestHashHeader).isEqualTo("dummy-hash"); assertThat(config.toString()).contains("minRingSize=10"); assertThat(config.toString()).contains("maxRingSize=100"); assertThat(config.toString()).contains("requestHashHeader=dummy-hash"); } finally { System.clearProperty(GRPC_EXPERIMENTAL_RING_HASH_SET_REQUEST_HASH_KEY); } } @Test public void parseLoadBalancingConfig_requestHashHeaderUnsetWhenEnvVarSet_useDefaults() throws IOException { System.setProperty(GRPC_EXPERIMENTAL_RING_HASH_SET_REQUEST_HASH_KEY, "true"); try { String lbConfig = "{\"minRingSize\" : 10, \"maxRingSize\" : 100}"; ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(parseJsonObject(lbConfig)); assertThat(configOrError.getConfig()).isNotNull(); RingHashConfig config = 
(RingHashConfig) configOrError.getConfig(); assertThat(config.minRingSize).isEqualTo(10L); assertThat(config.maxRingSize).isEqualTo(100L); assertThat(config.requestHashHeader).isEmpty(); } finally { System.clearProperty(GRPC_EXPERIMENTAL_RING_HASH_SET_REQUEST_HASH_KEY); } } @SuppressWarnings("unchecked") private static Map<String, ?> parseJsonObject(String json) throws IOException { return (Map<String, ?>) JsonParser.parse(json); } }
RingHashLoadBalancerProviderTest
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/operators/util/BloomFilter.java
{ "start": 2549, "end": 9203 }
class ____ { @SuppressWarnings("restriction") private static final int BYTE_ARRAY_BASE_OFFSET = UNSAFE.arrayBaseOffset(byte[].class); protected BitSet bitSet; protected int numHashFunctions; public BloomFilter(int expectedEntries, int byteSize) { checkArgument(expectedEntries > 0, "expectedEntries should be > 0"); this.numHashFunctions = optimalNumOfHashFunctions(expectedEntries, byteSize << 3); this.bitSet = new BitSet(byteSize); } /** A constructor to support rebuilding the BloomFilter from a serialized representation. */ private BloomFilter(BitSet bitSet, int numHashFunctions) { checkNotNull(bitSet, "bitSet must be not null"); checkArgument(numHashFunctions > 0, "numHashFunctions should be > 0"); this.bitSet = bitSet; this.numHashFunctions = numHashFunctions; } public void setBitsLocation(MemorySegment memorySegment, int offset) { this.bitSet.setMemorySegment(memorySegment, offset); } /** * Compute optimal bits number with given input entries and expected false positive probability. * * @param inputEntries * @param fpp * @return optimal bits number */ public static int optimalNumOfBits(long inputEntries, double fpp) { int numBits = (int) (-inputEntries * Math.log(fpp) / (Math.log(2) * Math.log(2))); return numBits; } /** * Compute the false positive probability based on given input entries and bits size. Note: this * is just the math expected value, you should not expect the fpp in real case would under the * return value for certain. * * @param inputEntries * @param bitSize * @return */ public static double estimateFalsePositiveProbability(long inputEntries, int bitSize) { int numFunction = optimalNumOfHashFunctions(inputEntries, bitSize); double p = Math.pow(Math.E, -(double) numFunction * inputEntries / bitSize); double estimatedFPP = Math.pow(1 - p, numFunction); return estimatedFPP; } /** * compute the optimal hash function number with given input entries and bits size, which would * make the false positive probability lowest. 
* * @param expectEntries * @param bitSize * @return hash function number */ static int optimalNumOfHashFunctions(long expectEntries, long bitSize) { return Math.max(1, (int) Math.round((double) bitSize / expectEntries * Math.log(2))); } public void addHash(int hash32) { int hash1 = hash32; int hash2 = hash32 >>> 16; for (int i = 1; i <= numHashFunctions; i++) { int combinedHash = hash1 + (i * hash2); // hashcode should be positive, flip all the bits if it's negative if (combinedHash < 0) { combinedHash = ~combinedHash; } int pos = combinedHash % bitSet.bitSize(); bitSet.set(pos); } } public boolean testHash(int hash32) { int hash1 = hash32; int hash2 = hash32 >>> 16; for (int i = 1; i <= numHashFunctions; i++) { int combinedHash = hash1 + (i * hash2); // hashcode should be positive, flip all the bits if it's negative if (combinedHash < 0) { combinedHash = ~combinedHash; } int pos = combinedHash % bitSet.bitSize(); if (!bitSet.get(pos)) { return false; } } return true; } public void reset() { this.bitSet.clear(); } @Override public String toString() { StringBuilder output = new StringBuilder(); output.append("BloomFilter:\n"); output.append("\thash function number:").append(numHashFunctions).append("\n"); output.append(bitSet); return output.toString(); } /** Serializing to bytes, note that only heap memory is currently supported. */ public static byte[] toBytes(BloomFilter filter) { byte[] data = filter.bitSet.toBytes(); int byteSize = data.length; byte[] bytes = new byte[8 + byteSize]; UNSAFE.putInt(bytes, BYTE_ARRAY_BASE_OFFSET, filter.numHashFunctions); UNSAFE.putInt(bytes, BYTE_ARRAY_BASE_OFFSET + 4, byteSize); UNSAFE.copyMemory( data, BYTE_ARRAY_BASE_OFFSET, bytes, BYTE_ARRAY_BASE_OFFSET + 8, byteSize); return bytes; } /** Deserializing bytes array to BloomFilter. Currently, only heap memory is supported. 
*/ public static BloomFilter fromBytes(byte[] bytes) { int numHashFunctions = UNSAFE.getInt(bytes, BYTE_ARRAY_BASE_OFFSET); int byteSize = UNSAFE.getInt(bytes, BYTE_ARRAY_BASE_OFFSET + 4); byte[] data = new byte[byteSize]; UNSAFE.copyMemory( bytes, BYTE_ARRAY_BASE_OFFSET + 8, data, BYTE_ARRAY_BASE_OFFSET, byteSize); BitSet bitSet = new BitSet(byteSize); bitSet.setMemorySegment(MemorySegmentFactory.wrap(data), 0); return new BloomFilter(bitSet, numHashFunctions); } public static byte[] mergeSerializedBloomFilters(byte[] bf1Bytes, byte[] bf2Bytes) { return mergeSerializedBloomFilters( bf1Bytes, 0, bf1Bytes.length, bf2Bytes, 0, bf2Bytes.length); } /** Merge the bf2 bytes to bf1. After merge completes, the contents of bf1 will be changed. */ private static byte[] mergeSerializedBloomFilters( byte[] bf1Bytes, int bf1Start, int bf1Length, byte[] bf2Bytes, int bf2Start, int bf2Length) { if (bf1Length != bf2Length) { throw new IllegalArgumentException( String.format( "bf1Length %s does not match bf2Length %s when merging", bf1Length, bf2Length)); } // Validation on hash functions if (UNSAFE.getByte(bf1Bytes, BYTE_ARRAY_BASE_OFFSET + bf1Start) != UNSAFE.getByte(bf2Bytes, BYTE_ARRAY_BASE_OFFSET + bf2Start)) { throw new IllegalArgumentException( "bf1 numHashFunctions does not match bf2 when merging"); } for (int idx = 8 + BYTE_ARRAY_BASE_OFFSET; idx < bf1Length + BYTE_ARRAY_BASE_OFFSET; idx += 1) { byte l1 = UNSAFE.getByte(bf1Bytes, bf1Start + idx); byte l2 = UNSAFE.getByte(bf2Bytes, bf2Start + idx); UNSAFE.putByte(bf1Bytes, bf1Start + idx, (byte) (l1 | l2)); } return bf1Bytes; } }
BloomFilter
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionRequestTests.java
{ "start": 952, "end": 2356 }
class ____ extends AbstractXContentSerializingTestCase<Request> { private String datafeedId; @Before public void setUpDatafeedId() { datafeedId = DatafeedConfigTests.randomValidDatafeedId(); } @Override protected Request createTestInstance() { return new Request(DatafeedConfigTests.createRandomizedDatafeedConfig(randomAlphaOfLength(10), datafeedId, 3600)); } @Override protected Request mutateInstance(Request instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } @Override protected Writeable.Reader<Request> instanceReader() { return Request::new; } @Override protected Request doParseInstance(XContentParser parser) { return Request.parseRequest(datafeedId, SearchRequest.DEFAULT_INDICES_OPTIONS, parser); } @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); return new NamedWriteableRegistry(searchModule.getNamedWriteables()); } @Override protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); return new NamedXContentRegistry(searchModule.getNamedXContents()); } }
PutDatafeedActionRequestTests
java
google__auto
factory/src/main/java/com/google/auto/factory/processor/Parameter.java
{ "start": 1870, "end": 5758 }
class ____ { /** * The original type of the parameter, while {@code key().type()} erases the wrapped {@code * Provider}, if any. */ abstract Equivalence.Wrapper<TypeMirror> type(); boolean isPrimitive() { return type().get().getKind().isPrimitive(); } /** The name of the parameter. */ abstract String name(); abstract Key key(); /** Annotations on the parameter (not its type). */ abstract ImmutableList<Equivalence.Wrapper<AnnotationMirror>> annotationWrappers(); ImmutableList<AnnotationMirror> annotations() { return annotationWrappers().stream().map(Equivalence.Wrapper::get).collect(toImmutableList()); } abstract Optional<Equivalence.Wrapper<AnnotationMirror>> nullableWrapper(); Optional<AnnotationMirror> nullable() { return unwrapOptionalEquivalence(nullableWrapper()); } private static Parameter forVariableElement( VariableElement variable, TypeMirror type, Types types, InjectApi injectApi) { ImmutableList<AnnotationMirror> allAnnotations = Stream.of(variable.getAnnotationMirrors(), type.getAnnotationMirrors()) .flatMap(List::stream) .collect(toImmutableList()); Optional<AnnotationMirror> nullable = allAnnotations.stream().filter(Parameter::isNullable).findFirst(); Key key = Key.create(type, allAnnotations, types, injectApi); ImmutableSet<Equivalence.Wrapper<AnnotationMirror>> typeAnnotationWrappers = type.getAnnotationMirrors().stream() .map(AnnotationMirrors.equivalence()::wrap) .collect(toImmutableSet()); ImmutableList<Equivalence.Wrapper<AnnotationMirror>> parameterAnnotationWrappers = variable.getAnnotationMirrors().stream() .map(AnnotationMirrors.equivalence()::wrap) .filter(annotation -> !typeAnnotationWrappers.contains(annotation)) .collect(toImmutableList()); return new AutoValue_Parameter( MoreTypes.equivalence().wrap(type), variable.getSimpleName().toString(), key, parameterAnnotationWrappers, wrapOptionalInEquivalence(AnnotationMirrors.equivalence(), nullable)); } private static boolean isNullable(AnnotationMirror annotation) { TypeElement 
annotationType = MoreElements.asType(annotation.getAnnotationType().asElement()); return annotationType.getSimpleName().contentEquals("Nullable") || annotationType .getQualifiedName() .toString() // For NullableDecl and NullableType compatibility annotations .startsWith("org.checkerframework.checker.nullness.compatqual.Nullable"); } static ImmutableSet<Parameter> forParameterList( List<? extends VariableElement> variables, List<? extends TypeMirror> variableTypes, Types types, InjectApi injectApi) { checkArgument(variables.size() == variableTypes.size()); ImmutableSet.Builder<Parameter> builder = ImmutableSet.builder(); Set<String> names = Sets.newHashSetWithExpectedSize(variables.size()); for (int i = 0; i < variables.size(); i++) { Parameter parameter = forVariableElement(variables.get(i), variableTypes.get(i), types, injectApi); checkArgument(names.add(parameter.name()), "Duplicate parameter name: %s", parameter.name()); builder.add(parameter); } ImmutableSet<Parameter> parameters = builder.build(); checkArgument(variables.size() == parameters.size()); return parameters; } static ImmutableSet<Parameter> forParameterList( List<? extends VariableElement> variables, Types types, InjectApi injectApi) { List<TypeMirror> variableTypes = Lists.newArrayListWithExpectedSize(variables.size()); for (VariableElement var : variables) { variableTypes.add(var.asType()); } return forParameterList(variables, variableTypes, types, injectApi); } }
Parameter
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/parsetools/RecordParser.java
{ "start": 1696, "end": 2016 }
class ____'t currently be used for protocols where the text is encoded with something other than * a 1-1 byte-char mapping. * <p> * Please see the documentation for more information. * * @author <a href="http://tfox.org">Tim Fox</a> * @author <a href="mailto:larsdtimm@gmail.com">Lars Timm</a> */ @VertxGen public
can
java
quarkusio__quarkus
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/services/BlockingMutinyTestService.java
{ "start": 814, "end": 8523 }
class ____ extends MutinyBlockingTestServiceGrpc.BlockingTestServiceImplBase { @Override public Uni<EmptyProtos.Empty> emptyCall(EmptyProtos.Empty request) { assertThat(request).isNotNull(); assertRunOnEventLoop(); assertRunOnDuplicatedContext(); return Uni.createFrom().item(EmptyProtos.Empty.newBuilder().build()); } @Override @Blocking public Uni<EmptyProtos.Empty> emptyCallBlocking(EmptyProtos.Empty request) { assertThat(request).isNotNull(); assertRunOnWorker(); assertRunOnDuplicatedContext(); return Uni.createFrom().item(EmptyProtos.Empty.newBuilder().build()); } @Override public Uni<Messages.SimpleResponse> unaryCall(Messages.SimpleRequest request) { assertThat(request).isNotNull(); assertRunOnEventLoop(); assertRunOnDuplicatedContext(); return Uni.createFrom().item(Messages.SimpleResponse.newBuilder().build()); } @Override @Blocking public Uni<Messages.SimpleResponse> unaryCallBlocking(Messages.SimpleRequest request) { assertThat(request).isNotNull(); assertRunOnWorker(); assertRunOnDuplicatedContext(); return Uni.createFrom().item(Messages.SimpleResponse.newBuilder().build()); } @Override public Multi<Messages.StreamingOutputCallResponse> streamingOutputCall( Messages.StreamingOutputCallRequest request) { assertThat(request).isNotNull(); assertRunOnEventLoop(); Context ctxt = assertRunOnDuplicatedContext(); return Multi.createFrom().range(0, 10) .onItem().invoke(AssertHelper::assertRunOnDuplicatedContext) .onItem().invoke(x -> assertThat(ctxt).isEqualTo(Vertx.currentContext())) .map(i -> ByteString.copyFromUtf8(Integer.toString(i))) .map(s -> Messages.Payload.newBuilder().setBody(s).build()) .map(p -> Messages.StreamingOutputCallResponse.newBuilder().setPayload(p).build()); } @Override @Blocking public Multi<Messages.StreamingOutputCallResponse> streamingOutputCallBlocking( Messages.StreamingOutputCallRequest request) { assertThat(request).isNotNull(); assertRunOnWorker(); assertRunOnDuplicatedContext(); return Multi.createFrom().range(0, 10) .map(i -> 
ByteString.copyFromUtf8(Integer.toString(i))) .map(s -> Messages.Payload.newBuilder().setBody(s).build()) .map(p -> Messages.StreamingOutputCallResponse.newBuilder().setPayload(p).build()); } @Override public Uni<Messages.StreamingInputCallResponse> streamingInputCall( Multi<Messages.StreamingInputCallRequest> request) { assertRunOnEventLoop(); Context ctxt = assertRunOnDuplicatedContext(); return request.map(i -> i.getPayload().getBody().toStringUtf8()) .collect().asList() .map(list -> { assertRunOnEventLoop(); assertRunOnDuplicatedContext(); assertThat(ctxt).isEqualTo(Vertx.currentContext()); assertThat(list).containsExactly("a", "b", "c", "d"); return Messages.StreamingInputCallResponse.newBuilder().build(); }); } @Override @Blocking public Uni<Messages.StreamingInputCallResponse> streamingInputCallBlocking( Multi<Messages.StreamingInputCallRequest> request) { assertRunOnWorker(); Context ctxt = assertRunOnDuplicatedContext(); return request.map(i -> i.getPayload().getBody().toStringUtf8()) .collect().asList() .map(list -> { assertRunOnWorker(); assertRunOnDuplicatedContext(); assertThat(ctxt).isEqualTo(Vertx.currentContext()); assertThat(list).containsExactly("a", "b", "c", "d"); return Messages.StreamingInputCallResponse.newBuilder().build(); }); } @Override public Multi<Messages.StreamingOutputCallResponse> fullDuplexCall( Multi<Messages.StreamingOutputCallRequest> request) { AtomicInteger counter = new AtomicInteger(); assertRunOnEventLoop(); Context ctxt = assertRunOnDuplicatedContext(); return request .map(r -> r.getPayload().getBody().toStringUtf8()) .map(r -> { assertRunOnEventLoop(); assertRunOnDuplicatedContext(); assertThat(ctxt).isEqualTo(Vertx.currentContext()); return r + counter.incrementAndGet(); }) .map(r -> Messages.Payload.newBuilder().setBody(ByteString.copyFromUtf8(r)).build()) .map(r -> Messages.StreamingOutputCallResponse.newBuilder().setPayload(r).build()); } @Override @Blocking public Multi<Messages.StreamingOutputCallResponse> 
fullDuplexCallBlocking( Multi<Messages.StreamingOutputCallRequest> request) { AtomicInteger counter = new AtomicInteger(); assertRunOnWorker(); Context ctxt = assertRunOnDuplicatedContext(); return request .map(r -> r.getPayload().getBody().toStringUtf8()) .map(r -> { assertRunOnWorker(); assertRunOnDuplicatedContext(); assertThat(ctxt).isEqualTo(Vertx.currentContext()); return r + counter.incrementAndGet(); }) .map(r -> Messages.Payload.newBuilder().setBody(ByteString.copyFromUtf8(r)).build()) .map(r -> Messages.StreamingOutputCallResponse.newBuilder().setPayload(r).build()); } @Override public Multi<Messages.StreamingOutputCallResponse> halfDuplexCall( Multi<Messages.StreamingOutputCallRequest> request) { assertRunOnEventLoop(); Context ctxt = assertRunOnDuplicatedContext(); return request .map(r -> { assertRunOnEventLoop(); assertRunOnDuplicatedContext(); assertThat(ctxt).isEqualTo(Vertx.currentContext()); return r.getPayload().getBody().toStringUtf8(); }) .map(String::toUpperCase) .collect().asList() .onItem().transformToMulti(s -> Multi.createFrom().iterable(s)) .map(r -> Messages.Payload.newBuilder().setBody(ByteString.copyFromUtf8(r)).build()) .map(r -> Messages.StreamingOutputCallResponse.newBuilder().setPayload(r).build()); } @Override @Blocking public Multi<Messages.StreamingOutputCallResponse> halfDuplexCallBlocking( Multi<Messages.StreamingOutputCallRequest> request) { assertRunOnWorker(); Context ctxt = assertRunOnDuplicatedContext(); return request .map(r -> { assertRunOnWorker(); assertRunOnDuplicatedContext(); assertThat(ctxt).isEqualTo(Vertx.currentContext()); return r.getPayload().getBody().toStringUtf8(); }) .map(String::toUpperCase) .collect().asList() .onItem().transformToMulti(s -> Multi.createFrom().iterable(s)) .map(r -> Messages.Payload.newBuilder().setBody(ByteString.copyFromUtf8(r)).build()) .map(r -> Messages.StreamingOutputCallResponse.newBuilder().setPayload(r).build()); } }
BlockingMutinyTestService
java
apache__flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sinks/CsvTableSink.java
{ "start": 2093, "end": 6552 }
class ____ implements AppendStreamTableSink<Row> { private String path; private String fieldDelim; private int numFiles = -1; private FileSystem.WriteMode writeMode; private String[] fieldNames; private DataType[] fieldTypes; /** * A simple {@link TableSink} to emit data as CSV files. * * @param path The output path to write the Table to. * @param fieldDelim The field delimiter * @param numFiles The number of files to write to * @param writeMode The write mode to specify whether existing files are overwritten or not. * @param fieldNames The field names of the table to emit. * @param fieldTypes The field types of the table to emit. */ public CsvTableSink( String path, String fieldDelim, int numFiles, FileSystem.WriteMode writeMode, String[] fieldNames, DataType[] fieldTypes) { this.path = path; this.fieldDelim = fieldDelim; this.numFiles = numFiles; this.writeMode = writeMode; this.fieldNames = fieldNames; this.fieldTypes = fieldTypes; } /** * A simple {@link TableSink} to emit data as CSV files. * * @param path The output path to write the Table to. * @param fieldDelim The field delimiter * @param numFiles The number of files to write to * @param writeMode The write mode to specify whether existing files are overwritten or not. */ public CsvTableSink( String path, String fieldDelim, int numFiles, FileSystem.WriteMode writeMode) { this.path = path; this.fieldDelim = fieldDelim; this.numFiles = numFiles; this.writeMode = writeMode; } /** * A simple {@link TableSink} to emit data as CSV files using comma as field delimiter, with * default parallelism and write mode. * * @param path The output path to write the Table to. */ public CsvTableSink(String path) { this(path, ","); } /** * A simple {@link TableSink} to emit data as CSV files, with default parallelism and write * mode. * * @param path The output path to write the Table to. 
* @param fieldDelim The field delimiter */ public CsvTableSink(String path, String fieldDelim) { this(path, fieldDelim, -1, null); } @Override public DataStreamSink<?> consumeDataStream(DataStream<Row> dataStream) { SingleOutputStreamOperator<String> csvRows = dataStream.map(new CsvFormatter(fieldDelim == null ? "," : fieldDelim)); DataStreamSink<String> sink; TextOutputFormat<String> textOutputFormat = new TextOutputFormat<>(new Path(path)); if (writeMode != null) { textOutputFormat.setWriteMode(writeMode); } sink = csvRows.addSink(new OutputFormatSinkFunction<>(textOutputFormat)); if (numFiles > 0) { csvRows.setParallelism(numFiles); sink.setParallelism(numFiles); } else { // if file number is not set, use input parallelism to make it chained. csvRows.getTransformation().setParallelism(dataStream.getParallelism(), false); sink.getTransformation().setParallelism(dataStream.getParallelism(), false); } sink.name(TableConnectorUtils.generateRuntimeName(CsvTableSink.class, fieldNames)); return sink; } @Override public TableSink<Row> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) { if (this.fieldNames != null || this.fieldTypes != null) { throw new IllegalStateException( "CsvTableSink has already been configured field names and field types."); } DataType[] dataTypes = Arrays.stream(fieldTypes) .map(TypeConversions::fromLegacyInfoToDataType) .toArray(DataType[]::new); return new CsvTableSink(path, fieldDelim, numFiles, writeMode, fieldNames, dataTypes); } @Override public DataType getConsumedDataType() { return getTableSchema().toRowDataType(); } @Override public TableSchema getTableSchema() { return TableSchema.builder().fields(fieldNames, fieldTypes).build(); } /** Formats a Row into a String with fields separated by the field delimiter. */ @Internal public static
CsvTableSink
java
quarkusio__quarkus
integration-tests/maven/src/test/resources-filtered/projects/test-nested-tests-mixed-with-normal-tests/src/test/java/org/acme/HelloResourceTest.java
{ "start": 291, "end": 628 }
class ____ { @Test public void testHelloEndpoint() { given() .when() .get("/app/hello") .then() .statusCode(200) .body(is("Hello from Quarkus REST via config")); } @Nested
NestedInnerClass
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/component/bean/FooService.java
{ "start": 852, "end": 907 }
interface ____ { String hello(String s); }
FooService
java
apache__maven
impl/maven-impl/src/main/java/org/apache/maven/api/services/model/ModelUrlNormalizer.java
{ "start": 1111, "end": 1481 }
interface ____ { /** * Normalizes the well-known URLs of the specified model. * * @param model The model whose URLs should be normalized, may be {@code null}. * @param request The model building request that holds further settings, must not be {@code null}. */ Model normalize(Model model, ModelBuilderRequest request); }
ModelUrlNormalizer
java
spring-projects__spring-boot
core/spring-boot/src/main/java/org/springframework/boot/context/config/StandardConfigDataLocationResolver.java
{ "start": 2082, "end": 14448 }
class ____ implements ConfigDataLocationResolver<StandardConfigDataResource>, Ordered { private static final String PREFIX = "resource:"; static final String CONFIG_NAME_PROPERTY = "spring.config.name"; static final String[] DEFAULT_CONFIG_NAMES = { "application" }; private static final Pattern URL_PREFIX = Pattern.compile("^([a-zA-Z][a-zA-Z0-9*]*?:)(.*$)"); private static final @Nullable String NO_PROFILE = null; private final Log logger; private final List<PropertySourceLoader> propertySourceLoaders; private final String[] configNames; private final LocationResourceLoader resourceLoader; /** * Create a new {@link StandardConfigDataLocationResolver} instance. * @param logFactory the factory for loggers to use * @param binder a binder backed by the initial {@link Environment} * @param resourceLoader a {@link ResourceLoader} used to load resources */ public StandardConfigDataLocationResolver(DeferredLogFactory logFactory, Binder binder, ResourceLoader resourceLoader) { this.logger = logFactory.getLog(StandardConfigDataLocationResolver.class); this.propertySourceLoaders = SpringFactoriesLoader.loadFactories(PropertySourceLoader.class, resourceLoader.getClassLoader()); this.configNames = getConfigNames(binder); this.resourceLoader = new LocationResourceLoader(resourceLoader); } private String[] getConfigNames(Binder binder) { String[] configNames = binder.bind(CONFIG_NAME_PROPERTY, String[].class).orElse(DEFAULT_CONFIG_NAMES); for (String configName : configNames) { validateConfigName(configName); } return configNames; } private void validateConfigName(String name) { Assert.state(!name.contains("*"), () -> "Config name '" + name + "' cannot contain '*'"); } @Override public int getOrder() { return Ordered.LOWEST_PRECEDENCE; } @Override public boolean isResolvable(ConfigDataLocationResolverContext context, ConfigDataLocation location) { return true; } @Override public List<StandardConfigDataResource> resolve(ConfigDataLocationResolverContext context, ConfigDataLocation 
location) throws ConfigDataNotFoundException { return resolve(getReferences(context, location.split())); } private Set<StandardConfigDataReference> getReferences(ConfigDataLocationResolverContext context, ConfigDataLocation[] configDataLocations) { Set<StandardConfigDataReference> references = new LinkedHashSet<>(); for (ConfigDataLocation configDataLocation : configDataLocations) { references.addAll(getReferences(context, configDataLocation)); } return references; } private Set<StandardConfigDataReference> getReferences(ConfigDataLocationResolverContext context, ConfigDataLocation configDataLocation) { String resourceLocation = getResourceLocation(context, configDataLocation); try { if (isDirectory(resourceLocation)) { return getReferencesForDirectory(configDataLocation, resourceLocation, NO_PROFILE); } return getReferencesForFile(configDataLocation, resourceLocation, NO_PROFILE); } catch (RuntimeException ex) { throw new IllegalStateException("Unable to load config data from '" + configDataLocation + "'", ex); } } @Override public List<StandardConfigDataResource> resolveProfileSpecific(ConfigDataLocationResolverContext context, ConfigDataLocation location, Profiles profiles) { return resolve(getProfileSpecificReferences(context, location.split(), profiles)); } private Set<StandardConfigDataReference> getProfileSpecificReferences(ConfigDataLocationResolverContext context, ConfigDataLocation[] configDataLocations, Profiles profiles) { Set<StandardConfigDataReference> references = new LinkedHashSet<>(); for (String profile : profiles) { for (ConfigDataLocation configDataLocation : configDataLocations) { String resourceLocation = getResourceLocation(context, configDataLocation); references.addAll(getReferences(configDataLocation, resourceLocation, profile)); } } return references; } private String getResourceLocation(ConfigDataLocationResolverContext context, ConfigDataLocation configDataLocation) { String resourceLocation = 
configDataLocation.getNonPrefixedValue(PREFIX); boolean isFixedPath = resourceLocation.startsWith("/") || URL_PREFIX.matcher(resourceLocation).matches(); if (isFixedPath) { return resourceLocation; } ConfigDataResource parent = context.getParent(); if (parent instanceof StandardConfigDataResource resource) { String parentResourceLocation = resource.getReference().getResourceLocation(); String parentDirectory = parentResourceLocation.substring(0, parentResourceLocation.lastIndexOf("/") + 1); return parentDirectory + resourceLocation; } return resourceLocation; } private Set<StandardConfigDataReference> getReferences(ConfigDataLocation configDataLocation, String resourceLocation, String profile) { if (isDirectory(resourceLocation)) { return getReferencesForDirectory(configDataLocation, resourceLocation, profile); } return getReferencesForFile(configDataLocation, resourceLocation, profile); } private Set<StandardConfigDataReference> getReferencesForDirectory(ConfigDataLocation configDataLocation, String directory, @Nullable String profile) { Set<StandardConfigDataReference> references = new LinkedHashSet<>(); for (String name : this.configNames) { Deque<StandardConfigDataReference> referencesForName = getReferencesForConfigName(name, configDataLocation, directory, profile); references.addAll(referencesForName); } return references; } private Deque<StandardConfigDataReference> getReferencesForConfigName(String name, ConfigDataLocation configDataLocation, String directory, @Nullable String profile) { Deque<StandardConfigDataReference> references = new ArrayDeque<>(); for (PropertySourceLoader propertySourceLoader : this.propertySourceLoaders) { for (String extension : propertySourceLoader.getFileExtensions()) { StandardConfigDataReference reference = new StandardConfigDataReference(configDataLocation, directory, directory + name, profile, extension, propertySourceLoader); if (!references.contains(reference)) { references.addFirst(reference); } } } return references; } 
private Set<StandardConfigDataReference> getReferencesForFile(ConfigDataLocation configDataLocation, String file, @Nullable String profile) { FileExtensionHint fileExtensionHint = FileExtensionHint.from(file); if (fileExtensionHint.isPresent()) { file = FileExtensionHint.removeFrom(file) + fileExtensionHint; } for (PropertySourceLoader propertySourceLoader : this.propertySourceLoaders) { String fileExtension = getLoadableFileExtension(propertySourceLoader, file); if (fileExtension != null) { String root = file.substring(0, file.length() - fileExtension.length() - 1); StandardConfigDataReference reference = new StandardConfigDataReference(configDataLocation, null, root, profile, (!fileExtensionHint.isPresent()) ? fileExtension : null, propertySourceLoader); return Collections.singleton(reference); } } if (configDataLocation.isOptional()) { return Collections.emptySet(); } if (configDataLocation.hasPrefix(PREFIX) || configDataLocation.hasPrefix(ResourceUtils.FILE_URL_PREFIX) || configDataLocation.hasPrefix(ResourceUtils.CLASSPATH_URL_PREFIX) || configDataLocation.toString().indexOf(':') == -1) { throw new IllegalStateException("File extension is not known to any PropertySourceLoader. " + "If the location is meant to reference a directory, it must end in '/' or File.separator"); } throw new IllegalStateException( "Incorrect ConfigDataLocationResolver chosen or file extension is not known to any PropertySourceLoader. " + "If the location is meant to reference a directory, it must end in '/' or File.separator. 
" + "The location is being resolved using the StandardConfigDataLocationResolver, " + "check the location prefix if a different resolver is expected"); } private @Nullable String getLoadableFileExtension(PropertySourceLoader loader, String file) { for (String fileExtension : loader.getFileExtensions()) { if (StringUtils.endsWithIgnoreCase(file, fileExtension)) { return fileExtension; } } return null; } private boolean isDirectory(String resourceLocation) { return resourceLocation.endsWith("/") || resourceLocation.endsWith(File.separator); } private List<StandardConfigDataResource> resolve(Set<StandardConfigDataReference> references) { List<StandardConfigDataResource> resolved = new ArrayList<>(); for (StandardConfigDataReference reference : references) { resolved.addAll(resolve(reference)); } if (resolved.isEmpty()) { resolved.addAll(resolveEmptyDirectories(references)); } return resolved; } private Collection<StandardConfigDataResource> resolveEmptyDirectories( Set<StandardConfigDataReference> references) { Set<StandardConfigDataResource> empty = new LinkedHashSet<>(); for (StandardConfigDataReference reference : references) { if (reference.getDirectory() != null) { empty.addAll(resolveEmptyDirectories(reference)); } } return empty; } private Set<StandardConfigDataResource> resolveEmptyDirectories(StandardConfigDataReference reference) { if (!this.resourceLoader.isPattern(reference.getResourceLocation())) { return resolveNonPatternEmptyDirectories(reference); } return resolvePatternEmptyDirectories(reference); } private Set<StandardConfigDataResource> resolveNonPatternEmptyDirectories(StandardConfigDataReference reference) { String directory = reference.getDirectory(); Assert.state(directory != null, "'directory' must not be null"); Resource resource = this.resourceLoader.getResource(directory); return (resource instanceof ClassPathResource || !resource.exists()) ? 
Collections.emptySet() : Collections.singleton(new StandardConfigDataResource(reference, resource, true)); } private Set<StandardConfigDataResource> resolvePatternEmptyDirectories(StandardConfigDataReference reference) { String directory = reference.getDirectory(); Assert.state(directory != null, "'directory' must not be null"); Resource[] subdirectories = this.resourceLoader.getResources(directory, ResourceType.DIRECTORY); ConfigDataLocation location = reference.getConfigDataLocation(); if (!location.isOptional() && ObjectUtils.isEmpty(subdirectories)) { String message = String.format("Config data location '%s' contains no subdirectories", location); throw new ConfigDataLocationNotFoundException(location, message, null); } return Arrays.stream(subdirectories) .filter(Resource::exists) .map((resource) -> new StandardConfigDataResource(reference, resource, true)) .collect(Collectors.toCollection(LinkedHashSet::new)); } private List<StandardConfigDataResource> resolve(StandardConfigDataReference reference) { if (!this.resourceLoader.isPattern(reference.getResourceLocation())) { return resolveNonPattern(reference); } return resolvePattern(reference); } private List<StandardConfigDataResource> resolveNonPattern(StandardConfigDataReference reference) { Resource resource = this.resourceLoader.getResource(reference.getResourceLocation()); if (!resource.exists() && reference.isSkippable()) { logSkippingResource(reference); return Collections.emptyList(); } return Collections.singletonList(createConfigResourceLocation(reference, resource)); } private List<StandardConfigDataResource> resolvePattern(StandardConfigDataReference reference) { List<StandardConfigDataResource> resolved = new ArrayList<>(); for (Resource resource : this.resourceLoader.getResources(reference.getResourceLocation(), ResourceType.FILE)) { if (!resource.exists() && reference.isSkippable()) { logSkippingResource(reference); } else { resolved.add(createConfigResourceLocation(reference, resource)); } } 
return resolved; } private void logSkippingResource(StandardConfigDataReference reference) { this.logger.trace(LogMessage.format("Skipping missing resource %s", reference)); } private StandardConfigDataResource createConfigResourceLocation(StandardConfigDataReference reference, Resource resource) { return new StandardConfigDataResource(reference, resource); } }
StandardConfigDataLocationResolver
java
apache__flink
flink-core-api/src/main/java/org/apache/flink/api/common/typeinfo/TypeDescriptors.java
{ "start": 1055, "end": 1126 }
interface ____ create TypeInformation instances. */ @Experimental public
to
java
apache__logging-log4j2
log4j-layout-template-json-test/src/test/java/org/apache/logging/log4j/layout/template/json/util/RecyclerFactoriesTest.java
{ "start": 1511, "end": 5436 }
class ____ { @Test void test_RecyclerFactoryConverter() throws Exception { // Check if the type converter is registered. final TypeConverter<?> converter = TypeConverterRegistry.getInstance().findCompatibleConverter(RecyclerFactory.class); Assertions.assertThat(converter).isNotNull(); // Check dummy recycler factory. { final Object actualDummyRecyclerFactory = converter.convert("dummy"); Assertions.assertThat(actualDummyRecyclerFactory).isSameAs(DummyRecyclerFactory.getInstance()); } // Check thread-local recycler factory. { final Object actualThreadLocalRecyclerFactory = converter.convert("threadLocal"); Assertions.assertThat(actualThreadLocalRecyclerFactory).isSameAs(ThreadLocalRecyclerFactory.getInstance()); } // Check queueing recycler factory. { final Object actualQueueingRecyclerFactory = converter.convert("queue"); Assertions.assertThat(actualQueueingRecyclerFactory).isInstanceOf(QueueingRecyclerFactory.class); } // Check queueing recycler factory with supplier. { final Object recyclerFactory = converter.convert("queue:supplier=java.util.ArrayDeque.new"); Assertions.assertThat(recyclerFactory).isInstanceOf(QueueingRecyclerFactory.class); final QueueingRecyclerFactory queueingRecyclerFactory = (QueueingRecyclerFactory) recyclerFactory; final Recycler<Object> recycler = queueingRecyclerFactory.create(Object::new); Assertions.assertThat(recycler).isInstanceOf(QueueingRecycler.class); final QueueingRecycler<Object> queueingRecycler = (QueueingRecycler<Object>) recycler; Assertions.assertThat(queueingRecycler.getQueue()).isInstanceOf(ArrayDeque.class); } // Check queueing recycler factory with capacity. { final Object actualQueueingRecyclerFactory = converter.convert("queue:capacity=100"); Assertions.assertThat(actualQueueingRecyclerFactory).isInstanceOf(QueueingRecyclerFactory.class); } // Check queueing recycler factory with supplier and capacity. 
{ final Object recyclerFactory = converter.convert( "queue:" + "supplier=java.util.concurrent.ArrayBlockingQueue.new," + "capacity=100"); Assertions.assertThat(recyclerFactory).isInstanceOf(QueueingRecyclerFactory.class); final QueueingRecyclerFactory queueingRecyclerFactory = (QueueingRecyclerFactory) recyclerFactory; final Recycler<Object> recycler = queueingRecyclerFactory.create(Object::new); Assertions.assertThat(recycler).isInstanceOf(QueueingRecycler.class); final QueueingRecycler<Object> queueingRecycler = (QueueingRecycler<Object>) recycler; Assertions.assertThat(queueingRecycler.getQueue()).isInstanceOf(ArrayBlockingQueue.class); final ArrayBlockingQueue<Object> queue = (ArrayBlockingQueue<Object>) queueingRecycler.getQueue(); Assertions.assertThat(queue.remainingCapacity()).isEqualTo(100); } } @Test @LoggerContextSource("recyclerFactoryCustomizedJsonTemplateLayoutLogging.xml") void test_RecyclerFactoryConverter_using_XML_config(final @Named(value = "List") ListAppender appender) throws Exception { final JsonTemplateLayout layout = (JsonTemplateLayout) appender.getLayout(); final Field field = JsonTemplateLayout.class.getDeclaredField("contextRecycler"); field.setAccessible(true); final QueueingRecycler<?> contextRecycler = (QueueingRecycler<?>) field.get(layout); final MpmcArrayQueue<?> queue = (MpmcArrayQueue<?>) contextRecycler.getQueue(); Assertions.assertThat(queue.capacity()).isEqualTo(512); } }
RecyclerFactoriesTest
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java
{ "start": 1059, "end": 3871 }
class ____ extends AbstractConvertFunction.AbstractEvaluator { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBooleanFromDoubleEvaluator.class); private final EvalOperator.ExpressionEvaluator d; public ToBooleanFromDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator d, DriverContext driverContext) { super(driverContext, source); this.d = d; } @Override public EvalOperator.ExpressionEvaluator next() { return d; } @Override public Block evalVector(Vector v) { DoubleVector vector = (DoubleVector) v; int positionCount = v.getPositionCount(); if (vector.isConstant()) { return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } try (BooleanBlock.Builder builder = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { builder.appendBoolean(evalValue(vector, p)); } return builder.build(); } } private boolean evalValue(DoubleVector container, int index) { double value = container.getDouble(index); return ToBoolean.fromDouble(value); } @Override public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); try (BooleanBlock.Builder builder = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); int end = start + valueCount; boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { boolean value = evalValue(block, i); if (positionOpened == false && valueCount > 1) { builder.beginPositionEntry(); positionOpened = true; } builder.appendBoolean(value); valuesAppended = true; } if (valuesAppended == false) { builder.appendNull(); } else if (positionOpened) { builder.endPositionEntry(); } } return builder.build(); } } private boolean evalValue(DoubleBlock container, int index) { double value = 
container.getDouble(index); return ToBoolean.fromDouble(value); } @Override public String toString() { return "ToBooleanFromDoubleEvaluator[" + "d=" + d + "]"; } @Override public void close() { Releasables.closeExpectNoException(d); } @Override public long baseRamBytesUsed() { long baseRamBytesUsed = BASE_RAM_BYTES_USED; baseRamBytesUsed += d.baseRamBytesUsed(); return baseRamBytesUsed; } public static
ToBooleanFromDoubleEvaluator
java
elastic__elasticsearch
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/permission/PermissionTests.java
{ "start": 1672, "end": 6075 }
class ____ extends ESTestCase { private static final RestrictedIndices EMPTY_RESTRICTED_INDICES = new RestrictedIndices(Automatons.EMPTY); private Role permission; @Before public void init() { Role.Builder builder = Role.builder(EMPTY_RESTRICTED_INDICES, "test"); builder.add(MONITOR, "test_*", "/foo.*/"); builder.add(READ, "baz_*foo", "/fool.*bar/"); builder.add(MONITOR, "/bar.*/"); builder.add(CREATE, "ingest_foo*"); permission = builder.build(); } public void testAllowedIndicesMatcherAction() throws Exception { testAllowedIndicesMatcher(permission.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name())); } public void testAllowedIndicesMatcherForMappingUpdates() throws Exception { for (String mappingUpdateActionName : List.of(TransportPutMappingAction.TYPE.name(), TransportAutoPutMappingAction.TYPE.name())) { IndexAbstraction mockIndexAbstraction = mock(IndexAbstraction.class); IsResourceAuthorizedPredicate indexPredicate = permission.indices().allowedIndicesMatcher(mappingUpdateActionName); // mapping updates are still permitted on indices and aliases when(mockIndexAbstraction.getName()).thenReturn("ingest_foo" + randomAlphaOfLength(3)); when(mockIndexAbstraction.getType()).thenReturn(IndexAbstraction.Type.CONCRETE_INDEX); assertThat(indexPredicate.test(mockIndexAbstraction), is(true)); when(mockIndexAbstraction.getType()).thenReturn(IndexAbstraction.Type.ALIAS); assertThat(indexPredicate.test(mockIndexAbstraction), is(true)); // mapping updates are NOT permitted on data streams and backing indices when(mockIndexAbstraction.getType()).thenReturn(IndexAbstraction.Type.DATA_STREAM); assertThat(indexPredicate.test(mockIndexAbstraction), is(false)); when(mockIndexAbstraction.getType()).thenReturn(IndexAbstraction.Type.CONCRETE_INDEX); when(mockIndexAbstraction.getParentDataStream()).thenReturn( DataStreamTestHelper.newInstance("ds", List.of(new Index("idx", UUIDs.randomBase64UUID(random())))) ); assertThat(indexPredicate.test(mockIndexAbstraction), 
is(false)); } } public void testAllowedIndicesMatcherActionCaching() throws Exception { IsResourceAuthorizedPredicate matcher1 = permission.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()); IsResourceAuthorizedPredicate matcher2 = permission.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()); assertThat(matcher1, is(matcher2)); } public void testBuildEmptyRole() { Role.Builder permission = Role.builder(EMPTY_RESTRICTED_INDICES, "some_role"); Role role = permission.build(); assertThat(role, notNullValue()); assertThat(role.cluster(), notNullValue()); assertThat(role.indices(), notNullValue()); assertThat(role.runAs(), notNullValue()); } public void testRunAs() { Role permission = Role.builder(EMPTY_RESTRICTED_INDICES, "some_role").runAs(new Privilege("name", "user1", "run*")).build(); assertThat(permission.runAs().check("user1"), is(true)); assertThat(permission.runAs().check("user"), is(false)); assertThat(permission.runAs().check("run" + randomAlphaOfLengthBetween(1, 10)), is(true)); } // "baz_*foo", "/fool.*bar/" private void testAllowedIndicesMatcher(IsResourceAuthorizedPredicate indicesMatcher) { assertThat(indicesMatcher.test(mockIndexAbstraction("foobar")), is(false)); assertThat(indicesMatcher.test(mockIndexAbstraction("fool")), is(false)); assertThat(indicesMatcher.test(mockIndexAbstraction("fool2bar")), is(true)); assertThat(indicesMatcher.test(mockIndexAbstraction("baz_foo")), is(true)); assertThat(indicesMatcher.test(mockIndexAbstraction("barbapapa")), is(false)); } private IndexAbstraction mockIndexAbstraction(String name) { IndexAbstraction mock = mock(IndexAbstraction.class); when(mock.getName()).thenReturn(name); when(mock.getType()).thenReturn( randomFrom(IndexAbstraction.Type.CONCRETE_INDEX, IndexAbstraction.Type.ALIAS, IndexAbstraction.Type.DATA_STREAM) ); return mock; } }
PermissionTests
java
spring-cloud__spring-cloud-gateway
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/handler/predicate/VersionRoutePredicateFactory.java
{ "start": 1407, "end": 3856 }
class ____ extends AbstractRoutePredicateFactory<VersionRoutePredicateFactory.Config> { private static final Log log = LogFactory.getLog(VersionRoutePredicateFactory.class); private final @Nullable ApiVersionStrategy apiVersionStrategy; public VersionRoutePredicateFactory(@Nullable ApiVersionStrategy apiVersionStrategy) { super(Config.class); this.apiVersionStrategy = apiVersionStrategy; } private static void traceMatch(String prefix, @Nullable Object desired, @Nullable Object actual, boolean match) { if (log.isTraceEnabled()) { log.trace(String.format("%s \"%s\" %s against value \"%s\"", prefix, desired, match ? "matches" : "does not match", actual)); } } @Override public List<String> shortcutFieldOrder() { return List.of("version"); } @Override public Predicate<ServerWebExchange> apply(Config config) { if (apiVersionStrategy instanceof DefaultApiVersionStrategy strategy) { String version = config.version; if (version != null) { strategy .addMappedVersion((version.endsWith("+") ? version.substring(0, version.length() - 1) : version)); } } return new GatewayPredicate() { @Override public boolean test(ServerWebExchange exchange) { ServerHttpRequest request = exchange.getRequest(); if (config.parsedVersion == null) { Assert.state(apiVersionStrategy != null, "No ApiVersionStrategy to parse version with"); String version = config.version; Objects.requireNonNull(version, "version must not be null"); config.parsedVersion = apiVersionStrategy.parseVersion(version); } Comparable<?> requestVersion = (Comparable<?>) request.getAttributes() .get(HandlerMapping.API_VERSION_ATTRIBUTE); if (requestVersion == null) { traceMatch("Version", config.version, null, false); return false; } int result = compareVersions(config.parsedVersion, requestVersion); boolean match = (config.baselineVersion ? 
result <= 0 : result == 0); traceMatch("Version", config.version, requestVersion, match); return match; } private <V extends Comparable<V>> int compareVersions(Object v1, Object v2) { return ((V) v1).compareTo((V) v2); } @Override public Object getConfig() { return config; } @Override public String toString() { return String.format("Version: %s", config.version + (config.baselineVersion ? "+" : "")); } }; } public static
VersionRoutePredicateFactory
java
quarkusio__quarkus
integration-tests/main/src/main/java/io/quarkus/it/websocket/ServerDtoDecoder.java
{ "start": 231, "end": 732 }
class ____ implements Decoder.TextStream<Dto> { @Override public Dto decode(Reader reader) { try (JsonReader jsonReader = Json.createReader(reader)) { JsonObject jsonObject = jsonReader.readObject(); Dto result = new Dto(); result.setContent(jsonObject.getString("content")); return result; } } @Override public void init(EndpointConfig config) { } @Override public void destroy() { } }
ServerDtoDecoder
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/ser/jackson/RawSerializer.java
{ "start": 471, "end": 1605 }
class ____<T> extends StdSerializer<T> { /** * Constructor takes in expected type of values; but since caller * typically cannot really provide actual type parameter, we will * just take wild card and coerce type. */ public RawSerializer(Class<?> cls) { super(cls); } @Override public void serialize(T value, JsonGenerator g, SerializationContext ctxt) throws JacksonException { g.writeRawValue(value.toString()); } @Override public void serializeWithType(T value, JsonGenerator g, SerializationContext ctxt, TypeSerializer typeSer) throws JacksonException { WritableTypeId typeIdDef = typeSer.writeTypePrefix(g, ctxt, typeSer.typeId(value, JsonToken.VALUE_EMBEDDED_OBJECT)); serialize(value, g, ctxt); typeSer.writeTypeSuffix(g, ctxt, typeIdDef); } @Override public void acceptJsonFormatVisitor(JsonFormatVisitorWrapper visitor, JavaType typeHint) { // type not really known, but since it is a JSON string: visitStringFormat(visitor, typeHint); } }
RawSerializer
java
google__guava
android/guava/src/com/google/common/primitives/Doubles.java
{ "start": 21944, "end": 28416 }
class ____ extends AbstractList<Double> implements RandomAccess, Serializable { final double[] array; final int start; final int end; DoubleArrayAsList(double[] array) { this(array, 0, array.length); } DoubleArrayAsList(double[] array, int start, int end) { this.array = array; this.start = start; this.end = end; } @Override public int size() { return end - start; } @Override public boolean isEmpty() { return false; } @Override public Double get(int index) { checkElementIndex(index, size()); return array[start + index]; } @Override /* * This is an override that is not directly visible to callers, so NewApi will catch calls to * Collection.spliterator() where necessary. */ @IgnoreJRERequirement public Spliterator.OfDouble spliterator() { return Spliterators.spliterator(array, start, end, 0); } @Override public boolean contains(@Nullable Object target) { // Overridden to prevent a ton of boxing return (target instanceof Double) && Doubles.indexOf(array, (Double) target, start, end) != -1; } @Override public int indexOf(@Nullable Object target) { // Overridden to prevent a ton of boxing if (target instanceof Double) { int i = Doubles.indexOf(array, (Double) target, start, end); if (i >= 0) { return i - start; } } return -1; } @Override public int lastIndexOf(@Nullable Object target) { // Overridden to prevent a ton of boxing if (target instanceof Double) { int i = Doubles.lastIndexOf(array, (Double) target, start, end); if (i >= 0) { return i - start; } } return -1; } @Override public Double set(int index, Double element) { checkElementIndex(index, size()); double oldValue = array[start + index]; // checkNotNull for GWT (do not optimize) array[start + index] = checkNotNull(element); return oldValue; } @Override public List<Double> subList(int fromIndex, int toIndex) { int size = size(); checkPositionIndexes(fromIndex, toIndex, size); if (fromIndex == toIndex) { return Collections.emptyList(); } return new DoubleArrayAsList(array, start + fromIndex, start + toIndex); } 
@Override public boolean equals(@Nullable Object object) { if (object == this) { return true; } if (object instanceof DoubleArrayAsList) { DoubleArrayAsList that = (DoubleArrayAsList) object; int size = size(); if (that.size() != size) { return false; } for (int i = 0; i < size; i++) { if (array[start + i] != that.array[that.start + i]) { return false; } } return true; } return super.equals(object); } @Override public int hashCode() { int result = 1; for (int i = start; i < end; i++) { result = 31 * result + Double.hashCode(array[i]); } return result; } @Override public String toString() { StringBuilder builder = new StringBuilder(size() * 12); builder.append('[').append(array[start]); for (int i = start + 1; i < end; i++) { builder.append(", ").append(array[i]); } return builder.append(']').toString(); } double[] toDoubleArray() { return Arrays.copyOfRange(array, start, end); } @GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0; } /** * This is adapted from the regex suggested by {@link Double#valueOf(String)} for prevalidating * inputs. All valid inputs must pass this regex, but it's semantically fine if not all inputs * that pass this regex are valid -- only a performance hit is incurred, not a semantics bug. */ @GwtIncompatible // regular expressions static final java.util.regex.Pattern FLOATING_POINT_PATTERN = fpPattern(); @GwtIncompatible // regular expressions private static java.util.regex.Pattern fpPattern() { /* * We use # instead of * for possessive quantifiers. This lets us strip them out when building * the regex for RE2 (which doesn't support them) but leave them in when building it for * java.util.regex (where we want them in order to avoid catastrophic backtracking). 
*/ String decimal = "(?:\\d+#(?:\\.\\d*#)?|\\.\\d+#)"; String completeDec = decimal + "(?:[eE][+-]?\\d+#)?[fFdD]?"; String hex = "(?:[0-9a-fA-F]+#(?:\\.[0-9a-fA-F]*#)?|\\.[0-9a-fA-F]+#)"; String completeHex = "0[xX]" + hex + "[pP][+-]?\\d+#[fFdD]?"; String fpPattern = "[+-]?(?:NaN|Infinity|" + completeDec + "|" + completeHex + ")"; fpPattern = fpPattern.replace( "#", "+" ); return java.util.regex.Pattern .compile(fpPattern); } /** * Parses the specified string as a double-precision floating point value. The ASCII character * {@code '-'} (<code>'&#92;u002D'</code>) is recognized as the minus sign. * * <p>Unlike {@link Double#parseDouble(String)}, this method returns {@code null} instead of * throwing an exception if parsing fails. Valid inputs are exactly those accepted by {@link * Double#valueOf(String)}, except that leading and trailing whitespace is not permitted. * * <p>This implementation is likely to be faster than {@code Double.parseDouble} if many failures * are expected. * * @param string the string representation of a {@code double} value * @return the floating point value represented by {@code string}, or {@code null} if {@code * string} has a length of zero or cannot be parsed as a {@code double} value * @throws NullPointerException if {@code string} is {@code null} * @since 14.0 */ @GwtIncompatible // regular expressions public static @Nullable Double tryParse(String string) { if (FLOATING_POINT_PATTERN.matcher(string).matches()) { // TODO(lowasser): could be potentially optimized, but only with // extensive testing try { return Double.parseDouble(string); } catch (NumberFormatException e) { // Double.parseDouble has changed specs several times, so fall through // gracefully } } return null; } }
DoubleArrayAsList
java
apache__camel
test-infra/camel-test-infra-pinecone/src/test/java/org/apache/camel/test/infra/pinecone/services/PineconeServiceFactory.java
{ "start": 2360, "end": 2493 }
class ____ extends PineconeLocalContainerInfraService implements PineconeService { } public static
PineconeLocalContainerService
java
spring-projects__spring-security
config/src/test/java/org/springframework/security/config/annotation/web/configurers/HeadersConfigurerTests.java
{ "start": 34557, "end": 35004 }
class ____ { @Bean SecurityFilterChain filterChain(HttpSecurity http) throws Exception { // @formatter:off http .headers((headers) -> headers .defaultsDisabled() .httpPublicKeyPinning((hpkp) -> hpkp .addSha256Pins("d6qzRu9zOECb90Uez27xWltNsj0e1Md7GkYYkVoZWmM=") .includeSubDomains(true))); return http.build(); // @formatter:on } } @Configuration @EnableWebSecurity static
HpkpConfigIncludeSubDomains
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/pc/InstanceIdentityTest.java
{ "start": 1085, "end": 1386 }
class ____ { @Test public void testEnhancedImmutableEntity(SessionFactoryScope scope) { scope.inTransaction( session -> { final ImmutableEntity entity1 = new ImmutableEntity( 1, "entity_1" ); session.persist( entity1 ); // false warning, bytecode enhancement of the test
InstanceIdentityTest
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/id/SelectGenerator.java
{ "start": 2097, "end": 2559 }
class ____ the natural separation of responsibility between the * {@linkplain OnExecutionGenerator generator} and the coordinating code, since its * role is to specify how the generated value is <em>retrieved</em>. * * @see org.hibernate.annotations.NaturalId * @see org.hibernate.id.insert.UniqueKeySelectingDelegate * * @author Gavin King * * @implNote This also implements the {@code select} generation type in {@code hbm.xml} mappings. */ public
breaks
java
quarkusio__quarkus
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/profile/UnlessBuildProfileAllAnyTest.java
{ "start": 2531, "end": 2812 }
class ____ implements UnlessBuildProfileBean { @Override public String profile() { return "anyOf-test"; } } // Not active, the "build" profile is active @ApplicationScoped @UnlessBuildProfile("build") public static
AnyOfTestBean
java
spring-projects__spring-framework
spring-websocket/src/test/java/org/springframework/web/socket/sockjs/transport/session/HttpSockJsSessionTests.java
{ "start": 1929, "end": 3611 }
class ____ extends AbstractSockJsSessionTests<TestAbstractHttpSockJsSession> { protected MockHttpServletRequest servletRequest = new MockHttpServletRequest(); protected MockHttpServletResponse servletResponse = new MockHttpServletResponse(); protected ServerHttpRequest request = new ServletServerHttpRequest(this.servletRequest); protected ServerHttpResponse response = new ServletServerHttpResponse(this.servletResponse); private SockJsFrameFormat frameFormat = new DefaultSockJsFrameFormat("%s"); @Override protected TestAbstractHttpSockJsSession initSockJsSession() { return new TestAbstractHttpSockJsSession(this.sockJsConfig, this.webSocketHandler, null); } @BeforeEach @Override protected void setUp() { super.setUp(); this.servletRequest.setAsyncSupported(true); } @Test void handleInitialRequest() throws Exception { this.session.handleInitialRequest(this.request, this.response, this.frameFormat); assertThat(this.servletResponse.getContentAsString()).isEqualTo("hhh\no"); assertThat(this.servletRequest.isAsyncStarted()).isTrue(); verify(this.webSocketHandler).afterConnectionEstablished(this.session); } @Test void handleSuccessiveRequest() throws Exception { this.session.getMessageCache().add("x"); this.session.handleSuccessiveRequest(this.request, this.response, this.frameFormat); assertThat(this.servletRequest.isAsyncStarted()).isTrue(); assertThat(this.session.wasHeartbeatScheduled()).isTrue(); assertThat(this.session.wasCacheFlushed()).isTrue(); assertThat(this.servletResponse.getContentAsString()).isEqualTo("hhh\n"); verifyNoMoreInteractions(this.webSocketHandler); } static
HttpSockJsSessionTests
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java
{ "start": 39439, "end": 39655 }
class ____ implements ChunkedInference { @Override public Iterator<Chunk> chunksAsByteReference(XContent xcontent) { return Collections.emptyIterator(); } } }
EmptyChunkedInference
java
netty__netty
common/src/main/java/io/netty/util/internal/DefaultPriorityQueue.java
{ "start": 1174, "end": 6608 }
class ____<T extends PriorityQueueNode> extends AbstractQueue<T> implements PriorityQueue<T> { private static final PriorityQueueNode[] EMPTY_ARRAY = new PriorityQueueNode[0]; private final Comparator<T> comparator; private T[] queue; private int size; @SuppressWarnings("unchecked") public DefaultPriorityQueue(Comparator<T> comparator, int initialSize) { this.comparator = ObjectUtil.checkNotNull(comparator, "comparator"); queue = (T[]) (initialSize != 0 ? new PriorityQueueNode[initialSize] : EMPTY_ARRAY); } @Override public int size() { return size; } @Override public boolean isEmpty() { return size == 0; } @Override public boolean contains(Object o) { if (!(o instanceof PriorityQueueNode)) { return false; } PriorityQueueNode node = (PriorityQueueNode) o; return contains(node, node.priorityQueueIndex(this)); } @Override public boolean containsTyped(T node) { return contains(node, node.priorityQueueIndex(this)); } @Override public void clear() { for (int i = 0; i < size; ++i) { T node = queue[i]; if (node != null) { node.priorityQueueIndex(this, INDEX_NOT_IN_QUEUE); queue[i] = null; } } size = 0; } @Override public void clearIgnoringIndexes() { size = 0; } @Override public boolean offer(T e) { if (e.priorityQueueIndex(this) != INDEX_NOT_IN_QUEUE) { throw new IllegalArgumentException("e.priorityQueueIndex(): " + e.priorityQueueIndex(this) + " (expected: " + INDEX_NOT_IN_QUEUE + ") + e: " + e); } // Check that the array capacity is enough to hold values by doubling capacity. if (size >= queue.length) { // Use a policy which allows for a 0 initial capacity. Same policy as JDK's priority queue, double when // "small", then grow by 50% when "large". queue = Arrays.copyOf(queue, queue.length + ((queue.length < 64) ? 
(queue.length + 2) : (queue.length >>> 1))); } bubbleUp(size++, e); return true; } @Override public T poll() { if (size == 0) { return null; } T result = queue[0]; result.priorityQueueIndex(this, INDEX_NOT_IN_QUEUE); T last = queue[--size]; queue[size] = null; if (size != 0) { // Make sure we don't add the last element back. bubbleDown(0, last); } return result; } @Override public T peek() { return (size == 0) ? null : queue[0]; } @SuppressWarnings("unchecked") @Override public boolean remove(Object o) { final T node; try { node = (T) o; } catch (ClassCastException e) { return false; } return removeTyped(node); } @Override public boolean removeTyped(T node) { int i = node.priorityQueueIndex(this); if (!contains(node, i)) { return false; } node.priorityQueueIndex(this, INDEX_NOT_IN_QUEUE); if (--size == 0 || size == i) { // If there are no node left, or this is the last node in the array just remove and return. queue[i] = null; return true; } // Move the last element where node currently lives in the array. T moved = queue[i] = queue[size]; queue[size] = null; // priorityQueueIndex will be updated below in bubbleUp or bubbleDown // Make sure the moved node still preserves the min-heap properties. if (comparator.compare(node, moved) < 0) { bubbleDown(i, moved); } else { bubbleUp(i, moved); } return true; } @Override public void priorityChanged(T node) { int i = node.priorityQueueIndex(this); if (!contains(node, i)) { return; } // Preserve the min-heap property by comparing the new priority with parents/children in the heap. if (i == 0) { bubbleDown(i, node); } else { // Get the parent to see if min-heap properties are violated. 
int iParent = (i - 1) >>> 1; T parent = queue[iParent]; if (comparator.compare(node, parent) < 0) { bubbleUp(i, node); } else { bubbleDown(i, node); } } } @Override public Object[] toArray() { return Arrays.copyOf(queue, size); } @SuppressWarnings("unchecked") @Override public <X> X[] toArray(X[] a) { if (a.length < size) { return (X[]) Arrays.copyOf(queue, size, a.getClass()); } System.arraycopy(queue, 0, a, 0, size); if (a.length > size) { a[size] = null; } return a; } /** * This iterator does not return elements in any particular order. */ @Override public Iterator<T> iterator() { return new PriorityQueueIterator(); } private final
DefaultPriorityQueue
java
apache__camel
components/camel-quartz/src/test/java/org/apache/camel/component/quartz/SpringQuartzConsumerTwoAppsClusteredRecoveryTest.java
{ "start": 1670, "end": 4558 }
class ____ { protected final Logger log = LoggerFactory.getLogger(getClass()); @Test public void testQuartzPersistentStoreClusteredApp() throws Exception { // boot up the database the two apps are going to share inside a clustered quartz setup AbstractXmlApplicationContext db = newAppContext("SpringQuartzConsumerClusteredAppDatabase.xml"); // now launch the first clustered app which will acquire the quartz database lock and become the master AbstractXmlApplicationContext app = newAppContext("SpringQuartzConsumerRecoveryClusteredAppOne.xml"); // now let's simulate a crash of the first app (the quartz instance 'app-one') log.warn("The first app is going to crash NOW!"); IOHelper.close(app); log.warn("Crashed..."); log.warn("Crashed..."); log.warn("Crashed..."); Thread.sleep(2000); // as well as the second one which will run in slave mode as it will not be able to acquire the same lock AbstractXmlApplicationContext app2 = newAppContext("SpringQuartzConsumerRecoveryClusteredAppTwo.xml"); app2.start(); // wait long enough until the second app takes it over... Awaitility.await().untilAsserted(() -> { CamelContext camel2 = app2.getBean("camelContext2-" + getClass().getSimpleName(), CamelContext.class); MockEndpoint mock2 = camel2.getEndpoint("mock:result", MockEndpoint.class); mock2.expectedMinimumMessageCount(2); mock2.expectedMessagesMatches(new ClusteringPredicate(false)); mock2.assertIsSatisfied(); }); // inside the logs one can then clearly see how the route of the second app ('app-two') starts consuming: // 2013-09-30 11:22:20,349 [main ] WARN erTwoAppsClusteredFailoverTest - Crashed... // 2013-09-30 11:22:20,349 [main ] WARN erTwoAppsClusteredFailoverTest - Crashed... // 2013-09-30 11:22:20,349 [main ] WARN erTwoAppsClusteredFailoverTest - Crashed... // 2013-09-30 11:22:35,340 [_ClusterManager] INFO LocalDataSourceJobStore - ClusterManager: detected 1 failed or restarted instances. 
// 2013-09-30 11:22:35,340 [_ClusterManager] INFO LocalDataSourceJobStore - ClusterManager: Scanning for instance "app-one"'s failed in-progress jobs. // 2013-09-30 11:22:35,369 [eduler_Worker-1] INFO triggered - Exchange[ExchangePattern: InOnly, BodyType: String, Body: clustering PONGS!] // and as the last step shutdown the second app as well as the database IOHelper.close(app2, db); } private AbstractXmlApplicationContext newAppContext(String config) { return CamelSpringTestSupport.newAppContext(config, getClass()); } private static
SpringQuartzConsumerTwoAppsClusteredRecoveryTest
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
{ "start": 26278, "end": 27335 }
class ____ { FSTestWrapperGlobTest(boolean useFc) { if (useFc) { this.privWrap = new FileContextTestWrapper(privilegedFc); this.wrap = new FileContextTestWrapper(fc); } else { this.privWrap = new FileSystemTestWrapper(privilegedFs); this.wrap = new FileSystemTestWrapper(fs); } } abstract void run() throws Exception; final FSTestWrapper privWrap; final FSTestWrapper wrap; } /** * Run a glob test on FileSystem. */ private void testOnFileSystem(FSTestWrapperGlobTest test) throws Exception { try { fc.mkdir(new Path(USER_DIR), FsPermission.getDefault(), true); test.run(); } finally { fc.delete(new Path(USER_DIR), true); } } /** * Run a glob test on FileContext. */ private void testOnFileContext(FSTestWrapperGlobTest test) throws Exception { try { fs.mkdirs(new Path(USER_DIR)); test.run(); } finally { cleanupDFS(); } } /** * Accept all paths. */ private static
FSTestWrapperGlobTest
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/concurrent/TimedSemaphoreTest.java
{ "start": 1708, "end": 1790 }
class ____ extends AbstractLangTest { /** * A test thread
TimedSemaphoreTest
java
alibaba__nacos
config/src/test/java/com/alibaba/nacos/config/server/paramcheck/ConfigListenerHttpParamExtractorTest.java
{ "start": 1347, "end": 4607 }
class ____ { ConfigListenerHttpParamExtractor configListenerHttpParamExtractor; @Mock HttpServletRequest httpServletRequest; @Test void testNormal() { String listenerConfigsString = getListenerConfigsString(); Mockito.when(httpServletRequest.getParameter(eq("Listening-Configs"))).thenReturn(listenerConfigsString); configListenerHttpParamExtractor = new ConfigListenerHttpParamExtractor(); configListenerHttpParamExtractor.extractParam(httpServletRequest); } @Test void testError() { String listenerConfigsString = getErrorListenerConfigsString(); Mockito.when(httpServletRequest.getParameter(eq("Listening-Configs"))).thenReturn(listenerConfigsString); configListenerHttpParamExtractor = new ConfigListenerHttpParamExtractor(); try { configListenerHttpParamExtractor.extractParam(httpServletRequest); assertTrue(false); } catch (Throwable throwable) { throwable.printStackTrace(); assertTrue(throwable instanceof IllegalArgumentException); } } private String getListenerConfigsString() { ConfigInfo configInfo1 = new ConfigInfo(); configInfo1.setDataId("2345678901"); configInfo1.setGroup("1234445"); configInfo1.setMd5("234567"); configInfo1.setTenant("222345"); ConfigInfo configInfo2 = new ConfigInfo(); configInfo2.setDataId("2345678902"); configInfo2.setGroup("1234445"); configInfo2.setMd5(null); configInfo2.setTenant(null); ConfigInfo configInfo3 = new ConfigInfo(); configInfo3.setDataId("2345678903"); configInfo3.setGroup("1234445"); configInfo3.setMd5("12345"); configInfo3.setTenant(null); ConfigInfo configInfo4 = new ConfigInfo(); configInfo4.setDataId("234567844"); configInfo4.setGroup("1234445"); configInfo4.setMd5("12345"); configInfo4.setTenant(null); List<ConfigInfo> configInfoList = Arrays.asList(configInfo4, configInfo3, configInfo2, configInfo1); StringBuilder sb = new StringBuilder(); for (ConfigInfo configInfo : configInfoList) { sb.append(configInfo.getDataId()).append(WORD_SEPARATOR); sb.append(configInfo.getGroup()).append(WORD_SEPARATOR); if 
(StringUtils.isBlank(configInfo.getTenant())) { sb.append(configInfo.getMd5()).append(LINE_SEPARATOR); } else { sb.append(configInfo.getMd5()).append(WORD_SEPARATOR); sb.append(configInfo.getTenant()).append(LINE_SEPARATOR); } } return sb.toString(); } private String getErrorListenerConfigsString() { ConfigInfo configInfo1 = new ConfigInfo(); configInfo1.setDataId("2345678901"); List<ConfigInfo> configInfoList = Arrays.asList(configInfo1); StringBuilder sb = new StringBuilder(); for (ConfigInfo configInfo : configInfoList) { sb.append(configInfo.getDataId()).append(WORD_SEPARATOR); } return sb.toString(); } }
ConfigListenerHttpParamExtractorTest
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/NoOpTimelineWriterImpl.java
{ "start": 1757, "end": 3596 }
class ____ extends AbstractService implements TimelineWriter{ private static final Logger LOG = LoggerFactory.getLogger(NoOpTimelineWriterImpl.class); public NoOpTimelineWriterImpl() { super(NoOpTimelineWriterImpl.class.getName()); LOG.info("NoOpTimelineWriter is configured. All the writes to the backend" + " are ignored"); } @Override public TimelineWriteResponse write(TimelineCollectorContext context, TimelineEntities data, UserGroupInformation callerUgi) throws IOException { LOG.debug("NoOpTimelineWriter is configured. Not storing " + "TimelineEntities."); return new TimelineWriteResponse(); } @Override public TimelineWriteResponse write(TimelineCollectorContext context, TimelineDomain domain) throws IOException { LOG.debug("NoOpTimelineWriter is configured. Not storing " + "TimelineEntities."); return new TimelineWriteResponse(); } @Override public TimelineWriteResponse aggregate(TimelineEntity data, TimelineAggregationTrack track) throws IOException { LOG.debug("NoOpTimelineWriter is configured. Not aggregating " + "TimelineEntities."); return new TimelineWriteResponse(); } @Override public void flush() throws IOException { LOG.debug("NoOpTimelineWriter is configured. Ignoring flush call"); } @Override public TimelineHealth getHealthStatus() { return new TimelineHealth(TimelineHealth.TimelineHealthStatus.RUNNING, "NoOpTimelineWriter is configured. "); } }
NoOpTimelineWriterImpl
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/validation/beanvalidation/BeanValidationBeanRegistrationAotProcessor.java
{ "start": 2424, "end": 3059 }
class ____ implements BeanRegistrationAotProcessor { private static final boolean BEAN_VALIDATION_PRESENT = ClassUtils.isPresent( "jakarta.validation.Validation", BeanValidationBeanRegistrationAotProcessor.class.getClassLoader()); private static final Log logger = LogFactory.getLog(BeanValidationBeanRegistrationAotProcessor.class); @Override public @Nullable BeanRegistrationAotContribution processAheadOfTime(RegisteredBean registeredBean) { if (BEAN_VALIDATION_PRESENT) { return BeanValidationDelegate.processAheadOfTime(registeredBean); } return null; } /** * Inner
BeanValidationBeanRegistrationAotProcessor
java
apache__flink
flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/functions/KeyedStateBootstrapFunction.java
{ "start": 2797, "end": 3054 }
class ____ { /** A {@link TimerService} for querying time and registering timers. */ public abstract TimerService timerService(); /** Get key of the element being processed. */ public abstract K getCurrentKey(); } }
Context
java
spring-projects__spring-framework
spring-test/src/main/java/org/springframework/test/context/event/AfterTestExecutionEvent.java
{ "start": 1121, "end": 1256 }
class ____ extends TestContextEvent { public AfterTestExecutionEvent(TestContext source) { super(source); } }
AfterTestExecutionEvent
java
quarkusio__quarkus
independent-projects/bootstrap/core/src/main/java/io/quarkus/bootstrap/jbang/JBangBuilderImpl.java
{ "start": 1017, "end": 5972 }
class ____ { public static Map<String, Object> postBuild( Path appClasses, Path pomFile, List<Map.Entry<String, String>> repositories, List<Map.Entry<String, Path>> dependencies, Properties configurationProperties, boolean nativeImage) { final MavenArtifactResolver quarkusResolver; try { final BootstrapMavenContext mvnCtx = new BootstrapMavenContext(BootstrapMavenContext.config() .setCurrentProject(pomFile.getParent().toString())); final List<RemoteRepository> remoteRepos = new ArrayList<>(mvnCtx.getRemoteRepositories()); repositories.forEach(repo -> { remoteRepos.add(new RemoteRepository.Builder(repo.getKey(), "default", repo.getValue()).build()); }); quarkusResolver = MavenArtifactResolver.builder() .setRepositorySystem(mvnCtx.getRepositorySystem()) .setRepositorySystemSession(mvnCtx.getRepositorySystemSession()) .setRemoteRepositoryManager(mvnCtx.getRemoteRepositoryManager()) .setRemoteRepositories(remoteRepos) .build(); } catch (BootstrapMavenException e) { throw new IllegalStateException("Failed to initialize Quarkus bootstrap Maven resolver", e); } try { Path target = Files.createTempDirectory("quarkus-jbang"); final ResolvedArtifactDependency appArtifact = new ResolvedArtifactDependency("dev.jbang.user", "quarkus", null, "jar", "999-SNAPSHOT", appClasses); final QuarkusBootstrap.Builder builder = QuarkusBootstrap.builder() .setBaseClassLoader(JBangBuilderImpl.class.getClassLoader()) .setMavenArtifactResolver(quarkusResolver) .setProjectRoot(pomFile.getParent()) .setTargetDirectory(target) .setManagingProject(new GACTV("io.quarkus", "quarkus-bom", "", "pom", getQuarkusVersion())) .setForcedDependencies(dependencies.stream().map(s -> { String[] parts = s.getKey().split(":"); // The format of maven coordinate used in what jbang calls `canonical` form. 
// The form is described here: https://github.com/jbangdev/jbang/blob/main/src/main/java/dev/jbang/dependencies/MavenCoordinate.java#L118 // Despite the fact that is non standard it's still used for compatibility reasons by the IntegrationManager: // https://github.com/jbangdev/jbang/blob/main/src/main/java/dev/jbang/spi/IntegrationManager.java#L73 Dependency artifact; if (parts.length == 3) { artifact = new ArtifactDependency(parts[0], parts[1], null, ArtifactCoords.TYPE_JAR, parts[2]); } else if (parts.length == 4) { artifact = new ArtifactDependency(parts[0], parts[1], null, parts[2], parts[3]); } else if (parts.length == 5) { artifact = new ArtifactDependency(parts[0], parts[1], parts[2], parts[3], parts[4]); } else { throw new RuntimeException("Invalid artifact " + s.getKey()); } //artifact.setPath(s.getValue()); return artifact; }).collect(Collectors.toList())) .setAppArtifact(appArtifact) .setIsolateDeployment(true) .setBuildSystemProperties(configurationProperties) .setRuntimeProperties(configurationProperties) .setMode(QuarkusBootstrap.Mode.PROD); CuratedApplication app = builder .build().bootstrap(); if (nativeImage) { System.setProperty("quarkus.native.enabled", "true"); } Map<String, Object> output = new HashMap<>(); app.runInAugmentClassLoader("io.quarkus.deployment.jbang.JBangAugmentorImpl", output); return output; } catch (Exception e) { throw new RuntimeException(e); } } private static String getQuarkusVersion() { try (InputStream in = JBangBuilderImpl.class.getClassLoader().getResourceAsStream("quarkus-version.txt")) { ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buf = new byte[10]; int r; while ((r = in.read(buf)) > 0) { out.write(buf, 0, r); } return new String(out.toByteArray(), StandardCharsets.UTF_8); } catch (IOException e) { throw new RuntimeException(e); } } }
JBangBuilderImpl
java
netty__netty
transport/src/test/java/io/netty/channel/ThreadPerChannelEventLoopGroupTest.java
{ "start": 1457, "end": 3890 }
class ____ { private static final ChannelHandler NOOP_HANDLER = new ChannelHandlerAdapter() { @Override public boolean isSharable() { return true; } }; @Test public void testTerminationFutureSuccessInLog() throws Exception { for (int i = 0; i < 2; i++) { ThreadPerChannelEventLoopGroup loopGroup = new ThreadPerChannelEventLoopGroup(64); runTest(loopGroup); } } @Test public void testTerminationFutureSuccessReflectively() throws Exception { Field terminationFutureField = ThreadPerChannelEventLoopGroup.class.getDeclaredField("terminationFuture"); terminationFutureField.setAccessible(true); final Exception[] exceptionHolder = new Exception[1]; for (int i = 0; i < 2; i++) { ThreadPerChannelEventLoopGroup loopGroup = new ThreadPerChannelEventLoopGroup(64); Promise<?> promise = new DefaultPromise<Void>(GlobalEventExecutor.INSTANCE) { @Override public Promise<Void> setSuccess(Void result) { try { return super.setSuccess(result); } catch (IllegalStateException e) { exceptionHolder[0] = e; throw e; } } }; terminationFutureField.set(loopGroup, promise); runTest(loopGroup); } // The global event executor will not terminate, but this will give the test a chance to fail. GlobalEventExecutor.INSTANCE.awaitTermination(100, TimeUnit.MILLISECONDS); assertNull(exceptionHolder[0]); } private static void runTest(ThreadPerChannelEventLoopGroup loopGroup) throws InterruptedException { int taskCount = 100; EventExecutor testExecutor = new TestEventExecutor(); ChannelGroup channelGroup = new DefaultChannelGroup(testExecutor); while (taskCount-- > 0) { Channel channel = new EmbeddedChannel(NOOP_HANDLER); loopGroup.register(new DefaultChannelPromise(channel, testExecutor)); channelGroup.add(channel); } channelGroup.close().sync(); loopGroup.shutdownGracefully(100, 200, TimeUnit.MILLISECONDS).sync(); assertTrue(loopGroup.isTerminated()); } private static
ThreadPerChannelEventLoopGroupTest
java
micronaut-projects__micronaut-core
http-client/src/main/java/io/micronaut/http/client/netty/BlockHint.java
{ "start": 1118, "end": 2888 }
class ____ public for use in micronaut-oracle-cloud. * * @param blockedThread Thread that is blocked * @param next Next node in the linked list of blocked threads * @author Jonas Konrad * @since 4.0.0 */ @Internal public record BlockHint(Thread blockedThread, @Nullable BlockHint next) { public static BlockHint willBlockThisThread() { return new BlockHint(Thread.currentThread(), null); } @Nullable public static BlockHint combine(@Nullable BlockHint a, @Nullable BlockHint b) { if (a == null) { return b; } else if (b == null) { return a; } else if (a.next == null) { return new BlockHint(a.blockedThread, b); } else if (b.next == null) { return new BlockHint(b.blockedThread, a); } else { throw new UnsupportedOperationException( "would need to build a new linked list here, but we never need this"); } } @NonNull static HttpClientException createException() { return new HttpClientException( "Failed to perform blocking request on the event loop because request execution " + "would be dispatched on the same event loop. This would lead to a deadlock. " + "Either configure the HTTP client to use a different event loop, or use the " + "reactive HTTP client. " + "https://docs.micronaut.io/latest/guide/index.html#clientConfiguration"); } boolean blocks(EventExecutor eventLoop) { BlockHint bh = this; while (bh != null) { if (eventLoop.inEventLoop(bh.blockedThread)) { return true; } bh = bh.next; } return false; } }
is
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/dialect/function/Product.java
{ "start": 239, "end": 912 }
class ____ { private Long id; private int length; private long weight; private BigDecimal price; private Date date; public Long getId() { return id; } public void setId( Long id ) { this.id = id; } public int getLength() { return length; } public void setLength( int length ) { this.length = length; } public long getWeight() { return weight; } public void setWeight( long weight ) { this.weight = weight; } public BigDecimal getPrice() { return price; } public void setPrice( BigDecimal price ) { this.price = price; } public Date getDate() { return date; } public void setDate(Date date) { this.date = date; } }
Product
java
apache__kafka
group-coordinator/src/main/java/org/apache/kafka/coordinator/group/OffsetExpirationCondition.java
{ "start": 1012, "end": 1094 }
class ____ used to check * how offsets for the group should be expired. */ public
is
java
elastic__elasticsearch
x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java
{ "start": 3195, "end": 9421 }
class ____ extends ESSingleNodeTestCase { private TransportService transportService; private ClusterService clusterService; private IndicesService indicesService; private ThreadPool threadPool; private IndexNameExpressionResolver indexNameExpressionResolver; private Client client; private Task transformTask; private final String indexNamePattern = "test_index-"; private String[] testIndices; private int numberOfNodes; private int numberOfIndices; private int numberOfShards; private TestTransportGetCheckpointAction getCheckpointAction; private TestTransportGetCheckpointNodeAction getCheckpointNodeAction; private ClusterState clusterStateWithIndex; @Override @Before public void setUp() throws Exception { super.setUp(); numberOfNodes = randomIntBetween(1, 10); numberOfIndices = randomIntBetween(1, 10); // create at least as many shards as nodes, so every node has at least 1 shard numberOfShards = randomIntBetween(numberOfNodes, numberOfNodes * 3); threadPool = new TestThreadPool("GetCheckpointActionTests"); indexNameExpressionResolver = new MockResolver(); clusterService = getInstanceFromNode(ClusterService.class); indicesService = getInstanceFromNode(IndicesService.class); MockTransport mockTransport = new MockTransport() { @Override protected void onSendRequest(long requestId, String action, TransportRequest request, DiscoveryNode node) { if (action.equals(GetCheckpointNodeAction.NAME)) { GetCheckpointNodeAction.Request getCheckpointNodeActionRequest = (GetCheckpointNodeAction.Request) request; Task task = getCheckpointNodeActionRequest.createTask(123, "type", "action", null, Map.of()); getCheckpointNodeAction.execute( task, getCheckpointNodeActionRequest, ActionListener.wrap(r -> { this.handleResponse(requestId, r); }, e -> { this.handleError(requestId, new TransportException(e.getMessage(), e)); }) ); } } }; transportService = mockTransport.createTransportService( clusterService.getSettings(), threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundAddress 
-> clusterService.localNode(), null, emptySet() ); transportService.start(); transportService.acceptIncomingRequests(); List<String> testIndicesList = new ArrayList<>(); for (int i = 0; i < numberOfIndices; ++i) { testIndicesList.add(indexNamePattern + i); } testIndices = testIndicesList.toArray(new String[0]); clusterStateWithIndex = ClusterState.builder(ClusterStateCreationUtils.state(numberOfNodes, testIndices, numberOfShards)) .putCompatibilityVersions("node01", TransportVersions.V_8_5_0, Map.of()) .build(); client = mock(Client.class); transformTask = new Task( 1L, "persistent", "action", TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + "the_id", TaskId.EMPTY_TASK_ID, Collections.emptyMap() ); getCheckpointAction = new TestTransportGetCheckpointAction(); getCheckpointNodeAction = new TestTransportGetCheckpointNodeAction(); } @Override @After public void tearDown() throws Exception { ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); threadPool = null; super.tearDown(); } public void testEmptyCheckpoint() throws InterruptedException { GetCheckpointAction.Request request = new GetCheckpointAction.Request( Strings.EMPTY_ARRAY, IndicesOptions.LENIENT_EXPAND_OPEN, null, null, TimeValue.timeValueSeconds(5) ); assertCheckpointAction(request, response -> { assertNotNull(response.getCheckpoints()); Map<String, long[]> checkpoints = response.getCheckpoints(); assertTrue(checkpoints.isEmpty()); }); } public void testSingleIndexRequest() throws InterruptedException { GetCheckpointAction.Request request = new GetCheckpointAction.Request( new String[] { indexNamePattern + "0" }, IndicesOptions.LENIENT_EXPAND_OPEN, null, null, TimeValue.timeValueSeconds(5) ); assertCheckpointAction(request, response -> { assertNotNull(response.getCheckpoints()); Map<String, long[]> checkpoints = response.getCheckpoints(); assertEquals(1, checkpoints.size()); assertTrue(checkpoints.containsKey(indexNamePattern + "0")); for (int i = 0; i < numberOfShards; ++i) { assertEquals(42 + i, 
checkpoints.get(indexNamePattern + "0")[i]); } assertEquals(numberOfNodes, getCheckpointNodeAction.getCalls()); }); } public void testMultiIndexRequest() throws InterruptedException { GetCheckpointAction.Request request = new GetCheckpointAction.Request( testIndices, IndicesOptions.LENIENT_EXPAND_OPEN, null, null, TimeValue.timeValueSeconds(5) ); assertCheckpointAction(request, response -> { assertNotNull(response.getCheckpoints()); Map<String, long[]> checkpoints = response.getCheckpoints(); assertEquals(testIndices.length, checkpoints.size()); for (int i = 0; i < this.numberOfIndices; ++i) { assertTrue(checkpoints.containsKey(indexNamePattern + i)); for (int j = 0; j < numberOfShards; ++j) { assertEquals(42 + i + j, checkpoints.get(indexNamePattern + i)[j]); } } assertEquals(numberOfNodes, getCheckpointNodeAction.getCalls()); }); }
TransformGetCheckpointTests
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeRole.java
{ "start": 907, "end": 8821 }
class ____ implements Comparable<DiscoveryNodeRole> { private final String roleName; /** * The name of the role. * * @return the role name */ public final String roleName() { return roleName; } private final String roleNameAbbreviation; /** * The abbreviation of the name of the role. This is used in the cat nodes API to display an abbreviated version of the name of the * role. * * @return the role name abbreviation */ public final String roleNameAbbreviation() { return roleNameAbbreviation; } private final boolean canContainData; /** * Indicates whether a node with this role can contain data. * * @return true if a node with this role can contain data, otherwise false */ public final boolean canContainData() { return canContainData; } private final boolean isKnownRole; /** * Whether or not the role is enabled by default given the specified settings * * @param settings the settings instance * @return true if the role is enabled by default given the specified settings, otherwise false */ public boolean isEnabledByDefault(final Settings settings) { return true; } /** * Validate this role against all configured roles. Implementors are expected to throw an {@link IllegalArgumentException} when the * combination of configured roles is invalid with this role. 
* * @param roles the complete set of configured roles */ public void validateRoles(final List<DiscoveryNodeRole> roles) { } protected DiscoveryNodeRole(final String roleName, final String roleNameAbbreviation) { this(roleName, roleNameAbbreviation, false); } protected DiscoveryNodeRole(final String roleName, final String roleNameAbbreviation, final boolean canContainData) { this(true, roleName, roleNameAbbreviation, canContainData); } private DiscoveryNodeRole( final boolean isKnownRole, final String roleName, final String roleNameAbbreviation, final boolean canContainData ) { this.isKnownRole = isKnownRole; this.roleName = Objects.requireNonNull(roleName); this.roleNameAbbreviation = Objects.requireNonNull(roleNameAbbreviation); this.canContainData = canContainData; } @Override public final boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DiscoveryNodeRole that = (DiscoveryNodeRole) o; return roleName.equals(that.roleName) && roleNameAbbreviation.equals(that.roleNameAbbreviation) && canContainData == that.canContainData && isKnownRole == that.isKnownRole; } @Override public final int hashCode() { return Objects.hash(isKnownRole, roleName(), roleNameAbbreviation(), canContainData()); } @Override public final int compareTo(final DiscoveryNodeRole o) { return roleName.compareTo(o.roleName); } @Override public final String toString() { return "DiscoveryNodeRole{" + "roleName='" + roleName + '\'' + ", roleNameAbbreviation='" + roleNameAbbreviation + '\'' + ", canContainData=" + canContainData + (isKnownRole ? "" : ", isKnownRole=false") + '}'; } /** * Represents the role for a data node. */ public static final DiscoveryNodeRole DATA_ROLE = new DiscoveryNodeRole("data", "d", true) { @Override public boolean isEnabledByDefault(Settings settings) { return DiscoveryNode.isStateless(settings) == false; } }; /** * Represents the role for a content node. 
*/ public static final DiscoveryNodeRole DATA_CONTENT_NODE_ROLE = new DiscoveryNodeRole("data_content", "s", true) { @Override public boolean isEnabledByDefault(final Settings settings) { return DiscoveryNode.hasRole(settings, DiscoveryNodeRole.DATA_ROLE); } }; /** * Represents the role for a hot node. */ public static final DiscoveryNodeRole DATA_HOT_NODE_ROLE = new DiscoveryNodeRole("data_hot", "h", true) { @Override public boolean isEnabledByDefault(final Settings settings) { return DiscoveryNode.hasRole(settings, DiscoveryNodeRole.DATA_ROLE); } }; /** * Represents the role for a warm node. */ public static final DiscoveryNodeRole DATA_WARM_NODE_ROLE = new DiscoveryNodeRole("data_warm", "w", true) { @Override public boolean isEnabledByDefault(final Settings settings) { return DiscoveryNode.hasRole(settings, DiscoveryNodeRole.DATA_ROLE); } }; /** * Represents the role for a cold node. */ public static final DiscoveryNodeRole DATA_COLD_NODE_ROLE = new DiscoveryNodeRole("data_cold", "c", true) { @Override public boolean isEnabledByDefault(final Settings settings) { return DiscoveryNode.hasRole(settings, DiscoveryNodeRole.DATA_ROLE); } }; /** * Represents the role for a frozen node. */ public static final DiscoveryNodeRole DATA_FROZEN_NODE_ROLE = new DiscoveryNodeRole("data_frozen", "f", true) { @Override public boolean isEnabledByDefault(final Settings settings) { return DiscoveryNode.hasRole(settings, DiscoveryNodeRole.DATA_ROLE); } }; /** * Represents the role for an ingest node. */ public static final DiscoveryNodeRole INGEST_ROLE = new DiscoveryNodeRole("ingest", "i"); /** * Represents the role for a master-eligible node. */ public static final DiscoveryNodeRole MASTER_ROLE = new DiscoveryNodeRole("master", "m"); /** * Represents the role for a voting-only node. 
*/ public static final DiscoveryNodeRole VOTING_ONLY_NODE_ROLE = new DiscoveryNodeRole("voting_only", "v") { @Override public boolean isEnabledByDefault(final Settings settings) { return false; } @Override public void validateRoles(final List<DiscoveryNodeRole> roles) { if (roles.contains(MASTER_ROLE) == false) { throw new IllegalArgumentException("voting-only node must be master-eligible"); } } }; /** * Represents the role for a node that can be a remote cluster client. */ public static final DiscoveryNodeRole REMOTE_CLUSTER_CLIENT_ROLE = new DiscoveryNodeRole("remote_cluster_client", "r"); /** * Represents the role for a machine learning node. */ public static final DiscoveryNodeRole ML_ROLE = new DiscoveryNodeRole("ml", "l"); /** * Represents the role for a transform node. */ public static final DiscoveryNodeRole TRANSFORM_ROLE = new DiscoveryNodeRole("transform", "t"); /** * Represents the role for an index node. */ public static final DiscoveryNodeRole INDEX_ROLE = new DiscoveryNodeRole("index", "I", true) { @Override public boolean isEnabledByDefault(Settings settings) { return DiscoveryNode.isStateless(settings); } }; /** * Represents the role for a search node. */ public static final DiscoveryNodeRole SEARCH_ROLE = new DiscoveryNodeRole("search", "S", true) { public boolean isEnabledByDefault(Settings settings) { return false; } }; /** * Represents an unknown role. This can occur if a newer version adds a role that an older version does not know about, or a newer * version removes a role that an older version knows about. */ static
DiscoveryNodeRole
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/cache/interceptor/CacheResolverCustomizationTests.java
{ "start": 8147, "end": 8429 }
class ____ extends AbstractCacheResolver { private NullCacheResolver(CacheManager cacheManager) { super(cacheManager); } @Override protected @Nullable Collection<String> getCacheNames(CacheOperationInvocationContext<?> context) { return null; } } }
NullCacheResolver
java
quarkusio__quarkus
integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/JAXBEntity.java
{ "start": 569, "end": 980 }
class ____ extends PanacheEntity { @XmlAttribute(name = "Named") public String namedAnnotatedProp; @XmlTransient public String transientProp; @XmlAttribute public String defaultAnnotatedProp; @XmlElements({ @XmlElement(name = "array1"), @XmlElement(name = "array2") }) public String arrayAnnotatedProp; public String unAnnotatedProp; }
JAXBEntity
java
alibaba__nacos
core/src/test/java/com/alibaba/nacos/core/code/condition/PathRequestConditionTest.java
{ "start": 947, "end": 1233 }
class ____ { @Test void testPathRequestCondition() { PathRequestCondition pathRequestCondition = new PathRequestCondition("method" + REQUEST_PATH_SEPARATOR + "path"); assertTrue(pathRequestCondition.toString().contains("path")); } }
PathRequestConditionTest
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java
{ "start": 7577, "end": 9612 }
class ____ extends FilterFileSystemProvider { private final Map<Path, TestFileStore> trackedPaths = newConcurrentMap(); private final Path rootDir; TestFileSystemProvider(FileSystem delegateInstance, Path rootDir) { super("diskthreshold://", delegateInstance); this.rootDir = new FilterPath(rootDir, fileSystem); } Path getRootDir() { return rootDir; } void addTrackedPath(Path path) { assertTrue(path + " starts with " + rootDir, path.startsWith(rootDir)); final FileStore fileStore; try { fileStore = super.getFileStore(path); } catch (IOException e) { throw new AssertionError("unexpected", e); } assertNull(trackedPaths.put(path, new TestFileStore(fileStore, getScheme(), path))); } @Override public FileStore getFileStore(Path path) { return getTestFileStore(path); } TestFileStore getTestFileStore(Path path) { final TestFileStore fileStore = trackedPaths.entrySet() .stream() .filter(e -> path.startsWith(e.getKey())) .map(Map.Entry::getValue) .findAny() .orElse(null); if (fileStore != null) { return fileStore; } // We check the total size available for translog in InternalEngine constructor and we allow that here, // expecting to match a unique root path. assertTrue(path + " not tracked and not translog", path.getFileName().toString().equals("translog")); final Set<Path> containingPaths = trackedPaths.keySet().stream().filter(path::startsWith).collect(Collectors.toSet()); assertThat(path + " not contained in a unique tracked path", containingPaths, hasSize(1)); return trackedPaths.get(containingPaths.iterator().next()); } } }
TestFileSystemProvider
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/Event.java
{ "start": 1037, "end": 1145 }
interface ____<TYPE extends Enum<TYPE>> { TYPE getType(); long getTimestamp(); String toString(); }
Event
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/bag/EagerBagsTest.java
{ "start": 971, "end": 2571 }
class ____ { @Test public void testIt(SessionFactoryScope scope) { scope.inTransaction( session -> { EntityB b = new EntityB( 1l, "b" ); EntityC c = new EntityC( 1l, "c" ); EntityC c1 = new EntityC( 2l, "c1" ); b.addAttribute( c ); b.addAttribute( c1 ); EntityB b1 = new EntityB( 2l, "b1" ); EntityC c2 = new EntityC( 3l, "c2" ); EntityC c3 = new EntityC( 4l, "c3" ); EntityC c4 = new EntityC( 5l, "c4" ); b1.addAttribute( c2 ); b1.addAttribute( c3 ); b1.addAttribute( c4 ); EntityA a = new EntityA( 1l, "a" ); a.addAttribute( b ); a.addAttribute( b1 ); session.persist( c ); session.persist( c1 ); session.persist( c2 ); session.persist( c3 ); session.persist( c4 ); session.persist( b ); session.persist( b1 ); session.persist( a ); } ); scope.inTransaction( session -> { EntityA entityA = session.find( EntityA.class, 1l ); Collection<EntityB> attributes = entityA.attributes; assertThat( attributes.size() ).isEqualTo( 2 ); boolean findB = false; boolean findB1 = false; for ( EntityB entityB : attributes ) { Collection<EntityC> entityCS = entityB.attributes; if ( entityB.getName().equals( "b" ) ) { assertThat( entityCS.size() ).isEqualTo( 2 ); findB = true; } else { assertThat( entityCS.size() ).isEqualTo( 3 ); findB1 = true; } } assertTrue( findB ); assertTrue( findB1 ); } ); } @Entity(name = "EntityA") public static
EagerBagsTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/injection/guice/TypeLiteral.java
{ "start": 2514, "end": 2686 }
class ____<T> { final Class<? super T> rawType; final Type type; final int hashCode; /** * Constructs a new type literal. Derives represented
TypeLiteral
java
spring-projects__spring-boot
integration-test/spring-boot-sni-integration-tests/src/intTest/java/org/springframework/boot/sni/SniIntegrationTests.java
{ "start": 1384, "end": 3294 }
class ____ { private static final Map<String, String> SERVER_START_MESSAGES = Map.ofEntries(Map.entry("netty", "Netty started"), Map.entry("tomcat", "Tomcat initialized")); public static final String PRIMARY_SERVER_NAME = "hello.example.com"; public static final String ALT_SERVER_NAME = "hello-alt.example.com"; private static final Integer SERVER_PORT = 8443; private static final Network SHARED_NETWORK = Network.newNetwork(); @ParameterizedTest @CsvSource({ "reactive,netty", "reactive,tomcat", "servlet,tomcat" }) void home(String webStack, String server) { try (ApplicationContainer serverContainer = new ServerApplicationContainer(webStack, server)) { serverContainer.start(); try { Awaitility.await().atMost(Duration.ofSeconds(60)).until(serverContainer::isRunning); } catch (ConditionTimeoutException ex) { System.out.println(serverContainer.getLogs()); throw ex; } String serverLogs = serverContainer.getLogs(); assertThat(serverLogs).contains(SERVER_START_MESSAGES.get(server)); try (ApplicationContainer clientContainer = new ClientApplicationContainer()) { clientContainer.start(); Awaitility.await().atMost(Duration.ofSeconds(60)).until(() -> !clientContainer.isRunning()); String clientLogs = clientContainer.getLogs(); assertServerCalledWithName(clientLogs, PRIMARY_SERVER_NAME); assertServerCalledWithName(clientLogs, ALT_SERVER_NAME); clientContainer.stop(); } serverContainer.stop(); } } private void assertServerCalledWithName(String clientLogs, String serverName) { assertThat(clientLogs).contains("Calling server at 'https://" + serverName + ":8443/'") .contains("Hello from https://" + serverName + ":8443/"); assertThat(clientLogs).contains("Calling server actuator at 'https://" + serverName + ":8444/actuator/health'") .contains("\"status\":\"UP\""); } static final
SniIntegrationTests
java
netty__netty
transport-classes-io_uring/src/main/java/io/netty/channel/uring/MsgHdrMemoryArray.java
{ "start": 701, "end": 2054 }
class ____ { static final long NO_ID = 0; private final MsgHdrMemory[] hdrs; private final int capacity; private final long[] ids; private boolean released; private int idx; MsgHdrMemoryArray(short capacity) { assert capacity >= 0; this.capacity = capacity; hdrs = new MsgHdrMemory[capacity]; ids = new long[capacity]; for (int i = 0; i < hdrs.length; i++) { hdrs[i] = new MsgHdrMemory((short) i); ids[i] = NO_ID; } } boolean isFull() { return idx == hdrs.length; } MsgHdrMemory nextHdr() { if (isFull()) { return null; } return hdrs[idx++]; } void restoreNextHdr(MsgHdrMemory hdr) { assert hdr.idx() == idx - 1; idx--; } MsgHdrMemory hdr(int idx) { return hdrs[idx]; } long id(int idx) { return ids[idx]; } void setId(int idx, long id) { ids[idx] = id; } void clear() { Arrays.fill(ids, 0, idx, NO_ID); idx = 0; } int length() { return idx; } void release() { assert !released; released = true; for (MsgHdrMemory hdr: hdrs) { hdr.release(); } } int capacity() { return capacity; } }
MsgHdrMemoryArray