input stringlengths 28 18.7k | output stringlengths 39 1.69k |
|---|---|
testClearCache ( ) { createCacheLocations ( ) ; fileCacheProvider ( ) . clearCache ( ) ; java . util . List < org . dcm4chee . archive . entity . Location > cacheLocations = selectLocations ( org . dcm4chee . archive . prefetch . PriorsFileCacheProviderIT . PRIORS_STORAGE_SYSTEM_GROUP_ID ) ; for ( int i = 0 ; ( i < 10 ) && ( ( cacheLocations . size ( ) ) > 0 ) ; i ++ ) { org . dcm4chee . archive . prefetch . PriorsFileCacheProviderIT . LOG . info ( "Waiting<sp>for<sp>{}<sp>cache<sp>location(s)<sp>to<sp>be<sp>deleted" , cacheLocations . size ( ) ) ; java . lang . Thread . sleep ( 500 ) ; cacheLocations = selectLocations ( org . dcm4chee . archive . prefetch . PriorsFileCacheProviderIT . PRIORS_STORAGE_SYSTEM_GROUP_ID ) ; } "<AssertPlaceHolder>" ; } size ( ) { return locationPks . size ( ) ; } | org . junit . Assert . assertEquals ( 0 , cacheLocations . size ( ) ) |
testGetTitle ( ) { final java . lang . String errorKey = org . drools . workbench . screens . guided . dtable . client . resources . i18n . GuidedDecisionTableErraiConstants . CalculationTypePage_CalculationType ; final java . lang . String errorMessage = "Title" ; when ( translationService . format ( errorKey ) ) . thenReturn ( errorMessage ) ; final java . lang . String title = page . getTitle ( ) ; "<AssertPlaceHolder>" ; } getTitle ( ) { return translate ( GuidedDecisionTableErraiConstants . AttributeColumnPage_AddNewAttribute ) ; } | org . junit . Assert . assertEquals ( errorMessage , title ) |
testFetchByPrimaryKeyExisting ( ) { com . liferay . dynamic . data . mapping . model . DDMStructure newDDMStructure = addDDMStructure ( ) ; com . liferay . dynamic . data . mapping . model . DDMStructure existingDDMStructure = _persistence . fetchByPrimaryKey ( newDDMStructure . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; } | org . junit . Assert . assertEquals ( existingDDMStructure , newDDMStructure ) |
testInstanceCreationWithName ( ) { java . lang . String in = "myInstantiatedInstance" ; org . osgi . framework . ServiceReference ref = ipojoHelper . getServiceReferenceByName ( org . apache . felix . ipojo . architecture . Architecture . class . getName ( ) , in ) ; "<AssertPlaceHolder>" ; } getName ( ) { return m_name ; } | org . junit . Assert . assertNotNull ( ref ) |
getUsersForOrganization_Admin_Authorized ( ) { java . lang . reflect . Method method = beanClass . getMethod ( "getUsersForOrganization" ) ; boolean isRoleAllowed = isRoleAllowed ( method , UserRoleType . ORGANIZATION_ADMIN ) ; "<AssertPlaceHolder>" ; } isRoleAllowed ( java . lang . reflect . Method , org . oscm . internal . types . enumtypes . UserRoleType ) { javax . annotation . security . RolesAllowed rolesAllowed = method . getAnnotation ( javax . annotation . security . RolesAllowed . class ) ; if ( rolesAllowed == null ) { return true ; } for ( java . lang . String role : rolesAllowed . value ( ) ) { if ( role . equals ( roleType . name ( ) ) ) { return true ; } } return false ; } | org . junit . Assert . assertTrue ( isRoleAllowed ) |
testPrimaryKeys ( ) { java . sql . DatabaseMetaData dbmd = con . getMetaData ( ) ; "<AssertPlaceHolder>" ; java . sql . ResultSet rs = dbmd . getPrimaryKeys ( null , null , "pg_class" ) ; rs . close ( ) ; } getMetaData ( ) { checkClosed ( ) ; if ( ( rsMetaData ) == null ) { rsMetaData = createMetaData ( ) ; } return rsMetaData ; } | org . junit . Assert . assertNotNull ( dbmd ) |
testCommitLog ( ) { org . apache . cayenne . commitlog . db . Auditable2 a1 = context . newObject ( org . apache . cayenne . commitlog . db . Auditable2 . class ) ; a1 . setCharProperty1 ( "yy" ) ; a1 . setCharProperty2 ( "zz" ) ; org . apache . cayenne . commitlog . db . Auditable2 a2 = context . newObject ( org . apache . cayenne . commitlog . db . Auditable2 . class ) ; a2 . setCharProperty1 ( "yy" ) ; a2 . setCharProperty2 ( "zz" ) ; context . commitChanges ( ) ; java . util . List < java . lang . Object [ ] > logs = auditLog . selectAll ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return neighbors . size ( ) ; } | org . junit . Assert . assertEquals ( 2 , logs . size ( ) ) |
test14 ( ) { org . evosuite . Properties . CLIENT_ON_THREAD = true ; org . evosuite . Properties . PRINT_TO_SYSTEM = true ; org . evosuite . Properties . TIMEOUT = 5000000 ; org . evosuite . testcase . DefaultTestCase tc = org . evosuite . symbolic . SymbolicObserverTest . build_test_input_14 ( ) ; System . out . println ( "TestCase=" ) ; System . out . println ( tc . toCode ( ) ) ; org . evosuite . symbolic . PathCondition pc = org . evosuite . symbolic . ConcolicExecution . executeConcolic ( tc ) ; java . util . List < org . evosuite . symbolic . BranchCondition > branch_conditions = pc . getBranchConditions ( ) ; org . evosuite . symbolic . SymbolicObserverTest . printConstraints ( branch_conditions ) ; "<AssertPlaceHolder>" ; } size ( ) { return theTest . size ( ) ; } | org . junit . Assert . assertEquals ( 1 , branch_conditions . size ( ) ) |
returnOverwriteTest7 ( ) { soot . jimple . infoflow . IInfoflow infoflow = initInfoflow ( ) ; java . util . List < java . lang . String > epoints = new java . util . ArrayList < java . lang . String > ( ) ; epoints . add ( "<soot.jimple.infoflow.test.OverwriteTestCode:<sp>void<sp>returnOverwrite7()>" ) ; infoflow . computeInfoflow ( appPath , libPath , epoints , sources , sinks ) ; checkInfoflow ( infoflow , 1 ) ; "<AssertPlaceHolder>" ; } getResults ( ) { return this . results ; } | org . junit . Assert . assertEquals ( 1 , infoflow . getResults ( ) . size ( ) ) |
should_calculate_parameters_count_from_body_with_two_params ( ) { cucumber . runtime . java8 . Java8StepDefinition java8StepDefinition = cucumber . runtime . java8 . Java8StepDefinition . create ( "I<sp>have<sp>some<sp>step" , StepdefBody . A2 . class , twoParamStep ( ) , typeRegistry ) ; "<AssertPlaceHolder>" ; } getParameterCount ( ) { return parameterTypes . length ; } | org . junit . Assert . assertEquals ( java . lang . Integer . valueOf ( 2 ) , java8StepDefinition . getParameterCount ( ) ) |
testMissingFieldInWriteRawRecord ( ) { final java . util . List < org . apache . nifi . serialization . record . RecordField > fields = new java . util . ArrayList ( ) ; fields . add ( new org . apache . nifi . serialization . record . RecordField ( "id" , RecordFieldType . STRING . getDataType ( ) ) ) ; fields . add ( new org . apache . nifi . serialization . record . RecordField ( "name" , RecordFieldType . STRING . getDataType ( ) ) ) ; final org . apache . nifi . serialization . record . RecordSchema schema = new org . apache . nifi . serialization . SimpleRecordSchema ( fields ) ; final java . util . Map < java . lang . String , java . lang . Object > values = new java . util . LinkedHashMap ( ) ; values . put ( "id" , "1" ) ; final org . apache . nifi . serialization . record . Record record = new org . apache . nifi . serialization . record . MapRecord ( schema , values ) ; final java . io . ByteArrayOutputStream baos = new java . io . ByteArrayOutputStream ( ) ; try ( final org . apache . nifi . json . WriteJsonResult writer = new org . apache . nifi . json . WriteJsonResult ( org . mockito . Mockito . mock ( org . apache . nifi . logging . ComponentLog . class ) , schema , new org . apache . nifi . schema . access . SchemaNameAsAttribute ( ) , baos , false , org . apache . nifi . record . NullSuppression . NEVER_SUPPRESS , OutputGrouping . OUTPUT_ARRAY , null , null , null ) ) { writer . beginRecordSet ( ) ; writer . writeRawRecord ( record ) ; writer . finishRecordSet ( ) ; } final byte [ ] data = baos . toByteArray ( ) ; final java . lang . String expected = "[{\"id\":\"1\"}]" ; final java . lang . String output = new java . lang . String ( data , java . nio . charset . StandardCharsets . UTF_8 ) ; "<AssertPlaceHolder>" ; } toByteArray ( ) { final org . apache . nifi . processors . beats . frame . BeatsFrame frame = response . toFrame ( ) ; return encoder . encode ( frame ) ; } | org . junit . Assert . assertEquals ( expected , output ) |
servletConfigValueForBoolean ( ) { final org . jasig . cas . client . configuration . ConfigurationKey < java . lang . Boolean > key = ConfigurationKeys . ACCEPT_ANY_PROXY ; final java . lang . Boolean value = Boolean . TRUE ; final org . springframework . mock . web . MockServletContext context = ( ( org . springframework . mock . web . MockServletContext ) ( this . filterConfig . getServletContext ( ) ) ) ; context . addInitParameter ( key . getName ( ) , value . toString ( ) ) ; "<AssertPlaceHolder>" ; } getBoolean ( org . jasig . cas . client . configuration . ConfigurationKey ) { return this . configurationStrategy . getBoolean ( configurationKey ) ; } | org . junit . Assert . assertEquals ( value , this . impl . getBoolean ( key ) ) |
managerRemovedOnKill ( ) { jetbrains . jetpad . base . edt . TestEdtManager manager = managers . createEdtManager ( "a" ) ; manager . kill ( ) ; "<AssertPlaceHolder>" ; } getManagers ( ) { return java . util . Collections . unmodifiableList ( managers ) ; } | org . junit . Assert . assertTrue ( managers . getManagers ( ) . isEmpty ( ) ) |
testGetEncryptionMethodNotEncrypted ( ) { org . w3c . dom . Document document = xmlHelpers . getXMLDocumentOfSAMLMessage ( message ) ; org . w3c . dom . NodeList nl = xmlHelpers . getAssertions ( document ) ; "<AssertPlaceHolder>" ; } getEncryptionMethod ( org . w3c . dom . Node ) { if ( ( assertion == null ) || ( ! ( assertion . getLocalName ( ) . equals ( "EncryptedAssertion" ) ) ) ) { return "no<sp>encryption" ; } org . w3c . dom . Element encryptionMethod = ( ( org . w3c . dom . Element ) ( ( ( org . w3c . dom . Element ) ( assertion ) ) . getElementsByTagNameNS ( "*" , "EncryptionMethod" ) . item ( 0 ) ) ) ; return getAttributeValueByName ( encryptionMethod , "Algorithm" ) ; } | org . junit . Assert . assertEquals ( "no<sp>encryption" , xmlHelpers . getEncryptionMethod ( nl . item ( 0 ) ) ) |
put_Existing_NotSameKey ( ) { long now = java . lang . System . currentTimeMillis ( ) ; java . util . Date key1 = new java . util . Date ( now ) ; java . util . Date value1 = new java . util . Date ( now ) ; cache . put ( key1 , value1 ) ; java . util . Date key2 = new java . util . Date ( now ) ; java . util . Date value2 = new java . util . Date ( now ) ; cache . put ( key2 , value2 ) ; value2 . setTime ( ( now + 1 ) ) ; "<AssertPlaceHolder>" ; } get ( K ) { return super . get ( key ) ; } | org . junit . Assert . assertSame ( value2 , cache . get ( key2 ) ) |
testReadRpcInputStreamsOneException ( ) { for ( int jn = 0 ; jn < 2 ; jn ++ ) { org . apache . hadoop . hdfs . qjournal . client . TestQuorumJournalManagerUnit . futureReturns ( getJournaledEditsReponse ( 1 , ( jn + 1 ) ) ) . when ( spyLoggers . get ( jn ) ) . getJournaledEdits ( 1 , QuorumJournalManager . QJM_RPC_MAX_TXNS_DEFAULT ) ; } org . apache . hadoop . hdfs . qjournal . client . TestQuorumJournalManagerUnit . futureThrows ( new java . io . IOException ( ) ) . when ( spyLoggers . get ( 2 ) ) . getJournaledEdits ( 1 , QuorumJournalManager . QJM_RPC_MAX_TXNS_DEFAULT ) ; java . util . List < org . apache . hadoop . hdfs . server . namenode . EditLogInputStream > streams = com . google . common . collect . Lists . newArrayList ( ) ; qjm . selectInputStreams ( streams , 1 , true , true ) ; "<AssertPlaceHolder>" ; org . apache . hadoop . hdfs . qjournal . QJMTestUtil . verifyEdits ( streams , 1 , 1 ) ; } size ( ) { return loggers . size ( ) ; } | org . junit . Assert . assertEquals ( 1 , streams . size ( ) ) |
matchesHasNotBeenSavedWithNullGlobalId ( ) { org . mockito . Mockito . when ( mockObject . editingContext ( ) ) . thenReturn ( mockEditingContext ) ; org . mockito . Mockito . when ( mockEditingContext . globalIDForObject ( mockObject ) ) . thenReturn ( null ) ; boolean result = matcher . matchesSafely ( mockObject ) ; "<AssertPlaceHolder>" ; } matchesSafely ( T extends com . webobjects . eocontrol . EOEnterpriseObject ) { com . webobjects . eocontrol . EOEditingContext editingContext = enterpriseObject . editingContext ( ) ; if ( editingContext == null ) { throw new java . lang . IllegalArgumentException ( "The<sp>enterprise<sp>object<sp>has<sp>no<sp>editing<sp>context<sp>reference.<sp>Are<sp>you<sp>sure<sp>the<sp>enterprise<sp>object<sp>was<sp>inserted<sp>into<sp>an<sp>editing<sp>context?" ) ; } com . webobjects . eocontrol . EOGlobalID globalId = editingContext . globalIDForObject ( enterpriseObject ) ; boolean hasBeenSaved = ! ( ( globalId == null ) || ( globalId . isTemporary ( ) ) ) ; @ com . wounit . matchers . SuppressWarnings ( "unchecked" ) com . webobjects . foundation . NSDictionary < java . lang . String , java . lang . Object > committedSnapshotForObject = editingContext . committedSnapshotForObject ( enterpriseObject ) ; hasBeenSaved = hasBeenSaved && ( enterpriseObject . changesFromSnapshot ( committedSnapshotForObject ) . isEmpty ( ) ) ; status = ( hasBeenSaved ) ? "saved" : "unsaved" ; return hasBeenSaved ; } | org . junit . Assert . assertThat ( result , org . hamcrest . CoreMatchers . is ( false ) ) |
shouldFireListenersWhenMessage ( ) { final com . calclab . emite . xtesting . handlers . MessageReceivedTestHandler handler = new com . calclab . emite . xtesting . handlers . MessageReceivedTestHandler ( ) ; room . addMessageReceivedHandler ( handler ) ; final com . calclab . emite . core . stanzas . Message message = new com . calclab . emite . core . stanzas . Message ( "message" , uri ( "room@domain" ) , uri ( "someone@domain/res" ) ) ; eventBus . fireEvent ( new com . calclab . emite . core . events . MessageReceivedEvent ( message ) ) ; "<AssertPlaceHolder>" ; } getLastMessage ( ) { return hasEvent ( ) ? getLastEvent ( ) . getMessage ( ) : null ; } | org . junit . Assert . assertEquals ( message , handler . getLastMessage ( ) ) |
testFormatting ( ) { java . lang . String expectedResult = "ip=hello.wav<sp>dr=no_name_driver<sp>ln=en-US<sp>eik=#<sp>mrt=100<sp>wit=100<sp>pst=50<sp>hw=57616974<sp>in=dtmf_speech<sp>mn=1<sp>mx=1<sp>pr=true" ; org . restcomm . connect . mgcp . AsrSignal asrSignal = new org . restcomm . connect . mgcp . AsrSignal ( driver , org . restcomm . connect . mgcp . AsrSignalTest . DEFAULT_LANG , initialPrompts , endInputKey , maximumRecTimer , waitingInputTimer , timeAfterSpeech , hotWords , input , numberOfDigits , partialResult ) ; java . lang . String actualResult = asrSignal . toString ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( "PhoneNumberParameters<sp>[voiceUrl=" + ( voiceUrl ) ) + ",<sp>smsUrl=" 0 ) + ( voiceMethod ) ) + ",<sp>doVoiceCallerIdLookup=" ) + ( doVoiceCallerIdLookup ) ) + ",<sp>smsUrl=" ) + ( smsUrl ) ) + ",<sp>smsMethod=" ) + ( smsMethod ) ) + ",<sp>ussdUrl=" ) + ( ussdUrl ) ) + ",<sp>ussdMethod=" ) + ( ussdMethod ) ) + ",<sp>faxUrl=" ) + ( faxUrl ) ) + ",<sp>faxMethod=" ) + ( faxMethod ) ) + ",<sp>phoneNumberType=" ) + ( phoneNumberType ) ) + "]" ; } | org . junit . Assert . assertEquals ( expectedResult , actualResult ) |
shouldNotAddPropertyIfPropertyNameIsNull ( ) { final uk . gov . gchq . gaffer . data . element . Properties properties = new uk . gov . gchq . gaffer . data . element . Properties ( ) ; properties . put ( null , "propertyValue1" ) ; "<AssertPlaceHolder>" ; } size ( ) { return map . size ( ) ; } | org . junit . Assert . assertEquals ( 0 , properties . size ( ) ) |
testGetSubscribingToComponent0 ( java . lang . String , java . util . Map , org . apache . commons . configuration . Configuration ) { new com . boozallen . cognition . ingest . storm . topology . Expectations ( ) { { boltConf . getInt ( com . boozallen . cognition . ingest . storm . topology . SUBSCRIBE_TO_BOLT , ( - 1 ) ) ; result = - 1 ; } } ; java . lang . String result = topology . getSubscribingToComponent ( prevComponent , boltNumberToId , boltConf ) ; "<AssertPlaceHolder>" ; } getSubscribingToComponent ( java . lang . String , com . boozallen . cognition . ingest . storm . topology . Map , org . apache . commons . configuration . Configuration ) { int subscribingToBoltNumber = boltConf . getInt ( com . boozallen . cognition . ingest . storm . topology . SUBSCRIBE_TO_BOLT , ( - 1 ) ) ; boolean isSubscribeToBoltDeclared = subscribingToBoltNumber >= 0 ; if ( isSubscribeToBoltDeclared && ( boltNumberToId . containsKey ( subscribingToBoltNumber ) ) ) return boltNumberToId . get ( subscribingToBoltNumber ) ; else return prevComponent ; } | org . junit . Assert . assertThat ( result , org . hamcrest . core . Is . is ( prevComponent ) ) |
testSpace1 ( ) { java . lang . String process = align ( "<sp>" , "foo<sp>bar" ) ; "<AssertPlaceHolder>" ; } align ( java . lang . String , java . lang . String ) { return new osmedile . intellij . stringmanip . align . ColumnAligner ( new osmedile . intellij . stringmanip . align . ColumnAlignerModel ( separator ) ) . align ( text ) ; } | org . junit . Assert . assertThat ( process , org . hamcrest . CoreMatchers . is ( "foo<sp>bar" ) ) |
testMaterializeTopicsWithNoPartitions ( ) { java . util . Map < java . lang . String , java . util . List < org . apache . kafka . common . TopicPartition > > materializedTopics = consumeBenchSpec ( java . util . Arrays . asList ( "topic[1-3]" , "secondTopic" ) ) . materializeTopics ( ) ; java . util . Map < java . lang . String , java . util . List < org . apache . kafka . common . TopicPartition > > expected = new java . util . HashMap ( ) ; expected . put ( "topic1" , new java . util . ArrayList ( ) ) ; expected . put ( "topic2" , new java . util . ArrayList ( ) ) ; expected . put ( "topic3" , new java . util . ArrayList ( ) ) ; expected . put ( "secondTopic" , new java . util . ArrayList ( ) ) ; "<AssertPlaceHolder>" ; } put ( org . apache . kafka . connect . data . Field , java . lang . Object ) { if ( null == field ) throw new org . apache . kafka . connect . errors . DataException ( "field<sp>cannot<sp>be<sp>null." ) ; org . apache . kafka . connect . data . ConnectSchema . validateValue ( field . name ( ) , field . schema ( ) , value ) ; values [ field . index ( ) ] = value ; return this ; } | org . junit . Assert . assertEquals ( expected , materializedTopics ) |
testInvalidConfig ( ) { com . fasterxml . jackson . databind . ObjectMapper om = new com . fasterxml . jackson . databind . ObjectMapper ( ) ; com . fasterxml . jackson . databind . JsonNode json = om . readTree ( com . google . common . io . Resources . getResource ( org . onosproject . dhcprelay . config . DhcpRelayConfigTest . INVALID_CONFIG_FILE_PATH ) ) ; org . onosproject . dhcprelay . config . DefaultDhcpRelayConfig config = new org . onosproject . dhcprelay . config . DefaultDhcpRelayConfig ( ) ; json = json . path ( "apps" ) . path ( org . onosproject . dhcprelay . DhcpRelayManager . DHCP_RELAY_APP ) . path ( DefaultDhcpRelayConfig . KEY ) ; config . init ( org . onosproject . dhcprelay . config . DhcpRelayConfigTest . APP_ID , DefaultDhcpRelayConfig . KEY , json , om , null ) ; "<AssertPlaceHolder>" ; } isValid ( ) { return hasField ( org . onosproject . driver . optical . config . FlowTableConfig . ENTRIES ) ; } | org . junit . Assert . assertFalse ( config . isValid ( ) ) |
testGetScriptOverrideAnnotation ( ) { java . util . Map < java . lang . Class < ? > , org . richfaces . javascript . LibraryFunction > defaultMapping = com . google . common . collect . ImmutableMap . < java . lang . Class < ? > , org . richfaces . javascript . LibraryFunction > of ( org . richfaces . validator . ValidatorWithFacesResource . class , function ) ; serviceImpl = new org . richfaces . javascript . ClientScriptServiceImpl ( defaultMapping ) ; org . richfaces . javascript . LibraryFunction script = getScript ( null , org . richfaces . validator . ValidatorWithFacesResource . class ) ; "<AssertPlaceHolder>" ; } getScript ( javax . faces . component . behavior . ClientBehaviorContext , javax . faces . component . behavior . ClientBehavior ) { javax . faces . context . FacesContext facesContext = behaviorContext . getFacesContext ( ) ; org . richfaces . component . behavior . ComponentControlBehavior controlBehavior = ( ( org . richfaces . component . behavior . ComponentControlBehavior ) ( behavior ) ) ; java . lang . String apiFunctionName = controlBehavior . getOperation ( ) ; java . lang . String targetSourceString = controlBehavior . getTarget ( ) ; java . lang . String selector = controlBehavior . getSelector ( ) ; if ( ( isEmpty ( apiFunctionName ) ) || ( ( isEmpty ( targetSourceString ) ) && ( isEmpty ( selector ) ) ) ) { throw new java . lang . IllegalArgumentException ( "One<sp>of<sp>the<sp>necessary<sp>attributes<sp>is<sp>null<sp>or<sp>empty.<sp>Check<sp>operation<sp>attribute<sp>and<sp>selector<sp>or<sp>target<sp>attributes." ) ; } org . ajax4jsf . javascript . JSFunctionDefinition callback = new org . ajax4jsf . javascript . JSFunctionDefinition ( ) ; callback . addParameter ( new org . ajax4jsf . javascript . JSReference ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . REF_EVENT ) ) ; callback . addParameter ( new org . ajax4jsf . javascript . JSReference ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . 
REF_COMPONENT ) ) ; java . lang . StringBuffer script = new java . lang . StringBuffer ( ) ; script . append ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . REF_COMPONENT ) . append ( "['" ) . append ( apiFunctionName ) . append ( "']." ) . append ( "apply" ) . append ( "(" ) ; java . util . List < java . lang . Object > apiFunctionParams = createSignature ( controlBehavior ) ; script . append ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . REF_COMPONENT ) . append ( "," ) . append ( org . ajax4jsf . javascript . ScriptUtils . toScript ( apiFunctionParams . toArray ( ) ) ) . append ( ");" ) ; callback . addToBody ( script ) ; java . util . Map < java . lang . String , java . lang . Object > parameters = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; parameters . put ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . PARAM_CALLBACK , callback ) ; parameters . put ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . PARAM_TARGET , resolveTargets ( facesContext , targetSourceString ) ) ; parameters . put ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . PARAM_SELECTOR , selector ) ; java . lang . String onBeforeOperation = controlBehavior . getOnbeforeoperation ( ) ; if ( ( null != onBeforeOperation ) && ( ! ( onBeforeOperation . isEmpty ( ) ) ) ) { org . ajax4jsf . javascript . JSFunctionDefinition onBeforeOperationFunction = new org . ajax4jsf . javascript . JSFunctionDefinition ( new org . ajax4jsf . javascript . JSReference ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . REF_EVENT ) ) ; onBeforeOperationFunction . addToBody ( onBeforeOperation ) ; parameters . put ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . PARAM_ONBEFOREOPERATION , onBeforeOperationFunction ) ; } org . ajax4jsf . javascript . JSFunction eventFunction = new org . ajax4jsf . javascript . JSFunction ( org . richfaces . renderkit . 
ComponentControlBehaviorRenderer . FUNC_NAME ) ; eventFunction . addParameter ( new org . ajax4jsf . javascript . JSReference ( org . richfaces . renderkit . ComponentControlBehaviorRenderer . REF_EVENT ) ) ; eventFunction . addParameter ( parameters ) ; java . lang . StringBuffer execution = new java . lang . StringBuffer ( ) ; execution . append ( eventFunction . toScript ( ) ) ; execution . append ( ";<sp>return<sp>false;" ) ; return execution . toString ( ) ; } | org . junit . Assert . assertSame ( function , script ) |
returnsTrueEvenIfGitDirectoriesAreNotEqual ( ) { java . nio . file . Path neq1 = java . nio . file . Paths . get ( getClass ( ) . getResource ( "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1" ) . toURI ( ) ) ; java . nio . file . Path neq2 = java . nio . file . Paths . get ( getClass ( ) . getResource ( "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2" ) . toURI ( ) ) ; "<AssertPlaceHolder>" ; } gitDirectoriesAreEqual ( java . nio . file . Path , java . nio . file . Path ) { uk . ac . ic . wlgitbridge . snapshot . servermock . util . Set < java . lang . String > dir1Contents = uk . ac . ic . wlgitbridge . snapshot . servermock . util . FileUtil . getAllRecursivelyInDirectoryApartFrom ( dir1 , dir1 . resolve ( ".git" ) ) ; uk . ac . ic . wlgitbridge . snapshot . servermock . util . Set < java . lang . String > dir2Contents = uk . ac . ic . wlgitbridge . snapshot . servermock . util . FileUtil . getAllRecursivelyInDirectoryApartFrom ( dir2 , dir2 . resolve ( ".git" ) ) ; return uk . ac . ic . wlgitbridge . snapshot . servermock . util . FileUtil . filesAreEqual ( dir1 , dir2 , dir1Contents , dir2Contents ) ; } | org . junit . Assert . assertTrue ( uk . ac . ic . wlgitbridge . snapshot . servermock . util . FileUtil . gitDirectoriesAreEqual ( neq1 , neq2 ) ) |
testCreatePost ( ) { com . packtpub . techbuzz . entities . Post post = new com . packtpub . techbuzz . entities . Post ( ) ; post . setTitle ( "Sample<sp>Post" ) ; post . setDescription ( "Sample<sp>Post<sp>description" ) ; post . setUserId ( 1 ) ; post . setCreatedOn ( new java . util . Date ( ) ) ; post . addTag ( new com . packtpub . techbuzz . entities . Tag ( 1 ) ) ; com . packtpub . techbuzz . entities . Post createdPost = postService . createPost ( post ) ; "<AssertPlaceHolder>" ; logger . debug ( ( "New<sp>Post<sp>Id:<sp>" + ( createdPost . getId ( ) ) ) ) ; } setUserId ( int ) { this . userId = userId ; } | org . junit . Assert . assertNotNull ( createdPost ) |
testGetClassName ( ) { java . lang . String className = com . picocontainer . classname . ClassName . class . getName ( ) ; com . picocontainer . classname . ClassName key = new com . picocontainer . classname . ClassName ( className ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( "BuilderNode:<sp>" + ( this . getClass ( ) . getName ( ) ) ) + "<sp>(\"" ) + ( getNodeName ( ) ) ) + "\")" ; } | org . junit . Assert . assertEquals ( className , key . toString ( ) ) |
addAllIndexNormalTest ( ) { java . util . List < java . lang . Integer > toAddList = new java . util . ArrayList ( 5 ) ; toAddList . add ( 100 ) ; toAddList . add ( 200 ) ; toAddList . add ( 300 ) ; toAddList . add ( 400 ) ; toAddList . add ( 500 ) ; org . threadly . concurrent . collections . ConcurrentArrayList . DataSet < java . lang . Integer > result = org . threadly . concurrent . collections . ConcurrentArrayListDataSetTest . orderedNormal . addAll ( 5 , toAddList ) . addAll ( 0 , toAddList ) ; java . lang . Integer [ ] expectedResult = new java . lang . Integer [ ] { 100 , 200 , 300 , 400 , 500 , 0 , 1 , 2 , 3 , 4 , 100 , 200 , 300 , 400 , 500 , 5 , 6 , 7 , 8 , 9 } ; "<AssertPlaceHolder>" ; } makeDataSet ( java . lang . Object [ ] , int , int ) { return new org . threadly . concurrent . collections . ConcurrentArrayList . DataSet ( dataArray , startPosition , endPosition , 0 , 0 ) ; } | org . junit . Assert . assertTrue ( result . equals ( org . threadly . concurrent . collections . ConcurrentArrayListDataSetTest . makeDataSet ( expectedResult , 0 , expectedResult . length ) ) ) |
testDynamicQueryByProjectionMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . dynamic . data . lists . model . DDLRecord . class , _dynamicQueryClassLoader ) ; dynamicQuery . setProjection ( com . liferay . portal . kernel . dao . orm . ProjectionFactoryUtil . property ( "recordId" ) ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . in ( "recordId" , new java . lang . Object [ ] { com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) } ) ) ; java . util . List < java . lang . Object > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; } | org . junit . Assert . assertEquals ( 0 , result . size ( ) ) |
testExecutePowerUpTaskForDiscverableNodeWrongOSCreds ( ) { System . out . println ( "[TS]:<sp>testExecutePowerUpTaskForDiscverableNodeWrongOSCreds" ) ; com . vmware . vrack . hms . common . notification . TaskResponse TR = new com . vmware . vrack . hms . common . notification . TaskResponse ( new com . vmware . vrack . hms . common . servernodes . api . ServerNode ( "4" , "10.28.197.204" , "ADMIN" , "ADMIN" ) ) ; com . vmware . vrack . hms . task . oob . ipmi . PowerUpServerTask PUSTask = new com . vmware . vrack . hms . task . oob . ipmi . PowerUpServerTask ( TR ) ; PUSTask . executeTask ( ) ; System . out . println ( ( "[TS]:<sp>Expected<sp>Result<sp>:<sp>Server<sp>Node<sp>is<sp>Powered<sp>=<sp>True<sp>,<sp>Actual<sp>Result<sp>:<sp>Server<sp>Node<sp>is<sp>Powered<sp>=<sp>" + ( TR . getNode ( ) . isPowered ( ) ) ) ) ; "<AssertPlaceHolder>" ; } getNode ( ) { return node ; } | org . junit . Assert . assertTrue ( TR . getNode ( ) . isPowered ( ) ) |
testDeleteProcessInstanceInfoLogByStatus ( ) { int status = pilTestData [ 5 ] . getStatus ( ) ; org . kie . internal . runtime . manager . audit . query . ProcessInstanceLogDeleteBuilder updateBuilder = this . processInstanceLogDelete ( ) . status ( status ) ; int result = updateBuilder . build ( ) . execute ( ) ; "<AssertPlaceHolder>" ; } build ( ) { return new org . kie . internal . query . ParametrizedQuery < org . kie . api . task . model . TaskSummary > ( ) { private org . jbpm . query . jpa . data . QueryWhere queryWhere = new org . jbpm . query . jpa . data . QueryWhere ( getQueryWhere ( ) ) ; @ org . jbpm . services . task . impl . Override public java . util . List < org . kie . api . task . model . TaskSummary > getResultList ( ) { org . jbpm . services . task . commands . TaskSummaryQueryCommand cmd = new org . jbpm . services . task . commands . TaskSummaryQueryCommand ( queryWhere ) ; cmd . setUserId ( userId ) ; return executor . execute ( cmd ) ; } } ; } | org . junit . Assert . assertEquals ( 1 , result ) |
testGetItemSpacing ( ) { org . eclipse . rap . rwt . theme . BoxDimensions actual = org . eclipse . rap . rwt . internal . theme . TabFolderThemeAdapter_Test . getThemeAdapter ( folder ) . getItemMargin ( item ) ; "<AssertPlaceHolder>" ; } getItemMargin ( org . eclipse . swt . widgets . TabItem ) { return getCssBoxDimensions ( "TabItem" , "margin" , item ) . dimensions ; } | org . junit . Assert . assertEquals ( new org . eclipse . rap . rwt . theme . BoxDimensions ( 1 , 1 , 1 , 1 ) , actual ) |
isDynamic ( ) { childResolvers = new java . util . ArrayList ( ) ; childResolvers . add ( getResolver ( null , resolvingContext , false ) ) ; childResolvers . add ( getResolver ( null , resolvingContext , true ) ) ; resolver = createCollectionResolver ( childResolvers ) ; "<AssertPlaceHolder>" ; } isDynamic ( ) { org . mule . runtime . module . extension . internal . runtime . resolver . ValueResolver resolver = getResolver ( null ) ; when ( resolver . isDynamic ( ) ) . thenReturn ( true ) ; set . add ( "whatever" , resolver ) ; org . junit . Assert . assertThat ( set . isDynamic ( ) , org . hamcrest . CoreMatchers . is ( true ) ) ; } | org . junit . Assert . assertThat ( resolver . isDynamic ( ) , org . hamcrest . CoreMatchers . is ( true ) ) |
test_140_2_17to22 ( ) { final org . osgi . framework . BundleContext context = getBundleContext ( ) ; org . osgi . framework . hooks . service . FindHook findHook = new org . osgi . framework . hooks . service . FindHook ( ) { @ org . eclipse . equinox . http . servlet . tests . Override public void find ( org . osgi . framework . BundleContext bundleContext , java . lang . String name , java . lang . String filter , boolean allServices , java . util . Collection < org . osgi . framework . ServiceReference < ? > > references ) { if ( bundleContext != context ) { return ; } for ( java . util . Iterator < org . osgi . framework . ServiceReference < ? > > iterator = references . iterator ( ) ; iterator . hasNext ( ) ; ) { org . osgi . framework . ServiceReference < ? > sr = iterator . next ( ) ; if ( org . eclipse . equinox . http . servlet . tests . DEFAULT . equals ( sr . getProperty ( HttpWhiteboardConstants . HTTP_WHITEBOARD_CONTEXT_NAME ) ) ) { iterator . remove ( ) ; } } } } ; registrations . add ( context . registerService ( org . osgi . framework . hooks . service . FindHook . class , findHook , null ) ) ; java . util . concurrent . atomic . AtomicReference < javax . servlet . ServletContext > sc1 = new java . util . concurrent . atomic . AtomicReference < javax . servlet . ServletContext > ( ) ; java . util . Dictionary < java . lang . String , java . lang . Object > properties = new java . util . Hashtable < java . lang . String , java . lang . Object > ( ) ; properties . put ( HttpWhiteboardConstants . HTTP_WHITEBOARD_LISTENER , "true" ) ; org . osgi . framework . ServiceRegistration < javax . servlet . ServletContextListener > serviceRegistration = context . registerService ( javax . servlet . ServletContextListener . class , new org . eclipse . equinox . http . servlet . tests . util . MockSCL ( sc1 ) , properties ) ; registrations . add ( serviceRegistration ) ; "<AssertPlaceHolder>" ; } get ( ) { throwIfCanceled ( ) ; while ( ! ( isDone ( ) ) ) org . 
eclipse . equinox . concurrent . future . SingleOperationFuture . wait ( ) ; throwIfCanceled ( ) ; return resultValue ; } | org . junit . Assert . assertNull ( sc1 . get ( ) ) |
test_bs_skip_utf32 ( ) { org . jerlang . type . Binary expected = new org . jerlang . type . Binary ( new int [ ] { 'B' } ) ; org . jerlang . type . List params = org . jerlang . type . List . of ( new org . jerlang . type . Binary ( new int [ ] { 0 , 0 , 0 , 65 , 'B' } ) ) ; org . jerlang . type . Term result = org . jerlang . erts . Erlang . apply ( org . jerlang . type . Atom . of ( "bs_skip_utf" ) , org . jerlang . type . Atom . of ( "test32" ) , params ) ; "<AssertPlaceHolder>" ; } of ( byte [ ] ) { return org . jerlang . type . Atom . of ( new java . lang . String ( bytes , ISO_8859_1 ) ) ; } | org . junit . Assert . assertEquals ( expected , result ) |
testParseValueForDatabaseWrite ( ) { java . lang . Long expected = new java . lang . Long ( 123 ) ; java . lang . Object result = parser . parseValueForDatabaseWrite ( "syn123" ) ; "<AssertPlaceHolder>" ; } parseValueForDatabaseWrite ( java . lang . String ) { if ( Boolean . TRUE . toString ( ) . equalsIgnoreCase ( value ) ) { return Boolean . TRUE ; } else if ( Boolean . FALSE . toString ( ) . equalsIgnoreCase ( value ) ) { return Boolean . FALSE ; } throw new java . lang . IllegalArgumentException ( ( "Not<sp>a<sp>boolean:<sp>" + value ) ) ; } | org . junit . Assert . assertEquals ( expected , result ) |
shouldRetrieveLoadBalancerByIdAndAccountId ( ) { org . openstack . atlas . service . domain . services . LoadBalancer dbLoadBalancer = loadBalancerService . create ( loadBalancer ) ; org . openstack . atlas . service . domain . services . LoadBalancer loadBalancer = loadBalancerService . get ( dbLoadBalancer . getId ( ) , dbLoadBalancer . getAccountId ( ) ) ; "<AssertPlaceHolder>" ; } getId ( ) { return id ; } | org . junit . Assert . assertNotNull ( loadBalancer . getId ( ) ) |
testEqualsUnequal1 ( ) { java . lang . String a = "92948782094488478231212478987482988429808779810457634781384756794987" ; int aScale = - 24 ; java . lang . String b = "7472334223847623782375469293018787918347987234564568" ; int bScale = 13 ; java . math . BigDecimal aNumber = new java . math . BigDecimal ( new java . math . BigInteger ( a ) , aScale ) ; java . math . BigDecimal bNumber = new java . math . BigDecimal ( new java . math . BigInteger ( b ) , bScale ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( object instanceof org . teavm . classlib . java . text . TParsePosition ) ) { return false ; } org . teavm . classlib . java . text . TParsePosition pos = ( ( org . teavm . classlib . java . text . TParsePosition ) ( object ) ) ; return ( ( currentPosition ) == ( pos . currentPosition ) ) && ( ( errorIndex ) == ( pos . errorIndex ) ) ; } | org . junit . Assert . assertFalse ( aNumber . equals ( bNumber ) ) |
shouldGetCubeByStarredCubeId ( ) { java . lang . String cubeId = "tomcat_46fd2cc1-0084-42a8-9ffd-35f305a08dcc" ; org . arquillian . cube . spi . Cube cube = createCubeMock ( cubeId ) ; cubeRegistry . addCube ( cube ) ; org . arquillian . cube . spi . Cube < ? > resolvedCube = cubeRegistry . getCube ( "tomcat*" ) ; "<AssertPlaceHolder>" ; } getCube ( java . lang . String ) { return cubeRegistryInstance . get ( ) . getCube ( cubeId ) ; } | org . junit . Assert . assertSame ( cube , resolvedCube ) |
testPrimaryCardinality ( ) { java . lang . String expectedSql = "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.ROOT_ID)<sp>AS<sp>CARD" 2 + ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.ROOT_ID)<sp>AS<sp>CARD" 4 + "<sp>JOIN" ) + "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.OWNER_ID)<sp>AS<sp>CARD" ) + "<sp>FROM<sp>SOME_TABLE<sp>AS<sp>P" ) + "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.ROOT_ID)<sp>AS<sp>CARD" 3 ) + "<sp>ON<sp>(P.ID<sp>=<sp>S.OWNER_ID)" ) + "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.ROOT_ID)<sp>AS<sp>CARD" 5 ) + "<sp>ON<sp>(P0.ID<sp>=<sp>T0.ID)" ) + "<sp>JOIN" ) + "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.ROOT_ID)<sp>AS<sp>CARD" ) + "<sp>FROM<sp>SOME_TABLE<sp>AS<sp>P" ) + "<sp>LEFT<sp>JOIN<sp>SECONDARY_TWO<sp>AS<sp>S" ) + "<sp>ON<sp>(P.ID<sp>=<sp>S.ROOT_ID)" ) + "<sp>WHERE<sp>P.ID<sp>>=<sp>:BMINID<sp>AND<sp>P.ID<sp><=<sp>:BMAXID<sp>GROUP<sp>BY<sp>P.ID)<sp>T1" ) + "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.ROOT_ID)<sp>AS<sp>CARD" 0 ) + "<sp>WHERE<sp>P0.ID<sp>>=<sp>:BMINID<sp>AND<sp>P0.ID<sp><=<sp>:BMAXID" ) + "<sp>(SELECT<sp>P.ID,<sp>+<sp>COUNT(S.ROOT_ID)<sp>AS<sp>CARD" 1 ) ; org . sagebionetworks . repo . model . dbo . TableMapping primaryMapping = mapping ; java . util . List < org . sagebionetworks . repo . model . dbo . TableMapping > secondaryMappings = com . google . common . collect . Lists . newArrayList ( secondaryOne , secondaryTwo ) ; java . lang . String sql = org . sagebionetworks . repo . model . dbo . DMLUtils . createPrimaryCardinalitySql ( primaryMapping , secondaryMappings ) ; "<AssertPlaceHolder>" ; } createPrimaryCardinalitySql ( org . sagebionetworks . repo . model . dbo . TableMapping , java . util . List ) { java . lang . StringBuilder builder = new java . lang . StringBuilder ( ) ; java . lang . String primaryBackupColumnName = org . sagebionetworks . repo . model . dbo . DMLUtils . getBackupIdColumnName ( primaryMapping ) . getColumnName ( ) ; builder . append ( "SELECT<sp>P0." ) ; builder . append ( primaryBackupColumnName ) ; builder . 
append ( "<sp>AND<sp>P0." 3 ) ; int index = 0 ; for ( org . sagebionetworks . repo . model . dbo . TableMapping secondary : secondaryMappings ) { builder . append ( "<sp>AND<sp>P0." 2 ) . append ( index ) . append ( "." ) ; builder . append ( "<sp>AND<sp>P0." 9 ) ; index ++ ; } builder . append ( "<sp>AS<sp>CARD" ) ; builder . append ( ")<sp>T" 0 ) . append ( primaryMapping . getTableName ( ) ) . append ( "<sp>AS<sp>P0" ) ; index = 0 ; for ( org . sagebionetworks . repo . model . dbo . TableMapping secondary : secondaryMappings ) { builder . append ( "<sp>AND<sp>P0." 6 ) ; builder . append ( org . sagebionetworks . repo . model . dbo . DMLUtils . createCardinalitySubQueryForSecondary ( primaryMapping , secondary ) ) ; builder . append ( ")<sp>T" ) . append ( index ) ; builder . append ( "<sp>ON<sp>(P0." ) . append ( primaryBackupColumnName ) ; builder . append ( "<sp>AND<sp>P0." 4 ) ; builder . append ( "T" ) . append ( index ) . append ( "." ) . append ( primaryBackupColumnName ) ; builder . append ( "<sp>AND<sp>P0." 8 ) ; index ++ ; } builder . append ( "<sp>WHERE" ) ; builder . append ( "<sp>P0." ) ; builder . append ( org . sagebionetworks . repo . model . dbo . DMLUtils . getBackupIdColumnName ( primaryMapping ) . getColumnName ( ) ) ; builder . append ( "<sp>AND<sp>P0." 0 ) . append ( org . sagebionetworks . repo . model . dbo . DMLUtils . BIND_MIN_ID ) . append ( "<sp>AND<sp>P0." ) ; builder . append ( org . sagebionetworks . repo . model . dbo . DMLUtils . getBackupIdColumnName ( primaryMapping ) . getColumnName ( ) ) ; builder . append ( "<sp>AND<sp>P0." 7 ) . append ( org . sagebionetworks . repo . model . dbo . DMLUtils . BIND_MAX_ID ) ; builder . append ( "<sp>AND<sp>P0." 5 ) . append ( primaryBackupColumnName ) . append ( "<sp>AND<sp>P0." 1 ) ; return builder . toString ( ) ; } | org . junit . Assert . assertEquals ( expectedSql , sql ) |
testSPSShouldNotLeakXattrIfSatisfyStoragePolicyCallOnECFiles ( ) { org . apache . hadoop . fs . StorageType [ ] [ ] diskTypes = new org . apache . hadoop . fs . StorageType [ ] [ ] { new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . SSD , org . apache . hadoop . fs . StorageType . DISK } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . SSD , org . apache . hadoop . fs . StorageType . DISK } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . SSD , org . apache . hadoop . fs . StorageType . DISK } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . SSD , org . apache . hadoop . fs . StorageType . DISK } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . SSD , org . apache . hadoop . fs . StorageType . DISK } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . DISK , org . apache . hadoop . fs . StorageType . SSD } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . DISK , org . apache . hadoop . fs . StorageType . SSD } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . DISK , org . apache . hadoop . fs . StorageType . SSD } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . DISK , org . apache . hadoop . fs . StorageType . SSD } , new org . apache . hadoop . fs . StorageType [ ] { org . apache . hadoop . fs . StorageType . DISK , org . apache . hadoop . fs . StorageType . SSD } } ; int defaultStripedBlockSize = ( org . apache . hadoop . hdfs . StripedFileTestUtil . getDefaultECPolicy ( ) . getCellSize ( ) ) * 4 ; config . setLong ( DFSConfigKeys . DFS_BLOCK_SIZE_KEY , defaultStripedBlockSize ) ; config . setLong ( DFSConfigKeys . 
DFS_HEARTBEAT_INTERVAL_KEY , 1L ) ; config . setLong ( DFSConfigKeys . DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY , 1L ) ; config . setBoolean ( DFSConfigKeys . DFS_NAMENODE_REDUNDANCY_CONSIDERLOAD_KEY , false ) ; try { hdfsCluster = startCluster ( config , diskTypes , diskTypes . length , org . apache . hadoop . hdfs . server . sps . TestExternalStoragePolicySatisfier . STORAGES_PER_DATANODE , org . apache . hadoop . hdfs . server . sps . TestExternalStoragePolicySatisfier . CAPACITY ) ; dfs = hdfsCluster . getFileSystem ( ) ; dfs . enableErasureCodingPolicy ( org . apache . hadoop . hdfs . StripedFileTestUtil . getDefaultECPolicy ( ) . getName ( ) ) ; org . apache . hadoop . hdfs . protocol . ClientProtocol client = org . apache . hadoop . hdfs . NameNodeProxies . createProxy ( config , hdfsCluster . getFileSystem ( 0 ) . getUri ( ) , org . apache . hadoop . hdfs . protocol . ClientProtocol . class ) . getProxy ( ) ; java . lang . String fooDir = "/foo" ; client . mkdirs ( fooDir , new org . apache . hadoop . fs . permission . FsPermission ( ( ( short ) ( 777 ) ) ) , true ) ; client . setErasureCodingPolicy ( fooDir , org . apache . hadoop . hdfs . StripedFileTestUtil . getDefaultECPolicy ( ) . getName ( ) ) ; final java . lang . String testFile = "/foo/bar" ; long fileLen = 20 * defaultStripedBlockSize ; org . apache . hadoop . hdfs . DFSTestUtil . createFile ( dfs , new org . apache . hadoop . fs . Path ( testFile ) , fileLen , ( ( short ) ( 3 ) ) , 0 ) ; client . setStoragePolicy ( fooDir , HdfsConstants . ONESSD_STORAGE_POLICY_NAME ) ; dfs . satisfyStoragePolicy ( new org . apache . hadoop . fs . Path ( testFile ) ) ; org . apache . hadoop . hdfs . protocol . LocatedBlocks locatedBlocks = client . getBlockLocations ( testFile , 0 , fileLen ) ; for ( org . apache . hadoop . hdfs . protocol . LocatedBlock lb : locatedBlocks . getLocatedBlocks ( ) ) { for ( org . apache . hadoop . fs . StorageType type : lb . 
getStorageTypes ( ) ) { "<AssertPlaceHolder>" ; } } org . apache . hadoop . hdfs . DFSTestUtil . waitForXattrRemoved ( testFile , org . apache . hadoop . hdfs . server . sps . XATTR_SATISFY_STORAGE_POLICY , hdfsCluster . getNamesystem ( ) , 30000 ) ; } finally { shutdownCluster ( ) ; } } getStorageTypes ( ) { java . util . EnumSet < org . apache . hadoop . fs . StorageType > storageTypes = java . util . EnumSet . noneOf ( org . apache . hadoop . fs . StorageType . class ) ; for ( org . apache . hadoop . hdfs . server . blockmanagement . DatanodeStorageInfo dsi : getStorageInfos ( ) ) { storageTypes . add ( dsi . getStorageType ( ) ) ; } return storageTypes ; } | org . junit . Assert . assertEquals ( StorageType . DISK , type ) |
testIsFieldBindingValidWhenBindableNewColumnWithExistingBindingsNoClash ( ) { doReturn ( true ) . when ( plugin ) . isBindable ( ) ; doReturn ( true ) . when ( plugin ) . isNewColumn ( ) ; doReturn ( "$n" ) . when ( plugin ) . getBinding ( ) ; doReturn ( java . util . Collections . singletonList ( mockFactPattern ( "$a" ) ) ) . when ( model ) . getConditions ( ) ; "<AssertPlaceHolder>" ; } isFieldBindingValid ( ) { if ( ! ( isBindable ( ) ) ) { return true ; } final java . lang . String binding = getBinding ( ) ; if ( ( binding == null ) || ( binding . isEmpty ( ) ) ) { return true ; } if ( ! ( isNewColumn ( ) ) ) { if ( binding . equals ( originalCondition ( ) . getBinding ( ) ) ) { return true ; } } return ! ( makeBRLRuleModel ( ) . isVariableNameUsed ( binding ) ) ; } | org . junit . Assert . assertTrue ( plugin . isFieldBindingValid ( ) ) |
canSpecifyMaxSamples ( ) { impl = new org . fishwife . jrugged . SampledQuantile ( 10 ) ; for ( int i = 0 ; i < 20 ; i ++ ) impl . addSample ( 0 ) ; "<AssertPlaceHolder>" ; } getNumSamples ( ) { return samples . size ( ) ; } | org . junit . Assert . assertEquals ( 10 , impl . getNumSamples ( ) ) |
testIsMatching_allFalse ( ) { org . deegree . gml . reference . matcher . MultipleReferencePatternMatcher matcher = new org . deegree . gml . reference . matcher . MultipleReferencePatternMatcher ( ) ; matcher . addMatcherToApply ( mockMatcher ( false ) ) ; matcher . addMatcherToApply ( mockMatcher ( false ) ) ; "<AssertPlaceHolder>" ; } isMatching ( java . lang . String ) { for ( org . deegree . gml . reference . matcher . ReferencePatternMatcher matcherToApply : matchersToApply ) { if ( matcherToApply . isMatching ( url ) ) return true ; } return false ; } | org . junit . Assert . assertThat ( matcher . isMatching ( "test" ) , org . hamcrest . CoreMatchers . is ( false ) ) |
mustNotReturnAnyFaultyServices ( ) { byte [ ] buffer = com . offbynull . portmapper . mappers . upnpigd . externalmessages . RootUpnpIgdResponseTest . FAULTY_BUFFER . getBytes ( "US-ASCII" ) ; com . offbynull . portmapper . mappers . upnpigd . externalmessages . RootUpnpIgdResponse resp = new com . offbynull . portmapper . mappers . upnpigd . externalmessages . RootUpnpIgdResponse ( new java . net . URL ( "http://fake:80/IGD.xml" ) , buffer ) ; java . util . List < com . offbynull . portmapper . mappers . upnpigd . externalmessages . RootUpnpIgdResponse . ServiceReference > services = resp . getServices ( ) ; "<AssertPlaceHolder>" ; } getServices ( ) { return services ; } | org . junit . Assert . assertEquals ( 0 , services . size ( ) ) |
executeExpressionValue ( ) { org . springframework . integration . xquery . support . XQueryParameter param = new org . springframework . integration . xquery . support . XQueryParameter ( "paramName" , "headers['numbers'].?[#this<sp>><sp>5]" ) ; java . util . List < java . lang . Integer > numbers = java . util . Arrays . asList ( 1 , 3 , 4 , 2 , 5 , 6 , 7 ) ; org . springframework . messaging . Message < java . lang . String > message = org . springframework . integration . support . MessageBuilder . withPayload ( "" ) . setHeader ( "numbers" , numbers ) . build ( ) ; @ org . springframework . integration . xquery . support . SuppressWarnings ( "unchecked" ) java . util . Collection < java . lang . Integer > coll = ( ( java . util . Collection < java . lang . Integer > ) ( param . evaluate ( message ) ) ) ; "<AssertPlaceHolder>" ; } evaluate ( org . springframework . messaging . Message ) { if ( isValueSet ) { return parameterValue ; } else { if ( ( messageProcessor ) != null ) { return messageProcessor . processMessage ( message ) ; } else { return null ; } } } | org . junit . Assert . assertEquals ( 2 , coll . size ( ) ) |
testSkipFileInputStreamOffset ( ) { java . io . File f = java . io . File . createTempFile ( "xxx" , "xxx" ) ; f . deleteOnExit ( ) ; java . io . FileOutputStream fout = new java . io . FileOutputStream ( f ) ; fout . write ( "a" . getBytes ( ) ) ; fout . close ( ) ; java . io . FileInputStream fin = new java . io . FileInputStream ( f ) ; try { fin . skip ( 1000 ) ; } catch ( java . lang . Exception e ) { org . junit . Assert . fail ( "can't<sp>skip<sp>more<sp>than<sp>length" ) ; } "<AssertPlaceHolder>" ; } close ( ) { try { this . appContext . close ( ) ; } catch ( javax . naming . NamingException e ) { try { this . kernelContext . close ( ) ; } catch ( javax . naming . NamingException e1 ) { throw e ; } } } | org . junit . Assert . assertEquals ( ( - 1 ) , fin . read ( ) ) |
testDelete ( ) { javax . naming . ldap . LdapContext ctxRoot = getContext ( "uid=admin,ou=system" , getService ( ) , org . apache . directory . server . core . jndi . MixedCaseIT . SUFFIX_DN ) ; java . lang . String dn = "ou=Test" ; javax . naming . directory . Attributes attributes = org . apache . directory . api . ldap . model . ldif . LdifUtils . createJndiAttributes ( "objectClass:<sp>top" , "objectClass:<sp>organizationalUnit" , "ou:<sp>Test" ) ; javax . naming . directory . DirContext ctx = ctxRoot . createSubcontext ( dn , attributes ) ; "<AssertPlaceHolder>" ; ctxRoot . destroySubcontext ( dn ) ; javax . naming . directory . SearchControls sc = new javax . naming . directory . SearchControls ( ) ; sc . setSearchScope ( SearchControls . OBJECT_SCOPE ) ; try { ctxRoot . search ( dn , "(objectClass=*)" , sc ) ; org . junit . Assert . fail ( "Search<sp>should<sp>throw<sp>exception." ) ; } catch ( javax . naming . NamingException e ) { } } createSubcontext ( javax . naming . Name , javax . naming . directory . Attributes ) { if ( null == attrs ) { return ( ( javax . naming . directory . DirContext ) ( super . createSubcontext ( name ) ) ) ; } org . apache . directory . api . ldap . model . name . Dn target = buildTarget ( org . apache . directory . api . ldap . util . JndiUtils . fromName ( name ) ) ; attrs = org . apache . directory . api . ldap . model . entry . AttributeUtils . toCaseInsensitive ( attrs ) ; javax . naming . directory . Attributes attributes = ( ( javax . naming . directory . Attributes ) ( attrs . clone ( ) ) ) ; try { org . apache . directory . api . ldap . model . entry . Entry serverEntry = org . apache . directory . server . core . api . entry . ServerEntryUtils . toServerEntry ( attributes , target , getDirectoryService ( ) . getSchemaManager ( ) ) ; doAddOperation ( target , serverEntry ) ; } catch ( java . lang . Exception e ) { org . apache . directory . api . ldap . util . JndiUtils . wrap ( e ) ; } org . apache . directory . 
server . core . jndi . ServerLdapContext ctx = null ; try { ctx = new org . apache . directory . server . core . jndi . ServerLdapContext ( getService ( ) , getSession ( ) . getEffectivePrincipal ( ) , org . apache . directory . api . ldap . util . JndiUtils . toName ( target ) ) ; } catch ( java . lang . Exception e ) { org . apache . directory . api . ldap . util . JndiUtils . wrap ( e ) ; } return ctx ; } | org . junit . Assert . assertNotNull ( ctx ) |
test ( ) { org . geotools . renderer . lite . StreamingRenderer renderer = new org . geotools . renderer . lite . StreamingRenderer ( ) ; org . geotools . map . MapContent map = new org . geotools . map . MapContent ( ) ; java . net . URL raster = getClass ( ) . getResource ( ( ( rasterBase ) + ".png" ) ) ; org . geotools . coverage . grid . GridCoverage2D gc = readGeoReferencedImageFile ( new java . io . File ( raster . toURI ( ) ) ) ; map . addLayer ( loadGeoReferencedImageFile ( gc , "test" ) ) ; renderer . setMapContent ( map ) ; java . awt . image . BufferedImage image = new java . awt . image . BufferedImage ( 400 , 300 , java . awt . image . BufferedImage . TYPE_INT_ARGB ) ; org . locationtech . jts . geom . Envelope env = new org . locationtech . jts . geom . Envelope ( 1880352 , 5825436 , 1884352 , 5828436 ) ; org . geotools . geometry . jts . ReferencedEnvelope refenv = new org . geotools . geometry . jts . ReferencedEnvelope ( env , gc . getCoordinateReferenceSystem ( ) ) ; java . util . concurrent . atomic . AtomicReference < java . lang . Exception > error = new java . util . concurrent . atomic . AtomicReference ( ) ; renderer . addRenderListener ( new org . geotools . renderer . RenderListener ( ) { @ org . geotools . renderer . lite . gridcoverage2d . Override public void featureRenderer ( org . opengis . feature . simple . SimpleFeature feature ) { } @ org . geotools . renderer . lite . gridcoverage2d . Override public void errorOccurred ( java . lang . Exception e ) { error . set ( e ) ; } } ) ; renderer . paint ( image . createGraphics ( ) , new java . awt . Rectangle ( 400 , 300 ) , refenv ) ; map . dispose ( ) ; "<AssertPlaceHolder>" ; } get ( ) { return synchronizer . innerGet ( ) ; } | org . junit . Assert . assertNull ( error . get ( ) ) |
testRDFConfigurationWriter ( ) { org . aksw . limes . core . io . config . Configuration conf = new org . aksw . limes . core . io . config . Configuration ( ) ; conf . addPrefix ( "geom" , "/resources/RDFWriterTestConfig.ttl" 4 ) ; conf . addPrefix ( "geos" , "/resources/RDFWriterTestConfig.ttl" 8 ) ; conf . addPrefix ( "/resources/RDFWriterTestConfig.ttl" 9 , "http://linkedgeodata.org/ontology/" ) ; org . aksw . limes . core . io . config . KBInfo src = new org . aksw . limes . core . io . config . KBInfo ( ) ; src . setId ( "/resources/RDFWriterTestConfig.ttl" 1 ) ; src . setEndpoint ( "/resources/RDFWriterTestConfig.ttl" 3 ) ; src . setVar ( "?x" ) ; src . setPageSize ( 2000 ) ; src . setRestrictions ( new java . util . ArrayList < java . lang . String > ( java . util . Arrays . asList ( new java . lang . String [ ] { "?x<sp>a<sp>lgdo:RelayBox" } ) ) ) ; src . setProperties ( java . util . Arrays . asList ( new java . lang . String [ ] { "/resources/RDFWriterTestConfig.ttl" 2 } ) ) ; conf . setSourceInfo ( src ) ; org . aksw . limes . core . io . config . KBInfo target = new org . aksw . limes . core . io . config . KBInfo ( ) ; target . setId ( "/resources/RDFWriterTestConfig.ttl" 1 ) ; target . setEndpoint ( "/resources/RDFWriterTestConfig.ttl" 3 ) ; target . setVar ( "/resources/RDFWriterTestConfig.ttl" 5 ) ; target . setPageSize ( 2000 ) ; target . setRestrictions ( new java . util . ArrayList < java . lang . String > ( java . util . Arrays . asList ( new java . lang . String [ ] { "?x<sp>a<sp>lgdo:RelayBox" } ) ) ) ; target . setProperties ( java . util . Arrays . asList ( new java . lang . String [ ] { "/resources/RDFWriterTestConfig.ttl" 2 } ) ) ; conf . setTargetInfo ( target ) ; conf . setMetricExpression ( "geo_hausdorff(x.polygon,<sp>y.polygon)" ) ; conf . setAcceptanceFile ( "lgd_relaybox_verynear.nt" ) ; conf . setAcceptanceThreshold ( 0.9 ) ; conf . setAcceptanceRelation ( "/resources/RDFWriterTestConfig.ttl" 0 ) ; conf . 
setVerificationFile ( "lgd_relaybox_near.nt" ) ; conf . setVerificationThreshold ( 0.5 ) ; conf . setVerificationRelation ( "/resources/RDFWriterTestConfig.ttl" 0 ) ; conf . setExecutionEngine ( "default" ) ; conf . setExecutionPlanner ( "default" ) ; conf . setExecutionRewriter ( "default" ) ; conf . setOutputFormat ( "/resources/RDFWriterTestConfig.ttl" 6 ) ; java . lang . String filePath = ( org . aksw . limes . core . io . config . writer . RDFConfigurationWriterTest . SYSTEM_DIR ) + "/resources/RDFWriterTestConfig.ttl" ; org . aksw . limes . core . io . config . writer . RDFConfigurationWriter writer = new org . aksw . limes . core . io . config . writer . RDFConfigurationWriter ( ) ; try { writer . write ( conf , filePath ) ; } catch ( java . io . IOException e ) { e . printStackTrace ( ) ; } java . io . File file = new java . io . File ( filePath ) ; "<AssertPlaceHolder>" ; file . delete ( ) ; } write ( org . aksw . limes . core . io . mapping . AMapping , java . lang . String ) { java . lang . String format = outputFile . substring ( ( ( outputFile . indexOf ( "n3" 1 ) ) + 1 ) ) . trim ( ) . toLowerCase ( ) ; switch ( format ) { case "n3" : case "nt" : write ( mapping , outputFile , "N-TRIPLE" ) ; break ; case "n3" 0 : write ( mapping , outputFile , "TTL" ) ; break ; case "rdf" : write ( mapping , outputFile , "RDF/XML" ) ; break ; case "jsonld" : write ( mapping , outputFile , "JSON-LD" ) ; break ; default : org . aksw . limes . core . io . mapping . writer . RDFMappingWriter . logger . error ( ( ( "Serialization<sp>" + format ) + "<sp>is<sp>not<sp>yet<sp>implemented,<sp>exit<sp>with<sp>error!" ) ) ; throw new java . lang . RuntimeException ( ) ; } } | org . junit . Assert . assertTrue ( file . exists ( ) ) |
joinCpuLoadBo2List ( ) { java . util . List < com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo > joinCpuLoadBoList = new java . util . ArrayList < com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo > ( ) ; com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo joinCpuLoadBo = com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo . joinCpuLoadBoList ( joinCpuLoadBoList , 1496988667231L ) ; "<AssertPlaceHolder>" ; } joinCpuLoadBoList ( java . util . List , java . lang . Long ) { int boCount = joinCpuLoadBoList . size ( ) ; if ( joinCpuLoadBoList . isEmpty ( ) ) { return com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo . EMPTY_JOIN_CPU_LOAD_BO ; } com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo newJoinCpuLoadBo = new com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo ( ) ; com . navercorp . pinpoint . common . server . bo . stat . join . JoinCpuLoadBo initJoinCpuLoadBo = joinCpuLoadBoList . get ( 0 ) ; newJoinCpuLoadBo . setId ( initJoinCpuLoadBo . getId ( ) ) ; newJoinCpuLoadBo . setTimestamp ( timestamp ) ; double sumJvmCpuLoad = 0.0 ; java . lang . String maxJvmCpuAgentId = initJoinCpuLoadBo . getMaxJvmCpuAgentId ( ) ; double maxJvmCpuLoad = initJoinCpuLoadBo . getMaxJvmCpuLoad ( ) ; java . lang . String minJvmCpuAgentId = initJoinCpuLoadBo . getMinJvmCpuAgentId ( ) ; double minJvmCpuLoad = initJoinCpuLoadBo . getMinJvmCpuLoad ( ) ; double sumSystemCpuLoad = 0.0 ; java . lang . String maxSysCpuAgentId = initJoinCpuLoadBo . getMaxSysCpuAgentId ( ) ; double maxSystemCpuLoad = initJoinCpuLoadBo . getMaxSystemCpuLoad ( ) ; java . lang . String minSysCpuAgentId = initJoinCpuLoadBo . getMinSysCpuAgentId ( ) ; double minSystemCpuLoad = initJoinCpuLoadBo . getMinSystemCpuLoad ( ) ; for ( com . navercorp . pinpoint . common . server . bo . stat . join . 
JoinCpuLoadBo joinCpuLoadBo : joinCpuLoadBoList ) { sumJvmCpuLoad += joinCpuLoadBo . getJvmCpuLoad ( ) ; if ( ( joinCpuLoadBo . getMaxJvmCpuLoad ( ) ) > maxJvmCpuLoad ) { maxJvmCpuLoad = joinCpuLoadBo . getMaxJvmCpuLoad ( ) ; maxJvmCpuAgentId = joinCpuLoadBo . getMaxJvmCpuAgentId ( ) ; } if ( ( joinCpuLoadBo . getMinJvmCpuLoad ( ) ) < minJvmCpuLoad ) { minJvmCpuLoad = joinCpuLoadBo . getMinJvmCpuLoad ( ) ; minJvmCpuAgentId = joinCpuLoadBo . getMinJvmCpuAgentId ( ) ; } sumSystemCpuLoad += joinCpuLoadBo . getSystemCpuLoad ( ) ; if ( ( joinCpuLoadBo . getMaxSystemCpuLoad ( ) ) > maxSystemCpuLoad ) { maxSystemCpuLoad = joinCpuLoadBo . getMaxSystemCpuLoad ( ) ; maxSysCpuAgentId = joinCpuLoadBo . getMaxSysCpuAgentId ( ) ; } if ( ( joinCpuLoadBo . getMinSystemCpuLoad ( ) ) < minSystemCpuLoad ) { minSystemCpuLoad = joinCpuLoadBo . getMinSystemCpuLoad ( ) ; minSysCpuAgentId = joinCpuLoadBo . getMinSysCpuAgentId ( ) ; } } newJoinCpuLoadBo . setJvmCpuLoad ( ( sumJvmCpuLoad / ( ( double ) ( boCount ) ) ) ) ; newJoinCpuLoadBo . setMaxJvmCpuLoad ( maxJvmCpuLoad ) ; newJoinCpuLoadBo . setMaxJvmCpuAgentId ( maxJvmCpuAgentId ) ; newJoinCpuLoadBo . setMinJvmCpuLoad ( minJvmCpuLoad ) ; newJoinCpuLoadBo . setMinJvmCpuAgentId ( minJvmCpuAgentId ) ; newJoinCpuLoadBo . setSystemCpuLoad ( ( sumSystemCpuLoad / ( ( double ) ( boCount ) ) ) ) ; newJoinCpuLoadBo . setMinSystemCpuLoad ( minSystemCpuLoad ) ; newJoinCpuLoadBo . setMinSysCpuAgentId ( minSysCpuAgentId ) ; newJoinCpuLoadBo . setMaxSystemCpuLoad ( maxSystemCpuLoad ) ; newJoinCpuLoadBo . setMaxSysCpuAgentId ( maxSysCpuAgentId ) ; return newJoinCpuLoadBo ; } | org . junit . Assert . assertEquals ( joinCpuLoadBo , JoinCpuLoadBo . EMPTY_JOIN_CPU_LOAD_BO ) |
testSubscriptionReq ( ) { final org . eclipse . hono . adapter . mqtt . CommandSubscription subscription = org . eclipse . hono . adapter . mqtt . CommandSubscription . fromTopic ( "c/tenant/device/qx/#" , null ) ; "<AssertPlaceHolder>" ; } fromTopic ( java . lang . String , org . eclipse . hono . auth . Device ) { try { return new org . eclipse . hono . adapter . mqtt . CommandSubscription ( topic , authenticatedDevice ) ; } catch ( final java . lang . IllegalArgumentException e ) { org . eclipse . hono . adapter . mqtt . CommandSubscription . LOG . debug ( e . getMessage ( ) ) ; return null ; } } | org . junit . Assert . assertNull ( subscription ) |
testMaterializeAppEngineFlexFiles_objectifyListenerWithObjectify6 ( ) { com . google . cloud . tools . eclipse . appengine . newproject . AppEngineProjectConfig config = new com . google . cloud . tools . eclipse . appengine . newproject . AppEngineProjectConfig ( ) ; config . setAppEngineLibraries ( java . util . Collections . singleton ( new com . google . cloud . tools . eclipse . appengine . libraries . model . Library ( "objectify6" ) ) ) ; com . google . cloud . tools . eclipse . appengine . newproject . CodeTemplates . materializeAppEngineFlexFiles ( project , config , monitor ) ; "<AssertPlaceHolder>" ; } objectifyListenerClassExists ( ) { return project . getFile ( "src/main/java/ObjectifyWebListener.java" ) . exists ( ) ; } | org . junit . Assert . assertTrue ( objectifyListenerClassExists ( ) ) |
testEmptyConfigurationsSource ( ) { configureActionProvider ( ) ; actionProvider . setConfigurationAdmin ( configurationAdmin ) ; org . osgi . service . cm . Configuration [ ] configurations = new org . osgi . service . cm . Configuration [ 0 ] ; when ( source . getId ( ) ) . thenReturn ( org . codice . ddf . registry . report . action . provider . RegistryReportActionProviderTest . SAMPLE_SOURCE_ID ) ; when ( configurationAdmin . listConfigurations ( java . lang . String . format ( "(id=%s)" , org . codice . ddf . registry . report . action . provider . RegistryReportActionProviderTest . SAMPLE_SOURCE_ID ) ) ) . thenReturn ( configurations ) ; java . util . List < ddf . action . Action > actions = actionProvider . getActions ( source ) ; "<AssertPlaceHolder>" ; } size ( ) { return map . size ( ) ; } | org . junit . Assert . assertThat ( actions . size ( ) , org . hamcrest . Matchers . is ( 0 ) ) |
testReadFinish ( ) { reader = new com . google . cloud . storage . BlobReadChannel ( options , com . google . cloud . storage . BlobReadChannelTest . BLOB_ID , com . google . cloud . storage . BlobReadChannelTest . EMPTY_RPC_OPTIONS ) ; byte [ ] result = new byte [ ] { } ; java . nio . ByteBuffer readBuffer = java . nio . ByteBuffer . allocate ( com . google . cloud . storage . BlobReadChannelTest . DEFAULT_CHUNK_SIZE ) ; expect ( storageRpcMock . read ( com . google . cloud . storage . BlobReadChannelTest . BLOB_ID . toPb ( ) , com . google . cloud . storage . BlobReadChannelTest . EMPTY_RPC_OPTIONS , 0 , com . google . cloud . storage . BlobReadChannelTest . DEFAULT_CHUNK_SIZE ) ) . andReturn ( com . google . cloud . Tuple . of ( "etag" , result ) ) ; replay ( storageRpcMock ) ; "<AssertPlaceHolder>" ; } read ( com . google . spanner . v1 . ReadRequest ) { return readCallable ( ) . call ( request ) ; } | org . junit . Assert . assertEquals ( ( - 1 ) , reader . read ( readBuffer ) ) |
testScrollingOnRightClick ( ) { openTestURL ( ) ; org . openqa . selenium . WebElement tree = getDriver ( ) . findElement ( org . openqa . selenium . By . id ( TreeScrollingOnRightClick . TREE_ID ) ) ; tree . click ( ) ; for ( int down = 0 ; down < 50 ; down ++ ) { tree . sendKeys ( Keys . ARROW_DOWN ) ; } java . lang . Thread . sleep ( 1000 ) ; org . openqa . selenium . Point item40Location = getTreeNode ( "Node<sp>40" ) . getLocation ( ) ; org . openqa . selenium . WebElement item45 = getTreeNode ( "Node<sp>45" ) ; new org . openqa . selenium . interactions . Actions ( getDriver ( ) ) . moveToElement ( item45 ) . contextClick ( item45 ) . perform ( ) ; org . openqa . selenium . Point item40Location2 = getTreeNode ( "Node<sp>40" ) . getLocation ( ) ; "<AssertPlaceHolder>" ; } getY ( ) { return y ; } | org . junit . Assert . assertEquals ( item40Location . getY ( ) , item40Location2 . getY ( ) ) |
sendAStringMessageAndAssertThatListenerIsInvoked ( ) { this . wireTap . preSend ( new org . springframework . integration . message . GenericMessage < org . opencredo . esper . sample . SampleEvent > ( new org . opencredo . esper . sample . SampleEvent ( ) ) , null ) ; "<AssertPlaceHolder>" ; } getNumberOfTimesInvoked ( ) { return numberOfTimesInvoked ; } | org . junit . Assert . assertEquals ( 1 , listener . getNumberOfTimesInvoked ( ) ) |
writeOptional ( ) { org . javacs . lsp . LSP . respond ( writer , 1 , java . util . Optional . of ( 1 ) ) ; org . javacs . lsp . var expected = "Content-Length:<sp>35\r\n\r\n{\"jsonrpc\":\"2.0\",\"id\":1,\"result\":1}" ; "<AssertPlaceHolder>" ; } bufferToString ( ) { try { org . javacs . lsp . var available = buffer . available ( ) ; org . javacs . lsp . var bytes = new byte [ available ] ; org . javacs . lsp . var read = buffer . read ( bytes ) ; assert read == available ; return new java . lang . String ( bytes , java . nio . charset . Charset . forName ( "UTF-8" ) ) ; } catch ( java . io . IOException e ) { throw new java . lang . RuntimeException ( e ) ; } } | org . junit . Assert . assertThat ( bufferToString ( ) , equalTo ( expected ) ) |
threadPoolRejectionAvailableInGetFallbackUsingExecute ( ) { setUpTenacityCommand ( 1 , 100 ) ; sleepCommand ( 80 ) . observe ( ) ; final int numberOfRejectCommands = 5 ; final java . util . concurrent . ExecutorService executorService = java . util . concurrent . Executors . newFixedThreadPool ( numberOfRejectCommands ) ; final java . util . List < java . util . concurrent . Callable < java . lang . Boolean > > rejectCommands = com . google . common . collect . Lists . newArrayListWithExpectedSize ( numberOfRejectCommands ) ; for ( int i = 0 ; i < numberOfRejectCommands ; i ++ ) { rejectCommands . add ( ( ) -> threadPoolRejectionCommand ( ) . execute ( ) ) ; } final java . util . Collection < java . util . concurrent . Future < java . lang . Boolean > > results = executorService . invokeAll ( rejectCommands ) ; boolean rejectionFound = false ; for ( final java . util . concurrent . Future < java . lang . Boolean > future : results ) { if ( future . get ( ) ) { rejectionFound = true ; } } executorService . shutdownNow ( ) ; "<AssertPlaceHolder>" ; } execute ( ) { return build ( ) . execute ( ) ; } | org . junit . Assert . assertTrue ( rejectionFound ) |
test_that_adapting_page_with_wrong_resourceType_returns_null ( ) { com . day . cq . wcm . api . Page wrongPage = mock ( com . day . cq . wcm . api . Page . class ) ; org . apache . sling . api . resource . Resource wrongContentResource = mock ( org . apache . sling . api . resource . Resource . class ) ; when ( wrongPage . getContentResource ( ) ) . thenReturn ( wrongContentResource ) ; when ( wrongContentResource . isResourceType ( GenericListImpl . RT_GENERIC_LIST ) ) . thenReturn ( false ) ; com . adobe . acs . commons . genericlists . GenericList section = adaptToGenericList ( wrongPage ) ; "<AssertPlaceHolder>" ; } adaptToGenericList ( com . day . cq . wcm . api . Page ) { return adapterFactory . getAdapter ( page , com . adobe . acs . commons . genericlists . GenericList . class ) ; } | org . junit . Assert . assertNull ( section ) |
testResourcesCleanup3 ( ) { final int nbThreads = 5 ; final int nbAppend = 10 ; final java . lang . String baseDir = getTestPath ( ) ; System . out . println ( ( "BaseDir<sp>:<sp>" + baseDir ) ) ; System . out . println ( ( "PID<sp>:<sp>" + ( getPID ( ) ) ) ) ; final net . openhft . chronicle . VanillaChronicle chronicle = new net . openhft . chronicle . VanillaChronicle ( baseDir , VanillaChronicleConfig . DEFAULT . clone ( ) . entriesPerCycle ( ( 1L << 20 ) ) . cycleLength ( 1000 , false ) . cycleFormat ( "yyyyMMddHHmmss" ) . indexBlockSize ( 64 ) . dataBlockSize ( 64 ) . dataCacheCapacity ( ( nbThreads + 1 ) ) . indexCacheCapacity ( 2 ) ) ; chronicle . clear ( ) ; try { final java . util . concurrent . ExecutorService es = java . util . concurrent . Executors . newFixedThreadPool ( nbThreads ) ; final java . util . concurrent . CountDownLatch latch = new java . util . concurrent . CountDownLatch ( nbThreads ) ; for ( int i = 0 ; i < nbThreads ; i ++ ) { es . execute ( new java . lang . Runnable ( ) { @ net . openhft . chronicle . Override public void run ( ) { try { final net . openhft . chronicle . ExcerptAppender appender = chronicle . createAppender ( ) ; for ( int counter = 0 ; counter < nbAppend ; counter ++ ) { appender . startExcerpt ( 4 ) ; appender . writeInt ( counter ) ; appender . finish ( ) ; sleep ( 2000 ) ; } appender . close ( ) ; latch . countDown ( ) ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; } } } ) ; } es . shutdown ( ) ; latch . await ( ) ; chronicle . checkCounts ( 1 , 1 ) ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; } finally { chronicle . close ( ) ; chronicle . clear ( ) ; "<AssertPlaceHolder>" ; } } clear ( ) { } | org . junit . Assert . assertFalse ( new java . io . File ( baseDir ) . exists ( ) ) |
append_validNameValidValue_valueShouldBeAddedToTheMap ( ) { org . geotools . feature . ComplexFeatureBuilder builder = new org . geotools . feature . ComplexFeatureBuilder ( FakeTypes . Mine . MINETYPE_TYPE ) ; org . opengis . feature . ComplexAttribute mineNameProperty = org . geotools . feature . ComplexFeatureBuilderTest . getAMineNameProperty ( "mine<sp>1" , true ) ; builder . append ( FakeTypes . Mine . NAME_mineName , mineNameProperty ) ; java . lang . Object actualValue = builder . values . get ( FakeTypes . Mine . NAME_mineName ) . get ( 0 ) ; "<AssertPlaceHolder>" ; } get ( java . net . URL ) { if ( url . getQuery ( ) . toLowerCase ( ) . contains ( "capabilities" ) ) { java . net . URL caps = org . geotools . test . TestData . getResource ( org . geotools . ows . wms . test . WMSParserTest . class , capsResource ) ; return new org . geotools . ows . MockHttpResponse ( caps , "text/xml" ) ; } else { throw new java . lang . IllegalArgumentException ( ( "Don't<sp>know<sp>how<sp>to<sp>handle<sp>a<sp>get<sp>request<sp>over<sp>" + ( url . toExternalForm ( ) ) ) ) ; } } | org . junit . Assert . assertSame ( mineNameProperty , actualValue ) |
testSetErrors ( ) { final java . util . List < fr . gouv . vitam . common . error . VitamError > errorList = new java . util . ArrayList ( ) ; errorList . add ( fr . gouv . vitam . common . error . VitamErrorTest . vitamError ) ; fr . gouv . vitam . common . error . VitamErrorTest . vitamError . addAllErrors ( errorList ) ; "<AssertPlaceHolder>" ; } getErrors ( ) { return errors ; } | org . junit . Assert . assertTrue ( fr . gouv . vitam . common . error . VitamErrorTest . vitamError . getErrors ( ) . contains ( fr . gouv . vitam . common . error . VitamErrorTest . vitamError ) ) |
testModelEmpty ( ) { final org . prop4j . explain . solvers . SatSolver solver = getInstance ( ) ; final java . util . Map < java . lang . Object , java . lang . Boolean > expected = java . util . Collections . emptyMap ( ) ; final java . util . Map < java . lang . Object , java . lang . Boolean > actual = solver . getModel ( ) ; "<AssertPlaceHolder>" ; } getModel ( ) { if ( ! ( isSatisfiable ( ) ) ) { throw new java . lang . IllegalStateException ( "Problem<sp>is<sp>unsatisfiable" ) ; } final int [ ] indexes = getOracle ( ) . model ( ) ; final java . util . Map < java . lang . Object , java . lang . Boolean > model = new java . util . LinkedHashMap ( ) ; for ( final int index : indexes ) { final org . prop4j . Literal l = getLiteralFromIndex ( index ) ; model . put ( l . var , l . positive ) ; } return model ; } | org . junit . Assert . assertEquals ( expected , actual ) |
testDatumEindeIsNietGevuld ( ) { final java . util . List < nl . bzk . brp . model . basis . BerichtEntiteit > berichtEntiteits = new nl . bzk . brp . bijhouding . business . regels . impl . bijhouding . huwelijkgeregistreerdpartnerschap . acties . registratieeindehuwelijkpartnerschap . BRBY0443 ( ) . voerRegelUit ( maakHuidigeSituatie ( null ) , nieuweSituatie , null , null ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return elementen . isEmpty ( ) ; } | org . junit . Assert . assertTrue ( berichtEntiteits . isEmpty ( ) ) |
getRuntimeInfo ( ) { org . talend . daikon . runtime . RuntimeInfo runtimeInfo = definition . getRuntimeInfo ( ExecutionEngine . BEAM , null , ConnectorTopology . INCOMING ) ; "<AssertPlaceHolder>" ; } getRuntimeInfo ( org . talend . components . api . component . runtime . ExecutionEngine , org . talend . components . api . properties . ComponentProperties , org . talend . components . api . component . ConnectorTopology ) { assertEngineCompatibility ( engine ) ; assertConnectorTopologyCompatibility ( connectorTopology ) ; try { return new org . talend . components . api . component . runtime . JarRuntimeInfo ( new java . net . URL ( "mvn:org.talend.components/processing-runtime" ) , org . talend . components . api . component . runtime . DependenciesReader . computeDependenciesFilePath ( "org.talend.components" , "components-runtime" ) , "org.talend.components.processing.runtime.limit.LimitRuntime" ) ; } catch ( java . net . MalformedURLException e ) { throw new org . talend . components . api . exception . ComponentException ( e ) ; } } | org . junit . Assert . assertNotNull ( runtimeInfo ) |
getServletURI_simpleServlet ( ) { final java . lang . String uri = "/servlet" ; mock . checking ( new org . jmock . Expectations ( ) { { allowing ( req ) . getServletPath ( ) ; will ( returnValue ( uri ) ) ; allowing ( req ) . getPathInfo ( ) ; will ( returnValue ( null ) ) ; } } ) ; "<AssertPlaceHolder>" ; } getServletURI ( javax . servlet . http . HttpServletRequest ) { java . lang . String uriName = req . getServletPath ( ) ; java . lang . String pathInfo = req . getPathInfo ( ) ; if ( pathInfo != null ) uriName = uriName . concat ( pathInfo ) ; if ( ( uriName == null ) || ( ( uriName . length ( ) ) == 0 ) ) uriName = "/" ; uriName = com . ibm . ws . util . WSUtil . resolveURI ( uriName ) ; int sindex ; if ( ( sindex = uriName . indexOf ( ";" ) ) != ( - 1 ) ) { uriName = uriName . substring ( 0 , sindex ) ; } if ( ( uriName . indexOf ( ":" ) ) >= 0 ) { uriName = uriName . replaceAll ( ":" , "%3A" ) ; } return uriName ; } | org . junit . Assert . assertEquals ( uri , handler . getServletURI ( req ) ) |
testFilesOnlyDoesntCauseRecreate ( ) { buildRule . executeTarget ( "testFilesOnlyDoesntCauseRecreateSetup" ) ; java . io . File testFile = new java . io . File ( buildRule . getOutputDir ( ) , "test3.zip" ) ; org . junit . Assume . assumeTrue ( "Could<sp>not<sp>change<sp>file<sp>modification<sp>time" , testFile . setLastModified ( ( ( testFile . lastModified ( ) ) - ( ( org . apache . tools . ant . util . FileUtils . getFileUtils ( ) . getFileTimestampGranularity ( ) ) * 5 ) ) ) ) ; long l = testFile . lastModified ( ) ; buildRule . executeTarget ( "testFilesOnlyDoesntCauseRecreate" ) ; "<AssertPlaceHolder>" ; } lastModified ( ) { if ( ( file ) != null ) { return file . getTimestamp ( ) . getTimeInMillis ( ) ; } return 0 ; } | org . junit . Assert . assertEquals ( l , testFile . lastModified ( ) ) |
testJarCommand ( ) { com . liferay . osgi . bundle . builder . commands . JarCommand jarCommand = new com . liferay . osgi . bundle . builder . commands . JarCommand ( ) ; java . io . File jarFile = new java . io . File ( _projectDir , "build/com.liferay.blade.authenticator.shiro.jar" ) ; jarCommand . build ( _getOSGiBundleBuilderArgs ( jarFile ) ) ; "<AssertPlaceHolder>" ; java . io . File actualDir = temporaryFolder . newFolder ( "actual" ) ; com . liferay . osgi . bundle . builder . commands . OSGiBundleBuilderCommandTest . _unzip ( jarFile , actualDir ) ; com . liferay . osgi . bundle . builder . commands . OSGiBundleBuilderCommandTest . _compareJarDirs ( _expectedDir , actualDir ) ; } exists ( ) { return false ; } | org . junit . Assert . assertTrue ( jarFile . exists ( ) ) |
testDynamicQueryByProjectionMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . dynamic . data . mapping . model . DDMStructure . class , _dynamicQueryClassLoader ) ; dynamicQuery . setProjection ( com . liferay . portal . kernel . dao . orm . ProjectionFactoryUtil . property ( "structureId" ) ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . in ( "structureId" , new java . lang . Object [ ] { com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) } ) ) ; java . util . List < java . lang . Object > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; } | org . junit . Assert . assertEquals ( 0 , result . size ( ) ) |
shouldIncludeLaunchClasspathByDefault ( ) { final org . pitest . mutationtest . config . ReportOptions actual = parseAddingRequiredArgs ( "" ) ; "<AssertPlaceHolder>" ; } isIncludeLaunchClasspath ( ) { return this . includeLaunchClasspath ; } | org . junit . Assert . assertTrue ( actual . isIncludeLaunchClasspath ( ) ) |
testCloneNotSupportedException ( ) { java . lang . Object notCloned = new org . apache . commons . lang3 . text . StrTokenizer ( ) { @ org . apache . commons . lang3 . text . Override java . lang . Object cloneReset ( ) throws java . lang . CloneNotSupportedException { throw new java . lang . CloneNotSupportedException ( "test" ) ; } } . clone ( ) ; "<AssertPlaceHolder>" ; } clone ( ) { com . google . javascript . jscomp . CompilerOptions clone = ( ( com . google . javascript . jscomp . CompilerOptions ) ( super . clone ( ) ) ) ; return clone ; } | org . junit . Assert . assertNull ( notCloned ) |
longWrapper ( ) { model . setMyLong ( 1L ) ; com . google . appengine . api . datastore . Entity entity = meta . modelToEntity ( model ) ; com . google . appengine . api . datastore . Key key = ds . put ( entity ) ; com . google . appengine . api . datastore . Entity entity2 = ds . get ( key ) ; org . slim3 . datastore . model . Hoge model2 = meta . entityToModel ( entity2 ) ; "<AssertPlaceHolder>" ; } getMyLong ( ) { return myLong ; } | org . junit . Assert . assertThat ( model2 . getMyLong ( ) , org . hamcrest . CoreMatchers . is ( 1L ) ) |
testCompareScreen_acceptsBufferedImage_retries ( ) { com . vaadin . testbench . Parameters . setMaxScreenshotRetries ( 4 ) ; try { java . awt . image . BufferedImage mockImg = createNiceMock ( java . awt . image . BufferedImage . class ) ; org . openqa . selenium . WebDriver driver = mockScreenshotDriver ( 4 , false ) ; com . vaadin . testbench . screenshot . ImageComparison icMock = createMock ( com . vaadin . testbench . screenshot . ImageComparison . class ) ; expect ( icMock . imageEqualToReference ( isA ( java . awt . image . BufferedImage . class ) , isA ( java . awt . image . BufferedImage . class ) , eq ( "bar<sp>name" ) , eq ( com . vaadin . testbench . Parameters . getScreenshotComparisonTolerance ( ) ) ) ) . andReturn ( false ) . times ( 4 ) ; replay ( driver , icMock ) ; com . vaadin . testbench . commands . TestBenchCommandExecutor tbce = new com . vaadin . testbench . commands . TestBenchCommandExecutor ( icMock , null ) ; tbce . setDriver ( com . vaadin . testbench . TestBench . createDriver ( driver , tbce ) ) ; "<AssertPlaceHolder>" ; verify ( driver , icMock ) ; } finally { com . vaadin . testbench . Parameters . setMaxScreenshotRetries ( 2 ) ; } } compareScreen ( java . awt . image . BufferedImage , java . lang . String ) { return com . vaadin . testbench . commands . ScreenshotComparator . compareScreen ( reference , referenceName , getCommandExecutor ( ) . getImageComparison ( ) , ( ( org . openqa . selenium . TakesScreenshot ) ( this ) ) , ( ( org . openqa . selenium . HasCapabilities ) ( getDriver ( ) ) ) ) ; } | org . junit . Assert . assertFalse ( tbce . compareScreen ( mockImg , "bar<sp>name" ) ) |
testAschemaReader ( ) { eu . esdihumboldt . hale . common . schema . model . Schema schema = readSchema ( ) ; "<AssertPlaceHolder>" ; } readSchema ( ) { eu . esdihumboldt . hale . io . jdbc . JDBCSchemaReader schemaReader = new eu . esdihumboldt . hale . io . jdbc . JDBCSchemaReader ( ) ; JDBCURI = new eu . esdihumboldt . hale . io . jdbc . mssql . MsSqlURIBuilder ( ) . createJdbcUri ( HOST , DATABASE ) ; schemaReader . setSource ( new eu . esdihumboldt . hale . common . core . io . supplier . NoStreamInputSupplier ( JDBCURI ) ) ; schemaReader . setParameter ( JDBCSchemaReader . PARAM_USER , eu . esdihumboldt . hale . common . core . io . Value . of ( USER_NAME ) ) ; schemaReader . setParameter ( JDBCSchemaReader . PARAM_PASSWORD , eu . esdihumboldt . hale . common . core . io . Value . of ( PASSWORD ) ) ; schemaReader . setParameter ( JDBCSchemaReader . SCHEMAS , eu . esdihumboldt . hale . common . core . io . Value . of ( SCHEMA ) ) ; eu . esdihumboldt . hale . common . core . io . report . IOReport report = schemaReader . execute ( new eu . esdihumboldt . hale . common . core . io . impl . LogProgressIndicator ( ) ) ; org . junit . Assert . assertTrue ( report . isSuccess ( ) ) ; org . junit . Assert . assertTrue ( report . getErrors ( ) . isEmpty ( ) ) ; eu . esdihumboldt . hale . common . schema . model . Schema schema = schemaReader . getSchema ( ) ; org . junit . Assert . assertNotNull ( schema ) ; return schema ; } | org . junit . Assert . assertNotNull ( schema ) |
testFileChecksum ( ) { final org . apache . hadoop . fs . Path p = new org . apache . hadoop . fs . Path ( "har://file-localhost/foo.har/file1" ) ; final org . apache . hadoop . fs . HarFileSystem harfs = new org . apache . hadoop . fs . HarFileSystem ( ) ; try { "<AssertPlaceHolder>" ; } finally { if ( harfs != null ) { harfs . close ( ) ; } } } getFileChecksum ( org . apache . hadoop . fs . Path ) { return dfs . getFileChecksum ( getUriPath ( f ) , Long . MAX_VALUE ) ; } | org . junit . Assert . assertEquals ( null , harfs . getFileChecksum ( p ) ) |
testGetTimeRange ( ) { org . eclipse . tracecompass . tmf . core . timestamp . TmfTimeRange result = fixture . getTimeRange ( ) ; "<AssertPlaceHolder>" ; } getTimeRange ( ) { return fTimeRange ; } | org . junit . Assert . assertNotNull ( result ) |
allLabelsExist ( ) { java . util . Set < java . lang . String > localLabels = new java . util . HashSet ( asList ( org . jenkinsci . plugins . github . pullrequest . events . impl . GitHubPRLabelNotExistsEventTest . LOCALLY_TESTED , org . jenkinsci . plugins . github . pullrequest . events . impl . GitHubPRLabelNotExistsEventTest . MERGE , org . jenkinsci . plugins . github . pullrequest . events . impl . GitHubPRLabelNotExistsEventTest . REVIEWED ) ) ; java . util . List < org . jenkinsci . plugins . github . pullrequest . events . impl . GHLabel > remoteLabels = asList ( testLabel , reviewedLabel , mergeLabel ) ; commonExpectations ( localLabels ) ; when ( issue . getLabels ( ) ) . thenReturn ( remoteLabels ) ; when ( testLabel . getName ( ) ) . thenReturn ( org . jenkinsci . plugins . github . pullrequest . events . impl . GitHubPRLabelNotExistsEventTest . LOCALLY_TESTED ) ; when ( reviewedLabel . getName ( ) ) . thenReturn ( org . jenkinsci . plugins . github . pullrequest . events . impl . GitHubPRLabelNotExistsEventTest . REVIEWED ) ; when ( mergeLabel . getName ( ) ) . thenReturn ( org . jenkinsci . plugins . github . pullrequest . events . impl . GitHubPRLabelNotExistsEventTest . MERGE ) ; org . jenkinsci . plugins . github . pullrequest . GitHubPRCause cause = new org . jenkinsci . plugins . github . pullrequest . events . impl . GitHubPRLabelNotExistsEvent ( labels , false ) . check ( newGitHubPRDecisionContext ( ) . withPrTrigger ( trigger ) . withLocalPR ( localPR ) . withRemotePR ( remotePr ) . withListener ( listener ) . build ( ) ) ; "<AssertPlaceHolder>" ; } build ( ) { requireNonNull ( tagHandler ) ; requireNonNull ( scmSource ) ; requireNonNull ( listener ) ; return new com . github . kostyasha . github . integration . generic . GitHubTagDecisionContext ( remoteTag , localTag , localRepo , tagHandler , scmSource , listener ) ; } | org . junit . Assert . assertNull ( cause ) |
passwordExpired ( ) { final com . ibm . ws . security . collaborator . CollaboratorUtils cu = mock . mock ( com . ibm . ws . security . collaborator . CollaboratorUtils . class ) ; final java . lang . String jaasEntryName = com . ibm . ws . security . authentication . utility . JaasLoginConfigConstants . SYSTEM_WEB_INBOUND ; subjectManager . clearSubjects ( ) ; final javax . security . auth . Subject subject = createAuthenticatedSubject ( ) ; final com . ibm . ws . security . authentication . AuthenticationData authenticationData = createAuthenticationData ( user , password ) ; mock . checking ( new org . jmock . Expectations ( ) { { one ( config ) . getLogoutOnHttpSessionExpire ( ) ; will ( returnValue ( false ) ) ; one ( req ) . getRequestedSessionId ( ) ; will ( returnValue ( "abc" ) ) ; one ( req ) . isRequestedSessionIdValid ( ) ; will ( returnValue ( false ) ) ; allowing ( authnService ) . authenticate ( with ( equal ( jaasEntryName ) ) , with ( com . ibm . ws . webcontainer . security . internal . AuthenticateApiTest . matchingAuthenticationData ( authenticationData ) ) , with ( equal ( ( ( javax . security . auth . Subject ) ( null ) ) ) ) ) ; will ( throwException ( new com . ibm . ws . security . authentication . PasswordExpiredException ( "authn<sp>failed" ) ) ) ; one ( cu ) . getUserRegistryRealm ( securityServiceRef ) ; will ( returnValue ( "joe" ) ) ; allowing ( securityServiceRef ) . getService ( ) ; will ( returnValue ( securityService ) ) ; allowing ( securityService ) . getAuthenticationService ( ) ; will ( returnValue ( authnService ) ) ; allowing ( authnService ) . getAuthCacheService ( ) ; allowing ( req ) . getRemoteUser ( ) ; allowing ( req ) . getUserPrincipal ( ) ; allowing ( ssoCookieHelper ) . getSSOCookiename ( ) ; allowing ( req ) . getCookies ( ) ; allowing ( req ) . getServletContext ( ) ; will ( returnValue ( mockServletContext ) ) ; allowing ( mockServletContext ) . getAttribute ( with ( any ( java . lang . String . 
class ) ) ) ; will ( returnValue ( null ) ) ; allowing ( unauthSubjectService ) . getUnauthenticatedSubject ( ) ; } } ) ; com . ibm . ws . webcontainer . security . AuthenticateApi authApi = new com . ibm . ws . webcontainer . security . AuthenticateApi ( ssoCookieHelper , securityServiceRef , cu , null , new com . ibm . wsspi . kernel . service . utils . ConcurrentServiceReferenceMap < java . lang . String , com . ibm . ws . webcontainer . security . UnprotectedResourceService > ( "unprotectedResourceService" ) , unauthSubjectService ) ; com . ibm . ws . webcontainer . security . internal . BasicAuthAuthenticator basicAuthAuthenticator = new com . ibm . ws . webcontainer . security . internal . BasicAuthAuthenticator ( authnService , userRegistry , ssoCookieHelper , config ) ; try { authApi . login ( req , resp , user , password , config , basicAuthAuthenticator ) ; org . junit . Assert . fail ( "PasswordExpiredException<sp>not<sp>thrown!" ) ; } catch ( java . lang . Exception e ) { boolean foundException = false ; if ( e instanceof com . ibm . websphere . security . web . PasswordExpiredException ) { foundException = true ; } "<AssertPlaceHolder>" ; } } getMessage ( ) { return message ; } | org . junit . Assert . assertEquals ( e . getMessage ( ) , true , foundException ) |
testGetAdapter_forDisplayAdapter ( ) { java . lang . Object adapter = display . getAdapter ( org . eclipse . swt . internal . widgets . IDisplayAdapter . class ) ; "<AssertPlaceHolder>" ; } getAdapter ( java . lang . Class ) { if ( adapter == ( org . eclipse . ui . model . IWorkbenchAdapter . class ) ) { return this ; } return org . eclipse . core . runtime . Platform . getAdapterManager ( ) . getAdapter ( this , adapter ) ; } | org . junit . Assert . assertTrue ( ( adapter instanceof org . eclipse . swt . internal . widgets . IDisplayAdapter ) ) |
testBasic ( ) { org . infinispan . manager . CacheContainer cacheContainer = ( ( org . infinispan . manager . CacheContainer ) ( new javax . naming . InitialContext ( ) . lookup ( "java:jboss/infinispan/container/server" ) ) ) ; org . infinispan . Cache < java . lang . String , java . lang . String > cache = cacheContainer . getCache ( "default" ) ; cache . put ( "ham" , "biscuit" ) ; "<AssertPlaceHolder>" ; } get ( org . wildfly . swarm . swagger . SwaggerConfig$Key ) { return config . get ( key ) ; } | org . junit . Assert . assertEquals ( "biscuit" , cache . get ( "ham" ) ) |
testGetElementCount ( ) { "<AssertPlaceHolder>" ; } getElementCount ( ) { return 4 ; } | org . junit . Assert . assertThat ( group . getElementCount ( ) , org . hamcrest . CoreMatchers . is ( org . hamcrest . CoreMatchers . equalTo ( 3 ) ) ) |
canProcessSuccessfullResuls ( ) { kong . unirest . PagedList < java . lang . String > list = new kong . unirest . PagedList ( ) ; list . addAll ( asList ( mkRequest ( "foo" ) , mkRequest ( null ) , mkRequest ( "baz" ) ) ) ; final java . util . List < java . lang . String > processed = new java . util . ArrayList ( ) ; list . ifSuccess ( ( e ) -> processed . add ( e . getBody ( ) ) ) ; "<AssertPlaceHolder>" ; } getBody ( ) { return java . util . Optional . of ( this ) ; } | org . junit . Assert . assertEquals ( java . util . Arrays . asList ( "foo" , "baz" ) , processed ) |
testSplitDagWithMultiReduces ( ) { io . cdap . cdap . etl . planner . ConnectorDag cdag = io . cdap . cdap . etl . planner . ConnectorDag . builder ( ) . addConnection ( "n1" , "n3" ) . addConnection ( "n2" , "n3" ) . addConnection ( "n3" , "n5" ) . addConnection ( "n5" 0 , "n5" ) . addConnection ( "n5" , "n7" ) . addConnection ( "n6" , "n7" ) . addConnection ( "n9" , "n7" ) . addConnection ( "n7" , "n8" ) . addReduceNodes ( "n3" , "n5" , "n7" ) . build ( ) ; cdag . insertConnectors ( ) ; java . util . Set < io . cdap . cdap . etl . planner . Dag > actual = new java . util . HashSet ( cdag . split ( ) ) ; io . cdap . cdap . etl . planner . Dag dag1 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "n1" , "n3" ) , new io . cdap . cdap . etl . proto . Connection ( "n2" , "n3" ) , new io . cdap . cdap . etl . proto . Connection ( "n3" , "n5.connector" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag2 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "n5" 0 , "n5.connector" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag3 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "n5.connector" , "n5" ) , new io . cdap . cdap . etl . proto . Connection ( "n5" , "n7.connector" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag4 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "n6" , "n7.connector" ) , new io . cdap . cdap . etl . proto . Connection ( "n9" , "n7.connector" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag5 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . 
Connection ( "n7.connector" , "n7" ) , new io . cdap . cdap . etl . proto . Connection ( "n7" , "n8" ) ) ) ; java . util . Set < io . cdap . cdap . etl . planner . Dag > expected = com . google . common . collect . ImmutableSet . of ( dag1 , dag2 , dag3 , dag4 , dag5 ) ; "<AssertPlaceHolder>" ; } of ( A , B ) { return new io . cdap . cdap . common . utils . ImmutablePair < > ( first , second ) ; } | org . junit . Assert . assertEquals ( expected , actual ) |
testGetGeneralPlaceDgTlv ( ) { de . persosim . simulator . tlv . ConstructedTlvDataObject received = de . persosim . simulator . perso . AbstractProfile . getGeneralPlaceDgTlv ( new de . persosim . simulator . tlv . TlvTag ( ( ( byte ) ( 113 ) ) ) , "HEIDESTRASSE<sp>17" , "KÖLN" , null , "D" , "51147" ) ; de . persosim . simulator . tlv . ConstructedTlvDataObject expected = new de . persosim . simulator . tlv . ConstructedTlvDataObject ( de . persosim . simulator . utils . HexString . toByteArray ( "712C302AAA110C0F484549444553545241535345203137AB070C054BC3964C4EAD03130144AE0713053531313437" ) ) ; "<AssertPlaceHolder>" ; } toByteArray ( ) { return oidByteArray . clone ( ) ; } | org . junit . Assert . assertArrayEquals ( expected . toByteArray ( ) , received . toByteArray ( ) ) |
testResolveBundleCapabilityConnectedRegionAllowed ( ) { org . eclipse . equinox . region . internal . tests . hook . RegionFilter filter = createBundleFilter ( org . eclipse . equinox . region . internal . tests . hook . RegionResolverHookTests . BUNDLE_B , org . eclipse . equinox . region . internal . tests . hook . RegionResolverHookTests . BUNDLE_VERSION ) ; region ( org . eclipse . equinox . region . internal . tests . hook . RegionResolverHookTests . REGION_A ) . connectRegion ( region ( org . eclipse . equinox . region . internal . tests . hook . RegionResolverHookTests . REGION_B ) , filter ) ; this . candidates . add ( bundleCapability ( org . eclipse . equinox . region . internal . tests . hook . RegionResolverHookTests . BUNDLE_B ) ) ; this . resolverHook . filterMatches ( bundleRequirement ( org . eclipse . equinox . region . internal . tests . hook . RegionResolverHookTests . BUNDLE_A ) , this . candidates ) ; "<AssertPlaceHolder>" ; } contains ( org . eclipse . core . runtime . ILogListener ) { synchronized ( org . eclipse . core . internal . runtime . RuntimeLog . logListeners ) { return org . eclipse . core . internal . runtime . RuntimeLog . logListeners . contains ( listener ) ; } } | org . junit . Assert . assertTrue ( this . candidates . contains ( bundleCapability ( org . eclipse . equinox . region . internal . tests . hook . RegionResolverHookTests . BUNDLE_B ) ) ) |
testFindByPage ( ) { com . github . pagehelper . Page < com . xiaolyuh . domain . model . Person > persons = personService . findByPage ( 1 , 2 ) ; com . xiaolyuh . page . PageInfo < com . xiaolyuh . domain . model . Person > pageInfo = new com . xiaolyuh . page . PageInfo ( persons ) ; "<AssertPlaceHolder>" ; logger . debug ( pageInfo . toString ( ) ) ; logger . debug ( com . alibaba . fastjson . JSON . toJSONString ( pageInfo ) ) ; } | org . junit . Assert . assertNotNull ( persons ) |
testDdl_TYPE_SMALLINT ( ) { java . lang . String ddl = ( "CREATE<sp>FOREIGN<sp>TABLE<sp>ONE_TYPE<sp>(" + ( org . teiid . designer . vdb . dynamic . TestModelToDdlGenerator . BQT2_TYPE_DDL . TYPE_SMALLINT ) ) + ")<sp>OPTIONS(UPDATABLE<sp>'TRUE');" ; java . lang . String expectedDdl = ( "CREATE<sp>FOREIGN<sp>TABLE<sp>ONE_TYPE<sp>(" + ( org . teiid . designer . vdb . dynamic . TestModelToDdlGenerator . EXPECTED_BQT2_TYPE_DDL . TYPE_SMALLINT ) ) + ")<sp>OPTIONS(UPDATABLE<sp>'TRUE');" ; java . lang . String generatedDdl = roundTrip ( ddl , false ) ; "<AssertPlaceHolder>" ; } roundTrip ( java . lang . String , boolean ) { org . teiid . designer . core . workspace . ModelResource modelResource = createModelResource ( ddl , isVirtual ) ; java . lang . String generatedDdl = generator . generate ( modelResource ) ; generatedDdl = removeWhitespace ( generatedDdl ) ; return generatedDdl ; } | org . junit . Assert . assertEquals ( expectedDdl , generatedDdl ) |
putNoPreviousValueSucceed ( ) { com . microsoft . azure . sdk . iot . deps . twin . TwinCollection twinCollection = new com . microsoft . azure . sdk . iot . deps . twin . TwinCollection ( ) ; java . lang . Object lastBrand = twinCollection . putFinal ( tests . unit . com . microsoft . azure . sdk . iot . deps . twin . TwinCollectionTest . VALID_KEY_NAME , "NewNiceCar" ) ; "<AssertPlaceHolder>" ; } putFinal ( java . lang . String , java . lang . Object ) { if ( ( key == null ) || ( key . isEmpty ( ) ) ) { if ( ( ! ( key . equals ( com . microsoft . azure . sdk . iot . deps . twin . TwinCollection . VERSION_TAG ) ) ) && ( ! ( key . equals ( com . microsoft . azure . sdk . iot . deps . twin . TwinCollection . METADATA_TAG ) ) ) ) { com . microsoft . azure . sdk . iot . deps . serializer . ParserUtility . validateMap ( this , com . microsoft . azure . sdk . iot . deps . twin . TwinCollection . MAX_TWIN_LEVEL , true ) ; } return last ; } | org . junit . Assert . assertNull ( lastBrand ) |
testArgumentValueConverter ( ) { final org . jboss . dmr . ModelNode node = HeadersArgumentValueConverter . INSTANCE . fromString ( ctx , ( "{<sp>rollout<sp>" + ( "groupE" 0 + "groupD(rolling-to-servers=true,max-failed-servers=1)<sp>^<sp>groupE<sp>rollback-across-groups}" ) ) ) ; final org . jboss . dmr . ModelNode expectedHeaders = new org . jboss . dmr . ModelNode ( ) ; final org . jboss . dmr . ModelNode rolloutPlan = expectedHeaders . get ( Util . ROLLOUT_PLAN ) ; final org . jboss . dmr . ModelNode inSeries = rolloutPlan . get ( Util . IN_SERIES ) ; org . jboss . dmr . ModelNode concurrent = new org . jboss . dmr . ModelNode ( ) ; org . jboss . dmr . ModelNode cg = concurrent . get ( Util . CONCURRENT_GROUPS ) ; org . jboss . dmr . ModelNode group = cg . get ( "groupA" ) ; group . get ( "groupE" 4 ) . set ( "true" ) ; group . get ( "max-failure-percentage" ) . set ( "groupE" 3 ) ; group = cg . get ( "groupB" ) ; inSeries . add ( ) . set ( concurrent ) ; org . jboss . dmr . ModelNode sg = new org . jboss . dmr . ModelNode ( ) ; group = sg . get ( Util . SERVER_GROUP ) ; group . get ( "groupC" ) ; inSeries . add ( ) . set ( sg ) ; concurrent = new org . jboss . dmr . ModelNode ( ) ; cg = concurrent . get ( Util . CONCURRENT_GROUPS ) ; group = cg . get ( "groupE" 1 ) ; group . get ( "groupE" 4 ) . set ( "true" ) ; group . get ( "max-failed-servers" ) . set ( "groupE" 2 ) ; cg . get ( "groupE" ) ; inSeries . add ( ) . set ( concurrent ) ; rolloutPlan . get ( "rollback-across-groups" ) . set ( "true" ) ; "<AssertPlaceHolder>" ; } set ( org . wildfly . core . launcher . Arguments$Argument ) { if ( argument != null ) { map . put ( argument . getKey ( ) , java . util . Collections . singleton ( argument ) ) ; } } | org . junit . Assert . assertEquals ( expectedHeaders , node ) |
test_minus_longPeriodUnit_zero ( ) { java . time . LocalDate t = TEST_2007_07_15 . minus ( 0 , ChronoUnit . DAYS ) ; "<AssertPlaceHolder>" ; } minus ( long , java . time . temporal . TemporalUnit ) { return amountToSubtract == ( Long . MIN_VALUE ) ? plus ( Long . MAX_VALUE , unit ) . plus ( 1 , unit ) : plus ( ( - amountToSubtract ) , unit ) ; } | org . junit . Assert . assertSame ( t , TEST_2007_07_15 ) |
runTest ( ) { boolean result = checkNoError ( "Social_Activities_Create_Todo_Node" ) ; "<AssertPlaceHolder>" ; } getNoErrorMsg ( ) { return noErrorMsg ; } | org . junit . Assert . assertTrue ( getNoErrorMsg ( ) , result ) |
testNPlusOne ( ) { indi . mybatis . flying . pojo . Role2_ r = new indi . mybatis . flying . pojo . Role2_ ( ) ; r . setId ( 1 ) ; r . setName ( "root" ) ; role2Service . insert ( r ) ; indi . mybatis . flying . pojo . Role2_ r2 = new indi . mybatis . flying . pojo . Role2_ ( ) ; r2 . setId ( 2 ) ; r2 . setName ( "user" ) ; role2Service . insert ( r2 ) ; indi . mybatis . flying . pojo . Account2_ a = new indi . mybatis . flying . pojo . Account2_ ( ) ; a . setId ( 21 ) ; a . setEmail ( "10" ) ; a . setRole ( r ) ; account2Service . insert ( a ) ; indi . mybatis . flying . pojo . Account2_ a2 = new indi . mybatis . flying . pojo . Account2_ ( ) ; a2 . setId ( 22 ) ; a2 . setEmail ( "11" ) ; a2 . setRole ( r ) ; account2Service . insert ( a2 ) ; java . util . Collection < indi . mybatis . flying . pojo . Account2_ > accounts = account2Service . selectAll ( new indi . mybatis . flying . pojo . Account2_ ( ) ) ; "<AssertPlaceHolder>" ; } selectAll ( indi . mybatis . flying . pojo . LoginLog_ ) { return supportSelectAll ( mapper , t ) ; } | org . junit . Assert . assertEquals ( 2 , accounts . size ( ) ) |
testGetPipelineNames ( ) { java . util . List < java . lang . String > values = mock ( java . util . List . class ) ; when ( pipelineService . getPipelineNames ( org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PROVIDER_TYPE_NAME , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PROVIDER_TYPE_VERSION , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PAGE , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PAGE_SIZE , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . SORT , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . SORT_ORDER ) ) . thenReturn ( values ) ; java . util . List < java . lang . String > result = pipelineServiceBackend . getPipelineNames ( providerType , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PAGE , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PAGE_SIZE , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . SORT , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . SORT_ORDER ) ; verify ( pipelineService , times ( 1 ) ) . getPipelineNames ( org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PROVIDER_TYPE_NAME , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PROVIDER_TYPE_VERSION , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PAGE , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . PAGE_SIZE , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . SORT , org . guvnor . ala . services . backend . impl . PipelineServiceBackendImplTest . SORT_ORDER ) ; "<AssertPlaceHolder>" ; } getPipelineNames ( java . lang . String , java . lang . String , java . lang . 
Integer , java . lang . Integer , java . lang . String , boolean ) { return pipelineRegistry . getPipelines ( providerTypeName , providerTypeVersion , page , pageSize , sort , sortOrder ) . stream ( ) . map ( Pipeline :: getName ) . collect ( java . util . stream . Collectors . toList ( ) ) ; } | org . junit . Assert . assertEquals ( values , result ) |
createPrimeFaces ( ) { wizardAction . openNewLiferayJsfProjectWizard ( ) ; wizardAction . newLiferayJsf . prepareGradle ( project . getName ( ) , com . liferay . ide . ui . jsf . tests . PRIMEFACES ) ; wizardAction . finish ( ) ; jobAction . waitForNoRunningJobs ( ) ; "<AssertPlaceHolder>" ; viewAction . project . closeAndDelete ( project . getName ( ) ) ; } visibleFileTry ( java . lang . String [ ] ) { try { return _getProjects ( ) . isVisible ( files ) ; } catch ( java . lang . Exception e ) { _getProjects ( ) . setFocus ( ) ; try { java . lang . String [ ] parents = java . util . Arrays . copyOfRange ( files , 0 , ( ( files . length ) - 1 ) ) ; _getProjects ( ) . expand ( parents ) ; _getProjects ( ) . contextMenu ( com . liferay . ide . ui . liferay . action . REFRESH , parents ) ; ide . sleep ( 2000 ) ; } catch ( java . lang . Exception e1 ) { } for ( int i = ( files . length ) - 1 ; i > 0 ; i -- ) { java . lang . String [ ] parents = java . util . Arrays . copyOfRange ( files , 0 , ( ( files . length ) - i ) ) ; org . eclipse . swtbot . swt . finder . widgets . SWTBotTreeItem parent = _getProjects ( ) . getTreeItem ( parents ) ; _getProjects ( ) . expand ( parents ) ; java . lang . String subnode = files [ ( ( files . length ) - i ) ] ; _jobAction . waitForSubnode ( parent , subnode , com . liferay . ide . ui . liferay . action . REFRESH ) ; } return _getProjects ( ) . isVisible ( files ) ; } } | org . junit . Assert . assertTrue ( viewAction . project . visibleFileTry ( project . getName ( ) ) ) |
testClearWeakMakesObjectWeak ( ) { com . eclipsesource . v8 . V8Value object = new com . eclipsesource . v8 . V8Object ( v8 ) . setWeak ( ) . clearWeak ( ) ; "<AssertPlaceHolder>" ; object . close ( ) ; } isWeak ( ) { v8 . checkThread ( ) ; v8 . checkReleased ( ) ; return v8 . isWeak ( v8 . getV8RuntimePtr ( ) , getHandle ( ) ) ; } | org . junit . Assert . assertFalse ( object . isWeak ( ) ) |
testJiraCsv154_withCommentMarker ( ) { final java . lang . String comment = "This<sp>is<sp>a<sp>header<sp>comment" ; final org . apache . commons . csv . CSVFormat format = CSVFormat . EXCEL . withHeader ( "H1" , "H2" ) . withCommentMarker ( '#' ) . withHeaderComments ( comment ) ; final java . lang . StringBuilder out = new java . lang . StringBuilder ( ) ; try ( final org . apache . commons . csv . CSVPrinter printer = format . print ( out ) ) { printer . print ( "A" ) ; printer . print ( "B" ) ; } final java . lang . String s = out . toString ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( ( ( ( ( "CSVRecord<sp>[comment=" + ( comment ) ) + ",<sp>mapping=" ) + ( mapping ) ) + ",<sp>recordNumber=" ) + ( recordNumber ) ) + ",<sp>values=" ) + ( java . util . Arrays . toString ( values ) ) ) + "]" ; } | org . junit . Assert . assertTrue ( s , s . contains ( comment ) ) |
stopQuery_alreadyStopped ( ) { final org . apache . rya . streams . api . RyaStreamsClient mockClient = mock ( org . apache . rya . streams . api . RyaStreamsClient . class ) ; final org . apache . rya . streams . api . interactor . StopQuery stopQuery = mock ( org . apache . rya . streams . api . interactor . StopQuery . class ) ; when ( mockClient . getStopQuery ( ) ) . thenReturn ( stopQuery ) ; final org . apache . rya . streams . api . interactor . GetQuery getQuery = mock ( org . apache . rya . streams . api . interactor . GetQuery . class ) ; when ( mockClient . getGetQuery ( ) ) . thenReturn ( getQuery ) ; final org . apache . rya . shell . SharedShellState state = new org . apache . rya . shell . SharedShellState ( ) ; state . connectedToAccumulo ( mock ( org . apache . rya . api . client . accumulo . AccumuloConnectionDetails . class ) , mock ( org . apache . rya . api . client . RyaClient . class ) ) ; state . connectedToInstance ( "unitTest" ) ; state . connectedToRyaStreams ( mockClient ) ; final java . util . UUID queryId = java . util . UUID . randomUUID ( ) ; when ( getQuery . getQuery ( eq ( queryId ) ) ) . thenReturn ( java . util . Optional . of ( new org . apache . rya . streams . api . entity . StreamsQuery ( queryId , "sparql" , false , false ) ) ) ; final org . apache . rya . shell . RyaStreamsCommands commands = new org . apache . rya . shell . RyaStreamsCommands ( state , mock ( org . apache . rya . shell . util . SparqlPrompt . class ) , mock ( org . apache . rya . shell . util . ConsolePrinter . class ) ) ; final java . lang . String message = commands . stopQuery ( queryId . toString ( ) ) ; verify ( stopQuery , never ( ) ) . stop ( queryId ) ; final java . lang . String expected = "That<sp>query<sp>is<sp>already<sp>stopped." ; "<AssertPlaceHolder>" ; } stop ( java . util . UUID ) { requireNonNull ( queryId ) ; repository . updateIsActive ( queryId , false ) ; } | org . junit . Assert . assertEquals ( expected , message ) |
testInvalidIdColon ( ) { java . lang . String id = "content:634e8505-bd4b-436e-97e8-2045d1b0d265" . replace ( "-" , "" ) ; java . lang . String qualifier = "zoom-and-enhanced-overview" ; ddf . catalog . content . data . ContentItem item = new ddf . catalog . content . data . impl . ContentItemImpl ( id , qualifier , null , "" , null ) ; "<AssertPlaceHolder>" ; } validate ( ddf . catalog . content . data . ContentItem ) { if ( ( item == null ) || ( org . apache . commons . lang . StringUtils . isBlank ( item . getUri ( ) ) ) ) { return false ; } if ( org . apache . commons . lang . StringUtils . isNotBlank ( item . getQualifier ( ) ) ) { boolean qualifierValid = ddf . catalog . content . data . impl . ContentItemValidator . validateInput ( item . getQualifier ( ) , ddf . catalog . content . data . impl . ContentItemValidator . QUALIFIER_PATTERN ) ; if ( ! qualifierValid ) { return false ; } } if ( ddf . catalog . content . data . impl . ContentItemValidator . CONTENT_PATTERN . matcher ( item . getUri ( ) ) . matches ( ) ) { return true ; } return false ; } | org . junit . Assert . assertThat ( ddf . catalog . content . data . impl . ContentItemValidator . validate ( item ) , org . hamcrest . Matchers . is ( false ) ) |