| input (string, lengths 28–18.7k) | output (string, lengths 39–1.69k) |
|---|---|
removeAll_0arg ( ) { java . util . Map < java . lang . Long , java . lang . String > data = createLSData ( 3 ) ; cache . putAll ( data ) ; cache . removeAll ( ) ; for ( java . lang . Long key : data . keySet ( ) ) { "<AssertPlaceHolder>" ; } } | org . junit . Assert . assertFalse ( cache . containsKey ( key ) ) |
testNegativeArgument ( ) { final java . lang . String [ ] args = new java . lang . String [ ] { "-b" , "-1" } ; final org . apache . commons . cli . CommandLine cl = parser . parse ( options , args ) ; "<AssertPlaceHolder>" ; } getOptionValue ( org . apache . commons . cli . Option ) { if ( option == null ) { return null ; } final java . lang . String [ ] values = getOptionValues ( option ) ; return values == null ? null : values [ 0 ] ; } | org . junit . Assert . assertEquals ( "-1" , cl . getOptionValue ( "b" ) ) |
testTwoClassesTwoExcludeWithWildcard ( ) { japicmp . cmp . JarArchiveComparatorOptions options = new japicmp . cmp . JarArchiveComparatorOptions ( ) ; options . getFilters ( ) . getExcludes ( ) . add ( new japicmp . filter . JavaDocLikeClassFilter ( "japicmp.*" ) ) ; java . util . List < japicmp . model . JApiClass > jApiClasses = japicmp . cmp . ClassesHelper . compareClasses ( options , new japicmp . cmp . ClassesHelper . ClassesGenerator ( ) { @ japicmp . cmp . Override public java . util . List < javassist . CtClass > createOldClasses ( javassist . ClassPool classPool ) throws japicmp . cmp . Exception { javassist . CtClass ctClass1 = japicmp . util . CtClassBuilder . create ( ) . name ( "japicmp.Homer" ) . addToClassPool ( classPool ) ; javassist . CtClass ctClass2 = japicmp . util . CtClassBuilder . create ( ) . name ( "japicmp.Marge" ) . addToClassPool ( classPool ) ; return java . util . Arrays . asList ( ctClass1 , ctClass2 ) ; } @ japicmp . cmp . Override public java . util . List < javassist . CtClass > createNewClasses ( javassist . ClassPool classPool ) throws japicmp . cmp . Exception { javassist . CtClass ctClass1 = japicmp . util . CtClassBuilder . create ( ) . name ( "japicmp.Homer" ) . addToClassPool ( classPool ) ; javassist . CtClass ctClass2 = japicmp . util . CtClassBuilder . create ( ) . name ( "japicmp.Marge" ) . addToClassPool ( classPool ) ; return java . util . Arrays . asList ( ctClass1 , ctClass2 ) ; } } ) ; "<AssertPlaceHolder>" ; } addToClassPool ( javassist . ClassPool ) { javassist . CtClass ctClass ; if ( this . superclass . isPresent ( ) ) { ctClass = classPool . makeClass ( this . name , this . superclass . get ( ) ) ; } else { ctClass = classPool . makeClass ( this . name ) ; } ctClass . setModifiers ( this . modifier ) ; for ( java . lang . String annotation : annotations ) { javassist . bytecode . ClassFile classFile = ctClass . getClassFile ( ) ; javassist . bytecode . ConstPool constPool = classFile . 
getConstPool ( ) ; javassist . bytecode . AnnotationsAttribute attr = new javassist . bytecode . AnnotationsAttribute ( constPool , javassist . bytecode . AnnotationsAttribute . visibleTag ) ; javassist . bytecode . annotation . Annotation annot = new javassist . bytecode . annotation . Annotation ( annotation , constPool ) ; attr . setAnnotation ( annot ) ; ctClass . getClassFile2 ( ) . addAttribute ( attr ) ; } for ( javassist . CtClass interfaceCtClass : interfaces ) { ctClass . addInterface ( interfaceCtClass ) ; } return ctClass ; } | org . junit . Assert . assertThat ( jApiClasses . size ( ) , org . hamcrest . core . Is . is ( 0 ) ) |
testLeftOuterJoin ( ) { java . lang . String sqlText = "and<sp>C.TYPE=UPPER(A.type)\n" + ( ( ( ( ( ( "FROM<sp>TABLE_A<sp>A\n" + "LEFT<sp>JOIN<sp>TABLE_B<sp>B\n" ) + "ON<sp>A.ID<sp>=<sp>B.ID\n" ) + "LEFT<sp>JOIN<sp>TABLE_C<sp>C\n" ) + "ON<sp>C.SOURCE=<sp>\'PBD_SMARTWORKS\'\n" ) + "and<sp>C.TYPE=UPPER(A.type)\n" ) + "WHERE<sp>B.BD<sp>=<sp>1" ) ; java . sql . ResultSet rs = methodWatcher . executeQuery ( sqlText ) ; java . lang . String s = TestUtils . FormattedResult . ResultFactory . toStringUnsorted ( rs ) ; rs . close ( ) ; java . lang . String expected = "ACCOUNT<sp>|CATEGORY<sp>|SUB_CATEGORY<sp>|\n" + ( "----------------------------------\n" + "<sp>ACCOUNT<sp>|CATEGORY<sp>|SUB_CATEGORY<sp>|" ) ; "<AssertPlaceHolder>" ; } close ( ) { } | org . junit . Assert . assertEquals ( s , expected , s ) |
testMagLeverenGeenAttenderingOfSelectieDienst ( ) { org . mockito . Mockito . when ( verstrekkingsbeperkingService . heeftGeldigeVerstrekkingsbeperking ( any ( ) , any ( ) ) ) . thenReturn ( false ) ; nl . bzk . brp . domain . algemeen . Autorisatiebundel autorisatiebundel = maakAutorisatiebundel ( SoortDienst . GEEF_DETAILS_PERSOON ) ; boolean magLeveren = verstrekkingsbeperkingfilter . magLeveren ( TestBuilders . PERSOON_MET_HANDELINGEN , null , autorisatiebundel ) ; "<AssertPlaceHolder>" ; } magLeveren ( nl . bzk . brp . domain . leveringmodel . persoon . Persoonslijst , nl . bzk . brp . domain . algemeen . Populatie , nl . bzk . brp . domain . algemeen . Autorisatiebundel ) { final nl . bzk . algemeenbrp . dal . domein . brp . enums . SoortDienst soortDienst = autorisatiebundel . getDienst ( ) . getSoortDienst ( ) ; if ( ! ( ( ( soortDienst == ( nl . bzk . algemeenbrp . dal . domein . brp . enums . SoortDienst . ATTENDERING ) ) || ( soortDienst == ( nl . bzk . algemeenbrp . dal . domein . brp . enums . SoortDienst . SELECTIE ) ) ) || ( soortDienst == ( nl . bzk . algemeenbrp . dal . domein . brp . enums . SoortDienst . MUTATIELEVERING_OP_BASIS_VAN_AFNEMERINDICATIE ) ) ) ) { return true ; } final nl . bzk . algemeenbrp . dal . domein . brp . entity . Partij partij = autorisatiebundel . getPartij ( ) ; final boolean leveringMagDoorgaan ; if ( soortDienst == ( nl . bzk . algemeenbrp . dal . domein . brp . enums . SoortDienst . MUTATIELEVERING_OP_BASIS_VAN_AFNEMERINDICATIE ) ) { boolean heeftVerstrekkingsBeperkingVorigeHandeling = ( ( persoon . beeldVan ( ) . vorigeHandeling ( ) ) != null ) && ( verstrekkingsbeperkingService . heeftGeldigeVerstrekkingsbeperking ( persoon . beeldVan ( ) . vorigeHandeling ( ) , partij ) ) ; boolean heeftVertrekkingsbeperkingNu = verstrekkingsbeperkingService . heeftGeldigeVerstrekkingsbeperking ( persoon . getNuNuBeeld ( ) , partij ) ; leveringMagDoorgaan = ! 
( heeftVerstrekkingsBeperkingVorigeHandeling && heeftVertrekkingsbeperkingNu ) ; if ( ! leveringMagDoorgaan ) { nl . bzk . brp . service . mutatielevering . leveringbepaling . filter . VerstrekkingsbeperkingfilterImpl . LOGGER . debug ( ( "Levering<sp>voor<sp>mutatielevering<sp>obv<sp>afnemerindicatie<sp>mag<sp>niet<sp>doorgaan,<sp>er<sp>geldt<sp>een<sp>" + "verstrekkingsbeperking<sp>voor<sp>persoon<sp>{}<sp>op<sp>partij<sp>{}" ) , persoon . getId ( ) , partij . getCode ( ) ) ; } } else { leveringMagDoorgaan = ! ( verstrekkingsbeperkingService . heeftGeldigeVerstrekkingsbeperking ( persoon . getNuNuBeeld ( ) , partij ) ) ; if ( ! leveringMagDoorgaan ) { nl . bzk . brp . service . mutatielevering . leveringbepaling . filter . VerstrekkingsbeperkingfilterImpl . LOGGER . debug ( ( "Levering<sp>mag<sp>niet<sp>doorgaan,<sp>er<sp>geldt<sp>een<sp>" + "verstrekkingsbeperking<sp>voor<sp>persoon<sp>{}<sp>op<sp>partij<sp>{}" ) , persoon . getId ( ) , partij . getCode ( ) ) ; } } return leveringMagDoorgaan ; } | org . junit . Assert . assertTrue ( magLeveren ) |
testComputeSpanningHierarchyReverse ( ) { com . google . devtools . depan . model . GraphNode [ ] nodeArray = com . google . devtools . depan . test . TestUtils . buildNodes ( 5 ) ; com . google . devtools . depan . model . GraphModel test = com . google . devtools . depan . test . TestUtils . buildComplete ( nodeArray , TestUtils . RELATION ) ; java . util . Map < com . google . devtools . depan . model . GraphNode , ? extends com . google . devtools . depan . nodes . trees . SuccessorEdges > result = com . google . devtools . depan . nodes . trees . Trees . computeSpanningHierarchy ( test , TestUtils . REVERSE ) ; "<AssertPlaceHolder>" ; } buildAllNodes ( java . util . Map ) { java . util . Set < com . google . devtools . depan . model . GraphNode > result = com . google . common . collect . Sets . newHashSet ( ) ; for ( java . util . Map . Entry < com . google . devtools . depan . model . GraphNode , ? extends com . google . devtools . depan . nodes . trees . SuccessorEdges > entry : map . entrySet ( ) ) { result . add ( entry . getKey ( ) ) ; result . addAll ( entry . getValue ( ) . computeSuccessorNodes ( ) ) ; } return result ; } | org . junit . Assert . assertEquals ( 5 , buildAllNodes ( result ) . size ( ) ) |
qNameNoNamespace ( ) { "<AssertPlaceHolder>" ; } qName ( java . lang . String , java . lang . String ) { return annis . model . AnnisNode . qName ( namespace , name , ":" ) ; } | org . junit . Assert . assertThat ( annis . model . AnnisNode . qName ( null , "name" ) , org . hamcrest . Matchers . is ( "name" ) ) |
testEmptyPartition ( ) { reset ( _partitionFilter ) ; when ( _partitionFilter . filter ( org . mockito . Matchers . < java . lang . Iterable < com . bazaarvoice . ostrich . ServiceEndPoint > > any ( ) , any ( com . bazaarvoice . ostrich . PartitionContext . class ) ) ) . thenReturn ( com . google . common . collect . ImmutableList . < com . bazaarvoice . ostrich . ServiceEndPoint > of ( ) ) ; boolean called = _pool . execute ( com . bazaarvoice . ostrich . pool . AbstractServicePoolTestingHarness . NEVER_RETRY , new com . bazaarvoice . ostrich . ServiceCallback < com . bazaarvoice . ostrich . pool . AbstractServicePoolTestingHarness . Service , java . lang . Boolean > ( ) { @ com . bazaarvoice . ostrich . pool . Override public com . bazaarvoice . ostrich . pool . Boolean call ( com . bazaarvoice . ostrich . pool . AbstractServicePoolTestingHarness . Service service ) throws com . bazaarvoice . ostrich . exceptions . ServiceException { return true ; } } ) ; "<AssertPlaceHolder>" ; } call ( S ) { try { return method . invoke ( service , args ) ; } catch ( java . lang . IllegalAccessException e ) { throw new java . lang . RuntimeException ( e ) ; } catch ( java . lang . reflect . InvocationTargetException e ) { com . google . common . base . Throwables . throwIfUnchecked ( e . getTargetException ( ) ) ; throw new java . lang . RuntimeException ( e . getTargetException ( ) ) ; } } | org . junit . Assert . assertFalse ( called ) |
testI2os_BigInt_lengthLongerThanOctetString ( ) { java . math . BigInteger bigInt = new java . math . BigInteger ( "42" ) ; byte [ ] expectedResult = new byte [ ] { ( ( byte ) ( 0 ) ) , ( ( byte ) ( 0 ) ) , ( ( byte ) ( 0 ) ) , ( ( byte ) ( 42 ) ) } ; byte [ ] result = de . persosim . simulator . crypto . Tr03111Utils . i2os ( bigInt , expectedResult . length ) ; "<AssertPlaceHolder>" ; } i2os ( java . math . BigInteger , int ) { if ( ( x . compareTo ( BigInteger . ZERO ) ) < 0 ) { throw new java . lang . IllegalArgumentException ( "x<sp>must<sp>be<sp>non-negative" ) ; } byte [ ] result = de . persosim . simulator . utils . Utils . toUnsignedByteArray ( x ) ; return de . persosim . simulator . crypto . Tr03111Utils . i2os ( result , l ) ; } | org . junit . Assert . assertArrayEquals ( expectedResult , result ) |
testMaxInRange ( ) { parameter . setMaximumValue ( 0 ) ; parameter . configure ( org . apache . flink . api . java . utils . ParameterTool . fromArgs ( new java . lang . String [ ] { "--test" , "-1" } ) ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; } | org . junit . Assert . assertEquals ( new java . lang . Long ( ( - 1 ) ) , parameter . getValue ( ) ) |
testSingleOptionalOptionRequestMissing ( ) { final com . lexicalscope . jewel . cli . TestCliImpl . SingleOptionalOption option = new com . lexicalscope . jewel . cli . CliInterfaceImpl < com . lexicalscope . jewel . cli . TestCliImpl . SingleOptionalOption > ( com . lexicalscope . jewel . cli . TestCliImpl . SingleOptionalOption . class ) . parseArguments ( new java . lang . String [ ] { } ) ; "<AssertPlaceHolder>" ; } parseArguments ( java . lang . String [ ] ) { impl . parseArguments ( parsedArguments , arguments ) ; } | org . junit . Assert . assertThat ( option . getName ( ) , equalTo ( null ) ) |
testReadChineeseChars ( ) { java . lang . String s = "" ; javax . ws . rs . ext . MessageBodyReader < java . lang . String > p = new org . apache . cxf . jaxrs . provider . PrimitiveTextProvider ( ) ; java . lang . String value = p . readFrom ( java . lang . String . class , null , new java . lang . annotation . Annotation [ ] { } , javax . ws . rs . core . MediaType . valueOf ( ( ( javax . ws . rs . core . MediaType . APPLICATION_XML ) + ";charset=UTF-8" ) ) , null , new java . io . ByteArrayInputStream ( s . getBytes ( StandardCharsets . UTF_8 ) ) ) ; "<AssertPlaceHolder>" ; } getBytes ( java . lang . Object ) { return ( ( byte [ ] ) ( object ) ) ; } | org . junit . Assert . assertEquals ( s , value ) |
whenDefaults ( ) { final java . lang . Class < ? > cls = org . apache . isis . core . metamodel . facets . object . domainobjectlayout . DomainObjectLayoutFactoryTest . CustomerWithDefaults . class ; facetFactory . process ( new org . apache . isis . core . metamodel . facets . FacetFactory . ProcessClassContext ( cls , null , mockMethodRemover , facetHolder ) ) ; final org . apache . isis . core . metamodel . facetapi . Facet facet = facetHolder . getFacet ( org . apache . isis . core . metamodel . facets . object . paged . PagedFacet . class ) ; "<AssertPlaceHolder>" ; expectNoMethodsRemoved ( ) ; } getFacet ( java . lang . Class ) { final org . apache . isis . core . metamodel . facetapi . FacetHolder facetHolder = getAction ( ) ; return facetHolder . getFacet ( facetType ) ; } | org . junit . Assert . assertNull ( facet ) |
failFast ( ) { java . util . function . Predicate < java . lang . String > predicate1 = createPredicate ( false ) ; java . util . function . Predicate < java . lang . String > predicate2 = createPredicate ( true ) ; predicate = org . mule . runtime . core . api . util . func . CompositePredicate . of ( predicate1 , predicate2 ) ; "<AssertPlaceHolder>" ; org . mockito . InOrder inOrder = inOrder ( predicate1 , predicate2 ) ; inOrder . verify ( predicate1 ) . test ( org . apache . commons . lang3 . StringUtils . EMPTY ) ; inOrder . verify ( predicate2 , never ( ) ) . test ( org . apache . commons . lang3 . StringUtils . EMPTY ) ; } test ( T ) { for ( java . util . function . Predicate < T > predicate : predicates ) { if ( ! ( predicate . test ( t ) ) ) { return false ; } } return true ; } | org . junit . Assert . assertThat ( predicate . test ( "" ) , org . hamcrest . CoreMatchers . is ( false ) ) |
euqal_shouldPreventAddingTwoEqualObjectsToACollection ( ) { java . util . Set < org . openmrs . CohortMembership > cohortMembershipSet = new java . util . LinkedHashSet ( ) ; final int PATIENT_ID = 12 ; org . openmrs . CohortMembership cohortMembershipOne = new org . openmrs . CohortMembership ( PATIENT_ID ) ; org . openmrs . CohortMembership cohortMembershipTwo = new org . openmrs . CohortMembership ( PATIENT_ID ) ; java . util . Date startDate1 = new java . util . Date ( TIMESTAMP_START_DATE ) ; java . util . Date endDate1 = new java . util . Date ( TIMESTAMP_END_DATE ) ; java . util . Date startDate2 = new java . util . Date ( TIMESTAMP_START_DATE ) ; java . util . Date endDate2 = new java . util . Date ( TIMESTAMP_END_DATE ) ; cohortMembershipOne . setEndDate ( endDate1 ) ; cohortMembershipOne . setStartDate ( startDate1 ) ; cohortMembershipTwo . setStartDate ( startDate2 ) ; cohortMembershipTwo . setEndDate ( endDate2 ) ; cohortMembershipSet . add ( cohortMembershipOne ) ; cohortMembershipSet . add ( cohortMembershipTwo ) ; "<AssertPlaceHolder>" ; } size ( ) { return getMemberships ( ) . stream ( ) . filter ( ( m ) -> ! ( m . getVoided ( ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) . size ( ) ; } | org . junit . Assert . assertEquals ( cohortMembershipSet . size ( ) , 1 ) |
listTest ( ) { com . jfireframework . baseutil . collection . buffer . ByteBuf < ? > buf = com . jfireframework . baseutil . collection . buffer . HeapByteBuf . allocate ( 256 ) ; java . util . ArrayList < com . jframework . licp . test . basetest . data . BaseData > list = new java . util . ArrayList < com . jframework . licp . test . basetest . data . BaseData > ( ) ; for ( int i = 0 ; i < 5 ; i ++ ) { list . add ( new com . jframework . licp . test . basetest . data . BaseData ( i ) ) ; } com . jfireframework . licp . Licp lbse = new com . jfireframework . licp . Licp ( ) ; buf . clear ( ) ; lbse . serialize ( list , buf ) ; byte [ ] src = buf . toArray ( ) ; java . nio . ByteBuffer buffer = java . nio . ByteBuffer . allocate ( src . length ) ; buffer . put ( src ) . flip ( ) ; java . util . ArrayList < com . jframework . licp . test . basetest . data . BaseData > result = ( ( java . util . ArrayList < com . jframework . licp . test . basetest . data . BaseData > ) ( lbse . deserialize ( buffer ) ) ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( entity instanceof com . jfireframework . codejson . test . NestData ) { com . jfireframework . codejson . test . NestData target = ( ( com . jfireframework . codejson . test . NestData ) ( entity ) ) ; if ( ( name . equals ( target . getName ( ) ) ) && ( ( age ) == ( target . getAge ( ) ) ) ) { return true ; } else { return false ; } } else { return false ; } } | org . junit . Assert . assertTrue ( list . equals ( result ) ) |
determinePathToJar_withSpaces ( ) { final java . net . URL jarUrl = java . net . URI . create ( "file:/home/example/with%20spaces/git-credential-manager-1.0.0.jar!/com/microsoft/alm/gitcredentialmanager/" ) . toURL ( ) ; final java . lang . String actual = com . microsoft . alm . gitcredentialmanager . Program . determinePathToJar ( jarUrl ) ; final java . lang . String expected = "/home/example/with<sp>spaces/git-credential-manager-1.0.0.jar" ; "<AssertPlaceHolder>" ; } determinePathToJar ( java . net . URL ) { final java . lang . String packageName = com . microsoft . alm . gitcredentialmanager . Program . class . getPackage ( ) . getName ( ) ; final java . lang . String resourcePath = resourceURL . getPath ( ) ; final java . lang . String decodedResourcePath ; try { decodedResourcePath = java . net . URLDecoder . decode ( resourcePath , UriHelper . UTF_8 ) ; } catch ( final java . io . UnsupportedEncodingException e ) { throw new java . lang . Error ( e ) ; } final java . lang . String packagePath = packageName . replace ( "." , "/" ) ; final java . lang . String resourceSuffix = ( "!/" + packagePath ) + "/" ; java . lang . String jarPath = decodedResourcePath . replace ( resourceSuffix , "" ) ; jarPath = jarPath . replace ( "file:" , "" ) ; return jarPath ; } | org . junit . Assert . assertEquals ( expected , actual ) |
testOverwriteAllowsAndDenies ( ) { discord4j . core . object . util . PermissionSet base = discord4j . core . object . util . PermissionSet . of ( discord4j . core . util . SEND_MESSAGES ) ; java . util . List < discord4j . core . object . PermissionOverwrite > roleOverwrites = java . util . Collections . emptyList ( ) ; discord4j . core . object . PermissionOverwrite memberOverwrite = discord4j . core . util . PermissionUtilTest . overwrite ( discord4j . core . object . util . PermissionSet . of ( discord4j . core . util . PRIORITY_SPEAKER ) , discord4j . core . object . util . PermissionSet . of ( discord4j . core . util . SEND_MESSAGES ) ) ; discord4j . core . object . util . PermissionSet actual = discord4j . core . util . PermissionUtil . computePermissions ( base , null , roleOverwrites , memberOverwrite ) ; discord4j . core . object . util . PermissionSet expected = discord4j . core . object . util . PermissionSet . of ( discord4j . core . util . PRIORITY_SPEAKER ) ; "<AssertPlaceHolder>" ; } of ( long ) { return new discord4j . core . object . util . PermissionSet ( rawValue ) ; } | org . junit . Assert . assertEquals ( expected , actual ) |
constructorIsNullableArgumentShouldSetIsNullable ( ) { boolean isNullable = true ; resourceEditProperty = new ch . puzzle . itc . mobiliar . builders . ResourceEditPropertyBuilder ( ) . withIsNullable ( isNullable ) . build ( ) ; "<AssertPlaceHolder>" ; } build ( ) { buildResource ( buildResourceType ( RUNTIME . type ) , RUNTIME . name ) ; buildResourceType ( NODE1 . type ) ; buildContextAndPlatform ( ) ; as = buildResource ( buildResourceType ( AS . type ) , "as" ) ; app = buildResource ( buildResourceType ( APP . type ) , "app" ) ; buildConsumedRelation ( as , app , ForeignableOwner . AMW ) ; ad = buildResource ( buildResourceType ( AD . type ) , "ad" ) ; ws = buildResource ( buildResourceType ( WS . type ) , "ws" ) ; } | org . junit . Assert . assertEquals ( isNullable , resourceEditProperty . isNullable ( ) ) |
testPoll_whenReadyAndNoEvent ( ) { watchService . setRunning ( true ) ; com . peircean . glusterfs . GlusterWatchKey mockKey = mock ( com . peircean . glusterfs . GlusterWatchKey . class ) ; watchService . getPaths ( ) . add ( mockKey ) ; doReturn ( true ) . when ( mockKey ) . isValid ( ) ; doReturn ( true ) . when ( mockKey ) . isReady ( ) ; doReturn ( false ) . when ( mockKey ) . update ( ) ; java . nio . file . WatchKey key = watchService . poll ( ) ; "<AssertPlaceHolder>" ; org . mockito . Mockito . verify ( mockKey ) . isValid ( ) ; org . mockito . Mockito . verify ( mockKey ) . isReady ( ) ; org . mockito . Mockito . verify ( mockKey ) . update ( ) ; } poll ( ) { if ( ! ( running ) ) { throw new java . nio . file . ClosedWatchServiceException ( ) ; } java . nio . file . WatchKey pending = popPending ( ) ; if ( null != pending ) { return pending ; } for ( com . peircean . glusterfs . GlusterWatchKey k : paths ) { if ( ( ( k . isValid ( ) ) && ( k . isReady ( ) ) ) && ( k . update ( ) ) ) { pendingPaths . add ( k ) ; } } return popPending ( ) ; } | org . junit . Assert . assertEquals ( null , key ) |
testBuildWithDisabledDefaultConstraints ( ) { unit . setActive ( false ) ; unit . setSecurity ( false ) ; org . lnu . is . domain . person . work . PersonWork context = new org . lnu . is . domain . person . work . PersonWork ( ) ; java . lang . String expectedQuery = "SELECT<sp>e<sp>FROM<sp>PersonWork<sp>e<sp>" ; org . lnu . is . pagination . MultiplePagedSearch < org . lnu . is . domain . person . work . PersonWork > pagedSearch = new org . lnu . is . pagination . MultiplePagedSearch ( ) ; pagedSearch . setEntity ( context ) ; java . lang . String actualQuery = unit . build ( pagedSearch ) ; "<AssertPlaceHolder>" ; } setEntity ( T ) { this . entity = entity ; } | org . junit . Assert . assertEquals ( expectedQuery , actualQuery ) |
testEmptyChannel ( ) { try { org . apache . flink . streaming . runtime . io . BufferSpiller . SpilledBufferOrEventSequence seq = new org . apache . flink . streaming . runtime . io . BufferSpiller . SpilledBufferOrEventSequence ( tempFile , fileChannel , buffer , pageSize ) ; seq . open ( ) ; "<AssertPlaceHolder>" ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( e . getMessage ( ) ) ; } } getNext ( ) { if ( ( buffer . remaining ( ) ) < ( org . apache . flink . streaming . runtime . io . BufferSpiller . SpilledBufferOrEventSequence . HEADER_LENGTH ) ) { buffer . compact ( ) ; while ( ( buffer . position ( ) ) < ( org . apache . flink . streaming . runtime . io . BufferSpiller . SpilledBufferOrEventSequence . HEADER_LENGTH ) ) { if ( ( fileChannel . read ( buffer ) ) == ( - 1 ) ) { if ( ( buffer . position ( ) ) == 0 ) { return null ; } else { throw new java . io . IOException ( "Found<sp>trailing<sp>incomplete<sp>buffer<sp>or<sp>event" ) ; } } } buffer . flip ( ) ; } final int channel = buffer . getInt ( ) ; final int length = buffer . getInt ( ) ; final boolean isBuffer = ( buffer . get ( ) ) == 0 ; if ( isBuffer ) { if ( length > ( pageSize ) ) { throw new java . io . IOException ( java . lang . String . format ( "Spilled<sp>buffer<sp>(%d<sp>bytes)<sp>is<sp>larger<sp>than<sp>page<sp>size<sp>of<sp>(%d<sp>bytes)" , length , pageSize ) ) ; } org . apache . flink . core . memory . MemorySegment seg = org . apache . flink . core . memory . MemorySegmentFactory . allocateUnpooledSegment ( pageSize ) ; int segPos = 0 ; int bytesRemaining = length ; while ( true ) { int toCopy = java . lang . Math . min ( buffer . remaining ( ) , bytesRemaining ) ; if ( toCopy > 0 ) { seg . put ( segPos , buffer , toCopy ) ; segPos += toCopy ; bytesRemaining -= toCopy ; } if ( bytesRemaining == 0 ) { break ; } else { buffer . clear ( ) ; if ( ( fileChannel . read ( buffer ) ) == ( - 1 ) ) { throw new java . io . 
IOException ( "Found<sp>trailing<sp>incomplete<sp>buffer" ) ; } buffer . flip ( ) ; } } org . apache . flink . runtime . io . network . buffer . Buffer buf = new org . apache . flink . runtime . io . network . buffer . NetworkBuffer ( seg , org . apache . flink . runtime . io . network . buffer . FreeingBufferRecycler . INSTANCE ) ; buf . setSize ( length ) ; return new org . apache . flink . runtime . io . network . partition . consumer . BufferOrEvent ( buf , channel ) ; } else { if ( length > ( ( buffer . capacity ( ) ) - ( org . apache . flink . streaming . runtime . io . BufferSpiller . SpilledBufferOrEventSequence . HEADER_LENGTH ) ) ) { throw new java . io . IOException ( "Event<sp>is<sp>too<sp>large" ) ; } if ( ( buffer . remaining ( ) ) < length ) { buffer . compact ( ) ; while ( ( buffer . position ( ) ) < length ) { if ( ( fileChannel . read ( buffer ) ) == ( - 1 ) ) { throw new java . io . IOException ( "Found<sp>trailing<sp>incomplete<sp>event" ) ; } } buffer . flip ( ) ; } int oldLimit = buffer . limit ( ) ; buffer . limit ( ( ( buffer . position ( ) ) + length ) ) ; org . apache . flink . runtime . event . AbstractEvent evt = org . apache . flink . runtime . io . network . api . serialization . EventSerializer . fromSerializedEvent ( buffer , getClass ( ) . getClassLoader ( ) ) ; buffer . limit ( oldLimit ) ; return new org . apache . flink . runtime . io . network . partition . consumer . BufferOrEvent ( evt , channel ) ; } } | org . junit . Assert . assertNull ( seq . getNext ( ) ) |
testIndexOfKey ( ) { java . lang . String value1 = com . silverpeas . jcrutil . RandomGenerator . getRandomString ( ) ; java . lang . String value2 = com . silverpeas . jcrutil . RandomGenerator . getRandomString ( ) ; java . lang . String value3 = com . silverpeas . jcrutil . RandomGenerator . getRandomString ( ) ; com . stratelia . silverpeas . silverstatistics . model . TypeStatistics instance = new com . stratelia . silverpeas . silverstatistics . model . TypeStatistics ( ) ; java . util . Collection < java . lang . String > allKeys = java . util . Arrays . asList ( value1 , value2 , value3 ) ; instance . setAllKeys ( allKeys ) ; int result = instance . indexOfKey ( value2 ) ; "<AssertPlaceHolder>" ; } is ( T ) { return java . util . Objects . equals ( this . value , value ) ; } | org . junit . Assert . assertThat ( result , is ( 1 ) ) |
serverGroup2 ( ) { org . jboss . hal . dmr . ResourceAddress input = new org . jboss . hal . dmr . ResourceAddress ( ) . add ( "server-group" , "main-server-group" ) . add ( "system-property" , "*" ) ; org . jboss . hal . dmr . ResourceAddress expected = new org . jboss . hal . dmr . ResourceAddress ( ) . add ( "server-group" , "*" ) . add ( "system-property" , "*" ) ; org . jboss . hal . dmr . ResourceAddress result = processor . apply ( input ) ; "<AssertPlaceHolder>" ; } apply ( org . jboss . hal . meta . AddressTemplate ) { org . jboss . hal . meta . AddressTemplate modified = org . jboss . hal . meta . AddressTemplate . ROOT ; if ( ( template != null ) && ( ! ( AddressTemplate . ROOT . equals ( template ) ) ) ) { java . util . List < java . lang . String [ ] > segments = stream ( template . spliterator ( ) , false ) . map ( ( segment ) -> { if ( segment . contains ( "=" ) ) { return com . google . common . base . Splitter . on ( '=' ) . omitEmptyStrings ( ) . trimResults ( ) . limit ( 2 ) . splitToList ( segment ) . toArray ( new java . lang . String [ 2 ] ) ; } return new java . lang . String [ ] { segment , null } ; } ) . collect ( toList ( ) ) ; java . lang . StringBuilder builder = new java . lang . StringBuilder ( ) ; org . jboss . hal . meta . description . SegmentProcessor . process ( segments , ( segment ) -> { builder . append ( "/" ) . append ( segment [ 0 ] ) ; if ( ( segment [ 1 ] ) != null ) { builder . append ( "=" ) . append ( segment [ 1 ] ) ; } } ) ; modified = org . jboss . hal . meta . AddressTemplate . of ( builder . toString ( ) ) ; } org . jboss . hal . meta . description . ResourceDescriptionTemplateProcessor . logger . debug ( "{}<sp>-><sp>{}" , template , modified ) ; return modified ; } | org . junit . Assert . assertEquals ( expected , result ) |
shouldReturnBoundaryForCloseSquareBracket ( ) { edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType type = edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . getType ( 0 , ']' , 1 ) ; "<AssertPlaceHolder>" ; } getType ( int , char , int ) { if ( java . lang . Character . isUpperCase ( ch ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . UPPER_CASE_LETTER ; } else if ( java . lang . Character . isLowerCase ( ch ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . LETTER ; } else if ( java . lang . Character . isDigit ( ch ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . DIGIT ; } else if ( ( index == 0 ) && ( ch == ( edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . SINGLE_QUOTE ) ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . ESCAPING_QUOTE ; } else if ( ( index == ( length - 1 ) ) && ( ch == ( edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . SINGLE_QUOTE ) ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . ESCAPING_QUOTE ; } else if ( ch == '-' ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . LETTER ; } else if ( ch == ( edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . SINGLE_QUOTE ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . LETTER ; } else if ( ch == '<sp>' ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . BOUNDARY ; } else if ( ( 33 <= ch ) && ( ch <= 47 ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . BOUNDARY ; } else if ( ( 58 <= ch ) && ( ch <= 64 ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . 
BOUNDARY ; } else if ( ( 91 <= ch ) && ( ch <= 96 ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . BOUNDARY ; } else if ( ( 123 <= ch ) && ( ch <= 126 ) ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . BOUNDARY ; } else if ( ch == '\t' ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . BOUNDARY ; } else if ( ch == '\n' ) { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . BOUNDARY ; } else { return edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . LETTER ; } } | org . junit . Assert . assertEquals ( EntityNameCharType . BOUNDARY , type ) |
getRuntimeItemByPipelineExecutionKeyExisting ( ) { org . guvnor . ala . ui . model . PipelineExecutionTraceKey traceKey = new org . guvnor . ala . ui . model . PipelineExecutionTraceKey ( org . guvnor . ala . ui . backend . service . RuntimeServiceImplTest . PIPELINE_EXECUTION_ID ) ; java . util . List < org . guvnor . ala . services . api . RuntimeQueryResultItem > singleResult = mockRuntimeQueryResultItemList ( 1 ) ; when ( runtimeProvisioningService . executeQuery ( any ( org . guvnor . ala . services . api . RuntimeQuery . class ) ) ) . thenReturn ( singleResult ) ; org . guvnor . ala . ui . model . RuntimeListItem expectedItem = buildExpectedResult ( singleResult ) . iterator ( ) . next ( ) ; org . guvnor . ala . ui . model . RuntimeListItem result = service . getRuntimeItem ( traceKey ) ; "<AssertPlaceHolder>" ; } getRuntimeItem ( org . guvnor . ala . ui . model . PipelineExecutionTraceKey ) { checkNotNull ( "pipelineExecutionTraceKey" , pipelineExecutionTraceKey ) ; final org . guvnor . ala . services . api . RuntimeQuery query = org . guvnor . ala . services . api . RuntimeQueryBuilder . newInstance ( ) . withPipelineExecutionId ( pipelineExecutionTraceKey . getId ( ) ) . build ( ) ; return buildRuntimeQueryResult ( runtimeProvisioningService . executeQuery ( query ) ) . stream ( ) . findFirst ( ) . orElse ( null ) ; } | org . junit . Assert . assertEquals ( expectedItem , result ) |
shouldSerializeDeserialize ( ) { final org . apache . tinkerpop . gremlin . structure . io . gryo . GryoMapper mapper = builder . get ( ) . create ( ) ; final org . apache . tinkerpop . shaded . kryo . Kryo kryo = mapper . createMapper ( ) ; try ( final java . io . OutputStream stream = new java . io . ByteArrayOutputStream ( ) ) { final org . apache . tinkerpop . shaded . kryo . io . Output out = new org . apache . tinkerpop . shaded . kryo . io . Output ( stream ) ; final java . util . Map < java . lang . String , java . lang . Object > props = new java . util . HashMap ( ) ; final java . util . List < java . util . Map < java . lang . String , java . lang . Object > > propertyNames = new java . util . ArrayList ( 1 ) ; final java . util . Map < java . lang . String , java . lang . Object > propertyName = new java . util . HashMap ( ) ; propertyName . put ( GraphSONTokens . ID , "x" ) ; propertyName . put ( GraphSONTokens . KEY , "x" ) ; propertyName . put ( GraphSONTokens . VALUE , "no-way-this-will-ever-work" ) ; propertyNames . add ( propertyName ) ; props . put ( "x" , propertyNames ) ; final org . apache . tinkerpop . gremlin . structure . util . detached . DetachedVertex v = new org . apache . tinkerpop . gremlin . structure . util . detached . DetachedVertex ( 100 , org . apache . tinkerpop . gremlin . structure . Vertex . DEFAULT_LABEL , props ) ; kryo . writeClassAndObject ( out , v ) ; try ( final java . io . InputStream inputStream = new java . io . ByteArrayInputStream ( out . toBytes ( ) ) ) { final org . apache . tinkerpop . shaded . kryo . io . Input input = new org . apache . tinkerpop . shaded . kryo . io . Input ( inputStream ) ; final org . apache . tinkerpop . gremlin . structure . util . detached . DetachedVertex readX = ( ( org . apache . tinkerpop . gremlin . structure . util . detached . DetachedVertex ) ( kryo . readClassAndObject ( input ) ) ) ; "<AssertPlaceHolder>" ; } } } value ( java . lang . String ) { return this . element . 
value ( key ) ; } | org . junit . Assert . assertEquals ( "no-way-this-will-ever-work" , readX . value ( "x" ) ) |
testGetObjectFactoriesOnly ( ) { org . springframework . core . io . Resource resource1 = new org . springframework . core . io . ClassPathResource ( "classpath:dummy-context.xml" ) ; org . springframework . core . io . Resource resource2 = new org . springframework . core . io . ClassPathResource ( "classpath:dummy-context.xml" ) ; factory . setFactories ( java . util . Arrays . < org . springframework . batch . core . configuration . support . ApplicationContextFactory > asList ( new org . springframework . batch . core . configuration . support . GenericApplicationContextFactory ( resource1 ) , new org . springframework . batch . core . configuration . support . GenericApplicationContextFactory ( resource2 ) ) ) ; factory . afterPropertiesSet ( ) ; "<AssertPlaceHolder>" ; } getObject ( ) { return stepLocator . getStep ( stepName ) ; } | org . junit . Assert . assertEquals ( 2 , factory . getObject ( ) . length ) |
testConvertCAROObo2Owl ( ) { org . semanticweb . owlapi . model . OWLOntology owlOnt = convertOBOFile ( "caro.obo" ) ; "<AssertPlaceHolder>" ; } convertOBOFile ( java . lang . String ) { org . semanticweb . owlapi . model . OWLOntology convert = convert ( parseOBOFile ( fn ) ) ; writeOWL ( convert ) ; return convert ; } | org . junit . Assert . assertNotNull ( owlOnt ) |
testGetIntDefault ( ) { java . lang . String key = "SomeKey" ; int expResult = 85 ; getSettings ( ) . setString ( key , "blue" ) ; int result = getSettings ( ) . getInt ( key , expResult ) ; "<AssertPlaceHolder>" ; } getInt ( java . lang . String , int ) { int value ; try { value = java . lang . Integer . parseInt ( getString ( key ) ) ; } catch ( java . lang . NumberFormatException ex ) { if ( ! ( getString ( key , "" ) . isEmpty ( ) ) ) { org . owasp . dependencycheck . utils . Settings . LOGGER . debug ( "Could<sp>not<sp>convert<sp>property<sp>'{}={}'<sp>to<sp>an<sp>int;<sp>using<sp>{}<sp>instead." , key , getString ( key ) , defaultValue ) ; } value = defaultValue ; } return value ; } | org . junit . Assert . assertEquals ( expResult , result ) |
jsonObject_equal2 ( ) { org . oscm . json . JsonObject jsonObject = givenJsonObject ( ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == o ) { return true ; } if ( ( o == null ) || ( ( org . oscm . converter . utils . Pair . getClass ( ) ) != ( o . getClass ( ) ) ) ) { return false ; } org . oscm . converter . utils . Pair < ? , ? > pair = ( ( org . oscm . converter . utils . Pair < ? , ? > ) ( o ) ) ; return ( first . equals ( pair . first ) ) && ( second . equals ( pair . second ) ) ; } | org . junit . Assert . assertTrue ( jsonObject . equals ( jsonObject ) ) |
testUtf8StringCompare ( ) { org . lilyproject . hbaseindex . StringIndexFieldDefinition fieldDef = new org . lilyproject . hbaseindex . StringIndexFieldDefinition ( "foobar" ) ; byte [ ] string1 = fieldDef . asRowKey ( ) . serialize ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "être" ) ) ; byte [ ] string2 = fieldDef . asRowKey ( ) . serialize ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "heureux" ) ) ; "<AssertPlaceHolder>" ; } serialize ( java . util . Set ) { org . lilyproject . bytes . api . DataOutput builder = new org . lilyproject . bytes . impl . DataOutputImpl ( ) ; builder . writeVInt ( strings . size ( ) ) ; for ( java . lang . String permission : strings ) { builder . writeVUTF ( permission ) ; } return builder . toByteArray ( ) ; } | org . junit . Assert . assertTrue ( ( ( org . apache . hadoop . hbase . util . Bytes . compareTo ( string1 , string2 ) ) > 0 ) ) |
testTimerRemoval ( ) { final java . lang . String drl = ( ( ( ( ( ( ( ( ( ( ( ( ( ( "package<sp>org.drools.compiler.test\n" + "import<sp>" ) + ( java . util . concurrent . TimeUnit . class . getName ( ) ) ) + "rule<sp>TimerRule<sp>\n" 7 ) + "rule<sp>TimerRule<sp>\n" 0 ) + "global<sp>" ) + ( java . util . concurrent . CountDownLatch . class . getName ( ) ) ) + "rule<sp>TimerRule<sp>\n" 5 ) + "rule<sp>TimerRule<sp>\n" ) + "<sp>timer<sp>(int:100<sp>50)<sp>\n" ) + "rule<sp>TimerRule<sp>\n" 3 ) + "then<sp>\n" ) + "rule<sp>TimerRule<sp>\n" 6 ) + "rule<sp>TimerRule<sp>\n" 4 ) + "rule<sp>TimerRule<sp>\n" 1 ) + "<sp>end" ; final org . kie . api . KieBase kbase = org . drools . testcoverage . common . util . KieBaseUtil . getKieBaseFromKieModuleFromDrl ( "timer-and-calendar-test" , kieBaseTestConfiguration , drl ) ; final org . kie . api . runtime . KieSession ksession = kbase . newKieSession ( ) ; try { final java . util . concurrent . CountDownLatch latch = new java . util . concurrent . CountDownLatch ( 1 ) ; final java . util . List < java . lang . Integer > list = java . util . Collections . synchronizedList ( new java . util . ArrayList ( ) ) ; ksession . setGlobal ( "rule<sp>TimerRule<sp>\n" 2 , list ) ; ksession . setGlobal ( "rule<sp>TimerRule<sp>\n" 8 , latch ) ; ksession . fireAllRules ( ) ; java . lang . Thread . sleep ( 500 ) ; kbase . removeRule ( "org.drools.compiler.test" , "TimerRule" ) ; ksession . fireAllRules ( ) ; latch . countDown ( ) ; java . lang . Thread . sleep ( 500 ) ; ksession . fireAllRules ( ) ; list . clear ( ) ; java . lang . Thread . sleep ( 500 ) ; ksession . fireAllRules ( ) ; "<AssertPlaceHolder>" ; } finally { ksession . dispose ( ) ; } } size ( ) { return rulesFired . size ( ) ; } | org . junit . Assert . assertEquals ( 0 , list . size ( ) ) |
testResumeRead_NotPaused ( ) { establishConnection ( ) ; clientChannel . resumeRead ( ) ; net . xenqtt . message . SubAckMessage msg = new net . xenqtt . message . SubAckMessage ( 1 , new net . xenqtt . message . QoS [ ] { } ) ; brokerChannel . send ( msg , blockingCommand ) ; "<AssertPlaceHolder>" ; closeConnection ( ) ; } readWrite ( int , int , long ) { long end = ( timeoutMillis == 0 ) ? 0 : ( java . lang . System . currentTimeMillis ( ) ) + timeoutMillis ; clientHandler . clearMessages ( ) ; brokerHandler . clearMessages ( ) ; while ( ( ( brokerHandler . messageCount ( ) ) < brokerMessageCount ) || ( ( clientHandler . messageCount ( ) ) < clientMessageCount ) ) { if ( end != 0 ) { long time = end - ( java . lang . System . currentTimeMillis ( ) ) ; if ( time > 0 ) { selector . select ( time ) ; } } else { selector . select ( ) ; } java . util . Iterator < java . nio . channels . SelectionKey > iter = selector . selectedKeys ( ) . iterator ( ) ; if ( ( ! ( iter . hasNext ( ) ) ) && ( ( java . lang . System . currentTimeMillis ( ) ) >= end ) ) { return false ; } while ( iter . hasNext ( ) ) { java . nio . channels . SelectionKey key = iter . next ( ) ; net . xenqtt . message . MqttChannel channel = ( ( net . xenqtt . message . MqttChannel ) ( key . attachment ( ) ) ) ; if ( ( key . isValid ( ) ) && ( key . isReadable ( ) ) ) { channel . read ( ( ( now ) + 10 ) ) ; } if ( ( key . isValid ( ) ) && ( key . isWritable ( ) ) ) { channel . write ( now ) ; } iter . remove ( ) ; } } return true ; } | org . junit . Assert . assertTrue ( readWrite ( 1 , 0 , 1000 ) ) |
testDefaultConstructor ( ) { subject = new org . zenoss . app . metricservice . api . model . RateFormatException ( ) ; "<AssertPlaceHolder>" ; } | org . junit . Assert . assertNotNull ( subject ) |
testGet_Args ( ) { java . lang . String result = new ninja . cero . sqltemplate . core . template . PlainText ( ) . get ( "select<sp>*<sp>from<sp>emp" , new java . lang . String [ ] { "1" , "2" } ) ; "<AssertPlaceHolder>" ; } | org . junit . Assert . assertThat ( result , org . hamcrest . CoreMatchers . is ( "select<sp>*<sp>from<sp>emp" ) ) |
testNotInWithCorrelatedSubQueryOrAntijoin ( ) { java . sql . ResultSet rs = methodWatcher . executeQuery ( ( "select<sp>empnum<sp>from<sp>staff<sp>where<sp>empnum<sp>not<sp>in<sp>" + "(select<sp>works.empnum<sp>from<sp>works<sp>where<sp>staff.empnum<sp>=<sp>works.empnum)" ) ) ; com . splicemachine . derby . impl . sql . execute . operations . joins . List results = com . splicemachine . homeless . TestUtils . resultSetToMaps ( rs ) ; "<AssertPlaceHolder>" ; } size ( ) { return entries . size ( ) ; } | org . junit . Assert . assertEquals ( 1 , results . size ( ) ) |
testFlatten ( ) { int [ ] [ ] [ ] test = new int [ 2 ] [ 3 ] [ 4 ] ; int [ ] expected = new int [ 24 ] ; int count = 0 ; for ( int index1 = 0 ; index1 < ( test . length ) ; index1 ++ ) { for ( int index2 = 0 ; index2 < ( test [ index1 ] . length ) ; index2 ++ ) { for ( int index3 = 0 ; index3 < ( test [ index1 ] [ index2 ] . length ) ; index3 ++ ) { expected [ count ] = count ; test [ index1 ] [ index2 ] [ index3 ] = count ++ ; } } } int [ ] result = ( ( int [ ] ) ( nom . tam . util . ArrayFuncs . flatten ( test ) ) ) ; "<AssertPlaceHolder>" ; } flatten ( java . lang . Object ) { int [ ] dimens = nom . tam . util . ArrayFuncs . getDimensions ( input ) ; if ( ( dimens . length ) <= 1 ) { return input ; } int size = 1 ; for ( int dimen : dimens ) { size *= dimen ; } java . lang . Object flat = nom . tam . util . ArrayFuncs . newInstance ( nom . tam . util . ArrayFuncs . getBaseClass ( input ) , size ) ; nom . tam . util . array . MultiArrayCopier . copyInto ( input , flat ) ; return flat ; } | org . junit . Assert . assertArrayEquals ( expected , result ) |
testBasic2 ( ) { tl . lin . data . array . ArrayListWritable < tl . lin . data . pair . PairOfInts > data = new tl . lin . data . array . ArrayListWritable < tl . lin . data . pair . PairOfInts > ( ) ; data . add ( new tl . lin . data . pair . PairOfInts ( 1 , 2 ) ) ; data . add ( new tl . lin . data . pair . PairOfInts ( 3 , 4 ) ) ; data . add ( new tl . lin . data . pair . PairOfInts ( 5 , 6 ) ) ; data . add ( new tl . lin . data . pair . PairOfInts ( 7 , 8 ) ) ; java . io . ByteArrayOutputStream bytesOut = new java . io . ByteArrayOutputStream ( ) ; java . io . DataOutputStream dataOut = new java . io . DataOutputStream ( bytesOut ) ; data . write ( dataOut ) ; tl . lin . data . array . ArrayListWritable < tl . lin . data . pair . PairOfInts > reconstructed = new tl . lin . data . array . ArrayListWritable < tl . lin . data . pair . PairOfInts > ( ) ; reconstructed . readFields ( new java . io . DataInputStream ( new java . io . ByteArrayInputStream ( bytesOut . toByteArray ( ) ) ) ) ; for ( int i = 0 ; i < ( data . size ( ) ) ; i ++ ) { "<AssertPlaceHolder>" ; } } get ( int ) { tl . lin . data . map . TMapIV . Entry < V > p = getEntry ( key ) ; return p == null ? null : p . value ; } | org . junit . Assert . assertEquals ( data . get ( i ) , reconstructed . get ( i ) ) |
searchEmptyIndexShouldReturnNullRevision ( ) { final com . b2international . index . revision . RevisionFixtures . RevisionData revision = getRevision ( com . b2international . index . revision . MAIN , com . b2international . index . revision . RevisionFixtures . RevisionData . class , com . b2international . index . revision . STORAGE_KEY1 ) ; "<AssertPlaceHolder>" ; } getRevision ( java . lang . String , java . lang . Class , java . lang . String ) { return index ( ) . read ( branch , ( index ) -> index . get ( type , key ) ) ; } | org . junit . Assert . assertNull ( revision ) |
testSize ( ) { de . metanome . algorithm_helper . data_structures . PositionListIndex pli = fixture . getFirstPLI ( ) ; "<AssertPlaceHolder>" ; } getFirstPLISize ( ) { return getFirstPLI ( ) . clusters . size ( ) ; } | org . junit . Assert . assertEquals ( fixture . getFirstPLISize ( ) , pli . size ( ) ) |
testAddArtifactWithExistingMetadataButNonMatchingClassifier ( ) { artifact = org . eclipse . tycho . p2 . target . PomDependencyCollectorTest . artifactWithClassifier ( "classifier-not-in-p2-metadata" ) ; subject . addArtifactWithExistingMetadata ( artifact , org . eclipse . tycho . p2 . target . PomDependencyCollectorTest . existingMetadata ( ) ) ; java . util . Collection < org . eclipse . equinox . p2 . metadata . IInstallableUnit > units = getTargetPlatformUnits ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return map . size ( ) ; } | org . junit . Assert . assertThat ( units . size ( ) , org . hamcrest . CoreMatchers . is ( 0 ) ) |
iterateOverPartial ( ) { consumer = new org . apache . kafka . clients . consumer . KafkaConsumer < java . lang . String , java . lang . String > ( consumerProps , new org . apache . crunch . kafka . ClusterTest . StringSerDe ( ) , new org . apache . crunch . kafka . ClusterTest . StringSerDe ( ) ) ; int loops = 10 ; int numPerLoop = 100 ; int numPerPartition = 50 ; org . apache . crunch . kafka . ClusterTest . writeData ( props , topic , "batch" , loops , numPerLoop ) ; java . util . Map < org . apache . kafka . common . TopicPartition , org . apache . crunch . Pair < java . lang . Long , java . lang . Long > > offsets = new java . util . HashMap ( ) ; startOffsets = org . apache . crunch . kafka . KafkaRecordsIterableIT . getStartOffsets ( props , topic ) ; for ( Map . Entry < org . apache . kafka . common . TopicPartition , java . lang . Long > entry : startOffsets . entrySet ( ) ) { offsets . put ( entry . getKey ( ) , org . apache . crunch . Pair . of ( entry . getValue ( ) , ( ( entry . getValue ( ) ) + numPerPartition ) ) ) ; } java . lang . Iterable < org . apache . crunch . Pair < java . lang . String , java . lang . String > > data = new org . apache . crunch . kafka . KafkaRecordsIterable ( consumer , offsets , new java . util . Properties ( ) ) ; int count = 0 ; for ( org . apache . crunch . Pair < java . lang . String , java . lang . String > event : data ) { count ++ ; } "<AssertPlaceHolder>" ; } size ( ) { return 3 ; } | org . junit . Assert . assertThat ( count , org . hamcrest . core . Is . is ( ( ( startOffsets . size ( ) ) * numPerPartition ) ) ) |
UID_availability ( ) { com . mysema . rdfbean . object . ConfigurationBuilder builder = new com . mysema . rdfbean . object . ConfigurationBuilder ( ) ; builder . addClass ( com . mysema . rdfbean . object . ConfigurationBuilderTest . Person . class ) ; builder . addClass ( com . mysema . rdfbean . object . ConfigurationBuilderTest . Department . class ) ; builder . addClass ( com . mysema . rdfbean . object . ConfigurationBuilderTest . Company . class ) ; builder . addClass ( com . mysema . rdfbean . object . ConfigurationBuilderTest . Labeled . class ) ; com . mysema . rdfbean . object . Configuration configuration = builder . build ( ) ; for ( com . mysema . rdfbean . object . MappedClass mc : configuration . getMappedClasses ( ) ) { "<AssertPlaceHolder>" ; } } getUID ( ) { return uid ; } | org . junit . Assert . assertNotNull ( mc . getUID ( ) ) |
shouldUseCustomComputerUsingMockitoAndBoundBox ( ) { final int EXPECTED_RESULT = 1 ; com . octo . android . sample . ui . HelloAndroidActivity activityUnderTest = org . robolectric . Robolectric . buildActivity ( com . octo . android . sample . ui . HelloAndroidActivity . class ) . create ( ) . get ( ) ; com . octo . android . sample . ui . BoundBoxOfHelloAndroidActivity boundBoxOfHelloAndroidActivity = new com . octo . android . sample . ui . BoundBoxOfHelloAndroidActivity ( activityUnderTest ) ; com . octo . android . sample . model . Computer mockComputer = org . mockito . Mockito . mock ( com . octo . android . sample . model . Computer . class ) ; org . mockito . Mockito . when ( mockComputer . getResult ( ) ) . thenReturn ( EXPECTED_RESULT ) ; boundBoxOfHelloAndroidActivity . setComputer ( mockComputer ) ; boundBoxOfHelloAndroidActivity . boundBox_getButton ( ) . performClick ( ) ; org . mockito . Mockito . verify ( mockComputer , org . mockito . Mockito . times ( 1 ) ) . getResult ( ) ; java . lang . String textViewHelloString = boundBoxOfHelloAndroidActivity . boundBox_getTextView ( ) . getText ( ) . toString ( ) ; "<AssertPlaceHolder>" ; } getResult ( ) { return com . octo . android . sample . model . DummyComputer . RESULT ; } | org . junit . Assert . assertThat ( textViewHelloString , org . hamcrest . CoreMatchers . equalTo ( java . lang . String . valueOf ( EXPECTED_RESULT ) ) ) |
resolveNonDfs ( ) { jcifs . CIFSContext context = getContext ( ) ; context = withTestNTLMCredentials ( context ) ; jcifs . DfsResolver dfs = context . getDfs ( ) ; jcifs . DfsReferralData ref = dfs . resolve ( context , getTestServer ( ) , getTestShare ( ) , "" ) ; "<AssertPlaceHolder>" ; } getTestShare ( ) { java . lang . String testShare ; testShare = getProperties ( ) . get ( TestProperties . TEST_SHARE_MAIN ) ; if ( testShare == null ) { testShare = "test" ; } return testShare ; } | org . junit . Assert . assertNull ( ref ) |
testAllowTapPlanTBeOptional ( ) { org . tap4j . parser . Tap13Parser parser = new org . tap4j . parser . Tap13Parser ( java . nio . charset . Charset . defaultCharset ( ) . toString ( ) , true , false ) ; org . tap4j . model . TestSet testSet = parser . parseFile ( new java . io . File ( org . tap4j . parser . issue3406964 . TestDirectives . class . getResource ( "/org/tap4j/parser/issueGitHub22/issue-22-tap-stream.tap" ) . getFile ( ) ) ) ; "<AssertPlaceHolder>" ; } getTestResults ( ) { return this . testResults ; } | org . junit . Assert . assertEquals ( 4 , testSet . getTestResults ( ) . size ( ) ) |
testContinuousLearning ( ) { org . hawkular . datamining . forecast . ModelData rModel = org . hawkular . datamining . forecast . ModelReader . read ( "wnLowVariance" ) ; org . hawkular . datamining . forecast . models . SimpleExponentialSmoothing . SimpleExOptimizer optimizer = org . hawkular . datamining . forecast . models . SimpleExponentialSmoothing . optimizer ( ) ; org . hawkular . datamining . forecast . models . TimeSeriesModel modelInit = optimizer . minimizedMSE ( rModel . getData ( ) ) ; org . hawkular . datamining . forecast . models . TimeSeriesModel continuousModel = new org . hawkular . datamining . forecast . models . ContinuousModel ( org . hawkular . datamining . forecast . models . SimpleExponentialSmoothing . createWithSmoothingParam ( optimizer . result ( ) [ 0 ] ) ) ; rModel . getData ( ) . forEach ( ( dataPoint ) -> { System . out . println ( dataPoint ) ; continuousModel . learn ( dataPoint ) ; } ) ; org . hawkular . datamining . forecast . stats . AccuracyStatistics batchInitStatistics = modelInit . initStatistics ( ) ; org . hawkular . datamining . forecast . stats . AccuracyStatistics continuousLearnStatistics = continuousModel . runStatistics ( ) ; "<AssertPlaceHolder>" ; } getMse ( ) { return mse ; } | org . junit . Assert . assertTrue ( ( ( continuousLearnStatistics . getMse ( ) ) > ( batchInitStatistics . getMse ( ) ) ) ) |
testToMap ( ) { java . lang . String [ ] values = new java . lang . String [ ] { "key1:value1" , "key2:value2" , "key3:value3" } ; java . lang . String separator = ":" ; java . util . Map < java . lang . String , java . lang . String > expResult = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; expResult . put ( "key1" , "value1" ) ; expResult . put ( "key2" , "value2" ) ; expResult . put ( "key3" , "value3" ) ; java . util . Map < java . lang . String , java . lang . String > result = com . adobe . acs . commons . util . ParameterUtil . toMap ( values , separator ) ; "<AssertPlaceHolder>" ; } toMap ( java . lang . String [ ] , java . lang . String ) { return com . adobe . acs . commons . util . ParameterUtil . toMap ( values , separator , false , null ) ; } | org . junit . Assert . assertEquals ( expResult , result ) |
testExportsTheVersionOnStaging ( ) { com . liferay . exportimport . kernel . lar . ExportImportThreadLocal . setPortletStagingInProcess ( true ) ; try { com . liferay . portal . kernel . repository . model . FileEntry fileEntry = addStagedModel ( stagingGroup , addCompanyDependencies ( ) ) ; fileEntry = addVersion ( fileEntry ) ; fileEntry = addVersion ( fileEntry ) ; fileEntry = addVersion ( fileEntry ) ; exportImportStagedModel ( fileEntry ) ; com . liferay . portal . kernel . repository . model . FileEntry importedFileEntry = getStagedModel ( fileEntry . getUuid ( ) , liveGroup ) ; "<AssertPlaceHolder>" ; } finally { com . liferay . exportimport . kernel . lar . ExportImportThreadLocal . setPortletStagingInProcess ( false ) ; } } getVersion ( ) { if ( ( _version ) == null ) { return "" ; } else { return _version ; } } | org . junit . Assert . assertEquals ( fileEntry . getVersion ( ) , importedFileEntry . getVersion ( ) ) |
testBasic ( ) { org . apache . druid . server . router . RendezvousHasher hasher = new org . apache . druid . server . router . RendezvousHasher ( ) ; java . util . Set < java . lang . String > nodes = new java . util . HashSet ( ) ; nodes . add ( "localhost:1" ) ; nodes . add ( "localhost:2" ) ; nodes . add ( "localhost:3" ) ; nodes . add ( "localhost:4" ) ; nodes . add ( "localhost:5" ) ; java . util . Map < java . lang . String , java . lang . String > uuidServerMap = new java . util . HashMap ( ) ; for ( int i = 0 ; i < ( org . apache . druid . server . RendezvousHasherTest . NUM_ITERATIONS ) ; i ++ ) { java . util . UUID objectId = java . util . UUID . randomUUID ( ) ; java . lang . String targetServer = hasher . chooseNode ( nodes , org . apache . druid . java . util . common . StringUtils . toUtf8 ( objectId . toString ( ) ) ) ; uuidServerMap . put ( objectId . toString ( ) , targetServer ) ; } for ( int i = 0 ; i < 2 ; i ++ ) { for ( Map . Entry < java . lang . String , java . lang . String > entry : uuidServerMap . entrySet ( ) ) { java . lang . String targetServer = hasher . chooseNode ( nodes , org . apache . druid . java . util . common . StringUtils . toUtf8 ( entry . getKey ( ) ) ) ; "<AssertPlaceHolder>" ; } } } getValue ( ) { return value ; } | org . junit . Assert . assertEquals ( entry . getValue ( ) , targetServer ) |
shouldWriteNameOfService ( ) { com . graphhopper . jsprit . core . problem . VehicleRoutingProblem . Builder builder = VehicleRoutingProblem . Builder . newInstance ( ) ; com . graphhopper . jsprit . core . problem . job . Service s1 = Service . Builder . newInstance ( "1" ) . setName ( "cleaning" ) . addSizeDimension ( 0 , 1 ) . setLocation ( com . graphhopper . jsprit . io . util . TestUtils . loc ( "loc" ) ) . setServiceTime ( 2.0 ) . build ( ) ; com . graphhopper . jsprit . core . problem . VehicleRoutingProblem vrp = builder . addJob ( s1 ) . build ( ) ; com . graphhopper . jsprit . core . problem . VehicleRoutingProblem readVrp = writeAndRereadXml ( vrp ) ; com . graphhopper . jsprit . core . problem . job . Service s1_read = ( ( com . graphhopper . jsprit . core . problem . job . Service ) ( readVrp . getJobs ( ) . get ( "1" ) ) ) ; "<AssertPlaceHolder>" ; } getName ( ) { return name ; } | org . junit . Assert . assertTrue ( s1_read . getName ( ) . equals ( "cleaning" ) ) |
loadAllResellerKeysWithinPeriod_multipleOrganizations ( ) { givenOrganizationsWithRoles ( OrganizationRoleType . BROKER , OrganizationRoleType . RESELLER , OrganizationRoleType . BROKER ) ; java . util . List < java . lang . Long > resellerKeys = dao . loadAllResellerKeysWithinPeriod ( java . lang . System . currentTimeMillis ( ) ) ; "<AssertPlaceHolder>" ; } size ( ) { return categoriesForMarketplace . size ( ) ; } | org . junit . Assert . assertEquals ( 1 , resellerKeys . size ( ) ) |
testScalarAddNaN ( ) { org . apache . commons . math . complex . Complex x = new org . apache . commons . math . complex . Complex ( 3.0 , 4.0 ) ; double yDouble = Double . NaN ; org . apache . commons . math . complex . Complex yComplex = new org . apache . commons . math . complex . Complex ( yDouble ) ; "<AssertPlaceHolder>" ; } add ( org . jfree . chart . axis . TickUnit ) { if ( unit == null ) { throw new java . lang . NullPointerException ( "Null<sp>'unit'<sp>argument." ) ; } this . tickUnits . add ( unit ) ; java . util . Collections . sort ( this . tickUnits ) ; } | org . junit . Assert . assertEquals ( x . add ( yComplex ) , x . add ( yDouble ) ) |
test37setUserRolesByExternalID ( ) { setup ( ) ; org . apache . ranger . db . XXPortalUserRoleDao xPortalUserRoleDao = org . mockito . Mockito . mock ( org . apache . ranger . db . XXPortalUserRoleDao . class ) ; org . apache . ranger . view . VXUser vXUser = vxUser ( ) ; org . apache . ranger . view . VXPortalUser userProfile = userProfile ( ) ; java . util . List < org . apache . ranger . view . VXString > vStringRolesList = new java . util . ArrayList < org . apache . ranger . view . VXString > ( ) ; org . apache . ranger . view . VXString vXStringObj = new org . apache . ranger . view . VXString ( ) ; vXStringObj . setValue ( "ROLE_USER" ) ; vStringRolesList . add ( vXStringObj ) ; java . util . List < org . apache . ranger . entity . XXPortalUserRole > xPortalUserRoleList = new java . util . ArrayList < org . apache . ranger . entity . XXPortalUserRole > ( ) ; org . apache . ranger . entity . XXPortalUserRole XXPortalUserRole = new org . apache . ranger . entity . XXPortalUserRole ( ) ; XXPortalUserRole . setId ( org . apache . ranger . biz . TestXUserMgr . userId ) ; XXPortalUserRole . setUserId ( org . apache . ranger . biz . TestXUserMgr . userId ) ; XXPortalUserRole . setUserRole ( "ROLE_USER" ) ; xPortalUserRoleList . add ( XXPortalUserRole ) ; java . util . List < org . apache . ranger . entity . XXUserPermission > xUserPermissionsList = new java . util . ArrayList < org . apache . ranger . entity . XXUserPermission > ( ) ; org . apache . ranger . entity . XXUserPermission xUserPermissionObj = xxUserPermission ( ) ; xUserPermissionsList . add ( xUserPermissionObj ) ; java . util . List < org . apache . ranger . entity . XXGroupPermission > xGroupPermissionList = new java . util . ArrayList < org . apache . ranger . entity . XXGroupPermission > ( ) ; org . apache . ranger . entity . XXGroupPermission xGroupPermissionObj = xxGroupPermission ( ) ; xGroupPermissionList . add ( xGroupPermissionObj ) ; java . util . List < org . apache . ranger . view . 
VXGroupPermission > groupPermList = new java . util . ArrayList < org . apache . ranger . view . VXGroupPermission > ( ) ; org . apache . ranger . view . VXGroupPermission groupPermission = vxGroupPermission ( ) ; groupPermList . add ( groupPermission ) ; org . mockito . Mockito . when ( daoManager . getXXPortalUserRole ( ) ) . thenReturn ( xPortalUserRoleDao ) ; org . mockito . Mockito . when ( xPortalUserRoleDao . findByUserId ( org . apache . ranger . biz . TestXUserMgr . userId ) ) . thenReturn ( xPortalUserRoleList ) ; org . mockito . Mockito . when ( xUserMgr . getXUser ( org . apache . ranger . biz . TestXUserMgr . userId ) ) . thenReturn ( vXUser ) ; org . mockito . Mockito . when ( userMgr . getUserProfileByLoginId ( vXUser . getName ( ) ) ) . thenReturn ( userProfile ) ; java . util . List < java . lang . String > permissionList = new java . util . ArrayList < java . lang . String > ( ) ; permissionList . add ( RangerConstants . MODULE_USER_GROUPS ) ; org . apache . ranger . view . VXUser loggedInUser = vxUser ( ) ; java . util . List < java . lang . String > loggedInUserRole = new java . util . ArrayList < java . lang . String > ( ) ; loggedInUserRole . add ( RangerConstants . ROLE_ADMIN ) ; loggedInUser . setId ( 8L ) ; loggedInUser . setName ( "testuser" ) ; loggedInUser . setUserRoleList ( loggedInUserRole ) ; org . mockito . Mockito . when ( xUserService . getXUserByUserName ( "admin" ) ) . thenReturn ( loggedInUser ) ; org . apache . ranger . db . XXModuleDefDao mockxxModuleDefDao = org . mockito . Mockito . mock ( org . apache . ranger . db . XXModuleDefDao . class ) ; org . mockito . Mockito . when ( daoManager . getXXModuleDef ( ) ) . thenReturn ( mockxxModuleDefDao ) ; org . mockito . Mockito . when ( mockxxModuleDefDao . findAccessibleModulesByUserId ( 8L , 8L ) ) . thenReturn ( permissionList ) ; org . apache . ranger . view . VXStringList vXStringList = xUserMgr . setUserRolesByExternalID ( org . apache . ranger . biz . TestXUserMgr . 
userId , vStringRolesList ) ; "<AssertPlaceHolder>" ; } setUserRolesByExternalID ( java . lang . Long , org . apache . ranger . rest . VXStringList ) { return xUserMgr . setUserRolesByExternalID ( userId , roleList . getVXStrings ( ) ) ; } | org . junit . Assert . assertNotNull ( vXStringList ) |
testCanceledCase ( ) { monitor . setCanceled ( true ) ; org . eclipse . jdt . ls . core . internal . BuildWorkspaceStatus result = handler . buildWorkspace ( false , monitor ) ; "<AssertPlaceHolder>" ; } buildWorkspace ( boolean , org . eclipse . core . runtime . IProgressMonitor ) { try { if ( monitor . isCanceled ( ) ) { return org . eclipse . jdt . ls . core . internal . BuildWorkspaceStatus . CANCELLED ; } projectsManager . cleanupResources ( projectsManager . getDefaultProject ( ) ) ; org . eclipse . core . resources . IProject [ ] projects = org . eclipse . jdt . ls . core . internal . ProjectUtils . getAllProjects ( ) ; for ( org . eclipse . core . resources . IProject project : projects ) { if ( ! ( project . equals ( projectsManager . getDefaultProject ( ) ) ) ) { java . lang . String uri = org . eclipse . jdt . ls . core . internal . JDTUtils . getFileURI ( project ) ; connection . publishDiagnostics ( new org . eclipse . lsp4j . PublishDiagnosticsParams ( org . eclipse . jdt . ls . core . internal . ResourceUtils . toClientUri ( uri ) , java . util . Collections . emptyList ( ) ) ) ; } } if ( forceReBuild ) { org . eclipse . core . resources . ResourcesPlugin . getWorkspace ( ) . build ( org . eclipse . core . resources . IncrementalProjectBuilder . CLEAN_BUILD , monitor ) ; org . eclipse . core . resources . ResourcesPlugin . getWorkspace ( ) . build ( org . eclipse . core . resources . IncrementalProjectBuilder . FULL_BUILD , monitor ) ; } else { org . eclipse . core . resources . ResourcesPlugin . getWorkspace ( ) . build ( org . eclipse . core . resources . IncrementalProjectBuilder . INCREMENTAL_BUILD , monitor ) ; } java . util . List < org . eclipse . core . resources . IMarker > problemMarkers = new java . util . ArrayList ( ) ; for ( org . eclipse . core . resources . IProject project : projects ) { if ( ! ( project . equals ( projectsManager . getDefaultProject ( ) ) ) ) { java . util . List < org . eclipse . core . resources . 
IMarker > markers = org . eclipse . jdt . ls . core . internal . ResourceUtils . getErrorMarkers ( project ) ; if ( markers != null ) { problemMarkers . addAll ( markers ) ; } } } java . util . List < java . lang . String > errors = problemMarkers . stream ( ) . filter ( ( m ) -> ( m . getAttribute ( IMarker . SEVERITY , 0 ) ) == IMarker . SEVERITY_ERROR ) . map ( ( e ) -> convertMarker ( e ) ) . collect ( java . util . stream . Collectors . toList ( ) ) ; if ( errors . isEmpty ( ) ) { return org . eclipse . jdt . ls . core . internal . BuildWorkspaceStatus . SUCCEED ; } else { java . lang . String newline = java . lang . System . getProperty ( "line.separator" ) ; org . eclipse . jdt . ls . core . internal . JavaLanguageServerPlugin . logError ( ( ( "Error<sp>occured<sp>while<sp>building<sp>workspace.<sp>Details:<sp>" + newline ) + ( java . lang . String . join ( newline , errors ) ) ) ) ; return org . eclipse . jdt . ls . core . internal . BuildWorkspaceStatus . WITH_ERROR ; } } catch ( org . eclipse . core . runtime . CoreException e ) { org . eclipse . jdt . ls . core . internal . JavaLanguageServerPlugin . logException ( "Failed<sp>to<sp>build<sp>workspace." , e ) ; return org . eclipse . jdt . ls . core . internal . BuildWorkspaceStatus . FAILED ; } catch ( org . eclipse . core . runtime . OperationCanceledException e ) { return org . eclipse . jdt . ls . core . internal . BuildWorkspaceStatus . CANCELLED ; } } | org . junit . Assert . assertEquals ( result , BuildWorkspaceStatus . CANCELLED ) |
testFindByLHINCodeOrgNameProgramName ( ) { java . lang . String orgLHINCode1 = "100" ; java . lang . String orgLHINCode2 = "200" ; java . lang . String orgName1 = "alpha" ; java . lang . String orgName2 = "bravo" ; java . lang . String programName1 = "Program1" ; java . lang . String programName2 = "Program2" ; org . oscarehr . common . model . OcanConnexOption ocanConnexOption1 = new org . oscarehr . common . model . OcanConnexOption ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( ocanConnexOption1 ) ; ocanConnexOption1 . setLHINCode ( orgLHINCode1 ) ; ocanConnexOption1 . setOrgName ( orgName1 ) ; ocanConnexOption1 . setProgramName ( programName1 ) ; dao . persist ( ocanConnexOption1 ) ; org . oscarehr . common . model . OcanConnexOption ocanConnexOption2 = new org . oscarehr . common . model . OcanConnexOption ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( ocanConnexOption2 ) ; ocanConnexOption2 . setLHINCode ( orgLHINCode2 ) ; ocanConnexOption2 . setOrgName ( orgName2 ) ; ocanConnexOption2 . setProgramName ( programName2 ) ; dao . persist ( ocanConnexOption2 ) ; org . oscarehr . common . model . OcanConnexOption ocanConnexOption3 = new org . oscarehr . common . model . OcanConnexOption ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( ocanConnexOption3 ) ; ocanConnexOption3 . setLHINCode ( orgLHINCode1 ) ; ocanConnexOption3 . setOrgName ( orgName1 ) ; ocanConnexOption3 . setProgramName ( programName1 ) ; dao . persist ( ocanConnexOption3 ) ; java . util . List < org . oscarehr . common . model . OcanConnexOption > expectedResult = new java . util . ArrayList < org . oscarehr . common . model . OcanConnexOption > ( java . util . Arrays . asList ( ocanConnexOption1 , ocanConnexOption3 ) ) ; java . util . List < org . oscarehr . common . model . OcanConnexOption > result = dao . 
findByLHINCodeOrgNameProgramName ( orgLHINCode1 , orgName1 , programName1 ) ; org . apache . log4j . Logger logger = org . oscarehr . util . MiscUtils . getLogger ( ) ; if ( ( result . size ( ) ) != ( expectedResult . size ( ) ) ) { logger . warn ( "Array<sp>sizes<sp>do<sp>not<sp>match." ) ; org . junit . Assert . fail ( "Array<sp>sizes<sp>do<sp>not<sp>match." ) ; } for ( int i = 0 ; i < ( expectedResult . size ( ) ) ; i ++ ) { if ( ! ( expectedResult . get ( i ) . equals ( result . get ( i ) ) ) ) { logger . warn ( "Items<sp>do<sp>not<sp>match." ) ; org . junit . Assert . fail ( "Items<sp>do<sp>not<sp>match." ) ; } } "<AssertPlaceHolder>" ; } get ( java . lang . String ) { try { return terser . get ( path ) ; } catch ( ca . uhn . hl7v2 . HL7Exception e ) { oscar . oscarLab . ca . all . parsers . CLSHandler . logger . warn ( ( "Unable<sp>to<sp>get<sp>field<sp>at<sp>" + path ) , e ) ; return null ; } } | org . junit . Assert . assertTrue ( true ) |
testSetterWithoutGetterCreatesUngettableCollectionMethodProperty ( ) { com . codiform . moo . property . Property property = com . codiform . moo . property . PropertyFactory . createProperty ( getCollectionMethod ( "setExplicitMethod" , java . util . Set . class ) , AccessMode . METHOD ) ; "<AssertPlaceHolder>" ; } canGetValue ( ) { return true ; } | org . junit . Assert . assertFalse ( property . canGetValue ( ) ) |
test4 ( ) { java . lang . String process = align ( "|" , "|<sp>foo<sp>1<sp>|<sp>foo<sp>2\n|<sp>foo<sp>1<sp>|<sp>foo<sp>2\n" ) ; "<AssertPlaceHolder>" ; } align ( java . lang . String , java . lang . String ) { return new osmedile . intellij . stringmanip . align . ColumnAligner ( new osmedile . intellij . stringmanip . align . ColumnAlignerModel ( separator ) ) . align ( text ) ; } | org . junit . Assert . assertThat ( process , org . hamcrest . CoreMatchers . is ( "|<sp>foo<sp>1<sp>|<sp>foo<sp>2\n|<sp>foo<sp>1<sp>|<sp>foo<sp>2\n" ) ) |
persistedIdsShouldStillBeCounted ( ) { org . neo4j . io . fs . StoreChannel channel = getStoreChannel ( ) ; int batchSize = 10 ; org . neo4j . kernel . impl . store . id . FreeIdKeeper keeper = new org . neo4j . kernel . impl . store . id . FreeIdKeeper ( channel , batchSize , true ) ; for ( int i = 0 ; i < batchSize ; i ++ ) { keeper . freeId ( i ) ; } int extraIds = 3 ; for ( int i = batchSize ; i < ( batchSize + extraIds ) ; i ++ ) { keeper . freeId ( i ) ; } "<AssertPlaceHolder>" ; } getCount ( ) { return count ; } | org . junit . Assert . assertEquals ( ( batchSize + extraIds ) , keeper . getCount ( ) ) |
canProcessTextToSpeech ( ) { javax . sound . sampled . AudioInputStream audio = mary . generateAudio ( "some<sp>text" ) ; "<AssertPlaceHolder>" ; } generateAudio ( java . lang . String ) { verifyInputTypeIsText ( ) ; verifyOutputTypeIsAudio ( ) ; verifyVoiceIsAvailableForLocale ( ) ; marytts . datatypes . MaryData in = getMaryDataFromText ( text ) ; marytts . datatypes . MaryData out = process ( in ) ; return out . getAudio ( ) ; } | org . junit . Assert . assertNotNull ( audio ) |
testNestedTransaction ( ) { com . ctrip . platform . dal . dao . annotation . normal . BaseTransactionAnnoClass test = create ( ) ; "<AssertPlaceHolder>" ; } performNest ( ) { org . junit . Assert . assertTrue ( com . ctrip . platform . dal . dao . client . DalTransactionManager . isInTransaction ( ) ) ; org . junit . Assert . assertEquals ( noShardDb , com . ctrip . platform . dal . dao . client . DalTransactionManager . getLogicDbName ( ) ) ; perform ( ) ; return com . ctrip . platform . dal . dao . annotation . normal . BaseTransactionAnnoClass . DONE ; } | org . junit . Assert . assertEquals ( DONE , test . performNest ( ) ) |
testTrim ( ) { out = new java . io . BufferedWriter ( new java . io . FileWriter ( org . apache . hadoop . conf . TestConfiguration . CONFIG ) ) ; startConfig ( ) ; java . lang . String [ ] whitespaces = new java . lang . String [ ] { "" , "<sp>" , "\n" , "\t" } ; java . lang . String [ ] name = new java . lang . String [ 100 ] ; for ( int i = 0 ; i < ( name . length ) ; i ++ ) { name [ i ] = "foo" + i ; java . lang . StringBuilder prefix = new java . lang . StringBuilder ( ) ; java . lang . StringBuilder postfix = new java . lang . StringBuilder ( ) ; for ( int j = 0 ; j < 3 ; j ++ ) { prefix . append ( whitespaces [ org . apache . hadoop . conf . TestConfiguration . RAN . nextInt ( whitespaces . length ) ] ) ; postfix . append ( whitespaces [ org . apache . hadoop . conf . TestConfiguration . RAN . nextInt ( whitespaces . length ) ] ) ; } appendProperty ( ( ( prefix + ( name [ i ] ) ) + postfix ) , ( ( name [ i ] ) + ".value" ) ) ; } endConfig ( ) ; conf . addResource ( new org . apache . hadoop . fs . Path ( org . apache . hadoop . conf . TestConfiguration . CONFIG ) ) ; for ( java . lang . String n : name ) { "<AssertPlaceHolder>" ; } } get ( java . lang . String ) { java . lang . String [ ] names = handleDeprecation ( org . apache . hadoop . conf . Configuration . deprecationContext . get ( ) , name ) ; java . lang . String result = null ; for ( java . lang . String n : names ) { result = substituteVars ( getProps ( ) . getProperty ( n ) ) ; } return result ; } | org . junit . Assert . assertEquals ( ( n + ".value" ) , conf . get ( n ) ) |
testSchedulingVertexOnlyWithBroadcast ( ) { setupDAGVertexOnlyWithBroadcast ( 30 , 1 , 1 ) ; vertexManager . onVertexStateUpdated ( new org . apache . tez . dag . api . event . VertexStateUpdate ( "v0" , org . apache . tez . dag . api . event . VertexState . CONFIGURED ) ) ; vertexManager . onVertexStateUpdated ( new org . apache . tez . dag . api . event . VertexStateUpdate ( "v1" , org . apache . tez . dag . api . event . VertexState . CONFIGURED ) ) ; vertexManager . onVertexManagerEventReceived ( getVMEvent ( 250 , "v0" , 0 ) ) ; vertexManager . onVertexManagerEventReceived ( getVMEvent ( 200 , "v1" , 0 ) ) ; verify ( ctx , times ( 1 ) ) . reconfigureVertex ( eq ( 30 ) , any ( org . apache . tez . dag . api . VertexLocationHint . class ) , edgePropertiesCaptor . capture ( ) ) ; "<AssertPlaceHolder>" ; vertexManager . onVertexStarted ( null ) ; vertexManager . onSourceTaskCompleted ( getTaId ( "v0" , 0 ) ) ; vertexManager . onSourceTaskCompleted ( getTaId ( "v1" , 0 ) ) ; verifyScheduleRequest ( 0 ) ; vertexManager . onVertexStateUpdated ( new org . apache . tez . dag . api . event . VertexStateUpdate ( "v2" , org . apache . tez . dag . api . event . VertexState . RUNNING ) ) ; verifyScheduleRequest ( 1 , 0 , 1 , 6 , 7 ) ; } getValue ( ) { return raw . getValue ( ) ; } | org . junit . Assert . assertFalse ( edgePropertiesCaptor . getValue ( ) . containsKey ( "v2" ) ) |
testGetPluginManager ( ) { org . eclipse . core . resources . IWorkspaceRoot workspaceRoot = org . eclipse . core . resources . ResourcesPlugin . getWorkspace ( ) . getRoot ( ) ; org . eclipse . core . resources . IProject theProject = workspaceRoot . getProject ( org . eclipse . thym . core . HybridProjectTest . PROJECT_NAME ) ; org . eclipse . thym . core . HybridProject hProject = org . eclipse . thym . core . HybridProject . getHybridProject ( theProject ) ; "<AssertPlaceHolder>" ; } getPluginManager ( ) { if ( ( pluginManager ) == null ) { pluginManager = new org . eclipse . thym . core . plugin . CordovaPluginManager ( this ) ; } return pluginManager ; } | org . junit . Assert . assertNotNull ( hProject . getPluginManager ( ) ) |
testIsCopyForeignKeys ( ) { classUnderTest . setCopyForeignKeys ( true ) ; "<AssertPlaceHolder>" ; } isCopyForeignKeys ( ) { return copyForeignKeys ; } | org . junit . Assert . assertEquals ( true , classUnderTest . isCopyForeignKeys ( ) ) |
testEmptyId ( ) { "<AssertPlaceHolder>" ; } getEmptyId ( ) { return com . amazonaws . mobileconnectors . amazonmobileanalytics . internal . core . idresolver . Id . EMPTY_ID ; } | org . junit . Assert . assertThat ( com . amazonaws . mobileconnectors . amazonmobileanalytics . internal . core . idresolver . Id . getEmptyId ( ) . getValue ( ) , org . hamcrest . Matchers . is ( "" ) ) |
testFetchByPrimaryKeysWithMultiplePrimaryKeysWhereNoPrimaryKeysExist ( ) { long pk1 = com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ; long pk2 = com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ; java . util . Set < java . io . Serializable > primaryKeys = new java . util . HashSet < java . io . Serializable > ( ) ; primaryKeys . add ( pk1 ) ; primaryKeys . add ( pk2 ) ; java . util . Map < java . io . Serializable , com . liferay . knowledge . base . model . KBFolder > kbFolders = _persistence . fetchByPrimaryKeys ( primaryKeys ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return _portalCacheListeners . isEmpty ( ) ; } | org . junit . Assert . assertTrue ( kbFolders . isEmpty ( ) ) |
testSubNetworkNodeTableColumnsPropagate ( ) { defaultSetup ( ) ; sub . getDefaultNodeTable ( ) . createColumn ( "ASDFASDF" , org . cytoscape . model . subnetwork . Integer . class , true ) ; org . cytoscape . model . subnetwork . CySubNetwork sub2 = root . addSubNetwork ( ) ; "<AssertPlaceHolder>" ; } createColumn ( org . cytoscape . model . CyNetwork , org . cytoscape . jobs . SUIDUtil$Identifiable , java . lang . String ) { java . lang . Class < ? extends org . cytoscape . model . CyIdentifiable > clazz = null ; switch ( type ) { case NETWORK : clazz = org . cytoscape . model . CyNetwork . class ; break ; case NODE : clazz = org . cytoscape . model . CyNode . class ; break ; case EDGE : clazz = org . cytoscape . model . CyEdge . class ; break ; case UNKNOWN : return null ; } org . cytoscape . model . CyTable table = network . getTable ( clazz , CyNetwork . HIDDEN_ATTRS ) ; if ( ( table . getColumn ( columnName ) ) == null ) table . createColumn ( columnName , org . cytoscape . jobs . Long . class , false ) ; return table ; } | org . junit . Assert . assertNotNull ( sub2 . getDefaultNodeTable ( ) . getColumn ( "ASDFASDF" ) ) |
output_header_quote_style ( ) { com . asakusafw . runtime . io . text . csv . CsvTextFormat format = com . asakusafw . runtime . io . text . csv . CsvTextFormat . builder ( ) . withHeaderQuoteStyle ( QuoteStyle . ALWAYS ) . build ( ) ; java . lang . String [ ] results = com . asakusafw . runtime . io . text . csv . CsvTextFormatTest . write ( format , new java . lang . String [ ] [ ] { new java . lang . String [ ] { "Hello!" } } ) ; "<AssertPlaceHolder>" ; } is ( java . lang . String ) { com . asakusafw . dmdl . java . util . JavaName jn = com . asakusafw . dmdl . java . util . JavaName . of ( new com . asakusafw . dmdl . model . AstSimpleName ( null , name ) ) ; jn . addFirst ( "is" ) ; java . lang . Object result = invoke ( jn . toMemberName ( ) ) ; return ( ( java . lang . Boolean ) ( result ) ) ; } | org . junit . Assert . assertThat ( results , org . hamcrest . CoreMatchers . is ( new java . lang . String [ ] { "Hello!" } ) ) |
emptyAutoDeletionIfSetup ( ) { jetbrains . jetpad . projectional . cell . ProjectionalPropertySynchronizerTest . AutoDeleteChild child = new jetbrains . jetpad . projectional . cell . ProjectionalPropertySynchronizerTest . AutoDeleteChild ( ) ; container . child . set ( child ) ; jetbrains . jetpad . cell . Cell childCell = focusChild ( child ) ; jetbrains . jetpad . cell . action . CellActions . toEnd ( childCell ) . run ( ) ; backspace ( ) ; backspace ( ) ; "<AssertPlaceHolder>" ; } get ( ) { return myValue . get ( ) ; } | org . junit . Assert . assertNull ( container . child . get ( ) ) |
willSoonExpireFalse ( ) { long expirationLong = ( java . lang . System . currentTimeMillis ( ) ) + ( ( 1000 * 60 ) * 11 ) ; java . text . SimpleDateFormat sdf = new java . text . SimpleDateFormat ( "yyyy-MM-dd<sp>HH:mm:ss" ) ; sdf . setTimeZone ( java . util . TimeZone . getTimeZone ( "GMT" ) ) ; java . lang . String expirationStr = sdf . format ( new java . util . Date ( expirationLong ) ) ; java . lang . String ak = "ak" ; java . lang . String sk = "sk" ; java . lang . String token = "token" ; long duration = 6000L ; com . aliyuncs . auth . InstanceProfileCredentials credentials = new com . aliyuncs . auth . InstanceProfileCredentials ( ak , sk , token , expirationStr , duration ) ; "<AssertPlaceHolder>" ; } willSoonExpire ( ) { long now = java . lang . System . currentTimeMillis ( ) ; return ( ( this . roleSessionDurationSeconds ) * ( 1 - ( expireFact ) ) ) > ( ( ( expiration ) - now ) / 1000 ) ; } | org . junit . Assert . assertFalse ( credentials . willSoonExpire ( ) ) |
testCreateDropRole ( ) { java . lang . String roleName = "test-drop-role" ; java . lang . String grantor = "grantor" ; long seqId = sentryStore . createRole ( org . apache . sentry . provider . db . generic . service . persistent . TestDelegateSentryStore . SEARCH , roleName , grantor ) . getSequenceId ( ) ; "<AssertPlaceHolder>" ; } dropRole ( java . lang . String , java . lang . String , boolean ) { org . apache . sentry . provider . db . service . thrift . TDropSentryRoleRequest request = new org . apache . sentry . provider . db . service . thrift . TDropSentryRoleRequest ( ) ; request . setProtocol_version ( ThriftConstants . TSENTRY_SERVICE_VERSION_CURRENT ) ; request . setRequestorUserName ( requestorUserName ) ; request . setRoleName ( roleName ) ; try { org . apache . sentry . provider . db . service . thrift . TDropSentryRoleResponse response = client . drop_sentry_role ( request ) ; org . apache . sentry . service . thrift . Status status = org . apache . sentry . service . thrift . Status . fromCode ( response . getStatus ( ) . getValue ( ) ) ; if ( ifExists && ( status == ( org . apache . sentry . service . thrift . Status . NO_SUCH_OBJECT ) ) ) { return ; } org . apache . sentry . service . thrift . Status . throwIfNotOk ( response . getStatus ( ) ) ; } catch ( org . apache . thrift . TException e ) { throw new org . apache . sentry . SentryUserException ( org . apache . sentry . provider . db . service . thrift . SentryPolicyServiceClientDefaultImpl . THRIFT_EXCEPTION_MESSAGE , e ) ; } } | org . junit . Assert . assertEquals ( ( seqId + 1 ) , sentryStore . dropRole ( org . apache . sentry . provider . db . generic . service . persistent . TestDelegateSentryStore . SEARCH , roleName , grantor ) . getSequenceId ( ) ) |
applyHandlerWithLocal ( ) { java . util . concurrent . atomic . AtomicReference < java . util . Optional < java . lang . Integer > > localValue = new java . util . concurrent . atomic . AtomicReference ( ) ; io . trane . future . Local < java . lang . Integer > l = io . trane . future . Local . apply ( ) ; l . update ( 1 ) ; io . trane . future . Promise < java . lang . Integer > p = io . trane . future . Promise . apply ( ( ex ) -> { } ) ; l . update ( 2 ) ; p . ensure ( ( ) -> localValue . set ( l . get ( ) ) ) ; p . setValue ( 1 ) ; "<AssertPlaceHolder>" ; } get ( ) { final java . util . Optional < ? > [ ] ctx = io . trane . future . Local . threadLocal . get ( ) ; if ( ( ctx == null ) || ( ( ctx . length ) <= ( position ) ) ) return java . util . Optional . empty ( ) ; final java . util . Optional < ? > v = ctx [ position ] ; if ( v == null ) return java . util . Optional . empty ( ) ; else return ( ( java . util . Optional < T > ) ( v ) ) ; } | org . junit . Assert . assertEquals ( java . util . Optional . of ( 1 ) , localValue . get ( ) ) |
test ( ) { java . lang . String str = "abcdefghijk1234567.1111<sp>(<sp>" ; System . out . println ( str ) ; java . io . ByteArrayOutputStream baos = new java . io . ByteArrayOutputStream ( ) ; org . fastcatsearch . ir . io . DataOutput output = new org . apache . lucene . store . OutputStreamDataOutput ( baos ) ; output . writeString ( str ) ; output . flush ( ) ; byte [ ] buffer = baos . toByteArray ( ) ; java . io . ByteArrayInputStream bais = new java . io . ByteArrayInputStream ( buffer ) ; org . fastcatsearch . ir . io . DataInput input = new org . apache . lucene . store . InputStreamDataInput ( bais ) ; java . lang . String actual = input . readString ( ) ; System . out . println ( actual ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { return key . equals ( ( ( org . fastcatsearch . job . state . TaskKey ) ( other ) ) . key ) ; } | org . junit . Assert . assertTrue ( actual . equals ( str ) ) |
urlMatcherWithNumericNameTest ( ) { final com . github . wuic . util . UrlMatcher urlMatcher = com . github . wuic . util . UrlUtils . urlMatcher ( "/workflow/4000/4000/nut.js" ) ; "<AssertPlaceHolder>" ; } matches ( ) { return matches ; } | org . junit . Assert . assertFalse ( urlMatcher . matches ( ) ) |
testDSAKey ( ) { java . security . KeyPair keyPair = instance . decodePrivateKey ( "src/test/resources/dsa" , "" ) ; "<AssertPlaceHolder>" ; } | org . junit . Assert . assertNotNull ( keyPair ) |
testDynamicQueryByProjectionMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . dynamic . data . mapping . model . DDMStructureVersion . class , _dynamicQueryClassLoader ) ; dynamicQuery . setProjection ( com . liferay . portal . kernel . dao . orm . ProjectionFactoryUtil . property ( "structureVersionId" ) ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . in ( "structureVersionId" , new java . lang . Object [ ] { com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) } ) ) ; java . util . List < java . lang . Object > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; } | org . junit . Assert . assertEquals ( 0 , result . size ( ) ) |
testGetRow ( ) { byte [ ] bytes = "row" . getBytes ( com . pentaho . big . data . bundles . impl . shim . hbase . ResultImplTest . UTF_8 ) ; when ( delegate . getRow ( ) ) . thenReturn ( bytes ) ; "<AssertPlaceHolder>" ; } getRow ( ) { return result . getRow ( ) ; } | org . junit . Assert . assertArrayEquals ( bytes , result . getRow ( ) ) |
testScalarMultipleAggregateViaSparkPath ( ) { sqlText = "select<sp>count(a2),<sp>avg(cast(b2<sp>as<sp>float)),<sp>sum(distinct<sp>c2),<sp>max(distinct<sp>d2),<sp>count(distinct<sp>e2)<sp>from<sp>t2<sp>--splice-properties<sp>useSpark=true" ; expected = "select<sp>count(distinct<sp>c2),<sp>sum(d2),<sp>count(distinct<sp>e2)<sp>from<sp>t1,<sp>t2<sp>--splice-properties<sp>useSpark=true\n<sp>where<sp>b1=b2" 0 + ( "------------------------\n" + "40960<sp>|<sp>5<sp>|61440<sp>|<sp>5<sp>|" 3 ) ; rs = methodWatcher . executeQuery ( sqlText ) ; resultString = TestUtils . FormattedResult . ResultFactory . toStringUnsorted ( rs ) ; "<AssertPlaceHolder>" ; rs . close ( ) ; } toStringUnsorted ( com . splicemachine . homeless . ResultSet ) { return com . splicemachine . homeless . TestUtils . FormattedResult . ResultFactory . convert ( "" , rs , false ) . toString ( ) . trim ( ) ; } | org . junit . Assert . assertEquals ( ( ( ( ( ( ( "select<sp>count(distinct<sp>c2),<sp>sum(d2),<sp>count(distinct<sp>e2)<sp>from<sp>t1,<sp>t2<sp>--splice-properties<sp>useSpark=true\n<sp>where<sp>b1=b2" 1 + sqlText ) + "select<sp>count(distinct<sp>c2),<sp>sum(d2),<sp>count(distinct<sp>e2)<sp>from<sp>t1,<sp>t2<sp>--splice-properties<sp>useSpark=true\n<sp>where<sp>b1=b2" 1 ) + "select<sp>count(distinct<sp>c2),<sp>sum(d2),<sp>count(distinct<sp>e2)<sp>from<sp>t1,<sp>t2<sp>--splice-properties<sp>useSpark=true\n<sp>where<sp>b1=b2" 3 ) + expected ) + "40960<sp>|<sp>5<sp>|61440<sp>|<sp>5<sp>|" 0 ) + resultString ) , expected , resultString ) |
writeFromRedisTest ( ) { com . amazon . verticles . CacheVerticleTest . LOGGER . info ( "<sp>---><sp>Testcase:<sp>writeFromRedisTest" ) ; io . vertx . core . json . JsonObject message = io . vertx . core . json . JsonObject . mapFrom ( prepareData ( ) ) ; eb . send ( Constants . CACHE_REDIS_EVENTBUS_ADDRESS , message , ( res ) -> { if ( res . succeeded ( ) ) { java . lang . Object body = res . result ( ) . body ( ) ; com . amazon . verticles . CacheVerticleTest . LOGGER . info ( ( "Received<sp>result<sp>" + body ) ) ; "<AssertPlaceHolder>" ; } else { com . amazon . verticles . CacheVerticleTest . LOGGER . info ( res . cause ( ) ) ; org . junit . Assert . fail ( ) ; } } ) ; } prepareData ( ) { com . amazon . vo . TrackingMessage msg = new com . amazon . vo . TrackingMessage ( ) ; msg . setMessageId ( "messageId" ) ; msg . setUserAgent ( "myUserAgent" ) ; msg . setProgramId ( "12345" ) ; msg . setProgramName ( "myProgram" ) ; msg . setCustomerName ( "myCustomer" ) ; msg . setCustomerId ( 1234 ) ; msg . setChecksum ( "check123" ) ; msg . setValid ( true ) ; return msg ; } | org . junit . Assert . assertNotNull ( body ) |
addLastTest ( ) { for ( int i = 0 ; i < ( TEST_QTY ) ; i ++ ) { java . lang . String str = java . lang . Integer . toString ( i ) ; testList . addLast ( str ) ; "<AssertPlaceHolder>" ; } } getLast ( ) { T result = peekLast ( ) ; if ( result == null ) { throw new java . util . NoSuchElementException ( ) ; } return result ; } | org . junit . Assert . assertEquals ( str , testList . getLast ( ) ) |
testSerializeSubtitution ( ) { com . turn . shapeshifter . NamedSchema schema = com . turn . shapeshifter . NamedSchema . of ( com . turn . shapeshifter . testing . TestProtos . Union . getDescriptor ( ) , "Union" ) . substitute ( "string_value" , "$ref" ) ; com . turn . shapeshifter . SchemaRegistry registry = new com . turn . shapeshifter . SchemaRegistry ( ) ; registry . register ( schema ) ; com . turn . shapeshifter . testing . TestProtos . Union union = com . turn . shapeshifter . testing . TestProtos . Union . newBuilder ( ) . setStringValue ( "foo" ) . build ( ) ; com . fasterxml . jackson . databind . JsonNode result = new com . turn . shapeshifter . NamedSchemaSerializer ( schema ) . serialize ( union , registry ) ; "<AssertPlaceHolder>" ; } get ( com . google . protobuf . Descriptors . Descriptor ) { return com . turn . shapeshifter . AutoSchema . of ( descriptor ) ; } | org . junit . Assert . assertNotNull ( result . get ( "$ref" ) ) |
testColumnStd ( ) { org . nd4j . linalg . factory . Nd4j . MAX_ELEMENTS_PER_SLICE = Integer . MAX_VALUE ; org . nd4j . linalg . factory . Nd4j . MAX_SLICES_TO_PRINT = Integer . MAX_VALUE ; org . nd4j . linalg . api . ndarray . INDArray twoByThree = org . nd4j . linalg . factory . Nd4j . linspace ( 1 , 600 , 600 ) . reshape ( 150 , 4 ) ; org . nd4j . linalg . api . ndarray . INDArray columnStd = twoByThree . std ( 0 ) ; org . nd4j . linalg . api . ndarray . INDArray assertion = org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 173.78148F , 173.78148F , 173.78148F , 173.78148F } ) ; "<AssertPlaceHolder>" ; } create ( double [ ] , int [ ] , int [ ] , long ) { shape = org . nd4j . linalg . factory . Nd4j . getEnsuredShape ( shape ) ; if ( ( shape . length ) == 1 ) { if ( ( shape [ 0 ] ) == ( data . length ) ) { shape = new int [ ] { 1 , data . length } ; } else throw new org . nd4j . linalg . exception . ND4JIllegalStateException ( ( ( ( "Shape<sp>of<sp>the<sp>new<sp>array<sp>" + ( org . nd4j . linalg . factory . Arrays . toString ( shape ) ) ) + "<sp>doesn't<sp>match<sp>data<sp>length:<sp>" ) + ( data . length ) ) ) ; } org . nd4j . linalg . factory . Nd4j . checkShapeValues ( data . length , shape ) ; org . nd4j . linalg . factory . INDArray ret = org . nd4j . linalg . factory . Nd4j . INSTANCE . create ( data , shape , stride , offset ) ; org . nd4j . linalg . factory . Nd4j . logCreationIfNecessary ( ret ) ; return ret ; } | org . junit . Assert . assertEquals ( assertion , columnStd ) |
test_muilt_parent ( ) { org . nutz . ioc . Ioc ioc = org . nutz . ioc . json . Utils . I ( org . nutz . ioc . json . Utils . J ( "fox" , "name:'P',age:10" ) , org . nutz . ioc . json . Utils . J ( "f2" , "parent:'fox'" ) , org . nutz . ioc . json . Utils . J ( "f3" , "parent:'f2'" ) ) ; org . nutz . ioc . json . pojo . Animal f3 = ioc . get ( org . nutz . ioc . json . pojo . Animal . class , "f3" ) ; "<AssertPlaceHolder>" ; } getAge ( ) { return age ; } | org . junit . Assert . assertEquals ( 10 , f3 . getAge ( ) ) |
testCreateMedicalDevicesSectionBuilder ( ) { org . openhealthtools . mdht . uml . cda . builder . SectionBuilder < org . openhealthtools . mdht . uml . cda . ihe . MedicalDevicesSection > sectionBuilder = org . openhealthtools . mdht . uml . cda . ihe . builder . IHEBuilderFactory . createMedicalDevicesSectionBuilder ( ) ; org . openhealthtools . mdht . uml . cda . ihe . MedicalDevicesSection section = sectionBuilder . buildSection ( ) ; "<AssertPlaceHolder>" ; Diagnostician . INSTANCE . validate ( section ) ; org . openhealthtools . mdht . uml . cda . util . CDAUtil . saveSnippet ( section , System . out ) ; } buildSection ( ) { org . openhealthtools . mdht . uml . cda . Section section = CDAFactory . eINSTANCE . createSection ( ) ; construct ( section ) ; return section ; } | org . junit . Assert . assertNotNull ( section ) |
remoteDifferent_Operations_blocking ( ) { org . ebayopensource . turmeric . runtime . tests . service1 . sample . types1 . MyMessage msg = org . ebayopensource . turmeric . runtime . tests . common . util . TestUtils . createTestMessage ( ) ; org . ebayopensource . turmeric . runtime . sif . service . Service service = org . ebayopensource . turmeric . runtime . sif . service . ServiceFactory . create ( "test1" , "remote" , serverUri . toURL ( ) ) ; service . createDispatch ( "echoString" ) . invokeAsync ( ECHO_STRING ) ; msg . setBody ( msg . getBody ( ) ) ; service . createDispatch ( "myTestOperation" ) . invokeAsync ( msg ) ; java . util . List < javax . xml . ws . Response < ? > > responseList = service . poll ( true , false ) ; "<AssertPlaceHolder>" ; } poll ( boolean , boolean ) { java . util . List < javax . xml . ws . Response < ? > > result = null ; try { m_proxy = getProxy ( ) ; } catch ( com . ebay . soaframework . common . exceptions . ServiceException serviceException ) { throw com . ebay . soaframework . common . exceptions . ServiceRuntimeException . wrap ( serviceException ) ; } result = m_proxy . poll ( param0 , param1 ) ; return result ; } | org . junit . Assert . assertTrue ( ( ( responseList . size ( ) ) == 2 ) ) |
testFileCloseAfterEnteringMaintenance ( ) { org . apache . hadoop . hdfs . TestMaintenanceState . LOG . info ( "Starting<sp>testFileCloseAfterEnteringMaintenance" ) ; int expirationInMs = 30 * 1000 ; int numDataNodes = 3 ; int numNameNodes = 1 ; getConf ( ) . setInt ( DFSConfigKeys . DFS_NAMENODE_REPLICATION_MIN_KEY , 2 ) ; startCluster ( numNameNodes , numDataNodes ) ; getCluster ( ) . waitActive ( ) ; org . apache . hadoop . hdfs . server . namenode . FSNamesystem fsn = getCluster ( ) . getNameNode ( ) . getNamesystem ( ) ; java . util . List < java . lang . String > hosts = new java . util . ArrayList ( ) ; for ( org . apache . hadoop . hdfs . server . datanode . DataNode dn : getCluster ( ) . getDataNodes ( ) ) { hosts . add ( dn . getDisplayName ( ) ) ; putNodeInService ( 0 , dn . getDatanodeUuid ( ) ) ; } "<AssertPlaceHolder>" ; org . apache . hadoop . fs . Path openFile = new org . apache . hadoop . fs . Path ( "/testClosingFileInMaintenance.dat" ) ; writeFile ( getCluster ( ) . getFileSystem ( ) , openFile , ( ( short ) ( 3 ) ) ) ; org . apache . hadoop . fs . FSDataOutputStream fsDataOutputStream = getCluster ( ) . getFileSystem ( ) . append ( openFile ) ; byte [ ] bytes = new byte [ 1024 ] ; fsDataOutputStream . write ( bytes ) ; fsDataOutputStream . hsync ( ) ; org . apache . hadoop . hdfs . protocol . LocatedBlocks lbs = org . apache . hadoop . hdfs . server . namenode . NameNodeAdapter . getBlockLocations ( getCluster ( ) . getNameNode ( 0 ) , openFile . toString ( ) , 0 , ( 3 * ( blockSize ) ) ) ; org . apache . hadoop . hdfs . protocol . DatanodeInfo [ ] dnInfos4LastBlock = lbs . getLastLocatedBlock ( ) . getLocations ( ) ; takeNodeOutofService ( 0 , com . google . common . collect . Lists . newArrayList ( dnInfos4LastBlock [ 0 ] . getDatanodeUuid ( ) , dnInfos4LastBlock [ 1 ] . getDatanodeUuid ( ) ) , ( ( org . apache . hadoop . util . Time . now ( ) ) + expirationInMs ) , null , null , AdminStates . ENTERING_MAINTENANCE ) ; fsDataOutputStream . 
close ( ) ; cleanupFile ( getCluster ( ) . getFileSystem ( ) , openFile ) ; } getNumLiveDataNodes ( ) { return this . router . getMetrics ( ) . getNumLiveNodes ( ) ; } | org . junit . Assert . assertEquals ( numDataNodes , fsn . getNumLiveDataNodes ( ) ) |
testLoadPluginWithCustomizedScheduler ( ) { org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . ResourcePluginManager rpm = new org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . ResourcePluginManager ( ) ; org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . DeviceMappingManager dmm = new org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . DeviceMappingManager ( mock ( org . apache . hadoop . yarn . server . nodemanager . Context . class ) ) ; org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . DeviceMappingManager dmmSpy = spy ( dmm ) ; org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . ResourcePluginManager rpmSpy = spy ( rpm ) ; rpmSpy . setDeviceMappingManager ( dmmSpy ) ; nm = new org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . TestResourcePluginManager . MyMockNM ( rpmSpy ) ; conf . setBoolean ( YarnConfiguration . NM_PLUGGABLE_DEVICE_FRAMEWORK_ENABLED , true ) ; conf . setStrings ( YarnConfiguration . NM_PLUGGABLE_DEVICE_FRAMEWORK_DEVICE_CLASSES , ( ( ( org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . FakeTestDevicePlugin1 . class . getCanonicalName ( ) ) + "," ) + ( org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . FakeTestDevicePlugin5 . class . getCanonicalName ( ) ) ) ) ; nm . init ( conf ) ; nm . start ( ) ; verify ( rpmSpy , times ( 1 ) ) . checkInterfaceCompatibility ( org . apache . hadoop . yarn . server . nodemanager . api . deviceplugin . DevicePlugin . class , org . apache . hadoop . yarn . server . nodemanager . containermanager . resourceplugin . FakeTestDevicePlugin1 . class ) ; verify ( dmmSpy , times ( 1 ) ) . addDevicePluginScheduler ( any ( java . lang . String . class ) , any ( org . apache . 
hadoop . yarn . server . nodemanager . api . deviceplugin . DevicePluginScheduler . class ) ) ; "<AssertPlaceHolder>" ; } getDevicePluginSchedulers ( ) { return devicePluginSchedulers ; } | org . junit . Assert . assertEquals ( 1 , dmm . getDevicePluginSchedulers ( ) . size ( ) ) |
testPatchFilePath ( ) { final java . lang . String patchFilePath = xenServer56SP2Resource . getPatchFilePath ( ) ; final java . lang . String patch = "scripts/vm/hypervisor/xenserver/xenserver56fp1/patch" ; "<AssertPlaceHolder>" ; } getPatchFilePath ( ) { return "scripts/vm/hypervisor/xenserver/xenserver60/patch" ; } | org . junit . Assert . assertEquals ( patch , patchFilePath ) |
initialState ( ) { book . twju . chapter_2 . Timeline timeline = book . twju . chapter_2 . Listing_3_DelegateSetup_TimelineTest . createTimeline ( ) ; "<AssertPlaceHolder>" ; } getFetchCount ( ) { return fetchCount ; } | org . junit . Assert . assertTrue ( ( ( timeline . getFetchCount ( ) ) > 0 ) ) |
testXCodeContextWithEmptyProjectName ( ) { java . util . HashMap < java . lang . String , java . lang . String > managedOptions = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; managedOptions . put ( Options . ManagedOption . PROJECT . getOptionName ( ) , "" ) ; com . sap . prd . mobile . ios . mios . Options options = new com . sap . prd . mobile . ios . mios . Options ( null , managedOptions ) ; try { new com . sap . prd . mobile . ios . mios . XCodeContext ( java . util . Arrays . asList ( "clean" , "build" ) , com . sap . prd . mobile . ios . mios . XCodeContextTest . projectDirectory , System . out , null , options ) ; } catch ( com . sap . prd . mobile . ios . mios . Options ex ) { "<AssertPlaceHolder>" ; throw ex ; } } getViolated ( ) { return violated ; } | org . junit . Assert . assertEquals ( Options . ManagedOption . PROJECT , ex . getViolated ( ) ) |
cantZoomOutIfMinValue ( ) { pipe . actions . gui . ZoomManager zoomUI = new pipe . actions . gui . ZoomUI ( pipe . views . ZoomUITest . MIN_ZOOM , pipe . views . ZoomUITest . ZOOM_INCREMENT , pipe . views . ZoomUITest . MAX_ZOOM , pipe . views . ZoomUITest . MIN_ZOOM , null ) ; "<AssertPlaceHolder>" ; } canZoomOut ( ) { int newPercent = ( percent ) - ( pipe . constants . GUIConstants . ZOOM_DELTA ) ; return newPercent >= ( pipe . constants . GUIConstants . ZOOM_MIN ) ; } | org . junit . Assert . assertFalse ( zoomUI . canZoomOut ( ) ) |
testNotMissingReference ( ) { com . liferay . portal . kernel . xml . Element bookmarksEntryElement = _portletDataContext . getExportDataElement ( _bookmarksEntry ) ; _portletDataContext . addReferenceElement ( _bookmarksEntry , bookmarksEntryElement , _bookmarksFolder , PortletDataContext . REFERENCE_TYPE_PARENT , false ) ; com . liferay . portal . kernel . xml . Element missingReferencesElement = _portletDataContext . getMissingReferencesElement ( ) ; java . util . List < com . liferay . portal . kernel . xml . Element > missingReferenceElements = missingReferencesElement . elements ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { com . liferay . petra . string . StringBundler sb = new com . liferay . petra . string . StringBundler ( 23 ) ; sb . append ( ",<sp>width=" 1 ) ; sb . append ( uuid ) ; sb . append ( ",<sp>width=" 0 ) ; sb . append ( amImageEntryId ) ; sb . append ( ",<sp>groupId=" ) ; sb . append ( groupId ) ; sb . append ( ",<sp>companyId=" ) ; sb . append ( companyId ) ; sb . append ( ",<sp>createDate=" ) ; sb . append ( createDate ) ; sb . append ( ",<sp>configurationUuid=" ) ; sb . append ( configurationUuid ) ; sb . append ( ",<sp>fileVersionId=" ) ; sb . append ( fileVersionId ) ; sb . append ( ",<sp>mimeType=" ) ; sb . append ( mimeType ) ; sb . append ( ",<sp>height=" ) ; sb . append ( height ) ; sb . append ( ",<sp>width=" ) ; sb . append ( width ) ; sb . append ( ",<sp>size=" ) ; sb . append ( size ) ; sb . append ( "}" ) ; return sb . toString ( ) ; } | org . junit . Assert . assertEquals ( missingReferenceElements . toString ( ) , 0 , missingReferenceElements . size ( ) ) |
testBetween ( ) { com . j256 . ormlite . stmt . Where < com . j256 . ormlite . stmt . Foo , java . lang . String > where = new com . j256 . ormlite . stmt . Where < com . j256 . ormlite . stmt . Foo , java . lang . String > ( createTableInfo ( ) , null , databaseType ) ; int low = 1 ; int high = 1 ; where . between ( Foo . VAL_COLUMN_NAME , low , high ) ; java . lang . StringBuilder whereSb = new java . lang . StringBuilder ( ) ; where . appendSql ( null , whereSb , new java . util . ArrayList < com . j256 . ormlite . stmt . ArgumentHolder > ( ) ) ; java . lang . StringBuilder sb = new java . lang . StringBuilder ( ) ; databaseType . appendEscapedEntityName ( sb , Foo . VAL_COLUMN_NAME ) ; sb . append ( "<sp>BETWEEN<sp>" ) . append ( low ) ; sb . append ( "<sp>AND<sp>" ) . append ( high ) ; sb . append ( '<sp>' ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( ( getClass ( ) . getSimpleName ( ) ) + ":name=" ) + ( field . getName ( ) ) ) + ",class=" ) + ( field . getDeclaringClass ( ) . getSimpleName ( ) ) ; } | org . junit . Assert . assertEquals ( sb . toString ( ) , whereSb . toString ( ) ) |
testProcessorWorkerCountWithMultipleSegmentsWithOneThread ( ) { tokenStore . storeToken ( new org . axonframework . eventhandling . GlobalSequenceTrackingToken ( 1L ) , "test" , 0 ) ; tokenStore . storeToken ( new org . axonframework . eventhandling . GlobalSequenceTrackingToken ( 2L ) , "test" , 1 ) ; configureProcessor ( org . axonframework . eventhandling . TrackingEventProcessorConfiguration . forSingleThreadedProcessing ( ) ) ; testSubject . start ( ) ; java . lang . Thread . sleep ( 200 ) ; "<AssertPlaceHolder>" ; } activeProcessorThreads ( ) { return this . activeSegments . size ( ) ; } | org . junit . Assert . assertThat ( testSubject . activeProcessorThreads ( ) , org . hamcrest . CoreMatchers . is ( 1 ) ) |
testIsNotRevealedWhenPartiallyOutOfRightBottomEdge ( ) { org . eclipse . swt . graphics . Point origin = new org . eclipse . swt . graphics . Point ( 0 , 0 ) ; org . eclipse . swt . graphics . Rectangle clientArea = createRectangle ( 0 , 0 , 100 , 100 ) ; org . eclipse . swt . graphics . Rectangle controlBounds = createRectangle ( 95 , 95 , 10 , 10 ) ; boolean isRevealed = com . eclipsesource . tabris . internal . ScrollingCompositeUtil . isRevealed ( origin , clientArea , controlBounds ) ; "<AssertPlaceHolder>" ; } isRevealed ( org . eclipse . swt . graphics . Point , org . eclipse . swt . graphics . Rectangle , org . eclipse . swt . graphics . Rectangle ) { boolean verticalVisible = com . eclipsesource . tabris . internal . ScrollingCompositeUtil . isVerticallyVisible ( origin , clientArea , controlBounds ) ; boolean horizontalVisible = com . eclipsesource . tabris . internal . ScrollingCompositeUtil . isHorizontallyVisible ( origin , clientArea , controlBounds ) ; return horizontalVisible && verticalVisible ; } | org . junit . Assert . assertFalse ( isRevealed ) |
testGrowing ( ) { final int maxSize = com . carrotsearch . randomizedtesting . RandomizedTest . between ( 10 , 20 ) ; final org . neo4j . graphalgo . core . utils . queue . IntPriorityQueue queue = org . neo4j . graphalgo . core . utils . queue . IntPriorityQueue . min ( 1 ) ; for ( int i = 0 ; i < maxSize ; i ++ ) { queue . add ( i , com . carrotsearch . randomizedtesting . RandomizedTest . randomIntBetween ( 1 , 5 ) ) ; } "<AssertPlaceHolder>" ; } size ( ) { return size ; } | org . junit . Assert . assertEquals ( queue . size ( ) , maxSize ) |
testGetWrappedInputStream ( ) { org . apache . hadoop . fs . Path src = getTestRootPath ( fSys , "test/hadoop/file" ) ; createFile ( src ) ; org . apache . hadoop . fs . FSDataInputStream in = fSys . open ( src ) ; java . io . InputStream is = in . getWrappedStream ( ) ; in . close ( ) ; "<AssertPlaceHolder>" ; } close ( ) { cleanupTimer . cancel ( ) ; } | org . junit . Assert . assertNotNull ( is ) |
testGetCountOfCommentsByTopic ( ) { int count = mockCommentServiceImpl . getCountOfCommentsByTopic ( ModuleHelper . attachType , ModuleHelper . attachId ) ; verify ( mockCommentMapper ) . countByExample ( argThat ( new com . onboard . test . exampleutils . ExampleMatcher < com . onboard . domain . mapper . model . CommentExample > ( ) { @ com . onboard . service . collaboration . impl . test . Override public boolean matches ( com . onboard . domain . mapper . model . common . BaseExample example ) { return ( com . onboard . test . exampleutils . CriterionVerifier . verifyEqualTo ( example , "attachId" , ModuleHelper . attachId ) ) && ( com . onboard . test . exampleutils . CriterionVerifier . verifyEqualTo ( example , "attachType" , ModuleHelper . attachType ) ) ; } } ) ) ; "<AssertPlaceHolder>" ; } verifyEqualTo ( com . onboard . domain . mapper . model . common . BaseExample , java . lang . String , java . lang . Object ) { com . onboard . domain . mapper . model . common . Criterion criterion = com . onboard . test . exampleutils . CriterionVerifier . getCriterionByCondition ( baseExample , ( field + ( com . onboard . test . exampleutils . CriterionVerifier . CriteriaType . EQUAL_TO ) ) ) ; return com . onboard . test . exampleutils . CriterionVerifier . checkCriterion ( criterion , value ) ; } | org . junit . Assert . assertEquals ( count , ModuleHelper . count ) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.