signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class TxXATerminator { /** * / * ( non - Javadoc )
* @ see javax . resource . spi . XATerminator # commit ( javax . transaction . xa . Xid , boolean ) */
public void commit ( Xid xid , boolean onePhase ) throws XAException { } }
|
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "commit" , new Object [ ] { xid , onePhase } ) ; final JCATranWrapper txWrapper ; try { validateXid ( xid ) ; // Get the wrapper adding an association in the process
txWrapper = getTxWrapper ( xid , true ) ; } catch ( XAException e ) { if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit" , "caught XAException: " + XAReturnCodeHelper . convertXACode ( e . errorCode ) ) ; throw e ; } try { if ( onePhase ) { // Perform one - phase commit
txWrapper . commitOnePhase ( ) ; } else { // Perform ordinary commit ( prepare has already been called )
txWrapper . commit ( ) ; } } catch ( XAException e ) { TxExecutionContextHandler . removeAssociation ( txWrapper ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit" , "rethrowing XAException: " + XAReturnCodeHelper . convertXACode ( e . errorCode ) ) ; throw e ; } TxExecutionContextHandler . removeAssociation ( txWrapper ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit" ) ;
|
public class WalkerFactory { /** * Tell if the given axis goes downword . Bogus name , if you can think of
* a better one , please do tell . This really has to do with inverting
* attribute axis .
* @ param axis One of Axis . XXX .
* @ return true if the axis is not a child axis and does not go up from
* the axis root . */
public static boolean isDownwardAxisOfMany ( int axis ) { } }
|
return ( ( Axis . DESCENDANTORSELF == axis ) || ( Axis . DESCENDANT == axis ) || ( Axis . FOLLOWING == axis ) // | | ( Axis . FOLLOWINGSIBLING = = axis )
|| ( Axis . PRECEDING == axis ) // | | ( Axis . PRECEDINGSIBLING = = axis )
) ;
|
public class LinkedDataFragmentBase { /** * Adds an RDF description of page links to the given RDF model .
* This method may be overridden in subclasses .
* @ param model */
public void addControls ( final Model model ) { } }
|
final URIBuilder pagedURL ; try { pagedURL = new URIBuilder ( fragmentURL ) ; } catch ( URISyntaxException e ) { throw new IllegalArgumentException ( e ) ; } final Resource fragmentId = model . createResource ( fragmentURL ) ; final Resource firstPageId = model . createResource ( pagedURL . setParameter ( ILinkedDataFragmentRequest . PARAMETERNAME_PAGE , "1" ) . toString ( ) ) ; fragmentId . addProperty ( CommonResources . HYDRA_FIRSTPAGE , firstPageId ) ; if ( pageNumber > 1 ) { final String prevPageNumber = Long . toString ( pageNumber - 1 ) ; final Resource prevPageId = model . createResource ( pagedURL . setParameter ( ILinkedDataFragmentRequest . PARAMETERNAME_PAGE , prevPageNumber ) . toString ( ) ) ; fragmentId . addProperty ( CommonResources . HYDRA_PREVIOUSPAGE , prevPageId ) ; } if ( ! isLastPage ) { final String nextPageNumber = Long . toString ( pageNumber + 1 ) ; final Resource nextPageId = model . createResource ( pagedURL . setParameter ( ILinkedDataFragmentRequest . PARAMETERNAME_PAGE , nextPageNumber ) . toString ( ) ) ; fragmentId . addProperty ( CommonResources . HYDRA_NEXTPAGE , nextPageId ) ; }
|
public class GetServiceGraphRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetServiceGraphRequest getServiceGraphRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( getServiceGraphRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getServiceGraphRequest . getStartTime ( ) , STARTTIME_BINDING ) ; protocolMarshaller . marshall ( getServiceGraphRequest . getEndTime ( ) , ENDTIME_BINDING ) ; protocolMarshaller . marshall ( getServiceGraphRequest . getGroupName ( ) , GROUPNAME_BINDING ) ; protocolMarshaller . marshall ( getServiceGraphRequest . getGroupARN ( ) , GROUPARN_BINDING ) ; protocolMarshaller . marshall ( getServiceGraphRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class CopyableFile { /** * Compute the correct { @ link OwnerAndPermission } obtained from replicating source owner and permissions and applying
* the { @ link PreserveAttributes } rules for fromPath and every ancestor up to but excluding toPath .
* @ return A list of the computed { @ link OwnerAndPermission } s starting from fromPath , up to but excluding toPath .
* @ throws IOException if toPath is not an ancestor of fromPath . */
public static List < OwnerAndPermission > resolveReplicatedOwnerAndPermissionsRecursively ( FileSystem sourceFs , Path fromPath , Path toPath , CopyConfiguration copyConfiguration ) throws IOException { } }
|
if ( ! PathUtils . isAncestor ( toPath , fromPath ) ) { throw new IOException ( String . format ( "toPath %s must be an ancestor of fromPath %s." , toPath , fromPath ) ) ; } List < OwnerAndPermission > ownerAndPermissions = Lists . newArrayList ( ) ; Path currentPath = fromPath ; while ( PathUtils . isAncestor ( toPath , currentPath . getParent ( ) ) ) { ownerAndPermissions . add ( resolveReplicatedOwnerAndPermission ( sourceFs , currentPath , copyConfiguration ) ) ; currentPath = currentPath . getParent ( ) ; } return ownerAndPermissions ;
|
public class SelectRestClauseParser { /** * Parse select rest . */
public final void parse ( ) { } }
|
Collection < Keyword > unsupportedRestKeywords = new LinkedList < > ( ) ; unsupportedRestKeywords . addAll ( Arrays . asList ( DefaultKeyword . UNION , DefaultKeyword . INTERSECT , DefaultKeyword . EXCEPT , DefaultKeyword . MINUS ) ) ; unsupportedRestKeywords . addAll ( Arrays . asList ( getUnsupportedKeywordsRest ( ) ) ) ; lexerEngine . unsupportedIfEqual ( unsupportedRestKeywords . toArray ( new Keyword [ unsupportedRestKeywords . size ( ) ] ) ) ;
|
public class WeakFastHashMap { /** * Return < code > true < / code > if this map contains a mapping for the
* specified key .
* @ param key the key to be searched for
* @ return true if the map contains the key */
@ Override public boolean containsKey ( Object key ) { } }
|
if ( fast ) { return ( map . containsKey ( key ) ) ; } else { synchronized ( map ) { return ( map . containsKey ( key ) ) ; } }
|
public class LogFactory { /** * Configure logging provider from configuration object . Configuration object format is entirely under provider control ;
* this method just pass it as it is .
* If provider configuration fails , most probably because of bad configuration , reset provider to default .
* @ param config configuration object . */
public static void config ( Config config ) { } }
|
try { provider . config ( config ) ; } catch ( Throwable t ) { provider = new DefaultLogProvider ( ) ; Log log = provider . getLogger ( LogFactory . class . getName ( ) ) ; log . error ( "Fail on logger provider configuration. Reset logging system to default provider." ) ; log . dump ( "Logging configuration stack dump:" , t ) ; }
|
public class SearchAddressBooksResult { /** * The address books that meet the specified set of filter criteria , in sort order .
* @ param addressBooks
* The address books that meet the specified set of filter criteria , in sort order . */
public void setAddressBooks ( java . util . Collection < AddressBookData > addressBooks ) { } }
|
if ( addressBooks == null ) { this . addressBooks = null ; return ; } this . addressBooks = new java . util . ArrayList < AddressBookData > ( addressBooks ) ;
|
public class HypergraphEdge { /** * Computes the center of gravity for this edge ( see Aloul , Markov , and Sakallah ) .
* @ param nodeOrdering the node ordering for which the COG is computed
* @ return the center of gravity for this edge */
public double centerOfGravity ( final Map < HypergraphNode < T > , Integer > nodeOrdering ) { } }
|
int cog = 0 ; for ( final HypergraphNode < T > node : this . nodes ) { final Integer level = nodeOrdering . get ( node ) ; if ( level == null ) throw new IllegalStateException ( "Could not find node " + node + " in the node ordering." ) ; cog += level ; } return ( double ) cog / this . nodes . size ( ) ;
|
public class AbstractJsonDeserializer { /** * Convenience method for subclasses .
* @ param paramName the name of the parameter
* @ param errorMessage the errormessage to add to the exception if the param does not exist .
* @ return a stringparameter with given name . If it does not exist and the errormessage is provided ,
* an IOException is thrown with that message . if the errormessage is not provided , null is returned .
* @ throws IOException Exception if the paramname does not exist and an errormessage is provided . */
protected Boolean getBooleanParam ( String paramName , String errorMessage ) throws IOException { } }
|
return getBooleanParam ( paramName , errorMessage , ( Map < String , Object > ) inputParams . get ( ) ) ;
|
public class GrpcServerFactoryAutoConfiguration { /** * The server lifecycle bean for netty based server .
* @ param factory The factory used to create the lifecycle .
* @ return The inter - process server lifecycle bean . */
@ ConditionalOnMissingBean @ ConditionalOnClass ( name = { } }
|
"io.netty.channel.Channel" , "io.grpc.netty.NettyServerBuilder" } ) @ Bean public GrpcServerLifecycle nettyGrpcServerLifecycle ( final NettyGrpcServerFactory factory ) { return new GrpcServerLifecycle ( factory ) ;
|
public class Interval { /** * parse when there are two date - times */
private static Interval parseEndDateTime ( Instant start , ZoneOffset offset , CharSequence endStr ) { } }
|
try { TemporalAccessor temporal = DateTimeFormatter . ISO_DATE_TIME . parseBest ( endStr , OffsetDateTime :: from , LocalDateTime :: from ) ; if ( temporal instanceof OffsetDateTime ) { OffsetDateTime odt = ( OffsetDateTime ) temporal ; return Interval . of ( start , odt . toInstant ( ) ) ; } else { // infer offset from start if not specified by end
LocalDateTime ldt = ( LocalDateTime ) temporal ; return Interval . of ( start , ldt . toInstant ( offset ) ) ; } } catch ( DateTimeParseException ex ) { Instant end = Instant . parse ( endStr ) ; return Interval . of ( start , end ) ; }
|
public class OutJampWebSocket { /** * Sends a message to a given address */
public void query ( WebSocket session , HeadersAmp headers , String from , long qid , String to , String methodName , PodRef podCaller , Object ... args ) throws IOException { } }
|
// NOTE(review): the entire implementation below is commented out, so this method is
// currently a no-op. Confirm whether the WebSocket send path was intentionally
// disabled; podCaller is also unused by the commented-out code.
/* try ( Writer out = session . getBasicRemote ( ) . getSendWriter ( ) ) {
init ( out ) ;
query ( headers , from , qid , to , methodName , args ) ; */
|
public class lbwlm { /** * Use this API to delete lbwlm resources of given names . */
public static base_responses delete ( nitro_service client , String wlmname [ ] ) throws Exception { } }
|
base_responses result = null ; if ( wlmname != null && wlmname . length > 0 ) { lbwlm deleteresources [ ] = new lbwlm [ wlmname . length ] ; for ( int i = 0 ; i < wlmname . length ; i ++ ) { deleteresources [ i ] = new lbwlm ( ) ; deleteresources [ i ] . wlmname = wlmname [ i ] ; } result = delete_bulk_request ( client , deleteresources ) ; } return result ;
|
public class ApiOvhOrder { /** * Create order
* REST : POST / order / email / exchange / { organizationName } / service / { exchangeService } / accountUpgrade / { duration }
* @ param newQuota [ required ] New storage quota for that account
* @ param primaryEmailAddress [ required ] The account you wish to upgrade
* @ param organizationName [ required ] The internal name of your exchange organization
* @ param exchangeService [ required ] The internal name of your exchange service
* @ param duration [ required ] Duration */
public OvhOrder email_exchange_organizationName_service_exchangeService_accountUpgrade_duration_POST ( String organizationName , String exchangeService , String duration , OvhAccountQuotaEnum newQuota , String primaryEmailAddress ) throws IOException { } }
|
String qPath = "/order/email/exchange/{organizationName}/service/{exchangeService}/accountUpgrade/{duration}" ; StringBuilder sb = path ( qPath , organizationName , exchangeService , duration ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "newQuota" , newQuota ) ; addBody ( o , "primaryEmailAddress" , primaryEmailAddress ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhOrder . class ) ;
|
public class CustomDictionary { /** * 最长匹配
* @ param text 文本
* @ param processor 处理器 */
public static void parseLongestText ( String text , AhoCorasickDoubleArrayTrie . IHit < CoreDictionary . Attribute > processor ) { } }
|
if ( trie != null ) { final int [ ] lengthArray = new int [ text . length ( ) ] ; final CoreDictionary . Attribute [ ] attributeArray = new CoreDictionary . Attribute [ text . length ( ) ] ; char [ ] charArray = text . toCharArray ( ) ; DoubleArrayTrie < CoreDictionary . Attribute > . Searcher searcher = dat . getSearcher ( charArray , 0 ) ; while ( searcher . next ( ) ) { lengthArray [ searcher . begin ] = searcher . length ; attributeArray [ searcher . begin ] = searcher . value ; } trie . parseText ( charArray , new AhoCorasickDoubleArrayTrie . IHit < CoreDictionary . Attribute > ( ) { @ Override public void hit ( int begin , int end , CoreDictionary . Attribute value ) { int length = end - begin ; if ( length > lengthArray [ begin ] ) { lengthArray [ begin ] = length ; attributeArray [ begin ] = value ; } } } ) ; for ( int i = 0 ; i < charArray . length ; ) { if ( lengthArray [ i ] == 0 ) { ++ i ; } else { processor . hit ( i , i + lengthArray [ i ] , attributeArray [ i ] ) ; i += lengthArray [ i ] ; } } } else dat . parseLongestText ( text , processor ) ;
|
public class AuthCredentialToSubjectResolverManager { /** * Resolve the { @ link IAuthSubject } from the specified credentials .
* @ param aCredentials
* The credentials to be transformed .
* @ return < code > null < / code > if no subject matches the specified credentials . */
@ Nullable public static IAuthSubject getSubjectFromCredentials ( @ Nonnull final IAuthCredentials aCredentials ) { } }
|
for ( final IAuthCredentialToSubjectResolverSPI aHdl : s_aHdlList ) if ( aHdl . supportsCredentials ( aCredentials ) ) { final IAuthSubject aSubject = aHdl . getSubjectFromCredentials ( aCredentials ) ; if ( aSubject != null ) return aSubject ; } return null ;
|
public class BinarySearch {
    /**
     * Searches for the value in a reverse-sorted (descending) short array.
     *
     * @param shortArray the descending-sorted array to search
     * @param value      the value being searched for
     * @return the index where the value is found, else -1
     */
    public static int searchDescending(short[] shortArray, short value) {
        int start = 0;
        int end = shortArray.length - 1;
        while (start <= end) {
            // Unsigned shift avoids the classic (start + end) int overflow for huge arrays.
            int middle = (start + end) >>> 1;
            if (value == shortArray[middle]) {
                return middle;
            }
            if (value > shortArray[middle]) {
                // Array is descending: larger values lie to the left.
                end = middle - 1;
            } else {
                start = middle + 1;
            }
        }
        return -1;
    }
}
|
public class AbstractHttpStack { /** * Reads the contents of HttpEntity into a byte [ ] . */
public final byte [ ] getContentBytes ( HttpURLConnection connection , ByteArrayPool byteArrayPool ) throws IOException { } }
|
InputStream inputStream ; try { inputStream = connection . getInputStream ( ) ; } catch ( IOException ioe ) { inputStream = connection . getErrorStream ( ) ; if ( inputStream == null ) { throw ioe ; } } return getContentBytes ( inputStream , byteArrayPool , connection . getContentLength ( ) ) ;
|
public class DRL6Expressions { /** * src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 325:1 : inExpression returns [ BaseDescr result ] : left = relationalExpression ( ( not _ key in _ key ) = > not _ key in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN | in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN ) ? ; */
public final BaseDescr inExpression ( ) throws RecognitionException { } }
|
// NOTE(review): ANTLR-generated parser rule — do not hand-edit the logic. Parses a
// relational expression optionally followed by "[not] in ( e1, e2, ... )"; a plain
// "in" list becomes an OR of "==" comparisons, "not in" an AND of "!=" comparisons.
BaseDescr result = null ; BaseDescr left = null ; ParserRuleReturnScope e1 = null ; ParserRuleReturnScope e2 = null ; ConstraintConnectiveDescr descr = null ; BaseDescr leftDescr = null ; BindingDescr binding = null ; try { // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 328:3 : ( left = relationalExpression ( ( not _ key in _ key ) = > not _ key in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN | in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN ) ? )
// src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 328:5 : left = relationalExpression ( ( not _ key in _ key ) = > not _ key in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN | in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN ) ?
{ pushFollow ( FOLLOW_relationalExpression_in_inExpression1615 ) ; left = relationalExpression ( ) ; state . _fsp -- ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { if ( buildDescr ) { result = left ; } if ( left instanceof BindingDescr ) { binding = ( BindingDescr ) left ; leftDescr = new AtomicExprDescr ( binding . getExpression ( ) ) ; } else { leftDescr = left ; } } // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 337:5 : ( ( not _ key in _ key ) = > not _ key in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN | in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN ) ?
// NOTE(review): lookahead below chooses alt 1 ("not in"), alt 2 ("in"), or neither.
int alt36 = 3 ; int LA36_0 = input . LA ( 1 ) ; if ( ( LA36_0 == ID ) ) { int LA36_1 = input . LA ( 2 ) ; if ( ( LA36_1 == ID ) ) { int LA36_3 = input . LA ( 3 ) ; if ( ( LA36_3 == LEFT_PAREN ) && ( ( ( ( helper . validateIdentifierKey ( DroolsSoftKeywords . NOT ) ) ) && synpred8_DRL6Expressions ( ) ) ) ) { alt36 = 1 ; } } else if ( ( LA36_1 == LEFT_PAREN ) && ( ( ( helper . validateIdentifierKey ( DroolsSoftKeywords . IN ) ) ) ) ) { alt36 = 2 ; } } switch ( alt36 ) { case 1 : // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 337:6 : ( not _ key in _ key ) = > not _ key in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN
{ pushFollow ( FOLLOW_not_key_in_inExpression1635 ) ; not_key ( ) ; state . _fsp -- ; if ( state . failed ) return result ; pushFollow ( FOLLOW_in_key_in_inExpression1639 ) ; in_key ( ) ; state . _fsp -- ; if ( state . failed ) return result ; match ( input , LEFT_PAREN , FOLLOW_LEFT_PAREN_in_inExpression1641 ) ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { helper . emit ( Location . LOCATION_LHS_INSIDE_CONDITION_ARGUMENT ) ; } pushFollow ( FOLLOW_expression_in_inExpression1663 ) ; e1 = expression ( ) ; state . _fsp -- ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { descr = ConstraintConnectiveDescr . newAnd ( ) ; RelationalExprDescr rel = new RelationalExprDescr ( "!=" , false , null , leftDescr , ( e1 != null ? ( ( DRL6Expressions . expression_return ) e1 ) . result : null ) ) ; descr . addOrMerge ( rel ) ; result = descr ; } // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 345:7 : ( COMMA e2 = expression ) *
loop34 : while ( true ) { int alt34 = 2 ; int LA34_0 = input . LA ( 1 ) ; if ( ( LA34_0 == COMMA ) ) { alt34 = 1 ; } switch ( alt34 ) { case 1 : // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 345:8 : COMMA e2 = expression
{ match ( input , COMMA , FOLLOW_COMMA_in_inExpression1682 ) ; if ( state . failed ) return result ; pushFollow ( FOLLOW_expression_in_inExpression1686 ) ; e2 = expression ( ) ; state . _fsp -- ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { RelationalExprDescr rel = new RelationalExprDescr ( "!=" , false , null , leftDescr , ( e2 != null ? ( ( DRL6Expressions . expression_return ) e2 ) . result : null ) ) ; descr . addOrMerge ( rel ) ; } } break ; default : break loop34 ; } } match ( input , RIGHT_PAREN , FOLLOW_RIGHT_PAREN_in_inExpression1707 ) ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { helper . emit ( Location . LOCATION_LHS_INSIDE_CONDITION_END ) ; } } break ; case 2 : // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 351:7 : in = in _ key LEFT _ PAREN e1 = expression ( COMMA e2 = expression ) * RIGHT _ PAREN
{ pushFollow ( FOLLOW_in_key_in_inExpression1723 ) ; in_key ( ) ; state . _fsp -- ; if ( state . failed ) return result ; match ( input , LEFT_PAREN , FOLLOW_LEFT_PAREN_in_inExpression1725 ) ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { helper . emit ( Location . LOCATION_LHS_INSIDE_CONDITION_ARGUMENT ) ; } pushFollow ( FOLLOW_expression_in_inExpression1747 ) ; e1 = expression ( ) ; state . _fsp -- ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { descr = ConstraintConnectiveDescr . newOr ( ) ; RelationalExprDescr rel = new RelationalExprDescr ( "==" , false , null , leftDescr , ( e1 != null ? ( ( DRL6Expressions . expression_return ) e1 ) . result : null ) ) ; descr . addOrMerge ( rel ) ; result = descr ; } // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 359:7 : ( COMMA e2 = expression ) *
loop35 : while ( true ) { int alt35 = 2 ; int LA35_0 = input . LA ( 1 ) ; if ( ( LA35_0 == COMMA ) ) { alt35 = 1 ; } switch ( alt35 ) { case 1 : // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 359:8 : COMMA e2 = expression
{ match ( input , COMMA , FOLLOW_COMMA_in_inExpression1766 ) ; if ( state . failed ) return result ; pushFollow ( FOLLOW_expression_in_inExpression1770 ) ; e2 = expression ( ) ; state . _fsp -- ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { RelationalExprDescr rel = new RelationalExprDescr ( "==" , false , null , leftDescr , ( e2 != null ? ( ( DRL6Expressions . expression_return ) e2 ) . result : null ) ) ; descr . addOrMerge ( rel ) ; } } break ; default : break loop35 ; } } match ( input , RIGHT_PAREN , FOLLOW_RIGHT_PAREN_in_inExpression1791 ) ; if ( state . failed ) return result ; if ( state . backtracking == 0 ) { helper . emit ( Location . LOCATION_LHS_INSIDE_CONDITION_END ) ; } } break ; } } if ( state . backtracking == 0 ) { if ( binding != null && descr != null ) descr . addOrMerge ( binding ) ; } } catch ( RecognitionException re ) { throw re ; } finally { // do for sure before leaving
} return result ;
|
public class MariaDbX509KeyManager { /** * Search aliases corresponding to algorithms and issuers .
* @ param keyTypes list of algorithms
* @ param issuers list of issuers ;
* @ return list of corresponding aliases */
private ArrayList < String > searchAccurateAliases ( String [ ] keyTypes , Principal [ ] issuers ) { } }
|
if ( keyTypes == null || keyTypes . length == 0 ) { return null ; } ArrayList < String > accurateAliases = new ArrayList < > ( ) ; for ( Map . Entry < String , KeyStore . PrivateKeyEntry > mapEntry : privateKeyHash . entrySet ( ) ) { Certificate [ ] certs = mapEntry . getValue ( ) . getCertificateChain ( ) ; String alg = certs [ 0 ] . getPublicKey ( ) . getAlgorithm ( ) ; for ( String keyType : keyTypes ) { if ( alg . equals ( keyType ) ) { if ( issuers != null && issuers . length != 0 ) { checkLoop : for ( Certificate cert : certs ) { if ( cert instanceof X509Certificate ) { X500Principal certificateIssuer = ( ( X509Certificate ) cert ) . getIssuerX500Principal ( ) ; for ( Principal issuer : issuers ) { if ( certificateIssuer . equals ( issuer ) ) { accurateAliases . add ( mapEntry . getKey ( ) ) ; break checkLoop ; } } } } } else { accurateAliases . add ( mapEntry . getKey ( ) ) ; } } } } return accurateAliases ;
|
public class DefaultNamespaceService { /** * { @ inheritDoc } */
@ Override public void compileNamespace ( String resourceLocation ) throws ResourceDownloadError , IndexingFailure { } }
|
if ( resourceLocation == null ) { throw new InvalidArgument ( "resourceLocation" , resourceLocation ) ; } synchronized ( resourceLocation ) { // compile
final String indexPath = doCompile ( resourceLocation ) ; // close opened namespace index
closeNamespace ( resourceLocation ) ; // reopen namespace index
openNamespace ( resourceLocation , indexPath ) ; }
|
public class SmartLdapGroupStore { /** * Returns an < code > Iterator < / code > over the < code > Collection < / code > of < code > IEntityGroups
* < / code > that are members of this < code > IEntityGroup < / code > .
* @ return java . util . Iterator
* @ param group org . apereo . portal . groups . IEntityGroup */
@ Override public Iterator findMemberGroups ( IEntityGroup group ) throws GroupsException { } }
|
if ( isTreeRefreshRequired ( ) ) { refreshTree ( ) ; } log . debug ( "Invoking findMemberGroups() for group: {}" , group . getLocalKey ( ) ) ; List < IEntityGroup > rslt = new ArrayList < > ( ) ; List < String > list = groupsTree . getChildren ( ) . get ( group . getLocalKey ( ) ) ; if ( list != null ) { // should only reach this code if its a SmartLdap managed group . . .
for ( String s : list ) { rslt . add ( groupsTree . getGroups ( ) . get ( s ) ) ; } } return rslt . iterator ( ) ;
|
public class DialogBuilder { /** * makes a String unique by appending a numerical suffix
* @ param _ xElementContainer the com . sun . star . container . XNameAccess container
* that the new Element is going to be inserted to
* @ param _ sElementName the StemName of the Element */
public static String createUniqueName ( XNameAccess _xElementContainer , String _sElementName ) { } }
|
boolean bElementexists = true ; int i = 1 ; String sIncSuffix = "" ; String BaseName = _sElementName ; while ( bElementexists ) { bElementexists = _xElementContainer . hasByName ( _sElementName ) ; if ( bElementexists ) { i += 1 ; _sElementName = BaseName + Integer . toString ( i ) ; } } return _sElementName ;
|
public class SoyExpression { /** * Returns an Expression of a non - null { @ link SoyValueProvider } providing this value . */
public Expression boxAsSoyValueProvider ( ) { } }
|
// NOTE(review): ASM bytecode-generation code; the anonymous Expression subclasses emit
// null checks that substitute NULL_PROVIDER for a null value. Four cases below: the
// statically-null type, a provably non-null delegate, an already-boxed delegate, and
// an unboxed nullable delegate (boxed only on the non-null path). Kept verbatim.
if ( soyType ( ) . equals ( NullType . getInstance ( ) ) ) { if ( delegate == NULL || delegate == NULL_BOXED ) { return FieldRef . NULL_PROVIDER . accessor ( ) ; } // otherwise this expression might have side effects , evaluate it as a statement then return
// the NULL _ PROVIDER
return toStatement ( ) . then ( FieldRef . NULL_PROVIDER . accessor ( ) ) ; } if ( delegate . isNonNullable ( ) ) { // Every SoyValue is - a SoyValueProvider , so if it is non - null
return box ( ) ; } if ( isBoxed ( ) ) { return new Expression ( BytecodeUtils . SOY_VALUE_PROVIDER_TYPE , delegate . features ( ) . plus ( Feature . NON_NULLABLE ) ) { @ Override protected void doGen ( CodeBuilder adapter ) { Label end = new Label ( ) ; delegate . gen ( adapter ) ; adapter . dup ( ) ; adapter . ifNonNull ( end ) ; adapter . pop ( ) ; FieldRef . NULL_PROVIDER . accessStaticUnchecked ( adapter ) ; adapter . mark ( end ) ; } } ; } return new Expression ( BytecodeUtils . SOY_VALUE_PROVIDER_TYPE , delegate . features ( ) . plus ( Feature . NON_NULLABLE ) ) { @ Override protected void doGen ( CodeBuilder adapter ) { Label end = new Label ( ) ; delegate . gen ( adapter ) ; adapter . dup ( ) ; Label nonNull = new Label ( ) ; adapter . ifNonNull ( nonNull ) ; adapter . pop ( ) ; // pop the null value and replace with the nullprovider
FieldRef . NULL_PROVIDER . accessStaticUnchecked ( adapter ) ; adapter . goTo ( end ) ; adapter . mark ( nonNull ) ; doBox ( adapter , soyRuntimeType ) ; adapter . mark ( end ) ; } } ;
|
public class PathService { /** * Returns a path with the given root ( or no root , if null ) and the given names . */
public JimfsPath createPath ( @ Nullable Name root , Iterable < Name > names ) { } }
|
ImmutableList < Name > nameList = ImmutableList . copyOf ( Iterables . filter ( names , NOT_EMPTY ) ) ; if ( root == null && nameList . isEmpty ( ) ) { // ensure the canonical empty path ( one empty string name ) is used rather than a path with
// no root and no names
return emptyPath ( ) ; } return createPathInternal ( root , nameList ) ;
|
public class Splash {
    /**
     * Looks up a command-line parameter of the form {@code name=value}.
     * (The previous Javadoc described adding entries to a properties object,
     * which this method never did.)
     *
     * @param args     command-line arguments to scan; may be {@code null}
     * @param strParam parameter name, matched case-insensitively; may be {@code null}
     * @return the text after the first {@code '='} of the matching argument
     *         (possibly empty), or {@code null} if not found
     */
    public static String getParam(String[] args, String strParam) {
        if (args == null || strParam == null) {
            return null;
        }
        for (String arg : args) {
            if (arg == null) {
                continue;
            }
            int eq = arg.indexOf('=');
            if (eq != -1 && strParam.equalsIgnoreCase(arg.substring(0, eq))) {
                return arg.substring(eq + 1);
            }
        }
        return null;
    }
}
|
public class UpdateablePageCollection { /** * Removes the given PageWrapper from this PageCollection .
* @ param page The PageWrapper to remove . This page will have its offset set to PagePointer . NO _ OFFSET . */
@ Override synchronized void remove ( PageWrapper page ) { } }
|
super . remove ( page ) ; if ( this . incompleteNewPageOffset == page . getOffset ( ) ) { this . incompleteNewPageOffset = PagePointer . NO_OFFSET ; } this . deletedPageOffsets . add ( page . getOffset ( ) ) ; page . setOffset ( PagePointer . NO_OFFSET ) ;
|
public class syslog_ui_cmd { /** * Use this API to fetch filtered set of syslog _ ui _ cmd resources .
* filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */
public static syslog_ui_cmd [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
|
syslog_ui_cmd obj = new syslog_ui_cmd ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; syslog_ui_cmd [ ] response = ( syslog_ui_cmd [ ] ) obj . getfiltered ( service , option ) ; return response ;
|
public class AWSResourceGroupsTaggingAPIClient { /** * Returns all tag keys in the specified region for the AWS account .
* @ param getTagKeysRequest
* @ return Result of the GetTagKeys operation returned by the service .
* @ throws InvalidParameterException
* A parameter is missing or a malformed string or invalid or out - of - range value was supplied for the
* request parameter .
* @ throws ThrottledException
* The request was denied to limit the frequency of submitted requests .
* @ throws InternalServiceException
* The request processing failed because of an unknown error , exception , or failure . You can retry the
* request .
* @ throws PaginationTokenExpiredException
* A < code > PaginationToken < / code > is valid for a maximum of 15 minutes . Your request was denied because the
* specified < code > PaginationToken < / code > has expired .
* @ sample AWSResourceGroupsTaggingAPI . GetTagKeys
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / resourcegroupstaggingapi - 2017-01-26 / GetTagKeys "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public GetTagKeysResult getTagKeys ( GetTagKeysRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeGetTagKeys ( request ) ;
|
public class DefaultExternalContentManager { /** * Determines the mime type of a given file
* @ param file for which the mime type needs to be detected
* @ return the detected mime type */
private String determineMimeType ( File file ) { } }
|
String mimeType = MIME_MAP . getContentType ( file ) ; // if mimeType detection failed , fall back to the default
if ( mimeType == null || mimeType . equalsIgnoreCase ( "" ) ) { mimeType = DEFAULT_MIMETYPE ; } return mimeType ;
|
public class FSEditLog { /** * Create empty edit log files .
 * Initialize the output stream for logging .
 * Precondition : the log must be BETWEEN_LOG_SEGMENTS ; postcondition : IN_SEGMENT .
 * @ throws IOException if the log is not in the expected state before or after opening */
synchronized void open ( ) throws IOException { } }
|
// Lazily start the single background sync thread on first open; later opens reuse it.
// startLogSegment() is expected to move the state to IN_SEGMENT; both state checks guard
// against calls made in the wrong lifecycle phase.
if ( syncer == null ) { syncer = new SyncThread ( ) ; syncThread = new Thread ( syncer ) ; syncThread . start ( ) ; } if ( state != State . BETWEEN_LOG_SEGMENTS ) throw new IOException ( "Bad state: " + state ) ; startLogSegment ( getLastWrittenTxId ( ) + 1 , true ) ; if ( state != State . IN_SEGMENT ) throw new IOException ( "Bad state: " + state ) ;
|
public class AdditionalAnswers { /** * An answer that directly forwards the calls to the delegate . The delegate may or may not be of the same type as the mock .
* If the type is different , a matching method needs to be found on delegate type otherwise an exception is thrown .
* Useful for spies or partial mocks of objects that are difficult to mock
* or spy using the usual spy API . Possible use cases :
* < ul >
* < li > Final classes but with an interface < / li >
* < li > Already custom proxied object < / li >
* < li > Special objects with a finalize method , i . e . to avoid executing it 2 times < / li >
* < / ul >
* The difference with the regular spy :
* < ul >
* < li >
* The regular spy ( { @ link Mockito # spy ( Object ) } ) contains < strong > all < / strong > state from the spied instance
* and the methods are invoked on the spy . The spied instance is only used at mock creation to copy the state from .
* If you call a method on a regular spy and it internally calls other methods on this spy , those calls are remembered
* for verifications , and they can be effectively stubbed .
* < / li >
* < li >
* The mock that delegates simply delegates all methods to the delegate .
* The delegate is used all the time as methods are delegated onto it .
* If you call a method on a mock that delegates and it internally calls other methods on this mock ,
* those calls are < strong > not < / strong > remembered for verifications , stubbing does not have effect on them , too .
* Mock that delegates is less powerful than the regular spy but it is useful when the regular spy cannot be created .
* < / li >
* < / ul >
* An example with a final class that we want to delegate to :
* < pre class = " code " > < code class = " java " >
* final class DontYouDareToMockMe implements list { . . . }
* DontYouDareToMockMe awesomeList = new DontYouDareToMockMe ( ) ;
* List mock = mock ( List . class , delegatesTo ( awesomeList ) ) ;
* < / code > < / pre >
* This feature suffers from the same drawback as the spy .
* The mock will call the delegate if you use regular when ( ) . then ( ) stubbing style .
* Since the real implementation is called this might have some side effects .
* Therefore you should to use the doReturn | Throw | Answer | CallRealMethod stubbing style . Example :
* < pre class = " code " > < code class = " java " >
* List listWithDelegate = mock ( List . class , AdditionalAnswers . delegatesTo ( awesomeList ) ) ;
* / / Impossible : real method is called so listWithDelegate . get ( 0 ) throws IndexOutOfBoundsException ( the list is yet empty )
* when ( listWithDelegate . get ( 0 ) ) . thenReturn ( " foo " ) ;
* / / You have to use doReturn ( ) for stubbing
* doReturn ( " foo " ) . when ( listWithDelegate ) . get ( 0 ) ;
* < / code > < / pre >
* @ param delegate The delegate to forward calls to . It does not have to be of the same type as the mock ( although it usually is ) .
* The only requirement is that the instance should have compatible method signatures including the return values .
* Only the methods that were actually executed on the mock need to be present on the delegate type .
* @ return the answer
* @ since 1.9.5 */
public static < T > Answer < T > delegatesTo ( Object delegate ) { } }
|
return ( Answer < T > ) new ForwardsInvocations ( delegate ) ;
|
public class SocialNetworkUtilities { /** * Get distance between geographical coordinates
* @ param point1 Point1
* @ param point2 Point2
* @ return Distance ( double ) */
public static Double getDistanceBetweenCoordinates ( Tuple2 < Double , Double > point1 , Tuple2 < Double , Double > point2 ) { } }
|
// sqrt ( ( x2 - x1 ) ^ 2 + ( y2 - y2 ) ^ 2 )
Double xDiff = point1 . _1 ( ) - point2 . _1 ( ) ; Double yDiff = point1 . _2 ( ) - point2 . _2 ( ) ; return Math . sqrt ( xDiff * xDiff + yDiff * yDiff ) ;
|
public class UploadServlet { /** * Process an HTML get or post : receives uploaded multipart files into the
 * destination directory , then re - renders the upload form with a status message .
 * @ exception ServletException From inherited class .
 * @ exception IOException From inherited class . */
public void doProcess ( HttpServletRequest req , HttpServletResponse res ) throws ServletException , IOException { } }
|
// NOTE(review): constructing MultipartRequest writes the incoming files to strTargetDirectory
// as a side effect; successfulFileUpload() is then invoked once per received file to build the
// status message. An IOException containing "isn't multipart/form-data" is treated as the
// normal first visit (no file chosen yet), not as an error.
PrintStream out = System . out ; Properties properties = new Properties ( ) ; String strTargetDirectory = this . getDestDirectory ( ) ; String strReceiveMessage = gstrBlank ; try { MultipartRequest multi = new MultipartRequest ( req , strTargetDirectory , MAX_SIZE ) ; this . parseProperties ( properties , multi ) ; if ( DEBUG ) out . println ( ) ; if ( DEBUG ) out . println ( "Files:" ) ; Enumeration < ? > files = multi . getFileNames ( ) ; while ( files . hasMoreElements ( ) ) { String name = ( String ) files . nextElement ( ) ; String filename = multi . getFilesystemName ( name ) ; String type = multi . getContentType ( name ) ; File f = multi . getFile ( name ) ; if ( DEBUG ) { out . println ( "name: " + name ) ; out . println ( "filename: " + filename ) ; out . println ( "type: " + type ) ; if ( f != null ) { out . println ( "f.toString(): " + f . toString ( ) ) ; out . println ( "f.getName(): " + f . getName ( ) ) ; out . println ( "f.exists(): " + f . exists ( ) ) ; out . println ( "f.length(): " + f . length ( ) ) ; out . println ( ) ; } } if ( f != null ) strReceiveMessage += this . successfulFileUpload ( f , properties ) ; } } catch ( IOException ex ) { String strError = ex . getMessage ( ) ; if ( strError . toLowerCase ( ) . indexOf ( "isn't multipart/form-data" ) != - 1 ) strReceiveMessage = gstrBlank ; // Didn ' t specify a file ( probably first time through )
else strReceiveMessage = "<p>Previous upload not successful. Error: " + ex . getMessage ( ) + "</p>" + RETURN ; } this . sendForm ( req , res , strReceiveMessage , properties ) ;
|
public class CharacterStreamWritePool { /** * Write a single character .
 * Writes are chained : each call queues behind the previous pending write so characters
 * reach the underlying stream in call order .
 * @ return a synchronization point resolved once the character has been written ( or on error ) */
public ISynchronizationPoint < IOException > write ( char c ) { } }
|
// Non-buffered streams go through the array-based write(); for buffered streams, write
// asynchronously. If a previous write is still pending, register this write to run after it
// completes, forwarding both completion and error into our own synchronization point.
if ( ! ( stream instanceof ICharacterStream . Writable . Buffered ) ) return write ( new char [ ] { c } , 0 , 1 ) ; ISynchronizationPoint < IOException > last = lastWrite ; if ( last . isUnblocked ( ) ) { lastWrite = ( ( ICharacterStream . Writable . Buffered ) stream ) . writeAsync ( c ) ; return lastWrite ; } SynchronizationPoint < IOException > ours = new SynchronizationPoint < > ( ) ; lastWrite = ours ; last . listenInline ( ( ) -> { ( ( ICharacterStream . Writable . Buffered ) stream ) . writeAsync ( c ) . listenInline ( ours ) ; } , ours ) ; return ours ;
|
public class MatchParserImpl { /** * BetweenPredicate : : = Expression ( < NOT > ) ? < BETWEEN > Expression < AND > Expression */
final public Selector BetweenPredicate ( ) throws ParseException { } }
|
// Generated parser routine (JavaCC style): parse the three expressions, convert
// (expr2, expr3) into a range selector over expr1, and wrap it in a NOT operator when
// the optional NOT token was consumed. The trailing "Missing return statement" Error is
// unreachable boilerplate emitted by the parser generator.
Selector expr1 , expr2 , expr3 ; boolean neg = false ; expr1 = Expression ( ) ; switch ( ( jj_ntk == - 1 ) ? jj_ntk ( ) : jj_ntk ) { case NOT : jj_consume_token ( NOT ) ; neg = true ; break ; default : jj_la1 [ 16 ] = jj_gen ; ; } jj_consume_token ( BETWEEN ) ; expr2 = Expression ( ) ; jj_consume_token ( AND ) ; expr3 = Expression ( ) ; Selector ans = ParseUtil . convertRange ( expr1 , expr2 , expr3 ) ; if ( neg ) { if ( true ) return new OperatorImpl ( Operator . NOT , ans ) ; } else { if ( true ) return ans ; } throw new Error ( "Missing return statement in function" ) ;
|
public class FnObject { /** * Determines whether the result of executing the specified function
* on the target object and the specified object parameter are equal
* in value , this is , whether < tt > functionResult . compareTo ( object ) = = 0 < / tt > .
* Both the function result and the specified object have to implement
* { @ link Comparable } .
* @ param object the object to compare to the target
* @ return true if both objects are equal according to " compareTo " , false if not . */
public static final < X > Function < X , Boolean > eqValueBy ( final IFunction < X , ? > by , final Object object ) { } }
|
return FnFunc . chain ( by , eqValue ( object ) ) ;
|
public class IncrementalArrayData { /** * Loads each increment until full range has been loading , halting in between increment until instructed to
 * proceed .
 * NOTE ( review ) : runs on a background thread ; all data mutations are posted to the UI thread . */
private void loadLoop ( ) throws InterruptedException { } }
|
// Each iteration loads one increment. The first successful load overwrites any stale data
// (lazy clear, so the user never sees a blank list); later loads append. InterruptedIOException
// is rewrapped as InterruptedException (preserving the cause) so interruption semantics are
// uniform; any other Throwable is reported via notifyError on the UI thread and the loop
// pauses at block() until explicitly resumed.
boolean firstItem = true ; boolean moreAvailable = true ; // Loop until all loaded .
while ( moreAvailable ) { // Thread interruptions terminate the loop .
if ( currentThread ( ) . isInterrupted ( ) ) { throw new InterruptedException ( ) ; } try { setLoading ( true ) ; // Load next increment of items .
final Result < ? extends T > result = load ( ) ; moreAvailable = result != null && result . getRemaining ( ) > 0 ; setAvailable ( result != null ? result . getRemaining ( ) : 0 ) ; // If invalidated while shown , we lazily clear the data so the user doesn ' t see blank data while loading .
final boolean needToClear = firstItem ; firstItem = false ; runOnUiThread ( new Runnable ( ) { @ Override public void run ( ) { List < ? extends T > elements = result != null ? result . getElements ( ) : Collections . < T > emptyList ( ) ; if ( needToClear ) { overwriteResult ( elements ) ; } else { appendResult ( elements ) ; } setLoading ( false ) ; } } ) ; } catch ( InterruptedException e ) { throw e ; } catch ( InterruptedIOException e ) { InterruptedException interruptedException = new InterruptedException ( ) ; interruptedException . initCause ( e ) ; throw interruptedException ; } catch ( final Throwable e ) { runOnUiThread ( new Runnable ( ) { @ Override public void run ( ) { notifyError ( e ) ; } } ) ; mError = true ; setLoading ( false ) ; } // Block until instructed to continue , even if an error occurred .
// In this case , loading must be explicitly resumed .
block ( ) ; }
|
public class IntTuples { /** * Lexicographically increment the given tuple in the given range , and
* store the result in the given result tuple . It is assumed that the
* elements of the given tuple are
* { @ link # areElementsGreaterThanOrEqual ( IntTuple , IntTuple ) greater than
* or equal to } the values in the given minimum tuple . < br >
* < br >
* Note that in contrast to most other methods in this class , the
* given result tuple may < b > not < / b > be < code > null < / code > ( but it
* may be identical to the input tuple ) .
* @ param t The input tuple
* @ param min The minimum values
* @ param max The maximum values
* @ param result The result tuple
* @ return Whether the tuple could be incremented without causing an
* overflow */
public static boolean incrementLexicographically ( IntTuple t , IntTuple min , IntTuple max , MutableIntTuple result ) { } }
|
Utils . checkForEqualSize ( t , min ) ; Utils . checkForEqualSize ( t , max ) ; Utils . checkForEqualSize ( t , result ) ; if ( result != t ) { result . set ( t ) ; } return incrementLexicographically ( result , min , max , result . getSize ( ) - 1 ) ;
|
public class XPathParser { /** * Insert room for operation . This will NOT set
 * the length value of the operation , but will update
 * the length value for the total expression .
 * @ param pos The position where the op is to be inserted .
 * @ param length The length of the operation space in the op map .
 * @ param op The op code to the inserted . */
void insertOp ( int pos , int length , int op ) { } }
|
// Shift every op at or after 'pos' up by 'length' slots, iterating backwards so values are
// not clobbered before they are copied; then write the new op code at 'pos' and grow the
// recorded total expression length.
int totalLen = m_ops . getOp ( OpMap . MAPINDEX_LENGTH ) ; for ( int i = totalLen - 1 ; i >= pos ; i -- ) { m_ops . setOp ( i + length , m_ops . getOp ( i ) ) ; } m_ops . setOp ( pos , op ) ; m_ops . setOp ( OpMap . MAPINDEX_LENGTH , totalLen + length ) ;
|
public class A_CmsJspValueWrapper { /** * Parses the wrapped value to a Long integer . < p >
* Note that the result is an Object of type { @ link java . lang . Long } ,
* so in case the wrapped value can not be converted to a number , < code > null < / code > is returned .
* This means you can check for an < code > empty < / code > result in the EL . < p >
* @ return the Long integer value
* @ see # getToInteger ( ) */
public Long getToLong ( ) { } }
|
if ( m_long == null ) { try { m_long = new Long ( Long . parseLong ( getToString ( ) ) ) ; } catch ( NumberFormatException e ) { LOG . info ( e . getLocalizedMessage ( ) ) ; } } return m_long ;
|
public class RelationalOperations { /** * Returns true if polyline _ a contains envelope _ b .
 * Degenerate envelopes ( within tolerance ) are treated as a point or a segment ;
 * a non - degenerate envelope is an area , which a polyline can never contain . */
private static boolean polylineContainsEnvelope_ ( Polyline polyline_a , Envelope envelope_b , double tolerance , ProgressTracker progress_tracker ) { } }
|
// Quick rejection: the polyline's envelope must contain the target envelope.
// Case 1: envelope larger than tolerance in both dimensions -> a true area; a line cannot contain it.
// Case 2: degenerate in both dimensions -> treat as a point.
// Case 3: degenerate in one dimension -> treat as a segment between opposite corners (0 and 2).
Envelope2D env_a = new Envelope2D ( ) , env_b = new Envelope2D ( ) ; envelope_b . queryEnvelope2D ( env_b ) ; polyline_a . queryEnvelope2D ( env_a ) ; if ( ! envelopeInfContainsEnvelope_ ( env_a , env_b , tolerance ) ) return false ; if ( env_b . getHeight ( ) > tolerance && env_b . getWidth ( ) > tolerance ) return false ; // when treated as an area , lines cannot contain
// areas .
if ( env_b . getHeight ( ) <= tolerance && env_b . getWidth ( ) <= tolerance ) { // Treat
// as
// point
Point2D pt_b = envelope_b . getCenterXY ( ) ; return linearPathContainsPoint_ ( polyline_a , pt_b , tolerance ) ; } // Treat as polyline
Polyline polyline_b = new Polyline ( ) ; Point p = new Point ( ) ; envelope_b . queryCornerByVal ( 0 , p ) ; polyline_b . startPath ( p ) ; envelope_b . queryCornerByVal ( 2 , p ) ; polyline_b . lineTo ( p ) ; return linearPathWithinLinearPath_ ( polyline_b , polyline_a , tolerance , false ) ;
|
public class EmailAutoCompleteTextView { /** * Sets the visibility of the clear button . The clear button must also be enabled for it to be visible .
* @ see { @ link # setClearButtonEnabled ( boolean ) }
* @ param visible true if the clear button should be visible , otherwise , false . */
public void setClearVisible ( boolean visible ) { } }
|
if ( mClearButtonEnabled ) { final Drawable d = ( visible ? mTappableDrawable : null ) ; final Drawable [ ] drawables = getCompoundDrawables ( ) ; if ( drawables != null ) { setCompoundDrawables ( drawables [ 0 ] , drawables [ 1 ] , d , drawables [ 3 ] ) ; } else { Log . w ( TAG , "No clear button is available." ) ; } }
|
public class CleaneLingStyleSolver { /** * Initializes the internal solver state :
 * resets counters , ( re ) creates all working collections and statistics objects ,
 * and pushes the root decision frame ( decision level 0 ) onto the control stack . */
private void initialize ( ) { } }
|
// All containers are created fresh here (rather than cleared) so a re-initialization
// cannot leak state from a previous solve; the final control.push seeds level 0.
this . level = 0 ; this . next = 0 ; this . empty = null ; this . scoreIncrement = 1 ; this . ignore = null ; this . vars = new LNGVector < > ( ) ; this . vals = new LNGByteVector ( ) ; this . phases = new LNGByteVector ( ) ; this . decisions = new LNGDoublePriorityQueue ( ) ; this . control = new LNGVector < > ( ) ; this . trail = new LNGIntVector ( ) ; this . addedlits = new LNGIntVector ( 100 ) ; this . seen = new LNGIntVector ( ) ; this . frames = new LNGIntVector ( ) ; this . watches = new LNGVector < > ( ) ; this . stats = new CLStats ( ) ; this . limits = new CLLimits ( ) ; this . model = new LNGBooleanVector ( ) ; this . control . push ( new CLFrame ( ) ) ;
|
public class S3UtteranceReader { /** * { @ inheritDoc } */
@ Override public InputStream read ( final String locale ) { } }
|
Validate . notNull ( locale , "Locale must not be blank." ) ; final String resourcePath = getLeadingPath ( ) + locale + getResourceLocation ( ) ; final S3Object s3Object = s3Client . getObject ( bucketName , resourcePath ) ; Validate . notNull ( s3Object , "Resource " + resourcePath + " does not exist in bucket with name " + bucketName ) ; return s3Object . getObjectContent ( ) ;
|
public class DatabaseRecordIterator { /** * Fetch the next record . */
private void fetchRecord ( ) throws SQLException { } }
|
if ( rs . next ( ) ) { long revision = rs . getLong ( 1 ) ; String journalId = rs . getString ( 2 ) ; String producerId = rs . getString ( 3 ) ; DataInputStream dataIn = new DataInputStream ( rs . getBinaryStream ( 4 ) ) ; record = new ReadRecord ( journalId , producerId , revision , dataIn , 0 , resolver , npResolver ) ; } else { isEOF = true ; }
|
public class EnumKeyDeserializer { /** * { @ inheritDoc } */
@ Override protected E doDeserialize ( String key , JsonDeserializationContext ctx ) { } }
|
try { return Enum . valueOf ( enumClass , key ) ; } catch ( IllegalArgumentException ex ) { if ( ctx . isReadUnknownEnumValuesAsNull ( ) ) { return null ; } throw ex ; }
|
public class XMLWriter {

    /**
     * Escape a string to include it in XML (this is automatically called by other methods of
     * this class when needed). The five XML special characters are replaced by their predefined
     * entities, and control characters (code points below 32) by numeric character references.
     *
     * @param s the text to escape
     * @return the escaped text
     */
    public static String escape(CharSequence s) {
        StringBuilder str = new StringBuilder();
        int len = s.length();
        for (int i = 0; i < len; ++i) {
            char c = s.charAt(i);
            // Restored entity literals: the previous code appended the raw characters
            // (e.g. "&" for '&'), which produced un-escaped output.
            if (c == '&') str.append("&amp;");
            else if (c == '"') str.append("&quot;");
            else if (c == '\'') str.append("&apos;");
            else if (c == '>') str.append("&gt;");
            else if (c == '<') str.append("&lt;");
            else if (c < 32) str.append("&#").append((int) c).append(';');
            else str.append(c);
        }
        return str.toString();
    }
}
|
public class BranchingEnumerator { /** * Move the pointer one step forward . Move is made exactly matching the corresponding nucleotide in the reference
* sequence , so this method prevents branching in the current position . */
private void move1 ( ) { } }
|
if ( node == null ) return ; if ( position >= reference . size ( ) ) { node = null ; return ; } node = node . links [ reference . codeAt ( position ++ ) ] ;
|
public class CloudRedisClient { /** * Deletes a specific Redis instance . Instance stops serving and data is deleted .
* < p > Sample code :
* < pre > < code >
* try ( CloudRedisClient cloudRedisClient = CloudRedisClient . create ( ) ) {
* InstanceName name = InstanceName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ INSTANCE ] " ) ;
* cloudRedisClient . deleteInstanceAsync ( name . toString ( ) ) . get ( ) ;
* < / code > < / pre >
* @ param name Required . Redis instance resource name using the form :
* ` projects / { project _ id } / locations / { location _ id } / instances / { instance _ id } ` where ` location _ id `
* refers to a GCP region
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , OperationMetadata > deleteInstanceAsync ( String name ) { } }
|
DeleteInstanceRequest request = DeleteInstanceRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return deleteInstanceAsync ( request ) ;
|
public class OtpErlangList { /** * Get all the elements from the list as an array .
* @ return an array containing all of the list ' s elements . */
public OtpErlangObject [ ] elements ( ) { } }
|
if ( arity ( ) == 0 ) { return NO_ELEMENTS ; } final OtpErlangObject [ ] res = new OtpErlangObject [ arity ( ) ] ; System . arraycopy ( elems , 0 , res , 0 , res . length ) ; return res ;
|
public class DateUtils { /** * Format date by given pattern .
* @ param date date to be handled .
* @ param pattern pattern use to handle given date .
* @ return a string object of format date by given pattern . */
public static String getDateFormat ( final Date date , final String pattern ) { } }
|
SimpleDateFormat simpleDateFormat = buildDateFormat ( pattern ) ; return simpleDateFormat . format ( date ) ;
|
public class Examples { /** * / * cancel mutualfunds order */
public void cancelMFOrder ( KiteConnect kiteConnect ) throws KiteException , IOException { } }
|
kiteConnect . cancelMFOrder ( "668604240868430" ) ; System . out . println ( "cancel order successful" ) ;
|
public class RadioButtonGroup { /** * This method will add a radio button to the group with the given value . The value must be unique for the group .
* The radio button returned by this method must be added to the required location in the UI Component tree .
* The radio button group uses the { @ link String } representation of the radio button ' s value to identify which
* button has been selected . As the string representation of the radio button ' s value is sent to the client , be
* mindful that it should not be too large .
* @ param value a unique value for the radio button .
* @ return the radio button that was added to the group */
public WRadioButton addRadioButton ( final Object value ) { } }
|
WRadioButton radioButton = new WRadioButton ( this ) ; radioButton . setData ( value ) ; return radioButton ;
|
public class AmazonCodeDeployClient { /** * < note >
* The newer BatchGetDeploymentTargets should be used instead because it works with all compute types .
* < code > ListDeploymentInstances < / code > throws an exception if it is used with a compute platform other than
* EC2 / On - premises or AWS Lambda .
* < / note >
* Lists the instance for a deployment associated with the IAM user or AWS account .
* @ param listDeploymentInstancesRequest
* Represents the input of a ListDeploymentInstances operation .
* @ return Result of the ListDeploymentInstances operation returned by the service .
* @ throws DeploymentIdRequiredException
* At least one deployment ID must be specified .
* @ throws DeploymentDoesNotExistException
* The deployment with the IAM user or AWS account does not exist .
* @ throws DeploymentNotStartedException
* The specified deployment has not started .
* @ throws InvalidNextTokenException
* The next token was specified in an invalid format .
* @ throws InvalidDeploymentIdException
* At least one of the deployment IDs was specified in an invalid format .
* @ throws InvalidInstanceStatusException
* The specified instance status does not exist .
* @ throws InvalidInstanceTypeException
* An invalid instance type was specified for instances in a blue / green deployment . Valid values include
* " Blue " for an original environment and " Green " for a replacement environment .
* @ throws InvalidDeploymentInstanceTypeException
* An instance type was specified for an in - place deployment . Instance types are supported for blue / green
* deployments only .
* @ throws InvalidTargetFilterNameException
* The target filter name is invalid .
* @ throws InvalidComputePlatformException
* The computePlatform is invalid . The computePlatform should be < code > Lambda < / code > or < code > Server < / code > .
* @ sample AmazonCodeDeploy . ListDeploymentInstances
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codedeploy - 2014-10-06 / ListDeploymentInstances "
* target = " _ top " > AWS API Documentation < / a > */
@ Override @ Deprecated public ListDeploymentInstancesResult listDeploymentInstances ( ListDeploymentInstancesRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeListDeploymentInstances ( request ) ;
|
public class JsonJacksonFormat { /** * Parses an object from a string . */
public < T > T read ( final String s , final DataTypeDescriptor < T > descriptor ) throws Exception { } }
|
if ( descriptor == null ) throw new NullPointerException ( "descriptor" ) ; if ( s == null ) return null ; JsonParser parser = factory . createParser ( s ) ; return read ( parser , descriptor ) ;
|
public class FlashImpl { /** * Saves the value of setRedirect ( ) for the next request , if it was true .
 * Uses a cookie when a servlet response is available , otherwise the session map . */
private void _saveRedirectValue ( FacesContext facesContext ) { } }
|
ExternalContext externalContext = facesContext . getExternalContext ( ) ; // This request contains a redirect . This condition is in general
// triggered by a NavigationHandler . After a redirect all request scope
// values get lost , so in order to preserve this value we need to
// pass it between request . One strategy is use a cookie that is never sent
// to the client . Other alternative is use the session map .
// See _ restoreRedirectValue ( ) for restoring this value .
// Prefer the cookie strategy when a servlet response is available; otherwise fall back
// to storing the flag in the session map under the same key.
HttpServletResponse httpResponse = ExternalContextUtils . getHttpServletResponse ( externalContext ) ; if ( httpResponse != null ) { Cookie cookie = _createFlashCookie ( FLASH_REDIRECT , "true" , externalContext ) ; httpResponse . addCookie ( cookie ) ; } else { externalContext . getSessionMap ( ) . put ( FLASH_REDIRECT , true ) ; }
|
public class ReadAheadQueue { /** * Notification that a full message has been received . Counters should be updated :
 * message / byte totals , and the message ' s arrival time stamp .
 * @ param queueData the received message ' s queue entry */
private void notifyMessageReceived ( QueueData queueData ) { } }
|
// Standard trace entry/exit pattern. Byte accounting towards the next read-ahead request
// is only kept when trackBytes is enabled; arrival time uses the cheap approximate clock.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "notifyMessageReceived" , queueData ) ; long messageLength = queueData . getMessageLength ( ) ; if ( trackBytes ) { bytesReceivedSinceLastRequestForMsgs += messageLength ; } // Update the message arrival time
queueData . updateArrivalTime ( approxTimeThread . getApproxTime ( ) ) ; messagesReceived ++ ; currentBytesOnQueue += messageLength ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "notifyMessageReceived" ) ;
|
public class AbstractFxmlView { /** * Gets the URL resource . This will be derived from applied annotation value
* or from naming convention .
* @ param annotation
* the annotation as defined by inheriting class .
* @ return the URL resource */
private URL getURLResource ( final FXMLView annotation ) { } }
|
if ( annotation != null && ! annotation . value ( ) . equals ( "" ) ) { return getClass ( ) . getResource ( annotation . value ( ) ) ; } else { return getClass ( ) . getResource ( getFxmlPath ( ) ) ; }
|
public class AnalysisResults { /** * Get the results of the i - th performed run of the given search when solving the given problem .
* @ param problemID ID of the problem
* @ param searchID ID of the applied search
* @ param i search run index
* @ return results of i - th run of the given search when solving the given problem
* @ throws UnknownIDException if an unknown problem or search ID is given
* @ throws IndexOutOfBoundsException if there is no i - th run for this search and problem */
public SearchRunResults < SolutionType > getRun ( String problemID , String searchID , int i ) { } }
|
if ( ! results . containsKey ( problemID ) ) { throw new UnknownIDException ( "Unknown problem ID " + problemID + "." ) ; } if ( ! results . get ( problemID ) . containsKey ( searchID ) ) { throw new UnknownIDException ( "Unknown search ID " + searchID + " for problem " + problemID + "." ) ; } return results . get ( problemID ) . get ( searchID ) . get ( i ) ;
|
public class MemcachedNodesManager { /** * Changes the sessionId by setting the given jvmRoute and replacing the memcachedNodeId if it ' s currently
 * set to a failoverNodeId .
 * @ param sessionId the current session id
 * @ param jvmRoute the new jvmRoute to set ; a null or blank route strips the existing route instead .
 * @ return the session id with maybe new jvmRoute and / or new memcachedId . */
public String changeSessionIdForTomcatFailover ( @ Nonnull final String sessionId , final String jvmRoute ) { } }
|
// Step 1: apply (or strip) the jvmRoute. Step 2: when node ids are encoded in the session
// id and the current node is a configured failover node, rewrite the id to point at an
// available node; otherwise the route-adjusted id is returned unchanged.
final String newSessionId = jvmRoute != null && ! jvmRoute . trim ( ) . isEmpty ( ) ? _sessionIdFormat . changeJvmRoute ( sessionId , jvmRoute ) : _sessionIdFormat . stripJvmRoute ( sessionId ) ; if ( isEncodeNodeIdInSessionId ( ) ) { final String nodeId = _sessionIdFormat . extractMemcachedId ( newSessionId ) ; if ( _failoverNodeIds != null && _failoverNodeIds . contains ( nodeId ) ) { final String newNodeId = _nodeIdService . getAvailableNodeId ( nodeId ) ; if ( newNodeId != null ) { return _sessionIdFormat . createNewSessionId ( newSessionId , newNodeId ) ; } } } return newSessionId ;
|
public class CreateJobRequest { /** * The default arguments for this job .
* You can specify arguments here that your own job - execution script consumes , as well as arguments that AWS Glue
* itself consumes .
* For information about how to specify and consume your own Job arguments , see the < a
* href = " http : / / docs . aws . amazon . com / glue / latest / dg / aws - glue - programming - python - calling . html " > Calling AWS Glue APIs
* in Python < / a > topic in the developer guide .
* For information about the key - value pairs that AWS Glue consumes to set up your job , see the < a
* href = " http : / / docs . aws . amazon . com / glue / latest / dg / aws - glue - programming - etl - glue - arguments . html " > Special Parameters
* Used by AWS Glue < / a > topic in the developer guide .
* @ param defaultArguments
* The default arguments for this job . < / p >
* You can specify arguments here that your own job - execution script consumes , as well as arguments that AWS
* Glue itself consumes .
* For information about how to specify and consume your own Job arguments , see the < a
* href = " http : / / docs . aws . amazon . com / glue / latest / dg / aws - glue - programming - python - calling . html " > Calling AWS Glue
* APIs in Python < / a > topic in the developer guide .
* For information about the key - value pairs that AWS Glue consumes to set up your job , see the < a
* href = " http : / / docs . aws . amazon . com / glue / latest / dg / aws - glue - programming - etl - glue - arguments . html " > Special
* Parameters Used by AWS Glue < / a > topic in the developer guide .
* @ return Returns a reference to this object so that method calls can be chained together . */
public CreateJobRequest withDefaultArguments ( java . util . Map < String , String > defaultArguments ) { } }
|
setDefaultArguments ( defaultArguments ) ; return this ;
|
public class FileStreamStore { /** * Writes the contents of buf to the store as one framed packet :
 * 2 - byte magic header , 4 - byte big - endian data length , data body , 1 - byte magic footer .
 * @ param buf ByteBuffer to write
 * @ return offset where the packet begins , or - 1 on error */
public synchronized long write ( final ByteBuffer buf ) { } }
|
// Packets larger than the internal buffer bypass it via a gathering write (direct I/O);
// smaller packets are framed into the buffer and optionally padded up to the next block
// boundary when alignBlocks is on. offsetOutputUncommited tracks buffered-but-unflushed
// bytes; the direct path commits immediately (and optionally fsyncs, notifying the callback).
if ( ! validState ) { throw new InvalidStateException ( ) ; } final int packet_size = ( HEADER_LEN + buf . limit ( ) + FOOTER_LEN ) ; // short + int + data + byte
final boolean useDirectIO = ( packet_size > ( 1 << bits ) ) ; try { if ( useDirectIO ) { log . warn ( "WARN: usingDirectIO packet size is greater (" + packet_size + ") than file buffer (" + bufOutput . capacity ( ) + ")" ) ; } // Align output
if ( alignBlocks && ! useDirectIO ) { final int diffOffset = nextBlockBoundary ( offsetOutputUncommited ) ; if ( packet_size > diffOffset ) { // log . warn ( " WARN : aligning offset = " + offsetOutputUncommited + " to = " +
// ( offsetOutputUncommited + diffOffset ) + " needed = " + packet _ size + " allowed = " +
// diffOffset ) ;
alignBuffer ( diffOffset ) ; offsetOutputUncommited += diffOffset ; } } // Remember current offset
final long offset = offsetOutputUncommited ; // Write pending buffered data to disk
if ( bufOutput . remaining ( ) < packet_size ) { flushBuffer ( ) ; } // Write new data to buffer
bufOutput . put ( ( byte ) ( ( MAGIC >> 8 ) & 0xFF ) ) ; // Header - Magic ( short , 2 bytes , msb - first )
bufOutput . put ( ( byte ) ( MAGIC & 0xFF ) ) ; // Header - Magic ( short , 2 bytes , lsb - last )
bufOutput . putInt ( buf . limit ( ) ) ; // Header - Data Size ( int , 4 bytes )
if ( useDirectIO ) { bufOutput . flip ( ) ; fcOutput . write ( new ByteBuffer [ ] { bufOutput , buf , ByteBuffer . wrap ( new byte [ ] { MAGIC_FOOT } ) } ) ; // Write Header + Data + Footer
bufOutput . clear ( ) ; offsetOutputUncommited = offsetOutputCommited = fcOutput . position ( ) ; if ( syncOnFlush ) { fcOutput . force ( false ) ; if ( callback != null ) callback . synched ( offsetOutputCommited ) ; } } else { bufOutput . put ( buf ) ; // Data Body
bufOutput . put ( MAGIC_FOOT ) ; // Footer
// Increment offset of buffered data ( header + user - data )
offsetOutputUncommited += packet_size ; if ( flushOnWrite ) { flushBuffer ( ) ; } } return offset ; } catch ( Exception e ) { log . error ( "Exception in write()" , e ) ; } return - 1L ;
|
public class DoradusServer { /** * Write version to the local version file found on the classpath .
 * NOTE ( review ) : getResource ( ) . getPath ( ) assumes the resource exists , lives on the
 * filesystem ( not inside a jar ) , and contains no URL - encoded characters - - confirm
 * against the deployment layout ; a missing resource would NPE here . */
private static void writeVersionToVerFile ( String version ) throws IOException { } }
|
// declared in a try - with - resource statement , it will be closed regardless of it completes normally or not
try ( PrintWriter writer = new PrintWriter ( new File ( DoradusServer . class . getResource ( "/" + VERSION_FILE ) . getPath ( ) ) ) ) { writer . write ( version ) ; }
|
public class AVIMConversationEventHandler { /** * 当前用户被禁言通知处理函数
* @ param client 聊天客户端
* @ param conversation 对话
* @ param operator 操作者 id */
public void onMuted ( AVIMClient client , AVIMConversation conversation , String operator ) { } }
|
LOGGER . d ( "Notification --- " + " you are muted by " + operator ) ;
|
public class URIBaseMethods { /** * Sets a path ( s ) for the URI .
* Note : File separator is handled in this method
* and should not be passed as parameter .
* @ return The updated { @ link T } object . */
public T path ( String path ) { } }
|
if ( path . length ( ) == 0 ) { this . path += encodePath ( path ) ; } else { this . path += "/" + encodePath ( path ) ; } return returnThis ( ) ;
|
public class Matrix4x3f { /** * Apply rotation of < code > angles . x < / code > radians about the X axis , followed by a rotation of < code > angles . y < / code > radians about the Y axis and
* followed by a rotation of < code > angles . z < / code > radians about the Z axis .
* When used with a right - handed coordinate system , the produced rotation will rotate a vector
* counter - clockwise around the rotation axis , when viewing along the negative axis direction towards the origin .
* When used with a left - handed coordinate system , the rotation is clockwise .
* If < code > M < / code > is < code > this < / code > matrix and < code > R < / code > the rotation matrix ,
* then the new matrix will be < code > M * R < / code > . So when transforming a
* vector < code > v < / code > with the new matrix by using < code > M * R * v < / code > , the
* rotation will be applied first !
* This method is equivalent to calling : < code > rotateX ( angles . x ) . rotateY ( angles . y ) . rotateZ ( angles . z ) < / code >
* @ param angles
* the Euler angles
* @ return this */
public Matrix4x3f rotateXYZ ( Vector3f angles ) { } }
|
return rotateXYZ ( angles . x , angles . y , angles . z ) ;
|
public class NeuralNetwork {

    /**
     * Forms the inputs to the activation function: input2ActFun = W * _inputs + bias,
     * computed row by row over the flattened weight array.
     *
     * This method matches the exact operation of gemv_row_optimized in order to match
     * all the bits — the 8-way unrolled accumulation order must not be changed, or the
     * floating-point results will differ from the reference implementation.
     *
     * @return vector of length _outSize holding one weighted sum plus bias per output unit
     */
    public double[] formNNInputs() {
        double[] input2ActFun = new double[_outSize];
        int cols = _inputs.length;
        int rows = input2ActFun.length;
        // first column of the scalar tail (elements past the last full group of 8)
        int extra = cols - cols % 8;
        // bound for the unrolled loop; the "-1" mirrors gemv_row_optimized exactly
        int multiple = (cols / 8) * 8 - 1;
        int idx = 0; // start offset of the current row inside the flattened weight array
        for (int row = 0; row < rows; row++) {
            // eight partial sums preserve the reference accumulation order bit-for-bit
            double psum0 = 0, psum1 = 0, psum2 = 0, psum3 = 0, psum4 = 0, psum5 = 0, psum6 = 0, psum7 = 0;
            for (int col = 0; col < multiple; col += 8) {
                int off = idx + col;
                psum0 += _weightsAndBias._wValues[off] * _inputs[col];
                psum1 += _weightsAndBias._wValues[off + 1] * _inputs[col + 1];
                psum2 += _weightsAndBias._wValues[off + 2] * _inputs[col + 2];
                psum3 += _weightsAndBias._wValues[off + 3] * _inputs[col + 3];
                psum4 += _weightsAndBias._wValues[off + 4] * _inputs[col + 4];
                psum5 += _weightsAndBias._wValues[off + 5] * _inputs[col + 5];
                psum6 += _weightsAndBias._wValues[off + 6] * _inputs[col + 6];
                psum7 += _weightsAndBias._wValues[off + 7] * _inputs[col + 7];
            }
            // combine the partial sums in the same two-step order as the reference
            input2ActFun[row] += psum0 + psum1 + psum2 + psum3;
            input2ActFun[row] += psum4 + psum5 + psum6 + psum7;
            // scalar tail for columns not covered by the unrolled loop
            for (int col = extra; col < cols; col++) {
                input2ActFun[row] += _weightsAndBias._wValues[idx + col] * _inputs[col];
            }
            // finally add the bias term for this output unit
            input2ActFun[row] += _weightsAndBias._bValues[row];
            idx += cols;
        }
        return input2ActFun;
    }
}
|
public class TypicalFaicliApiFailureHook { @ Override public ApiResponse handleValidationError ( ApiFailureResource resource ) { } }
|
final FaicliUnifiedFailureType failureType = FaicliUnifiedFailureType . VALIDATION_ERROR ; final FaicliUnifiedFailureResult result = createFailureResult ( failureType , resource , null ) ; return asJson ( result ) . httpStatus ( prepareBusinessFailureStatus ( ) ) ;
|
public class Uploader { /** * Upload a photo from a byte - array .
* @ param data
* The photo data as a byte array
* @ param metaData
* The meta data
* @ return photoId or ticketId
* @ throws FlickrException */
@ Override public String upload ( byte [ ] data , UploadMetaData metaData ) throws FlickrException { } }
|
Payload payload = new Payload ( data ) ; return sendUploadRequest ( metaData , payload ) ;
|
public class CmsSubscriptionManager {

    /**
     * Sets the name of the database pool to use.<p>
     *
     * @param poolName the name of the database pool to use
     * @throws CmsRuntimeException if the subscription manager configuration is already frozen
     */
    public void setPoolName(String poolName) {
        // The configuration becomes immutable once frozen; reject late changes loudly.
        if (m_frozen) {
            throw new CmsRuntimeException(Messages.get().container(Messages.ERR_CONFIG_SUBSCRIPTIONMANAGER_FROZEN_0));
        }
        m_poolName = poolName;
    }
}
|
public class PrimitiveWrapper { /** * Convert all wrapper types in { @ code types } to their primitive
* counter parts .
* @ param types
* The array of types that should be converted .
* @ return A new array where all wrapped types have been converted to their
* primitive counter part . */
public static Class < ? > [ ] toPrimitiveType ( Class < ? > [ ] types ) { } }
|
if ( types == null ) { throw new IllegalArgumentException ( "types cannot be null" ) ; } Class < ? > [ ] convertedTypes = new Class < ? > [ types . length ] ; for ( int i = 0 ; i < types . length ; i ++ ) { final Class < ? > originalType = types [ i ] ; Class < ? > primitiveType = primitiveWrapper . get ( originalType ) ; if ( primitiveType == null ) { convertedTypes [ i ] = originalType ; } else { convertedTypes [ i ] = primitiveType ; } } return convertedTypes ;
|
public class GvmCluster {

    /**
     * Adds the specified cluster to this cluster by merging its aggregate
     * statistics (count and the m0/m1/m2 accumulators) into this one.
     *
     * @param cluster the cluster to be added; must not be this cluster itself
     */
    void add(GvmCluster<S, K> cluster) {
        // A cluster cannot be merged into itself.
        if (cluster == this) throw new IllegalArgumentException();
        if (cluster.count == 0) return; // nothing to do
        if (count == 0) {
            // this cluster is empty: simply become a copy of the other cluster
            set(cluster);
        } else {
            count += cluster.count;
            // TODO accelerate add
            m0 += cluster.m0;
            clusters.space.add(m1, cluster.m1);
            clusters.space.add(m2, cluster.m2);
            // recompute derived state after the accumulators changed
            update();
        }
    }
}
|
public class PolicyDefinitionsInner { /** * Gets all the built in policy definitions .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; PolicyDefinitionInner & gt ; object */
public Observable < Page < PolicyDefinitionInner > > listBuiltInNextAsync ( final String nextPageLink ) { } }
|
return listBuiltInNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < PolicyDefinitionInner > > , Page < PolicyDefinitionInner > > ( ) { @ Override public Page < PolicyDefinitionInner > call ( ServiceResponse < Page < PolicyDefinitionInner > > response ) { return response . body ( ) ; } } ) ;
|
public class Days { /** * Returns a new instance with the specified number of days added .
* This instance is immutable and unaffected by this method call .
* @ param days the amount of days to add , may be negative
* @ return the new period plus the specified number of days
* @ throws ArithmeticException if the result overflows an int */
public Days plus ( int days ) { } }
|
if ( days == 0 ) { return this ; } return Days . days ( FieldUtils . safeAdd ( getValue ( ) , days ) ) ;
|
public class BenchmarkAssociationSpeedRandom { private FastQueue < TupleDesc_F64 > createSet ( Random rand ) { } }
|
FastQueue < TupleDesc_F64 > ret = new FastQueue < > ( 10 , TupleDesc_F64 . class , ( ) -> new TupleDesc_F64 ( DOF ) ) ; for ( int i = 0 ; i < NUM_FEATURES ; i ++ ) { TupleDesc_F64 t = ret . grow ( ) ; for ( int j = 0 ; j < DOF ; j ++ ) { t . value [ j ] = ( rand . nextDouble ( ) - 0.5 ) * 20 ; } } return ret ;
|
public class S2SBaseFormGenerator { /** * Sort the attachments . */
public void sortAttachments ( ByteArrayInputStream byteArrayInputStream ) { } }
|
List < String > attachmentNameList = new ArrayList < > ( ) ; List < AttachmentData > attacmentList = getAttachments ( ) ; List < AttachmentData > tempAttacmentList = new ArrayList < > ( ) ; try { DocumentBuilderFactory domParserFactory = DocumentBuilderFactory . newInstance ( ) ; DocumentBuilder domParser = domParserFactory . newDocumentBuilder ( ) ; Document document = domParser . parse ( byteArrayInputStream ) ; byteArrayInputStream . close ( ) ; NodeList fileLocationList = document . getElementsByTagName ( NARRATIVE_ATTACHMENT_FILE_LOCATION ) ; for ( int itemLocation = 0 ; itemLocation < fileLocationList . getLength ( ) ; itemLocation ++ ) { String attachmentName = fileLocationList . item ( itemLocation ) . getAttributes ( ) . item ( 0 ) . getNodeValue ( ) ; String [ ] name = attachmentName . split ( KEY_VALUE_SEPARATOR ) ; String fileName = name [ name . length - 1 ] ; attachmentNameList . add ( fileName ) ; } } catch ( Exception e ) { LOG . error ( e . getMessage ( ) , e ) ; } for ( String attachmentName : attachmentNameList ) { for ( AttachmentData attachment : attacmentList ) { String [ ] names = attachment . getContentId ( ) . split ( KEY_VALUE_SEPARATOR ) ; String fileName = names [ names . length - 1 ] ; if ( fileName . equalsIgnoreCase ( attachmentName ) ) { tempAttacmentList . add ( attachment ) ; } } } if ( tempAttacmentList . size ( ) > 0 ) { attachments . clear ( ) ; for ( AttachmentData tempAttachment : tempAttacmentList ) { attachments . add ( tempAttachment ) ; } }
|
public class WebSocketScopeManager { /** * Set the application scope for this manager .
* @ param appScope */
public void setApplication ( IScope appScope ) { } }
|
log . debug ( "Application scope: {}" , appScope ) ; this . appScope = appScope ; // add the name to the collection ( no ' / ' prefix )
activeRooms . add ( appScope . getName ( ) ) ;
|
public class SwiffyFallbackAsset {

    /**
     * Sets the asset value for this SwiffyFallbackAsset.
     *
     * @param asset The Swiffy asset.
     */
    public void setAsset(com.google.api.ads.admanager.axis.v201808.CreativeAsset asset) {
        // Plain field assignment; no validation or defensive copy is performed.
        this.asset = asset;
    }
}
|
public class Bech32 {

    /**
     * Expand a HRP for use in checksum computation.
     *
     * <p>Layout of the result: the high bits ({@code c >>> 5}) of every character,
     * a single zero separator, then the low 5 bits ({@code c & 0x1f}) of every
     * character. Each character is masked to standard 7-bit ASCII first.
     */
    private static byte[] expandHrp(final String hrp) {
        final int length = hrp.length();
        final byte[] expanded = new byte[length * 2 + 1];
        expanded[length] = 0; // separator between the high-bit and low-bit halves
        for (int i = 0; i < length; i++) {
            final int c = hrp.charAt(i) & 0x7f; // limit to standard 7-bit ASCII
            expanded[i] = (byte) ((c >>> 5) & 0x07);
            expanded[i + length + 1] = (byte) (c & 0x1f);
        }
        return expanded;
    }
}
|
public class GeneratorMain {

    /**
     * Initialize cluster labels and models.
     *
     * Clusters that are set to "reassign" will have their labels set to null, or
     * if there is only one possible reassignment, to this target label.
     *
     * @param generators Cluster generators
     * @param labels Labels (output)
     * @param models Models (output)
     * @param reassign Pattern for clusters to reassign; may be null, meaning no reassignment
     */
    private void initLabelsAndModels(ArrayList<GeneratorInterface> generators, ClassLabel[] labels, Model[] models, Pattern reassign) {
        int existingclusters = 0;
        if (reassign != null) {
            // Assign labels/models only to clusters NOT matching the reassign pattern;
            // matching clusters keep a null label so they can be reassigned later.
            for (int i = 0; i < labels.length; i++) {
                final GeneratorInterface curclus = generators.get(i);
                if (!reassign.matcher(curclus.getName()).find()) {
                    labels[i] = new SimpleClassLabel(curclus.getName());
                    models[i] = curclus.makeModel();
                    ++existingclusters;
                }
            }
            if (existingclusters == 0) {
                // Pattern matched everything; fall through to the default case below.
                LOG.warning("All clusters matched the 'reassign' pattern. Ignoring.");
            }
            if (existingclusters == 1) {
                // No need to test - only one possible answer.
                for (int i = 0; i < labels.length; i++) {
                    if (labels[i] != null) {
                        // Propagate the single surviving label/model to every cluster.
                        Arrays.fill(labels, labels[i]);
                        Arrays.fill(models, models[i]);
                        break;
                    }
                }
            }
            if (existingclusters == labels.length) {
                LOG.warning("No clusters matched the 'reassign' pattern.");
            }
        }
        // Default case, every cluster has a label and model.
        // (Also reached when the pattern matched all clusters — see the warning above.)
        if (existingclusters == 0) {
            for (int i = 0; i < labels.length; i++) {
                final GeneratorInterface curclus = generators.get(i);
                labels[i] = new SimpleClassLabel(curclus.getName());
                models[i] = curclus.makeModel();
            }
        }
    }
}
|
public class MavenLinkerPublisher2 { /** * Synchronize because { @ link Run # addOrReplaceAction ( hudson . model . Action ) } is not thread safe */
@ Override public synchronized void process ( StepContext context , Element mavenSpyLogsElt ) throws IOException , InterruptedException { } }
|
Run < ? , ? > run = context . get ( Run . class ) ; // we replace instead of because we want to refresh the cache org . jenkinsci . plugins . pipeline . maven . publishers . MavenReport . getGeneratedArtifacts ( )
run . addOrReplaceAction ( new MavenReport ( run ) ) ;
|
public class PdfPublicKeySecurityHandler { /** * Routine for decode output of PdfContentByte . escapeString ( byte [ ] bytes ) .
* It should be moved to PdfContentByte . */
static public byte [ ] unescapedString ( byte [ ] bytes ) throws BadPdfFormatException { } }
|
ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; int index = 0 ; if ( bytes [ 0 ] != '(' && bytes [ bytes . length - 1 ] != ')' ) throw new BadPdfFormatException ( "Expect '(' and ')' at begin and end of the string." ) ; while ( index < bytes . length ) { if ( bytes [ index ] == '\\' ) { index ++ ; switch ( bytes [ index ] ) { case 'b' : baos . write ( '\b' ) ; break ; case 'f' : baos . write ( '\f' ) ; break ; case 't' : baos . write ( '\t' ) ; break ; case 'n' : baos . write ( '\n' ) ; break ; case 'r' : baos . write ( '\r' ) ; break ; case '(' : baos . write ( '(' ) ; break ; case ')' : baos . write ( ')' ) ; break ; case '\\' : baos . write ( '\\' ) ; break ; } } else baos . write ( bytes [ index ] ) ; index ++ ; } return baos . toByteArray ( ) ;
|
public class ContentTypeFilter { /** * Add a content type header to any request whose path matches one of the
* supplied paths . */
@ Override public void doFilter ( ServletRequest request , ServletResponse response , FilterChain chain ) throws IOException , ServletException { } }
|
HttpServletRequest httpServletRequest = ( HttpServletRequest ) request ; for ( String path : mediaTypes . keySet ( ) ) { if ( matches ( httpServletRequest , path ) ) { response . setContentType ( mediaTypes . get ( path ) ) ; break ; } } chain . doFilter ( request , response ) ;
|
public class UpdateDescription {

    /**
     * Find the diff between two documents.
     *
     * <p>NOTE: This does not do a full diff on {@link BsonArray}. If there is
     * an inequality between the old and new array, the old array will
     * simply be replaced by the new one.
     *
     * @param beforeDocument original document
     * @param afterDocument document to diff on
     * @param onKey the key for our depth level (null at the top level); child keys are
     *              emitted in dotted "onKey.child" form so the output documents stay flat
     * @param updatedFields contiguous document of updated fields,
     *                      nested or otherwise
     * @param removedFields contiguous list of removedFields,
     *                      nested or otherwise
     * @return a description of the updated fields and removed keys between the documents
     */
    private static UpdateDescription diff(final @Nonnull BsonDocument beforeDocument,
            final @Nonnull BsonDocument afterDocument,
            final @Nullable String onKey,
            final BsonDocument updatedFields,
            final Set<String> removedFields) {
        // for each key in this document...
        for (final Map.Entry<String, BsonValue> entry : beforeDocument.entrySet()) {
            final String key = entry.getKey();
            // don't worry about the _id or version field for now
            if (key.equals("_id") || key.equals(DOCUMENT_VERSION_FIELD)) {
                continue;
            }
            final BsonValue oldValue = entry.getValue();
            final String actualKey = onKey == null ? key : String.format("%s.%s", onKey, key);
            // if the key exists in the other document AND both are BsonDocuments
            // diff the documents recursively, carrying over the keys to keep
            // updatedFields and removedFields flat.
            // this will allow us to reference whole objects as well as nested
            // properties.
            // else if the key does not exist, the key has been removed.
            if (afterDocument.containsKey(key)) {
                final BsonValue newValue = afterDocument.get(key);
                if (oldValue instanceof BsonDocument && newValue instanceof BsonDocument) {
                    diff((BsonDocument) oldValue, (BsonDocument) newValue, actualKey, updatedFields, removedFields);
                } else if (!oldValue.equals(newValue)) {
                    // scalar or array change: record the new value wholesale (no array diff)
                    updatedFields.put(actualKey, newValue);
                }
            } else {
                removedFields.add(actualKey);
            }
        }
        // for each key in the other document...
        for (final Map.Entry<String, BsonValue> entry : afterDocument.entrySet()) {
            final String key = entry.getKey();
            // don't worry about the _id or version field for now
            if (key.equals("_id") || key.equals(DOCUMENT_VERSION_FIELD)) {
                continue;
            }
            final BsonValue newValue = entry.getValue();
            // if the key is not in the this document,
            // it is a new key with a new value.
            // updatedFields will included keys that must
            // be newly created.
            final String actualKey = onKey == null ? key : String.format("%s.%s", onKey, key);
            if (!beforeDocument.containsKey(key)) {
                updatedFields.put(actualKey, newValue);
            }
        }
        return new UpdateDescription(updatedFields, removedFields);
    }
}
|
public class UriEscape { /** * Perform am URI path segment < strong > unescape < / strong > operation
* on a < tt > char [ ] < / tt > input .
* This method will unescape every percent - encoded ( < tt > % HH < / tt > ) sequences present in input ,
* even for those characters that do not need to be percent - encoded in this context ( unreserved characters
* can be percent - encoded even if / when this is not required , though it is not generally considered a
* good practice ) .
* This method will use specified < tt > encoding < / tt > in order to determine the characters specified in the
* percent - encoded byte sequences .
* This method is < strong > thread - safe < / strong > .
* @ param text the < tt > char [ ] < / tt > to be unescaped .
* @ param offset the position in < tt > text < / tt > at which the escape operation should start .
* @ param len the number of characters in < tt > text < / tt > that should be escaped .
* @ param writer the < tt > java . io . Writer < / tt > to which the unescaped result will be written . Nothing will
* be written at all to this writer if input is < tt > null < / tt > .
* @ param encoding the encoding to be used for unescaping .
* @ throws IOException if an input / output exception occurs */
public static void unescapeUriPathSegment ( final char [ ] text , final int offset , final int len , final Writer writer , final String encoding ) throws IOException { } }
|
if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( encoding == null ) { throw new IllegalArgumentException ( "Argument 'encoding' cannot be null" ) ; } final int textLen = ( text == null ? 0 : text . length ) ; if ( offset < 0 || offset > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } if ( len < 0 || ( offset + len ) > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } UriEscapeUtil . unescape ( text , offset , len , writer , UriEscapeUtil . UriEscapeType . PATH_SEGMENT , encoding ) ;
|
public class AbstractIoAcceptor { /** * { @ inheritDoc }
* @ org . apache . xbean . Property nestedType = " java . net . SocketAddress " */
@ Override public final void setDefaultLocalAddresses ( SocketAddress firstLocalAddress , SocketAddress ... otherLocalAddresses ) { } }
|
if ( otherLocalAddresses == null ) { otherLocalAddresses = new SocketAddress [ 0 ] ; } Collection < SocketAddress > newLocalAddresses = new ArrayList < > ( otherLocalAddresses . length + 1 ) ; newLocalAddresses . add ( firstLocalAddress ) ; Collections . addAll ( newLocalAddresses , otherLocalAddresses ) ; setDefaultLocalAddresses ( newLocalAddresses ) ;
|
public class Model { /** * Gets attribute value as < code > Integer < / code > .
* If there is a { @ link Converter } registered for the attribute that converts from Class < code > S < / code > to Class
* < code > java . lang . Integer < / code > , given the attribute value is an instance of < code > S < / code > , then it will be used ,
* otherwise performs a conversion using { @ link Convert # toInteger ( Object ) } .
* @ param attributeName name of attribute to convert
* @ return value converted to < code > Integer < / code > */
public Integer getInteger ( String attributeName ) { } }
|
Object value = getRaw ( attributeName ) ; Converter < Object , Integer > converter = modelRegistryLocal . converterForValue ( attributeName , value , Integer . class ) ; return converter != null ? converter . convert ( value ) : Convert . toInteger ( value ) ;
|
public class GradientWrapper {

    /**
     * Returns the interpolated color obtained by moving the given fraction of the
     * way from COLOR1 towards COLOR2, per channel (including alpha). The fraction
     * should be a value between 0 and 1.
     *
     * @param COLOR1 The first color, e.g. 0xFF00FF00 for a pure green
     * @param COLOR2 The second color, e.g. 0xFFFF0000 for a pure red
     * @param FRACTION The fraction between those two colors, e.g. 0.5f yields the midpoint
     * @return the interpolated color between COLOR1 and COLOR2 at the given fraction
     */
    private Color interpolateColor(final Color COLOR1, final Color COLOR2, final float FRACTION) {
        assert (Float.compare(FRACTION, 0f) >= 0 && Float.compare(FRACTION, 1f) <= 0);
        final float INT_TO_FLOAT_CONST = 1f / 255f;
        // endpoint channel values normalized to [0, 1]
        final float startRed = COLOR1.getRed() * INT_TO_FLOAT_CONST;
        final float startGreen = COLOR1.getGreen() * INT_TO_FLOAT_CONST;
        final float startBlue = COLOR1.getBlue() * INT_TO_FLOAT_CONST;
        final float startAlpha = COLOR1.getAlpha() * INT_TO_FLOAT_CONST;
        final float endRed = COLOR2.getRed() * INT_TO_FLOAT_CONST;
        final float endGreen = COLOR2.getGreen() * INT_TO_FLOAT_CONST;
        final float endBlue = COLOR2.getBlue() * INT_TO_FLOAT_CONST;
        final float endAlpha = COLOR2.getAlpha() * INT_TO_FLOAT_CONST;
        // start + delta * fraction, clamped back into the valid [0, 1] range
        final float red = clamp01(startRed + (endRed - startRed) * FRACTION);
        final float green = clamp01(startGreen + (endGreen - startGreen) * FRACTION);
        final float blue = clamp01(startBlue + (endBlue - startBlue) * FRACTION);
        final float alpha = clamp01(startAlpha + (endAlpha - startAlpha) * FRACTION);
        return new Color(red, green, blue, alpha);
    }

    /** Clamps the given value into [0, 1]. */
    private static float clamp01(final float value) {
        return value < 0f ? 0f : (value > 1f ? 1f : value);
    }
}
|
public class RootInjector { /** * Injects the root webelement into an element .
* @ param root The root webelement .
* @ param element The element .
* @ since 0.3.0 */
public static < E > void rootElement ( WebElement root , E element ) { } }
|
if ( element == null || root == null ) return ; if ( element instanceof WebElement ) return ; for ( Class < ? > clazz = element . getClass ( ) ; ! clazz . equals ( Object . class ) ; clazz = clazz . getSuperclass ( ) ) { if ( Enhancer . isEnhanced ( clazz ) ) continue ; Field [ ] fields = clazz . getDeclaredFields ( ) ; for ( Field field : fields ) { setRootElementField ( root , element , field ) ; } }
|
public class StoreConfig { /** * Gets an integer property via a string property name .
* @ param pName - the property name
* @ param defaultValue - the default property value
* @ return an integer property */
public int getInt ( String pName , int defaultValue ) { } }
|
String pValue = _properties . getProperty ( pName ) ; return parseInt ( pName , pValue , defaultValue ) ;
|
public class ViewDefinition { /** * Returns a builder for a BigQuery view definition .
* @ param query the query used to generate the table
* @ param functions user - defined functions that can be used by the query */
public static Builder newBuilder ( String query , List < UserDefinedFunction > functions ) { } }
|
return newBuilder ( query ) . setUserDefinedFunctions ( functions ) ;
|
public class MeasurementUnitsImpl {

    /**
     * Sets the {@code xoaUnits} attribute and emits an EMF SET notification
     * to registered adapters when anyone is listening.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setXoaUnits(Integer newXoaUnits) {
        // Keep the old value so the notification can carry old -> new.
        Integer oldXoaUnits = xoaUnits;
        xoaUnits = newXoaUnits;
        // Standard EMF pattern: only build/dispatch the notification when required.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.MEASUREMENT_UNITS__XOA_UNITS, oldXoaUnits, xoaUnits));
    }
}
|
public class PerceptronPOSTagger { /** * 标注
* @ param wordList
* @ return */
@ Override public String [ ] tag ( List < String > wordList ) { } }
|
String [ ] termArray = new String [ wordList . size ( ) ] ; wordList . toArray ( termArray ) ; return tag ( termArray ) ;
|
public class CmsVfsModePropertyEditor { /** * Builds the fields for the configured properties in the first tab . < p > */
private void internalBuildConfiguredFields ( ) { } }
|
Map < String , CmsClientProperty > ownProps = m_handler . getOwnProperties ( ) ; List < String > keys = new ArrayList < String > ( m_propertyConfig . keySet ( ) ) ; moveToTop ( keys , CmsClientProperty . PROPERTY_NAVTEXT ) ; moveToTop ( keys , CmsClientProperty . PROPERTY_DESCRIPTION ) ; moveToTop ( keys , CmsClientProperty . PROPERTY_TITLE ) ; for ( String propName : keys ) { buildField ( ownProps , propName , Mode . effective ) ; }
|
public class Graph { /** * Returns a transposed graph from this graph */
public Graph < T > transpose ( ) { } }
|
Builder < T > builder = new Builder < > ( ) ; builder . addNodes ( nodes ) ; // reverse edges
edges . keySet ( ) . forEach ( u -> { edges . get ( u ) . stream ( ) . forEach ( v -> builder . addEdge ( v , u ) ) ; } ) ; return builder . build ( ) ;
|
public class NotificationGlobal { /** * Enable / disable notification view .
* @ param enable */
public void setViewEnabled ( boolean enable ) { } }
|
final NotificationRootView root = mWindow . mRoot ; root . setViewEnabled ( enable ) ; if ( enable && mView == null ) { mView = root . getView ( ) ; mView . initialize ( this ) ; mView . addStateListener ( new ViewStateListener ( ) ) ; }
|
public class DelegatedClientAuthenticationAction { /** * Find delegated client by name base client .
* @ param request the request
* @ param clientName the client name
* @ param service the service
* @ return the base client */
protected BaseClient < Credentials , CommonProfile > findDelegatedClientByName ( final HttpServletRequest request , final String clientName , final Service service ) { } }
|
val client = ( BaseClient < Credentials , CommonProfile > ) this . clients . findClient ( clientName ) ; LOGGER . debug ( "Delegated authentication client is [{}] with service [{}}" , client , service ) ; if ( service != null ) { request . setAttribute ( CasProtocolConstants . PARAMETER_SERVICE , service . getId ( ) ) ; if ( ! isDelegatedClientAuthorizedForService ( client , service ) ) { LOGGER . warn ( "Delegated client [{}] is not authorized by service [{}]" , client , service ) ; throw new UnauthorizedServiceException ( UnauthorizedServiceException . CODE_UNAUTHZ_SERVICE , StringUtils . EMPTY ) ; } } return client ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.