signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class SIMPReferenceStream { /** * Removes all items from this reference stream
* @ param tran - the transaction to perform the removals under
* @ throws MessageStoreException */
public void removeAll ( Transaction tran ) throws MessageStoreException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "removeAll" , tran ) ; while ( this . removeFirstMatching ( null , tran ) != null ) ; remove ( tran , NO_LOCK_ID ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "removeAll" ) ; |
public class HawkbitCommonUtil { /** * Get formatted label . Appends ellipses if content does not fit the label .
* @ param labelContent
* content
* @ return Label */
public static Label getFormatedLabel ( final String labelContent ) { } } | final Label labelValue = new Label ( labelContent , ContentMode . HTML ) ; labelValue . setSizeFull ( ) ; labelValue . addStyleName ( SPUIDefinitions . TEXT_STYLE ) ; labelValue . addStyleName ( "label-style" ) ; return labelValue ; |
public class PhynixxXAResource { /** * finds the transactional branch of the current XAResource associated with
* die XID
* Prepares to perform a commit . May actually perform a commit in the flag
* commitOnPrepare is set to true .
* This method is called to ask the resource manager to prepare for a
* transaction commit of the transaction specified in xid .
* @ param xid
* A global transaction identifier .
* @ return A value indicating the resource manager ' s vote on the outcome of
* the transaction . The possible values are : XA _ RDONLY or XA _ OK . If
* the resource manager wants to roll back the transaction , it
* should do so by throwing an appropriate XAException in the
* prepare method .
* @ throws XAException
* An error has occurred . Possible exception values are : XA _ RB ,
* XAER _ RMERR , XAER _ RMFAIL , XAER _ NOTA , XAER _ INVAL , or
* XAER _ PROTO . */
@ Override public int prepare ( Xid xid ) throws XAException { } } | try { LOG . debug ( "PhynixxXAResource[" + this . getId ( ) + "]:prepare prepare to perform a commit for XID=" + xid ) ; XATransactionalBranch < C > transactionalBranch = this . xaConnection . toGlobalTransactionBranch ( ) ; if ( xid == null ) { LOG . error ( "No XID" ) ; throw new XAException ( XAException . XAER_INVAL ) ; } if ( transactionalBranch == null ) { LOG . error ( "XAConnection is not associated to a global Transaction" ) ; throw new XAException ( XAException . XAER_PROTO ) ; } // assert that the current xaConnection is associated to this XID
if ( ! transactionalBranch . getXid ( ) . equals ( xid ) ) { LOG . error ( "XAResource " + this + " isnt't active for XID=" + xid ) ; throw new XAException ( XAException . XAER_PROTO ) ; } // must find connection for this transaction
int retVal = transactionalBranch . prepare ( ) ; if ( retVal == XAResource . XA_RDONLY ) { this . xaConnection . closeTransactionalBranch ( xid ) ; } return retVal ; } catch ( XAException xaExc ) { LOG . error ( "PhynixxXAResource[" + this . getId ( ) + "]:prepare xid='" + xid + " ERROR " + ConstantsPrinter . getXAErrorCode ( xaExc . errorCode ) ) ; throw xaExc ; } catch ( Exception ex ) { LOG . error ( "PhynixxXAResource.prepare(" + xid + ") on XAResourceProgressState " + this . xaId + " :: " + ex + "\n" + ExceptionUtils . getStackTrace ( ex ) ) ; throw new DelegatedRuntimeException ( "prepare(" + xid + ") on XAResourceProgressState " + this . xaId , ex ) ; } |
public class JShellTool { /** * start the built - in editor */
private boolean builtInEdit ( String initialText , Consumer < String > saveHandler , Consumer < String > errorHandler ) { } } | try { ServiceLoader < BuildInEditorProvider > sl = ServiceLoader . load ( BuildInEditorProvider . class ) ; // Find the highest ranking provider
BuildInEditorProvider provider = null ; for ( BuildInEditorProvider p : sl ) { if ( provider == null || p . rank ( ) > provider . rank ( ) ) { provider = p ; } } if ( provider != null ) { provider . edit ( getResourceString ( "jshell.label.editpad" ) , initialText , saveHandler , errorHandler ) ; return true ; } else { errormsg ( "jshell.err.no.builtin.editor" ) ; } } catch ( RuntimeException ex ) { errormsg ( "jshell.err.cant.launch.editor" , ex ) ; } fluffmsg ( "jshell.msg.try.set.editor" ) ; return false ; |
public class WriterVisualizer { /** * Will create a Writer of the outstream .
* @ param outstream */
@ Override public void writeOutput ( VisualizerInput input , OutputStream outstream ) { } } | try { OutputStreamWriter writer = new OutputStreamWriter ( outstream , getCharacterEncoding ( ) ) ; writeOutput ( input , writer ) ; writer . flush ( ) ; } catch ( IOException ex ) { log . error ( "Exception when writing visualizer output." , ex ) ; StringWriter strWriter = new StringWriter ( ) ; ex . printStackTrace ( new PrintWriter ( strWriter ) ) ; try { outstream . write ( strWriter . toString ( ) . getBytes ( "UTF-8" ) ) ; } catch ( IOException ex1 ) { log . error ( null , ex ) ; } } |
public class JodaBeanBinWriter { /** * Writes the bean to an array of bytes .
* @ param bean the bean to output , not null
* @ param rootType true to output the root type
* @ return the binary data , not null */
public byte [ ] write ( Bean bean , boolean rootType ) { } } | ByteArrayOutputStream baos = new ByteArrayOutputStream ( 1024 ) ; try { write ( bean , rootType , baos ) ; } catch ( IOException ex ) { throw new IllegalStateException ( ex ) ; } return baos . toByteArray ( ) ; |
public class DRL6StrictParser { /** * lhsEval : = EVAL LEFT _ PAREN conditionalExpression RIGHT _ PAREN
* @ param ce
* @ return
* @ throws org . antlr . runtime . RecognitionException */
private BaseDescr lhsEval ( CEDescrBuilder < ? , ? > ce ) throws RecognitionException { } } | EvalDescrBuilder < ? > eval = null ; try { eval = helper . start ( ce , EvalDescrBuilder . class , null ) ; match ( input , DRL6Lexer . ID , DroolsSoftKeywords . EVAL , null , DroolsEditorType . KEYWORD ) ; if ( state . failed ) return null ; if ( ! parseEvalExpression ( eval ) ) return null ; } catch ( RecognitionException e ) { throw e ; } finally { helper . end ( EvalDescrBuilder . class , eval ) ; } return eval != null ? eval . getDescr ( ) : null ; |
public class BsonGenerator { /** * Creates a new embedded document or array
* @ param array true if the embedded object is an array
* @ throws IOException if the document could not be created */
protected void _writeStartObject ( boolean array ) throws IOException { } } | _writeArrayFieldNameIfNeeded ( ) ; if ( _currentDocument != null ) { // embedded document / array
_buffer . putByte ( _typeMarker , array ? BsonConstants . TYPE_ARRAY : BsonConstants . TYPE_DOCUMENT ) ; } _currentDocument = new DocumentInfo ( _currentDocument , _buffer . size ( ) , array ) ; reserveHeader ( ) ; |
public class hqlParser { /** * hql . g : 653:1 : compoundExpr : ( collectionExpr | path | ( OPEN ! ( subQuery | ( expression ( COMMA ! expression ) * ) ) CLOSE ! ) ) ; */
public final hqlParser . compoundExpr_return compoundExpr ( ) throws RecognitionException { } } | hqlParser . compoundExpr_return retval = new hqlParser . compoundExpr_return ( ) ; retval . start = input . LT ( 1 ) ; CommonTree root_0 = null ; Token OPEN281 = null ; Token COMMA284 = null ; Token CLOSE286 = null ; ParserRuleReturnScope collectionExpr279 = null ; ParserRuleReturnScope path280 = null ; ParserRuleReturnScope subQuery282 = null ; ParserRuleReturnScope expression283 = null ; ParserRuleReturnScope expression285 = null ; CommonTree OPEN281_tree = null ; CommonTree COMMA284_tree = null ; CommonTree CLOSE286_tree = null ; try { // hql . g : 654:2 : ( collectionExpr | path | ( OPEN ! ( subQuery | ( expression ( COMMA ! expression ) * ) ) CLOSE ! ) )
int alt102 = 3 ; switch ( input . LA ( 1 ) ) { case ELEMENTS : case INDICES : { alt102 = 1 ; } break ; case IDENT : { alt102 = 2 ; } break ; case OPEN : { alt102 = 3 ; } break ; default : NoViableAltException nvae = new NoViableAltException ( "" , 102 , 0 , input ) ; throw nvae ; } switch ( alt102 ) { case 1 : // hql . g : 654:4 : collectionExpr
{ root_0 = ( CommonTree ) adaptor . nil ( ) ; pushFollow ( FOLLOW_collectionExpr_in_compoundExpr3322 ) ; collectionExpr279 = collectionExpr ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , collectionExpr279 . getTree ( ) ) ; } break ; case 2 : // hql . g : 655:4 : path
{ root_0 = ( CommonTree ) adaptor . nil ( ) ; pushFollow ( FOLLOW_path_in_compoundExpr3327 ) ; path280 = path ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , path280 . getTree ( ) ) ; } break ; case 3 : // hql . g : 656:4 : ( OPEN ! ( subQuery | ( expression ( COMMA ! expression ) * ) ) CLOSE ! )
{ root_0 = ( CommonTree ) adaptor . nil ( ) ; // hql . g : 656:4 : ( OPEN ! ( subQuery | ( expression ( COMMA ! expression ) * ) ) CLOSE ! )
// hql . g : 656:5 : OPEN ! ( subQuery | ( expression ( COMMA ! expression ) * ) ) CLOSE !
{ OPEN281 = ( Token ) match ( input , OPEN , FOLLOW_OPEN_in_compoundExpr3333 ) ; // hql . g : 656:11 : ( subQuery | ( expression ( COMMA ! expression ) * ) )
int alt101 = 2 ; int LA101_0 = input . LA ( 1 ) ; if ( ( LA101_0 == EOF || LA101_0 == CLOSE || LA101_0 == FROM || LA101_0 == GROUP || LA101_0 == HAVING || LA101_0 == ORDER || LA101_0 == SELECT || LA101_0 == SKIP || LA101_0 == TAKE || LA101_0 == UNION || LA101_0 == WHERE ) ) { alt101 = 1 ; } else if ( ( LA101_0 == ALL || LA101_0 == ANY || LA101_0 == AVG || LA101_0 == BNOT || LA101_0 == CASE || LA101_0 == COLON || LA101_0 == COUNT || LA101_0 == ELEMENTS || LA101_0 == EMPTY || LA101_0 == EXISTS || LA101_0 == FALSE || LA101_0 == IDENT || LA101_0 == INDICES || LA101_0 == MAX || ( LA101_0 >= MIN && LA101_0 <= MINUS ) || LA101_0 == NOT || ( LA101_0 >= NULL && LA101_0 <= NUM_LONG ) || LA101_0 == OPEN || ( LA101_0 >= PARAM && LA101_0 <= PLUS ) || LA101_0 == QUOTED_String || LA101_0 == SOME || LA101_0 == SUM || LA101_0 == TRUE ) ) { alt101 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 101 , 0 , input ) ; throw nvae ; } switch ( alt101 ) { case 1 : // hql . g : 656:13 : subQuery
{ pushFollow ( FOLLOW_subQuery_in_compoundExpr3338 ) ; subQuery282 = subQuery ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , subQuery282 . getTree ( ) ) ; } break ; case 2 : // hql . g : 656:24 : ( expression ( COMMA ! expression ) * )
{ // hql . g : 656:24 : ( expression ( COMMA ! expression ) * )
// hql . g : 656:25 : expression ( COMMA ! expression ) *
{ pushFollow ( FOLLOW_expression_in_compoundExpr3343 ) ; expression283 = expression ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , expression283 . getTree ( ) ) ; // hql . g : 656:36 : ( COMMA ! expression ) *
loop100 : while ( true ) { int alt100 = 2 ; int LA100_0 = input . LA ( 1 ) ; if ( ( LA100_0 == COMMA ) ) { alt100 = 1 ; } switch ( alt100 ) { case 1 : // hql . g : 656:37 : COMMA ! expression
{ COMMA284 = ( Token ) match ( input , COMMA , FOLLOW_COMMA_in_compoundExpr3346 ) ; pushFollow ( FOLLOW_expression_in_compoundExpr3349 ) ; expression285 = expression ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , expression285 . getTree ( ) ) ; } break ; default : break loop100 ; } } } } break ; } CLOSE286 = ( Token ) match ( input , CLOSE , FOLLOW_CLOSE_in_compoundExpr3356 ) ; } } break ; } retval . stop = input . LT ( - 1 ) ; retval . tree = ( CommonTree ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( CommonTree ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { // do for sure before leaving
} return retval ; |
public class JawnFilter { /** * Creates a new HttpServletRequest object .
* Useful , as we cannot modify an existing ServletRequest .
* Used when resources needs to have the { controller } stripped from the servletPath .
* @ author MTD */
private final static HttpServletRequest createServletRequest ( final HttpServletRequest req , final String translatedPath ) { } } | return new HttpServletRequestWrapper ( req ) { @ Override public String getServletPath ( ) { return translatedPath ; } } ; |
public class ConstructorInstantiator { /** * Evalutes { @ code constructor } against the currently found { @ code matchingConstructors } and determines if
* it ' s a better match to the given arguments , a worse match , or an equivalently good match .
* This method tries to emulate the behavior specified in
* < a href = " https : / / docs . oracle . com / javase / specs / jls / se8 / html / jls - 15 . html # jls - 15.12.2 " > JLS 15.12.2 . Compile - Time
* Step 2 : Determine Method Signature < / a > . A constructor X is deemed to be a better match than constructor Y to the
* given argument list if they are both applicable , constructor X has at least one parameter than is more specific
* than the corresponding parameter of constructor Y , and constructor Y has no parameter than is more specific than
* the corresponding parameter in constructor X .
* If { @ code constructor } is a better match than the constructors in the { @ code matchingConstructors } list , the list
* is cleared , and it ' s added to the list as a singular best matching constructor ( so far ) . < br / >
* If { @ code constructor } is an equivalently good of a match as the constructors in the { @ code matchingConstructors }
* list , it ' s added to the list . < br / >
* If { @ code constructor } is a worse match than the constructors in the { @ code matchingConstructors } list , the list
* will remain unchanged .
* @ param matchingConstructors A list of equivalently best matching constructors found so far
* @ param constructor The constructor to be evaluated against this list */
private void evaluateConstructor ( List < Constructor < ? > > matchingConstructors , Constructor < ? > constructor ) { } } | boolean newHasBetterParam = false ; boolean existingHasBetterParam = false ; Class < ? > [ ] paramTypes = constructor . getParameterTypes ( ) ; for ( int i = 0 ; i < paramTypes . length ; ++ i ) { Class < ? > paramType = paramTypes [ i ] ; if ( ! paramType . isPrimitive ( ) ) { for ( Constructor < ? > existingCtor : matchingConstructors ) { Class < ? > existingParamType = existingCtor . getParameterTypes ( ) [ i ] ; if ( paramType != existingParamType ) { if ( paramType . isAssignableFrom ( existingParamType ) ) { existingHasBetterParam = true ; } else { newHasBetterParam = true ; } } } } } if ( ! existingHasBetterParam ) { matchingConstructors . clear ( ) ; } if ( newHasBetterParam || ! existingHasBetterParam ) { matchingConstructors . add ( constructor ) ; } |
public class AWSCodePipelineClient { /** * Enables artifacts in a pipeline to transition to a stage in a pipeline .
* @ param enableStageTransitionRequest
* Represents the input of an EnableStageTransition action .
* @ return Result of the EnableStageTransition operation returned by the service .
* @ throws ValidationException
* The validation was specified in an invalid format .
* @ throws PipelineNotFoundException
* The specified pipeline was specified in an invalid format or cannot be found .
* @ throws StageNotFoundException
* The specified stage was specified in an invalid format or cannot be found .
* @ sample AWSCodePipeline . EnableStageTransition
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codepipeline - 2015-07-09 / EnableStageTransition "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public EnableStageTransitionResult enableStageTransition ( EnableStageTransitionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeEnableStageTransition ( request ) ; |
public class DocumentSubscriptions { /** * Creates a data subscription in a database . The subscription will expose all documents that match the specified subscription options for a given type .
* @ param options Subscription options
* @ param clazz Document class
* @ param < T > Document class
* @ return created subscription */
public < T > String create ( Class < T > clazz , SubscriptionCreationOptions options ) { } } | return create ( clazz , options , null ) ; |
public class Stream { /** * Returns a new stream of { @ link Indexed } that where each item ' s index is equal to its sequence number .
* @ return a new stream of { @ link Indexed } that where each item ' s index is equal to its sequence number . */
public Stream < Indexed < T > > index ( ) { } } | return map ( new Func1 < T , Indexed < T > > ( ) { int i ; @ Override public Indexed < T > call ( T value ) { return new Indexed < > ( i ++ , value ) ; } } ) ; |
public class InternalXbaseParser { /** * InternalXbase . g : 5216:1 : ruleXCatchClause returns [ EObject current = null ] : ( ( ( ' catch ' ) = > otherlv _ 0 = ' catch ' ) otherlv _ 1 = ' ( ' ( ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter ) ) otherlv _ 3 = ' ) ' ( ( lv _ expression _ 4_0 = ruleXExpression ) ) ) ; */
public final EObject ruleXCatchClause ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_0 = null ; Token otherlv_1 = null ; Token otherlv_3 = null ; EObject lv_declaredParam_2_0 = null ; EObject lv_expression_4_0 = null ; enterRule ( ) ; try { // InternalXbase . g : 5222:2 : ( ( ( ( ' catch ' ) = > otherlv _ 0 = ' catch ' ) otherlv _ 1 = ' ( ' ( ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter ) ) otherlv _ 3 = ' ) ' ( ( lv _ expression _ 4_0 = ruleXExpression ) ) ) )
// InternalXbase . g : 5223:2 : ( ( ( ' catch ' ) = > otherlv _ 0 = ' catch ' ) otherlv _ 1 = ' ( ' ( ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter ) ) otherlv _ 3 = ' ) ' ( ( lv _ expression _ 4_0 = ruleXExpression ) ) )
{ // InternalXbase . g : 5223:2 : ( ( ( ' catch ' ) = > otherlv _ 0 = ' catch ' ) otherlv _ 1 = ' ( ' ( ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter ) ) otherlv _ 3 = ' ) ' ( ( lv _ expression _ 4_0 = ruleXExpression ) ) )
// InternalXbase . g : 5224:3 : ( ( ' catch ' ) = > otherlv _ 0 = ' catch ' ) otherlv _ 1 = ' ( ' ( ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter ) ) otherlv _ 3 = ' ) ' ( ( lv _ expression _ 4_0 = ruleXExpression ) )
{ // InternalXbase . g : 5224:3 : ( ( ' catch ' ) = > otherlv _ 0 = ' catch ' )
// InternalXbase . g : 5225:4 : ( ' catch ' ) = > otherlv _ 0 = ' catch '
{ otherlv_0 = ( Token ) match ( input , 84 , FOLLOW_44 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_0 , grammarAccess . getXCatchClauseAccess ( ) . getCatchKeyword_0 ( ) ) ; } } otherlv_1 = ( Token ) match ( input , 49 , FOLLOW_13 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getXCatchClauseAccess ( ) . getLeftParenthesisKeyword_1 ( ) ) ; } // InternalXbase . g : 5235:3 : ( ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter ) )
// InternalXbase . g : 5236:4 : ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter )
{ // InternalXbase . g : 5236:4 : ( lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter )
// InternalXbase . g : 5237:5 : lv _ declaredParam _ 2_0 = ruleFullJvmFormalParameter
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXCatchClauseAccess ( ) . getDeclaredParamFullJvmFormalParameterParserRuleCall_2_0 ( ) ) ; } pushFollow ( FOLLOW_29 ) ; lv_declaredParam_2_0 = ruleFullJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXCatchClauseRule ( ) ) ; } set ( current , "declaredParam" , lv_declaredParam_2_0 , "org.eclipse.xtext.xbase.Xbase.FullJvmFormalParameter" ) ; afterParserOrEnumRuleCall ( ) ; } } } otherlv_3 = ( Token ) match ( input , 50 , FOLLOW_4 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_3 , grammarAccess . getXCatchClauseAccess ( ) . getRightParenthesisKeyword_3 ( ) ) ; } // InternalXbase . g : 5258:3 : ( ( lv _ expression _ 4_0 = ruleXExpression ) )
// InternalXbase . g : 5259:4 : ( lv _ expression _ 4_0 = ruleXExpression )
{ // InternalXbase . g : 5259:4 : ( lv _ expression _ 4_0 = ruleXExpression )
// InternalXbase . g : 5260:5 : lv _ expression _ 4_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXCatchClauseAccess ( ) . getExpressionXExpressionParserRuleCall_4_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_expression_4_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXCatchClauseRule ( ) ) ; } set ( current , "expression" , lv_expression_4_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class SchemaBuilder { /** * Shortcut for { @ link # createMaterializedView ( CqlIdentifier , CqlIdentifier )
* createMaterializedView ( CqlIdentifier . fromCql ( keyspaceName ) , CqlIdentifier . fromCql ( viewName ) } */
@ NonNull public static CreateMaterializedViewStart createMaterializedView ( @ Nullable String keyspace , @ NonNull String viewName ) { } } | return createMaterializedView ( keyspace == null ? null : CqlIdentifier . fromCql ( keyspace ) , CqlIdentifier . fromCql ( viewName ) ) ; |
public class UIViewRoot { /** * - - - - - Private Methods */
private static String getIdentifier ( String target ) { } } | // check map
String id = LOCATION_IDENTIFIER_MAP . get ( target ) ; if ( id == null ) { id = LOCATION_IDENTIFIER_PREFIX + target ; LOCATION_IDENTIFIER_MAP . put ( target , id ) ; } return id ; |
public class JsonReader { /** * Advances the position until after the next newline character . If the line
* is terminated by " \ r \ n " , the ' \ n ' must be consumed as whitespace by the
* caller . */
private void skipToEndOfLine ( ) throws IOException { } } | while ( pos < limit || fillBuffer ( 1 ) ) { char c = buffer [ pos ++ ] ; if ( c == '\n' ) { lineNumber ++ ; lineStart = pos ; break ; } else if ( c == '\r' ) { break ; } } |
public class LWJGL3TypeConversions { /** * Convert stencil ops to GL constants .
* @ param op The op .
* @ return The resulting GL constant . */
public static int stencilOperationToGL ( final JCGLStencilOperation op ) { } } | switch ( op ) { case STENCIL_OP_DECREMENT : return GL11 . GL_DECR ; case STENCIL_OP_DECREMENT_WRAP : return GL14 . GL_DECR_WRAP ; case STENCIL_OP_INCREMENT : return GL11 . GL_INCR ; case STENCIL_OP_INCREMENT_WRAP : return GL14 . GL_INCR_WRAP ; case STENCIL_OP_INVERT : return GL11 . GL_INVERT ; case STENCIL_OP_KEEP : return GL11 . GL_KEEP ; case STENCIL_OP_REPLACE : return GL11 . GL_REPLACE ; case STENCIL_OP_ZERO : return GL11 . GL_ZERO ; } throw new UnreachableCodeException ( ) ; |
public class FileUtil { /** * Utility to convert { @ link File } to { @ link URL } .
* @ param filePath the path of the file
* @ return the { @ link URL } representation of the file .
* @ throws MalformedURLException
* @ throws IllegalArgumentException if the file path is null , empty or blank */
public static URL convertFileToURL ( String filePath ) throws MalformedURLException { } } | CheckArg . isNotEmpty ( filePath , "filePath" ) ; File file = new File ( filePath . trim ( ) ) ; return file . toURI ( ) . toURL ( ) ; |
public class FullDTDReader { /** * Method called to read in the external subset definition . */
public static DTDSubset readExternalSubset ( WstxInputSource src , ReaderConfig cfg , DTDSubset intSubset , boolean constructFully , int xmlVersion ) throws XMLStreamException { } } | FullDTDReader r = new FullDTDReader ( src , cfg , intSubset , constructFully , xmlVersion ) ; return r . parseDTD ( ) ; |
public class CmsCreateSiteThread { /** * Saves outputstream of favicon as resource . < p >
* @ param siteRoot site root of considered site . */
private void saveFavIcon ( String siteRoot ) { } } | if ( m_os == null ) { return ; } if ( m_os . size ( ) == 0 ) { return ; } getReport ( ) . println ( Messages . get ( ) . container ( Messages . RPT_SITE_SET_FAVICON_0 ) , I_CmsReport . FORMAT_DEFAULT ) ; CmsResource favicon = null ; try { favicon = m_cms . createResource ( siteRoot + CmsSiteManager . FAVICON , OpenCms . getResourceManager ( ) . getResourceType ( CmsResourceTypeImage . getStaticTypeName ( ) ) ) ; } catch ( CmsVfsResourceAlreadyExistsException e ) { // OK , Resource already there
try { favicon = m_cms . readResource ( siteRoot + CmsSiteManager . FAVICON ) ; } catch ( CmsException e2 ) { // no , it wasn ' t . .
getReport ( ) . println ( Messages . get ( ) . container ( Messages . RPT_SITE_ERROR_FAVICON_0 ) , I_CmsReport . FORMAT_ERROR ) ; getReport ( ) . println ( e ) ; getReport ( ) . println ( e2 ) ; return ; } } catch ( CmsIllegalArgumentException | CmsException e ) { getReport ( ) . println ( Messages . get ( ) . container ( Messages . RPT_SITE_ERROR_FAVICON_0 ) , I_CmsReport . FORMAT_ERROR ) ; getReport ( ) . println ( e ) ; return ; } try { m_cms . lockResource ( siteRoot + CmsSiteManager . FAVICON ) ; CmsFile faviconFile = new CmsFile ( favicon ) ; faviconFile . setContents ( m_os . toByteArray ( ) ) ; m_cms . writeFile ( faviconFile ) ; m_cms . unlockResource ( siteRoot + CmsSiteManager . FAVICON ) ; } catch ( CmsException e ) { getReport ( ) . println ( Messages . get ( ) . container ( Messages . RPT_SITE_ERROR_FAVICON_0 ) , I_CmsReport . FORMAT_ERROR ) ; getReport ( ) . println ( e ) ; return ; } |
public class ContentSpec { /** * Set the Copyright Year ( s ) of the Content Specification and the book it creates .
* @ param copyrightYear The year ( s ) for the Copyright . */
public void setCopyrightYear ( final String copyrightYear ) { } } | if ( copyrightYear == null && this . copyrightYear == null ) { return ; } else if ( copyrightYear == null ) { removeChild ( this . copyrightYear ) ; this . copyrightYear = null ; } else if ( this . copyrightYear == null ) { this . copyrightYear = new KeyValueNode < String > ( CommonConstants . CS_COPYRIGHT_YEAR_TITLE , copyrightYear ) ; appendChild ( this . copyrightYear , false ) ; } else { this . copyrightYear . setValue ( copyrightYear ) ; } |
public class FastBlurFilter { /** * < p > Writes a rectangular area of pixels in the destination
* < code > BufferedImage < / code > . Calling this method on
* an image of type different from < code > BufferedImage . TYPE _ INT _ ARGB < / code >
* and < code > BufferedImage . TYPE _ INT _ RGB < / code > will unmanage the image . < / p >
* @ param img the destination image
* @ param x the x location at which to start storing pixels
* @ param y the y location at which to start storing pixels
* @ param w the width of the rectangle of pixels to store
* @ param h the height of the rectangle of pixels to store
* @ param pixels an array of pixels , stored as integers
* @ throws IllegalArgumentException is < code > pixels < / code > is non - null and
* of length & lt ; w * h */
private static void setPixels ( BufferedImage img , int x , int y , int w , int h , int [ ] pixels ) { } } | if ( pixels == null || w == 0 || h == 0 ) { return ; } else if ( pixels . length < w * h ) { throw new IllegalArgumentException ( "pixels array must have a length >= w*h" ) ; } final int imageType = img . getType ( ) ; if ( imageType == BufferedImage . TYPE_INT_ARGB || imageType == BufferedImage . TYPE_INT_RGB ) { final WritableRaster raster = img . getRaster ( ) ; raster . setDataElements ( x , y , w , h , pixels ) ; } else { // Unmanages the image
img . setRGB ( x , y , w , h , pixels , 0 , w ) ; } |
public class TextBoxView { /** * Sets the font variant .
* @ param newFontVariant
* the new font variant */
protected void setFontVariant ( String newFontVariant ) { } } | if ( fontVariant == null || ! fontVariant . equals ( newFontVariant ) ) { FontVariant val [ ] = FontVariant . values ( ) ; for ( FontVariant aVal : val ) { if ( aVal . toString ( ) . equals ( newFontVariant ) ) { fontVariant = newFontVariant ; invalidateCache ( ) ; return ; } } } |
public class FluentMatchingR { /** * Runs through the possible matches and executes the specified action of the first match and
* returns the result .
* @ throws MatchException if no match is found . */
public R getMatch ( ) { } } | for ( Pattern < T , R > pattern : patterns ) { if ( pattern . matches ( value ) ) { return pattern . apply ( value ) ; } } throw new MatchException ( "No match found for " + value ) ; |
public class GetCampaignActivitiesRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetCampaignActivitiesRequest getCampaignActivitiesRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getCampaignActivitiesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getCampaignActivitiesRequest . getApplicationId ( ) , APPLICATIONID_BINDING ) ; protocolMarshaller . marshall ( getCampaignActivitiesRequest . getCampaignId ( ) , CAMPAIGNID_BINDING ) ; protocolMarshaller . marshall ( getCampaignActivitiesRequest . getPageSize ( ) , PAGESIZE_BINDING ) ; protocolMarshaller . marshall ( getCampaignActivitiesRequest . getToken ( ) , TOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class AmazonRedshiftClient { /** * Deletes the specified manual snapshot . The snapshot must be in the < code > available < / code > state , with no other
* users authorized to access the snapshot .
* Unlike automated snapshots , manual snapshots are retained even after you delete your cluster . Amazon Redshift
* does not delete your manual snapshots . You must delete manual snapshot explicitly to avoid getting charged . If
* other accounts are authorized to access the snapshot , you must revoke all of the authorizations before you can
* delete the snapshot .
* @ param deleteClusterSnapshotRequest
* @ return Result of the DeleteClusterSnapshot operation returned by the service .
* @ throws InvalidClusterSnapshotStateException
* The specified cluster snapshot is not in the < code > available < / code > state , or other accounts are
* authorized to access the snapshot .
* @ throws ClusterSnapshotNotFoundException
* The snapshot identifier does not refer to an existing cluster snapshot .
* @ sample AmazonRedshift . DeleteClusterSnapshot
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / redshift - 2012-12-01 / DeleteClusterSnapshot " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public Snapshot deleteClusterSnapshot ( DeleteClusterSnapshotRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteClusterSnapshot ( request ) ; |
public class Symm { /** * Generate a 2048 based Key from which we extract our code base
* @ return
* @ throws IOException */
public byte [ ] keygen ( ) throws IOException { } } | byte inkey [ ] = new byte [ 0x600 ] ; new SecureRandom ( ) . nextBytes ( inkey ) ; ByteArrayOutputStream baos = new ByteArrayOutputStream ( 0x800 ) ; base64url . encode ( new ByteArrayInputStream ( inkey ) , baos ) ; return baos . toByteArray ( ) ; |
public class StreamExecutionEnvironment { /** * Ads a data source with a custom type information thus opening a
* { @ link DataStream } . Only in very special cases does the user need to
* support type information . Otherwise use
* { @ link # addSource ( org . apache . flink . streaming . api . functions . source . SourceFunction ) }
* @ param function
* the user defined function
* @ param < OUT >
* type of the returned stream
* @ param typeInfo
* the user defined type information for the stream
* @ return the data stream constructed */
public < OUT > DataStreamSource < OUT > addSource ( SourceFunction < OUT > function , TypeInformation < OUT > typeInfo ) { } } | return addSource ( function , "Custom Source" , typeInfo ) ; |
public class BigtableClusterUtilities { /** * Sets a cluster size to a specific size .
* @ param clusterId
* @ param zoneId
* @ param newSize
* @ throws InterruptedException if the cluster is in the middle of updating , and an interrupt was
* received */
public void setClusterSize ( String clusterId , String zoneId , int newSize ) throws InterruptedException { } } | setClusterSize ( instanceName . toClusterName ( clusterId ) . getClusterName ( ) , newSize ) ; |
public class KeyedStream { /** * Applies a reduce transformation on the grouped data stream grouped on by
* the given key position . The { @ link ReduceFunction } will receive input
* values based on the key value . Only input values with the same key will
* go to the same reducer .
* @ param reducer
* The { @ link ReduceFunction } that will be called for every
* element of the input values with the same key .
* @ return The transformed DataStream . */
public SingleOutputStreamOperator < T > reduce ( ReduceFunction < T > reducer ) { } } | return transform ( "Keyed Reduce" , getType ( ) , new StreamGroupedReduce < T > ( clean ( reducer ) , getType ( ) . createSerializer ( getExecutionConfig ( ) ) ) ) ; |
public class XMLUtil { /** * Replies the boolean value that corresponds to the specified attribute ' s path .
* < p > The path is an ordered list of tag ' s names and ended by the name of
* the attribute .
* Be careful about the fact that the names are case sensitives .
* @ param document is the XML document to explore .
* @ param path is the list of and ended by the attribute ' s name .
* @ return the boolean value of the specified attribute or < code > 0 < / code > . */
@ Pure public static boolean getAttributeBoolean ( Node document , String ... path ) { } } | assert document != null : AssertMessages . notNullParameter ( 0 ) ; return getAttributeBooleanWithDefault ( document , true , false , path ) ; |
public class StreamMetrics { /** * This method increments the counter of failed Stream seal operations in the system as well as the failed seal
* attempts for this specific Stream .
* @ param scope Scope .
* @ param streamName Name of the Stream . */
public void sealStreamFailed ( String scope , String streamName ) { } } | DYNAMIC_LOGGER . incCounterValue ( globalMetricName ( SEAL_STREAM_FAILED ) , 1 ) ; DYNAMIC_LOGGER . incCounterValue ( SEAL_STREAM_FAILED , 1 , streamTags ( scope , streamName ) ) ; |
public class BoundedBuffer { /** * D638088 - implemented split locks for get queue .
* Best - effort notification of a single thread waiting on the split get - queue
* locks : scans the lock array starting from the current wait index , re - checks
* the waiter count while holding the lock before notifying , and restarts the
* scan when a new waiter appears at a different index . A notification may be
* lost , but since no thread waits endlessly a waiting thread is either
* notified or eventually wakes up on its own . */
private void notifyGet_ ( ) { } } | // a notification may be lost in some cases - however
// as none of the threads wait endlessly , a waiting thread
// will either be notified , or will eventually wakeup
int lastWaitIndex = getQueueIndex ( getQueueLocks_ , getQueueCounter_ , false ) ; int lockIndex = lastWaitIndex ; for ( int i = 0 ; i < getQueueLocks_ . length ; i ++ ) { // are threads waiting on this queue ?
if ( getQueueLocks_ [ lockIndex ] . threadsWaiting > 0 ) { synchronized ( getQueueLocks_ [ lockIndex ] ) { // make sure we are actually notifying somebody
// now that the lock is held
if ( getQueueLocks_ [ lockIndex ] . threadsWaiting > 0 ) { getQueueLocks_ [ lockIndex ] . notify ( ) ; return ; } else { // somebody stole my notify , make up for it
// by allowing another attempt
i -- ; } } } // check to see whether a new thread has waited
int checkIndex = getQueueIndex ( getQueueLocks_ , getQueueCounter_ , false ) ; if ( checkIndex != lastWaitIndex ) { // restart scan from the new wait index
lockIndex = checkIndex ; lastWaitIndex = lockIndex ; i = 0 ; } else { // increment to next element
lockIndex = ++ lockIndex % getQueueLocks_ . length ; } } // D638088 - if we get here , the entire lock array was scanned and
// nobody was found to notify . |
public class TuneInfos { /** * Add each element of collection c as new lines in field b */
public void add ( byte b , Collection c ) { } } | if ( ( c != null ) && ( c . size ( ) > 0 ) ) { String s2 = get ( b ) ; for ( Object aC : c ) { String s = ( String ) aC ; if ( s2 == null ) s2 = s ; else s2 += lineSeparator + s ; } set ( b , s2 ) ; } |
public class OpBool { /** * Create a String expression from a Expression
* @ param left
* @ param right
* @ return String expression
* @ throws TemplateException */
public static ExprBoolean toExprBoolean ( Expression left , Expression right , int operation ) { } } | if ( left instanceof Literal && right instanceof Literal ) { Boolean l = ( ( Literal ) left ) . getBoolean ( null ) ; Boolean r = ( ( Literal ) right ) . getBoolean ( null ) ; if ( l != null && r != null ) { switch ( operation ) { case Factory . OP_BOOL_AND : return left . getFactory ( ) . createLitBoolean ( l . booleanValue ( ) && r . booleanValue ( ) , left . getStart ( ) , right . getEnd ( ) ) ; case Factory . OP_BOOL_OR : return left . getFactory ( ) . createLitBoolean ( l . booleanValue ( ) || r . booleanValue ( ) , left . getStart ( ) , right . getEnd ( ) ) ; case Factory . OP_BOOL_XOR : return left . getFactory ( ) . createLitBoolean ( l . booleanValue ( ) ^ r . booleanValue ( ) , left . getStart ( ) , right . getEnd ( ) ) ; } } } return new OpBool ( left , right , operation ) ; |
public class NumbersAreUnsignedIntsLinkedHashMap { /** * a ctor that allows passing a map . */
@ Override public Object put ( String key , Object val ) { } } | val = val != null && val instanceof Number ? Number . class . cast ( val ) . intValue ( ) : val ; return super . put ( key , val ) ; |
public class YObject { /** * syck _ yobject _ initialize */
@ JRubyMethod public static IRubyObject yaml_initialize ( IRubyObject self , IRubyObject klass , IRubyObject ivars ) { } } | ( ( RubyObject ) self ) . fastSetInstanceVariable ( "@class" , klass ) ; ( ( RubyObject ) self ) . fastSetInstanceVariable ( "@ivars" , ivars ) ; return self ; |
public class LiveEventsInner { /** * Create Live Event .
* Creates a Live Event .
* @ param resourceGroupName The name of the resource group within the Azure subscription .
* @ param accountName The Media Services account name .
* @ param liveEventName The name of the Live Event .
* @ param parameters Live Event properties needed for creation .
* @ param autoStart The flag indicates if auto start the Live Event .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < LiveEventInner > beginCreateAsync ( String resourceGroupName , String accountName , String liveEventName , LiveEventInner parameters , Boolean autoStart , final ServiceCallback < LiveEventInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( beginCreateWithServiceResponseAsync ( resourceGroupName , accountName , liveEventName , parameters , autoStart ) , serviceCallback ) ; |
public class ExceptionUtils { /** * Returns a { @ link Mono } containing an { @ link IllegalArgumentException } with the configured message
* @ param format A < a href = " . . / util / Formatter . html # syntax " > format string < / a >
* @ param args Arguments referenced by the format specifiers in the format string . If there are more arguments than format specifiers , the extra arguments are ignored . The number of arguments
* is variable and may be zero . The maximum number of arguments is limited by the maximum dimension of a Java array as defined by < cite > The Java & trade ; Virtual Machine
* Specification < / cite > . The behaviour on a { @ code null } argument depends on the < a href = " . . / util / Formatter . html # syntax " > conversion < / a > .
* @ param < T > the type of the { @ link Mono } being converted
* @ return a { @ link Mono } containing the error */
public static < T > Mono < T > illegalArgument ( String format , Object ... args ) { } } | String message = String . format ( format , args ) ; return Mono . error ( new IllegalArgumentException ( message ) ) ; |
public class PojoDataParser { /** * Parses a JSON array of card definitions into a list of valid { @ link Card } s .
* Requires a CardResolver and an MVHelper to be registered in the ServiceManager .
* Delegate cards ( IDelegateCard ) are expanded into their child cards , each child
* inheriting the parent's id , type and rowId ; only valid cards are kept . The
* resulting list is also registered on the cell resolver . */
@ NonNull @ Override public List < Card > parseGroup ( @ NonNull JSONArray data , @ NonNull final ServiceManager serviceManager ) { } } | final CardResolver cardResolver = serviceManager . getService ( CardResolver . class ) ; Preconditions . checkState ( cardResolver != null , "Must register CardResolver into ServiceManager first" ) ; final MVHelper cellResolver = serviceManager . getService ( MVHelper . class ) ; Preconditions . checkState ( cellResolver != null , "Must register CellResolver into ServiceManager first" ) ; final int size = data . length ( ) ; final List < Card > result = new ArrayList < > ( size ) ; for ( int i = 0 ; i < size ; i ++ ) { JSONObject cardData = data . optJSONObject ( i ) ; final Card card = parseSingleGroup ( cardData , serviceManager ) ; if ( card != null ) { if ( card instanceof IDelegateCard ) { List < Card > cards = ( ( IDelegateCard ) card ) . getCards ( new CardResolver ( ) { @ Override public Card create ( String type ) { Card c = cardResolver . create ( type ) ; c . serviceManager = serviceManager ; c . id = card . id ; c . setStringType ( type ) ; c . rowId = card . rowId ; return c ; } } ) ; for ( Card c : cards ) { if ( c . isValid ( ) ) { result . add ( c ) ; } } } else { result . add ( card ) ; } } } cellResolver . resolver ( ) . setCards ( result ) ; return result ; |
public class ArrayFunctions { /** * Returned expression results in the new array with value pre - pended . */
public static Expression arrayPrepend ( Expression expression , Expression value ) { } } | return x ( "ARRAY_PREPEND(" + value . toString ( ) + ", " + expression . toString ( ) + ")" ) ; |
public class CmsToolbarNewButton { /** * Creates a list item for a new navigation level entry . < p >
* NOTE ( review ) : the previous summary said " redirect " , but the item is built
* from getNewNavigationLevelElementInfo ( ) , so it represents a navigation
* level - confirm the intended wording .
* @ return the new list item */
private CmsCreatableListItem makeNavigationLevelItem ( ) { } } | CmsNewResourceInfo typeInfo = getController ( ) . getData ( ) . getNewNavigationLevelElementInfo ( ) ; CmsListItemWidget widget = new CmsListItemWidget ( typeInfo ) ; CmsCreatableListItem listItem = new CmsCreatableListItem ( widget , typeInfo , NewEntryType . regular ) ; listItem . initMoveHandle ( CmsSitemapView . getInstance ( ) . getTree ( ) . getDnDHandler ( ) ) ; return listItem ; |
public class BatchCreateObjectResponseMarshaller { /** * Marshall the given parameter object . */
public void marshall ( BatchCreateObjectResponse batchCreateObjectResponse , ProtocolMarshaller protocolMarshaller ) { } } | if ( batchCreateObjectResponse == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( batchCreateObjectResponse . getObjectIdentifier ( ) , OBJECTIDENTIFIER_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class BellaDatiServiceImpl { /** * Deserialization . Sets up the element lists and maps as empty objects .
* @ param in Input stream of object to be de - serialized
* @ throws IOException Thrown if IO error occurs during class reading
* @ throws ClassNotFoundException Thrown if desired class does not exist */
private void readObject ( ObjectInputStream in ) throws IOException , ClassNotFoundException { } } | in . defaultReadObject ( ) ; try { Field domainList = getClass ( ) . getDeclaredField ( "domainList" ) ; domainList . setAccessible ( true ) ; domainList . set ( this , new DomainList ( ) ) ; Field dashboardList = getClass ( ) . getDeclaredField ( "dashboardList" ) ; dashboardList . setAccessible ( true ) ; dashboardList . set ( this , new DashboardList ( ) ) ; Field reportList = getClass ( ) . getDeclaredField ( "reportList" ) ; reportList . setAccessible ( true ) ; reportList . set ( this , new ReportList ( ) ) ; Field dataSetList = getClass ( ) . getDeclaredField ( "dataSetList" ) ; dataSetList . setAccessible ( true ) ; dataSetList . set ( this , new DataSetList ( ) ) ; Field commentLists = getClass ( ) . getDeclaredField ( "commentLists" ) ; commentLists . setAccessible ( true ) ; commentLists . set ( this , Collections . synchronizedMap ( new HashMap < String , PaginatedList < Comment > > ( ) ) ) ; Field reportAttributeValues = getClass ( ) . getDeclaredField ( "dataSetAttributeValues" ) ; reportAttributeValues . setAccessible ( true ) ; reportAttributeValues . set ( this , new HashMap < String , Map < String , CachedListImpl < AttributeValue > > > ( ) ) ; Field dataSourceList = getClass ( ) . getDeclaredField ( "dataSourceList" ) ; dataSourceList . setAccessible ( true ) ; dataSourceList . set ( this , new HashMap < String , CachedListImpl < DataSource > > ( ) ) ; Field importFormList = getClass ( ) . getDeclaredField ( "importFormList" ) ; importFormList . setAccessible ( true ) ; importFormList . set ( this , new ImportFormList ( ) ) ; Field dataSourceImportList = getClass ( ) . getDeclaredField ( "dataSourceImportList" ) ; dataSourceImportList . setAccessible ( true ) ; dataSourceImportList . 
set ( this , new HashMap < String , CachedListImpl < DataSourceImport > > ( ) ) ; } catch ( NoSuchFieldException e ) { throw new InternalConfigurationException ( "Failed to set service fields" , e ) ; } catch ( IllegalAccessException e ) { throw new InternalConfigurationException ( "Failed to set service fields" , e ) ; } catch ( SecurityException e ) { throw new InternalConfigurationException ( "Failed to set service fields" , e ) ; } catch ( IllegalArgumentException e ) { throw new InternalConfigurationException ( "Failed to set service fields" , e ) ; } |
public class Light { /** * Creates new contact filter for this light with given parameters
* @ param categoryBits - see { @ link Filter # categoryBits }
* @ param groupIndex - see { @ link Filter # groupIndex }
* @ param maskBits - see { @ link Filter # maskBits } */
public void setContactFilter ( short categoryBits , short groupIndex , short maskBits ) { } } | filterA = new Filter ( ) ; filterA . categoryBits = categoryBits ; filterA . groupIndex = groupIndex ; filterA . maskBits = maskBits ; |
public class JCuda { /** * Make a compute stream wait on an event .
* < pre >
* cudaError _ t cudaStreamWaitEvent (
* cudaStream _ t stream ,
* cudaEvent _ t event ,
* unsigned int flags )
* < / pre >
* < div >
* < p > Make a compute stream wait on an event .
* Makes all future work submitted to < tt > stream < / tt > wait until < tt > event < / tt > reports completion before beginning execution . This
* synchronization will be performed efficiently on the device . The event
* < tt > event < / tt > may be from a different
* context than < tt > stream < / tt > , in which case this function will perform
* cross - device synchronization .
* < p > The stream < tt > stream < / tt > will wait
* only for the completion of the most recent host call to cudaEventRecord ( )
* on < tt > event < / tt > . Once this call has returned , any functions
* ( including cudaEventRecord ( ) and cudaEventDestroy ( ) ) may be called on
* < tt > event < / tt > again , and the subsequent calls will not have any
* effect on < tt > stream < / tt > .
* < p > If < tt > stream < / tt > is NULL , any future
* work submitted in any stream will wait for < tt > event < / tt > to complete
* before beginning execution . This effectively creates a barrier for all
* future work submitted to the device on
* this thread .
* < p > If cudaEventRecord ( ) has not been called
* on < tt > event < / tt > , this call acts as if the record has already
* completed , and so is a functional no - op .
* < div >
* < span > Note : < / span >
* < p > Note that this
* function may also return error codes from previous , asynchronous
* launches .
* < / div >
* < / div >
* @ param stream Stream to wait
* @ param event Event to wait on
* @ param flags Parameters for the operation ( must be 0)
* @ return cudaSuccess , cudaErrorInvalidResourceHandle
* @ see JCuda # cudaStreamCreate
* @ see JCuda # cudaStreamCreateWithFlags
* @ see JCuda # cudaStreamQuery
* @ see JCuda # cudaStreamSynchronize
* @ see JCuda # cudaStreamAddCallback
* @ see JCuda # cudaStreamDestroy */
public static int cudaStreamWaitEvent ( cudaStream_t stream , cudaEvent_t event , int flags ) { } } | return checkResult ( cudaStreamWaitEventNative ( stream , event , flags ) ) ; |
public class JedisSortedSet { /** * Adds to this set all of the elements in the specified map of members and their score .
* @ param scoredMember the members to add together with their scores
* @ return the number of members actually added */
public long addAll ( final Map < String , Double > scoredMember ) { } } | return doWithJedis ( new JedisCallable < Long > ( ) { @ Override public Long call ( Jedis jedis ) { return jedis . zadd ( getKey ( ) , scoredMember ) ; } } ) ; |
public class ImageModerationsImpl { /** * Returns probabilities of the image containing racy or adult content .
* @ param imageStream The image file .
* @ param evaluateFileInputOptionalParameter the object representing the optional parameters to be set before calling this API
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < Evaluate > evaluateFileInputAsync ( byte [ ] imageStream , EvaluateFileInputOptionalParameter evaluateFileInputOptionalParameter , final ServiceCallback < Evaluate > serviceCallback ) { } } | return ServiceFuture . fromResponse ( evaluateFileInputWithServiceResponseAsync ( imageStream , evaluateFileInputOptionalParameter ) , serviceCallback ) ; |
public class DefaultPermissionChecker { /** * Gets the permission to access an inode path given a user and its groups .
* Privileged users get all bits immediately ; otherwise every ancestor inode
* must be executable ( any failure yields NONE ) , and the permission of the
* final inode is converted to mode bits ( NONE when that inode is null ) .
* @ param user the user
* @ param groups the groups this user belongs to
* @ param path the inode path ( used for error reporting in the checks )
* @ param inodeList the list of inodes in the path ; must be non - empty
* @ return the permission */
private Mode . Bits getPermissionInternal ( String user , List < String > groups , String path , List < InodeView > inodeList ) { } } | int size = inodeList . size ( ) ; Preconditions . checkArgument ( size > 0 , PreconditionMessage . EMPTY_FILE_INFO_LIST_FOR_PERMISSION_CHECK ) ; // bypass checking permission for super user or super group of Alluxio file system .
if ( isPrivilegedUser ( user , groups ) ) { return Mode . Bits . ALL ; } // traverses from root to the parent dir to all inodes included by this path are executable
for ( int i = 0 ; i < size - 1 ; i ++ ) { try { checkInode ( user , groups , inodeList . get ( i ) , Mode . Bits . EXECUTE , path ) ; } catch ( AccessControlException e ) { return Mode . Bits . NONE ; } } InodeView inode = inodeList . get ( inodeList . size ( ) - 1 ) ; if ( inode == null ) { return Mode . Bits . NONE ; } return inode . getPermission ( user , groups ) . toModeBits ( ) ; |
public class DefaultAnimationsBuilder { /** * @ param croutonView
* The croutonView which gets animated .
* @ return The default Animation for a hiding { @ link Crouton } . */
static Animation buildDefaultSlideOutUpAnimation ( View croutonView ) { } } | if ( ! areLastMeasuredOutAnimationHeightAndCurrentEqual ( croutonView ) || ( null == slideOutUpAnimation ) ) { slideOutUpAnimation = new TranslateAnimation ( 0 , 0 , // X : from , to
0 , - croutonView . getMeasuredHeight ( ) // Y : from , to
) ; slideOutUpAnimation . setDuration ( DURATION ) ; setLastOutAnimationHeight ( croutonView . getMeasuredHeight ( ) ) ; } return slideOutUpAnimation ; |
public class Dynamic { /** * Represents a constant that is resolved by invoking a constructor .
* @ param constructor The constructor to invoke to create the represented constant value .
* @ param rawArguments The constructor ' s constant arguments .
* @ return A dynamic constant that is resolved by the supplied constuctor . */
public static Dynamic ofInvocation ( Constructor < ? > constructor , List < ? > rawArguments ) { } } | return ofInvocation ( new MethodDescription . ForLoadedConstructor ( constructor ) , rawArguments ) ; |
public class EcorePackageRenameStrategy { /** * Replies the text region that is corresponding to the package name .
* @ param script the script .
* @ return the region . */
protected ITextRegion getOriginalPackageRegion ( final SarlScript script ) { } } | return this . locationInFileProvider . getFullTextRegion ( script , XtendPackage . Literals . XTEND_FILE__PACKAGE , 0 ) ; |
public class HadoopJobUtils { /** * Invalidates a Hadoop authentication token file */
public static void cancelHadoopTokens ( HadoopSecurityManager hadoopSecurityManager , String userToProxy , File tokenFile , Logger log ) { } } | if ( tokenFile == null ) { return ; } try { hadoopSecurityManager . cancelTokens ( tokenFile , userToProxy , log ) ; } catch ( HadoopSecurityManagerException e ) { log . error ( e . getCause ( ) + e . getMessage ( ) ) ; } catch ( Exception e ) { log . error ( e . getCause ( ) + e . getMessage ( ) ) ; } if ( tokenFile . exists ( ) ) { tokenFile . delete ( ) ; } |
public class ParseUtils { /** * Returns the index of the first character in toParse from idx that is not a " space " .
* @ param toParse the string to skip space on .
* @ param idx the index to start skipping space from .
* @ return the index of the first character in toParse from idx that is not a " space . */
public static int skipSpaces ( String toParse , int idx ) { } } | while ( isBlank ( toParse . charAt ( idx ) ) && idx < toParse . length ( ) ) ++ idx ; return idx ; |
public class CacheOnDisk { /** * Call this method to read a specified template which contains the cache ids from the disk .
* @ param template
* - template id .
* @ param delete
* - boolean to delete the template after reading
* @ return valueSet - the collection of cache ids . */
public ValueSet readTemplate ( String template , boolean delete ) { } } | Result result = htod . readTemplate ( template , delete ) ; if ( result . returnCode == HTODDynacache . DISK_EXCEPTION ) { stopOnError ( result . diskException ) ; this . htod . returnToResultPool ( result ) ; return HTODDynacache . EMPTY_VS ; } ValueSet valueSet = ( ValueSet ) result . data ; if ( valueSet == null ) { valueSet = HTODDynacache . EMPTY_VS ; } this . htod . returnToResultPool ( result ) ; return valueSet ; |
public class AiMesh { /** * Returns the vertex color . < p >
* This method is part of the wrapped API ( see { @ link AiWrapperProvider }
* for details on wrappers ) . < p >
* The built - in behavior is to return a { @ link AiColor } .
* @ param vertex the vertex index
* @ param colorset the color set
* @ param wrapperProvider the wrapper provider ( used for type inference )
* @ return the vertex color wrapped as object */
public < V3 , M4 , C , N , Q > C getWrappedColor ( int vertex , int colorset , AiWrapperProvider < V3 , M4 , C , N , Q > wrapperProvider ) { } } | if ( ! hasColors ( colorset ) ) { throw new IllegalStateException ( "mesh has no colorset " + colorset ) ; } checkVertexIndexBounds ( vertex ) ; return wrapperProvider . wrapColor ( m_colorsets [ colorset ] , vertex * 4 * SIZEOF_FLOAT ) ; |
public class NameNode { /** * { @ inheritDoc }
* Delegates directly to the namesystem to concatenate the { @ code src }
* files into the target file { @ code trg } . */
public void concat ( String trg , String [ ] src , boolean restricted ) throws IOException { } } | namesystem . concat ( trg , src , restricted ) ; |
public class WebhooksInner { /** * Create the webhook identified by webhook name .
* @ param resourceGroupName Name of an Azure Resource group .
* @ param automationAccountName The name of the automation account .
* @ param webhookName The webhook name .
* @ param parameters The create or update parameters for webhook .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < WebhookInner > createOrUpdateAsync ( String resourceGroupName , String automationAccountName , String webhookName , WebhookCreateOrUpdateParameters parameters , final ServiceCallback < WebhookInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , automationAccountName , webhookName , parameters ) , serviceCallback ) ; |
public class GeneratedJavaFileAccess { /** * Prepends the addition of required imports of the employed annotations .
* Since the ' typeComment ' is a { @ link JavaFileAccess . JavaTypeAwareStringConcatenation }
* any optionally required imports are already processed and tracked in { @ link # imports } . */
@ Override public CharSequence getContent ( ) { } } | CharSequence _xblockexpression = null ; { final Consumer < IClassAnnotation > _function = ( IClassAnnotation it ) -> { this . importType ( it . getAnnotationImport ( ) ) ; } ; this . getClassAnnotations ( ) . forEach ( _function ) ; _xblockexpression = super . getContent ( ) ; } return _xblockexpression ; |
public class JBBPCompiler { /** * Register a name field info item in a named field list .
* @ param normalizedName normalized name of the named field
* @ param offset the named field offset
* @ param namedFields the named field info list for registration
* @ param token the token for the field
* @ throws JBBPCompilationException if there is already a registered field for
* the path */
private static void registerNamedField ( final String normalizedName , final int structureBorder , final int offset , final List < JBBPNamedFieldInfo > namedFields , final JBBPToken token ) { } } | for ( int i = namedFields . size ( ) - 1 ; i >= structureBorder ; i -- ) { final JBBPNamedFieldInfo info = namedFields . get ( i ) ; if ( info . getFieldPath ( ) . equals ( normalizedName ) ) { throw new JBBPCompilationException ( "Duplicated named field detected [" + normalizedName + ']' , token ) ; } } namedFields . add ( new JBBPNamedFieldInfo ( normalizedName , normalizedName , offset ) ) ; |
public class LdapConfigManager { /** * Return the list of properties supported for a given LDAP entity .
* This is an overloaded method to support getSupportedProperties ( String , List )
* @ param ldapEntity : A given LDAP entity
* @ param propNames : List of property names read from data object
* @ return list of properties supported by repository for given LDAP entity
* If the list propNames contain VALUE _ ALL _ PROPERTIES i . e ' * ' , then return the list of properties without any modification
* Code will handle ' * ' later on */
public List < String > getSupportedProperties ( LdapEntity ldapEntity , List < String > propNames ) { } } | List < String > prop = new ArrayList < String > ( ) ; for ( String propName : propNames ) { if ( propName . equals ( SchemaConstants . VALUE_ALL_PROPERTIES ) ) { prop . add ( propName ) ; continue ; } // call the getAttribute method to see if its supported by LDAP
String attrName = ldapEntity . getAttribute ( propName ) ; if ( attrName == null ) { // check the property to attribute map to see if the property defined
// in data object is mapped to a different ldap attribute
attrName = iPropToAttrMap . get ( propName ) ; } if ( attrName != null ) { prop . add ( propName ) ; } } return prop ; |
public class SchematronValidatingParser { /** * Checks the given schematron phase for the XML file and returns the
* validation status .
* NOTE ( review ) : failures while resolving the schema resource are reported
* only via ` assert false ` , so with assertions disabled they are silently
* ignored and a previously set schema file may be used - confirm intent .
* @ param doc
* the XML file to validate ( Document ) ; null or element - less documents yield false
* @ param schemaFile
* the classpath location of the schematron file to use
* @ param phase
* the phase name ( contained in the schematron file )
* @ return whether the document passed validation ( boolean ) */
public boolean checkSchematronRules ( Document doc , String schemaFile , String phase ) throws Exception { } } | boolean isValid = false ; if ( doc == null || doc . getDocumentElement ( ) == null ) return isValid ; try { ClassLoader loader = this . getClass ( ) . getClassLoader ( ) ; URL url = loader . getResource ( schemaFile ) ; this . schemaFile = new File ( URLDecoder . decode ( url . getFile ( ) , "UTF-8" ) ) ; } catch ( Exception e ) { assert false : "Entity body not found. " + e . toString ( ) ; } this . phase = phase ; Document returnDoc = parse ( doc , null , null ) ; if ( returnDoc != null ) { isValid = true ; } return isValid ; |
public class Parsers { /** * A { @ link Parser } that always succeeds and invokes { @ code runnable } .
* The returned parser consumes no input and its { @ code toString ( ) }
* delegates to the runnable .
* @ deprecated retained for backward compatibility ; see the class
* documentation for the preferred alternative ( TODO confirm replacement ) . */
@ Deprecated public static Parser < ? > runnable ( final Runnable runnable ) { } } | return new Parser < Object > ( ) { @ Override boolean apply ( ParseContext ctxt ) { runnable . run ( ) ; return true ; } @ Override public String toString ( ) { return runnable . toString ( ) ; } } ; |
public class TargetController { /** * Returns all current { @ link Target } s
* @ return the response entity with all the properties of the targets and 200 OK status
* @ throws DeployerException if an error ocurred */
@ RequestMapping ( value = GET_ALL_TARGETS_URL , method = RequestMethod . GET ) public ResponseEntity < List < Target > > getAllTargets ( ) throws DeployerException { } } | List < Target > targets = targetService . getAllTargets ( ) ; return new ResponseEntity < > ( targets , RestServiceUtils . setLocationHeader ( new HttpHeaders ( ) , BASE_URL + GET_ALL_TARGETS_URL ) , HttpStatus . OK ) ; |
public class Tuple5 { /** * Split this tuple into two tuples of degree 1 and 4. */
public final Tuple2 < Tuple1 < T1 > , Tuple4 < T2 , T3 , T4 , T5 > > split1 ( ) { } } | return new Tuple2 < > ( limit1 ( ) , skip1 ( ) ) ; |
public class GeoPackageCoreImpl { /** * { @ inheritDoc }
* Creates the geometry columns table , the user feature table and its
* contents metadata in one step ; on any failure the newly created table is
* dropped quietly before the error is rethrown . */
@ Override public GeometryColumns createFeatureTableWithMetadata ( GeometryColumns geometryColumns , BoundingBox boundingBox , long srsId , List < FeatureColumn > columns ) { } } | // Get the SRS
SpatialReferenceSystem srs = getSrs ( srsId ) ; // Create the Geometry Columns table
createGeometryColumnsTable ( ) ; // Create the user feature table
FeatureTable table = new FeatureTable ( geometryColumns . getTableName ( ) , columns ) ; createFeatureTable ( table ) ; try { // Create the contents
Contents contents = new Contents ( ) ; contents . setTableName ( geometryColumns . getTableName ( ) ) ; contents . setDataType ( ContentsDataType . FEATURES ) ; contents . setIdentifier ( geometryColumns . getTableName ( ) ) ; // contents . setLastChange ( new Date ( ) ) ;
if ( boundingBox != null ) { contents . setMinX ( boundingBox . getMinLongitude ( ) ) ; contents . setMinY ( boundingBox . getMinLatitude ( ) ) ; contents . setMaxX ( boundingBox . getMaxLongitude ( ) ) ; contents . setMaxY ( boundingBox . getMaxLatitude ( ) ) ; } contents . setSrs ( srs ) ; getContentsDao ( ) . create ( contents ) ; table . setContents ( contents ) ; // Create new geometry columns
geometryColumns . setContents ( contents ) ; geometryColumns . setSrs ( contents . getSrs ( ) ) ; getGeometryColumnsDao ( ) . create ( geometryColumns ) ; } catch ( RuntimeException e ) { deleteTableQuietly ( geometryColumns . getTableName ( ) ) ; throw e ; } catch ( SQLException e ) { deleteTableQuietly ( geometryColumns . getTableName ( ) ) ; throw new GeoPackageException ( "Failed to create table and metadata: " + geometryColumns . getTableName ( ) , e ) ; } return geometryColumns ; |
public class OptionalLongSubject { /** * Fails if the { @ link OptionalLong } is present or the subject is null . */
public void isEmpty ( ) { } } | if ( actual ( ) == null ) { failWithActual ( simpleFact ( "expected empty optional" ) ) ; } else if ( actual ( ) . isPresent ( ) ) { failWithoutActual ( simpleFact ( "expected to be empty" ) , fact ( "but was present with value" , actual ( ) . getAsLong ( ) ) ) ; } |
public class ComparableExtensions { /** * The comparison operator < code > less than or equals < / code > .
* @ param left
* a comparable
* @ param right
* the value to compare with
* @ return < code > left . compareTo ( right ) < = 0 < / code > */
@ Pure /* not guaranteed , since compareTo ( ) is invoked */
@ Inline ( "($1.compareTo($2) <= 0)" ) public static < C > boolean operator_lessEqualsThan ( Comparable < ? super C > left , C right ) { } } | return left . compareTo ( right ) <= 0 ; |
public class AccessLogInterceptor { /** * 获得请求中要操作的userId
* @ return 用户id */
protected Integer getUserId ( ) { } } | Integer userId = ThreadContext . getContext ( UserWebConstant . CTX_USERID ) ; if ( userId != null ) { return userId ; } return null ; |
public class DescribeAccountLimitsResult { /** * An account limit structure that contain a list of AWS CloudFormation account limits and their values .
* @ return An account limit structure that contain a list of AWS CloudFormation account limits and their values . */
public java . util . List < AccountLimit > getAccountLimits ( ) { } } | if ( accountLimits == null ) { accountLimits = new com . amazonaws . internal . SdkInternalList < AccountLimit > ( ) ; } return accountLimits ; |
public class BaseLabels { /** * Returns labels based on the text file resource . */
protected ArrayList < String > getLabels ( String textResource ) throws IOException { } } | ArrayList < String > labels = new ArrayList < > ( ) ; File resourceFile = getResourceFile ( ) ; // Download if required
try ( InputStream is = new BufferedInputStream ( new FileInputStream ( resourceFile ) ) ; Scanner s = new Scanner ( is ) ) { while ( s . hasNextLine ( ) ) { labels . add ( s . nextLine ( ) ) ; } } return labels ; |
public class WebSiteRequest { /** * Determines if the request is for a Lynx browser */
public boolean isLynx ( ) { } } | if ( ! isLynxDone ) { String agent = req . getHeader ( "user-agent" ) ; isLynx = agent != null && agent . toLowerCase ( Locale . ROOT ) . contains ( "lynx" ) ; isLynxDone = true ; } return isLynx ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link GeneralConversionRefType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link GeneralConversionRefType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "definedByConversion" ) public JAXBElement < GeneralConversionRefType > createDefinedByConversion ( GeneralConversionRefType value ) { } } | return new JAXBElement < GeneralConversionRefType > ( _DefinedByConversion_QNAME , GeneralConversionRefType . class , null , value ) ; |
public class UserRoleJetty { /** * < p > Setter for id . < / p >
* @ param pId reference */
@ Override public final void setItsId ( final IdUserRoleJetty pId ) { } } | this . itsId = pId ; if ( this . itsId == null ) { this . itsUser = null ; this . itsRole = null ; } else { this . itsUser = this . itsId . getItsUser ( ) ; this . itsRole = this . itsId . getItsRole ( ) ; } |
public class AnnotationsClassLoader { /** * Find specified resource in local repositories .
* @ return the loaded resource , or null if the resource isn ' t found */
protected ResourceEntry findResourceInternal ( String name , String path ) { } } | if ( ! started ) { log . info ( sm . getString ( "webappClassLoader.stopped" , name ) ) ; return null ; } if ( ( name == null ) || ( path == null ) ) return null ; ResourceEntry entry = ( ResourceEntry ) resourceEntries . get ( name ) ; if ( entry != null ) return entry ; int contentLength = - 1 ; InputStream binaryStream = null ; int jarFilesLength = jarFiles . length ; int repositoriesLength = repositories . length ; int i ; Resource resource = null ; boolean fileNeedConvert = false ; for ( i = 0 ; ( entry == null ) && ( i < repositoriesLength ) ; i ++ ) { try { String fullPath = repositories [ i ] + path ; Object lookupResult = resources . lookup ( fullPath ) ; if ( lookupResult instanceof Resource ) { resource = ( Resource ) lookupResult ; } // Note : Not getting an exception here means the resource was
// found
if ( securityManager != null ) { PrivilegedAction dp = new PrivilegedFindResource ( files [ i ] , path ) ; entry = ( ResourceEntry ) AccessController . doPrivileged ( dp ) ; } else { entry = findResourceInternal ( files [ i ] , path ) ; } ResourceAttributes attributes = ( ResourceAttributes ) resources . getAttributes ( fullPath ) ; contentLength = ( int ) attributes . getContentLength ( ) ; entry . lastModified = attributes . getLastModified ( ) ; if ( resource != null ) { try { binaryStream = resource . streamContent ( ) ; } catch ( IOException e ) { return null ; } if ( needConvert ) { if ( path . endsWith ( ".properties" ) ) { fileNeedConvert = true ; } } // Register the full path for modification checking
// Note : Only syncing on a ' constant ' object is needed
synchronized ( allPermission ) { int j ; long [ ] result2 = new long [ lastModifiedDates . length + 1 ] ; for ( j = 0 ; j < lastModifiedDates . length ; j ++ ) { result2 [ j ] = lastModifiedDates [ j ] ; } result2 [ lastModifiedDates . length ] = entry . lastModified ; lastModifiedDates = result2 ; String [ ] result = new String [ paths . length + 1 ] ; for ( j = 0 ; j < paths . length ; j ++ ) { result [ j ] = paths [ j ] ; } result [ paths . length ] = fullPath ; paths = result ; } } } catch ( NamingException e ) { } } if ( ( entry == null ) && ( notFoundResources . containsKey ( name ) ) ) return null ; JarEntry jarEntry = null ; synchronized ( jarFiles ) { if ( ! openJARs ( ) ) { return null ; } for ( i = 0 ; ( entry == null ) && ( i < jarFilesLength ) ; i ++ ) { jarEntry = jarFiles [ i ] . getJarEntry ( path ) ; if ( jarEntry != null ) { entry = new ResourceEntry ( ) ; try { entry . codeBase = getURL ( jarRealFiles [ i ] , false ) ; String jarFakeUrl = getURI ( jarRealFiles [ i ] ) . toString ( ) ; jarFakeUrl = "jar:" + jarFakeUrl + "!/" + path ; entry . source = new URL ( jarFakeUrl ) ; entry . lastModified = jarRealFiles [ i ] . lastModified ( ) ; } catch ( MalformedURLException e ) { return null ; } contentLength = ( int ) jarEntry . getSize ( ) ; try { entry . manifest = jarFiles [ i ] . getManifest ( ) ; binaryStream = jarFiles [ i ] . getInputStream ( jarEntry ) ; } catch ( IOException e ) { return null ; } // Extract resources contained in JAR to the workdir
if ( antiJARLocking && ! ( path . endsWith ( ".class" ) ) ) { byte [ ] buf = new byte [ 1024 ] ; File resourceFile = new File ( loaderDir , jarEntry . getName ( ) ) ; if ( ! resourceFile . exists ( ) ) { Enumeration entries = jarFiles [ i ] . entries ( ) ; while ( entries . hasMoreElements ( ) ) { JarEntry jarEntry2 = ( JarEntry ) entries . nextElement ( ) ; if ( ! ( jarEntry2 . isDirectory ( ) ) && ( ! jarEntry2 . getName ( ) . endsWith ( ".class" ) ) ) { resourceFile = new File ( loaderDir , jarEntry2 . getName ( ) ) ; resourceFile . getParentFile ( ) . mkdirs ( ) ; FileOutputStream os = null ; InputStream is = null ; try { is = jarFiles [ i ] . getInputStream ( jarEntry2 ) ; os = new FileOutputStream ( resourceFile ) ; while ( true ) { int n = is . read ( buf ) ; if ( n <= 0 ) { break ; } os . write ( buf , 0 , n ) ; } } catch ( IOException e ) { // Ignore
} finally { try { if ( is != null ) { is . close ( ) ; } } catch ( IOException e ) { } try { if ( os != null ) { os . close ( ) ; } } catch ( IOException e ) { } } } } } } } } if ( entry == null ) { synchronized ( notFoundResources ) { notFoundResources . put ( name , name ) ; } return null ; } if ( binaryStream != null ) { byte [ ] binaryContent = new byte [ contentLength ] ; int pos = 0 ; try { while ( true ) { int n = binaryStream . read ( binaryContent , pos , binaryContent . length - pos ) ; if ( n <= 0 ) break ; pos += n ; } binaryStream . close ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; return null ; } catch ( Exception e ) { e . printStackTrace ( ) ; return null ; } if ( fileNeedConvert ) { String str = new String ( binaryContent , 0 , pos ) ; try { binaryContent = str . getBytes ( "UTF-8" ) ; } catch ( Exception e ) { return null ; } } entry . binaryContent = binaryContent ; // The certificates are only available after the JarEntry
// associated input stream has been fully read
if ( jarEntry != null ) { entry . certificates = jarEntry . getCertificates ( ) ; } } } // Add the entry in the local resource repository
synchronized ( resourceEntries ) { // Ensures that all the threads which may be in a race to load
// a particular class all end up with the same ResourceEntry
// instance
ResourceEntry entry2 = ( ResourceEntry ) resourceEntries . get ( name ) ; if ( entry2 == null ) { resourceEntries . put ( name , entry ) ; } else { entry = entry2 ; } } return entry ; |
public class BundlePathMappingBuilder { /** * Detects all files that belong to the bundle and adds them to the bundle
* path mapping .
* @ return the bundlePathMapping */
public BundlePathMapping build ( ) { } } | if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Creating bundle path List for " + this . bundle . getId ( ) ) ; } BundlePathMapping bundlePathMapping = new BundlePathMapping ( this . bundle ) ; bundlePathMapping . setPathMappings ( strPathMappings ) ; List < PathMapping > pathMappings = bundlePathMapping . getPathMappings ( ) ; Map < String , VariantSet > variants = new TreeMap < > ( ) ; if ( pathMappings != null ) { for ( PathMapping pathMapping : pathMappings ) { boolean isGeneratedPath = generatorRegistry . isPathGenerated ( pathMapping . getPath ( ) ) ; // Handle generated resources
// path ends in / , the folder is included without subfolders
if ( pathMapping . isDirectory ( ) ) { addItemsFromDir ( bundlePathMapping , pathMapping , false ) ; } // path ends in / , the folder is included with all subfolders
else if ( pathMapping . isRecursive ( ) ) { addItemsFromDir ( bundlePathMapping , pathMapping , true ) ; } else if ( pathMapping . getPath ( ) . endsWith ( fileExtension ) ) { addPathMapping ( bundlePathMapping , asPath ( pathMapping . getPath ( ) , isGeneratedPath ) ) ; } else if ( generatorRegistry . isPathGenerated ( pathMapping . getPath ( ) ) ) { addPathMapping ( bundlePathMapping , pathMapping . getPath ( ) ) ; } else if ( pathMapping . getPath ( ) . endsWith ( LICENSES_FILENAME ) ) { bundlePathMapping . getLicensesPathList ( ) . add ( asPath ( pathMapping . getPath ( ) , isGeneratedPath ) ) ; } else { throw new BundlingProcessException ( "Wrong mapping [" + pathMapping + "] for bundle [" + this . bundle . getName ( ) + "]. Please check configuration. " ) ; } if ( isGeneratedPath ) { // Add variants
variants = VariantUtils . concatVariants ( variants , generatorRegistry . getAvailableVariants ( pathMapping . getPath ( ) ) ) ; } } } bundle . setVariants ( variants ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Finished creating bundle path List for " + this . bundle . getId ( ) ) ; } return bundlePathMapping ; |
public class FoundationLoggingPatternLayout { /** * Returns PatternParser used to parse the conversion string . Subclasses may
* override this to return a subclass of PatternParser which recognize
* custom conversion characters .
* @ since 0.9.0 */
protected org . apache . log4j . helpers . PatternParser createPatternParser ( final String pattern ) { } } | return new FoundationLoggingPatternParser ( pattern ) ; |
public class RelationalExpression { /** * Perform a relational comparison . */
public Object evaluate ( ) { } } | if ( ! isCompileTimeConstant ( ) ) { return super . evaluate ( ) ; } Object lhsValue = getLHS ( ) . evaluate ( ) ; Object rhsValue = getRHS ( ) . evaluate ( ) ; IType lhsType = getLHS ( ) . getType ( ) ; IType rhsType = getRHS ( ) . getType ( ) ; if ( _strOperator . equals ( ">" ) ) { if ( BeanAccess . isNumericType ( lhsType ) ) { return compareNumbers ( lhsValue , rhsValue , lhsType , rhsType ) > 0 ; } else { if ( BeanAccess . isBeanType ( lhsType ) ) { if ( BeanAccess . isBeanType ( rhsType ) ) { if ( lhsType . isAssignableFrom ( rhsType ) ) { if ( JavaTypes . COMPARABLE ( ) . isAssignableFrom ( lhsType ) ) { // noinspection unchecked
return ( ( Comparable ) lhsValue ) . compareTo ( rhsValue ) > 0 ; } } } } } } else if ( _strOperator . equals ( "<" ) ) { if ( BeanAccess . isNumericType ( lhsType ) ) { return compareNumbers ( lhsValue , rhsValue , lhsType , rhsType ) < 0 ; } else { if ( BeanAccess . isBeanType ( lhsType ) ) { if ( BeanAccess . isBeanType ( rhsType ) ) { if ( lhsType . isAssignableFrom ( rhsType ) ) { if ( JavaTypes . COMPARABLE ( ) . isAssignableFrom ( lhsType ) ) { // noinspection unchecked
return ( ( Comparable ) lhsValue ) . compareTo ( rhsValue ) < 0 ; } } } } } } else if ( _strOperator . equals ( ">=" ) ) { if ( BeanAccess . isNumericType ( lhsType ) ) { return compareNumbers ( lhsValue , rhsValue , lhsType , rhsType ) >= 0 ; } else { if ( BeanAccess . isBeanType ( lhsType ) ) { if ( BeanAccess . isBeanType ( rhsType ) ) { if ( lhsType . isAssignableFrom ( rhsType ) ) { if ( JavaTypes . COMPARABLE ( ) . isAssignableFrom ( lhsType ) ) { // noinspection unchecked
return ( ( Comparable ) lhsValue ) . compareTo ( rhsValue ) >= 0 ; } } } } } } else // if ( _ strOperator . equals ( " < = " ) )
{ if ( BeanAccess . isNumericType ( lhsType ) ) { return compareNumbers ( lhsValue , rhsValue , lhsType , rhsType ) <= 0 ; } else { if ( BeanAccess . isBeanType ( lhsType ) ) { if ( BeanAccess . isBeanType ( rhsType ) ) { if ( lhsType . isAssignableFrom ( rhsType ) ) { if ( JavaTypes . COMPARABLE ( ) . isAssignableFrom ( lhsType ) ) { // noinspection unchecked
return ( ( Comparable ) lhsValue ) . compareTo ( rhsValue ) <= 0 ; } } } } } } throw new UnsupportedOperationException ( "Operands are not compile-time constants.\n" + "(see http://java.sun.com/docs/books/jls/third_edition/html/expressions.html#5313)" ) ; |
public class Metric2Registry { /** * Removes the metric with the given name .
* @ param name the name of the metric
* @ return whether or not the metric was removed */
public boolean remove ( MetricName name ) { } } | final Metric metric = metrics . remove ( name ) ; if ( metric != null ) { // We have to unregister the Metric with the legacy Dropwizard Metric registry as
// well to support existing reports and listeners
metricRegistry . remove ( name . toGraphiteName ( ) ) ; return true ; } return false ; |
public class CommonOps_DDF2 { /** * Transposes matrix ' a ' and stores the results in ' b ' : < br >
* < br >
* b < sub > ij < / sub > = a < sub > ji < / sub > < br >
* where ' b ' is the transpose of ' a ' .
* @ param input The original matrix . Not modified .
* @ param output Where the transpose is stored . If null a new matrix is created . Modified .
* @ return The transposed matrix . */
public static DMatrix2x2 transpose ( DMatrix2x2 input , DMatrix2x2 output ) { } } | if ( input == null ) input = new DMatrix2x2 ( ) ; output . a11 = input . a11 ; output . a12 = input . a21 ; output . a21 = input . a12 ; output . a22 = input . a22 ; return output ; |
public class IIIFPresentationApiController { /** * The manifest response contains sufficient information for the client to initialize itself and begin to display
* something quickly to the user . The manifest resource represents a single object and any intellectual work or works
* embodied within that object . In particular it includes the descriptive , rights and linking information for the
* object . It then embeds the sequence ( s ) of canvases that should be rendered to the user .
* @ param identifier unique id of object to be shown
* @ param request request containing client information for logging
* @ return the JSON - Manifest
* @ throws NotFoundException if manifest can not be delivered
* @ throws de . digitalcollections . iiif . presentation . model . api . exceptions . InvalidDataException if manifest can not be read
* @ see < a href = " http : / / iiif . io / api / presentation / 2.0 / # manifest " > IIIF 2.0 < / a > */
@ CrossOrigin ( allowedHeaders = { } } | "*" } , origins = { "*" } ) @ RequestMapping ( value = { "{identifier}/manifest" , "{identifier}" } , method = RequestMethod . GET , produces = "application/json" ) @ ResponseBody public Manifest getManifest ( @ PathVariable String identifier , HttpServletRequest request ) throws NotFoundException , InvalidDataException { HttpLoggingUtilities . addRequestClientInfoToMDC ( request ) ; MDC . put ( "manifestId" , identifier ) ; try { Manifest manifest = presentationService . getManifest ( identifier ) ; LOGGER . info ( "Serving manifest for {}" , identifier ) ; return manifest ; } catch ( NotFoundException e ) { LOGGER . info ( "Did not find manifest for {}" , identifier ) ; throw e ; } catch ( InvalidDataException e ) { LOGGER . error ( "Bad data for {}" , identifier ) ; throw e ; } finally { MDC . clear ( ) ; } |
public class MethodIdentifier { /** * Get the parameter type names , as strings .
* @ return the parameter type names */
public String [ ] getParameterTypes ( ) { } } | final String [ ] parameterTypes = this . parameterTypes ; return parameterTypes == NO_STRINGS ? parameterTypes : parameterTypes . clone ( ) ; |
public class DartSuperAccessorsPass { /** * Wraps a property string in a JSCompiler _ renameProperty call .
* < p > Should only be called in phases running before { @ link RenameProperties } ,
* if such a pass is even used ( see { @ link # renameProperties } ) . */
private Node renameProperty ( Node propertyName ) { } } | checkArgument ( propertyName . isString ( ) ) ; if ( ! renameProperties ) { return propertyName ; } Node call = IR . call ( IR . name ( NodeUtil . JSC_PROPERTY_NAME_FN ) . srcref ( propertyName ) , propertyName ) ; call . srcref ( propertyName ) ; call . putBooleanProp ( Node . FREE_CALL , true ) ; call . putBooleanProp ( Node . IS_CONSTANT_NAME , true ) ; return call ; |
public class ConfigurationFileStore { /** * Get the named store . Create it if it does not exist
* @ param name of config
* @ return store
* @ throws ConfigException */
@ Override public ConfigurationStore getStore ( final String name ) throws ConfigException { } } | try { final File dir = new File ( dirPath ) ; final String newPath = dirPath + name ; final File [ ] files = dir . listFiles ( new DirsOnly ( ) ) ; for ( final File f : files ) { if ( f . getName ( ) . equals ( name ) ) { return new ConfigurationFileStore ( newPath ) ; } } final File newDir = new File ( newPath ) ; if ( ! newDir . mkdir ( ) ) { throw new ConfigException ( "Unable to create directory " + newPath ) ; } return new ConfigurationFileStore ( newPath ) ; } catch ( final Throwable t ) { throw new ConfigException ( t ) ; } |
public class SubCommandMetaGetRO { /** * Parses command - line and gets read - only metadata .
* @ param args Command - line input
* @ param printHelp Tells whether to print help only or execute command
* actually
* @ throws IOException */
@ SuppressWarnings ( "unchecked" ) public static void executeCommand ( String [ ] args ) throws IOException { } } | OptionParser parser = getParser ( ) ; // declare parameters
List < String > metaKeys = null ; String url = null ; List < Integer > nodeIds = null ; Boolean allNodes = true ; List < String > storeNames = null ; // parse command - line input
args = AdminToolUtils . copyArrayAddFirst ( args , "--" + OPT_HEAD_META_GET_RO ) ; OptionSet options = parser . parse ( args ) ; if ( options . has ( AdminParserUtils . OPT_HELP ) ) { printHelp ( System . out ) ; return ; } // check required options and / or conflicting options
AdminParserUtils . checkRequired ( options , OPT_HEAD_META_GET_RO ) ; AdminParserUtils . checkRequired ( options , AdminParserUtils . OPT_URL ) ; AdminParserUtils . checkOptional ( options , AdminParserUtils . OPT_NODE , AdminParserUtils . OPT_ALL_NODES ) ; AdminParserUtils . checkRequired ( options , AdminParserUtils . OPT_STORE ) ; // load parameters
metaKeys = ( List < String > ) options . valuesOf ( OPT_HEAD_META_GET_RO ) ; url = ( String ) options . valueOf ( AdminParserUtils . OPT_URL ) ; if ( options . has ( AdminParserUtils . OPT_NODE ) ) { nodeIds = ( List < Integer > ) options . valuesOf ( AdminParserUtils . OPT_NODE ) ; allNodes = false ; } storeNames = ( List < String > ) options . valuesOf ( AdminParserUtils . OPT_STORE ) ; // execute command
AdminClient adminClient = AdminToolUtils . getAdminClient ( url ) ; if ( allNodes ) { nodeIds = AdminToolUtils . getAllNodeIds ( adminClient ) ; } if ( metaKeys . size ( ) == 1 && metaKeys . get ( 0 ) . equals ( METAKEY_ALL ) ) { metaKeys = Lists . newArrayList ( ) ; metaKeys . add ( KEY_MAX_VERSION ) ; metaKeys . add ( KEY_CURRENT_VERSION ) ; metaKeys . add ( KEY_STORAGE_FORMAT ) ; } doMetaGetRO ( adminClient , nodeIds , storeNames , metaKeys ) ; |
public class GetLoggingTargetCmd { /** * Executes the GetLoggingTargetCmd TANGO command */
public Any execute ( DeviceImpl device , Any in_any ) throws DevFailed { } } | Util . out4 . println ( "GetLoggingTargetCmd::execute(): arrived" ) ; String string = null ; try { string = extract_DevString ( in_any ) ; } catch ( DevFailed df ) { Util . out3 . println ( "GetLoggingTargetCmd::execute() --> Wrong argument type" ) ; Except . re_throw_exception ( df , "API_IncompatibleCmdArgumentType" , "Imcompatible command argument type, expected type is : DevVarStringArray" , "GetLoggingTargetCmd.execute" ) ; } Any out_any = insert ( Logging . instance ( ) . get_logging_target ( string ) ) ; Util . out4 . println ( "Leaving GetLoggingTargetCmd.execute()" ) ; return out_any ; |
public class Ssh2RsaPublicKey { /** * ( non - Javadoc )
* @ see com . sshtools . ssh . SshPublicKey # init ( byte [ ] , int , int ) */
public void init ( byte [ ] blob , int start , int len ) throws SshException { } } | ByteArrayReader bar = new ByteArrayReader ( blob , start , len ) ; try { // this . hostKey = hostKey ;
RSAPublicKeySpec rsaKey ; // Extract the key information
String header = bar . readString ( ) ; if ( ! header . equals ( getAlgorithm ( ) ) ) { throw new SshException ( "The encoded key is not RSA" , SshException . INTERNAL_ERROR ) ; } BigInteger e = bar . readBigInteger ( ) ; BigInteger n = bar . readBigInteger ( ) ; rsaKey = new RSAPublicKeySpec ( n , e ) ; try { KeyFactory kf = JCEProvider . getProviderForAlgorithm ( JCEAlgorithms . JCE_RSA ) == null ? KeyFactory . getInstance ( JCEAlgorithms . JCE_RSA ) : KeyFactory . getInstance ( JCEAlgorithms . JCE_RSA , JCEProvider . getProviderForAlgorithm ( JCEAlgorithms . JCE_RSA ) ) ; pubKey = ( RSAPublicKey ) kf . generatePublic ( rsaKey ) ; } catch ( Exception ex ) { throw new SshException ( "Failed to obtain RSA key instance from JCE" , SshException . INTERNAL_ERROR , ex ) ; } } catch ( IOException ioe ) { throw new SshException ( "Failed to read encoded key data" , SshException . INTERNAL_ERROR ) ; } finally { try { bar . close ( ) ; } catch ( IOException e ) { } } |
public class SrvI18n { /** * < p > Evaluate message by given key for given language . < / p >
* @ param pKey key of message
* @ param pLang e . g . " en " , " ru " , etc . */
@ Override public final String getMsg ( final String pKey , final String pLang ) { } } | try { ResourceBundle mb = messagesMap . get ( pLang ) ; if ( mb != null ) { return mb . getString ( pKey ) ; } else { return getMsg ( pKey ) ; } } catch ( Exception e ) { return "[" + pKey + "]-" + pLang ; } |
public class MergeResources { /** * Compute the list of resource set to be used during execution based all the inputs . */
List < ResourceSet > computeResourceSetList ( ) { } } | List < ResourceSet > sourceFolderSets = resSetSupplier . get ( ) ; int size = sourceFolderSets . size ( ) + 4 ; if ( libraries != null ) { size += libraries . getArtifacts ( ) . size ( ) ; } List < ResourceSet > resourceSetList = Lists . newArrayListWithExpectedSize ( size ) ; // add at the beginning since the libraries are less important than the folder based
// resource sets .
// get the dependencies first
if ( libraries != null ) { Set < ResolvedArtifactResult > libArtifacts = libraries . getArtifacts ( ) ; // the order of the artifact is descending order , so we need to reverse it .
for ( ResolvedArtifactResult artifact : libArtifacts ) { ResourceSet resourceSet = new ResourceSet ( MergeManifests . getArtifactName ( artifact ) , null , null , validateEnabled ) ; resourceSet . setFromDependency ( true ) ; resourceSet . addSource ( artifact . getFile ( ) ) ; // add to 0 always , since we need to reverse the order .
resourceSetList . add ( 0 , resourceSet ) ; } } // add the folder based next
resourceSetList . addAll ( sourceFolderSets ) ; // We add the generated folders to the main set
List < File > generatedResFolders = Lists . newArrayList ( ) ; generatedResFolders . addAll ( renderscriptResOutputDir . getFiles ( ) ) ; generatedResFolders . addAll ( generatedResOutputDir . getFiles ( ) ) ; // add the generated files to the main set .
final ResourceSet mainResourceSet = sourceFolderSets . get ( 0 ) ; assert mainResourceSet . getConfigName ( ) . equals ( BuilderConstants . MAIN ) ; mainResourceSet . addSources ( generatedResFolders ) ; return resourceSetList ; |
public class Bool { /** * Apply the operation to two operands , and return the result .
* @ param right non - null reference to the evaluated right operand .
* @ return non - null reference to the XObject that represents the result of the operation .
* @ throws javax . xml . transform . TransformerException */
public XObject operate ( XObject right ) throws javax . xml . transform . TransformerException { } } | if ( XObject . CLASS_BOOLEAN == right . getType ( ) ) return right ; else return right . bool ( ) ? XBoolean . S_TRUE : XBoolean . S_FALSE ; |
public class SibRaEndpointActivation { /** * Closes the connection for the given messaging engine if there is one
* open .
* @ param meUuid
* the UUID for the messaging engine to close the connection for */
protected void closeConnection ( final String meUuid ) { } } | final String methodName = "closeConnection" ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName , meUuid ) ; } closeConnection ( meUuid , false ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName ) ; } |
public class OAuth20Utils { /** * Check if the callback url is valid .
* @ param registeredService the registered service
* @ param redirectUri the callback url
* @ return whether the callback url is valid */
public static boolean checkCallbackValid ( final @ NonNull RegisteredService registeredService , final String redirectUri ) { } } | val registeredServiceId = registeredService . getServiceId ( ) ; LOGGER . debug ( "Found: [{}] vs redirectUri: [{}]" , registeredService , redirectUri ) ; if ( ! redirectUri . matches ( registeredServiceId ) ) { LOGGER . error ( "Unsupported [{}]: [{}] does not match what is defined for registered service: [{}]. " + "Service is considered unauthorized. Verify the service definition in the registry is correct " + "and does in fact match the client [{}]" , OAuth20Constants . REDIRECT_URI , redirectUri , registeredServiceId , redirectUri ) ; return false ; } return true ; |
public class Session { /** * This methods appends the given task to the end of the taskQueue and set the calling thread is sleep state .
* @ param task The task to append to the end of the taskQueue .
* @ throws InterruptedException if another thread interrupted the current thread before or while the current thread
* was waiting for a notification . The interrupted status of the current thread is cleared when this
* exception is thrown .
* @ throws ExecutionException if anything happens while execution */
private final Future < Void > executeTask ( final ITask task ) throws TaskExecutionException { } } | if ( task instanceof IOTask ) { final Future < Void > returnVal = executor . submit ( ( IOTask ) task ) ; return returnVal ; } else { try { task . call ( ) ; } catch ( final Exception exc ) { throw new TaskExecutionException ( new ExecutionException ( exc ) ) ; } return null ; } // LOGGER . info ( " Added a " + task + " to the TaskQueue " ) ; |
public class CmsContextMenuOverlay { /** * Opens next to the given parent item . < p >
* @ param parentMenuItem the perent item */
public void openNextTo ( CmsContextMenuItemWidget parentMenuItem ) { } } | int left = parentMenuItem . getAbsoluteLeft ( ) + parentMenuItem . getOffsetWidth ( ) ; int top = parentMenuItem . getAbsoluteTop ( ) - Window . getScrollTop ( ) ; showAt ( left , top ) ; |
public class Message { /** * Create a MessageCreator to execute create .
* @ param pathAccountSid The SID of the Account that will create the resource
* @ param to The destination phone number
* @ param from The phone number that initiated the message
* @ param mediaUrl The URL of the media to send with the message
* @ return MessageCreator capable of executing the create */
public static MessageCreator creator ( final String pathAccountSid , final com . twilio . type . PhoneNumber to , final com . twilio . type . PhoneNumber from , final List < URI > mediaUrl ) { } } | return new MessageCreator ( pathAccountSid , to , from , mediaUrl ) ; |
public class WonderPushRestClient { /** * Thin wrapper to the { @ link AsyncHttpClient } library . */
private static void request ( final Request request ) { } } | if ( null == request ) { WonderPush . logError ( "Request with null request." ) ; return ; } WonderPush . safeDefer ( new Runnable ( ) { @ Override public void run ( ) { // Decorate parameters
WonderPushRequestParamsDecorator . decorate ( request . getResource ( ) , request . getParams ( ) ) ; // Generate signature
BasicHeader authorizationHeader = request . getAuthorizationHeader ( ) ; // Headers
BasicHeader [ ] headers = null ; if ( null != authorizationHeader ) { headers = new BasicHeader [ 1 ] ; headers [ 0 ] = authorizationHeader ; } String url = WonderPushUriHelper . getAbsoluteUrl ( request . getResource ( ) ) ; WonderPush . logDebug ( "requesting url: " + request . getMethod ( ) + " " + url + "?" + request . getParams ( ) . getURLEncodedString ( ) ) ; // TODO : support other contentTypes such as " application / json "
String contentType = "application/x-www-form-urlencoded" ; // Handler
final ResponseHandler handler = request . getHandler ( ) ; HttpEntity entity = null ; if ( request . getParams ( ) != null ) { try { entity = request . getParams ( ) != null ? request . getParams ( ) . getEntity ( null ) : null ; } catch ( IOException ex ) { WonderPush . logError ( "Failed to create HttpEntity from params " + request . getParams ( ) , ex ) ; if ( handler != null ) { handler . onFailure ( ex , new Response ( "" ) ) ; } return ; } } final long sendDate = SystemClock . elapsedRealtime ( ) ; JsonHttpResponseHandler jsonHandler = new JsonHttpResponseHandler ( ) { @ Override public void onProgress ( long bytesWritten , long totalSize ) { // mute this
} @ Override public void onSuccess ( int statusCode , Header [ ] headers , JSONObject response ) { syncTime ( response ) ; WonderPush . setNetworkAvailable ( true ) ; if ( handler != null ) { handler . onSuccess ( statusCode , new Response ( response ) ) ; } } @ Override public void onSuccess ( int statusCode , Header [ ] headers , JSONArray response ) { WonderPush . logError ( "Unexpected JSONArray answer: " + statusCode + " headers: " + Arrays . toString ( headers ) + " response: (" + response . length ( ) + ") " + response . toString ( ) ) ; } @ Override public void onFailure ( int statusCode , Header [ ] headers , Throwable throwable , JSONObject errorResponse ) { WonderPush . logError ( "Error answer: " + statusCode + " headers: " + Arrays . toString ( headers ) + " response: " + errorResponse ) ; syncTime ( errorResponse ) ; WonderPush . logDebug ( "Request Error: " + errorResponse ) ; WonderPush . setNetworkAvailable ( errorResponse != null ) ; if ( handler != null ) { handler . onFailure ( throwable , new Response ( errorResponse ) ) ; } } @ Override public void onFailure ( int statusCode , Header [ ] headers , Throwable throwable , JSONArray errorResponse ) { WonderPush . logError ( "Unexpected JSONArray error answer: " + statusCode + " headers: " + Arrays . toString ( headers ) + " response: (" + errorResponse . length ( ) + ") " + errorResponse . toString ( ) ) ; this . onFailure ( statusCode , headers , errorResponse . toString ( ) , throwable ) ; } @ Override public void onFailure ( int statusCode , Header [ ] headers , String responseString , Throwable throwable ) { WonderPush . logError ( "Unexpected string error answer: " + statusCode + " headers: " + Arrays . toString ( headers ) + " response: (" + responseString . length ( ) + ") \"" + responseString + "\"" ) ; WonderPush . setNetworkAvailable ( false ) ; if ( handler != null ) { handler . 
onFailure ( throwable , new Response ( responseString ) ) ; } } @ Override public void onSuccess ( int statusCode , Header [ ] headers , String responseString ) { WonderPush . logError ( "Unexpected string answer: " + statusCode + " headers: " + Arrays . toString ( headers ) + " response: (" + responseString . length ( ) + ") \"" + responseString + "\"" ) ; } private void syncTime ( JSONObject data ) { long recvDate = SystemClock . elapsedRealtime ( ) ; if ( data == null || ! data . has ( "_serverTime" ) ) { return ; } TimeSync . syncTimeWithServer ( sendDate , recvDate , data . optLong ( "_serverTime" ) , data . optLong ( "_serverTook" ) ) ; } } ; // NO UNNECESSARY WORK HERE , because of timed request
switch ( request . getMethod ( ) ) { case GET : sClient . get ( null , url , headers , request . getParams ( ) , jsonHandler ) ; break ; case PUT : sClient . put ( null , url , headers , entity , contentType , jsonHandler ) ; break ; case POST : sClient . post ( null , url , headers , entity , contentType , jsonHandler ) ; break ; case PATCH : sClient . patch ( null , url , headers , entity , contentType , jsonHandler ) ; break ; case DELETE : sClient . delete ( null , url , headers , request . getParams ( ) , jsonHandler ) ; break ; default : jsonHandler . sendFailureMessage ( 0 , null , null , new UnsupportedOperationException ( "Unhandled method " + request . getMethod ( ) ) ) ; } } } , 0 ) ; |
public class NodeSelector { /** * Check the { @ link NegationSpecifier } .
* This method will add the { @ link Selector } from the specifier in
* a list and invoke { @ link # check ( List ) } with that list as the argument .
* @ param specifier The negation specifier .
* @ return A set of nodes after invoking { @ link # check ( List ) } .
* @ throws NodeSelectorException In case of an error . */
private Collection < Node > checkNegationSpecifier ( final NegationSpecifier specifier ) throws NodeSelectorException { } } | Collection < Selector > parts = new LinkedHashSet < Selector > ( 1 ) ; parts . add ( specifier . getSelector ( ) ) ; return check ( parts ) ; |
public class BartenderBuilderHeartbeat { /** * The local pod refers to the server itself . */
private UpdatePod initLocalPod ( ) { } } | ServerBartender serverSelf = _bartender . serverSelf ( ) ; ServicesAmp rampManager = AmpSystem . currentManager ( ) ; UpdatePodBuilder podBuilder = new UpdatePodBuilder ( ) ; podBuilder . name ( "local" ) ; podBuilder . cluster ( _bartender . serverSelf ( ) . getCluster ( ) ) ; // int count = Math . min ( 3 , rack . getServerLength ( ) ) ;
ServerPod serverPod = new ServerPod ( 0 , serverSelf ) ; ServerPod [ ] servers = new ServerPod [ ] { serverPod } ; podBuilder . pod ( servers ) ; // int depth = Math . min ( 3 , handles . length ) ;
podBuilder . primaryCount ( 1 ) ; podBuilder . depth ( 1 ) ; UpdatePod updatePod = podBuilder . build ( ) ; return new UpdatePod ( updatePod , new String [ ] { serverSelf . getId ( ) } , 0 ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.