signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class XsdAsmInterfaces { /** * Adds information about the attribute group interface to the attributeGroupInterfaces variable . * @ param attributeGroup The attributeGroup to add . */ private void addAttributeGroup ( XsdAttributeGroup attributeGroup ) { } }
String interfaceName = firstToUpper ( attributeGroup . getName ( ) ) ; if ( ! attributeGroupInterfaces . containsKey ( interfaceName ) ) { List < XsdAttribute > ownElements = attributeGroup . getXsdElements ( ) . filter ( attribute -> attribute . getParent ( ) . equals ( attributeGroup ) ) . map ( attribute -> ( XsdAttribute ) attribute ) . collect ( Collectors . toList ( ) ) ; List < String > parentNames = attributeGroup . getAttributeGroups ( ) . stream ( ) . map ( XsdNamedElements :: getName ) . collect ( Collectors . toList ( ) ) ; AttributeHierarchyItem attributeHierarchyItemItem = new AttributeHierarchyItem ( parentNames , ownElements ) ; attributeGroupInterfaces . put ( interfaceName , attributeHierarchyItemItem ) ; attributeGroup . getAttributeGroups ( ) . forEach ( this :: addAttributeGroup ) ; }
public class RxFile { /** * Get path from Uri , for an ImageDocument . */ public static Observable < String > getPathFromUriForImageDocument ( final Context context , final String mediaDocumentId ) { } }
return Observable . fromCallable ( new Func0 < String > ( ) { @ Override public String call ( ) { String pathFound = null ; Cursor cursor = context . getContentResolver ( ) . query ( MediaStore . Images . Media . EXTERNAL_CONTENT_URI , null , Constants . ID_COLUMN_VALUE + " =?" , new String [ ] { mediaDocumentId } , null ) ; if ( cursor != null ) { if ( cursor . moveToFirst ( ) ) { pathFound = cursor . getString ( cursor . getColumnIndexOrThrow ( MediaStore . MediaColumns . DATA ) ) ; } cursor . close ( ) ; logDebug ( "Path found:" + pathFound ) ; } return pathFound ; } } ) ;
public class ListStreamConsumersRequestMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param listStreamConsumersRequest the request to marshall; must not be null
     * @param protocolMarshaller         the marshaller that receives each member/binding pair
     * @throws SdkClientException if the request is null or any member fails to marshall
     */
    public void marshall(ListStreamConsumersRequest listStreamConsumersRequest, ProtocolMarshaller protocolMarshaller) {
        if (listStreamConsumersRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request member against its generated binding descriptor.
            protocolMarshaller.marshall(listStreamConsumersRequest.getStreamARN(), STREAMARN_BINDING);
            protocolMarshaller.marshall(listStreamConsumersRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listStreamConsumersRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listStreamConsumersRequest.getStreamCreationTimestamp(), STREAMCREATIONTIMESTAMP_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client-side exception type,
            // preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class DescribeReservedElasticsearchInstancesResult { /** * List of reserved Elasticsearch instances . * @ param reservedElasticsearchInstances * List of reserved Elasticsearch instances . */ public void setReservedElasticsearchInstances ( java . util . Collection < ReservedElasticsearchInstance > reservedElasticsearchInstances ) { } }
if ( reservedElasticsearchInstances == null ) { this . reservedElasticsearchInstances = null ; return ; } this . reservedElasticsearchInstances = new java . util . ArrayList < ReservedElasticsearchInstance > ( reservedElasticsearchInstances ) ;
public class CommerceVirtualOrderItemPersistenceImpl {

    /**
     * Returns the commerce virtual order items before and after the current commerce
     * virtual order item in the ordered set where uuid = &#63;.
     *
     * @param commerceVirtualOrderItemId the primary key of the current commerce virtual order item
     * @param uuid the uuid
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce virtual order item
     * @throws NoSuchVirtualOrderItemException if a commerce virtual order item with the primary key could not be found
     */
    @Override
    public CommerceVirtualOrderItem[] findByUuid_PrevAndNext(
            long commerceVirtualOrderItemId, String uuid,
            OrderByComparator<CommerceVirtualOrderItem> orderByComparator)
        throws NoSuchVirtualOrderItemException {

        // Throws NoSuchVirtualOrderItemException if the current item does not exist.
        CommerceVirtualOrderItem commerceVirtualOrderItem = findByPrimaryKey(commerceVirtualOrderItemId);

        Session session = null;

        try {
            session = openSession();

            CommerceVirtualOrderItem[] array = new CommerceVirtualOrderItemImpl[3];

            // [0] = previous item, [1] = current item, [2] = next item; the boolean
            // flag selects the direction of the neighbour lookup.
            array[0] = getByUuid_PrevAndNext(session, commerceVirtualOrderItem, uuid, orderByComparator, true);

            array[1] = commerceVirtualOrderItem;

            array[2] = getByUuid_PrevAndNext(session, commerceVirtualOrderItem, uuid, orderByComparator, false);

            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            // Always release the ORM session, even when the lookup failed.
            closeSession(session);
        }
    }
}
public class InternalPureXbaseParser {

    /**
     * Generated ANTLR rule for XVariableDeclaration. Grammar (InternalPureXbase.g:4504):
     * <pre>
     *   ('var' | 'val')
     *   ( (type=JvmTypeReference name=ValidID) | name=ValidID )
     *   ('=' right=XExpression)?
     * </pre>
     * NOTE(review): generated parser code — the brace blocks mirror the grammar
     * structure and the statement order must not be changed by hand.
     *
     * @return the XVariableDeclaration model element that was parsed (may be null on failure)
     * @throws RecognitionException on unrecoverable recognition errors
     */
    public final EObject ruleXVariableDeclaration() throws RecognitionException {
        EObject current = null;

        Token lv_writeable_1_0 = null;
        Token otherlv_2 = null;
        Token otherlv_6 = null;
        EObject lv_type_3_0 = null;
        AntlrDatatypeRuleToken lv_name_4_0 = null;
        AntlrDatatypeRuleToken lv_name_5_0 = null;
        EObject lv_right_7_0 = null;

        enterRule();

        try {
            {
                {
                    // Action (): eagerly create the XVariableDeclaration model element
                    // before any token is consumed.
                    {
                        if (state.backtracking == 0) {
                            current = forceCreateModelElement(grammarAccess.getXVariableDeclarationAccess().getXVariableDeclarationAction_0(), current);
                        }
                    }
                    // Alternative 80: 'var' (token 18) vs 'val' (token 19).
                    int alt80 = 2;
                    int LA80_0 = input.LA(1);
                    if ((LA80_0 == 18)) {
                        alt80 = 1;
                    } else if ((LA80_0 == 19)) {
                        alt80 = 2;
                    } else {
                        // During backtracking, fail silently instead of throwing.
                        if (state.backtracking > 0) {
                            state.failed = true;
                            return current;
                        }
                        NoViableAltException nvae = new NoViableAltException("", 80, 0, input);
                        throw nvae;
                    }
                    switch (alt80) {
                        case 1:
                            // 'var' keyword: declaration is writeable.
                            {
                                {
                                    {
                                        lv_writeable_1_0 = (Token) match(input, 18, FOLLOW_11);
                                        if (state.failed) return current;
                                        if (state.backtracking == 0) {
                                            newLeafNode(lv_writeable_1_0, grammarAccess.getXVariableDeclarationAccess().getWriteableVarKeyword_1_0_0());
                                        }
                                        if (state.backtracking == 0) {
                                            if (current == null) {
                                                current = createModelElement(grammarAccess.getXVariableDeclarationRule());
                                            }
                                            setWithLastConsumed(current, "writeable", true, "var");
                                        }
                                    }
                                }
                            }
                            break;
                        case 2:
                            // 'val' keyword: read-only declaration (no feature set).
                            {
                                otherlv_2 = (Token) match(input, 19, FOLLOW_11);
                                if (state.failed) return current;
                                if (state.backtracking == 0) {
                                    newLeafNode(otherlv_2, grammarAccess.getXVariableDeclarationAccess().getValKeyword_1_1());
                                }
                            }
                            break;
                    }
                    // Alternative 81: "type name" (guarded by syntactic predicate
                    // synpred37) vs bare "name".
                    int alt81 = 2;
                    int LA81_0 = input.LA(1);
                    if ((LA81_0 == RULE_ID)) {
                        int LA81_1 = input.LA(2);
                        if ((synpred37_InternalPureXbase())) {
                            alt81 = 1;
                        } else if ((true)) {
                            alt81 = 2;
                        } else {
                            if (state.backtracking > 0) {
                                state.failed = true;
                                return current;
                            }
                            NoViableAltException nvae = new NoViableAltException("", 81, 1, input);
                            throw nvae;
                        }
                    } else if ((LA81_0 == 15) && (synpred37_InternalPureXbase())) {
                        alt81 = 1;
                    } else if ((LA81_0 == 41) && (synpred37_InternalPureXbase())) {
                        alt81 = 1;
                    } else {
                        if (state.backtracking > 0) {
                            state.failed = true;
                            return current;
                        }
                        NoViableAltException nvae = new NoViableAltException("", 81, 0, input);
                        throw nvae;
                    }
                    switch (alt81) {
                        case 1:
                            // type=JvmTypeReference name=ValidID
                            {
                                {
                                    {
                                        {
                                            {
                                                if (state.backtracking == 0) {
                                                    newCompositeNode(grammarAccess.getXVariableDeclarationAccess().getTypeJvmTypeReferenceParserRuleCall_2_0_0_0_0());
                                                }
                                                pushFollow(FOLLOW_12);
                                                lv_type_3_0 = ruleJvmTypeReference();
                                                state._fsp--;
                                                if (state.failed) return current;
                                                if (state.backtracking == 0) {
                                                    if (current == null) {
                                                        current = createModelElementForParent(grammarAccess.getXVariableDeclarationRule());
                                                    }
                                                    set(current, "type", lv_type_3_0, "org.eclipse.xtext.xbase.Xtype.JvmTypeReference");
                                                    afterParserOrEnumRuleCall();
                                                }
                                            }
                                        }
                                        {
                                            {
                                                if (state.backtracking == 0) {
                                                    newCompositeNode(grammarAccess.getXVariableDeclarationAccess().getNameValidIDParserRuleCall_2_0_0_1_0());
                                                }
                                                pushFollow(FOLLOW_63);
                                                lv_name_4_0 = ruleValidID();
                                                state._fsp--;
                                                if (state.failed) return current;
                                                if (state.backtracking == 0) {
                                                    if (current == null) {
                                                        current = createModelElementForParent(grammarAccess.getXVariableDeclarationRule());
                                                    }
                                                    set(current, "name", lv_name_4_0, "org.eclipse.xtext.xbase.Xtype.ValidID");
                                                    afterParserOrEnumRuleCall();
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                            break;
                        case 2:
                            // bare name=ValidID (no explicit type)
                            {
                                {
                                    {
                                        if (state.backtracking == 0) {
                                            newCompositeNode(grammarAccess.getXVariableDeclarationAccess().getNameValidIDParserRuleCall_2_1_0());
                                        }
                                        pushFollow(FOLLOW_63);
                                        lv_name_5_0 = ruleValidID();
                                        state._fsp--;
                                        if (state.failed) return current;
                                        if (state.backtracking == 0) {
                                            if (current == null) {
                                                current = createModelElementForParent(grammarAccess.getXVariableDeclarationRule());
                                            }
                                            set(current, "name", lv_name_5_0, "org.eclipse.xtext.xbase.Xtype.ValidID");
                                            afterParserOrEnumRuleCall();
                                        }
                                    }
                                }
                            }
                            break;
                    }
                    // Optional initializer: '=' (token 20) right=XExpression.
                    int alt82 = 2;
                    int LA82_0 = input.LA(1);
                    if ((LA82_0 == 20)) {
                        alt82 = 1;
                    }
                    switch (alt82) {
                        case 1:
                            {
                                otherlv_6 = (Token) match(input, 20, FOLLOW_3);
                                if (state.failed) return current;
                                if (state.backtracking == 0) {
                                    newLeafNode(otherlv_6, grammarAccess.getXVariableDeclarationAccess().getEqualsSignKeyword_3_0());
                                }
                                {
                                    {
                                        if (state.backtracking == 0) {
                                            newCompositeNode(grammarAccess.getXVariableDeclarationAccess().getRightXExpressionParserRuleCall_3_1_0());
                                        }
                                        pushFollow(FOLLOW_2);
                                        lv_right_7_0 = ruleXExpression();
                                        state._fsp--;
                                        if (state.failed) return current;
                                        if (state.backtracking == 0) {
                                            if (current == null) {
                                                current = createModelElementForParent(grammarAccess.getXVariableDeclarationRule());
                                            }
                                            set(current, "right", lv_right_7_0, "org.eclipse.xtext.xbase.Xbase.XExpression");
                                            afterParserOrEnumRuleCall();
                                        }
                                    }
                                }
                            }
                            break;
                    }
                }
            }
            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            // Standard ANTLR recovery: resync and keep going.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class RuntimeManagerMain { /** * Verify that the environment information in execution state matches the request */ protected void validateExecutionState ( String topologyName , ExecutionEnvironment . ExecutionState executionState ) throws TopologyRuntimeManagementException { } }
String stateCluster = executionState . getCluster ( ) ; String stateRole = executionState . getRole ( ) ; String stateEnv = executionState . getEnviron ( ) ; String configCluster = Context . cluster ( config ) ; String configRole = Context . role ( config ) ; String configEnv = Context . environ ( config ) ; if ( ! stateCluster . equals ( configCluster ) || ! stateRole . equals ( configRole ) || ! stateEnv . equals ( configEnv ) ) { String currentState = String . format ( "%s/%s/%s" , stateCluster , stateRole , stateEnv ) ; String configState = String . format ( "%s/%s/%s" , configCluster , configRole , configEnv ) ; throw new TopologyRuntimeManagementException ( String . format ( "cluster/role/environ does not match. Topology '%s' is running at %s, not %s" , topologyName , currentState , configState ) ) ; }
public class CommerceAddressPersistenceImpl {

    /**
     * Returns all the commerce addresses where commerceRegionId = &#63;.
     *
     * @param commerceRegionId the commerce region ID
     * @return the matching commerce addresses
     */
    @Override
    public List<CommerceAddress> findByCommerceRegionId(long commerceRegionId) {
        // Delegate to the ranged overload: QueryUtil.ALL_POS for both bounds means
        // "no paging" (full result set), and null means no ordering comparator.
        return findByCommerceRegionId(commerceRegionId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class Node { /** * syck _ alloc _ seq */ public static Node allocSeq ( ) { } }
Data . Seq s = new Data . Seq ( ) ; s . style = SeqStyle . None ; s . idx = 0 ; s . capa = YAML . ALLOC_CT ; s . items = new Object [ s . capa ] ; Node n = KindTag . Seq . allocNode ( ) ; n . data = s ; return n ;
public class UpdateGroupPermissionHandler {

    /**
     * Rebuilds the access map for the given user group: walks every user permission
     * record belonging to the group, expands the whitespace/comma-separated resource
     * class lists, and stores the resulting tab-separated
     * (class, access level, login level) entries into the group's ACCESS_MAP field.
     *
     * @param iGroupID the id of the user group whose access map should be regenerated
     */
    public void updateGroupPermission(int iGroupID) {
        // Lazily create the permission record bound to this handler's owner.
        if (m_recUserPermission == null)
            m_recUserPermission = new UserPermission(this.getOwner().findRecordOwner());
        Record m_recUserGroup = ((ReferenceField) m_recUserPermission.getField(UserPermission.USER_GROUP_ID)).getReferenceRecord();
        m_recUserGroup.setOpenMode(m_recUserGroup.getOpenMode() & ~DBConstants.OPEN_READ_ONLY);    // Read and write
        // Restrict the permission record's iteration to rows of this group.
        if (m_recUserPermission.getListener(SubFileFilter.class) == null)
            m_recUserPermission.addListener(new SubFileFilter(m_recUserGroup));
        try {
            // Locate the group row by its counter (primary key) value.
            m_recUserGroup.addNew();
            m_recUserGroup.getCounterField().setValue(iGroupID);
            if (m_recUserGroup.seek(null)) {
                m_recUserGroup.edit();
                StringBuffer sb = new StringBuffer();
                // close() resets the permission cursor so hasNext() starts from the top.
                m_recUserPermission.close();
                while (m_recUserPermission.hasNext()) {
                    m_recUserPermission.next();
                    Record recUserResource = ((ReferenceField) m_recUserPermission.getField(UserPermission.USER_RESOURCE_ID)).getReference();
                    String strResource = recUserResource.getField(UserResource.RESOURCE_CLASS).toString();
                    // Resource field may list several class names separated by
                    // newlines, tabs, spaces, or commas.
                    StringTokenizer tokenizer = new StringTokenizer(strResource, "\n\t ,");
                    while (tokenizer.hasMoreTokens()) {
                        String strClass = tokenizer.nextToken();
                        int startThin = strClass.indexOf(Constants.THIN_SUBPACKAGE, 0);
                        if (startThin != -1)    // Remove the ".thin" reference
                            strClass = strClass.substring(0, startThin) + strClass.substring(startThin + Constants.THIN_SUBPACKAGE.length());
                        if (strClass.length() > 0) {
                            // Entry format: class \t accessLevel \t loginLevel \t \n
                            sb.append(strClass).append('\t');
                            sb.append(m_recUserPermission.getField(UserPermission.ACCESS_LEVEL).toString()).append('\t');
                            sb.append(m_recUserPermission.getField(UserPermission.LOGIN_LEVEL).toString()).append("\t\n");
                        }
                    }
                }
                m_recUserGroup.getField(UserGroup.ACCESS_MAP).setString(sb.toString());
                m_recUserGroup.set();
            }
        } catch (DBException e) {
            // NOTE(review): failures are only printed, not propagated — callers
            // cannot tell the update failed. Confirm this best-effort behavior is intended.
            e.printStackTrace();
        }
    }
}
public class PorterStemmer {

    /**
     * step5() takes off -ant, -ence etc., in context &lt;c&gt;vcvc&lt;v&gt;.
     * Porter algorithm step 5: each case inspects the penultimate character
     * b[k-1] to narrow the candidate suffixes, then endWith(...) positions j at
     * the start of the matched suffix. The suffix is removed only when the
     * measure m() of the remaining stem exceeds 1.
     */
    private final void step5() {
        // Empty buffer: nothing to strip.
        if (k == 0) {
            return;
        }
        // Dispatch on the second-to-last character; each case either matches a
        // suffix (break -> maybe strip) or returns (no suffix applies).
        switch (b[k - 1]) {
        case 'a':
            if (endWith("al")) { break; }
            return;
        case 'c':
            if (endWith("ance")) { break; }
            if (endWith("ence")) { break; }
            return;
        case 'e':
            if (endWith("er")) { break; }
            return;
        case 'i':
            if (endWith("ic")) { break; }
            return;
        case 'l':
            if (endWith("able")) { break; }
            if (endWith("ible")) { break; }
            return;
        case 'n':
            if (endWith("ant")) { break; }
            if (endWith("ement")) { break; }
            if (endWith("ment")) { break; }
            /* element etc . not stripped before the m */
            if (endWith("ent")) { break; }
            return;
        case 'o':
            // -ion only counts after s or t (e.g. "-sion"/"-tion").
            if (endWith("ion") && j >= 0 && (b[j] == 's' || b[j] == 't')) { break; }
            if (endWith("ou")) { break; }
            return;
            /* takes care of - ous */
        case 's':
            if (endWith("ism")) { break; }
            return;
        case 't':
            if (endWith("ate")) { break; }
            if (endWith("iti")) { break; }
            return;
        case 'u':
            if (endWith("ous")) { break; }
            return;
        case 'v':
            if (endWith("ive")) { break; }
            return;
        case 'z':
            if (endWith("ize")) { break; }
            return;
        default:
            return;
        }
        // Strip the suffix only when the stem measure is > 1.
        if (m() > 1) {
            k = j;
        }
    }
}
public class BaseDuoSecurityAuthenticationService {

    /**
     * Sign http request: attaches the user and auto factor/device parameters,
     * then signs the request with the configured Duo integration and secret keys.
     * Parameters must be added before signRequest is called, since the signature
     * covers the request parameters.
     *
     * @param request the request to parameterize and sign
     * @param id      the username to authenticate
     * @return the same request instance, signed
     */
    @SneakyThrows
    protected Http signHttpAuthRequest(final Http request, final String id) {
        request.addParam("username", id);
        // "auto" lets Duo pick the factor and device for the user.
        request.addParam("factor", "auto");
        request.addParam("device", "auto");
        request.signRequest(duoProperties.getDuoIntegrationKey(), duoProperties.getDuoSecretKey());
        return request;
    }
}
public class Regex { /** * Matches the whole input and returns the matched string * @ param reader * @ return * @ throws IOException * @ throws SyntaxErrorException */ public String getMatch ( InputReader reader ) throws IOException , SyntaxErrorException { } }
int rc = match ( reader ) ; if ( rc == 1 && reader . read ( ) == - 1 ) { return reader . getString ( ) ; } else { throw new SyntaxErrorException ( "syntax error" + "\n" + reader . getLineNumber ( ) + ": " + reader . getLine ( ) + "\n" + pointer ( reader . getColumnNumber ( ) + 2 ) ) ; }
public class FixedShardsDistribution {

    /**
     * Allocates shards in a round-robin fashion over the servers, skipping servers
     * without segments (those whose weight set is empty).
     *
     * @param numShards       total number of shards to distribute
     * @param numNodes        number of candidate servers; sizes the result array
     * @param weightPerServer per-server segment sets; a server with an empty set receives no shards
     * @return int[] with the number of shards per server
     * @throws IllegalArgumentException if shards remain to be placed but every server's
     *         weight set is empty — the original round-robin loop would spin forever
     */
    // Package-private (was private) so the distribution logic is unit-testable.
    static int[] allocateShardsToNodes(int numShards, int numNodes, List<Set<Integer>> weightPerServer) {
        int[] shardsPerServer = new int[numNodes];
        // FIX: guard against an infinite loop — the while below only terminates
        // if at least one server is eligible to take a shard.
        if (numShards > 0 && weightPerServer.stream().allMatch(Set::isEmpty)) {
            throw new IllegalArgumentException(
                    "Cannot allocate " + numShards + " shards: no server has segments");
        }
        // Endless 0..numNodes-1 cycle; ineligible (empty) servers are skipped.
        Iterator<Integer> cyclicNodeIterator = Stream.iterate(0, i -> (i + 1) % numNodes).iterator();
        while (numShards > 0) {
            int slot = cyclicNodeIterator.next();
            if (!weightPerServer.get(slot).isEmpty()) {
                shardsPerServer[slot]++;
                numShards--;
            }
        }
        return shardsPerServer;
    }
}
public class WApplicationRenderer {

    /**
     * Paints the given WApplication as a ui:application XML element, including
     * optional analytics config, hidden form parameters, and custom CSS/JS
     * resources, then paints the application's children inside the element.
     *
     * @param component the WApplication to paint.
     * @param renderContext the RenderContext to paint to.
     */
    @Override
    public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
        WApplication application = (WApplication) component;
        XmlStringBuilder xml = renderContext.getWriter();
        UIContext uic = UIContextHolder.getCurrent();
        String focusId = uic.getFocussedId();

        // Check that this is the top level component
        if (application.getParent() != null) {
            LOG.warn("WApplication component should be the top level component.");
        }

        // Open the ui:application element with its identity/URL attributes.
        xml.appendTagOpen("ui:application");
        xml.appendAttribute("id", component.getId());
        xml.appendOptionalAttribute("class", component.getHtmlClass());
        xml.appendUrlAttribute("applicationUrl", uic.getEnvironment().getPostPath());
        xml.appendUrlAttribute("ajaxUrl", uic.getEnvironment().getWServletPath());
        xml.appendOptionalAttribute("unsavedChanges", application.hasUnsavedChanges(), "true");
        xml.appendOptionalAttribute("title", application.getTitle());
        // Only emit a default focus when focus is required and an id is set.
        xml.appendOptionalAttribute("defaultFocusId", uic.isFocusRequired() && !Util.empty(focusId), focusId);
        xml.appendOptionalUrlAttribute("icon", WApplication.getIcon());
        xml.appendClose();

        // Tracking enabled globally
        if (TrackingUtil.isTrackingEnabled()) {
            xml.appendTagOpen("ui:analytic");
            xml.appendAttribute("clientId", TrackingUtil.getClientId());
            xml.appendOptionalAttribute("cd", TrackingUtil.getCookieDomain());
            xml.appendOptionalAttribute("dcd", TrackingUtil.getDataCollectionDomain());
            xml.appendOptionalAttribute("name", TrackingUtil.getApplicationName());
            xml.appendEnd();
        }

        // Hidden fields rendered as ui:param name/value pairs.
        Map<String, String> hiddenFields = uic.getEnvironment().getHiddenParameters();
        if (hiddenFields != null) {
            for (Map.Entry<String, String> entry : hiddenFields.entrySet()) {
                xml.appendTagOpen("ui:param");
                xml.appendAttribute("name", entry.getKey());
                xml.appendAttribute("value", entry.getValue());
                xml.appendEnd();
            }
        }

        // Custom CSS Resources (if any); resources without a target URL are skipped.
        for (WApplication.ApplicationResource resource : application.getCssResources()) {
            String url = resource.getTargetUrl();
            if (!Util.empty(url)) {
                xml.appendTagOpen("ui:css");
                xml.appendUrlAttribute("url", url);
                xml.appendEnd();
            }
        }

        // Custom JavaScript Resources (if any)
        for (WApplication.ApplicationResource resource : application.getJsResources()) {
            String url = resource.getTargetUrl();
            if (!Util.empty(url)) {
                xml.appendTagOpen("ui:js");
                xml.appendUrlAttribute("url", url);
                xml.appendEnd();
            }
        }

        // Child content goes inside ui:application, before the closing tag.
        paintChildren(application, renderContext);

        xml.appendEndTag("ui:application");
    }
}
public class Fork {

    /**
     * Check data quality: copies row-count and schema properties into the fork's
     * task state, runs the task-level quality-check policies, and asks the task
     * publisher whether the data can be published.
     *
     * @param schema optional extract schema recorded for the quality checks
     * @return whether data publishing is successful and data should be committed
     */
    private boolean checkDataQuality(Optional<Object> schema) throws Exception {
        // With multiple branches, propagate the extractor's expected/extracted row
        // counts from the task state into this fork's own state.
        if (this.branches > 1) {
            this.forkTaskState.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED,
                    this.taskState.getProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED));
            this.forkTaskState.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXTRACTED,
                    this.taskState.getProp(ConfigurationKeys.EXTRACTOR_ROWS_EXTRACTED));
        }

        // Record how many rows this fork's writer actually wrote (0 when no writer).
        String writerRecordsWrittenKey =
                ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_RECORDS_WRITTEN, this.branches, this.index);
        if (this.writer.isPresent()) {
            this.forkTaskState.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, this.writer.get().recordsWritten());
            this.taskState.setProp(writerRecordsWrittenKey, this.writer.get().recordsWritten());
        } else {
            this.forkTaskState.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, 0L);
            this.taskState.setProp(writerRecordsWrittenKey, 0L);
        }

        if (schema.isPresent()) {
            this.forkTaskState.setProp(ConfigurationKeys.EXTRACT_SCHEMA, schema.get().toString());
        }

        try {
            // Do task - level quality checking
            TaskLevelPolicyCheckResults taskResults =
                    this.taskContext.getTaskLevelPolicyChecker(this.forkTaskState, this.branches > 1 ? this.index : -1)
                            .executePolicies();
            TaskPublisher publisher = this.taskContext.getTaskPublisher(this.forkTaskState, taskResults);
            // Only SUCCESS allows the data to be committed; every other state is
            // logged and treated as a quality-check failure.
            switch (publisher.canPublish()) {
                case SUCCESS:
                    return true;
                case CLEANUP_FAIL:
                    this.logger.error("Cleanup failed for task " + this.taskId);
                    break;
                case POLICY_TESTS_FAIL:
                    this.logger.error("Not all quality checking passed for task " + this.taskId);
                    break;
                case COMPONENTS_NOT_FINISHED:
                    this.logger.error("Not all components completed for task " + this.taskId);
                    break;
                default:
                    break;
            }
            return false;
        } catch (Throwable t) {
            // Quality checking must never take the task down; log and report failure.
            this.logger.error("Failed to check task-level data quality", t);
            return false;
        }
    }
}
public class ServersInner {

    /**
     * Creates a new server or updates an existing server. The update action will
     * overwrite the existing server.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *        from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters The required parameters for creating or updating a server.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ServerInner object if successful.
     */
    public ServerInner beginCreate(String resourceGroupName, String serverName, ServerForCreate parameters) {
        // Synchronous wrapper: block on the async call and unwrap the response body.
        return beginCreateWithServiceResponseAsync(resourceGroupName, serverName, parameters).toBlocking().single().body();
    }
}
public class CategoricalParameterRangeMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param categoricalParameterRange the object to marshall; must not be null
     * @param protocolMarshaller        the marshaller that receives each member/binding pair
     * @throws SdkClientException if the argument is null or any member fails to marshall
     */
    public void marshall(CategoricalParameterRange categoricalParameterRange, ProtocolMarshaller protocolMarshaller) {
        if (categoricalParameterRange == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each member against its generated binding descriptor.
            protocolMarshaller.marshall(categoricalParameterRange.getName(), NAME_BINDING);
            protocolMarshaller.marshall(categoricalParameterRange.getValues(), VALUES_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client-side exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SimpleInternalFrame { /** * Creates and answers the header panel , that consists of : an icon , a title * label , a tool bar , and a gradient background . * @ param label * the label to paint the icon and text * @ param bar * the panel ' s tool bar * @ return the panel ' s built header area */ private JPanel buildHeader ( JLabel label , JToolBar bar ) { } }
gradientPanel = new GradientPanel ( new BorderLayout ( ) , getHeaderBackground ( ) ) ; label . setOpaque ( false ) ; gradientPanel . add ( label , BorderLayout . WEST ) ; gradientPanel . setBorder ( BorderFactory . createEmptyBorder ( 3 , 4 , 3 , 1 ) ) ; headerPanel = new JPanel ( new BorderLayout ( ) ) ; headerPanel . add ( gradientPanel , BorderLayout . CENTER ) ; setToolBar ( bar ) ; headerPanel . setBorder ( new RaisedHeaderBorder ( ) ) ; headerPanel . setOpaque ( false ) ; return headerPanel ;
public class GetChangeTokenRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param getChangeTokenRequest the request to validate; must not be null
     * @param protocolMarshaller    unused — GetChangeTokenRequest has no members to marshall
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetChangeTokenRequest getChangeTokenRequest, ProtocolMarshaller protocolMarshaller) {
        if (getChangeTokenRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // GetChangeTokenRequest carries no members; the empty try/catch is
            // kept by the code generator for structural uniformity across marshallers.
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AlphabetFactory { /** * 不再增加新的词 * @ param b */ public void setStopIncrement ( boolean b ) { } }
Iterator < String > it = maps . keySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { String key = it . next ( ) ; maps . get ( key ) . setStopIncrement ( b ) ; }
public class RouteSelector { /** * Prepares the proxy servers to try . */ private void resetNextProxy ( HttpUrl url , Proxy proxy ) { } }
if ( proxy != null ) { // If the user specifies a proxy , try that and only that . proxies = Collections . singletonList ( proxy ) ; } else { // Try each of the ProxySelector choices until one connection succeeds . List < Proxy > proxiesOrNull = address . proxySelector ( ) . select ( url . uri ( ) ) ; proxies = proxiesOrNull != null && ! proxiesOrNull . isEmpty ( ) ? Util . immutableList ( proxiesOrNull ) : Util . immutableList ( Proxy . NO_PROXY ) ; } nextProxyIndex = 0 ;
public class ChangeMessageVisibilityBatchResult { /** * A list of < code > < a > BatchResultErrorEntry < / a > < / code > items . * @ return A list of < code > < a > BatchResultErrorEntry < / a > < / code > items . */ public java . util . List < BatchResultErrorEntry > getFailed ( ) { } }
if ( failed == null ) { failed = new com . amazonaws . internal . SdkInternalList < BatchResultErrorEntry > ( ) ; } return failed ;
public class Allure { /** * Adds link to current test or step ( or fixture ) if any . Takes no effect * if no test run at the moment . * @ param name the name of link . * @ param type the type of link , used to display link icon in the report . * @ param url the link ' s url . */ public static void link ( final String name , final String type , final String url ) { } }
final Link link = new Link ( ) . setName ( name ) . setType ( type ) . setUrl ( url ) ; getLifecycle ( ) . updateTestCase ( testResult -> testResult . getLinks ( ) . add ( link ) ) ;
public class AmazonNeptuneClient { /** * Disassociates an Identity and Access Management ( IAM ) role from a DB cluster . * @ param removeRoleFromDBClusterRequest * @ return Result of the RemoveRoleFromDBCluster operation returned by the service . * @ throws DBClusterNotFoundException * < i > DBClusterIdentifier < / i > does not refer to an existing DB cluster . * @ throws DBClusterRoleNotFoundException * The specified IAM role Amazon Resource Name ( ARN ) is not associated with the specified DB cluster . * @ throws InvalidDBClusterStateException * The DB cluster is not in a valid state . * @ sample AmazonNeptune . RemoveRoleFromDBCluster * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / neptune - 2014-10-31 / RemoveRoleFromDBCluster " * target = " _ top " > AWS API Documentation < / a > */ @ Override public RemoveRoleFromDBClusterResult removeRoleFromDBCluster ( RemoveRoleFromDBClusterRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeRemoveRoleFromDBCluster ( request ) ;
public class NamespaceAlreadyExistsException {
    /**
     * The <code>CreatorRequestId</code> that was used to create the namespace.
     *
     * @param creatorRequestId
     *        The <code>CreatorRequestId</code> that was used to create the namespace.
     */
    @com.fasterxml.jackson.annotation.JsonProperty("CreatorRequestId")
    public void setCreatorRequestId(String creatorRequestId) {
        // Plain field assignment; bound to JSON property "CreatorRequestId".
        this.creatorRequestId = creatorRequestId;
    }
}
public class DeployerResolverOverriderConverter { /** * Creates a new ServerDetails object for resolver , this will take URL and name from the deployer ServerDetails as a default behaviour */ private ServerDetails createInitialResolveDetailsFromDeployDetails ( ServerDetails deployerDetails ) { } }
RepositoryConf oldResolveRepositoryConfig = deployerDetails . getResolveReleaseRepository ( ) ; RepositoryConf oldSnapshotResolveRepositoryConfig = deployerDetails . getResolveSnapshotRepository ( ) ; RepositoryConf resolverReleaseRepos = oldResolveRepositoryConfig == null ? RepositoryConf . emptyRepositoryConfig : oldResolveRepositoryConfig ; RepositoryConf resolveSnapshotRepos = oldSnapshotResolveRepositoryConfig == null ? RepositoryConf . emptyRepositoryConfig : oldSnapshotResolveRepositoryConfig ; return new ServerDetails ( deployerDetails . getArtifactoryName ( ) , deployerDetails . getArtifactoryUrl ( ) , null , null , resolverReleaseRepos , resolveSnapshotRepos , null , null ) ;
public class ApacheHttp31SLR { /** * / * ( non - Javadoc ) * @ see org . archive . util . binsearch . impl . HTTPSeekableLineReader # getHeaderValue ( java . lang . String ) */ @ Override public String getHeaderValue ( String headerName ) { } }
if ( activeMethod == null ) { return null ; } Header header = activeMethod . getResponseHeader ( headerName ) ; if ( header == null ) { return null ; } return header . getValue ( ) ;
public class AddonProjectConfiguratorImpl { /** * Checks if the { @ link Project } depends on the provided { @ link AddonId } */ @ Override public boolean dependsOnAddon ( final Project project , AddonId addonId ) { } }
DependencyInstaller dependencyInstaller = getDependencyInstaller ( ) ; Dependency dependency = toDependency ( addonId ) ; return dependencyInstaller . isInstalled ( project , dependency ) ;
public class Correspondence {
    /**
     * Returns a new correspondence which is like this one, except that the given formatter may be
     * used to format the difference between a pair of elements that do not correspond.
     *
     * <p>Note that, if the data you are asserting about contains null actual or expected values,
     * the formatter may be invoked with a null argument. If this causes it to throw a
     * {@link NullPointerException}, that will be taken to indicate that the values cannot be
     * diffed. (See {@link Correspondence#formatDiff} for more detail on how exceptions are
     * handled.) If you think null values are likely, it is slightly cleaner to have the formatter
     * return null in that case instead of throwing.
     *
     * @param formatter formats the diff between a non-corresponding actual/expected pair
     * @return a copy of this correspondence that additionally formats diffs with {@code formatter}
     */
    public Correspondence<A, E> formattingDiffsUsing(DiffFormatter<? super A, ? super E> formatter) {
        // Decorator: wraps this correspondence, adding only diff formatting.
        return new FormattingDiffs<>(this, formatter);
    }
}
public class GeoNear { /** * The point for which to find the closest documents . * If using a 2dsphere index , you can specify the point as either a GeoJSON point or legacy coordinate pair . * If using a 2d index , specify the point as a legacy coordinate pair . * @ return the point */ public double [ ] getNear ( ) { } }
double [ ] copy = new double [ 0 ] ; if ( nearLegacy != null ) { copy = new double [ nearLegacy . length ] ; System . arraycopy ( nearLegacy , 0 , copy , 0 , nearLegacy . length ) ; } return copy ;
public class Arc42DocumentationTemplate {
    /**
     * Adds a "Solution Strategy" section relating to a {@link SoftwareSystem} from one or more files.
     *
     * @param softwareSystem the {@link SoftwareSystem} the documentation content relates to
     * @param files one or more File objects that point to the documentation content
     * @return a documentation {@link Section}
     * @throws IOException if there is an error reading the files
     */
    public Section addSolutionStrategySection(SoftwareSystem softwareSystem, File... files) throws IOException {
        // Delegates to the generic section builder with the fixed arc42 title.
        return addSection(softwareSystem, "Solution Strategy", files);
    }
}
public class FullscreenVideoView { /** * Overwrite the default ProgressView to represent loading progress state * It is controlled by stopLoading and startLoading methods , that only sets it to VISIBLE and GONE * Remember to set RelativeLayout . LayoutParams before setting the view . * @ param v The custom View that will be used as progress view . * Set it to null to remove the default one */ public void setOnProgressView ( View v ) { } }
int progressViewVisibility = - 1 ; if ( this . onProgressView != null ) { progressViewVisibility = this . onProgressView . getVisibility ( ) ; removeView ( this . onProgressView ) ; } this . onProgressView = v ; if ( this . onProgressView != null ) { addView ( this . onProgressView ) ; if ( progressViewVisibility != - 1 ) this . onProgressView . setVisibility ( progressViewVisibility ) ; }
public class Packet {
    /**
     * Setter for data.
     *
     * @param buffer Packet data
     */
    public void setData(IoBuffer buffer) {
        if (noCopy) {
            log.trace("Using buffer reference");
            // Caller configured no-copy mode: keep a reference to the supplied buffer.
            this.data = buffer;
        } else {
            // try the backing array first if it exists
            if (buffer.hasArray()) {
                log.trace("Buffer has backing array, making a copy");
                byte[] copy = new byte[buffer.limit()];
                // mark/reset so the source buffer's position is left untouched
                buffer.mark();
                buffer.get(copy);
                buffer.reset();
                data = IoBuffer.wrap(copy);
            } else {
                log.trace("Buffer has no backing array, using ByteBuffer");
                // fallback to ByteBuffer
                // NOTE(review): this path writes into the existing 'data' buffer —
                // presumably 'data' is pre-allocated elsewhere; confirm it is
                // non-null and large enough before this branch can be taken.
                data.put(buffer.buf()).flip();
            }
        }
    }
}
public class VersionMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param version the Version model to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller that receives each bound field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(Version version, ProtocolMarshaller protocolMarshaller) {
        if (version == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each model field to its protocol marshalling descriptor.
            protocolMarshaller.marshall(version.getApplicationId(), APPLICATIONID_BINDING);
            protocolMarshaller.marshall(version.getCreationTime(), CREATIONTIME_BINDING);
            protocolMarshaller.marshall(version.getParameterDefinitions(), PARAMETERDEFINITIONS_BINDING);
            protocolMarshaller.marshall(version.getRequiredCapabilities(), REQUIREDCAPABILITIES_BINDING);
            protocolMarshaller.marshall(version.getResourcesSupported(), RESOURCESSUPPORTED_BINDING);
            protocolMarshaller.marshall(version.getSemanticVersion(), SEMANTICVERSION_BINDING);
            protocolMarshaller.marshall(version.getSourceCodeArchiveUrl(), SOURCECODEARCHIVEURL_BINDING);
            protocolMarshaller.marshall(version.getSourceCodeUrl(), SOURCECODEURL_BINDING);
            protocolMarshaller.marshall(version.getTemplateUrl(), TEMPLATEURL_BINDING);
        } catch (Exception e) {
            // Wrap any failure (including runtime errors) in the SDK's client exception.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CmsSerialDateController {
    /**
     * Set the serial pattern type.
     *
     * @param patternType the pattern type to set; must be a valid {@code PatternType} name
     */
    public void setPattern(String patternType) {
        final PatternType type = PatternType.valueOf(patternType);
        if (type != m_model.getPatternType()) {
            // Changing the pattern type invalidates exceptions; wrap the whole
            // reset so removeExceptionsOnChange can let the user confirm first.
            removeExceptionsOnChange(new Command() {

                public void execute() {

                    EndType oldEndType = m_model.getEndType();
                    m_model.setPatternType(type);
                    // Reset all pattern-specific state back to defaults.
                    m_model.setIndividualDates(null);
                    m_model.setInterval(getPatternDefaultValues().getInterval());
                    m_model.setEveryWorkingDay(Boolean.FALSE);
                    m_model.clearWeekDays();
                    m_model.clearIndividualDates();
                    m_model.clearWeeksOfMonth();
                    m_model.clearExceptions();
                    if (type.equals(PatternType.NONE) || type.equals(PatternType.INDIVIDUAL)) {
                        // Single event (or explicit date list): no series end needed.
                        m_model.setEndType(EndType.SINGLE);
                    } else if (oldEndType.equals(EndType.SINGLE)) {
                        // Switching from single to a repeating type: default to 10 occurrences.
                        m_model.setEndType(EndType.TIMES);
                        m_model.setOccurrences(10);
                        m_model.setSeriesEndDate(null);
                    }
                    m_model.setDayOfMonth(getPatternDefaultValues().getDayOfMonth());
                    m_model.setMonth(getPatternDefaultValues().getMonth());
                    if (type.equals(PatternType.WEEKLY)) {
                        m_model.setWeekDay(getPatternDefaultValues().getWeekDay());
                    }
                    // Notify listeners that the model changed.
                    valueChanged();
                }
            });
        }
    }
}
public class YarnHelixUtils {
    /**
     * Read a collection of {@link Token}s from a given file.
     *
     * @param tokenFilePath the token file path
     * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
     * @return a collection of {@link Token}s
     * @throws IOException if the token file cannot be read
     */
    public static Collection<Token<? extends TokenIdentifier>> readTokensFromFile(Path tokenFilePath, Configuration configuration) throws IOException {
        // Hadoop's Credentials class understands the on-disk token-storage format.
        return Credentials.readTokenStorageFile(tokenFilePath, configuration).getAllTokens();
    }
}
public class GenericStats {
    /**
     * {@inheritDoc}.
     * Called by Accumulator.
     */
    @Override
    public String getValueByNameAsString(String valueName, String intervalName, TimeUnit timeUnit) {
        if (valueName == null || valueName.isEmpty())
            throw new IllegalArgumentException("Value name can not be empty");
        StatValue statValue = statValueMap.get(valueName);
        // NOTE(review): 'statValue' only gates the metric scan below — its value
        // is never used inside the loop; presumably a metric caption exists iff
        // the map contains the entry. Confirm before simplifying.
        if (statValue != null) {
            for (T metric : metrics) {
                if (valueName.equals(metric.getCaption()))
                    return getStatValueAsString(metric, intervalName);
            }
        }
        // Fall back to the parent implementation when no metric matches.
        return super.getValueByNameAsString(valueName, intervalName, timeUnit);
    }
}
public class LogMessage { /** * This function takes in the message type and sets the appropriate Type . * @ param type This is the error type as a string . */ private void setTypeString ( final String type ) { } }
if ( type == null ) { this . type = null ; } else if ( type . equalsIgnoreCase ( "WARN" ) ) { this . type = Type . WARN ; } else if ( type . equalsIgnoreCase ( "ERROR" ) ) { this . type = Type . ERROR ; } else if ( type . equalsIgnoreCase ( "DEBUG" ) ) { this . type = Type . DEBUG ; } else if ( type . equalsIgnoreCase ( "INFO" ) ) { this . type = Type . INFO ; }
public class SearchWidgetRegistry { /** * Get a list with all the ids + names of the search widgets in the registry . * @ return list of widget id / name pairs */ public static LinkedHashMap < String , String > getSearchWidgetMapping ( ) { } }
LinkedHashMap < String , String > map = new LinkedHashMap < String , String > ( ) ; for ( SearchWidgetCreator swc : REGISTRY . values ( ) ) { map . put ( swc . getSearchWidgetId ( ) , swc . getSearchWidgetName ( ) ) ; } return map ;
public class RunsInner {
    /**
     * Gets the detailed information for a given run.
     *
     * @param resourceGroupName The name of the resource group to which the container registry belongs.
     * @param registryName The name of the container registry.
     * @param runId The run ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the RunInner object if successful.
     */
    public RunInner get(String resourceGroupName, String registryName, String runId) {
        // Synchronous facade: block on the async implementation and unwrap the body.
        return getWithServiceResponseAsync(resourceGroupName, registryName, runId).toBlocking().single().body();
    }
}
public class JavacTrees {
    /**
     * Prints a message of the specified kind at the location of the
     * tree within the provided compilation unit.
     *
     * @param kind the kind of message
     * @param msg the message, or an empty string if none
     * @param t the tree to use as a position hint
     * @param root the compilation unit that contains tree
     */
    @Override
    @DefinedBy(Api.COMPILER_TREE)
    public void printMessage(Diagnostic.Kind kind, CharSequence msg, com.sun.source.tree.Tree t, com.sun.source.tree.CompilationUnitTree root) {
        // Downcast the public Tree to the internal JCTree to obtain its position.
        printMessage(kind, msg, ((JCTree) t).pos(), root);
    }
}
public class Validator { /** * Merges a multi - value property JSONArray . * trims to max length currently 100 items , on a FIFO basis * please clean the key and newValues values before calling this * @ param currentValues current JSONArray property value * @ param newValues JSONArray of new values * @ param action String the action to take relative to the new values ( $ add , $ remove ) * @ param key String the property key * @ return The { @ link ValidationResult } object containing the merged value , * and the error code ( if any ) */ ValidationResult mergeMultiValuePropertyForKey ( JSONArray currentValues , JSONArray newValues , String action , String key ) { } }
ValidationResult vr = new ValidationResult ( ) ; Boolean remove = REMOVE_VALUES_OPERATION . equals ( action ) ; vr = _mergeListInternalForKey ( key , currentValues , newValues , remove , vr ) ; return vr ;
public class Replacer {
    /**
     * Makes a Replacer that replaces a literal String key in dict with the corresponding String
     * value in dict. Doesn't need escapes in the Strings it searches for, but cannot search for
     * the exact two characters backslash-then-capital-E, because it finds literal Strings using
     * {@code \\Q...\\E}. Uses only default modes; you can get the Pattern from this afterwards
     * and set its flags with its setFlags() method. The replacement values are also literal.
     * If the Map is a sorted Map or a LinkedHashMap, the order search strings are tried is
     * stable; the same is not necessarily true for HashMap.
     *
     * @param dict a Map (hopefully with stable order) with search String keys and replacement String values
     * @return a Replacer that will act as a replacement table for the given Strings
     */
    public static Replacer makeTable(Map<String, String> dict) {
        // An empty/missing table yields a replacer whose pattern ("$") never
        // consumes input, so it substitutes nothing.
        if (dict == null || dict.isEmpty())
            return new Replacer(Pattern.compile("$"), new DummySubstitution(""));
        TableSubstitution tab = new TableSubstitution(new LinkedHashMap<String, String>(dict));
        StringBuilder sb = new StringBuilder(128);
        sb.append("(?>"); // atomic group: the first alternative that matches wins
        for (String s : tab.dictionary.keySet()) {
            // Quote each key literally — this is why keys must not contain "\E".
            sb.append("\\Q");
            sb.append(s);
            sb.append("\\E|");
        }
        if (sb.length() > 3)
            sb.setCharAt(sb.length() - 1, ')'); // replace the trailing '|' with the group close
        else
            sb.append(')');
        return new Replacer(Pattern.compile(sb.toString()), tab);
    }
}
public class FacesServletAutoConfiguration {
    /**
     * This bean registers the {@link FacesServlet}.
     * This {@link ServletRegistrationBean} also sets two
     * {@link ServletContext#setAttribute(String, Object) servlet-context attributes} to inform
     * Mojarra and MyFaces about the dynamically added Servlet.
     *
     * @param facesServletProperties The properties for the {@link FacesServlet}-registration.
     * @return A custom {@link ServletRegistrationBean} which registers the {@link FacesServlet}.
     */
    @Bean
    public ServletRegistrationBean<FacesServlet> facesServletRegistrationBean(FacesServletProperties facesServletProperties) {
        ServletRegistrationBean<FacesServlet> facesServletServletRegistrationBean =
            new ServletRegistrationBean<FacesServlet>(new FacesServlet()) {

                @Override
                protected ServletRegistration.Dynamic addRegistration(String description, ServletContext servletContext) {
                    ServletRegistration.Dynamic servletRegistration = super.addRegistration(description, servletContext);
                    if (servletRegistration != null) {
                        // Tell MyFaces and Mojarra that the FacesServlet was added
                        // programmatically, so they skip their own auto-registration.
                        servletContext.setAttribute("org.apache.myfaces.DYNAMICALLY_ADDED_FACES_SERVLET", true);
                        servletContext.setAttribute("com.sun.faces.facesInitializerMappingsAdded", true);
                    }
                    return servletRegistration;
                }
            };
        // Copy all registration settings from the externalized properties.
        facesServletServletRegistrationBean.setName(facesServletProperties.getName());
        facesServletServletRegistrationBean.setUrlMappings(facesServletProperties.getUrlMappings());
        facesServletServletRegistrationBean.setLoadOnStartup(facesServletProperties.getLoadOnStartup());
        facesServletServletRegistrationBean.setEnabled(facesServletProperties.isEnabled());
        facesServletServletRegistrationBean.setAsyncSupported(facesServletProperties.isAsyncSupported());
        facesServletServletRegistrationBean.setOrder(facesServletProperties.getOrder());
        return facesServletServletRegistrationBean;
    }
}
public class TransliterationRule {
    /**
     * Internal method. Returns 8-bit index value for this rule.
     * This is the low byte of the first character of the key,
     * unless the first character of the key is a set. If it's a
     * set, or otherwise can match multiple keys, the index value is -1.
     */
    final int getIndexValue() {
        if (anteContextLength == pattern.length()) {
            // A pattern with just ante context {such as foo)>bar} can
            // match any key.
            return -1;
        }
        int c = UTF16.charAt(pattern, anteContextLength);
        // If the first key char resolves to a matcher (e.g. a set), it can
        // match multiple keys, so no single 8-bit index applies.
        return data.lookupMatcher(c) == null ? (c & 0xFF) : -1;
    }
}
public class TraceSummary { /** * A list of resource ARNs for any resource corresponding to the trace segments . * @ param resourceARNs * A list of resource ARNs for any resource corresponding to the trace segments . */ public void setResourceARNs ( java . util . Collection < ResourceARNDetail > resourceARNs ) { } }
if ( resourceARNs == null ) { this . resourceARNs = null ; return ; } this . resourceARNs = new java . util . ArrayList < ResourceARNDetail > ( resourceARNs ) ;
public class FullBoltMetrics { /** * since we could not have default values for them */ public void initMultiCountMetrics ( PhysicalPlanHelper helper ) { } }
// For bolt , we would consider both input stream and output stream List < TopologyAPI . InputStream > inputs = helper . getMyBolt ( ) . getInputsList ( ) ; for ( TopologyAPI . InputStream inputStream : inputs ) { String streamId = inputStream . getStream ( ) . getId ( ) ; String globalStreamId = new StringBuilder ( inputStream . getStream ( ) . getComponentName ( ) ) . append ( "/" ) . append ( streamId ) . toString ( ) ; ackCount . scope ( streamId ) ; failCount . scope ( streamId ) ; executeCount . scope ( streamId ) ; executeTimeNs . scope ( streamId ) ; ackCount . scope ( globalStreamId ) ; failCount . scope ( globalStreamId ) ; executeCount . scope ( globalStreamId ) ; executeTimeNs . scope ( globalStreamId ) ; } List < TopologyAPI . OutputStream > outputs = helper . getMyBolt ( ) . getOutputsList ( ) ; for ( TopologyAPI . OutputStream outputStream : outputs ) { String streamId = outputStream . getStream ( ) . getId ( ) ; emitCount . scope ( streamId ) ; }
public class JournalSegment {
    /**
     * Completes a journal entry started by a write() call.
     *
     * @return always true
     */
    public boolean completeWrite() {
        // Snapshot the running CRC accumulated over the entry payload.
        int digest = (int) _crc.getValue();
        byte[] headerBuffer = _headerBuffer;
        // Header layout written here: bytes [0..1] = int16 zero, bytes [2..5] = CRC digest.
        BitsUtil.writeInt16(headerBuffer, 0, 0);
        BitsUtil.writeInt(headerBuffer, 2, digest);
        writeImpl(headerBuffer, 0, 6);
        return true;
    }
}
public class L1SegmentedDataContainer {
    /**
     * Removes all entries that map to the given segments.
     *
     * @param segments the segments to clear data for
     */
    @Override
    public void clear(IntSet segments) {
        IntSet extraSegments = null;
        PrimitiveIterator.OfInt iter = segments.iterator();
        // First try to just clear the respective maps
        while (iter.hasNext()) {
            int segment = iter.nextInt();
            ConcurrentMap<K, InternalCacheEntry<K, V>> map = maps.get(segment);
            if (map != null) {
                map.clear();
            } else {
                // If we don't have a map for a segment we have to later go through the unowned segments and remove
                // those entries separately
                if (extraSegments == null) {
                    extraSegments = IntSets.mutableEmptySet(segments.size());
                }
                extraSegments.set(segment);
            }
        }
        if (extraSegments != null) {
            // Effectively-final alias so the lambda below can capture it.
            IntSet finalExtraSegments = extraSegments;
            nonOwnedEntries.keySet().removeIf(k -> finalExtraSegments.contains(getSegmentForKey(k)));
        }
    }
}
public class WindowedSequence {
    /**
     * Returns the window specified at the given index in offsets, i.e. asking
     * for position 2 in a moving window sequence of size 3 will get you
     * the window starting at position 4.
     */
    public SequenceView<C> get(int index) {
        int start = toStartIndex(index);
        // NOTE(review): 'end' is computed from the raw window index, not from
        // 'start' — if toStartIndex() scales the index (as the javadoc example
        // suggests), this looks like an off-by-window bug (expected
        // start + getWindowSize() - 1). Confirm toStartIndex()'s contract
        // before changing.
        int end = index + (getWindowSize() - 1);
        return getBackingSequence().getSubSequence(start, end);
    }
}
public class BrowserDialog {
    /**
     * This method initializes jPanelBottom (lazily, on first call).
     *
     * @return javax.swing.JPanel
     */
    private JPanel getJPanelBottom() {
        if (jPanelBottom == null) {
            // Constraints for the stop button (column 2, right-aligned).
            GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
            gridBagConstraints2.insets = new java.awt.Insets(5, 3, 5, 5);
            gridBagConstraints2.gridy = 0;
            gridBagConstraints2.anchor = java.awt.GridBagConstraints.EAST;
            gridBagConstraints2.gridx = 2;
            // Constraints for the close button (column 3).
            GridBagConstraints gridBagConstraints3 = new GridBagConstraints();
            gridBagConstraints3.insets = new java.awt.Insets(5, 3, 5, 2);
            gridBagConstraints3.gridy = 0;
            gridBagConstraints3.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints3.gridx = 3;
            // Constraints for the capture button (column 1).
            GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
            gridBagConstraints1.insets = new java.awt.Insets(5, 3, 5, 2);
            gridBagConstraints1.gridy = 0;
            gridBagConstraints1.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints1.gridx = 1;
            // Constraints for the label (column 0 absorbs all extra space).
            GridBagConstraints gridBagConstraints = new GridBagConstraints();
            gridBagConstraints.insets = new java.awt.Insets(10, 5, 10, 2);
            gridBagConstraints.gridy = 0;
            gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints.weighty = 1.0D;
            gridBagConstraints.weightx = 1.0D;
            gridBagConstraints.gridx = 0;
            jLabel = new JLabel();
            jLabel.setText(" ");
            jLabel.setComponentOrientation(java.awt.ComponentOrientation.UNKNOWN);
            jPanelBottom = new JPanel();
            jPanelBottom.setLayout(new GridBagLayout());
            jPanelBottom.add(jLabel, gridBagConstraints);
            jPanelBottom.add(getBtnCapture(), gridBagConstraints1);
            jPanelBottom.add(getBtnStop(), gridBagConstraints2);
            jPanelBottom.add(getBtnClose(), gridBagConstraints3);
        }
        return jPanelBottom;
    }
}
public class IndexElasticsearchUpdater {
    /**
     * Create a new index in Elasticsearch.
     *
     * @param client Elasticsearch client
     * @param index Index name
     * @param settings Settings if any, null if no specific settings
     * @throws Exception if the elasticsearch API call fails or the creation
     *         is not acknowledged
     */
    @Deprecated
    private static void createIndexWithSettingsInElasticsearch(Client client, String index, String settings) throws Exception {
        logger.trace("createIndex([{}])", index);
        assert client != null;
        assert index != null;
        CreateIndexRequestBuilder cirb = client.admin().indices().prepareCreate(index);
        // If there are settings for this index, we use it. If not, using Elasticsearch defaults.
        if (settings != null) {
            logger.trace("Found settings for index [{}]: [{}]", index, settings);
            cirb.setSource(settings, XContentType.JSON);
        }
        CreateIndexResponse createIndexResponse = cirb.execute().actionGet();
        if (!createIndexResponse.isAcknowledged()) {
            // Surface an unacknowledged creation as a hard failure.
            logger.warn("Could not create index [{}]", index);
            throw new Exception("Could not create index [" + index + "].");
        }
        logger.trace("/createIndex([{}])", index);
    }
}
public class JGoogleAnalyticsTracker { /** * Define the proxy to use for all GA tracking requests . * Call this static method early ( before creating any tracking requests ) . * @ param proxyAddr " addr : port " of the proxy to use ; may also be given as URL ( " http : / / addr : port / " ) . */ public static void setProxy ( String proxyAddr ) { } }
if ( proxyAddr != null ) { Scanner s = new Scanner ( proxyAddr ) ; // Split into " proxyAddr : proxyPort " . proxyAddr = null ; int proxyPort = 8080 ; try { s . findInLine ( "(http://|)([^:/]+)(:|)([0-9]*)(/|)" ) ; MatchResult m = s . match ( ) ; if ( m . groupCount ( ) >= 2 ) { proxyAddr = m . group ( 2 ) ; } if ( ( m . groupCount ( ) >= 4 ) && ( ! m . group ( 4 ) . isEmpty ( ) ) ) { proxyPort = Integer . parseInt ( m . group ( 4 ) ) ; } } finally { s . close ( ) ; } if ( proxyAddr != null ) { SocketAddress sa = new InetSocketAddress ( proxyAddr , proxyPort ) ; setProxy ( new Proxy ( Type . HTTP , sa ) ) ; } }
public class NetworkWatchersInner {
    /**
     * Configures flow log on a specified resource.
     *
     * @param resourceGroupName The name of the network watcher resource group.
     * @param networkWatcherName The name of the network watcher resource.
     * @param parameters Parameters that define the configuration of flow log.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the FlowLogInformationInner object if successful.
     */
    public FlowLogInformationInner beginSetFlowLogConfiguration(String resourceGroupName, String networkWatcherName, FlowLogInformationInner parameters) {
        // Synchronous facade: block on the async call and unwrap the response body.
        return beginSetFlowLogConfigurationWithServiceResponseAsync(resourceGroupName, networkWatcherName, parameters).toBlocking().single().body();
    }
}
public class Queue {
    /**
     * Touching a reserved message extends its timeout to the specified duration.
     *
     * @param id The ID of the message to delete.
     * @param reservationId This id is returned when you reserve a message and must be provided to delete a message that is reserved.
     * @param timeout After timeout (in seconds), item will be placed back onto queue.
     * @throws io.iron.ironmq.HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws java.io.IOException If there is an error accessing the IronMQ server.
     */
    public MessageOptions touchMessage(String id, String reservationId, int timeout) throws IOException {
        // Convenience overload: widen the int timeout and delegate.
        return touchMessage(id, reservationId, (long) timeout);
    }
}
public class CmsMessageBundleEditorModel { /** * Locks all files of the currently edited bundle ( that contain the provided key if it is not null ) . * @ param key the key that must be contained in the localization to lock . If null , the files for all localizations are locked . * @ throws CmsException thrown if locking fails . */ private void lockAllLocalizations ( String key ) throws CmsException { } }
for ( Locale l : m_bundleFiles . keySet ( ) ) { if ( ( null == l ) || m_localizations . get ( l ) . containsKey ( key ) ) { lockLocalization ( l ) ; } }
public class CmsContainerpageService {
    /**
     * Internal method for saving a group container.<p>
     *
     * @param cms the cms context
     * @param pageStructureId the container page structure id
     * @param groupContainer the group container to save
     * @return the container element representing the group container, paired with
     *         the status of every element that became a deletion candidate
     * @throws CmsException if something goes wrong
     * @throws CmsXmlException if the XML processing goes wrong
     */
    private CmsPair<CmsContainerElement, List<CmsRemovedElementStatus>> internalSaveGroupContainer(
        CmsObject cms,
        CmsUUID pageStructureId,
        CmsGroupContainer groupContainer) throws CmsException, CmsXmlException {

        ensureSession();
        CmsResource pageResource = getCmsObject().readResource(pageStructureId, CmsResourceFilter.IGNORE_EXPIRATION);
        CmsResource groupContainerResource = null;
        if (groupContainer.isNew()) {
            // New group container: create the backing resource from the type configuration
            // and push the generated path/id back into the client-side bean.
            CmsADEConfigData config = getConfigData(pageResource.getRootPath());
            CmsResourceTypeConfig typeConfig = config.getResourceType(
                CmsResourceTypeXmlContainerPage.GROUP_CONTAINER_TYPE_NAME);
            groupContainerResource = typeConfig.createNewElement(getCmsObject(), pageResource.getRootPath());
            String resourceName = cms.getSitePath(groupContainerResource);
            groupContainer.setSitePath(resourceName);
            groupContainer.setClientId(groupContainerResource.getStructureId().toString());
        }
        if (groupContainerResource == null) {
            // Existing group container: resolve its resource from the client id.
            CmsUUID id = convertToServerId(groupContainer.getClientId());
            groupContainerResource = cms.readResource(id, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED);
        }
        CmsGroupContainerBean groupContainerBean = getGroupContainerBean(
            groupContainer,
            pageResource,
            Locale.ENGLISH.toString());
        cms.lockResourceTemporary(groupContainerResource);
        CmsFile groupContainerFile = cms.readFile(groupContainerResource);
        Locale locale = Locale.ENGLISH;
        CmsXmlGroupContainer xmlGroupContainer = CmsXmlGroupContainerFactory.unmarshal(cms, groupContainerFile);
        // Snapshot element ids before the save so removed elements can be detected afterwards.
        Set<CmsUUID> oldElementIds = getGroupElementIds(xmlGroupContainer, locale);
        xmlGroupContainer.clearLocales();
        xmlGroupContainer.save(cms, groupContainerBean, locale);
        cms.unlockResource(groupContainerResource);
        Set<CmsUUID> newElementIds = getGroupElementIds(xmlGroupContainer, locale);
        Set<CmsUUID> removedElementIds = Sets.difference(oldElementIds, newElementIds);
        List<CmsRemovedElementStatus> deletionCandidateStatuses = new ArrayList<CmsRemovedElementStatus>();
        for (CmsUUID removedId : removedElementIds) {
            CmsRemovedElementStatus status = internalGetRemovedElementStatus(removedId, null);
            if (status.isDeletionCandidate()) {
                deletionCandidateStatuses.add(status);
            }
        }
        // Build the element bean describing the saved group container for the client.
        CmsContainerElement element = new CmsContainerElement();
        element.setClientId(groupContainerFile.getStructureId().toString());
        element.setSitePath(cms.getSitePath(groupContainerFile));
        element.setResourceType(CmsResourceTypeXmlContainerPage.GROUP_CONTAINER_TYPE_NAME);
        return CmsPair.create(element, deletionCandidateStatuses);
    }
}
public class BaseSession { /** * If this database is in my database list , return this object . * @ param database The database to lookup . * @ return this if successful . */ public DatabaseSession getDatabaseSession ( BaseDatabase database ) { } }
for ( int iFieldSeq = 0 ; iFieldSeq < this . getSessionObjectCount ( ) ; iFieldSeq ++ ) { // See if any of my children want to handle this command if ( this . getSessionObjectAt ( iFieldSeq ) . getDatabaseSession ( database ) != null ) return this . getSessionObjectAt ( iFieldSeq ) . getDatabaseSession ( database ) ; } return null ; // Not found
public class TFGraphMapper {
    /**
     * Strip the variable suffix to give the node name: "Unique:1" -> "Unique".
     * A name without a ':' separator is returned unchanged.
     */
    public String varNameToOpName(String varName) {
        final int colon = varName.lastIndexOf(':');
        return (colon < 0) ? varName : varName.substring(0, colon);
    }
}
public class AccountHeaderBuilder { /** * get the current selection * @ return */ protected int getCurrentSelection ( ) { } }
if ( mCurrentProfile != null && mProfiles != null ) { int i = 0 ; for ( IProfile profile : mProfiles ) { if ( profile == mCurrentProfile ) { return i ; } i ++ ; } } return - 1 ;
public class RealConnection {
    /**
     * Refuse incoming streams.
     *
     * Any stream the peer opens toward us is closed immediately with
     * REFUSED_STREAM; no error cause is attached.
     */
    @Override
    public void onStream(Http2Stream stream) throws IOException {
        stream.close(ErrorCode.REFUSED_STREAM, null);
    }
}
public class HoconFactory { /** * / * Internal methods */ protected Reader _createReader ( InputStream in , JsonEncoding enc , IOContext ctxt ) throws IOException { } }
if ( enc == null ) { enc = JsonEncoding . UTF8 ; } // default to UTF - 8 if encoding missing if ( enc == JsonEncoding . UTF8 ) { boolean autoClose = ctxt . isResourceManaged ( ) || isEnabled ( JsonParser . Feature . AUTO_CLOSE_SOURCE ) ; return new UTF8Reader ( in , autoClose ) ; } return new InputStreamReader ( in , enc . getJavaName ( ) ) ;
public class Vector4f { /** * Set this { @ link Vector4f } to the values of the given < code > v < / code > . * @ param v * the vector whose values will be copied into this * @ return this */ public Vector4f set ( Vector4fc v ) { } }
if ( v instanceof Vector4f ) { MemUtil . INSTANCE . copy ( ( Vector4f ) v , this ) ; } else { this . x = v . x ( ) ; this . y = v . y ( ) ; this . z = v . z ( ) ; this . w = v . w ( ) ; } return this ;
public class CSL {
    /**
     * Loads a CSL style from the classpath. For example, if the given name
     * is <code>ieee</code> this method will load the file <code>/ieee.csl</code>.
     * A http(s) URL is also accepted: a classpath style of the same base name
     * is preferred, otherwise the style is fetched from the URL itself.
     * Dependent styles are resolved recursively through their independent parent.
     * @param styleName the style's name
     * @return the serialized XML representation of the style
     * @throws IOException if the style could not be loaded
     */
    private String loadStyle(String styleName) throws IOException {
        URL url;
        if (styleName.startsWith("http://") || styleName.startsWith("https://")) {
            try {
                // try to load matching style from classpath
                return loadStyle(styleName.substring(styleName.lastIndexOf('/') + 1));
            } catch (FileNotFoundException e) {
                // there is no matching style in classpath; fetch from the URL
                url = new URL(styleName);
            }
        } else {
            // normalize file name: ensure ".csl" suffix and leading "/"
            if (!styleName.endsWith(".csl")) {
                styleName = styleName + ".csl";
            }
            if (!styleName.startsWith("/")) {
                styleName = "/" + styleName;
            }
            // try to find style in classpath
            url = getClass().getResource(styleName);
            if (url == null) {
                throw new FileNotFoundException("Could not find style in " + "classpath: " + styleName);
            }
        }
        // load style
        String result = CSLUtils.readURLToString(url, "UTF-8");
        // handle dependent styles
        if (isDependent(result)) {
            String independentParentLink;
            try {
                independentParentLink = getIndependentParentLink(result);
            } catch (ParserConfigurationException | IOException | SAXException e) {
                throw new IOException("Could not load independent parent style", e);
            }
            if (independentParentLink == null) {
                throw new IOException("Dependent style does not have an " + "independent parent");
            }
            // NOTE(review): recursion assumes parent links eventually reach an
            // independent style; a cyclic chain would recurse forever — TODO confirm upstream guards
            return loadStyle(independentParentLink);
        }
        return result;
    }
}
public class Range { /** * @ param ranges in - out argument * TODO : * o expensive , the list is modified . . . * * states that touch each other are not merged */ public static void normalizeRanges ( List < Range > ranges ) { } }
int i , todo , max ; Range current , op , and ; todo = 0 ; while ( todo < ranges . size ( ) ) { // take the first range , and - it with all others and // append fractions to the end . current = ( Range ) ranges . get ( todo ) ; max = ranges . size ( ) ; // don ' t grow max inside the for - loop for ( i = todo + 1 ; i < max ; i ++ ) { op = ( Range ) ranges . get ( i ) ; and = current . and ( op ) ; if ( and != null ) { current . remove ( and , ranges ) ; op . remove ( and , ranges ) ; ranges . remove ( i ) ; i -- ; max -- ; current = and ; } } ranges . set ( todo , current ) ; todo ++ ; }
public class ProcessUtils {
    /**
     * Runs process.
     * @param args List of process args (program followed by its arguments;
     *             passed as an array, so no shell interpretation happens).
     * @return A ProcessResult data structure.
     * @throws IOException If interrupted, we throw an IOException. If non-zero
     * exit code, we throw an IOException (This may need to change).
     */
    public static ProcessUtils.ProcessResult exec(String[] args) throws IOException {
        Process p = Runtime.getRuntime().exec(args);
        ProcessUtils pu = new ProcessUtils();
        // Gobble up any output on daemon threads so the child process cannot
        // block on a full stdout/stderr pipe.
        StreamGobbler err = pu.new StreamGobbler(p.getErrorStream(), "stderr");
        err.setDaemon(true);
        err.start();
        StreamGobbler out = pu.new StreamGobbler(p.getInputStream(), "stdout");
        out.setDaemon(true);
        out.start();
        int exitVal;
        try {
            exitVal = p.waitFor();
        } catch (InterruptedException e) {
            // NOTE(review): the thread's interrupt status is not restored here
            // before converting to IOException — TODO consider Thread.currentThread().interrupt()
            throw new IOException("Wait on process " + Arrays.toString(args) + " interrupted: " + e.getMessage());
        }
        ProcessUtils.ProcessResult result = pu.new ProcessResult(args, exitVal, out.getSink(), err.getSink());
        if (exitVal != 0) {
            // Non-zero exit is treated as failure; result.toString() carries the captured output.
            throw new IOException(result.toString());
        } else if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info(result.toString());
        }
        return result;
    }
}
public class Qcow2OverlapChecks { @ Nonnull public static Qcow2OverlapChecks flags ( @ Nonnull Qcow2OverlapCheckFlags flags ) { } }
Qcow2OverlapChecks self = new Qcow2OverlapChecks ( ) ; self . flags = flags ; return self ;
public class AbstractFxmlView { /** * Load synchronously . * @ param resource * the resource * @ param bundle * the bundle * @ return the FXML loader * @ throws IllegalStateException * the illegal state exception */ private FXMLLoader loadSynchronously ( final URL resource , final Optional < ResourceBundle > bundle ) throws IllegalStateException { } }
final FXMLLoader loader = new FXMLLoader ( resource , bundle . orElse ( null ) ) ; loader . setControllerFactory ( this :: createControllerForType ) ; try { loader . load ( ) ; } catch ( final IOException | IllegalStateException e ) { throw new IllegalStateException ( "Cannot load " + getConventionalName ( ) , e ) ; } return loader ;
public class ParallaxImageView {
    /**
     * Sets the image view's translation coordinates. These values must be between -1 and 1,
     * representing the translation percentage from the center.
     * @param x the horizontal translation
     * @param y the vertical translation
     * @throws IllegalArgumentException if |x| > 1 or |y| > 1
     */
    private void setTranslate(float x, float y) {
        if (Math.abs(x) > 1 || Math.abs(y) > 1) {
            throw new IllegalArgumentException("Parallax effect cannot translate more than 100% of its off-screen size");
        }
        float xScale, yScale;
        if (mScaledIntensities) {
            // Set both scales to their offset values
            xScale = mXOffset;
            yScale = mYOffset;
        } else {
            // Set both scales to the max offset (should be negative, so smaller absolute value)
            xScale = Math.max(mXOffset, mYOffset);
            yScale = Math.max(mXOffset, mYOffset);
        }
        // Make sure below maximum jump limit.
        // NOTE(review): the divisions below assume the scales are non-zero — TODO
        // confirm the offsets can never be 0 when mMaximumJump > 0.
        if (mMaximumJump > 0) {
            // Limit x jump: clamp the change relative to the previous translation
            if (x - mXTranslation / xScale > mMaximumJump) {
                x = mXTranslation / xScale + mMaximumJump;
            } else if (x - mXTranslation / xScale < -mMaximumJump) {
                x = mXTranslation / xScale - mMaximumJump;
            }
            // Limit y jump
            if (y - mYTranslation / yScale > mMaximumJump) {
                y = mYTranslation / yScale + mMaximumJump;
            } else if (y - mYTranslation / yScale < -mMaximumJump) {
                y = mYTranslation / yScale - mMaximumJump;
            }
        }
        // Store the scaled translation and re-apply the image matrix.
        mXTranslation = x * xScale;
        mYTranslation = y * yScale;
        configureMatrix();
    }
}
public class JsonSerDe {
    /**
     * Utility method to extract current expected field from given JsonParser.
     * isTokenCurrent is a boolean variable also passed in, which determines
     * if the JsonParser is already at the token we expect to read next, or
     * needs advancing to the next before we read.
     *
     * JSON nulls map to Java null for every primitive type; complex types
     * (ARRAY/MAP/STRUCT) recurse into this method for their members.
     */
    private Object extractCurrentField(JsonParser p, HCatFieldSchema hcatFieldSchema, boolean isTokenCurrent) throws IOException {
        Object val = null;
        JsonToken valueToken;
        if (isTokenCurrent) {
            valueToken = p.getCurrentToken();
        } else {
            valueToken = p.nextToken();
        }
        // Dispatch on the expected HCat column type.
        switch (hcatFieldSchema.getType()) {
        case INT:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getIntValue();
            break;
        case TINYINT:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getByteValue();
            break;
        case SMALLINT:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getShortValue();
            break;
        case BIGINT:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getLongValue();
            break;
        case BOOLEAN:
            // Booleans are read as text and converted, rather than via getBooleanValue().
            String bval = (valueToken == JsonToken.VALUE_NULL) ? null : p.getText();
            if (bval != null) {
                val = Boolean.valueOf(bval);
            } else {
                val = null;
            }
            break;
        case FLOAT:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getFloatValue();
            break;
        case DOUBLE:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getDoubleValue();
            break;
        case STRING:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getText();
            break;
        case BINARY:
            throw new IOException("JsonSerDe does not support BINARY type");
        case DATE:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : Date.valueOf(p.getText());
            break;
        case TIMESTAMP:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : tsParser.parseTimestamp(p.getText());
            break;
        case DECIMAL:
            val = (valueToken == JsonToken.VALUE_NULL) ? null : HiveDecimal.create(p.getText());
            break;
        case VARCHAR:
            int vLen = ((BaseCharTypeInfo) hcatFieldSchema.getTypeInfo()).getLength();
            val = (valueToken == JsonToken.VALUE_NULL) ? null : new HiveVarchar(p.getText(), vLen);
            break;
        case CHAR:
            int cLen = ((BaseCharTypeInfo) hcatFieldSchema.getTypeInfo()).getLength();
            val = (valueToken == JsonToken.VALUE_NULL) ? null : new HiveChar(p.getText(), cLen);
            break;
        case ARRAY:
            if (valueToken == JsonToken.VALUE_NULL) {
                val = null;
                break;
            }
            if (valueToken != JsonToken.START_ARRAY) {
                throw new IOException("Start of Array expected");
            }
            // All elements share the single array-element schema; the element
            // token is already current after nextToken(), hence 'true'.
            List<Object> arr = new ArrayList<Object>();
            while ((valueToken = p.nextToken()) != JsonToken.END_ARRAY) {
                arr.add(extractCurrentField(p, hcatFieldSchema.getArrayElementSchema().get(0), true));
            }
            val = arr;
            break;
        case MAP:
            if (valueToken == JsonToken.VALUE_NULL) {
                val = null;
                break;
            }
            if (valueToken != JsonToken.START_OBJECT) {
                throw new IOException("Start of Object expected");
            }
            // JSON field names become map keys converted to the map's key type.
            Map<Object, Object> map = new LinkedHashMap<Object, Object>();
            HCatFieldSchema valueSchema = hcatFieldSchema.getMapValueSchema().get(0);
            while ((valueToken = p.nextToken()) != JsonToken.END_OBJECT) {
                Object k = getObjectOfCorrespondingPrimitiveType(p.getCurrentName(), hcatFieldSchema.getMapKeyTypeInfo());
                Object v = extractCurrentField(p, valueSchema, false);
                map.put(k, v);
            }
            val = map;
            break;
        case STRUCT:
            if (valueToken == JsonToken.VALUE_NULL) {
                val = null;
                break;
            }
            if (valueToken != JsonToken.START_OBJECT) {
                throw new IOException("Start of Object expected");
            }
            HCatSchema subSchema = hcatFieldSchema.getStructSubSchema();
            int sz = subSchema.getFieldNames().size();
            // Pre-fill with nulls so fields absent from the JSON keep their slot.
            List<Object> struct = new ArrayList<Object>(Collections.nCopies(sz, null));
            while ((valueToken = p.nextToken()) != JsonToken.END_OBJECT) {
                populateRecord(struct, valueToken, p, subSchema);
            }
            val = struct;
            break;
        default:
            LOG.error("Unknown type found: " + hcatFieldSchema.getType());
            return null;
        }
        return val;
    }
}
public class CRest {
    /**
     * <p>Build a <b>CRest</b> instance that authenticate all request using OAuth.</p>
     * @param consumerKey consumer key to use
     * @param consumerSecret consumer secret to use
     * @param accessToken access token to use
     * @param accessTokenSecret access token secret to use
     * @param sessionHandle session handle to use to refresh an expired access token
     * @param accessTokenRefreshUrl url to use to refresh an expired access token
     * @return a <b>CRest</b> instance
     * @see org.codegist.crest.CRestBuilder#oauth(String, String, String, String, String, String)
     */
    public static CRest getOAuthInstance(String consumerKey, String consumerSecret, String accessToken, String accessTokenSecret, String sessionHandle, String accessTokenRefreshUrl) {
        // Thin convenience wrapper: configure the builder with OAuth and build.
        return oauth(consumerKey, consumerSecret, accessToken, accessTokenSecret, sessionHandle, accessTokenRefreshUrl).build();
    }
}
public class DetectorToDetector2Adapter {
    /**
     * Fetch the ClassContext for the given class from the analysis cache and
     * run the wrapped (legacy) detector on it, recording its time in the profiler.
     *
     * @see edu.umd.cs.findbugs.Detector2#visitClass(edu.umd.cs.findbugs.classfile.ClassDescriptor)
     */
    @Override
    public void visitClass(ClassDescriptor classDescriptor) throws CheckedAnalysisException {
        final IAnalysisCache analysisCache = Global.getAnalysisCache();
        final ClassContext classContext = analysisCache.getClassAnalysis(ClassContext.class, classDescriptor);
        final Profiler profiler = analysisCache.getProfiler();
        final Class<?> detectorClass = detector.getClass();
        profiler.start(detectorClass);
        try {
            detector.visitClassContext(classContext);
        } finally {
            // Always stop the profiler, even if the detector throws.
            profiler.end(detectorClass);
        }
    }
}
public class PixelMatrix {
    /**
     * Recomputes all pixel dimensions from the current control size and
     * redraws the matrix. Called whenever the control is resized.
     */
    private void resize() {
        // Usable drawing area = control size minus insets.
        width = getWidth() - getInsets().getLeft() - getInsets().getRight();
        height = getHeight() - getInsets().getTop() - getInsets().getBottom();
        // Square pixel edge: the smaller of the per-column and per-row size.
        pixelSize = (width / cols) < (height / rows) ? (width / cols) : (height / rows);
        pixelWidth = (width / cols);
        pixelHeight = (height / rows);
        spacer = useSpacer ? pixelSize * getSpacerSizeFactor() : 0;
        // Inner pixel sizes after subtracting the spacer on both sides.
        pixelSizeMinusDoubleSpacer = pixelSize - spacer * 2;
        pixelWidthMinusDoubleSpacer = pixelWidth - spacer * 2;
        pixelHeightMinusDoubleSpacer = pixelHeight - spacer * 2;
        if (width > 0 && height > 0) {
            if (squarePixels) {
                // Force width == height: use the square edge for both axes.
                pixelWidth = pixelSize;
                pixelHeight = pixelSize;
                pixelWidthMinusDoubleSpacer = pixelSizeMinusDoubleSpacer;
                pixelHeightMinusDoubleSpacer = pixelSizeMinusDoubleSpacer;
            }
            canvas.setWidth(cols * pixelWidth);
            canvas.setHeight(rows * pixelHeight);
            // Center the canvas inside the control.
            canvas.relocate((getWidth() - (cols * pixelWidth)) * 0.5, (getHeight() - (rows * pixelHeight)) * 0.5);
            drawMatrix();
        }
    }
}
public class BaseValidator {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public boolean validateUNKNSF(UNKNSF unknsf, DiagnosticChain diagnostics, Map<Object, Object> context) {
        // Generated EMF validator: only the default constraints apply to UNKNSF.
        return validate_EveryDefaultConstraint(unknsf, diagnostics, context);
    }
}
public class PagerIndicator { /** * since we used a adapter wrapper , so we can ' t getCount directly from wrapper . * @ return */ private int getShouldDrawCount ( ) { } }
if ( mPager . getAdapter ( ) instanceof InfinitePagerAdapter ) { return ( ( InfinitePagerAdapter ) mPager . getAdapter ( ) ) . getRealCount ( ) ; } else { return mPager . getAdapter ( ) . getCount ( ) ; }
public class UndertowReactiveWebServerFactory {
    /**
     * Set {@link UndertowBuilderCustomizer}s that should be applied to the Undertow
     * {@link io.undertow.Undertow.Builder Builder}. Calling this method will replace any
     * existing customizers.
     * @param customizers the customizers to set (must not be null)
     */
    public void setBuilderCustomizers(Collection<? extends UndertowBuilderCustomizer> customizers) {
        Assert.notNull(customizers, "Customizers must not be null");
        // Defensive copy: later mutations of the caller's collection are not reflected.
        this.builderCustomizers = new ArrayList<>(customizers);
    }
}
public class PeepholeReplaceKnownMethods {
    /**
     * Try to evaluate parseInt, parseFloat:
     *   parseInt("1")      -> 1
     *   parseInt("1", 10)  -> 1
     *   parseFloat("1.11") -> 1.11
     *
     * Returns the replacement node on success; returning {@code n} unchanged
     * signals "no folding performed".
     */
    private Node tryFoldParseNumber(Node n, String functionName, Node firstArg) {
        checkArgument(n.isCall());
        boolean isParseInt = functionName.equals("parseInt");
        Node secondArg = firstArg.getNext();
        // Second argument is only used as the radix for parseInt
        int radix = 0;
        if (secondArg != null) {
            if (!isParseInt) {
                // parseFloat takes a single argument; don't fold.
                return n;
            }
            // Third-argument and non-numeric second arg are problematic. Discard.
            if (secondArg.getNext() != null || !secondArg.isNumber()) {
                return n;
            } else {
                double tmpRadix = secondArg.getDouble();
                if (tmpRadix != (int) tmpRadix) {
                    return n;
                }
                radix = (int) tmpRadix;
                // Valid radixes are 2..36; 0 means "unspecified".
                if (radix < 0 || radix == 1 || radix > 36) {
                    return n;
                }
            }
        }
        // stringVal must be a valid string.
        String stringVal = null;
        Double checkVal;
        if (firstArg.isNumber()) {
            checkVal = NodeUtil.getNumberValue(firstArg);
            if (!(radix == 0 || radix == 10) && isParseInt) {
                // Convert a numeric first argument to a different base
                stringVal = String.valueOf(checkVal.intValue());
            } else {
                // If parseFloat is called with a numeric argument,
                // replace it with just the number.
                // If parseInt is called with a numeric first argument and the radix
                // is 10 or omitted, just replace it with the number
                Node numericNode;
                if (isParseInt) {
                    numericNode = IR.number(checkVal.intValue());
                } else {
                    numericNode = IR.number(checkVal);
                }
                n.replaceWith(numericNode);
                reportChangeToEnclosingScope(numericNode);
                return numericNode;
            }
        } else {
            stringVal = NodeUtil.getStringValue(firstArg);
            if (stringVal == null) {
                return n;
            }
            // Check that the string is in a format we can recognize
            checkVal = NodeUtil.getStringNumberValue(stringVal);
            if (checkVal == null) {
                return n;
            }
            stringVal = NodeUtil.trimJsWhiteSpace(stringVal);
            if (stringVal.isEmpty()) {
                return n;
            }
        }
        Node newNode;
        if (stringVal.equals("0")) {
            // Special case for parseInt("0") or parseFloat("0")
            newNode = IR.number(0);
        } else if (isParseInt) {
            if (radix == 0 || radix == 16) {
                // A "0x" prefix forces hexadecimal and is stripped before parsing.
                if (stringVal.length() > 1 && stringVal.substring(0, 2).equalsIgnoreCase("0x")) {
                    radix = 16;
                    stringVal = stringVal.substring(2);
                } else if (radix == 0) {
                    // if a radix is not specified or is 0 and the most
                    // significant digit is "0", the string will parse
                    // with a radix of 8 on some browsers, so leave
                    // this case alone. This check does not apply in
                    // script mode ECMA5 or greater
                    if (!isEcmaScript5OrGreater() && stringVal.substring(0, 1).equals("0")) {
                        return n;
                    }
                    radix = 10;
                }
            }
            int newVal = 0;
            try {
                newVal = Integer.parseInt(stringVal, radix);
            } catch (NumberFormatException e) {
                return n;
            }
            newNode = IR.number(newVal);
        } else {
            String normalizedNewVal = "0";
            try {
                double newVal = Double.parseDouble(stringVal);
                newNode = IR.number(newVal);
                normalizedNewVal = normalizeNumericString(String.valueOf(newVal));
            } catch (NumberFormatException e) {
                return n;
            }
            // Make sure that the parsed number matches the original string
            // This prevents rounding differences between the Java implementation
            // and native script.
            if (!normalizeNumericString(stringVal).equals(normalizedNewVal)) {
                return n;
            }
        }
        n.replaceWith(newNode);
        reportChangeToEnclosingScope(newNode);
        return newNode;
    }
}
public class OptionsCertificatePanel {
    // GEN-LAST:event_showActiveCertificateButtonActionPerformed
    /**
     * Adds the PKCS#11 driver currently selected in the UI as a key store.
     *
     * Reads the driver's name, library path, slot and slot-list index from the
     * driver configuration, initialises the provider with the PIN typed by the
     * user and, on success, registers the resulting key store in the list model.
     * Provider initialisation conflicts are retried once (Issue 182) and failed
     * login attempts are counted so the user is warned before the smartcard
     * would be blocked.
     */
    private void addPkcs11ButtonActionPerformed(java.awt.event.ActionEvent evt) { // GEN-FIRST:event_addPkcs11ButtonActionPerformed
        String name = null;
        try {
            // Pull the selected driver's settings; bail out silently on any
            // missing/invalid field.
            final int indexSelectedDriver = driverComboBox.getSelectedIndex();
            name = driverConfig.getNames().get(indexSelectedDriver);
            if (name.equals("")) {
                return;
            }
            String library = driverConfig.getPaths().get(indexSelectedDriver);
            if (library.equals("")) {
                return;
            }
            int slot = driverConfig.getSlots().get(indexSelectedDriver);
            if (slot < 0) {
                return;
            }
            int slotListIndex = driverConfig.getSlotIndexes().get(indexSelectedDriver);
            if (slotListIndex < 0) {
                return;
            }
            // Empty password is passed as null to the PKCS#11 layer.
            String kspass = new String(pkcs11PasswordField.getPassword());
            if (kspass.equals("")) {
                kspass = null;
            }
            PCKS11ConfigurationBuilder confBuilder = PKCS11Configuration.builder();
            confBuilder.setName(name).setLibrary(library);
            if (usePkcs11ExperimentalSliSupportCheckBox.isSelected()) {
                confBuilder.setSlotListIndex(slotListIndex);
            } else {
                confBuilder.setSlotId(slot);
            }
            int ksIndex = contextManager.initPKCS11(confBuilder.build(), kspass);
            if (ksIndex == -1) {
                logger.error("The required PKCS#11 provider is not available ("
                        + SSLContextManager.SUN_PKCS11_CANONICAL_CLASS_NAME + " or "
                        + SSLContextManager.IBM_PKCS11_CONONICAL_CLASS_NAME + ").");
                showErrorMessageSunPkcs11ProviderNotAvailable();
                return;
            }
            // The PKCS#11 driver/smartcard was initialized properly: reset login attempts
            login_attempts = 0;
            keyStoreListModel.insertElementAt(contextManager.getKeyStoreDescription(ksIndex), ksIndex);
            // Issue 182
            retry = true;
            certificatejTabbedPane.setSelectedIndex(0);
            activateFirstOnlyAliasOfKeyStore(ksIndex);
            driverComboBox.setSelectedIndex(-1);
            pkcs11PasswordField.setText("");
        } catch (InvocationTargetException e) {
            if (e.getCause() instanceof ProviderException) {
                if ("Error parsing configuration".equals(e.getCause().getMessage())) {
                    // There was a problem with the configuration provided:
                    //   - Missing library.
                    //   - Malformed configuration.
                    logAndShowGenericErrorMessagePkcs11CouldNotBeAdded(false, name, e);
                } else if ("Initialization failed".equals(e.getCause().getMessage())) {
                    // The initialisation may fail because of:
                    //   - no smart card reader or smart card detected.
                    //   - smart card is in use by other application.
                    // Issue 182: Try to instantiate the PKCS11 provider twice if there are
                    // conflicts with other software (eg. Firefox), that is accessing it too.
                    if (retry) {
                        // Try two times only
                        retry = false;
                        addPkcs11ButtonActionPerformed(evt);
                    } else {
                        JOptionPane.showMessageDialog(null,
                                new String[] {
                                        Constant.messages.getString("options.cert.error"),
                                        Constant.messages.getString("options.cert.error.pkcs11") },
                                Constant.messages.getString("options.cert.label.client.cert"),
                                JOptionPane.ERROR_MESSAGE);
                        // Error message changed to explain that user should try to add it again...
                        retry = true;
                        logger.warn("Couldn't add key from " + name, e);
                    }
                } else {
                    logAndShowGenericErrorMessagePkcs11CouldNotBeAdded(false, name, e);
                }
            } else {
                logAndShowGenericErrorMessagePkcs11CouldNotBeAdded(false, name, e);
            }
        } catch (java.io.IOException e) {
            if (e.getMessage().equals("load failed")
                    && e.getCause().getClass().getName()
                            .equals("javax.security.auth.login.FailedLoginException")) {
                // Exception due to a failed login attempt: BAD PIN or password
                login_attempts++;
                String attempts = " (" + login_attempts + "/" + MAX_LOGIN_ATTEMPTS + ") ";
                if (login_attempts == (MAX_LOGIN_ATTEMPTS - 1)) {
                    // Last attempt before blocking the smartcard
                    JOptionPane.showMessageDialog(null,
                            new String[] {
                                    Constant.messages.getString("options.cert.error"),
                                    Constant.messages.getString("options.cert.error.wrongpassword"),
                                    Constant.messages.getString("options.cert.error.wrongpasswordlast"),
                                    attempts },
                            Constant.messages.getString("options.cert.label.client.cert"),
                            JOptionPane.ERROR_MESSAGE);
                    logger.warn("PKCS#11: Incorrect PIN or password" + attempts + ": " + name
                            + " *LAST TRY BEFORE BLOCKING*");
                } else {
                    JOptionPane.showMessageDialog(null,
                            new String[] {
                                    Constant.messages.getString("options.cert.error"),
                                    Constant.messages.getString("options.cert.error.wrongpassword"),
                                    attempts },
                            Constant.messages.getString("options.cert.label.client.cert"),
                            JOptionPane.ERROR_MESSAGE);
                    logger.warn("PKCS#11: Incorrect PIN or password" + attempts + ": " + name);
                }
            } else {
                logAndShowGenericErrorMessagePkcs11CouldNotBeAdded(false, name, e);
            }
        } catch (KeyStoreException e) {
            logAndShowGenericErrorMessagePkcs11CouldNotBeAdded(false, name, e);
        } catch (Exception e) {
            logAndShowGenericErrorMessagePkcs11CouldNotBeAdded(true, name, e);
        }
    }
}
public class ThriftClient {
    /**
     * Finds entities whose given column equals the given value.
     *
     * CQL3-enabled metadata goes through {@code findByRelationQuery}; otherwise a
     * Thrift secondary-index scan ({@code get_indexed_slices}) is performed.
     * Inheritance (sub-managed types) is handled by querying each subtype.
     *
     * @see com.impetus.kundera.client.Client#findByRelation(java.lang.String, java.lang.Object, java.lang.Class)
     */
    @Override
    public List<Object> findByRelation(String colName, Object colValue, Class entityClazz) {
        EntityMetadata m = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entityClazz);
        List<Object> entities = null;
        if (isCql3Enabled(m)) {
            entities = new ArrayList<Object>();
            MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(m.getPersistenceUnit());
            EntityType entityType = metaModel.entity(m.getEntityClazz());
            List<AbstractManagedType> subManagedType = ((AbstractManagedType) entityType).getSubManagedType();
            if (subManagedType.isEmpty()) {
                entities.addAll(findByRelationQuery(m, colName, colValue, entityClazz, dataHandler));
            } else {
                // Inherited entities: run the relation query once per subtype.
                for (AbstractManagedType subEntity : subManagedType) {
                    EntityMetadata subEntityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, subEntity.getJavaType());
                    entities.addAll(findByRelationQuery(subEntityMetadata, colName, colValue, subEntityMetadata.getEntityClazz(), dataHandler));
                    // TODO: check entities != null
                }
            }
        } else {
            // Thrift path: build a secondary-index query (colName == colValue)
            // over the whole row range.
            SlicePredicate slicePredicate = new SlicePredicate();
            slicePredicate.setSlice_range(new SliceRange(ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE));
            IndexExpression ie = new IndexExpression(UTF8Type.instance.decompose(colName), IndexOperator.EQ, ByteBuffer.wrap(PropertyAccessorHelper.getBytes(colValue)));
            List<IndexExpression> expressions = new ArrayList<IndexExpression>();
            expressions.add(ie);
            IndexClause ix = new IndexClause();
            ix.setStart_key(ByteBufferUtil.EMPTY_BYTE_BUFFER);
            ix.setCount(Integer.MAX_VALUE);
            ix.setExpressions(expressions);
            ColumnParent columnParent = new ColumnParent(m.getTableName());
            List<KeySlice> keySlices = null;
            Connection conn = null;
            try {
                conn = getConnection();
                // Secondary-index scans are not issued for super column families.
                if (!m.getType().equals(Type.SUPER_COLUMN_FAMILY)) {
                    keySlices = conn.getClient().get_indexed_slices(columnParent, ix, slicePredicate, getConsistencyLevel());
                }
            } catch (InvalidRequestException e) {
                // "No indexed columns" simply means nothing can match; return empty-handed.
                if (e.why != null && e.why.contains("No indexed columns")) {
                    return entities;
                } else {
                    log.error("Error while finding relations for column family {} , Caused by: .", m.getTableName(), e);
                    throw new KunderaException(e);
                }
            } catch (UnavailableException e) {
                log.error("Error while finding relations for column family {} , Caused by: .", m.getTableName(), e);
                throw new KunderaException(e);
            } catch (TimedOutException e) {
                log.error("Error while finding relations for column family {} , Caused by: .", m.getTableName(), e);
                throw new KunderaException(e);
            } catch (TException e) {
                log.error("Error while finding relations for column family {} , Caused by: .", m.getTableName(), e);
                throw new KunderaException(e);
            } finally {
                // Always return the connection to the pool.
                releaseConnection(conn);
            }
            if (keySlices != null) {
                entities = new ArrayList<Object>(keySlices.size());
                MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(m.getPersistenceUnit());
                EntityType entityType = metaModel.entity(m.getEntityClazz());
                List<AbstractManagedType> subManagedType = ((AbstractManagedType) entityType).getSubManagedType();
                if (subManagedType.isEmpty()) {
                    entities = populateData(m, keySlices, entities, m.getRelationNames() != null, m.getRelationNames());
                } else {
                    // Inherited entities: populate once per subtype's metadata.
                    for (AbstractManagedType subEntity : subManagedType) {
                        EntityMetadata subEntityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, subEntity.getJavaType());
                        entities = populateData(subEntityMetadata, keySlices, entities, subEntityMetadata.getRelationNames() != null, subEntityMetadata.getRelationNames());
                        // TODO: check entities != null
                    }
                }
            }
        }
        return entities;
    }
}
public class CollectedStatistics {
    /**
     * Add a StopWatch to the statistics.
     * @param sw StopWatch to add.
     */
    public synchronized void add(StopWatch sw) {
        // NOTE(review): if getTimeMicros() and MICROS_IN_MILLIS are both integral
        // types this is an integer division — TODO confirm MICROS_IN_MILLIS is a double.
        double timeInMs = sw.getTimeMicros() / MICROS_IN_MILLIS;
        // let's fake the array: record the per-lap average once for each lap counted
        for (int i = 0; i < sw.getCount(); i++)
            m_times.add(timeInMs / sw.getCount());
        // NOTE(review): min/max track the stopwatch's TOTAL time while m_times holds
        // per-lap averages — presumably intentional, but verify against consumers.
        if (timeInMs < m_min)
            m_min = timeInMs;
        if (timeInMs > m_max)
            m_max = timeInMs;
    }
}
public class ApolloCallTracker { /** * < p > Adds provided { @ link ApolloQueryWatcher } that is currently in progress . < / p > * < p > < b > Note < / b > : This method needs to be called right before * { @ link ApolloQueryWatcher # enqueueAndWatch ( ApolloCall . Callback ) } . < / p > */ void registerQueryWatcher ( @ NotNull ApolloQueryWatcher queryWatcher ) { } }
checkNotNull ( queryWatcher , "queryWatcher == null" ) ; OperationName operationName = queryWatcher . operation ( ) . name ( ) ; registerCall ( activeQueryWatchers , operationName , queryWatcher ) ;
public class SeleniumSpec { /** * Switches to a frame / iframe . */ @ Given ( "^I switch to the iframe on index '(\\d+?)'$" ) public void seleniumSwitchFrame ( Integer index ) { } }
assertThat ( commonspec . getPreviousWebElements ( ) ) . as ( "There are less found elements than required" ) . hasAtLeast ( index ) ; WebElement elem = commonspec . getPreviousWebElements ( ) . getPreviousWebElements ( ) . get ( index ) ; commonspec . getDriver ( ) . switchTo ( ) . frame ( elem ) ;
public class JsiiEngine { /** * Given a java class that extends a Jsii proxy , loads the corresponding jsii module * and returns the FQN of the jsii type . * @ param nativeClass The java class . * @ return The FQN . */ String loadModuleForClass ( Class < ? > nativeClass ) { } }
final Jsii jsii = tryGetJsiiAnnotation ( nativeClass , true ) ; if ( jsii == null ) { throw new JsiiException ( "Unable to find @Jsii annotation for class" ) ; } this . loadModule ( jsii . module ( ) ) ; return jsii . fqn ( ) ;
public class Job { /** * This is the non - type - safe version of the { @ code futureCall ( ) } family of * methods . Normally a user will not need to invoke this method directly . * Instead , one of the type - safe methods such as * { @ link # futureCall ( Job2 , Value , Value , JobSetting . . . ) } should be used . The * only reason a user should invoke this method directly is if * { @ code jobInstance } is a direct subclass of { @ code Job } instead of being a * subclass of one of the { @ code Jobn } classes and the { @ code run ( ) } method of * { @ code jobInstance } takes more arguments than the greatest { @ code n } such * that the framework offers a { @ code Jobn } class . * @ param < T > The return type of the child job being specified * @ param settings * @ param jobInstance The user - written job object * @ param params The parameters to be passed to the { @ code run } method of the * job * @ return a { @ code FutureValue } representing an empty value slot that will be * filled by the output of { @ code jobInstance } when it finalizes . This * may be passed in to further invocations of { @ code futureCall ( ) } in * order to specify a data dependency . */ public < T > FutureValue < T > futureCallUnchecked ( JobSetting [ ] settings , Job < ? > jobInstance , Object ... params ) { } }
JobRecord childJobRecord = PipelineManager . registerNewJobRecord ( updateSpec , settings , thisJobRecord , currentRunGUID , jobInstance , params ) ; thisJobRecord . appendChildKey ( childJobRecord . getKey ( ) ) ; return new FutureValueImpl < > ( childJobRecord . getOutputSlotInflated ( ) ) ;
public class PhiAccrualFailureDetector { /** * bootstrap with 2 entries with rather high standard deviation */ @ SuppressWarnings ( "checkstyle:magicnumber" ) private void firstHeartbeat ( long firstHeartbeatEstimateMillis ) { } }
long stdDeviationMillis = firstHeartbeatEstimateMillis / 4 ; heartbeatHistory . add ( firstHeartbeatEstimateMillis - stdDeviationMillis ) ; heartbeatHistory . add ( firstHeartbeatEstimateMillis + stdDeviationMillis ) ;
public class diff_match_patch { /** * Compute and return the score for a match with e errors and x location . * @ param e * Number of errors in match . * @ param x * Location of match . * @ param loc * Expected location of match . * @ param pattern * Pattern being sought . * @ return Overall score for match ( 0.0 = good , 1.0 = bad ) . */ @ SuppressWarnings ( "unused" ) private static double match_bitapScore ( int e , int x , int loc , String pattern ) { } }
float accuracy = ( float ) e / pattern . length ( ) ; int proximity = Math . abs ( loc - x ) ; if ( Match_Distance == 0 ) { // Dodge divide by zero error . return proximity == 0 ? accuracy : 1.0 ; } return accuracy + ( proximity / ( float ) Match_Distance ) ;
public class ClassPathBinaryResourceGenerator { /** * ( non - Javadoc ) * @ see net . jawr . web . resource . bundle . generator . StreamResourceGenerator # * createResourceAsStream * ( net . jawr . web . resource . bundle . generator . GeneratorContext ) */ @ Override public InputStream createResourceAsStream ( GeneratorContext context ) { } }
InputStream is = null ; if ( FileNameUtils . hasImageExtension ( context . getPath ( ) ) ) { is = helper . createStreamResource ( context ) ; } return is ;
public class OCommandExecutorSQLAlterClass { /** * Execute the ALTER CLASS . */ public Object execute ( final Map < Object , Object > iArgs ) { } }
if ( attribute == null ) throw new OCommandExecutionException ( "Cannot execute the command because it has not been parsed yet" ) ; final OClassImpl cls = ( OClassImpl ) getDatabase ( ) . getMetadata ( ) . getSchema ( ) . getClass ( className ) ; if ( cls == null ) throw new OCommandExecutionException ( "Source class '" + className + "' not found" ) ; cls . setInternalAndSave ( attribute , value ) ; renameCluster ( ) ; return null ;
public class JdbcPublisher { /** * 1 . Truncate destination table if requested * 2 . Move data from staging to destination * 3 . Update Workunit state * TODO : Research on running this in parallel . While testing publishing it in parallel , it turns out delete all from the table locks the table * so that copying table threads wait until transaction lock times out and throwing exception ( MySQL ) . Is there a way to avoid this ? * { @ inheritDoc } * @ see org . apache . gobblin . publisher . DataPublisher # publishData ( java . util . Collection ) */ @ Override public void publishData ( Collection < ? extends WorkUnitState > states ) throws IOException { } }
LOG . info ( "Start publishing data" ) ; int branches = this . state . getPropAsInt ( ConfigurationKeys . FORK_BRANCHES_KEY , 1 ) ; Set < String > emptiedDestTables = Sets . newHashSet ( ) ; final Connection conn = createConnection ( ) ; final JdbcWriterCommands commands = this . jdbcWriterCommandsFactory . newInstance ( this . state , conn ) ; try { conn . setAutoCommit ( false ) ; for ( int i = 0 ; i < branches ; i ++ ) { final String destinationTable = this . state . getProp ( ForkOperatorUtils . getPropertyNameForBranch ( JDBC_PUBLISHER_FINAL_TABLE_NAME , branches , i ) ) ; final String databaseName = this . state . getProp ( ForkOperatorUtils . getPropertyNameForBranch ( JDBC_PUBLISHER_DATABASE_NAME , branches , i ) ) ; Preconditions . checkNotNull ( destinationTable ) ; if ( this . state . getPropAsBoolean ( ForkOperatorUtils . getPropertyNameForBranch ( JDBC_PUBLISHER_REPLACE_FINAL_TABLE , branches , i ) , false ) && ! emptiedDestTables . contains ( destinationTable ) ) { LOG . info ( "Deleting table " + destinationTable ) ; commands . deleteAll ( databaseName , destinationTable ) ; emptiedDestTables . add ( destinationTable ) ; } Map < String , List < WorkUnitState > > stagingTables = getStagingTables ( states , branches , i ) ; for ( Map . Entry < String , List < WorkUnitState > > entry : stagingTables . entrySet ( ) ) { String stagingTable = entry . getKey ( ) ; LOG . info ( "Copying data from staging table " + stagingTable + " into destination table " + destinationTable ) ; commands . copyTable ( databaseName , stagingTable , destinationTable ) ; for ( WorkUnitState workUnitState : entry . getValue ( ) ) { workUnitState . setWorkingState ( WorkUnitState . WorkingState . COMMITTED ) ; } } } LOG . info ( "Commit publish data" ) ; conn . commit ( ) ; } catch ( Exception e ) { try { LOG . error ( "Failed publishing. Rolling back." ) ; conn . rollback ( ) ; } catch ( SQLException se ) { LOG . error ( "Failed rolling back." 
, se ) ; } throw new RuntimeException ( "Failed publishing" , e ) ; } finally { try { conn . close ( ) ; } catch ( SQLException e ) { throw new RuntimeException ( e ) ; } }
public class WaitHttpConditionBuilder { /** * Sets the Http status code to check . * @ param status * @ return */ public WaitHttpConditionBuilder status ( HttpStatus status ) { } }
getCondition ( ) . setHttpResponseCode ( String . valueOf ( status . value ( ) ) ) ; return this ;
public class SVGAndroidRenderer {
    /**
     * Re-examines an ambiguous mid-path marker now that the neighbouring marker
     * positions are known, possibly flipping its direction.
     */
    private MarkerVector realignMarkerMid(MarkerVector lastPos, MarkerVector thisPos, MarkerVector nextPos) {
        // Compare the tentative marker vector against the incoming edge (lastPos -> thisPos).
        float alignment = dotProduct(thisPos.dx, thisPos.dy, (thisPos.x - lastPos.x), (thisPos.y - lastPos.y));
        if (alignment == 0f) {
            // Perpendicular to the incoming edge; fall back to the outgoing edge (thisPos -> nextPos).
            alignment = dotProduct(thisPos.dx, thisPos.dy, (nextPos.x - thisPos.x), (nextPos.y - thisPos.y));
        }
        if (alignment > 0) {
            return thisPos;
        }
        // Perpendicular to both edges: give up and keep the vector when it points
        // along 0deg (1,0) or has non-negative y.
        if (alignment == 0f && (thisPos.dx > 0f || thisPos.dy >= 0)) {
            return thisPos;
        }
        // Otherwise flip the marker to the opposite direction.
        thisPos.dx = -thisPos.dx;
        thisPos.dy = -thisPos.dy;
        return thisPos;
    }
}
public class CmsContainerElementBean { /** * Creates an element bean for the given resource type . < p > * < b > The represented resource will be in memory only and not in the VFS ! ! ! . < / b > < p > * @ param cms the CMS context * @ param resourceType the resource type * @ param targetFolder the parent folder of the resource * @ param individualSettings the element settings as a map of name / value pairs * @ param isCopyModels if this element when used in models should be copied instead of reused * @ param locale the locale to use * @ return the created element bean * @ throws CmsException if something goes wrong creating the element * @ throws IllegalArgumentException if the resource type not instance of { @ link org . opencms . file . types . CmsResourceTypeXmlContent } */ public static CmsContainerElementBean createElementForResourceType ( CmsObject cms , I_CmsResourceType resourceType , String targetFolder , Map < String , String > individualSettings , boolean isCopyModels , Locale locale ) throws CmsException { } }
if ( ! ( resourceType instanceof CmsResourceTypeXmlContent ) ) { throw new IllegalArgumentException ( ) ; } byte [ ] content = new byte [ 0 ] ; String schema = ( ( CmsResourceTypeXmlContent ) resourceType ) . getSchema ( ) ; if ( schema != null ) { // must set URI of OpenCms user context to parent folder of created resource , // in order to allow reading of properties for default values CmsObject newCms = OpenCms . initCmsObject ( cms ) ; newCms . getRequestContext ( ) . setUri ( targetFolder ) ; // unmarshal the content definition for the new resource CmsXmlContentDefinition contentDefinition = CmsXmlContentDefinition . unmarshal ( cms , schema ) ; CmsXmlContent xmlContent = CmsXmlContentFactory . createDocument ( newCms , locale , OpenCms . getSystemInfo ( ) . getDefaultEncoding ( ) , contentDefinition ) ; // adding all other available locales for ( Locale otherLocale : OpenCms . getLocaleManager ( ) . getAvailableLocales ( ) ) { if ( ! locale . equals ( otherLocale ) ) { xmlContent . addLocale ( newCms , otherLocale ) ; } } content = xmlContent . marshal ( ) ; } @ SuppressWarnings ( "deprecation" ) CmsFile file = new CmsFile ( CmsUUID . getNullUUID ( ) , CmsUUID . getNullUUID ( ) , targetFolder + "~" , resourceType . getTypeId ( ) , 0 , cms . getRequestContext ( ) . getCurrentProject ( ) . getUuid ( ) , CmsResource . STATE_NEW , 0 , cms . getRequestContext ( ) . getCurrentUser ( ) . getId ( ) , 0 , cms . getRequestContext ( ) . getCurrentUser ( ) . getId ( ) , CmsResource . DATE_RELEASED_DEFAULT , CmsResource . DATE_EXPIRED_DEFAULT , 1 , content . length , 0 , 0 , content ) ; CmsContainerElementBean elementBean = new CmsContainerElementBean ( file , null , individualSettings , true , resourceType . getTypeName ( ) + getSettingsHash ( individualSettings , isCopyModels ) , isCopyModels ) ; return elementBean ;
public class AmazonElastiCacheClient { /** * Returns a list of cache security group descriptions . If a cache security group name is specified , the list * contains only the description of that group . * @ param describeCacheSecurityGroupsRequest * Represents the input of a < code > DescribeCacheSecurityGroups < / code > operation . * @ return Result of the DescribeCacheSecurityGroups operation returned by the service . * @ throws CacheSecurityGroupNotFoundException * The requested cache security group name does not refer to an existing cache security group . * @ throws InvalidParameterValueException * The value for a parameter is invalid . * @ throws InvalidParameterCombinationException * Two or more incompatible parameters were specified . * @ sample AmazonElastiCache . DescribeCacheSecurityGroups * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticache - 2015-02-02 / DescribeCacheSecurityGroups " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeCacheSecurityGroupsResult describeCacheSecurityGroups ( DescribeCacheSecurityGroupsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeCacheSecurityGroups ( request ) ;
public class JarClassLoader { /** * 加载Jar文件 , 或者加载目录 * @ param jarFileOrDir jar文件或者jar文件所在目录 * @ return this */ public JarClassLoader addJar ( File jarFileOrDir ) { } }
if ( isJarFile ( jarFileOrDir ) ) { return addURL ( jarFileOrDir ) ; } final List < File > jars = loopJar ( jarFileOrDir ) ; for ( File jar : jars ) { addURL ( jar ) ; } return this ;
public class AbstractCompositeHandler { /** * { @ inheritDoc } */ public QueryResult whenSQLQuery ( final String sql , final List < Parameter > parameters ) throws SQLException { } }
if ( this . queryHandler == null ) { throw new SQLException ( "No query handler" ) ; } // end of if return this . queryHandler . apply ( sql , parameters ) ;
public class CoordinationElement { /** * setter for cat - sets * @ generated * @ param v value to set into the feature */ public void setCat ( String v ) { } }
if ( CoordinationElement_Type . featOkTst && ( ( CoordinationElement_Type ) jcasType ) . casFeat_cat == null ) jcasType . jcas . throwFeatMissing ( "cat" , "de.julielab.jules.types.CoordinationElement" ) ; jcasType . ll_cas . ll_setStringValue ( addr , ( ( CoordinationElement_Type ) jcasType ) . casFeatCode_cat , v ) ;
public class JvmParameterizedTypeReferenceImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
switch ( featureID ) { case TypesPackage . JVM_PARAMETERIZED_TYPE_REFERENCE__ARGUMENTS : return getArguments ( ) ; case TypesPackage . JVM_PARAMETERIZED_TYPE_REFERENCE__TYPE : if ( resolve ) return getType ( ) ; return basicGetType ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class PageObjectUtil { /** * 从对象中取参数 * @ param paramsObject * @ param paramName * @ param required * @ return */ protected static Object getParamValue ( MetaObject paramsObject , String paramName , boolean required ) { } }
Object value = null ; if ( paramsObject . hasGetter ( PARAMS . get ( paramName ) ) ) { value = paramsObject . getValue ( PARAMS . get ( paramName ) ) ; } if ( value != null && value . getClass ( ) . isArray ( ) ) { Object [ ] values = ( Object [ ] ) value ; if ( values . length == 0 ) { value = null ; } else { value = values [ 0 ] ; } } if ( required && value == null ) { throw new PageException ( "分页查询缺少必要的参数:" + PARAMS . get ( paramName ) ) ; } return value ;
public class SpecificationMethodAdapter {
    /**
     * Marks the end of a busy section.
     *
     * Emits bytecode that loads the contract context and invokes its
     * leave-contract method, then binds the given skip label at the
     * current instruction position.
     *
     * @param skip the label that jumps over the busy section; bound here
     */
    @Requires({"contextLocal >= 0", "skip != null"})
    protected void leaveBusySection(Label skip) {
        // Push the contract context stored in the local variable slot.
        loadLocal(contextLocal);
        // Notify the context that the contract's busy section is over.
        invokeVirtual(CONTRACT_CONTEXT_TYPE, LEAVE_CONTRACT_METHOD);
        // Resolve the skip label at this point in the generated method.
        mark(skip);
    }
}