signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JdbcNeo4JNativeExtractor { /** * { @ inheritDoc } */
@ Override public Partition [ ] getPartitions ( S config ) { } } | JdbcNeo4JDeepJobConfig neo4jConfig = ( JdbcNeo4JDeepJobConfig ) config ; JdbcPartition partition = new JdbcPartition ( 0 , neo4jConfig . getLowerBound ( ) , neo4jConfig . getUpperBound ( ) ) ; Partition [ ] result = new Partition [ 1 ] ; result [ 0 ] = partition ; return result ; |
public class Member { /** * Issue a invoke request on behalf of this member
* @ param invokeRequest { @ link InvokeRequest }
* @ throws ChainCodeException if the chain code invocation fails */
public ChainCodeResponse invoke ( InvokeRequest invokeRequest ) throws ChainCodeException , NoAvailableTCertException , CryptoException , IOException { } } | logger . debug ( "Member.invoke" ) ; if ( getChain ( ) . getPeers ( ) . isEmpty ( ) ) { throw new NoValidPeerException ( String . format ( "chain %s has no peers" , getChain ( ) . getName ( ) ) ) ; } TransactionContext tcxt = this . newTransactionContext ( null ) ; return tcxt . invoke ( invokeRequest ) ; |
public class CharacterApi { /** * Calculate a CSPA charge cost Takes a source character ID in the url and a
* set of target character ID & # 39 ; s in the body , returns a CSPA charge cost
* - - - SSO Scope : esi - characters . read _ contacts . v1
* @ param characterId
* An EVE character ID ( required )
* @ param requestBody
* The target characters to calculate the charge for ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ return ApiResponse & lt ; Float & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public ApiResponse < Float > postCharactersCharacterIdCspaWithHttpInfo ( Integer characterId , List < Integer > requestBody , String datasource , String token ) throws ApiException { } } | com . squareup . okhttp . Call call = postCharactersCharacterIdCspaValidateBeforeCall ( characterId , requestBody , datasource , token , null ) ; Type localVarReturnType = new TypeToken < Float > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ; |
public class EntityBodyConverter { /** * ( non - Javadoc )
* @ see com . netflix . discovery . provider . ISerializer # write ( java . lang . Object ,
* java . io . OutputStream , javax . ws . rs . core . MediaType ) */
public void write ( Object object , OutputStream os , MediaType mediaType ) throws IOException { } } | XStream xstream = getXStreamInstance ( mediaType ) ; if ( xstream != null ) { xstream . toXML ( object , os ) ; } else { throw new IllegalArgumentException ( "Content-type: " + mediaType . getType ( ) + " is currently not supported for " + object . getClass ( ) . getName ( ) ) ; } |
public class Evaluator { /** * Checks the token stream for context references and if there are missing references - substitutes available
* references and returns a partially evaluated expression .
* @ param tokens the token stream ( all tokens fetched )
* @ param context the evaluation context
* @ return the partially evaluated expression or null if expression can be fully evaluated */
protected String resolveAvailable ( CommonTokenStream tokens , EvaluationContext context ) { } } | boolean hasMissing = false ; List < Object > outputComponents = new ArrayList < > ( ) ; for ( int t = 0 ; t < tokens . size ( ) - 1 ; t ++ ) { // we can ignore the final EOF token
Token token = tokens . get ( t ) ; Token nextToken = tokens . get ( t + 1 ) ; // if token is a NAME not followed by ( then it ' s a context reference
if ( token . getType ( ) == ExcellentParser . NAME && nextToken . getType ( ) != ExcellentParser . LPAREN ) { try { outputComponents . add ( context . resolveVariable ( token . getText ( ) ) ) ; } catch ( EvaluationError ex ) { hasMissing = true ; outputComponents . add ( token ) ; } } else { outputComponents . add ( token ) ; } } // if we don ' t have missing context references , perform evaluation as normal
if ( ! hasMissing ) { return null ; } // re - combine the tokens and context values back into an expression
StringBuilder output = new StringBuilder ( String . valueOf ( m_expressionPrefix ) ) ; for ( Object outputComponent : outputComponents ) { String compVal ; if ( outputComponent instanceof Token ) { compVal = ( ( Token ) outputComponent ) . getText ( ) ; } else { compVal = Conversions . toRepr ( outputComponent , context ) ; } output . append ( compVal ) ; } return output . toString ( ) ; |
public class TvdbParser { /** * Gets the languages .
* @ param urlString the url string
* @ return the languages
* @ throws TvDbException the tv db exception */
public static List < Language > getLanguages ( String urlString ) throws TvDbException { } } | List < Language > languages = new ArrayList < > ( ) ; NodeList nlLanguages ; Node nLanguage ; Element eLanguage ; Document doc = DOMHelper . getEventDocFromUrl ( urlString ) ; if ( doc != null ) { nlLanguages = doc . getElementsByTagName ( LANGUAGE ) ; for ( int loop = 0 ; loop < nlLanguages . getLength ( ) ; loop ++ ) { nLanguage = nlLanguages . item ( loop ) ; if ( nLanguage . getNodeType ( ) == Node . ELEMENT_NODE ) { eLanguage = ( Element ) nLanguage ; languages . add ( parseNextLanguage ( eLanguage ) ) ; } } } return languages ; |
public class BinaryJedis { /** * Add the string value to the head ( LPUSH ) or tail ( RPUSH ) of the list stored at key . If the key
* does not exist an empty list is created just before the append operation . If the key exists but
* is not a List an error is returned .
* Time complexity : O ( 1)
* @ see BinaryJedis # rpush ( byte [ ] , byte [ ] . . . )
* @ param key
* @ param strings
* @ return Integer reply , specifically , the number of elements inside the list after the push
* operation . */
@ Override public Long rpush ( final byte [ ] key , final byte [ ] ... strings ) { } } | checkIsInMultiOrPipeline ( ) ; client . rpush ( key , strings ) ; return client . getIntegerReply ( ) ; |
public class JsonParser { /** * Advances a character , throwing if it is illegal in the context of a JSON string . */
private char stringChar ( ) throws JsonParserException { } } | int c = advanceChar ( ) ; if ( c == - 1 ) throw createParseException ( null , "String was not terminated before end of input" , true ) ; if ( c < 32 ) throw createParseException ( null , "Strings may not contain control characters: 0x" + Integer . toString ( c , 16 ) , false ) ; return ( char ) c ; |
public class MediaInfo { /** * Dump out the media information in a readable format .
* This is easier to read than a standard { @ link # toString ( ) } .
* This should only be used for debugging purposes .
* @ param writer writer to dump the media information to */
public void dump ( Writer writer ) { } } | PrintWriter printer = new PrintWriter ( writer ) ; for ( String sectionType : sectionsByType . keySet ( ) ) { int sectionNumber = 0 ; for ( Section section : sectionsByType . get ( sectionType ) ) { printer . printf ( "%s [%d]%n" , sectionType , sectionNumber ++ ) ; for ( String key : section ) { printer . printf ( " %30s -> %s%n" , key , section . value ( key ) ) ; } } printer . println ( ) ; } try { writer . flush ( ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } |
public class PolicyStatesInner { /** * Summarizes policy states for the subscription level policy assignment .
* @ param subscriptionId Microsoft Azure subscription ID .
* @ param policyAssignmentName Policy assignment name .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the SummarizeResultsInner object */
public Observable < SummarizeResultsInner > summarizeForSubscriptionLevelPolicyAssignmentAsync ( String subscriptionId , String policyAssignmentName ) { } } | return summarizeForSubscriptionLevelPolicyAssignmentWithServiceResponseAsync ( subscriptionId , policyAssignmentName ) . map ( new Func1 < ServiceResponse < SummarizeResultsInner > , SummarizeResultsInner > ( ) { @ Override public SummarizeResultsInner call ( ServiceResponse < SummarizeResultsInner > response ) { return response . body ( ) ; } } ) ; |
public class BeanDataSetProviderCDI { /** * Listen to changes on the data set definition registry */
protected void onDataSetStaleEvent ( @ Observes DataSetStaleEvent event ) { } } | DataSetDef def = event . getDataSetDef ( ) ; if ( DataSetProviderType . BEAN . equals ( def . getProvider ( ) ) ) { staticDataSetProvider . removeDataSet ( def . getUUID ( ) ) ; } |
public class SqlQueryImpl { /** * { @ inheritDoc }
* @ see jp . co . future . uroborosql . fluent . SqlQuery # collect ( jp . co . future . uroborosql . utils . CaseFormat ) */
@ Override public List < Map < String , Object > > collect ( final CaseFormat caseFormat ) { } } | try { return agent ( ) . query ( context ( ) , caseFormat ) ; } catch ( SQLException e ) { throw new UroborosqlSQLException ( e ) ; } |
public class DocumentLine { /** * indexed getter for beginnings - gets an indexed value -
* @ generated
* @ param i index in the array to get
* @ return value of the element at index i */
public float getBeginnings ( int i ) { } } | if ( DocumentLine_Type . featOkTst && ( ( DocumentLine_Type ) jcasType ) . casFeat_beginnings == null ) jcasType . jcas . throwFeatMissing ( "beginnings" , "ch.epfl.bbp.uima.types.DocumentLine" ) ; jcasType . jcas . checkArrayBounds ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( DocumentLine_Type ) jcasType ) . casFeatCode_beginnings ) , i ) ; return jcasType . ll_cas . ll_getFloatArrayValue ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( DocumentLine_Type ) jcasType ) . casFeatCode_beginnings ) , i ) ; |
public class Partition { /** * Determines whether the two items are in the same component or not */
public boolean sameComponent ( V a , V b ) { } } | return componentOf ( a ) . equals ( componentOf ( b ) ) ; |
public class IterUtil { /** * 获得 { @ link Iterator } 对象的元素类型 ( 通过第一个非空元素判断 ) < br >
* 注意 , 此方法至少会调用多次next方法
* @ param iterator { @ link Iterator }
* @ return 元素类型 , 当列表为空或元素全部为null时 , 返回null */
public static Class < ? > getElementType ( Iterator < ? > iterator ) { } } | final Iterator < ? > iter2 = new CopiedIter < > ( iterator ) ; if ( null != iter2 ) { Object t ; while ( iter2 . hasNext ( ) ) { t = iter2 . next ( ) ; if ( null != t ) { return t . getClass ( ) ; } } } return null ; |
public class OverviewPlot { /** * Initialize the SVG plot . */
private void initializePlot ( ) { } } | plot = new VisualizationPlot ( ) ; { // Add a background element :
CSSClass cls = new CSSClass ( this , "background" ) ; final String bgcol = context . getStyleLibrary ( ) . getBackgroundColor ( StyleLibrary . PAGE ) ; cls . setStatement ( SVGConstants . CSS_FILL_PROPERTY , bgcol ) ; plot . addCSSClassOrLogError ( cls ) ; Element background = plot . svgElement ( SVGConstants . SVG_RECT_TAG ) ; background . setAttribute ( SVGConstants . SVG_X_ATTRIBUTE , "0" ) ; background . setAttribute ( SVGConstants . SVG_Y_ATTRIBUTE , "0" ) ; background . setAttribute ( SVGConstants . SVG_WIDTH_ATTRIBUTE , "100%" ) ; background . setAttribute ( SVGConstants . SVG_HEIGHT_ATTRIBUTE , "100%" ) ; SVGUtil . setCSSClass ( background , cls . getName ( ) ) ; // Don ' t export a white background :
if ( "white" . equals ( bgcol ) ) { background . setAttribute ( SVGPlot . NO_EXPORT_ATTRIBUTE , SVGPlot . NO_EXPORT_ATTRIBUTE ) ; } plot . getRoot ( ) . appendChild ( background ) ; } { // setup the hover CSS classes .
selcss = new CSSClass ( this , "s" ) ; if ( DEBUG_LAYOUT ) { selcss . setStatement ( SVGConstants . CSS_STROKE_PROPERTY , SVGConstants . CSS_RED_VALUE ) ; selcss . setStatement ( SVGConstants . CSS_STROKE_WIDTH_PROPERTY , .00001 * StyleLibrary . SCALE ) ; selcss . setStatement ( SVGConstants . CSS_STROKE_OPACITY_PROPERTY , "0.5" ) ; } selcss . setStatement ( SVGConstants . CSS_FILL_PROPERTY , SVGConstants . CSS_RED_VALUE ) ; selcss . setStatement ( SVGConstants . CSS_FILL_OPACITY_PROPERTY , "0" ) ; selcss . setStatement ( SVGConstants . CSS_CURSOR_PROPERTY , SVGConstants . CSS_POINTER_VALUE ) ; plot . addCSSClassOrLogError ( selcss ) ; CSSClass hovcss = new CSSClass ( this , "h" ) ; hovcss . setStatement ( SVGConstants . CSS_FILL_OPACITY_PROPERTY , "0.25" ) ; plot . addCSSClassOrLogError ( hovcss ) ; // Hover listener .
hoverer = new CSSHoverClass ( hovcss . getName ( ) , null , true ) ; } // Disable Batik default interactions ( zoom , rotate , etc . )
if ( single ) { plot . setDisableInteractions ( true ) ; } SVGEffects . addShadowFilter ( plot ) ; SVGEffects . addLightGradient ( plot ) ; |
public class SchemaGenerator { /** * Generates an XML Schema which includes the given classes
* @ param classes to include in the schema definition
* @ throws JAXBException
* @ throws IOException */
public void generateSchema ( Class ... classes ) throws JAXBException , IOException { } } | if ( ! baseDir . exists ( ) ) { baseDir . mkdirs ( ) ; } JAXBContext context = JAXBContext . newInstance ( classes ) ; context . generateSchema ( this ) ; |
public class RowMajorSparseMatrix { /** * Creates a new { @ link RowMajorSparseMatrix } from the given 1D { @ code array } with
* compressing ( copying ) the underlying array . */
public static RowMajorSparseMatrix from1DArray ( int rows , int columns , double [ ] array ) { } } | return CRSMatrix . from1DArray ( rows , columns , array ) ; |
public class CreateClientWmsPresenterImpl { /** * / * private methods */
private void showStep ( int index ) { } } | if ( index >= 0 && index < wizardSteps . size ( ) ) { currentStep = wizardSteps . get ( index ) ; // only show current step
for ( WizardStepView view : wizardSteps ) { view . getWidget ( ) . setVisible ( currentStep == view ) ; } controllerButtonsWindow . setSubTitle ( currentStep . getTitle ( ) ) ; controllerButtonsWindow . setPreviousButtonEnabled ( index > 0 ) ; controllerButtonsWindow . setNextButtonEnabled ( index + 1 < wizardSteps . size ( ) ) ; controllerButtonsWindow . setWarningLabelText ( null , false ) ; // only allow save on last step
controllerButtonsWindow . setSaveButtonEnabled ( index + 1 == wizardSteps . size ( ) ) ; logger . info ( "Client WMS layer wizard, current step " + ( currentStep != null ? currentStep . getClass ( ) . toString ( ) : "none" ) ) ; } else { hideAndCleanWindow ( ) ; } |
public class DRUMS { /** * Searches for the given key in workingBuffer , beginning at the given index . Remember : The records in the
* given workingBuffer have to be ordered ascending .
* @ param workingBuffer
* the ByteBuffer to work on
* @ param key
* the key to find
* @ param indexInChunk
* the start position of reading the < code > workingBuffer < / code >
* @ return the byteOffset where the key was found . < br >
* - 1 if the key wasn ' t found */
public int findElementInReadBuffer ( ByteBuffer workingBuffer , byte [ ] key , int indexInChunk ) { } } | workingBuffer . position ( indexInChunk ) ; int minElement = indexInChunk / gp . getElementSize ( ) ; int numberOfEntries = workingBuffer . limit ( ) / gp . getElementSize ( ) ; // binary search
int maxElement = numberOfEntries - 1 ; int midElement ; int comp ; byte [ ] tempKey = new byte [ gp . getKeySize ( ) ] ; while ( minElement <= maxElement ) { midElement = minElement + ( maxElement - minElement ) / 2 ; indexInChunk = midElement * gp . getElementSize ( ) ; workingBuffer . position ( indexInChunk ) ; workingBuffer . get ( tempKey ) ; comp = KeyUtils . compareKey ( key , tempKey , gp . getKeySize ( ) ) ; if ( comp == 0 ) { return indexInChunk ; } else if ( comp < 0 ) { maxElement = midElement - 1 ; } else { minElement = midElement + 1 ; } } return - 1 ; |
public class BranchController { /** * Gets the form for a bulk update of the branch */
@ RequestMapping ( value = "branches/{branchId}/update/bulk" , method = RequestMethod . GET ) public Form bulkUpdate ( @ SuppressWarnings ( "UnusedParameters" ) @ PathVariable ID branchId ) { } } | return Form . create ( ) . with ( Replacements . of ( "replacements" ) . label ( "Replacements" ) ) ; |
public class ResumableDuplexConnection { /** * / * reconnected by session after error . After this downstream can receive frames ,
* but sending in suppressed until resume ( ) is called */
public void reconnect ( DuplexConnection connection ) { } } | if ( curConnection == null ) { logger . debug ( "{} Resumable duplex connection started with connection: {}" , tag , connection ) ; state = State . CONNECTED ; onNewConnection ( connection ) ; } else { logger . debug ( "{} Resumable duplex connection reconnected with connection: {}" , tag , connection ) ; /* race between sendFrame and doResumeStart may lead to ongoing upstream frames
written before resume complete */
dispatch ( new ResumeStart ( connection ) ) ; } |
public class Property { /** * Retrieves the specified program property . if it exists from the system properties , that is returned , overridding
* all other values . Otherwise , if it exists from the properties file , that is returned , otherwise , null is returned
* @ param property - what property value to return
* @ return String : the property value , null if unset */
private static String getProgramProperty ( String property ) { } } | if ( System . getProperty ( property ) != null ) { return System . getProperty ( property ) . trim ( ) ; } Properties prop = new Properties ( ) ; try ( InputStream input = new FileInputStream ( SELENIFIED ) ) { prop . load ( input ) ; } catch ( IOException e ) { log . info ( e ) ; } String fullProperty = prop . getProperty ( property ) ; if ( fullProperty != null ) { fullProperty = fullProperty . trim ( ) ; } return fullProperty ; |
public class HuLuIndexTool { /** * Lists a 2D double matrix to the System console . */
public static void displayMatrix ( double [ ] [ ] matrix ) { } } | String line ; for ( int f = 0 ; f < matrix . length ; f ++ ) { line = "" ; for ( int g = 0 ; g < matrix . length ; g ++ ) { line += matrix [ g ] [ f ] + " | " ; } logger . debug ( line ) ; } |
public class TransTypes { /** * Visitor method for parameterized types . */
public void visitTypeApply ( JCTypeApply tree ) { } } | JCTree clazz = translate ( tree . clazz , null ) ; result = clazz ; |
public class Expiry { /** * Helper to calculate the next expiry out of two expiry times that
* may be up next . Return the time value that is closest or equal to
* the current time . Time values in the past are ignored . If all times
* are in the past , returns { @ link # ETERNAL } .
* @ param loadTime the current time in millis since epoch
* @ param candidate1 candidate time for next expiry
* @ param candidate2 candidate time for next expiry
* @ return either first or second candidate or { @ link # ETERNAL } */
public static long earliestTime ( long loadTime , long candidate1 , long candidate2 ) { } } | if ( candidate1 >= loadTime ) { if ( candidate1 < candidate2 || candidate2 < loadTime ) { return candidate1 ; } } if ( candidate2 >= loadTime ) { return candidate2 ; } return ETERNAL ; |
public class PoiUtil { /** * 查找有值得最大列索引 。
* @ param sheet 表单
* @ return 最大列索引 */
public static int findMaxCol ( Sheet sheet ) { } } | int maxCol = 0 ; for ( int i = 0 , ii = sheet . getLastRowNum ( ) ; i <= ii ; ++ i ) { val row = sheet . getRow ( i ) ; if ( row == null ) continue ; for ( int j = row . getLastCellNum ( ) - 1 ; j > maxCol ; -- j ) { val cell = row . getCell ( j ) ; if ( cell == null ) continue ; val value = getCellStringValue ( cell ) ; if ( StringUtils . isNotEmpty ( value ) ) { maxCol = j ; break ; } } } return maxCol ; |
public class TableWriterServiceImpl { /** * Frees a segment to be reused . Called by the segment - gc service . */
public void freeSegment ( SegmentKelp segment ) throws IOException { } } | freeTableSegmentLength ( segment . length ( ) ) ; // System . out . println ( " FREE : " + _ tableLength + " " + segment . getLength ( ) + " " + segment ) ;
_segmentService . freeSegment ( segment ) ; |
public class GetVoiceConnectorTerminationHealthRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetVoiceConnectorTerminationHealthRequest getVoiceConnectorTerminationHealthRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getVoiceConnectorTerminationHealthRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getVoiceConnectorTerminationHealthRequest . getVoiceConnectorId ( ) , VOICECONNECTORID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Token { /** * this is used for singleton tokens like COMMA or OPEN _ CURLY */
static Token newWithoutOrigin ( TokenType tokenType , String debugString , String tokenText ) { } } | return new Token ( tokenType , null , tokenText , debugString ) ; |
public class AdaptedAction { /** * ( non - Javadoc )
* @ see com . sporniket . libre . ui . action . UserInterfaceAction # setIconForMenu ( java . lang . Object ) */
@ Override public void setIconForMenu ( IconLocationType iconForMenu ) { } } | if ( null != iconForMenu ) { myUserInterfaceAction . setIconForMenu ( iconForMenu ) ; ImageIcon _icon = retrieveIcon ( getIconForMenu ( ) ) ; putValue ( Action . SMALL_ICON , _icon ) ; } |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcCompositeCurve ( ) { } } | if ( ifcCompositeCurveEClass == null ) { ifcCompositeCurveEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 92 ) ; } return ifcCompositeCurveEClass ; |
public class SnapshotCommand { /** * - - - - - private methods - - - - - */
private void createSnapshot ( final String name , final List < String > types ) throws FrameworkException { } } | // we want to create a sorted , human - readble , diffable representation of the schema
final App app = StructrApp . getInstance ( ) ; // isolate write output
try ( final Tx tx = app . tx ( ) ) { final File snapshotFile = locateFile ( name , true ) ; try ( final Writer writer = new FileWriter ( snapshotFile ) ) { final JsonSchema schema = StructrSchema . createFromDatabase ( app , types ) ; writer . append ( schema . toString ( ) ) ; writer . append ( "\n" ) ; // useful newline
writer . flush ( ) ; } tx . success ( ) ; } catch ( IOException | URISyntaxException ioex ) { logger . warn ( "" , ioex ) ; } |
public class WebRiskServiceV1Beta1Client { /** * This method is used to check whether a URI is on a given threatList .
* < p > Sample code :
* < pre > < code >
* try ( WebRiskServiceV1Beta1Client webRiskServiceV1Beta1Client = WebRiskServiceV1Beta1Client . create ( ) ) {
* String uri = " " ;
* List & lt ; ThreatType & gt ; threatTypes = new ArrayList & lt ; & gt ; ( ) ;
* SearchUrisResponse response = webRiskServiceV1Beta1Client . searchUris ( uri , threatTypes ) ;
* < / code > < / pre >
* @ param uri The URI to be checked for matches .
* @ param threatTypes Required . The ThreatLists to search in .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final SearchUrisResponse searchUris ( String uri , List < ThreatType > threatTypes ) { } } | SearchUrisRequest request = SearchUrisRequest . newBuilder ( ) . setUri ( uri ) . addAllThreatTypes ( threatTypes ) . build ( ) ; return searchUris ( request ) ; |
public class FrequentlyUsedPolicy { /** * Removes the entry . */
private void evictEntry ( Node node ) { } } | data . remove ( node . key ) ; node . remove ( ) ; if ( node . freq . isEmpty ( ) ) { node . freq . remove ( ) ; } |
public class LogRecord { /** * 操作ログオブジェクトを作成します 。
* @ param logger
* ロガー
* @ param logLevel
* ログレベル
* @ param testStep
* テストステップ
* @ param messageKey
* メッセージキー
* @ param params
* メッセージパラメーター
* @ return 操作ログ */
public static LogRecord create ( SitLogger logger , LogLevelVo logLevel , TestStep testStep , String messageKey , Object ... params ) { } } | String msg = MessageManager . getMessage ( messageKey , params ) ; switch ( logLevel ) { case INFO : logger . infoMsg ( msg ) ; break ; case DEBUG : logger . debugMsg ( msg ) ; break ; case ERROR : logger . errorMsg ( msg ) ; break ; case WARN : logger . warnMsg ( msg ) ; break ; default : logger . infoMsg ( msg ) ; } String testStepNo = testStep == null ? "xxx" : testStep . getNo ( ) ; return new LogRecord ( testStepNo , msg ) ; |
public class CodecCollector { /** * Creates the sub query from group hit .
* @ param subHit
* the sub hit
* @ param reverse
* the reverse
* @ param field
* the field
* @ return the mtas span query */
private static MtasSpanQuery createSubQueryFromGroupHit ( List < String > [ ] subHit , boolean reverse , String field ) { } } | MtasSpanQuery query = null ; if ( subHit != null && subHit . length > 0 ) { List < MtasSpanSequenceItem > items = new ArrayList < > ( ) ; List < String > subHitItem ; for ( int i = 0 ; i < subHit . length ; i ++ ) { MtasSpanQuery item = null ; if ( reverse ) { subHitItem = subHit [ ( subHit . length - i - 1 ) ] ; } else { subHitItem = subHit [ i ] ; } if ( subHitItem . isEmpty ( ) ) { item = new MtasSpanMatchAllQuery ( field ) ; } else if ( subHitItem . size ( ) == 1 ) { Term term = new Term ( field , subHitItem . get ( 0 ) ) ; item = new MtasSpanTermQuery ( term ) ; } else { MtasSpanQuery [ ] subList = new MtasSpanQuery [ subHitItem . size ( ) ] ; for ( int j = 0 ; j < subHitItem . size ( ) ; j ++ ) { Term term = new Term ( field , subHitItem . get ( j ) ) ; subList [ j ] = new MtasSpanTermQuery ( term ) ; } item = new MtasSpanAndQuery ( subList ) ; } items . add ( new MtasSpanSequenceItem ( item , false ) ) ; } query = new MtasSpanSequenceQuery ( items , null , null ) ; } return query ; |
public class DeleteMatchmakingRuleSetRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteMatchmakingRuleSetRequest deleteMatchmakingRuleSetRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteMatchmakingRuleSetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteMatchmakingRuleSetRequest . getName ( ) , NAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Ldif { /** * search
* @ param dn Used as the pathname
* @ param filter Ignored
* @ param scope Ignored */
public boolean search ( final String dn , final String filter , final int scope ) throws NamingException { } } | if ( debug ( ) ) { debug ( "Ldif: About to open " + dn ) ; } inp = null ; try { this . in = new FileInputStream ( dn ) ; } catch ( final Throwable t ) { throw new NamingException ( t . getMessage ( ) ) ; } return true ; |
public class SgUtils { /** * CHECKSTYLE : OFF Cyclomatic complexity is OK */
public static void checkMethodModifiers ( final int modifiers ) { } } | // Base check
checkModifiers ( METHOD , modifiers ) ; // Check overlapping modifiers
if ( Modifier . isPrivate ( modifiers ) ) { if ( Modifier . isProtected ( modifiers ) || Modifier . isPublic ( modifiers ) ) { throw new IllegalArgumentException ( METHOD_ACCESS_MODIFIER_ERROR + " [" + Modifier . toString ( modifiers ) + "]" ) ; } } if ( Modifier . isProtected ( modifiers ) ) { if ( Modifier . isPrivate ( modifiers ) || Modifier . isPublic ( modifiers ) ) { throw new IllegalArgumentException ( METHOD_ACCESS_MODIFIER_ERROR + " [" + Modifier . toString ( modifiers ) + "]" ) ; } } if ( Modifier . isPublic ( modifiers ) ) { if ( Modifier . isPrivate ( modifiers ) || Modifier . isProtected ( modifiers ) ) { throw new IllegalArgumentException ( METHOD_ACCESS_MODIFIER_ERROR + " [" + Modifier . toString ( modifiers ) + "]" ) ; } } // Check illegal abstract modifiers
if ( Modifier . isAbstract ( modifiers ) ) { if ( Modifier . isPrivate ( modifiers ) || Modifier . isStatic ( modifiers ) || Modifier . isFinal ( modifiers ) || Modifier . isNative ( modifiers ) || Modifier . isStrict ( modifiers ) || Modifier . isSynchronized ( modifiers ) ) { throw new IllegalArgumentException ( METHOD_ILLEGAL_ABSTRACT_MODIFIERS_ERROR + " [" + Modifier . toString ( modifiers ) + "]" ) ; } } // Check native and strictfp
if ( Modifier . isNative ( modifiers ) && Modifier . isStrict ( modifiers ) ) { throw new IllegalArgumentException ( METHOD_NATIVE_STRICTFP_ERROR + " [" + Modifier . toString ( modifiers ) + "]" ) ; } |
public class AbstractSlideModel { /** * { @ inheritDoc } */
@ Override public boolean nextStep ( ) { } } | initStepPosition ( ) ; final boolean res = this . stepPosition . get ( ) < getStepList ( ) . size ( ) - 1 ; if ( res && view ( ) . isReadyForSlidesStepUpdate ( false ) ) { setCurrentFlow ( SlideFlow . forward ) ; // Launch the next step
showSlideStep ( getStepList ( ) . get ( this . stepPosition . incrementAndGet ( ) ) ) ; } // otherwise no more step return true to go to the next slide
return ! res ; |
public class ElemLinear { /** * Backward pass :
* dG / dx _ i + = dG / dy _ i dy _ i / dx _ i = dG / dy _ i \ lambda
* dG / dw _ i + = dG / dy _ i dy _ i / dw _ i = dG / dy _ i \ gamma */
@ Override public void backward ( ) { } } | Tensor tmp1 = new Tensor ( yAdj ) ; // copy
tmp1 . multiply ( weightX ) ; modInX . getOutputAdj ( ) . elemAdd ( tmp1 ) ; Tensor tmp2 = new Tensor ( yAdj ) ; // copy
tmp2 . multiply ( weightW ) ; modInW . getOutputAdj ( ) . elemAdd ( tmp2 ) ; |
public class OpenCmsListener { /** * De - registers the SQL drivers in order to prevent potential memory leaks . < p > */
private void shutDownSqlDrivers ( ) { } } | // This manually deregisters JDBC driver , which prevents Tomcat 7 from complaining about memory leaks
Enumeration < Driver > drivers = DriverManager . getDrivers ( ) ; while ( drivers . hasMoreElements ( ) ) { Driver driver = drivers . nextElement ( ) ; try { DriverManager . deregisterDriver ( driver ) ; } catch ( Throwable e ) { System . out . println ( Messages . get ( ) . getBundle ( ) . key ( Messages . ERR_DEREGISTERING_JDBC_DRIVER_1 , driver . getClass ( ) . getName ( ) ) ) ; e . printStackTrace ( System . out ) ; } } try { Class < ? > cls = Class . forName ( "com.mysql.jdbc.AbandonedConnectionCleanupThread" ) ; Method shutdownMethod = ( cls == null ? null : cls . getMethod ( "shutdown" ) ) ; if ( shutdownMethod != null ) { shutdownMethod . invoke ( null ) ; } } catch ( Throwable e ) { System . out . println ( "Failed to shutdown MySQL connection cleanup thread: " + e . getMessage ( ) ) ; } |
public class Listeners { /** * Gets the value of the listenerList property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the listenerList property .
* For example , to add a new item , do as follows :
* < pre >
* getListenerList ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link Listener } */
@ Generated ( value = "com.ibm.jtc.jax.tools.xjc.Driver" , date = "2014-06-11T05:49:00-04:00" , comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-" ) public List < Listener > getListenerList ( ) { } } | if ( listenerList == null ) { listenerList = new ArrayList < Listener > ( ) ; } return this . listenerList ; |
public class CmsFormatterConfiguration { /** * Selects the best matching formatter for the provided type and width from this configuration . < p >
* This method first tries to find the formatter for the provided container type .
* If this fails , it returns the width based formatter that matched the container width . < p >
* @ param containerTypes the container types ( comma separated )
* @ param containerWidth the container width
* @ return the matching formatter , or < code > null < / code > if none was found */
public I_CmsFormatterBean getDefaultFormatter ( final String containerTypes , final int containerWidth ) { } } | Optional < I_CmsFormatterBean > result = Iterables . tryFind ( m_allFormatters , new MatchesTypeOrWidth ( containerTypes , containerWidth ) ) ; return result . orNull ( ) ; |
public class Address {

    /**
     * Create an AddressCreator to execute create.
     *
     * @param pathAccountSid The SID of the Account that will be responsible for the new Address resource
     * @param customerName   The name to associate with the new address
     * @param street         The number and street address of the new address
     * @param city           The city of the new address
     * @param region         The state or region of the new address
     * @param postalCode     The postal code of the new address
     * @param isoCountry     The ISO country code of the new address
     * @return AddressCreator capable of executing the create
     */
    public static AddressCreator creator(final String pathAccountSid,
                                         final String customerName,
                                         final String street,
                                         final String city,
                                         final String region,
                                         final String postalCode,
                                         final String isoCountry) {
        // Simple static factory; all validation happens in the creator/execute step.
        return new AddressCreator(pathAccountSid, customerName, street, city, region, postalCode, isoCountry);
    }
}
public class DescribeJobsRequest { /** * A list of up to 100 job IDs .
* @ param jobs
* A list of up to 100 job IDs . */
public void setJobs ( java . util . Collection < String > jobs ) { } } | if ( jobs == null ) { this . jobs = null ; return ; } this . jobs = new java . util . ArrayList < String > ( jobs ) ; |
public class MaxExtensionBulkSplit {

    /**
     * Partitions the specified feature vectors where the split axes are the
     * dimensions with maximum extension.
     *
     * Repeatedly picks the axis of maximal extension, sorts the remaining objects
     * along it, and moves the first {@code splitPoint} objects into a new partition
     * until no objects remain.
     *
     * @param spatialObjects the spatial objects to be partitioned
     * @param minEntries the minimum number of entries in a partition
     * @param maxEntries the maximum number of entries in a partition
     * @return the partition of the specified spatial objects
     */
    @Override
    public <N extends SpatialComparable> List<List<N>> partition(List<N> spatialObjects, int minEntries, int maxEntries) {
        List<List<N>> partitions = new ArrayList<>();
        // work on a mutable copy; the input list is never modified
        List<N> objects = new ArrayList<>(spatialObjects);
        while (!objects.isEmpty()) {
            StringBuilder msg = new StringBuilder();
            // get the split axis and split point
            int splitAxis = chooseMaximalExtendedSplitAxis(objects);
            int splitPoint = chooseBulkSplitPoint(objects.size(), minEntries, maxEntries);
            if (LOG.isDebugging()) {
                msg.append("\nsplitAxis ").append(splitAxis);
                msg.append("\nsplitPoint ").append(splitPoint);
            }
            // sort in the right dimension
            Collections.sort(objects, new SpatialSingleMinComparator(splitAxis));
            // insert into partition: the splitPoint smallest objects along the axis
            List<N> partition1 = new ArrayList<>();
            for (int i = 0; i < splitPoint; i++) {
                N o = objects.remove(0);
                partition1.add(o);
            }
            partitions.add(partition1);
            // copy array
            if (LOG.isDebugging()) {
                msg.append("\ncurrent partition ").append(partition1);
                msg.append("\nremaining objects # ").append(objects.size());
                LOG.debugFine(msg.toString());
            }
        }
        if (LOG.isDebugging()) {
            LOG.debugFine("partitions " + partitions);
        }
        return partitions;
    }
}
public class SetRepositoryPolicyRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * Writes each request field to the protocol marshaller using the
     * pre-declared binding constants.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(SetRepositoryPolicyRequest setRepositoryPolicyRequest, ProtocolMarshaller protocolMarshaller) {
        if (setRepositoryPolicyRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(setRepositoryPolicyRequest.getRegistryId(), REGISTRYID_BINDING);
            protocolMarshaller.marshall(setRepositoryPolicyRequest.getRepositoryName(), REPOSITORYNAME_BINDING);
            protocolMarshaller.marshall(setRepositoryPolicyRequest.getPolicyText(), POLICYTEXT_BINDING);
            protocolMarshaller.marshall(setRepositoryPolicyRequest.getForce(), FORCE_BINDING);
        } catch (Exception e) {
            // wrap any failure, preserving the original cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class RxFile { /** * Get a file extension based on the given file name . */
public static Observable < String > getFileExtension ( final String fileName ) { } } | return Observable . fromCallable ( new Func0 < String > ( ) { @ Override public String call ( ) { return fileName . substring ( ( fileName . lastIndexOf ( '.' ) ) + 1 ) ; } } ) ; |
public class OAuth1 {

    /**
     * Sends a request to the server and returns an input stream from which
     * the response can be read. The caller is responsible for consuming
     * the input stream's content and for closing the stream.
     *
     * @param url the URL to send the request to
     * @param method the HTTP request method
     * @param token a token used for authorization (may be null if the app is not authorized yet)
     * @param additionalAuthParams additional parameters that should be added to the
     *        <code>Authorization</code> header (may be null)
     * @param additionalHeaders additional HTTP headers (may be null)
     * @return a response
     * @throws IOException if the request was not successful
     * @throws RequestException if the server returned an error
     * @throws UnauthorizedException if the request is not authorized
     */
    private Response requestInternal(URL url, Method method, Token token,
            Map<String, String> additionalAuthParams, Map<String, String> additionalHeaders) throws IOException {
        // prepare HTTP connection
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setInstanceFollowRedirects(true);
        conn.setRequestMethod(method.toString());
        conn.setRequestProperty(HEADER_HOST, makeBaseUri(url));
        // nonce derives from the timestamp, so the timestamp must be created first
        String timestamp = makeTimestamp();
        String nonce = makeNonce(timestamp);
        // create OAuth parameters
        Map<String, String> authParams = new HashMap<>();
        if (additionalAuthParams != null) {
            authParams.putAll(additionalAuthParams);
        }
        if (token != null) {
            authParams.put(OAUTH_TOKEN, token.getToken());
        }
        authParams.put(OAUTH_CONSUMER_KEY, consumerKey);
        authParams.put(OAUTH_SIGNATURE_METHOD, HMAC_SHA1_METHOD);
        authParams.put(OAUTH_TIMESTAMP, timestamp);
        authParams.put(OAUTH_NONCE, nonce);
        authParams.put(OAUTH_VERSION, OAUTH_IMPL_VERSION);
        // create signature from method, url, and OAuth parameters
        String signature = makeSignature(method.toString(), url, authParams, token);
        // put OAuth parameters into "Authorization" header; the signature is
        // appended last, after all signed parameters
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, String> e : authParams.entrySet()) {
            appendAuthParam(sb, e.getKey(), e.getValue());
        }
        appendAuthParam(sb, OAUTH_SIGNATURE, signature);
        conn.setRequestProperty(HEADER_AUTHORIZATION, "OAuth " + sb.toString());
        if (additionalHeaders != null) {
            for (Map.Entry<String, String> e : additionalHeaders.entrySet()) {
                conn.setRequestProperty(e.getKey(), e.getValue());
            }
        }
        // perform request
        conn.connect();
        // check response: 401 maps to UnauthorizedException, any other non-200 to RequestException
        if (conn.getResponseCode() == 401) {
            throw new UnauthorizedException("Not authorized");
        } else if (conn.getResponseCode() != 200) {
            throw new RequestException("HTTP request failed with error code: " + conn.getResponseCode());
        }
        return new Response(conn);
    }
}
public class AuthAPI { /** * Creates a request to revoke an existing Refresh Token .
* < pre >
* { @ code
* AuthAPI auth = new AuthAPI ( " me . auth0 . com " , " B3c6RYhk1v9SbIJcRIOwu62gIUGsnze " , " 2679NfkaBn62e6w5E8zNEzjr - yWfkaBne " ) ;
* try {
* auth . revokeToken ( " ej2E8zNEzjrcSD2edjaE " )
* . execute ( ) ;
* } catch ( Auth0Exception e ) {
* / / Something happened
* < / pre >
* @ param refreshToken the refresh token to revoke .
* @ return a Request to execute . */
public Request < Void > revokeToken ( String refreshToken ) { } } | Asserts . assertNotNull ( refreshToken , "refresh token" ) ; String url = baseUrl . newBuilder ( ) . addPathSegment ( PATH_OAUTH ) . addPathSegment ( PATH_REVOKE ) . build ( ) . toString ( ) ; VoidRequest request = new VoidRequest ( client , url , "POST" ) ; request . addParameter ( KEY_CLIENT_ID , clientId ) ; request . addParameter ( KEY_CLIENT_SECRET , clientSecret ) ; request . addParameter ( KEY_TOKEN , refreshToken ) ; return request ; |
public class AnsiLogger {

    /** {@inheritDoc} */
    public void warn(String format, Object... params) {
        // Delegate to the underlying log after wrapping the message in the warning
        // color; the 'true' flag presumably selects emphasized output — TODO confirm
        // against the colored() helper.
        log.warn(colored(format, COLOR_WARNING, true, params));
    }
}
public class CronetClientStream { /** * TODO ( ericgribkoff ) : move header related method to a common place like GrpcUtil . */
private static boolean isApplicationHeader ( String key ) { } } | // Don ' t allow reserved non HTTP / 2 pseudo headers to be added
// HTTP / 2 headers can not be created as keys because Header . Key disallows the ' : ' character .
return ! CONTENT_TYPE_KEY . name ( ) . equalsIgnoreCase ( key ) && ! USER_AGENT_KEY . name ( ) . equalsIgnoreCase ( key ) && ! TE_HEADER . name ( ) . equalsIgnoreCase ( key ) ; |
public class CPDefinitionOptionRelPersistenceImpl {

    /**
     * Clears the cache for all cp definition option rels.
     *
     * The {@link EntityCache} and {@link FinderCache} are both cleared by this method.
     */
    @Override
    public void clearCache() {
        // evict all cached entity instances of this model class
        entityCache.clearCache(CPDefinitionOptionRelImpl.class);
        // evict all cached finder results (by-PK, paginated and unpaginated lists)
        finderCache.clearCache(FINDER_CLASS_NAME_ENTITY);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
    }
}
public class Utils4J { /** * Creates a HEX encoded hash from a stream .
* @ param inputStream
* Stream to create a hash for - Cannot be < code > null < / code > .
* @ param algorithm
* Hash algorithm like " MD5 " or " SHA " - Cannot be < code > null < / code > .
* @ return HEX encoded hash . */
public static String createHash ( final InputStream inputStream , final String algorithm ) { } } | checkNotNull ( "inputStream" , inputStream ) ; checkNotNull ( "algorithm" , algorithm ) ; try { final MessageDigest messageDigest = MessageDigest . getInstance ( algorithm ) ; try ( final BufferedInputStream in = new BufferedInputStream ( inputStream ) ) { final byte [ ] buf = new byte [ 1024 ] ; int count = 0 ; while ( ( count = in . read ( buf ) ) > - 1 ) { messageDigest . update ( buf , 0 , count ) ; } } return encodeHex ( messageDigest . digest ( ) ) ; } catch ( final NoSuchAlgorithmException | IOException ex ) { throw new RuntimeException ( ex ) ; } |
public class Ifc4PackageImpl {

    /**
     * Returns the {@link EEnum} meta-object for the IfcOutletTypeEnum enumeration.
     *
     * Generated EMF accessor - do not edit by hand. Lazily resolves the enum from
     * the globally registered Ifc4 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcOutletTypeEnum() {
        if (ifcOutletTypeEnumEEnum == null) {
            // classifier index 1025 is a generated constant for this package layout
            ifcOutletTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1025);
        }
        return ifcOutletTypeEnumEEnum;
    }
}
public class AdditionalRequestHeadersInterceptor { /** * Adds the list of header values for the given header .
* Note that { @ code headerName } and { @ code headerValues } cannot be null .
* @ param headerName the name of the header
* @ param headerValues the list of values to add for the header
* @ throws NullPointerException if either parameter is { @ code null } */
public void addAllHeaderValues ( String headerName , List < String > headerValues ) { } } | Objects . requireNonNull ( headerName , "headerName cannot be null" ) ; Objects . requireNonNull ( headerValues , "headerValues cannot be null" ) ; getHeaderValues ( headerName ) . addAll ( headerValues ) ; |
public class Ifc4FactoryImpl {

    /**
     * Converts an IfcTaskTypeEnum instance value to its string form.
     *
     * Generated EMF converter - do not edit by hand. Returns {@code null}
     * for a {@code null} instance value, otherwise the value's {@code toString()}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String convertIfcTaskTypeEnumToString(EDataType eDataType, Object instanceValue) {
        return instanceValue == null ? null : instanceValue.toString();
    }
}
public class Collectors { /** * Returns a { @ code Collector } that reduces input elements .
* @ param < T > the type of the input elements
* @ param < R > the type of the output elements
* @ param identity the initial value
* @ param mapper the mapping function
* @ param op the operator to reduce elements
* @ return a { @ code Collector }
* @ see # reducing ( java . lang . Object , com . annimon . stream . function . BinaryOperator ) */
@ NotNull public static < T , R > Collector < T , ? , R > reducing ( @ Nullable final R identity , @ NotNull final Function < ? super T , ? extends R > mapper , @ NotNull final BinaryOperator < R > op ) { } } | return new CollectorsImpl < T , Tuple1 < R > , R > ( new Supplier < Tuple1 < R > > ( ) { @ NotNull @ Override public Tuple1 < R > get ( ) { return new Tuple1 < R > ( identity ) ; } } , new BiConsumer < Tuple1 < R > , T > ( ) { @ Override public void accept ( @ NotNull Tuple1 < R > tuple , T value ) { tuple . a = op . apply ( tuple . a , mapper . apply ( value ) ) ; } } , new Function < Tuple1 < R > , R > ( ) { @ Override public R apply ( @ NotNull Tuple1 < R > tuple ) { return tuple . a ; } } ) ; |
public class PropertyPlaceholderHelper {

    /**
     * Replaces all placeholders of format {@code ${name}} with the value returned
     * from the supplied {@link PlaceholderResolver}.
     *
     * @param value the value containing the placeholders to be replaced
     * @param placeholderResolver the {@code PlaceholderResolver} to use for replacement
     * @return the supplied value with placeholders replaced inline
     */
    public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) {
        Assert.notNull(value, "'value' must not be null");
        // a fresh visited-placeholder set per call; presumably used by parseStringValue
        // to detect circular placeholder references — confirm in parseStringValue
        return parseStringValue(value, placeholderResolver, new HashSet<String>());
    }
}
public class ScriptRuntime {

    /**
     * Call obj.[[Put]](id, value).
     *
     * Coerces {@code obj} to a {@link Scriptable}; writing a property on a value
     * that cannot be coerced (null/undefined) raises the standard write error.
     */
    public static Object setObjectElem(Object obj, Object elem, Object value, Context cx, Scriptable scope) {
        Scriptable sobj = toObjectOrNull(cx, obj, scope);
        if (sobj == null) {
            // cannot set an element on null/undefined
            throw undefWriteError(obj, elem, value);
        }
        // delegate to the Scriptable-typed overload
        return setObjectElem(sobj, elem, value, cx);
    }
}
public class Util { /** * Counts the number of occurrences of a weekday in a given period .
* @ param dow the weekday
* @ param dow0 the weekday of the first day of the period
* @ param nDays the number of days in the period */
static int countInPeriod ( DayOfWeek dow , DayOfWeek dow0 , int nDays ) { } } | // two cases :
// (1a ) dow > = dow0 : count = = = ( nDays - ( dow - dow0 ) ) / 7
// (1b ) dow < dow0 : count = = = ( nDays - ( 7 - dow0 - dow ) ) / 7
if ( dow . getCalendarConstant ( ) >= dow0 . getCalendarConstant ( ) ) { return 1 + ( ( nDays - ( dow . getCalendarConstant ( ) - dow0 . getCalendarConstant ( ) ) - 1 ) / 7 ) ; } else { return 1 + ( ( nDays - ( 7 - ( dow0 . getCalendarConstant ( ) - dow . getCalendarConstant ( ) ) ) - 1 ) / 7 ) ; } |
public class FqlResult { /** * Returns the value of the identified field as a Float .
* @ param fieldName the name of the field
* @ return the value of the field as a Float
* @ throws FqlException if the field cannot be expressed as an Float */
public Float getFloat ( String fieldName ) { } } | try { return hasValue ( fieldName ) ? Float . valueOf ( String . valueOf ( resultMap . get ( fieldName ) ) ) : null ; } catch ( NumberFormatException e ) { throw new FqlException ( "Field '" + fieldName + "' is not a number." , e ) ; } |
public class Table { /** * Returns the next constraint of a given type
* @ param from
* @ param type */
int getNextConstraintIndex ( int from , int type ) { } } | for ( int i = from , size = constraintList . length ; i < size ; i ++ ) { Constraint c = constraintList [ i ] ; if ( c . getConstraintType ( ) == type ) { return i ; } } return - 1 ; |
public class Numbers {

    /**
     * Parse an <b>int</b> with a given default value <b>i</b> and
     * a given range between <b>min</b> and <b>max</b> (inclusive).
     *
     * @return default value if null or not parsable, clamped into [min, max]
     */
    public static int parseInt(String s, int i, int min, int max) {
        // parse with fallback 'i', then clamp the result into the allowed range
        return minMax(min, parseInt(s, i), max);
    }
}
public class MemoryStateUpdater {

    /**
     * Processes the given operations and applies them to the ReadIndex and InMemory OperationLog.
     *
     * @param operations An Iterator iterating over the operations to process (in sequence).
     * @throws DataCorruptionException If a serious, non-recoverable, data corruption was detected, such as trying to
     *                                 append operations out of order.
     */
    void process(Iterator<Operation> operations) throws DataCorruptionException {
        HashSet<Long> segmentIds = new HashSet<>();
        while (operations.hasNext()) {
            Operation op = operations.next();
            // apply the single operation (delegates to the per-operation overload)
            process(op);
            if (op instanceof SegmentOperation) {
                // Record recent activity on stream segment, if applicable. This should be recorded for any kind
                // of Operation that touches a Segment, since when we issue 'triggerFutureReads' on the readIndex,
                // it should include 'sealed' StreamSegments too - any Future Reads waiting on that Offset will be cancelled.
                segmentIds.add(((SegmentOperation) op).getStreamSegmentId());
            }
        }
        if (!this.recoveryMode.get()) {
            // Trigger Future Reads on those segments which were touched by Appends or Seals.
            this.readIndex.triggerFutureReads(segmentIds);
            if (this.commitSuccess != null) {
                // optional commit callback, only outside recovery mode
                this.commitSuccess.run();
            }
        }
    }
}
public class PelopsClient {

    /**
     * Finds entities whose given column equals the given value.
     *
     * Uses the CQL3 query path when enabled for the entity; otherwise performs a
     * Thrift secondary-index lookup via a Pelops selector.
     *
     * (non-Javadoc)
     * @see com.impetus.kundera.client.Client#find(java.lang.String,
     *      java.lang.String, com.impetus.kundera.metadata.model.EntityMetadata)
     */
    @Override
    public List<Object> findByRelation(String colName, Object colValue, Class clazz) {
        EntityMetadata m = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, clazz);
        List<Object> entities = null;
        if (isCql3Enabled(m)) {
            entities = findByRelationQuery(m, colName, colValue, clazz, dataHandler);
        } else {
            Selector selector = clientFactory.getSelector(pool);
            // fetch all columns, capped at 10000 per row / 10000 rows
            SlicePredicate slicePredicate = Selector.newColumnsPredicateAll(false, 10000);
            IndexClause ix = Selector.newIndexClause(Bytes.EMPTY, 10000,
                    Selector.newIndexExpression(colName, IndexOperator.EQ,
                            Bytes.fromByteArray(PropertyAccessorHelper.getBytes(colValue))));
            Map<Bytes, List<Column>> qResults;
            try {
                qResults = selector.getIndexedColumns(m.getTableName(), ix, slicePredicate, getConsistencyLevel());
            } catch (PelopsException e) {
                // existing contract: warn and return null on retrieval failure
                log.warn("Error while retrieving entities for given column {} for class {}.", colName, clazz);
                return entities;
            }
            entities = new ArrayList<Object>(qResults.size());
            // iterate through complete map and populateData
            MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(m.getPersistenceUnit());
            EntityType entityType = metaModel.entity(m.getEntityClazz());
            List<AbstractManagedType> subManagedType = ((AbstractManagedType) entityType).getSubManagedType();
            if (subManagedType.isEmpty()) {
                entities = populateData(m, qResults, entities, false, m.getRelationNames(), dataHandler);
            } else {
                // inheritance hierarchy: populate once per sub-entity's metadata
                for (AbstractManagedType subEntity : subManagedType) {
                    EntityMetadata subEntityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, subEntity.getJavaType());
                    entities = populateData(subEntityMetadata, qResults, entities, false, subEntityMetadata.getRelationNames(), dataHandler);
                }
            }
        }
        return entities;
    }
}
public class ShortArrays {

    /**
     * Counts the number of indices that appear in both arrays.
     *
     * @param indices1 Sorted array of indices.
     * @param indices2 Sorted array of indices.
     * @return how many values occur in both arrays.
     */
    public static short countCommon(short[] indices1, short[] indices2) {
        short numCommonIndices = 0;
        int i = 0;
        int j = 0;
        // Standard merge walk over two sorted arrays: advance the pointer with the
        // smaller value; equal values are common and advance both.
        while (i < indices1.length && j < indices2.length) {
            if (indices1[i] < indices2[j]) {
                i++;
            } else if (indices2[j] < indices1[i]) {
                j++;
            } else {
                // Equal indices.
                numCommonIndices++;
                i++;
                j++;
            }
        }
        // BUG FIX: the previous implementation also counted the leftover tail of each
        // array, which made the result |A| + |B| - |A∩B| (the union size) instead of
        // the documented intersection count. Leftover elements cannot appear in both
        // arrays, so they must not be counted.
        return numCommonIndices;
    }
}
public class WorkflowStatisticsFetcher {

    /**
     * Add the requested query string arguments to the Request.
     *
     * Only fields that have been set (non-null) are appended as query parameters.
     *
     * @param request Request to add query string arguments to
     */
    private void addQueryParams(final Request request) {
        if (minutes != null) {
            request.addQueryParam("Minutes", minutes.toString());
        }
        if (startDate != null) {
            request.addQueryParam("StartDate", startDate.toString());
        }
        if (endDate != null) {
            request.addQueryParam("EndDate", endDate.toString());
        }
        if (taskChannel != null) {
            request.addQueryParam("TaskChannel", taskChannel);
        }
        if (splitByWaitTime != null) {
            request.addQueryParam("SplitByWaitTime", splitByWaitTime);
        }
    }
}
public class ApplicationPermissionRepository { /** * region > newPermission ( programmatic ) */
@ Programmatic public ApplicationPermission newPermission ( final ApplicationRole role , final ApplicationPermissionRule rule , final ApplicationPermissionMode mode , final ApplicationFeatureType featureType , final String featureFqn ) { } } | final ApplicationFeatureId featureId = ApplicationFeatureId . newFeature ( featureType , featureFqn ) ; final ApplicationFeature feature = applicationFeatureRepository . findFeature ( featureId ) ; if ( feature == null ) { container . warnUser ( "No such " + featureType . name ( ) . toLowerCase ( ) + ": " + featureFqn ) ; return null ; } return newPermissionNoCheck ( role , rule , mode , featureType , featureFqn ) ; |
public class ZonedDateTimeXmlAdapter { /** * When marshalling , the provided zonedDateTime is normalized to UTC , so that the output is consistent between
* dates and time zones .
* @ param zonedDateTime the zonedDateTime to marshall
* @ return the UTC - based date time in ISO format */
@ Override public String marshal ( ZonedDateTime zonedDateTime ) { } } | if ( null == zonedDateTime ) { return null ; } return zonedDateTime . withZoneSameInstant ( ZoneOffset . UTC ) . format ( formatter ) ; |
public class HttpSupport { /** * Returns response headers
* @ return map with response headers . */
public Map < String , String > getResponseHeaders ( ) { } } | Collection < String > names = RequestContext . getHttpResponse ( ) . getHeaderNames ( ) ; Map < String , String > headers = new HashMap < > ( ) ; for ( String name : names ) { headers . put ( name , RequestContext . getHttpResponse ( ) . getHeader ( name ) ) ; } return headers ; |
public class GlobalUsersInner {

    /**
     * Get batch operation status.
     *
     * @param userName The name of the user.
     * @param urls The operation url of long running operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the OperationBatchStatusResponseInner object if successful.
     */
    public OperationBatchStatusResponseInner getOperationBatchStatus(String userName, List<String> urls) {
        // synchronous wrapper: block on the async call and unwrap the response body
        return getOperationBatchStatusWithServiceResponseAsync(userName, urls).toBlocking().single().body();
    }
}
public class EnterClickAdapter { /** * Binds a listener to the supplied target text box that triggers the supplied click handler
* when enter is pressed on the text box . */
public static HandlerRegistration bind ( HasKeyDownHandlers target , ClickHandler onEnter ) { } } | return target . addKeyDownHandler ( new EnterClickAdapter ( onEnter ) ) ; |
public class EglCore { /** * Creates an EGL surface associated with an offscreen buffer . */
public EGLSurface createOffscreenSurface ( int width , int height ) { } } | int [ ] surfaceAttribs = { EGL14 . EGL_WIDTH , width , EGL14 . EGL_HEIGHT , height , EGL14 . EGL_NONE } ; EGLSurface eglSurface = EGL14 . eglCreatePbufferSurface ( mEGLDisplay , mEGLConfig , surfaceAttribs , 0 ) ; checkEglError ( "eglCreatePbufferSurface" ) ; if ( eglSurface == null ) { throw new RuntimeException ( "surface was null" ) ; } return eglSurface ; |
public class SvdImplicitQrAlgorithm_DDRM {

    /**
     * Here the lambda in the implicit step is determined dynamically. At first
     * it selects zeros to quickly reveal singular values that are zero or close to zero.
     * Then it computes it using a Wilkinson shift.
     */
    private void performDynamicStep() {
        // initially look for singular values of zero
        if (findingZeros) {
            // after several zero-seeking steps without success, switch to Wilkinson shifts
            if (steps > 6) {
                findingZeros = false;
            } else {
                // zero shift (lambda == 0) to reveal zero / near-zero singular values
                double scale = computeBulgeScale();
                performImplicitSingleStep(scale, 0, false);
            }
        } else {
            // For very large and very small numbers the only way to prevent overflow/underflow
            // is to have a common scale between the wilkinson shift and the implicit single step
            // What happens if you don't is that when the wilkinson shift returns the value it
            // computed it multiplies it by the scale twice, which will cause an overflow
            double scale = computeBulgeScale();
            // use the wilkinson shift to perform a step
            double lambda = selectWilkinsonShift(scale);
            performImplicitSingleStep(scale, lambda, false);
        }
    }
}
public class Logger { /** * Sets the log tag .
* @ param logTag The new tag value . */
public static void setLogTag ( final String logTag ) { } } | Logger . logTag = TextUtils . isEmpty ( logTag ) ? LOG_TAG : logTag ; |
public class OWLTransitiveObjectPropertyAxiomImpl_CustomFieldSerializer {

    /**
     * Serializes the content of the object into the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}.
     *
     * @param streamWriter the {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}
     *                     to write the object's content to
     * @param instance the object instance to serialize
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *         if the serialization operation is not successful
     */
    @Override
    public void serializeInstance(SerializationStreamWriter streamWriter, OWLTransitiveObjectPropertyAxiomImpl instance) throws SerializationException {
        // delegate to the static serialize helper (GWT custom-field-serializer pattern)
        serialize(streamWriter, instance);
    }
}
public class Groundy {

    /**
     * Inserts a Serializable value into the mapping of this Bundle, replacing any
     * existing value for the given key. Either key or value may be null.
     *
     * @param key a String, or null
     * @param value a Serializable object, or null
     * @return this instance, for call chaining
     */
    public Groundy arg(String key, Serializable value) {
        mArgs.putSerializable(key, value);
        return this;
    }
}
public class ConstructorUtils {

    /**
     * Returns a new object of <i>className</i>. The object is cast to the <i>ofType</i>,
     * which is either a super class or interface of the className class.
     *
     * @param className Name of the class to instantiate an object of
     * @param ofType A super class or interface of the className class.
     * @param constructorParams Constructor parameter types to match when creating a new instance
     * @param constructorArgs Constructor arguments to use when creating a new instance
     * @return A new instance of class with name className cast to the ofType class.
     * @throws IllegalStateException if className could not be loaded, or if that class does not
     *         have a constructor matching constructorParams, or if the loaded class is not of
     *         the supplied type (ofType).
     */
    public static <T> T getInstance(String className, Class<T> ofType, Class<?>[] constructorParams, Object[] constructorArgs) {
        Constructor<?> constructor = null;
        try {
            Class<?> clazz = Class.forName(className);
            constructor = clazz.getConstructor(constructorParams);
            // Use Class.cast instead of an erased (T) cast: the erased cast never fails
            // here, so the documented "not of the supplied type" IllegalStateException
            // was previously unreachable. cast() throws ClassCastException, which the
            // catch below wraps as documented.
            return ofType.cast(constructor.newInstance(constructorArgs));
        } catch (Exception e) {
            // Fixed typo ("instanceiate") and array rendering: concatenating a Class[]
            // directly prints "[Ljava.lang.Class;@..." rather than the parameter list.
            String errorMessage = "Could not instantiate " + className + ".";
            if (constructor == null) {
                errorMessage += " No constructor found with parameters " + java.util.Arrays.toString(constructorParams);
            } else if (constructorParams.length == 0) {
                errorMessage += " Using default constructor.";
            } else {
                errorMessage += " " + constructor;
            }
            throw new IllegalStateException(errorMessage, e);
        }
    }
}
public class LObjIntBytePredicateBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static < T > LObjIntBytePredicate < T > objIntBytePredicateFrom ( Consumer < LObjIntBytePredicateBuilder < T > > buildingFunction ) { } } | LObjIntBytePredicateBuilder builder = new LObjIntBytePredicateBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ; |
public class WeakHashMapPro {

    /**
     * Expunges stale entries from the table.
     *
     * Drains the reference queue of entries whose keys have been garbage-collected
     * and unlinks each one from its hash bucket, clearing the value to help the GC.
     */
    private void expungeStaleEntries() {
        for (Object x; (x = queue.poll()) != null; ) {
            // synchronize on the queue while unlinking from the bucket chain
            synchronized (queue) {
                @SuppressWarnings("unchecked")
                Entry<K, V> e = (Entry<K, V>) x;
                int i = indexFor(e.hash, table.length);
                // walk the bucket chain looking for the stale entry
                Entry<K, V> prev = table[i];
                Entry<K, V> p = prev;
                while (p != null) {
                    Entry<K, V> next = p.next;
                    if (p == e) {
                        if (prev == e)
                            table[i] = next;
                        else
                            prev.next = next;
                        // Must not null out e.next;
                        // stale entries may be in use by a HashIterator
                        e.value = null; // Help GC
                        size--;
                        break;
                    }
                    prev = p;
                    p = next;
                }
            }
        }
    }
}
public class DeflatingStreamSinkConduit {

    /**
     * Runs the current data through the deflater. As much as possible this will be buffered
     * in the current output stream.
     *
     * @param force if true, uses SYNC_FLUSH and keeps deflating until the deflater produces
     *              no more output
     * @throws IOException if flushing the downstream channel fails
     */
    private void deflateData(boolean force) throws IOException {
        // we don't need to flush here, as this should have been called already by the time we get to
        // this point
        boolean nextCreated = false;
        try (PooledByteBuffer arrayPooled = this.exchange.getConnection().getByteBufferPool().getArrayBackedPool().allocate()) {
            PooledByteBuffer pooled = this.currentBuffer;
            final ByteBuffer outputBuffer = pooled.getBuffer();
            final boolean shutdown = anyAreSet(state, SHUTDOWN);
            // scratch buffer with an accessible backing array for Deflater.deflate()
            ByteBuffer buf = arrayPooled.getBuffer();
            while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
                int count = deflater.deflate(buf.array(), buf.arrayOffset(), buf.remaining(), force ? Deflater.SYNC_FLUSH : Deflater.NO_FLUSH);
                Connectors.updateResponseBytesSent(exchange, count);
                if (count != 0) {
                    int remaining = outputBuffer.remaining();
                    if (remaining > count) {
                        // the deflated chunk fits in the output buffer with room to spare
                        outputBuffer.put(buf.array(), buf.arrayOffset(), count);
                    } else {
                        if (remaining == count) {
                            outputBuffer.put(buf.array(), buf.arrayOffset(), count);
                        } else {
                            // output buffer overflows: stash the excess in additionalBuffer
                            outputBuffer.put(buf.array(), buf.arrayOffset(), remaining);
                            additionalBuffer = ByteBuffer.allocate(count - remaining);
                            additionalBuffer.put(buf.array(), buf.arrayOffset() + remaining, count - remaining);
                            additionalBuffer.flip();
                        }
                        // output buffer is full: mark it for flushing downstream
                        outputBuffer.flip();
                        this.state |= FLUSHING_BUFFER;
                        if (next == null) {
                            nextCreated = true;
                            this.next = createNextChannel();
                        }
                        // if the flush could not complete, bail out and retry later
                        if (!performFlushIfRequired()) {
                            return;
                        }
                    }
                } else {
                    // no output produced; stop forcing further SYNC_FLUSH rounds
                    force = false;
                }
            }
        } finally {
            // if we lazily created the downstream channel, propagate write resumption
            if (nextCreated) {
                if (anyAreSet(state, WRITES_RESUMED)) {
                    next.resumeWrites();
                }
            }
        }
    }
}
public class PortableClassAccess { /** * Get a new instance that can access the given Class . If the ClassAccess for this class
* has not been obtained before , then the specific PortableClassAccess is created by generating
* a specialised subclass of this class and returning it .
* @ param clazz Class to be accessed
* @ param < C > The type of class
* @ return New PortableClassAccess instance */
public static < C > PortableClassAccess < C > get ( Class < C > clazz ) { } } | @ SuppressWarnings ( "unchecked" ) PortableClassAccess < C > access = ( PortableClassAccess < C > ) CLASS_ACCESSES . get ( clazz ) ; if ( access != null ) { return access ; } access = new PortableClassAccess < C > ( clazz ) ; CLASS_ACCESSES . putIfAbsent ( clazz , access ) ; return access ; |
public class AmazonDynamoDBAsyncClient { /** * Gets the values of one or more items and its attributes by primary key
* ( composite primary key , only ) .
* Narrow the scope of the query using comparison operators on the
* < code > RangeKeyValue < / code > of the composite key . Use the
* < code > ScanIndexForward < / code > parameter to get results in forward or
* reverse order by range key .
* @ param queryRequest Container for the necessary parameters to execute
* the Query operation on AmazonDynamoDB .
* @ return A Java Future object containing the response from the Query
* service method , as returned by AmazonDynamoDB .
* @ throws AmazonClientException
* If any internal errors are encountered inside the client while
* attempting to make the request or handle the response . For example
* if a network connection is not available .
* @ throws AmazonServiceException
* If an error response is returned by AmazonDynamoDB indicating
* either a problem with the data in the request , or a server side issue . */
public Future < QueryResult > queryAsync ( final QueryRequest queryRequest ) throws AmazonServiceException , AmazonClientException { } } | return executorService . submit ( new Callable < QueryResult > ( ) { public QueryResult call ( ) throws Exception { return query ( queryRequest ) ; } } ) ; |
public class DatePickerDialog { /** * Sets the minimal date supported by this DatePicker . Dates before ( but not including ) the
* specified date will be disallowed from being selected .
* @ param calendar a Calendar object set to the year , month , day desired as the mindate . */
@ SuppressWarnings ( "unused" ) public void setMinDate ( Calendar calendar ) { } } | mDefaultLimiter . setMinDate ( calendar ) ; if ( mDayPickerView != null ) { mDayPickerView . onChange ( ) ; } |
public class JobGraph { /** * Adds a new task vertex to the job graph if it is not already included .
* @ param vertex
* the new task vertex to be added */
public void addVertex ( JobVertex vertex ) { } } | final JobVertexID id = vertex . getID ( ) ; JobVertex previous = taskVertices . put ( id , vertex ) ; // if we had a prior association , restore and throw an exception
if ( previous != null ) { taskVertices . put ( id , previous ) ; throw new IllegalArgumentException ( "The JobGraph already contains a vertex with that id." ) ; } |
public class PasswordMaker {

    /**
     * Intermediate step of generating a password. Repeatedly hashes the input, with a
     * counter mixed in after the first round, until the accumulated output is at least
     * as long as the account's requested password length.
     *
     * <p>All intermediate secure strings are erased on every path (including failure)
     * so key material does not linger in memory.
     *
     * @param masterPassword the user's master password
     * @param data           the site-specific data to mix into the hash
     * @param account        account settings; supplies the required output length
     * @return a hash of at least {@code account.getLength()} characters
     * @throws Exception if the underlying hashing algorithm fails; the partial
     *                   output is erased before the exception propagates
     */
    private SecureUTF8String hashTheData(SecureUTF8String masterPassword, SecureUTF8String data,
            Account account) throws Exception {
        final SecureUTF8String output = new SecureUTF8String();
        final SecureUTF8String secureIteration = new SecureUTF8String();
        SecureUTF8String intermediateOutput = null;
        int count = 0;
        final int length = account.getLength();
        try {
            while (output.size() < length) {
                if (count == 0) {
                    // first round: hash the raw master password
                    intermediateOutput = runAlgorithm(masterPassword, data, account);
                } else {
                    // subsequent rounds: append "\n<count>" to the master password so each
                    // round produces different output (add ye bit 'o chaos)
                    secureIteration.replace(masterPassword);
                    secureIteration.append(NEW_LINE);
                    secureIteration.append(new SecureCharArray(Integer.toString(count)));
                    intermediateOutput = runAlgorithm(secureIteration, data, account);
                    secureIteration.erase();
                }
                output.append(intermediateOutput);
                // wipe the round's hash immediately after it has been accumulated
                intermediateOutput.erase();
                count++;
            }
        } catch (Exception e) {
            // do not leak a partial password on failure
            output.erase();
            throw e;
        } finally {
            // belt-and-braces: erase scratch material on every exit path
            if (intermediateOutput != null)
                intermediateOutput.erase();
            secureIteration.erase();
        }
        return output;
    }
}
public class NoopPublisher { /** * Publish the data for the given tasks .
* @ param states */
@ Override public void publishData ( Collection < ? extends WorkUnitState > states ) throws IOException { } } | for ( WorkUnitState state : states ) { if ( state . getWorkingState ( ) == WorkUnitState . WorkingState . SUCCESSFUL ) { state . setWorkingState ( WorkUnitState . WorkingState . COMMITTED ) ; log . info ( "Marking state committed" ) ; } } |
public class Preconditions { /** * Ensures that an object reference passed as a parameter to the calling method is not null .
* @ param reference an object reference
* @ param errorMessageTemplate a template for the exception message should the check fail . The
* message is formed by replacing each { @ code % s } placeholder in the template with an
* argument . These are matched by position - the first { @ code % s } gets { @ code
* errorMessageArgs [ 0 ] } , etc . Unmatched arguments will be appended to the formatted message in
* square braces . Unmatched placeholders will be left as - is .
* @ param errorMessageArgs the arguments to be substituted into the message template . Arguments
* are converted to strings using { @ link String # valueOf ( Object ) } .
* @ return the non - null reference that was validated
* @ throws NullPointerException if { @ code reference } is null */
public static < T > T checkNotNull ( T reference , String errorMessageTemplate , Object ... errorMessageArgs ) { } } | return com . google . common . base . Preconditions . checkNotNull ( reference , errorMessageTemplate , errorMessageArgs ) ; |
public class FileUtil { /** * 文件移动 / 重命名 .
* @ see { @ link Files # move } */
public static void moveFile ( @ NotNull Path from , @ NotNull Path to ) throws IOException { } } | Validate . isTrue ( isFileExists ( from ) , "%s is not exist or not a file" , from ) ; Validate . notNull ( to ) ; Validate . isTrue ( ! isDirExists ( to ) , "%s is exist but it is a dir" , to ) ; Files . move ( from , to ) ; |
public class PythonPlanBinder { /** * Entry point for the execution of a python plan .
* @ param args planPath [ package1 [ packageX [ - parameter1 [ parameterX ] ] ] ]
* @ throws Exception */
public static void main ( String [ ] args ) throws Exception { } } | Configuration globalConfig = GlobalConfiguration . loadConfiguration ( ) ; PythonPlanBinder binder = new PythonPlanBinder ( globalConfig ) ; try { binder . runPlan ( args ) ; } catch ( Exception e ) { System . out . println ( "Failed to run plan: " + e . getMessage ( ) ) ; LOG . error ( "Failed to run plan." , e ) ; } |
public class LineItemSummary { /** * Gets the deliveryData value for this LineItemSummary .
* @ return deliveryData * Delivery data provides the number of clicks or impressions
* delivered for a
* { @ link LineItem } in the last 7 days . This attribute
* is readonly and is
* populated by Google . This will be { @ code null } if
* the delivery data cannot
* be computed due to one of the following reasons :
* < ol >
* < li >
* The line item is not deliverable . < / li >
* < li >
* The line item has completed delivering more than 7
* days ago . < / li >
* < li >
* The line item has an absolute - based goal .
* { @ link LineItem # deliveryIndicator } should be used
* to track its
* progress in this case . < / li > */
public com . google . api . ads . admanager . axis . v201808 . DeliveryData getDeliveryData ( ) { } } | return deliveryData ; |
public class GraphicalIndicatorReader { /** * The main entry point for processing graphical indicator definitions .
* @ param indicators graphical indicators container
* @ param properties project properties
* @ param props properties data */
public void process ( CustomFieldContainer indicators , ProjectProperties properties , Props props ) { } } | m_container = indicators ; m_properties = properties ; m_data = props . getByteArray ( Props . TASK_FIELD_ATTRIBUTES ) ; if ( m_data != null ) { int columnsCount = MPPUtility . getInt ( m_data , 4 ) ; m_headerOffset = 8 ; for ( int loop = 0 ; loop < columnsCount ; loop ++ ) { processColumns ( ) ; } } |
public class Lifecycle { /** * Changes to the next state .
* @ return true if the transition is allowed */
private boolean toState ( LifecycleState newState ) { } } | LifecycleState state ; synchronized ( this ) { state = _state ; _state = newState ; _lastChangeTime = CurrentTime . currentTime ( ) ; } if ( _log != null && _log . isLoggable ( _lowLevel ) ) { _log . log ( _lowLevel , newState + " " + _name ) ; } notifyListeners ( state , newState ) ; return true ; |
public class AFactoryAppBeansJdbc { /** * < p > Get SrvDatabase in lazy mode . < / p >
* @ return SrvDatabase - SrvDatabase
* @ throws Exception - an exception */
@ Override public final synchronized SrvDatabase lazyGetSrvDatabase ( ) throws Exception { } } | String beanName = getSrvDatabaseName ( ) ; SrvDatabase srvDatabase = ( SrvDatabase ) getBeansMap ( ) . get ( beanName ) ; if ( srvDatabase == null ) { srvDatabase = new SrvDatabase ( ) ; srvDatabase . setLogger ( lazyGetLogger ( ) ) ; srvDatabase . setHlpInsertUpdate ( lazyGetHlpInsertUpdate ( ) ) ; srvDatabase . setDataSource ( lazyGetDataSource ( ) ) ; getBeansMap ( ) . put ( beanName , srvDatabase ) ; lazyGetLogger ( ) . info ( null , AFactoryAppBeansJdbc . class , beanName + " has been created." ) ; } return srvDatabase ; |
public class DatatypeConverter { /** * Detect numbers using comma as a decimal separator and replace with period .
* @ param value original numeric value
* @ return corrected numeric value */
private static final String correctNumberFormat ( String value ) { } } | String result ; int index = value . indexOf ( ',' ) ; if ( index == - 1 ) { result = value ; } else { char [ ] chars = value . toCharArray ( ) ; chars [ index ] = '.' ; result = new String ( chars ) ; } return result ; |
public class DockerMachine { /** * Checks if Docker Machine is installed by running docker - machine and inspect the result .
* @ param cliPathExec
* location of docker - machine or null if it is on PATH .
* @ return true if it is installed , false otherwise . */
public boolean isDockerMachineInstalled ( String cliPathExec ) { } } | try { commandLineExecutor . execCommand ( createDockerMachineCommand ( cliPathExec ) ) ; return true ; } catch ( Exception e ) { return false ; } |
public class CodeGenerator { /** * generate < code > size < / code > method source code
* @ return */
private String getSizeMethodCode ( ) { } } | StringBuilder code = new StringBuilder ( ) ; Set < Integer > orders = new HashSet < Integer > ( ) ; // encode method
code . append ( "public int size(" ) . append ( ClassHelper . getInternalName ( cls . getCanonicalName ( ) ) ) ; code . append ( " t) throws IOException {" ) . append ( LINE_BREAK ) ; code . append ( "int size = 0" ) . append ( JAVA_LINE_BREAK ) ; for ( FieldInfo field : fields ) { boolean isList = field . isList ( ) ; // check type
if ( ! isList ) { checkType ( field . getFieldType ( ) , field . getField ( ) ) ; } if ( orders . contains ( field . getOrder ( ) ) ) { throw new IllegalArgumentException ( "Field order '" + field . getOrder ( ) + "' on field" + field . getField ( ) . getName ( ) + " already exsit." ) ; } // define field
code . append ( CodedConstant . getMappedTypeDefined ( field . getOrder ( ) , field . getFieldType ( ) , getAccessByField ( "t" , field . getField ( ) , cls ) , isList , field . isMap ( ) ) ) ; // compute size
code . append ( "if (!CodedConstant.isNull(" ) . append ( getAccessByField ( "t" , field . getField ( ) , cls ) ) . append ( "))" ) . append ( "{" ) . append ( LINE_BREAK ) ; code . append ( "size += " ) ; code . append ( CodedConstant . getMappedTypeSize ( field , field . getOrder ( ) , field . getFieldType ( ) , isList , field . isMap ( ) , debug , outputPath ) ) ; code . append ( "}" ) . append ( LINE_BREAK ) ; if ( field . isRequired ( ) ) { code . append ( CodedConstant . getRequiredCheck ( field . getOrder ( ) , field . getField ( ) ) ) ; } } code . append ( "return size" ) . append ( JAVA_LINE_BREAK ) ; code . append ( "}" ) . append ( LINE_BREAK ) ; return code . toString ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.