signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AbstractShape3i { /** * Notify any listener of a geometry change . */ protected synchronized void fireGeometryChange ( ) { } }
if ( this . geometryListeners == null ) { return ; } final ShapeGeometryChangeListener [ ] array = new ShapeGeometryChangeListener [ this . geometryListeners . size ( ) ] ; this . geometryListeners . toArray ( array ) ; for ( final ShapeGeometryChangeListener listener : array ) { listener . shapeGeometryChange ( this ) ; }
public class SubsetProblem {

    /**
     * Validate a subset solution. The returned validation object separately indicates whether
     * the solution passed general mandatory constraint validation and whether it has a valid size.
     *
     * @param solution solution to validate
     * @return subset validation combining size check and mandatory-constraint check
     */
    @Override
    public SubsetValidation validate(SubsetSolution solution) {
        // check size against the configured [min, max] subset size bounds
        boolean validSize = solution.getNumSelectedIDs() >= getMinSubsetSize()
                && solution.getNumSelectedIDs() <= getMaxSubsetSize();
        // combine with mandatory constraint validation
        if (getMandatoryConstraints().isEmpty()) {
            // CASE 1: no mandatory constraints -- return shared constant validation object
            // (avoids allocating a new SubsetValidation per call)
            return validSize ? UNCONSTRAINED_VALID_SIZE : UNCONSTRAINED_INVALID_SIZE;
        } else {
            // CASE 2: mandatory constraint(s) -- wrap constraint validation in a subset validation
            Validation constraintVal = super.validate(solution);
            return new SubsetValidation(validSize, constraintVal);
        }
    }
}
public class FilterExprWalker {

    /**
     * Init a FilterExprWalker.
     *
     * @param compiler non-null reference to the Compiler that is constructing.
     * @param opPos positive opcode position for this step.
     * @param stepType The type of step.
     * @throws javax.xml.transform.TransformerException
     */
    public void init(Compiler compiler, int opPos, int stepType)
            throws javax.xml.transform.TransformerException {
        super.init(compiler, opPos, stepType);
        // Smooth over an anomaly in the opcode map...
        switch (stepType) {
            case OpCodes.OP_FUNCTION:
            case OpCodes.OP_EXTFUNCTION:
                m_mustHardReset = true;
                // INTENTIONAL fall-through: function steps also compile at opPos,
                // like group/variable steps below, but additionally force a hard reset.
            case OpCodes.OP_GROUP:
            case OpCodes.OP_VARIABLE:
                m_expr = compiler.compile(opPos);
                m_expr.exprSetParent(this);
                // if ((OpCodes.OP_FUNCTION == stepType) && (m_expr instanceof org.apache.xalan.templates.FuncKey))
                if (m_expr instanceof org.apache.xpath.operations.Variable) {
                    // hack/temp workaround: variables must not detach their nodeset
                    m_canDetachNodeset = false;
                }
                break;
            default:
                // Ordinary steps: the expression starts two opcodes past opPos.
                m_expr = compiler.compile(opPos + 2);
                m_expr.exprSetParent(this);
        }
        // Legacy (disabled) collapsing of nested FilterExprWalkers, kept for reference:
        // if (m_expr instanceof WalkingIterator)
        //     WalkingIterator wi = (WalkingIterator) m_expr;
        //     if (wi.getFirstWalker() instanceof FilterExprWalker)
        //         FilterExprWalker fw = (FilterExprWalker) wi.getFirstWalker();
        //         if (null == fw.getNextWalker())
        //             m_expr = fw.m_expr;
        //             m_expr.exprSetParent(this);
    }
}
public class POEditorClient { /** * Add / create a contributor for a language of a project * @ param projectId id of the project * @ param name name of the contributor * @ param email email of the contributor * @ param language language for the contributor * @ return boolean if the contributor has been added */ public boolean addContributor ( String projectId , String name , String email , String language ) { } }
ResponseWrapper wrapper = service . addProjectMember ( Action . ADD_CONTRIBUTOR , apiKey , projectId , name , email , language , 0 ) ; ApiUtils . checkResponse ( wrapper . response ) ; return "200" . equals ( wrapper . response . code ) ;
public class SocketExtensions {

    /**
     * Checks if the given port at the given host is available. Note: the probe socket is
     * always closed after the test.
     *
     * NOTE: the semantics are inverted relative to "can I connect": a successful
     * connection means something is already listening, so the port is NOT available.
     *
     * @param host the host name.
     * @param port the port number.
     * @return true if the given port at the given host is available (nothing accepted the
     *         connection), otherwise false.
     */
    public static boolean available(final String host, final int port) {
        Socket socket = null;
        try {
            socket = newSocket(host, port);
            // Connection succeeded -> something is bound there -> not available.
            return false;
        } catch (final IOException ignored) {
            // Connection refused / unreachable -> the port is free.
            return true;
        } finally {
            // Best-effort cleanup; closeClientSocket tolerates a null socket.
            if (!closeClientSocket(socket)) {
                LOGGER.error("Socket could not be closed on host " + host + " on port " + port);
            }
        }
    }
}
public class MaintenanceWindowIdentityForTargetMarshaller { /** * Marshall the given parameter object . */ public void marshall ( MaintenanceWindowIdentityForTarget maintenanceWindowIdentityForTarget , ProtocolMarshaller protocolMarshaller ) { } }
if ( maintenanceWindowIdentityForTarget == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( maintenanceWindowIdentityForTarget . getWindowId ( ) , WINDOWID_BINDING ) ; protocolMarshaller . marshall ( maintenanceWindowIdentityForTarget . getName ( ) , NAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ApiOvhDomain { /** * Delete a DynHost login * REST : DELETE / domain / zone / { zoneName } / dynHost / login / { login } * @ param zoneName [ required ] The internal name of your zone * @ param login [ required ] Login */ public void zone_zoneName_dynHost_login_login_DELETE ( String zoneName , String login ) throws IOException { } }
String qPath = "/domain/zone/{zoneName}/dynHost/login/{login}" ; StringBuilder sb = path ( qPath , zoneName , login ) ; exec ( qPath , "DELETE" , sb . toString ( ) , null ) ;
public class SyncAgentsInner {

    /**
     * Gets a sync agent.
     *
     * Thin async wrapper: delegates to the ServiceResponse-based overload and adapts it
     * to a {@link ServiceFuture} that invokes the supplied callback.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server on which the sync agent is hosted.
     * @param syncAgentName The name of the sync agent.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<SyncAgentInner> getAsync(String resourceGroupName, String serverName,
            String syncAgentName, final ServiceCallback<SyncAgentInner> serviceCallback) {
        return ServiceFuture.fromResponse(
                getWithServiceResponseAsync(resourceGroupName, serverName, syncAgentName), serviceCallback);
    }
}
public class IfcObjectDefinitionImpl {

    /**
     * Returns the IFC_OBJECT_DEFINITION__NESTS reference list via the EMF reflective
     * {@code eGet} API (second argument {@code true} = resolve proxies).
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcRelNests> getNests() {
        return (EList<IfcRelNests>) eGet(Ifc4Package.Literals.IFC_OBJECT_DEFINITION__NESTS, true);
    }
}
public class InboundNatRulesInner {

    /**
     * Creates or updates a load balancer inbound nat rule.
     *
     * Delegates to the ServiceResponse-based overload and unwraps the body from each
     * emitted response.
     *
     * @param resourceGroupName The name of the resource group.
     * @param loadBalancerName The name of the load balancer.
     * @param inboundNatRuleName The name of the inbound nat rule.
     * @param inboundNatRuleParameters Parameters supplied to the create or update inbound nat rule operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<InboundNatRuleInner> createOrUpdateAsync(String resourceGroupName, String loadBalancerName,
            String inboundNatRuleName, InboundNatRuleInner inboundNatRuleParameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, loadBalancerName, inboundNatRuleName,
                inboundNatRuleParameters).map(new Func1<ServiceResponse<InboundNatRuleInner>, InboundNatRuleInner>() {
            @Override
            public InboundNatRuleInner call(ServiceResponse<InboundNatRuleInner> response) {
                return response.body();
            }
        });
    }
}
public class IDLProxyObject {

    /**
     * Put.
     *
     * Delegates to the four-argument overload with {@code field} passed twice and the
     * current {@code target} object.
     * NOTE(review): presumably the two field arguments are "full path" and "leaf field
     * name" which coincide at the top level — confirm against the overload's contract.
     *
     * @param field the field
     * @param value the value
     * @return the IDL proxy object
     */
    public IDLProxyObject put(String field, Object value) {
        return put(field, field, value, target);
    }
}
public class ApiOvhDbaaslogs { /** * Returns details of specified graylog dashboard * REST : GET / dbaas / logs / { serviceName } / output / graylog / dashboard / { dashboardId } * @ param serviceName [ required ] Service name * @ param dashboardId [ required ] Dashboard ID */ public OvhDashboard serviceName_output_graylog_dashboard_dashboardId_GET ( String serviceName , String dashboardId ) throws IOException { } }
String qPath = "/dbaas/logs/{serviceName}/output/graylog/dashboard/{dashboardId}" ; StringBuilder sb = path ( qPath , serviceName , dashboardId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhDashboard . class ) ;
public class AbstrCFMLScriptTransformer {

    /**
     * Reads an if statement.<br/>
     * EBNF:<br/>
     * <code>spaces condition spaces ")" spaces block {"else if" spaces "(" elseifStatement spaces}
     * [("else" spaces "(" | "else") elseStatement spaces];</code>
     *
     * @return the parsed if statement, or null when the source is not at an "if("
     * @throws TemplateException if the closing ")" of the condition is missing
     */
    private final Statement ifStatement(Data data) throws TemplateException {
        // Not an if statement: leave the source position untouched and bail out.
        if (!data.srcCode.forwardIfCurrent("if", '(')) return null;
        Position line = data.srcCode.getPosition();
        Body body = new BodyBase(data.factory);
        // condition(data) consumes the condition expression between the parentheses.
        Condition cont = new Condition(data.factory, condition(data), body, line, null);
        if (!data.srcCode.forwardIfCurrent(')'))
            throw new TemplateException(data.srcCode, "if statement must end with a [)]");
        // ex block: parse the "then" block with this body as parent, then restore.
        Body prior = data.setParent(body);
        statement(data, body, CTX_IF);
        data.setParent(prior);
        // else if: consume any number of "else if" branches (comments may separate them)
        comments(data);
        while (elseifStatement(data, cont)) {
            comments(data);
        }
        // else: optional trailing else branch
        if (elseStatement(data, cont)) {
            comments(data);
        }
        cont.setEnd(data.srcCode.getPosition());
        return cont;
    }
}
public class ExcelItemWriter {

    /**
     * {@inheritDoc}
     *
     * Creates a sheet (named after {@code titleName} when given, otherwise unnamed),
     * writes {@code data} as the title row at index 0, applies the title cell style
     * to every cell of that row, and advances the row index past the title.
     */
    public void writeTitle(String titleName, Object data) {
        if (null != titleName) {
            sheet = workbook.createSheet(titleName);
        } else {
            sheet = workbook.createSheet();
        }
        title = data;
        index = 0;
        // writeItem renders the title data into row `index` (0).
        writeItem(data);
        HSSFRow titleRow = sheet.getRow(index);
        HSSFCellStyle titleStyle = getTitleStyle();
        for (int i = 0; i < titleRow.getLastCellNum(); i++) {
            titleRow.getCell(i).setCellStyle(titleStyle);
        }
        // Subsequent items start on the row after the title.
        index++;
    }
}
public class FieldToMatchMarshaller { /** * Marshall the given parameter object . */ public void marshall ( FieldToMatch fieldToMatch , ProtocolMarshaller protocolMarshaller ) { } }
if ( fieldToMatch == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( fieldToMatch . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( fieldToMatch . getData ( ) , DATA_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JsonPathAssert { /** * Extracts a JSON number using a JsonPath expression and wrap it in an { @ link IntegerAssert } * @ param path JsonPath to extract the number * @ return an instance of { @ link IntegerAssert } */ public AbstractIntegerAssert < ? > jsonPathAsInteger ( String path ) { } }
return Assertions . assertThat ( actual . read ( path , Integer . class ) ) ;
public class JBBPOut { /** * Write a boolean value into the session stream as a byte . * @ param value a boolean value to be written , true is 1 , false is 0 * @ param bitOrder bit outOrder for saving data * @ return the DSL session * @ throws IOException it will be thrown for transport errors * @ since 1.1 */ public JBBPOut Bool ( final boolean value , final JBBPBitOrder bitOrder ) throws IOException { } }
assertNotEnded ( ) ; if ( this . processCommands ) { this . outStream . write ( value ? bitOrder == JBBPBitOrder . MSB0 ? 0x80 : 1 : 0 ) ; } return this ;
public class Stylesheet {

    /**
     * Get an "xsl:output" property.
     *
     * @see <a href="http://www.w3.org/TR/xslt#output">output in XSLT Specification</a>
     * @param i Index of the output properties entry to get
     * @return non-null reference to an OutputProperties object.
     * @throws ArrayIndexOutOfBoundsException when no output list exists or the index is out of range
     */
    public OutputProperties getOutput(int i) throws ArrayIndexOutOfBoundsException {
        // No output list at all is reported the same way as a bad index.
        if (null == m_output)
            throw new ArrayIndexOutOfBoundsException();
        return (OutputProperties) m_output.elementAt(i);
    }
}
public class SecurityServletConfiguratorHelper {

    /**
     * Creates the servlet to run-as mapping from the run-as elements in web.xml and/or
     * web-fragment.xml files. Note that only one run-as element can be present per servlet;
     * only the first occurrence is processed. If multiple web fragments specify this element
     * with different values and it's absent from the web.xml, validation reports an error
     * that fails the application install.
     *
     * @param servlet the servlet
     */
    private void processRunAs(Servlet servlet) {
        String servletName = servlet.getServletName();
        Map<String, ConfigItem<String>> runAsMap = this.configurator.getConfigItemMap(RUN_AS_KEY);
        ConfigItem<String> existingRunAs = runAsMap.get(servletName);
        RunAs runAs = servlet.getRunAs();
        String roleName = (runAs != null) ? runAs.getRoleName() : null;
        if (runAs != null) {
            if (existingRunAs == null) {
                // First run-as seen for this servlet: record it.
                runAsMap.put(servletName, this.configurator.createConfigItem(roleName));
                if (roleName != null)
                    this.servletNameToRunAsRole.put(servletName, roleName);
            } else {
                // Duplicate run-as for the same servlet: let the configurator decide
                // whether the values conflict (conflicts fail the install).
                this.configurator.validateDuplicateKeyValueConfiguration(SERVLET_KEY, SERVLET_NAME_KEY,
                        servletName, RUN_AS_KEY, roleName, existingRunAs);
            }
        }
        if ((TraceComponent.isAnyTracingEnabled()) && (SecurityServletConfiguratorHelper.tc.isDebugEnabled()))
            Tr.debug(SecurityServletConfiguratorHelper.tc,
                    "servletNameToRunAsRole: " + this.servletNameToRunAsRole, new Object[0]);
    }
}
public class MyfacesLogger { /** * Returns formated string in default locale */ public String getMessage ( String key , Object param ) { } }
return getMessage ( key , new Object [ ] { param } ) ;
public class UrlOperations { /** * Return the default port for the scheme String argument , if known . * @ param scheme String scheme , including ' : / / ' , as in , " http : / / " , " ftp : / / " * @ return the default port for the scheme , or - 1 if the scheme isn ' t known . */ public static int schemeToDefaultPort ( final String scheme ) { } }
if ( scheme . equals ( HTTP_SCHEME ) ) { return 80 ; } if ( scheme . equals ( HTTPS_SCHEME ) ) { return 443 ; } if ( scheme . equals ( FTP_SCHEME ) ) { return 21 ; } if ( scheme . equals ( RTSP_SCHEME ) ) { return 554 ; } if ( scheme . equals ( MMS_SCHEME ) ) { return 1755 ; } return - 1 ;
public class CharArrayBuffer { /** * Converts the content of this buffer to an array of chars . * @ return char array */ public char [ ] toCharArray ( ) { } }
final char [ ] b = new char [ this . len ] ; if ( this . len > 0 ) { System . arraycopy ( this . array , 0 , b , 0 , this . len ) ; } return b ;
public class SoyTypes {

    /**
     * Helper method used by {@link #getSoyTypeForBinaryOperator} for handling {@code UnionType}
     * instances.
     *
     * Applies the operator to each member of the union against {@code t1}; if any member
     * yields no result the whole operation yields null, otherwise the member results are
     * re-unioned (and made nullable when requested).
     */
    @Nullable
    private static SoyType getSoyTypeFromUnionForBinaryOperator(UnionType t0, SoyType t1,
            boolean isNullable, SoyTypeBinaryOperator operator) {
        List<SoyType> subTypes = new ArrayList<>();
        for (SoyType unionMember : t0.getMembers()) {
            SoyType result = getSoyTypeForBinaryOperator(unionMember, t1, operator);
            if (result == null) {
                // One member is incompatible -> the union as a whole has no result type.
                return null;
            }
            subTypes.add(result);
        }
        SoyType result = UnionType.of(subTypes);
        return isNullable ? makeNullable(result) : result;
    }
}
public class DescribeStateMachineRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeStateMachineRequest describeStateMachineRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeStateMachineRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeStateMachineRequest . getStateMachineArn ( ) , STATEMACHINEARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JpaAction {

    /**
     * Returns the end time of the next available or active maintenance window for
     * the {@link Action} as {@link ZonedDateTime}. If a maintenance window is
     * already active, the end time of the currently active window is returned.
     *
     * End time = window start time + configured window duration; empty when there
     * is no start time.
     *
     * @return the end time of the window as {@link Optional <ZonedDateTime>}.
     */
    private Optional<ZonedDateTime> getMaintenanceWindowEndTime() {
        return getMaintenanceWindowStartTime()
                .map(start -> start.plus(MaintenanceScheduleHelper.convertToISODuration(maintenanceWindowDuration)));
    }
}
public class IconicsAnimationProcessor {

    /**
     * Removes all {@link #addListener(IconicsAnimationListener) listeners} and
     * {@link #addPauseListener(IconicsAnimationPauseListener) pauseListeners} from this processor.
     *
     * Both listener collections are cleared and nulled, and the internal proxy
     * listeners are detached from the backing animator. Pause listeners only exist
     * on KitKat and newer, so that branch is API-level gated.
     */
    public void removeAllListeners() {
        if (mListeners != null) {
            mListeners.clear();
            // Null marks "no listeners registered" so the proxy can be lazily re-added later.
            mListeners = null;
            mAnimator.removeListener(mProxyListener);
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            if (mPauseListeners != null) {
                mPauseListeners.clear();
                mPauseListeners = null;
                // mProxyPauseListener is stored untyped to keep the class loadable pre-KitKat.
                mAnimator.removePauseListener((Animator.AnimatorPauseListener) mProxyPauseListener);
            }
        }
    }
}
public class CrawlerTool {

    /**
     * Writes the crawler's startup banner to the system log: program version and path,
     * film-list location, user agent, and the effective crawl/import configuration.
     * (The previous javadoc text was unrelated to this method.)
     */
    public static synchronized void startMsg() {
        Log.startZeit.setTime(System.currentTimeMillis());
        Log.versionMsg(Const.PROGRAMMNAME);
        Log.sysLog(Log.LILNE);
        Log.sysLog("");
        Log.sysLog("Programmpfad: " + Functions.getPathJar());
        Log.sysLog("Filmliste: " + getPathFilmlist_json_akt(true /* aktDate */));
        Log.sysLog("Useragent: " + Config.getUserAgent());
        Log.sysLog("");
        Log.sysLog(Log.LILNE);
        Log.sysLog("");
        // Full load vs. incremental update.
        if (loadLongMax()) {
            Log.sysLog("Laden: alles");
        } else {
            Log.sysLog("Laden: nur update");
        }
        // Update existing film list vs. rebuild from scratch.
        if (CrawlerConfig.updateFilmliste) {
            Log.sysLog("Filmliste: nur updaten");
        } else {
            Log.sysLog("Filmliste: neu erstellen");
        }
        Log.sysLog("ImportURL 1: " + CrawlerConfig.importUrl_1__anhaengen);
        Log.sysLog("ImportURL 2: " + CrawlerConfig.importUrl_2__anhaengen);
        Log.sysLog("ImportOLD: " + CrawlerConfig.importOld);
        Log.sysLog("ImportAkt: " + CrawlerConfig.importAkt);
        // Optional restriction of the crawl to specific broadcasters.
        if (CrawlerConfig.nurSenderLaden != null) {
            Log.sysLog("Nur Sender laden: " + StringUtils.join(CrawlerConfig.nurSenderLaden, ','));
        }
        Log.sysLog("");
        Log.sysLog(Log.LILNE);
    }
}
public class TypeReferenceAdjustment {

    /**
     * {@inheritDoc}
     *
     * Wraps the supplied visitor in a TypeReferenceClassVisitor configured with this
     * adjustment's strictness, filter, and the given type pool; the remaining wrap
     * parameters are not needed for type-reference adjustment.
     */
    public ClassVisitor wrap(TypeDescription instrumentedType, ClassVisitor classVisitor,
            Implementation.Context implementationContext, TypePool typePool,
            FieldList<FieldDescription.InDefinedShape> fields, MethodList<?> methods,
            int writerFlags, int readerFlags) {
        return new TypeReferenceClassVisitor(classVisitor, strict, filter, typePool);
    }
}
public class TileBoundingBoxUtils {

    /**
     * Get the projected tile bounding box from the Google Maps API tile
     * coordinates and zoom level.
     *
     * Convenience overload that fixes the authority to EPSG and delegates.
     *
     * @param projectionEpsg projection epsg
     * @param x x coordinate
     * @param y y coordinate
     * @param zoom zoom level
     * @return bounding box
     */
    public static BoundingBox getProjectedBoundingBox(Long projectionEpsg, int x, int y, int zoom) {
        return getProjectedBoundingBox(ProjectionConstants.AUTHORITY_EPSG, projectionEpsg, x, y, zoom);
    }
}
public class EnglishChunkFilter { /** * Get the type of the chunk that starts at the given position . */ private ChunkType getChunkType ( List < ChunkTaggedToken > tokens , int chunkStartPos ) { } }
boolean isPlural = false ; for ( int i = chunkStartPos ; i < tokens . size ( ) ; i ++ ) { ChunkTaggedToken token = tokens . get ( i ) ; if ( ! isBeginningOfNounPhrase ( token ) && ! isContinuationOfNounPhrase ( token ) ) { break ; } if ( false && "and" . equals ( token . getToken ( ) ) ) { // e . g . " Tarzan and Jane " is a plural noun phrase // TODO : " Additionally , there are over 500 college and university chapter . " isPlural = true ; } else if ( hasNounWithPluralReading ( token ) ) { // e . g . " ten books " is a plural noun phrase isPlural = true ; } } return isPlural ? ChunkType . PLURAL : ChunkType . SINGULAR ;
public class Indexer {

    /**
     * Gets the fully-qualified index table name for the given table. Tables in the
     * "default" schema are unqualified; all others are prefixed with their schema.
     *
     * @param schema Schema name
     * @param table Table name
     * @return Qualified index table name
     */
    public static String getIndexTableName(String schema, String table) {
        final String indexName = table + "_idx";
        if (schema.equals("default")) {
            return indexName;
        }
        return schema + '.' + indexName;
    }
}
public class CaddScoreParser {

    /**
     * Parses a CADD score file and serializes the scores as chunked GenomicScoreRegion
     * objects (one "cadd_raw" and one "cadd_scaled" region per chunk).
     *
     * Example input:
     * <pre>
     * ## CADD v1.3 (c) University of Washington and Hudson-Alpha Institute for Biotechnology 2013-2015. All rights reserved.
     * #Chrom  Pos    Ref Alt RawScore  PHRED
     * 1       10001  T   A   0.337036  6.046
     * 1       10001  T   C   0.143254  4.073
     * 1       10001  T   G   0.202491  4.705
     * 1       10002  A   C   0.192576  4.601
     * 1       10002  A   G   0.178363  4.450
     * 1       10002  A   T   0.347401  6.143
     * </pre>
     *
     * Every position contributes 3 input lines (the 3 possible alt alleles); the 4
     * per-nucleotide scores are packed as four 16-bit fixed-point values into one long.
     * A new chunk is flushed when the chromosome changes, the chunk end is passed, or
     * the positions are not contiguous.
     */
    @Override
    public void parse() throws Exception {
        FileUtils.checkPath(caddFilePath);
        BufferedReader bufferedReader = FileUtils.newBufferedReader(caddFilePath);
        List<Long> rawValues = new ArrayList<>(CHUNK_SIZE);
        List<Long> scaledValues = new ArrayList<>(CHUNK_SIZE);
        int start = 1;
        // int end = 1999;
        int end = CHUNK_SIZE - 1;
        String line;
        String[] fields = new String[0];
        short v;
        int lineCount = 0; // input lines accumulated for the current position (flushed every 3)
        int counter = 1;   // NOTE(review): written but never read
        int serializedChunks = 0; // NOTE(review): only used by the disabled debug blocks below
        int previousPosition = 0;
        int newPosition = 0;
        String chromosome = null;
        String[] nucleotides = new String[]{"A", "C", "G", "T"};
        long rawLongValue = 0;
        long scaledLongValue = 0; // NOTE(review): unlike rawLongValue this is never reset to 0 after a flush — verify intended
        Map<String, Float> rawScoreValuesMap = new HashMap<>();
        Map<String, Float> scaledScoreValuesMap = new HashMap<>();
        while ((line = bufferedReader.readLine()) != null) {
            if (!line.startsWith("#")) {
                fields = line.split("\t");
                newPosition = Integer.parseInt(fields[1]);
                // Disabled debug probes, kept for reference:
                // if (fields[0].equals("1") && fields[1].equals("249240621")) {
                // if (fields[0].equals("1") && fields[1].equals("69100")) {
                // if (fields[0].equals("1") && fields[1].equals("144854598")) {
                //     logger.debug("line {} reached", line);
                //     logger.debug("Associated chunk count {}", serializedChunks);
                //     logger.debug("start {}", start);
                //     logger.debug("end {}", end);
                //     logger.debug("chunk size {}", CHUNK_SIZE);
                // This only happens the first time, when we start reading the file.
                if (chromosome == null) {
                    logger.info("Parsing chr {} ", fields[0]);
                    chromosome = fields[0];
                    start = newPosition;
                    previousPosition = newPosition;
                    end = start + CHUNK_SIZE - 2;
                }
                if (!chromosome.equals(fields[0])) {
                    // Chromosome changed: flush the pending chunk for the old chromosome.
                    logger.info("Parsing chr {} ", fields[0]);
                    // Both raw and scaled are serialized.
                    GenomicScoreRegion<Long> genomicScoreRegion =
                            new GenomicScoreRegion<>(chromosome, start, previousPosition, "cadd_raw", rawValues);
                    serializer.serialize(genomicScoreRegion);
                    genomicScoreRegion =
                            new GenomicScoreRegion<>(chromosome, start, previousPosition, "cadd_scaled", scaledValues);
                    serializer.serialize(genomicScoreRegion);
                    serializedChunks++;
                    chromosome = fields[0];
                    start = newPosition;
                    // end = CHUNK_SIZE - 1;
                    end = start + CHUNK_SIZE - 2;
                    counter = 0;
                    rawValues.clear();
                    scaledValues.clear();
                    // rawLongValue = 0;
                    // lineCount = 0;
                    // rawScoreValuesMap.clear();
                    // scaledScoreValuesMap.clear();
                // The series of CADD scores is not continuous through the whole chromosome.
                } else if (end < newPosition || (newPosition - previousPosition) > 1) {
                    // Chunk boundary passed or gap in positions: flush the pending chunk.
                    // Both raw and scaled are serialized.
                    GenomicScoreRegion genomicScoreRegion =
                            new GenomicScoreRegion<>(fields[0], start, previousPosition, "cadd_raw", rawValues);
                    serializer.serialize(genomicScoreRegion);
                    genomicScoreRegion =
                            new GenomicScoreRegion<>(fields[0], start, previousPosition, "cadd_scaled", scaledValues);
                    serializer.serialize(genomicScoreRegion);
                    serializedChunks++;
                    start = newPosition;
                    // start = end + 1;
                    // end += CHUNK_SIZE;
                    end = (start / CHUNK_SIZE) * CHUNK_SIZE + CHUNK_SIZE - 1;
                    counter = 0;
                    rawValues.clear();
                    scaledValues.clear();
                }
                rawScoreValuesMap.put(fields[3], Float.valueOf(fields[4]));
                scaledScoreValuesMap.put(fields[3], Float.valueOf(fields[5]));
                if (++lineCount == 3) {
                    // All three alt alleles for this position read; pack the four
                    // per-nucleotide scores into one long each (16 bits per allele).
                    // logger.info(" offset : {}", rawValues.size());
                    for (String nucleotide : nucleotides) {
                        // Raw CADD score values can be negative; we add 10 to make them positive.
                        float a = rawScoreValuesMap.getOrDefault(nucleotide, 10f) + 10.0f;
                        v = (short) (a * DECIMAL_RESOLUTION);
                        rawLongValue = (rawLongValue << 16) | v;
                        // Scaled CADD scores are always positive.
                        a = scaledScoreValuesMap.getOrDefault(nucleotide, 0f);
                        v = (short) (a * DECIMAL_RESOLUTION);
                        scaledLongValue = (scaledLongValue << 16) | v;
                    }
                    // Disabled sanity check, kept for reference:
                    // if (rawLongValue < 0 || scaledLongValue < 0) {
                    //     logger.error("raw/scaled Long Values cannot be 0");
                    //     logger.error("Last read line {}", line);
                    //     System.exit(1);
                    rawValues.add(rawLongValue);
                    scaledValues.add(scaledLongValue);
                    counter++;
                    rawLongValue = 0;
                    lineCount = 0;
                    rawScoreValuesMap.clear();
                    scaledScoreValuesMap.clear();
                }
                previousPosition = newPosition;
            }
        }
        // Last chunks can be incomplete; both raw and scaled are serialized.
        // GenomicScoreRegion<Long> genomicScoreRegion =
        //         new GenomicScoreRegion<>(fields[0], start, start + rawValues.size() - 1, "cadd_raw", rawValues);
        GenomicScoreRegion<Long> genomicScoreRegion =
                new GenomicScoreRegion<>(fields[0], start, newPosition, "cadd_raw", rawValues);
        serializer.serialize(genomicScoreRegion);
        // genomicScoreRegion = new GenomicScoreRegion<>(fields[0], start, start + scaledValues.size() - 1, "cadd_scaled", scaledValues);
        genomicScoreRegion =
                new GenomicScoreRegion<>(fields[0], start, newPosition, "cadd_scaled", scaledValues);
        serializer.serialize(genomicScoreRegion);
        serializer.close();
        bufferedReader.close();
        logger.info("Parsing finished.");
    }
}
public class StaticResourcePool {

    /**
     * Borrows a resource matching the filter from the pool, applies the consumer to it,
     * and always returns the resource to the pool afterwards (even when the consumer
     * throws).
     *
     * @param f the consumer to apply to the borrowed resource
     * @param filter selects which pooled resource may be borrowed
     */
    public void apply(@javax.annotation.Nonnull final Consumer<T> f, final Predicate<T> filter) {
        T poll = get(filter);
        try {
            f.accept(poll);
        } finally {
            // Return the resource no matter what happened inside the consumer.
            this.pool.add(poll);
        }
    }
}
public class ParameterableImpl { /** * ( non - Javadoc ) * @ see javax . servlet . sip . Parameterable # getParameters ( ) */ public Set < Entry < String , String > > getParameters ( ) { } }
Map < String , String > retval = new HashMap < String , String > ( ) ; for ( Entry < String , String > nameValue : this . parameters . entrySet ( ) ) { retval . put ( nameValue . getKey ( ) , ( RFC2396UrlDecoder . decode ( nameValue . getValue ( ) ) ) ) ; } return retval . entrySet ( ) ;
public class KeyVaultClientBaseImpl {

    /**
     * Sets the specified certificate issuer.
     * The SetCertificateIssuer operation adds or updates the specified certificate issuer.
     * This operation requires the certificates/setissuers permission.
     *
     * Synchronous convenience wrapper: blocks on the async overload and unwraps the body.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param issuerName The name of the issuer.
     * @param provider The issuer provider.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws KeyVaultErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the IssuerBundle object if successful.
     */
    public IssuerBundle setCertificateIssuer(String vaultBaseUrl, String issuerName, String provider) {
        return setCertificateIssuerWithServiceResponseAsync(vaultBaseUrl, issuerName, provider)
                .toBlocking().single().body();
    }
}
public class ProductSearchClient {

    /**
     * Lists products in an unspecified order.
     *
     * <p>Possible errors:
     * <p>&#42; Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1.
     *
     * <p>Sample code:
     * <pre><code>
     * try (ProductSearchClient productSearchClient = ProductSearchClient.create()) {
     *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
     *   for (Product element : productSearchClient.listProducts(parent).iterateAll()) {
     *     // doThingsWith(element);
     *   }
     * }
     * </code></pre>
     *
     * @param parent The project OR ProductSet from which Products should be listed.
     *     <p>Format: `projects/PROJECT_ID/locations/LOC_ID`
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    public final ListProductsPagedResponse listProducts(LocationName parent) {
        // A null parent is forwarded as a null string and left to request validation.
        ListProductsRequest request =
                ListProductsRequest.newBuilder().setParent(parent == null ? null : parent.toString()).build();
        return listProducts(request);
    }
}
public class PyGenerator {

    /**
     * Generates the Python code for the given SARL annotation type.
     *
     * Builds an appendable bound to the annotation's inferred JVM type, emits the type
     * declaration (with no supertypes), and writes the resulting file only when the
     * declaration was actually generated.
     *
     * @param annotation the annotation.
     * @param context the context.
     */
    protected void _generate(SarlAnnotationType annotation, IExtraLanguageGeneratorContext context) {
        final JvmDeclaredType jvmType = getJvmModelAssociations().getInferredType(annotation);
        final PyAppendable appendable = createAppendable(jvmType, context);
        if (generateTypeDeclaration(
                this.qualifiedNameProvider.getFullyQualifiedName(annotation).toString(),
                annotation.getName(), false, Collections.emptyList(),
                getTypeBuilder().getDocumentation(annotation), true,
                annotation.getMembers(), appendable, context, null)) {
            final QualifiedName name = getQualifiedNameProvider().getFullyQualifiedName(annotation);
            writeFile(name, appendable, context);
        }
    }
}
public class ApiOvhIpLoadbalancing { /** * Alter this object properties * REST : PUT / ipLoadbalancing / { serviceName } / tcp / farm / { farmId } / server / { serverId } * @ param body [ required ] New object properties * @ param serviceName [ required ] The internal name of your IP load balancing * @ param farmId [ required ] Id of your farm * @ param serverId [ required ] Id of your server */ public void serviceName_tcp_farm_farmId_server_serverId_PUT ( String serviceName , Long farmId , Long serverId , OvhBackendTCPServer body ) throws IOException { } }
String qPath = "/ipLoadbalancing/{serviceName}/tcp/farm/{farmId}/server/{serverId}" ; StringBuilder sb = path ( qPath , serviceName , farmId , serverId ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class ServletOut {

    /**
     * Writes {@code l} bytes from {@code b} starting at offset {@code o} to the
     * wrapped output stream.
     *
     * @throws IOException propagated from the underlying stream
     */
    public void write(byte[] b, int o, int l) throws IOException {
        _out.write(b, o, l);
    }
}
public class NameHash { /** * Normalise ENS name as per the * < a href = " http : / / docs . ens . domains / en / latest / implementers . html # normalising - and - validating - names " > specification < / a > . * @ param ensName our user input ENS name * @ return normalised ens name * @ throws EnsResolutionException if the name cannot be normalised */ public static String normalise ( String ensName ) { } }
try { return IDN . toASCII ( ensName , IDN . USE_STD3_ASCII_RULES ) . toLowerCase ( ) ; } catch ( IllegalArgumentException e ) { throw new EnsResolutionException ( "Invalid ENS name provided: " + ensName ) ; }
public class BuiltinAuthorizationService {
    /**
     * Checks all of the authorization table services for the resourceName.
     * If no authorization table can be found for the resourceName, null is returned.
     * If more than one authorization table can be found for the resourceName, null
     * is returned (and an error is logged).
     *
     * <p>When a feature authorization table service is registered with a role-header
     * value other than the management default, it takes precedence and is consulted
     * exclusively; otherwise every registered table is queried and exactly one match
     * is required.
     *
     * @param resourceName the resource being accessed
     * @param specialSubject the special subject (e.g. EVERYONE) to look up roles for
     * @return the roles granted to the special subject, or null if zero or multiple
     *         tables matched
     */
    private Collection<String> getRolesForSpecialSubject(String resourceName, String specialSubject) {
        int found = 0;
        Collection<String> roles = null;
        FeatureAuthorizationTableService featureAuthzTableSvc = featureAuthzTableServiceRef.getService();
        String featureAuthzRoleHeaderValue = null;
        if (featureAuthzTableSvc != null) {
            featureAuthzRoleHeaderValue = featureAuthzTableSvc.getFeatureAuthzRoleHeaderValue();
        }
        if (featureAuthzRoleHeaderValue != null && !featureAuthzRoleHeaderValue.equals(MGMT_AUTHZ_ROLES)) {
            // Feature-specific table overrides the generic lookup entirely.
            roles = featureAuthzTableSvc.getRolesForSpecialSubject(resourceName, specialSubject);
        } else {
            Iterator<AuthorizationTableService> itr = authorizationTables.getServices();
            while (itr.hasNext()) {
                AuthorizationTableService authzTableSvc = itr.next();
                Collection<String> rolesFound = authzTableSvc.getRolesForSpecialSubject(resourceName, specialSubject);
                if (rolesFound != null) {
                    roles = rolesFound;
                    found++;
                }
            }
            // We must find one, and only one, Collection of roles; an ambiguous
            // resource name is reported and treated as no match.
            if (found > 1) {
                Tr.error(tc, "AUTHZ_MULTIPLE_RESOURCES_WITH_SAME_NAME", resourceName);
                roles = null;
            }
        }
        return roles;
    }
}
public class Beagle { /** * Populates the given array with values 0 to { @ code indexVectorSize } , and * then shuffly the values randomly . */ private void randomPermute ( int [ ] permute ) { } }
for ( int i = 0 ; i < indexVectorSize ; i ++ ) permute [ i ] = i ; for ( int i = indexVectorSize - 1 ; i > 0 ; i -- ) { int w = ( int ) Math . floor ( Math . random ( ) * ( i + 1 ) ) ; int temp = permute [ w ] ; permute [ w ] = permute [ i ] ; permute [ i ] = permute [ w ] ; }
public class Strman { /** * Decodes data encoded with MIME base64 * @ param value The data to decode * @ return decoded data */ public static String base64Decode ( final String value ) { } }
validate ( value , NULL_STRING_PREDICATE , NULL_STRING_MSG_SUPPLIER ) ; return new String ( Base64 . getDecoder ( ) . decode ( value ) , StandardCharsets . UTF_8 ) ;
public class DispatchQueue {
    /**
     * Adds a {@link DispatchTask} to the queue to be executed asynchronously after
     * the given delay.
     *
     * @param task the task to enqueue; it is marked as scheduled before dispatch
     * @param delayMillis the delay in milliseconds before the task runs
     */
    public void dispatchAsync(DispatchTask task, long delayMillis) {
        // Flag first so the task is observed as scheduled before it is enqueued.
        task.setScheduled(true);
        dispatch(task, delayMillis);
    }
}
public class CmpUtil {
    /**
     * Retrieves a lexicographical comparator for the given type.
     *
     * @param elemComp the comparator to use for comparing the elements
     * @return a comparator for comparing objects of type {@code T} based on
     *         lexicographical ordering of their elements
     */
    public static <T extends Iterable<U>, U> Comparator<T> lexComparator(Comparator<U> elemComp) {
        return new LexComparator<>(elemComp);
    }
}
public class PurchaseHostReservationRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < PurchaseHostReservationRequest > getDryRunRequest ( ) { } }
Request < PurchaseHostReservationRequest > request = new PurchaseHostReservationRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcAudioVisualApplianceTypeEnum createIfcAudioVisualApplianceTypeEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcAudioVisualApplianceTypeEnum result = IfcAudioVisualApplianceTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class Ix {
    /**
     * Emits only those elements of this sequence which match the given predicate.
     * The result's iterator() forwards the call to remove() to this' Iterator.
     *
     * @param predicate the predicate receiving the current element; if it returns
     *        true the value is emitted, otherwise it is ignored
     * @return the new Ix instance
     * @throws NullPointerException if predicate is null
     * @since 1.0
     */
    public final Ix<T> filter(IxPredicate<T> predicate) {
        return new IxFilter<T>(this, nullCheck(predicate, "predicate is null"));
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link String}{@code >}
     * wrapping the given value under the {@code string} element name.
     *
     * @param value the string payload to wrap (may be null)
     * @return a new JAXBElement carrying {@code value}
     */
    @XmlElementDecl(namespace = "http://xmlsoccer.com/", name = "string")
    public JAXBElement<String> createString(String value) {
        return new JAXBElement<String>(_String_QNAME, String.class, null, value);
    }
}
public class MetaStore { /** * Note : used in backward compatibility code with pre 1.2.6 release . This can be removed in later releases . */ private Configuration loadConfigurationFromMetadataBuffer ( Buffer buffer ) { } }
if ( buffer . position ( 12 ) . readByte ( ) == 1 ) { return new Configuration ( buffer . readLong ( ) , buffer . readLong ( ) , buffer . readLong ( ) , serializer . readObject ( buffer ) ) ; } return null ;
public class Parser {
    /**
     * Parse any number of "(expr)", "[expr]", ".expr", "..expr", or ".(expr)"
     * constructs trailing the passed expression.
     *
     * @param allowCallSyntax whether a trailing "(args)" may be parsed as a call
     * @param pn the non-null parent node
     * @return the outermost (lexically last occurring) expression, which will have
     *         the passed parent node as a descendant
     * @throws IOException if the underlying token stream fails
     */
    private AstNode memberExprTail(boolean allowCallSyntax, AstNode pn) throws IOException {
        // we no longer return null for errors, so this won't be null
        if (pn == null) codeBug();
        int pos = pn.getPosition();
        int lineno;
        tailLoop:
        for (;;) {
            int tt = peekToken();
            switch (tt) {
                case Token.DOT:
                case Token.DOTDOT:
                    // Property access: ".name" / "..name" (E4X descendant).
                    lineno = ts.lineno;
                    pn = propertyAccess(tt, pn);
                    pn.setLineno(lineno);
                    break;
                case Token.DOTQUERY:
                    // E4X filtering predicate: ".(expr)".
                    consumeToken();
                    int opPos = ts.tokenBeg, rp = -1;
                    lineno = ts.lineno;
                    mustHaveXML();
                    setRequiresActivation();
                    AstNode filter = expr();
                    int end = getNodeEnd(filter);
                    if (mustMatchToken(Token.RP, "msg.no.paren", true)) {
                        rp = ts.tokenBeg;
                        end = ts.tokenEnd;
                    }
                    XmlDotQuery q = new XmlDotQuery(pos, end - pos);
                    q.setLeft(pn);
                    q.setRight(filter);
                    q.setOperatorPosition(opPos);
                    q.setRp(rp - pos);
                    q.setLineno(lineno);
                    pn = q;
                    break;
                case Token.LB:
                    // Bracketed element access: "[expr]".
                    consumeToken();
                    int lb = ts.tokenBeg, rb = -1;
                    lineno = ts.lineno;
                    AstNode expr = expr();
                    end = getNodeEnd(expr);
                    if (mustMatchToken(Token.RB, "msg.no.bracket.index", true)) {
                        rb = ts.tokenBeg;
                        end = ts.tokenEnd;
                    }
                    ElementGet g = new ElementGet(pos, end - pos);
                    g.setTarget(pn);
                    g.setElement(expr);
                    g.setParens(lb, rb);
                    g.setLineno(lineno);
                    pn = g;
                    break;
                case Token.LP:
                    // Call syntax "(args)" - only consumed when the caller allows it.
                    if (!allowCallSyntax) {
                        break tailLoop;
                    }
                    lineno = ts.lineno;
                    consumeToken();
                    checkCallRequiresActivation(pn);
                    FunctionCall f = new FunctionCall(pos);
                    f.setTarget(pn);
                    // Assign the line number for the function call to where
                    // the paren appeared, not where the name expression started.
                    f.setLineno(lineno);
                    f.setLp(ts.tokenBeg - pos);
                    List<AstNode> args = argumentList();
                    if (args != null && args.size() > ARGC_LIMIT)
                        reportError("msg.too.many.function.args");
                    f.setArguments(args);
                    f.setRp(ts.tokenBeg - pos);
                    f.setLength(ts.tokenEnd - pos);
                    pn = f;
                    break;
                case Token.COMMENT:
                    // Ignoring all the comments, because previous statement may not be terminated properly.
                    int currentFlagTOken = currentFlaggedToken;
                    peekUntilNonComment(tt);
                    // Preserve the AFTER_EOL flag if peeking past comments set it.
                    currentFlaggedToken = (currentFlaggedToken & TI_AFTER_EOL) != 0 ? currentFlaggedToken : currentFlagTOken;
                    break;
                default:
                    break tailLoop;
            }
        }
        return pn;
    }
}
public class Set { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > JCYPHER LANGUAGE ELEMENT < / i > < / b > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > remove the property ( set to < b > NULL < / b > ) < / i > < / div > * < br / > */ public ModifyTerminal toNull ( ) { } }
ModifyExpression mx = ( ModifyExpression ) this . astNode ; mx . setToNull ( ) ; ModifyTerminal ret = new ModifyTerminal ( mx ) ; return ret ;
public class Ifc4PackageImpl {
    /**
     * Returns the {@link EEnum} descriptor for IfcRampFlightTypeEnum, lazily resolved
     * from the registered Ifc4 package metadata (classifier index 1046) and cached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcRampFlightTypeEnum() {
        if (ifcRampFlightTypeEnumEEnum == null) {
            // Lazy lookup against the globally registered package; cached thereafter.
            ifcRampFlightTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1046);
        }
        return ifcRampFlightTypeEnumEEnum;
    }
}
public class ConsonantUtil { /** * * * * * * BEGINNING OF FUNCTION * * * * * */ / * / public static boolean is_ashadi ( String str ) { } }
String s1 = VarnaUtil . getAdiVarna ( str ) ; if ( is_ash ( s1 ) ) return true ; return false ;
public class UploadOptions {
    /**
     * Filters user-supplied custom parameters: only entries whose key starts with
     * {@code x:} and whose value is non-null and non-empty are retained.
     *
     * @param params the user parameters to filter; may be null
     * @return the filtered parameters (never null)
     */
    private static Map<String, String> filterParam(Map<String, String> params) {
        Map<String, String> filtered = new HashMap<String, String>();
        if (params != null) {
            for (Map.Entry<String, String> entry : params.entrySet()) {
                String key = entry.getKey();
                String value = entry.getValue();
                if (key.startsWith("x:") && value != null && !value.isEmpty()) {
                    filtered.put(key, value);
                }
            }
        }
        return filtered;
    }
}
public class PagingPredicate {
    /**
     * Used if the inner predicate is an instance of {@link IndexAwarePredicate} for
     * index-based filtering.  Entries matched by the inner predicate are further
     * restricted to those beyond the nearest anchor (previous page boundary), then
     * sorted and trimmed to the page size.
     *
     * @param queryContext the index query context
     * @return the page of matching entries, null if the inner predicate is not
     *         index-aware, or the raw (possibly empty) result if the index produced
     *         nothing to page over
     */
    @Override
    public Set<QueryableEntry<K, V>> filter(QueryContext queryContext) {
        if (!(predicate instanceof IndexAwarePredicate)) {
            // Signal "cannot use the index" to the caller.
            return null;
        }
        Set<QueryableEntry<K, V>> set = ((IndexAwarePredicate<K, V>) predicate).filter(queryContext);
        if (set == null || set.isEmpty()) {
            return set;
        }
        // Keep only entries positioned after the nearest anchor (page boundary).
        List<QueryableEntry<K, V>> resultList = new ArrayList<QueryableEntry<K, V>>();
        Map.Entry<Integer, Map.Entry> nearestAnchorEntry = getNearestAnchorEntry();
        for (QueryableEntry<K, V> queryableEntry : set) {
            if (SortingUtil.compareAnchor(this, queryableEntry, nearestAnchorEntry)) {
                resultList.add(queryableEntry);
            }
        }
        // Sort and trim to the requested page; raw-typed casts mirror SortingUtil's API.
        List<QueryableEntry<K, V>> sortedSubList = (List) SortingUtil.getSortedSubList((List) resultList, this, nearestAnchorEntry);
        return new LinkedHashSet<QueryableEntry<K, V>>(sortedSubList);
    }
}
public class ContentsDao { /** * Delete the table * @ param table * table name */ public void deleteTable ( String table ) { } }
try { deleteByIdCascade ( table , true ) ; } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to delete table: " + table , e ) ; }
public class FieldInfo {
    /**
     * Adds the 'property access method' (named {@code _<field>}) that callers should
     * use instead of reading the field directly.  The generated method lazily
     * initialises the field on first access: if it is null, a new instance of the
     * property type is constructed (with a constructor variant chosen by whether the
     * property is an associated bean and whether the enclosing type is a query root
     * bean), stored into the field, and then returned.
     *
     * @param cw the class visitor to emit the method into
     * @param typeQueryRootBean whether the enclosing bean is the query root bean
     */
    public void writeMethod(ClassVisitor cw, boolean typeQueryRootBean) {
        // simple why to determine the property is an associated bean type
        boolean assocProperty = desc.contains("/QAssoc");
        if (classInfo.isLog(4)) {
            classInfo.log(" ... add method _" + name + " assocProperty:" + assocProperty + " rootBean:" + typeQueryRootBean);
        }
        MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "_" + name, "()" + desc, "()" + signature, null);
        mv.visitCode();
        Label l0 = new Label();
        mv.visitLabel(l0);
        mv.visitLineNumber(1, l0);
        // Load the field; skip initialisation if it is already non-null.
        mv.visitVarInsn(ALOAD, 0);
        mv.visitFieldInsn(GETFIELD, classInfo.getClassName(), name, desc);
        Label l1 = new Label();
        mv.visitJumpInsn(IFNONNULL, l1);
        Label l2 = new Label();
        mv.visitLabel(l2);
        mv.visitLineNumber(2, l2);
        // Construct the property instance: new <type>(name, this, ...).
        mv.visitVarInsn(ALOAD, 0);
        mv.visitTypeInsn(NEW, internalName);
        mv.visitInsn(DUP);
        mv.visitLdcInsn(name);
        mv.visitVarInsn(ALOAD, 0);
        if (assocProperty) {
            if (typeQueryRootBean) {
                // (String, Object, int) constructor for assoc property on root bean.
                mv.visitInsn(ICONST_1);
                mv.visitMethodInsn(INVOKESPECIAL, internalName, "<init>", "(Ljava/lang/String;Ljava/lang/Object;I)V", false);
            } else {
                // (String, Object, String, int) - also pushes the root and path fields.
                mv.visitFieldInsn(GETFIELD, classInfo.getClassName(), FIELD_ROOT, "Ljava/lang/Object;");
                mv.visitVarInsn(ALOAD, 0);
                mv.visitFieldInsn(GETFIELD, classInfo.getClassName(), FIELD_PATH, "Ljava/lang/String;");
                mv.visitInsn(ICONST_1);
                mv.visitMethodInsn(INVOKESPECIAL, internalName, "<init>", "(Ljava/lang/String;Ljava/lang/Object;Ljava/lang/String;I)V", false);
            }
        } else {
            if (typeQueryRootBean) {
                // (String, Object) constructor for a scalar property on the root bean.
                mv.visitMethodInsn(INVOKESPECIAL, internalName, "<init>", "(Ljava/lang/String;Ljava/lang/Object;)V", false);
            } else {
                // (String, Object, String) - also pushes the root and path fields.
                mv.visitFieldInsn(GETFIELD, classInfo.getClassName(), FIELD_ROOT, "Ljava/lang/Object;");
                mv.visitVarInsn(ALOAD, 0);
                mv.visitFieldInsn(GETFIELD, classInfo.getClassName(), FIELD_PATH, "Ljava/lang/String;");
                mv.visitMethodInsn(INVOKESPECIAL, internalName, "<init>", "(Ljava/lang/String;Ljava/lang/Object;Ljava/lang/String;)V", false);
            }
        }
        // Store the freshly constructed instance into the field.
        mv.visitFieldInsn(PUTFIELD, classInfo.getClassName(), name, desc);
        mv.visitLabel(l1);
        mv.visitLineNumber(3, l1);
        mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
        // Reload and return the (now non-null) field value.
        mv.visitVarInsn(ALOAD, 0);
        mv.visitFieldInsn(GETFIELD, classInfo.getClassName(), name, desc);
        mv.visitInsn(ARETURN);
        Label l3 = new Label();
        mv.visitLabel(l3);
        mv.visitLocalVariable("this", "L" + classInfo.getClassName() + ";", null, l0, l3, 0);
        // Max stack depends on which constructor variant was emitted above.
        if (assocProperty) {
            if (typeQueryRootBean) {
                mv.visitMaxs(6, 1);
            } else {
                mv.visitMaxs(7, 1);
            }
        } else {
            if (typeQueryRootBean) {
                mv.visitMaxs(5, 1);
            } else {
                mv.visitMaxs(6, 1);
            }
        }
        mv.visitEnd();
    }
}
public class TemplateModelProcessor {
    /**
     * Returns a list of enhancing methods for a given
     * {@link org.glassfish.jersey.server.model.RuntimeResource runtime resource}.
     * Enhancing methods are collected from each sub-resource's handler classes,
     * then (only when no handler classes/instances exist) from resource names
     * resolved to classes, and finally from handler instances whose class was not
     * already covered.
     *
     * @param runtimeResource runtime resource to create enhancing methods for.
     * @return list of enhancing methods.
     */
    private List<ModelProcessorUtil.Method> getEnhancingMethods(final RuntimeResource runtimeResource) {
        final List<ModelProcessorUtil.Method> newMethods = Lists.newArrayList();
        for (final Resource resource : runtimeResource.getResources()) {
            // Handler classes.
            for (final Class<?> handlerClass : resource.getHandlerClasses()) {
                createEnhancingMethods(handlerClass, null, newMethods);
            }
            // Names - if there are no handler classes / instances.
            if (resource.getHandlerClasses().isEmpty() && resource.getHandlerInstances().isEmpty()) {
                for (String resourceName : resource.getNames()) {
                    // Class lookup runs privileged so it works under a security manager.
                    final Class<Object> resourceClass = AccessController.doPrivileged(ReflectionHelper.classForNamePA(resourceName));
                    if (resourceClass != null) {
                        createEnhancingMethods(resourceClass, null, newMethods);
                    }
                }
            }
            // Handler instances - processed inside the Errors context so warnings attach
            // to this resource.
            Errors.process((Producer<Void>) () -> {
                for (final Object handlerInstance : resource.getHandlerInstances()) {
                    final Class<?> handlerInstanceClass = handlerInstance.getClass();
                    if (!resource.getHandlerClasses().contains(handlerInstanceClass)) {
                        createEnhancingMethods(handlerInstanceClass, handlerInstance, newMethods);
                    } else {
                        // Already enhanced via its class above - warn instead of duplicating.
                        Errors.warning(resource, LocalizationMessages.TEMPLATE_HANDLER_ALREADY_ENHANCED(handlerInstanceClass));
                    }
                }
                return null;
            });
        }
        return newMethods;
    }
}
public class ThriftCodecByteCodeGenerator { /** * Defines the getType method which simply returns the value of the type field . */ private void defineGetTypeMethod ( ) { } }
classDefinition . addMethod ( new MethodDefinition ( a ( PUBLIC ) , "getType" , type ( ThriftType . class ) ) . loadThis ( ) . getField ( codecType , typeField ) . retObject ( ) ) ;
public class AstMean { /** * Compute column - wise means ( i . e . means of each column ) , and return a frame having a single row . */ private ValFrame colwiseMean ( Frame fr , final boolean na_rm ) { } }
Frame res = new Frame ( ) ; Vec vec1 = Vec . makeCon ( null , 0 ) ; assert vec1 . length ( ) == 1 ; for ( int i = 0 ; i < fr . numCols ( ) ; i ++ ) { Vec v = fr . vec ( i ) ; boolean valid = ( v . isNumeric ( ) || v . isTime ( ) || v . isBinary ( ) ) && v . length ( ) > 0 && ( na_rm || v . naCnt ( ) == 0 ) ; Vec newvec = vec1 . makeCon ( valid ? v . mean ( ) : Double . NaN , v . isTime ( ) ? Vec . T_TIME : Vec . T_NUM ) ; res . add ( fr . name ( i ) , newvec ) ; } vec1 . remove ( ) ; return new ValFrame ( res ) ;
public class RC4 { /** * 初始化Sbox * @ param key 密钥 * @ return sbox */ private int [ ] initSBox ( byte [ ] key ) { } }
int [ ] sbox = new int [ SBOX_LENGTH ] ; int j = 0 ; for ( int i = 0 ; i < SBOX_LENGTH ; i ++ ) { sbox [ i ] = i ; } for ( int i = 0 ; i < SBOX_LENGTH ; i ++ ) { j = ( j + sbox [ i ] + ( key [ i % key . length ] ) & 0xFF ) % SBOX_LENGTH ; swap ( i , j , sbox ) ; } return sbox ;
public class AbstractServer {
    /**
     * Create a new group iterator from the given stream: the stream is wrapped in a
     * buffered iterator, registered (with a cookie) in the global iterator table
     * under a freshly allocated id, an initial batch of up to {@code fetch} entries
     * is pulled, and the whole bundle is encoded as a stream response.
     *
     * @param tsc the stream of timestamped values to expose as a paged iterator
     * @param fetch the number of entries to fetch for the first page (capped by
     *        MAX_GROUP_STREAM_FETCH)
     * @return the encoded new-iterator response
     */
    private static group_stream_response newGroupStream(Stream<Map.Entry<DateTime, TimeSeriesValue>> tsc, int fetch) {
        final BufferedIterator<Map.Entry<DateTime, TimeSeriesValue>> iter = new BufferedIterator<>(tsc.iterator(), GROUP_STREAM_QUEUE_SIZE);
        // Allocate a unique id and publish the iterator so later fetch calls can find it.
        final long idx = GROUP_STREAM_ITERS_ALLOC.getAndIncrement();
        final IteratorAndCookie<Map.Entry<DateTime, TimeSeriesValue>> iterAndCookie = new IteratorAndCookie<>(iter);
        GROUP_STREAM_ITERS.put(idx, iterAndCookie);
        // Pull the first page eagerly so the client gets data with the iterator handle.
        final List<Map.Entry<DateTime, TimeSeriesValue>> result = fetchFromIter(iter, fetch, MAX_GROUP_STREAM_FETCH);
        EncDec.NewIterResponse<Map.Entry<DateTime, TimeSeriesValue>> responseObj = new EncDec.NewIterResponse<>(idx, result, iter.atEnd(), iterAndCookie.getCookie());
        LOG.log(Level.FINE, "responseObj = {0}", responseObj);
        return EncDec.encodeStreamGroupResponse(responseObj);
    }
}
public class BaseTangramEngine {
    /**
     * Insert parsed data into Tangram at the target position.  This causes full
     * rebinding of on-screen items, so use with care.
     *
     * @param position target insert position
     * @param data parsed card list to insert; may be null
     * @throws IllegalStateException if {@code bindView()} has not been called yet
     */
    @Deprecated
    public void insertData(int position, @Nullable List<Card> data) {
        Preconditions.checkState(mGroupBasicAdapter != null, "Must call bindView() first");
        this.mGroupBasicAdapter.insertGroup(position, data);
    }
}
public class AsyncLookupInBuilder {
    /**
     * Helper method to actually perform the subdoc get-count operation.  The request
     * is built lazily (per subscription) inside {@code defer}, sent through the core,
     * and the response is decoded into a single-result {@link DocumentFragment}.
     * SUBDOC_PATH_NOT_FOUND is mapped to a result with a null value rather than an
     * error; other failures are translated via {@code SubdocHelper.commonSubdocErrors}.
     * Response buffers are reference-counted and must be released on every path.
     *
     * @param id the document id
     * @param spec the lookup spec carrying the path and xattr flag
     * @param timeout the operation timeout
     * @param timeUnit the unit of {@code timeout}
     * @return an observable emitting the decoded fragment or an error
     */
    private Observable<DocumentFragment<Lookup>> getCountIn(final String id, final LookupSpec spec, final long timeout, final TimeUnit timeUnit) {
        return Observable.defer(new Func0<Observable<DocumentFragment<Lookup>>>() {
            @Override
            public Observable<DocumentFragment<Lookup>> call() {
                final SubGetCountRequest request = new SubGetCountRequest(id, spec.path(), bucketName);
                request.xattr(spec.xattr());
                request.accessDeleted(accessDeleted);
                addRequestSpan(environment, request, "subdoc_count");
                return applyTimeout(deferAndWatch(new Func1<Subscriber, Observable<SimpleSubdocResponse>>() {
                    @Override
                    public Observable<SimpleSubdocResponse> call(Subscriber s) {
                        // Attach the subscriber before dispatch so cancellation propagates.
                        request.subscriber(s);
                        return core.send(request);
                    }
                }).map(new Func1<SimpleSubdocResponse, DocumentFragment<Lookup>>() {
                    @Override
                    public DocumentFragment<Lookup> call(SimpleSubdocResponse response) {
                        try {
                            if (response.status().isSuccess()) {
                                try {
                                    long count = subdocumentTranscoder.decode(response.content(), Long.class);
                                    SubdocOperationResult<Lookup> single = SubdocOperationResult.createResult(spec.path(), Lookup.GET_COUNT, response.status(), count);
                                    return new DocumentFragment<Lookup>(id, response.cas(), response.mutationToken(), Collections.singletonList(single));
                                } finally {
                                    // Release the content buffer even if decoding throws.
                                    if (response.content() != null) {
                                        response.content().release();
                                    }
                                }
                            } else {
                                // Failure path: release the buffer only if still referenced.
                                if (response.content() != null && response.content().refCnt() > 0) {
                                    response.content().release();
                                }
                                if (response.status() == ResponseStatus.SUBDOC_PATH_NOT_FOUND) {
                                    // A missing path is a valid answer (null count), not an error.
                                    SubdocOperationResult<Lookup> single = SubdocOperationResult.createResult(spec.path(), Lookup.GET_COUNT, response.status(), null);
                                    return new DocumentFragment<Lookup>(id, response.cas(), response.mutationToken(), Collections.singletonList(single));
                                } else {
                                    throw SubdocHelper.commonSubdocErrors(response.status(), id, spec.path());
                                }
                            }
                        } finally {
                            // Close the tracing scope for this request on every outcome.
                            if (environment.operationTracingEnabled()) {
                                environment.tracer().scopeManager().activate(response.request().span(), true).close();
                            }
                        }
                    }
                }), request, environment, timeout, timeUnit);
            }
        });
    }
}
public class DataGenerator { /** * Create a file with the name < code > file < / code > and a length of * < code > fileSize < / code > . The file is filled with character ' a ' . */ @ SuppressWarnings ( "unused" ) private void genFile ( Path file , long fileSize ) throws IOException { } }
FSDataOutputStream out = fs . create ( file , true , getConf ( ) . getInt ( "io.file.buffer.size" , 4096 ) , ( short ) getConf ( ) . getInt ( "dfs.replication" , 3 ) , fs . getDefaultBlockSize ( ) ) ; for ( long i = 0 ; i < fileSize ; i ++ ) { out . writeByte ( 'a' ) ; } out . close ( ) ;
public class LargeObject { /** * Truncates the large object to the given length in bytes . If the number of bytes is larger than * the current large object length , the large object will be filled with zero bytes . This method * does not modify the current file offset . * @ param len given length in bytes * @ throws SQLException if something goes wrong */ public void truncate64 ( long len ) throws SQLException { } }
FastpathArg [ ] args = new FastpathArg [ 2 ] ; args [ 0 ] = new FastpathArg ( fd ) ; args [ 1 ] = new FastpathArg ( len ) ; fp . getInteger ( "lo_truncate64" , args ) ;
public class CirclePageIndicator { /** * ( non - Javadoc ) * @ see android . view . View # onMeasure ( int , int ) */ @ Override protected void onMeasure ( int widthMeasureSpec , int heightMeasureSpec ) { } }
if ( mOrientation == HORIZONTAL ) { setMeasuredDimension ( measureLong ( widthMeasureSpec ) , measureShort ( heightMeasureSpec ) ) ; } else { setMeasuredDimension ( measureShort ( widthMeasureSpec ) , measureLong ( heightMeasureSpec ) ) ; }
public class DirectoryConnection { /** * Queue the Packet to Connection . * The DirectoryConnect Queue the Packet to the internal Queue and send it to remote * Directory Server . * @ param header * the ProtocolHeader . * @ param protocol * the Protocol . * @ param cb * the Callback . * @ param context * the context Object of the Callback . * @ param future * the Future . * @ param wr * WatcherRegistration * @ return * the queued Packet . */ private Packet queuePacket ( ProtocolHeader header , Protocol protocol , ProtocolCallback cb , Object context , ServiceDirectoryFuture future , WatcherRegistration wr ) { } }
Packet packet = new Packet ( header , protocol , wr ) ; PacketLatency . initPacket ( packet ) ; header . createTime = packet . createTime ; packet . cb = cb ; packet . context = context ; packet . future = future ; if ( ! clientSocket . isConnected ( ) || closing ) { onLossPacket ( packet ) ; } else { synchronized ( pendingQueue ) { if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( "Add the packet in queuePacket, type=" + header . getType ( ) ) ; } header . setXid ( xid . incrementAndGet ( ) ) ; try { PacketLatency . queuePacket ( packet ) ; clientSocket . sendPacket ( header , protocol ) ; pendingQueue . add ( packet ) ; PacketLatency . sendPacket ( packet ) ; } catch ( IOException e ) { LOGGER . error ( "ClientSocket send packet failed." ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( "ClientSocket send packet failed." , e ) ; } if ( packet != null ) { onLossPacket ( packet ) ; } } } } return packet ;
public class JDBCDatabaseMetaData { /** * An SQL statement executor that knows how to create a " SELECT * * FROM " statement , given a table name and a < em > where < / em > clause . < p > * If the < em > where < / em > clause is null , it is ommited . < p > * It is assumed that the table name is non - null , since this is a private * method . No check is performed . < p > * @ return the result of executing " SELECT * FROM " + table " " + where * @ param table the name of a table to " select * from " * @ param where the where condition for the select * @ throws SQLException if database error occurs */ private ResultSet executeSelect ( String table , String where ) throws SQLException { } }
String select = selstar + table ; if ( where != null ) { select += " WHERE " + where ; } return execute ( select ) ;
public class SelectParserFactory {
    /**
     * Create select parser instance for the given database dialect.
     *
     * @param dbType database type
     * @param shardingRule databases and tables sharding rule
     * @param lexerEngine lexical analysis engine
     * @param shardingTableMetaData sharding metadata
     * @return select parser instance
     * @throws UnsupportedOperationException if the database type has no parser
     */
    public static AbstractSelectParser newInstance(final DatabaseType dbType, final ShardingRule shardingRule, final LexerEngine lexerEngine, final ShardingTableMetaData shardingTableMetaData) {
        switch (dbType) {
            // H2 intentionally falls through: it is parsed with the MySQL dialect.
            case H2:
            case MySQL:
                return new MySQLSelectParser(shardingRule, lexerEngine, shardingTableMetaData);
            case Oracle:
                return new OracleSelectParser(shardingRule, lexerEngine, shardingTableMetaData);
            case SQLServer:
                return new SQLServerSelectParser(shardingRule, lexerEngine, shardingTableMetaData);
            case PostgreSQL:
                return new PostgreSQLSelectParser(shardingRule, lexerEngine, shardingTableMetaData);
            default:
                throw new UnsupportedOperationException(String.format("Cannot support database [%s].", dbType));
        }
    }
}
public class Chunk {
    /**
     * After writing we must call close() to register the bulk changes: any pending
     * write-copy ({@code _chk2}) is compressed (if it is still a NewChunk) and
     * written back into the K/V store under this chunk's key, and the vector's
     * chunk cache entry for this chunk is invalidated.
     *
     * @param cidx the chunk index within the vector
     * @param fs futures to attach the asynchronous K/V put to
     */
    public void close(int cidx, Futures fs) {
        int len = _len;
        // A NewChunk closes itself: treat it as its own write-copy.
        if (this instanceof NewChunk) _chk2 = this;
        if (_chk2 == null) return; // No change?
        // Compress the accumulated writes into a storable chunk representation.
        if (_chk2 instanceof NewChunk) _chk2 = ((NewChunk) _chk2).new_close();
        assert _chk2._len == len : "incompatible length after compression, " + len + " != " + _chk2._len + ", " + ", chunk = " + _chk2.getClass().getSimpleName();
        DKV.put(_vec.chunkKey(cidx), _chk2, fs, true); // Write updated chunk back into K/V
        // Drop the vector's cached reference to this (now stale) chunk.
        if (_vec._cache == this) _vec._cache = null;
    }
}
public class QYDepartmentAPI { /** * 更新部门 * @ param department 需要更新的部门 * @ return 更新结果 */ public QYResultType update ( QYDepartment department ) { } }
BeanUtil . requireNonNull ( department , "department is null" ) ; String url = BASE_API_URL + "cgi-bin/department/update?access_token=#" ; BaseResponse r = executePost ( url , department . toJsonString ( ) ) ; return QYResultType . get ( r . getErrcode ( ) ) ;
public class ProposalLineItem {
    /**
     * Sets the billingCap value for this ProposalLineItem.
     *
     * @param billingCap overrides the billing cap of this {@code ProposalLineItem}.
     *        This attribute is optional.  If this field is overridden, then the other
     *        required billing fields ({@link #billingSource} or {@link #billingBase})
     *        also need to be overridden depending on the {@link #billingSource};
     *        none of the billing fields will inherit from their {@link Proposal}
     *        object anymore.  This attribute can be configured as editable after the
     *        proposal has been submitted; please check with your network
     *        administrator for editable fields configuration.
     *        <span class="constraint Applicable">This attribute is applicable when:
     *        <ul><li>not using programmatic, using sales management.</li></ul></span>
     */
    public void setBillingCap(com.google.api.ads.admanager.axis.v201811.BillingCap billingCap) {
        this.billingCap = billingCap;
    }
}
public class JmsJcaConnectionFactoryImpl {
    /**
     * Returns the JNDI reference for this connection factory, delegating to the
     * managed connection factory.  Entry/exit tracing follows the component's
     * standard SibTr pattern.
     *
     * @return the JNDI {@link Reference} obtained from the managed connection factory
     * @see javax.naming.Referenceable#getReference()
     */
    @Override
    public Reference getReference() {
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled())
            SibTr.entry(this, TRACE, "getReference");
        final Reference reference = _managedConnectionFactory.getReference();
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled())
            SibTr.exit(this, TRACE, "getReference", reference);
        return reference;
    }
}
public class CmsSecurityManager {
    /**
     * Writes a property for a specified resource.<p>
     *
     * @param context the current request context
     * @param resource the resource to write the property for
     * @param property the property to write
     * @throws CmsException if something goes wrong
     * @throws CmsSecurityException if the user has insufficient permission for the
     *         given resource ({@link CmsPermissionSet#ACCESS_WRITE} required)
     * @see CmsObject#writePropertyObject(String, CmsProperty)
     * @see org.opencms.file.types.I_CmsResourceType#writePropertyObject(CmsObject, CmsSecurityManager, CmsResource, CmsProperty)
     */
    public void writePropertyObject(CmsRequestContext context, CmsResource resource, CmsProperty property) throws CmsException, CmsSecurityException {
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        try {
            // Writing is only allowed in an offline project and with write permission.
            checkOfflineProject(dbc);
            checkPermissions(dbc, resource, CmsPermissionSet.ACCESS_WRITE, true, CmsResourceFilter.IGNORE_EXPIRATION);
            m_driverManager.writePropertyObject(dbc, resource, property);
        } catch (Exception e) {
            // Any failure is routed through the db context's reporter, which wraps it
            // in a localized message carrying the property name and site path.
            dbc.report(null, Messages.get().container(Messages.ERR_WRITE_PROP_2, property.getName(), context.getSitePath(resource)), e);
        } finally {
            dbc.clear();
        }
    }
}
public class BackoffIdleStrategy { /** * Creates a new BackoffIdleStrategy . */ public static BackoffIdleStrategy createBackoffIdleStrategy ( String config ) { } }
String [ ] args = config . split ( "," ) ; if ( args . length != ARG_COUNT ) { throw new IllegalArgumentException ( format ( "Invalid backoff configuration '%s', 4 arguments expected" , config ) ) ; } long maxSpins = parseLong ( args [ ARG_MAX_SPINS ] ) ; long maxYields = parseLong ( args [ ARG_MAX_YIELDS ] ) ; long minParkPeriodNs = parseLong ( args [ ARG_MIN_PARK_PERIOD ] ) ; long maxParkNanos = parseLong ( args [ ARG_MAX_PARK_PERIOD ] ) ; return new BackoffIdleStrategy ( maxSpins , maxYields , minParkPeriodNs , maxParkNanos ) ;
public class TransformationsInner {

    /**
     * Creates a transformation or replaces an already existing transformation under an existing streaming job.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *            from the Azure Resource Manager API or the portal.
     * @param jobName The name of the streaming job.
     * @param transformationName The name of the transformation.
     * @param transformation The definition of the transformation that will be used to create a new transformation or
     *            replace the existing one under the streaming job.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the TransformationInner object if successful.
     */
    public TransformationInner createOrReplace(String resourceGroupName, String jobName, String transformationName, TransformationInner transformation) {
        // Synchronous facade: blocks on the async service call and unwraps the response body.
        return createOrReplaceWithServiceResponseAsync(resourceGroupName, jobName, transformationName, transformation).toBlocking().single().body();
    }
}
public class ListKeysRequestMarshaller {

    /**
     * Marshalls the given request object into the protocol representation.
     *
     * @param listKeysRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller that receives the individual field bindings
     * @throws SdkClientException if {@code listKeysRequest} is {@code null}, or if any
     *             failure occurs while marshalling (the original exception is preserved as cause)
     */
    public void marshall(ListKeysRequest listKeysRequest, ProtocolMarshaller protocolMarshaller) {
        if (listKeysRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // emit each request field with its binding descriptor
            protocolMarshaller.marshall(listKeysRequest.getLimit(), LIMIT_BINDING);
            protocolMarshaller.marshall(listKeysRequest.getMarker(), MARKER_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure as an SDK client error, keeping the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Maybe { /** * Returns a Maybe instance that if this Maybe emits an error and the predicate returns * true , it will emit an onComplete and swallow the throwable . * < dl > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code onErrorComplete } does not operate by default on a particular { @ link Scheduler } . < / dd > * < / dl > * @ param predicate the predicate to call when an Throwable is emitted which should return true * if the Throwable should be swallowed and replaced with an onComplete . * @ return the new Completable instance */ @ CheckReturnValue @ SchedulerSupport ( SchedulerSupport . NONE ) public final Maybe < T > onErrorComplete ( final Predicate < ? super Throwable > predicate ) { } }
ObjectHelper . requireNonNull ( predicate , "predicate is null" ) ; return RxJavaPlugins . onAssembly ( new MaybeOnErrorComplete < T > ( this , predicate ) ) ;
public class PersistenceDelegator { /** * Close . */ void close ( ) { } }
doFlush ( ) ; // Close all clients created in this session if ( ! clientMap . isEmpty ( ) ) { for ( Client client : clientMap . values ( ) ) { client . close ( ) ; } clientMap . clear ( ) ; } onClearProxy ( ) ; // TODO : Move all nodes tied to this EM into detached state , need to // discuss with Amresh . closed = true ;
public class ContribStandardPortletEnvironment {

    /**
     * {@inheritDoc}
     *
     * <p>Delegates to {@code PortletApplicationContextUtils} to initialize this
     * environment's property sources from the given servlet/portlet contexts and config.
     */
    @Override
    public void initPropertySources(ServletContext servletContext, PortletContext portletContext, PortletConfig portletConfig) {
        PortletApplicationContextUtils.initPortletPropertySources(this.getPropertySources(), servletContext, portletContext, portletConfig);
    }
}
public class OSMUtils { /** * Extracts known way tags and returns their ids . * @ param entity the way * @ return the ids of the identified tags */ public static Map < Short , Object > extractKnownWayTags ( Entity entity ) { } }
Map < Short , Object > tagMap = new HashMap < > ( ) ; OSMTagMapping mapping = OSMTagMapping . getInstance ( ) ; if ( entity . getTags ( ) != null ) { for ( Tag tag : entity . getTags ( ) ) { OSMTag wayTag = mapping . getWayTag ( tag . getKey ( ) , tag . getValue ( ) ) ; if ( wayTag != null ) { String wildcard = wayTag . getValue ( ) ; tagMap . put ( wayTag . getId ( ) , getObjectFromWildcardAndValue ( wildcard , tag . getValue ( ) ) ) ; } } } return tagMap ;
public class AmazonDynamoDBClient { /** * Retrieves a paginated list of table names created by the AWS Account * of the caller in the AWS Region ( e . g . < code > us - east - 1 < / code > ) . * @ param listTablesRequest Container for the necessary parameters to * execute the ListTables service method on AmazonDynamoDB . * @ return The response from the ListTables service method , as returned * by AmazonDynamoDB . * @ throws InternalServerErrorException * @ throws AmazonClientException * If any internal errors are encountered inside the client while * attempting to make the request or handle the response . For example * if a network connection is not available . * @ throws AmazonServiceException * If an error response is returned by AmazonDynamoDB indicating * either a problem with the data in the request , or a server side issue . */ public ListTablesResult listTables ( ListTablesRequest listTablesRequest ) throws AmazonServiceException , AmazonClientException { } }
ExecutionContext executionContext = createExecutionContext ( listTablesRequest ) ; AWSRequestMetrics awsRequestMetrics = executionContext . getAwsRequestMetrics ( ) ; Request < ListTablesRequest > request = marshall ( listTablesRequest , new ListTablesRequestMarshaller ( ) , executionContext . getAwsRequestMetrics ( ) ) ; // Binds the request metrics to the current request . request . setAWSRequestMetrics ( awsRequestMetrics ) ; Unmarshaller < ListTablesResult , JsonUnmarshallerContext > unmarshaller = new ListTablesResultJsonUnmarshaller ( ) ; JsonResponseHandler < ListTablesResult > responseHandler = new JsonResponseHandler < ListTablesResult > ( unmarshaller ) ; return invoke ( request , responseHandler , executionContext ) ;
public class UpdatePodBuilder { /** * Builds the configured pod . */ public UpdatePod build ( ) { } }
if ( getServers ( ) == null ) { int count = Math . max ( _primaryServerCount , _depth ) ; if ( _type == PodType . off ) { count = 0 ; } _servers = buildServers ( count ) ; } Objects . requireNonNull ( getServers ( ) ) ; /* if ( getServers ( ) . length < _ primaryServerCount ) { throw new IllegalStateException ( ) ; */ return new UpdatePod ( this ) ;
public class Geldbetrag { /** * Returns a { @ code MonetaryAmount } whose value is < code > this / * divisor < / code > , and whose preferred scale is < code > this . scale ( ) - * divisor . scale ( ) < / code > ; if the exact quotient cannot be represented an { @ code ArithmeticException } * is thrown . * @ param divisor value by which this { @ code MonetaryAmount } is to be divided . * @ return { @ code this / divisor } * @ throws ArithmeticException if the exact quotient does not have a terminating decimal expansion , or if the * result exceeds the numeric capabilities of this implementation class , i . e . the * { @ link MonetaryContext } cannot be adapted as required . */ @ Override public MonetaryAmount divide ( double divisor ) { } }
if ( isInfinite ( divisor ) ) { return Geldbetrag . valueOf ( BigDecimal . ZERO , currency ) ; } return divide ( BigDecimal . valueOf ( divisor ) ) ;
public class AmazonSageMakerClient { /** * Gets a description of a hyperparameter tuning job . * @ param describeHyperParameterTuningJobRequest * @ return Result of the DescribeHyperParameterTuningJob operation returned by the service . * @ throws ResourceNotFoundException * Resource being access is not found . * @ sample AmazonSageMaker . DescribeHyperParameterTuningJob * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / sagemaker - 2017-07-24 / DescribeHyperParameterTuningJob " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeHyperParameterTuningJobResult describeHyperParameterTuningJob ( DescribeHyperParameterTuningJobRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeHyperParameterTuningJob ( request ) ;
public class SipStandardService { /** * This method simply makes the HTTP and SSL ports avaialble everywhere in the JVM in order jsip ha to read them for * balancer description purposes . There is no other good way to communicate the properies to jsip ha without adding * more dependencies . */ public void initializeSystemPortProperties ( ) { } }
for ( Connector connector : connectors ) { if ( connector . getProtocol ( ) . contains ( "HTTP" ) ) { if ( connector . getSecure ( ) ) { System . setProperty ( "org.mobicents.properties.sslPort" , Integer . toString ( connector . getPort ( ) ) ) ; } else { System . setProperty ( "org.mobicents.properties.httpPort" , Integer . toString ( connector . getPort ( ) ) ) ; } } }
public class JsonUtils {

    /**
     * This is a helper method that reads a JSON token using a JsonParser
     * instance, and throws an exception if the next token is not START_ARRAY.
     *
     * @param jsonParser The JsonParser instance to be used
     * @param parentFieldName The name of the field (used in the error message on mismatch)
     * @throws IOException if reading fails or the next token is not START_ARRAY
     */
    public static void readStartArrayToken(JsonParser jsonParser, String parentFieldName) throws IOException {
        // delegate to the generic token reader with the expected token type
        readToken(jsonParser, parentFieldName, JsonToken.START_ARRAY);
    }
}
public class WhitelistingApi { /** * Get the status of whitelist feature ( enabled / disabled ) of a device type . * Get the status of whitelist feature ( enabled / disabled ) of a device type . * @ param dtid Device Type ID . ( required ) * @ return ApiResponse & lt ; WhitelistEnvelope & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < WhitelistEnvelope > getWhitelistStatusWithHttpInfo ( String dtid ) throws ApiException { } }
com . squareup . okhttp . Call call = getWhitelistStatusValidateBeforeCall ( dtid , null , null ) ; Type localVarReturnType = new TypeToken < WhitelistEnvelope > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class FedoraObjectTripleGenerator_3_0 { /** * { @ inheritDoc } */ public Set < Triple > getTriplesForObject ( DOReader reader ) throws ResourceIndexException { } }
Set < Triple > set = new HashSet < Triple > ( ) ; addCommonTriples ( reader , set ) ; return set ;
public class Axis {

    /**
     * Converts a screen coordinate to chart coordinate value. Reverses the AxisTickCalculators calculation.
     *
     * @param screenPoint Coordinate of screen. eg: MouseEvent.getX(), MouseEvent.getY()
     * @return value in chart coordinate system
     */
    public double getChartValue(double screenPoint) {
        // degenerate axis: all data share the same value, so every pixel maps to it
        if (min == max) {
            return min;
        }
        double minVal = min;
        double maxVal = max;
        // min & max is not set in category charts with string labels (min > max signals "unset");
        // fall back to the category index range [0, count] on the X axis
        if (min > max) {
            if (getDirection() == Direction.X) {
                if (axesChartStyler instanceof CategoryStyler) {
                    AxesChartSeriesCategory axesChartSeries =
                        (AxesChartSeriesCategory) chart.getSeriesMap().values().iterator().next();
                    int count = axesChartSeries.getXData().size();
                    minVal = 0;
                    maxVal = count;
                }
            }
        }
        double workingSpace;
        double startOffset;
        if (direction == Direction.X) {
            startOffset = bounds.getX();
            workingSpace = bounds.getWidth();
        }
        else {
            startOffset = 0; // bounds.getY();
            workingSpace = bounds.getHeight();
            // flip the Y coordinate: screen y increments top to bottom, chart values bottom to top
            screenPoint = bounds.getHeight() - screenPoint + bounds.getY();
        }
        // tick space - a percentage of the working space available for ticks
        double tickSpace = axesChartStyler.getPlotContentSize() * workingSpace; // in plot space
        // this prevents an infinite loop when the plot gets sized really small.
        if (tickSpace < axesChartStyler.getXAxisTickMarkSpacingHint()) {
            return minVal;
        }
        // where the tick should begin in the working space in pixels
        double margin = Utils.getTickStartOffset(workingSpace, tickSpace);
        // invert the tick-position formula:
        //   tickLabelPosition = margin + ((value - min) / (max - min) * tickSpace)
        // solved for value given the screen point
        double value = ((screenPoint - margin - startOffset) * (maxVal - minVal) / tickSpace) + minVal;
        return value;
    }
}
public class Http2Client {

    /**
     * Create async connection with default config value.
     *
     * <p>Delegates to the full {@code connectAsync} overload using the shared worker,
     * SSL context and buffer pool defaults, with HTTP/2 enabled.
     *
     * @param uri the target server URI
     * @return a future that completes with the established client connection
     */
    public CompletableFuture<ClientConnection> connectAsync(URI uri) {
        return this.connectAsync(null, uri, com.networknt.client.Http2Client.WORKER, com.networknt.client.Http2Client.SSL, com.networknt.client.Http2Client.BUFFER_POOL, OptionMap.create(UndertowOptions.ENABLE_HTTP2, true));
    }
}
public class LoggingHandlerInterceptor { /** * Handle request message and write request to logger . * @ param request */ public void handleRequest ( String request ) { } }
if ( messageListener != null ) { log . debug ( "Received Http request" ) ; messageListener . onInboundMessage ( new RawMessage ( request ) , null ) ; } else { if ( log . isDebugEnabled ( ) ) { log . debug ( "Received Http request:" + NEWLINE + request ) ; } }
public class MapRepository { /** * This method returns the map of children associated with given parent id . * @ param parentId of parent entity . * @ return map of children */ public Map < String , String > getChildrenMap ( String parentId ) { } }
Map < String , String > map = new HashMap < > ( ) ; List < StringEntity > children = getByField ( Column . PARENT_ID , parentId ) ; for ( StringEntity stringEntity : children ) { map . put ( stringEntity . getId ( ) , stringEntity . getValue ( ) ) ; } return map ;
public class MemoryStore { /** * Method for accessing the httpSessListener variable which tells us if our * store has any HttpSession listeners associated with it * @ see com . ibm . wsspi . session . IStore # isHttpSessionListener ( ) */ @ Override public boolean isHttpSessionListener ( ) { } }
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { String s = httpSessListener + appNameForLogging ; LoggingUtil . SESSION_LOGGER_CORE . logp ( Level . FINE , methodClassName , "isHttpSessionListener" , s ) ; } return httpSessListener ;
public class OperatorUtil { /** * Returns the associated { @ link Operator } type for the given { @ link eu . stratosphere . api . common . functions . Function } class . * @ param stubClass * the stub class * @ return the associated Operator type */ @ SuppressWarnings ( { } }
"unchecked" , "rawtypes" } ) public static Class < ? extends Operator > getContractClass ( final Class < ? > stubClass ) { if ( stubClass == null ) { return null ; } final Class < ? > contract = STUB_CONTRACTS . get ( stubClass ) ; if ( contract != null ) { return ( Class < ? extends Operator < ? > > ) contract ; } Iterator < Entry < Class < ? > , Class < ? extends Operator > > > stubContracts = STUB_CONTRACTS . entrySet ( ) . iterator ( ) ; while ( stubContracts . hasNext ( ) ) { Map . Entry < Class < ? > , Class < ? extends Operator > > entry = stubContracts . next ( ) ; if ( entry . getKey ( ) . isAssignableFrom ( stubClass ) ) { return entry . getValue ( ) ; } } return null ;
public class ParquetRecordReader {

    /**
     * Returns the current read position in the split, i.e., the current block and
     * the number of records that were returned from that block.
     *
     * <p>Special cases: {@code (-1, -1)} when all records of the split were returned;
     * {@code (currentBlock + 1, 0)} when the current block is exhausted and the next
     * record comes from the following block.
     *
     * @return The current read position in the split.
     */
    public Tuple2<Long, Long> getCurrentReadPosition() {
        // compute number of returned records: a record that was read but not yet
        // handed to the caller does not count as returned
        long numRecordsReturned = numReadRecords;
        if (!readRecordReturned && numReadRecords > 0) {
            numRecordsReturned -= 1;
        }
        if (numRecordsReturned == numTotalRecords) {
            // all records of split returned.
            return Tuple2.of(-1L, -1L);
        }
        if (numRecordsReturned == numRecordsUpToCurrentBlock) {
            // all records of block returned. Next record is in next block
            return Tuple2.of(currentBlock + 1L, 0L);
        }
        // compute number of returned records of this block
        long numRecordsOfBlockReturned = numRecordsReturned - numRecordsUpToPreviousBlock;
        return Tuple2.of((long) currentBlock, numRecordsOfBlockReturned);
    }
}
public class Sql2o {

    /**
     * Calls the {@link StatementRunnable#run(Connection, Object)} method on the {@link StatementRunnable} parameter.
     * All statements run on the {@link Connection} instance in the {@link StatementRunnable#run(Connection, Object) run}
     * method will be executed in a transaction. The transaction will automatically be committed if the
     * {@link StatementRunnable#run(Connection, Object) run} method finishes without throwing an exception. If an
     * exception is thrown within the {@link StatementRunnable#run(Connection, Object) run} method, the transaction
     * will automatically be rolled back.
     *
     * The isolation level of the transaction will be set to {@link java.sql.Connection#TRANSACTION_READ_COMMITTED}.
     *
     * @param runnable The {@link StatementRunnable} instance.
     * @param argument An argument which will be forwarded to the {@link StatementRunnable#run(Connection, Object) run} method
     */
    public void runInTransaction(StatementRunnable runnable, Object argument) {
        // convenience overload: delegate with the default READ_COMMITTED isolation level
        runInTransaction(runnable, argument, java.sql.Connection.TRANSACTION_READ_COMMITTED);
    }
}
public class ChemModelRenderer { /** * Set the scale for an IChemModel . It calculates the average bond length of * the model and calculates the multiplication factor to transform this * to the bond length that is set in the RendererModel . * @ param chemModel */ @ Override public void setScale ( IChemModel chemModel ) { } }
double bondLength = AverageBondLengthCalculator . calculateAverageBondLength ( chemModel ) ; double scale = this . calculateScaleForBondLength ( bondLength ) ; // store the scale so that other components can access it this . rendererModel . getParameter ( Scale . class ) . setValue ( scale ) ;