signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TranslatorTypes {
    /**
     * Creates a DPT translator for the given datapoint type main/sub number.
     *
     * @param mainNumber datapoint type main number, 0 &lt; mainNumber
     * @param subNumber datapoint type sub number selecting a particular kind of value
     *        translation; use 0 to request any type ID of that translator (in that case,
     *        appending the physical unit for string values is disabled)
     * @param data (optional) KNX datapoint data to set in the created translator for translation
     * @return the new {@link DPTXlator} object
     * @throws KNXException if no matching DPT translator is available or creation failed
     *         (see {@link MainType#createTranslator(String)})
     */
    public static DPTXlator createTranslator(final int mainNumber, final int subNumber,
            final byte... data) throws KNXException {
        final MainType type = map.get(mainNumber);
        if (type == null)
            throw new KNXException("no DPT translator available for main number " + mainNumber);
        // subNumber == 0 means "any sub type": pick the first registered sub type ID
        final boolean withSub = subNumber != 0;
        final String id = withSub ? String.format("%d.%03d", mainNumber, subNumber)
                : type.getSubTypes().keySet().iterator().next();
        final DPTXlator t = type.createTranslator(id);
        // appending the unit only makes sense when a specific sub type was requested
        t.setAppendUnit(withSub);
        if (data.length > 0)
            t.setData(data);
        return t;
    }
}
public class CompletionKey { /** * Sets the address and length of a buffer with a specified index . * @ param address of the buffer * @ param length of the buffer in bytes * @ param index of the buffer to set , where 0 is the first buffer * @ throws IllegalArgumentException if the index value is < 0 or > = bufferCount */ public void setBuffer ( long address , long length , int index ) { } }
if ( ( index < 0 ) || ( index >= this . bufferCount ) ) { throw new IllegalArgumentException ( ) ; } this . stagingByteBuffer . putLong ( ( FIRST_BUFFER_INDEX + ( 2 * index ) ) * 8 , address ) ; this . stagingByteBuffer . putLong ( ( FIRST_BUFFER_INDEX + ( 2 * index ) + 1 ) * 8 , length ) ;
public class PubSubOutputHandler { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . impl . interfaces . MessageEventListener # messageEventOccurred ( int , com . ibm . ws . sib . processor . impl . interfaces . SIMPMessage , com . ibm . ws . sib . msgstore . Transaction ) */ public void messageEventOccurred ( int event , SIMPMessage msg , TransactionCommon tran ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "messageEventOccurred" , new Object [ ] { new Integer ( event ) , msg , tran } ) ; InvalidOperationException e = new InvalidOperationException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "com.ibm.ws.sib.processor.impl.PubSubOutputHandler" , "1:603:1.164.1.5" } , null ) ) ; // FFDC FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.PubSubOutputHandler.messageEventOccurred" , "1:610:1.164.1.5" , this ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "com.ibm.ws.sib.processor.impl.PubSubOutputHandler" , "1:616:1.164.1.5" } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "messageEventOccurred" , e ) ; throw e ;
public class DecompositionFactory_DDRM {
    /**
     * Returns an {@link EigenDecomposition} that has been optimized for the specified matrix size.
     * If the input matrix is symmetric within tolerance then the symmetric algorithm will be
     * used, otherwise a general purpose eigenvalue decomposition is used.
     *
     * @param matrixSize Number of rows and columns that the returned decomposition is optimized for.
     * @param needVectors Should eigenvectors be computed or not. If not sure set to true.
     * @return A new EigenDecomposition
     */
    public static EigenDecomposition_F64<DMatrixRMaj> eig(int matrixSize, boolean needVectors) {
        // the switching implementation selects symmetric vs. general algorithms at
        // decomposition time, using UtilEjml.TEST_F64 as the symmetry tolerance
        return new SwitchingEigenDecomposition_DDRM(matrixSize, needVectors, UtilEjml.TEST_F64);
    }
}
public class ExceptionUtils { /** * formatela la excepcion . * @ param exception excepcion * @ param buffer bufer * @ return StringBuilder */ public static StringBuilder formatException ( Throwable exception , StringBuilder buffer ) { } }
// ByteArrayOutputStream escribe = new ByteArrayOutputStream ( ) ; // PrintStream print = new PrintStream ( escribe ) ; // exception . printStackTrace ( print ) ; // String prpr = escribe . toString ( ) ; StringBuilder ret ; if ( exception == null ) { ret = buffer ; } else { buffer . append ( exception . getClass ( ) . getName ( ) ) ; buffer . append ( ": \"" ) ; buffer . append ( exception . getMessage ( ) ) ; buffer . append ( "\" \n" ) ; StackTraceElement array [ ] = exception . getStackTrace ( ) ; for ( StackTraceElement element : array ) { buffer . append ( "\tat " ) ; printStackTraceElement ( element , buffer ) ; buffer . append ( '\n' ) ; } if ( exception . getCause ( ) != null ) { buffer . append ( "Parent exception: " ) ; ret = formatException ( exception . getCause ( ) , buffer ) ; } else { ret = buffer ; } } return ret ;
public class AbstractElementTransformer {
    /**
     * Determines whether the given symbol is defined on a supertype or interface of an
     * enclosing class of the current (non-static inner) class.
     *
     * @param symbol the symbol whose owner class is checked against the enclosing classes
     * @return the enclosing type that declares (or inherits) the symbol, or null if the
     *         current class is not a non-static inner class, or the symbol belongs to this
     *         class or its own hierarchy, or no enclosing type matches
     */
    protected ICompilableTypeInternal isMemberOnEnclosingType(IReducedSymbol symbol) {
        // only non-static inner classes can see members of an enclosing instance
        if (!_cc().isNonStaticInnerClass()) {
            return null;
        }
        // If the symbol is on this class, or any ancestors, it's not enclosed
        //noinspection SuspiciousMethodCalls
        IType symbolClass = maybeUnwrapProxy(symbol.getGosuClass());
        if (getGosuClass().getAllTypesInHierarchy().contains(symbolClass)) {
            return null;
        }
        ICompilableTypeInternal enclosingClass = _cc().getEnclosingType();
        // when the outermost enclosing class is not an enhancement but the symbol's class is,
        // compare against the enhanced type instead of the enhancement itself
        if (!(TypeLord.getOuterMostEnclosingClass(_cc().getEnclosingType()) instanceof IGosuEnhancement)
                && symbolClass instanceof IGosuEnhancement) {
            symbolClass = ((IGosuEnhancement) symbolClass).getEnhancedType();
        }
        // walk outward through the enclosing types looking for one that owns the symbol
        while (enclosingClass != null) {
            //noinspection SuspiciousMethodCalls
            if (enclosingClass.getAllTypesInHierarchy().contains(symbolClass)) {
                return enclosingClass;
            }
            enclosingClass = enclosingClass.getEnclosingType();
        }
        return null;
    }
}
public class Planner { /** * Makes plans starting from the AAM and a String containing the TOSCA of the * offerings available to generate the plans * @ param aam the String representing the AAM * @ param uniqueOfferingsTosca the String containing the Tosca representation of the available offerings * @ return the generated plans * @ throws ParsingException * @ throws IOException */ public String [ ] plan ( String aam , String uniqueOfferingsTosca ) throws ParsingException , IOException { } }
log . info ( "Planning for aam: \n" + aam ) ; // Get offerings log . info ( "Getting Offeing Step: Start" ) ; Map < String , Pair < NodeTemplate , String > > offerings = parseOfferings ( uniqueOfferingsTosca ) ; // getOfferingsFromDiscoverer ( ) ; log . info ( "Getting Offeing Step: Complete" ) ; log . info ( "\nNot deployable offering have been filtered!" ) ; log . info ( "\nDeployable offerings have location: " + deployableProviders ) ; log . info ( "Got " + offerings . size ( ) + " offerings from discoverer:" ) ; // Matchmake log . info ( "Matchmaking Step: Start" ) ; Matchmaker mm = new Matchmaker ( ) ; Map < String , HashSet < String > > matchingOfferings = mm . match ( ToscaSerializer . fromTOSCA ( aam ) , offerings ) ; log . info ( "Matchmaking Step: Complete" ) ; // Optimize String mmOutput = "" ; try { mmOutput = generateMMOutput2 ( matchingOfferings , offerings ) ; } catch ( JsonProcessingException e ) { log . error ( "Error preparing matchmaker output for optimization" , e ) ; } for ( String s : matchingOfferings . keySet ( ) ) { log . info ( "Module " + s + "has matching offerings: " + matchingOfferings . get ( s ) ) ; } log . info ( "Optimization Step: Start" ) ; log . info ( "Calling optimizer with suitable offerings: \n" + mmOutput ) ; Optimizer optimizer = new Optimizer ( ) ; String [ ] outputPlans = optimizer . optimize ( aam , mmOutput ) ; log . info ( "Optimzer result: " + Arrays . asList ( outputPlans ) ) ; log . info ( "Optimization Step: Complete" ) ; return outputPlans ;
public class JTune { /** * Triggers the re computation of all staff lines elements in order to * get the alignment justified . */ private void justify ( ) { } }
if ( m_staffLines . size ( ) > 1 ) { double maxWidth = ( ( JStaffLine ) m_staffLines . elementAt ( 0 ) ) . getWidth ( ) ; for ( int i = 1 ; i < m_staffLines . size ( ) ; i ++ ) { JStaffLine currentStaffLine = ( JStaffLine ) m_staffLines . elementAt ( i ) ; maxWidth = Math . max ( maxWidth , currentStaffLine . getWidth ( ) ) ; } for ( int i = 0 ; i < m_staffLines . size ( ) ; i ++ ) { JStaffLine currentStaffLine = ( JStaffLine ) m_staffLines . elementAt ( i ) ; if ( currentStaffLine . getWidth ( ) > maxWidth / 2 ) currentStaffLine . scaleToWidth ( maxWidth ) ; } }
public class FaceletViewDeclarationLanguageStrategy { /** * { @ inheritDoc } */ public boolean handles ( String viewId ) { } }
if ( viewId == null ) { return false ; } // Check extension first as it ' s faster than mappings if ( viewId . endsWith ( _extension ) ) { // If the extension matches , it ' s a Facelet viewId . return true ; } // Otherwise , try to match the view identifier with the facelet mappings return _acceptPatterns != null && _acceptPatterns . matcher ( viewId ) . matches ( ) ;
public class AptField { /** * Returns the access modifier associated with the field */ public String getAccessModifier ( ) { } }
if ( _fieldDecl == null ) return "" ; Collection < Modifier > modifiers = _fieldDecl . getModifiers ( ) ; if ( modifiers . contains ( Modifier . PRIVATE ) ) return "private" ; if ( modifiers . contains ( Modifier . PROTECTED ) ) return "protected" ; if ( modifiers . contains ( Modifier . PUBLIC ) ) return "public" ; return "" ;
public class InfluxDBResultMapper { /** * Process a { @ link QueryResult } object returned by the InfluxDB client inspecting the internal * data structure and creating the respective object instances based on the Class passed as * parameter . * @ param queryResult the InfluxDB result object * @ param clazz the Class that will be used to hold your measurement data * @ param < T > the target type * @ param measurementName name of the Measurement * @ param precision the time precision of results * @ return a { @ link List } of objects from the same Class passed as parameter and sorted on the * same order as received from InfluxDB . * @ throws InfluxDBMapperException If { @ link QueryResult } parameter contain errors , * < code > clazz < / code > parameter is not annotated with & # 64 ; Measurement or it was not * possible to define the values of your POJO ( e . g . due to an unsupported field type ) . */ public < T > List < T > toPOJO ( final QueryResult queryResult , final Class < T > clazz , final String measurementName , final TimeUnit precision ) throws InfluxDBMapperException { } }
Objects . requireNonNull ( measurementName , "measurementName" ) ; Objects . requireNonNull ( queryResult , "queryResult" ) ; Objects . requireNonNull ( clazz , "clazz" ) ; throwExceptionIfResultWithError ( queryResult ) ; cacheMeasurementClass ( clazz ) ; List < T > result = new LinkedList < T > ( ) ; queryResult . getResults ( ) . stream ( ) . filter ( internalResult -> Objects . nonNull ( internalResult ) && Objects . nonNull ( internalResult . getSeries ( ) ) ) . forEach ( internalResult -> { internalResult . getSeries ( ) . stream ( ) . filter ( series -> series . getName ( ) . equals ( measurementName ) ) . forEachOrdered ( series -> { parseSeriesAs ( series , clazz , result , precision ) ; } ) ; } ) ; return result ;
public class SecretsManagerSecretResourceData { /** * Optional . The staging labels whose values you want to make available on the core , in addition to ' ' AWSCURRENT ' ' . * @ param additionalStagingLabelsToDownload * Optional . The staging labels whose values you want to make available on the core , in addition to * ' ' AWSCURRENT ' ' . */ public void setAdditionalStagingLabelsToDownload ( java . util . Collection < String > additionalStagingLabelsToDownload ) { } }
if ( additionalStagingLabelsToDownload == null ) { this . additionalStagingLabelsToDownload = null ; return ; } this . additionalStagingLabelsToDownload = new java . util . ArrayList < String > ( additionalStagingLabelsToDownload ) ;
public class MetaMediaManager {
    /**
     * Renders the sprites and animations that intersect the supplied dirty region in the
     * specified layer.
     *
     * @param gfx the graphics context to paint to
     * @param layer the layer whose media should be painted
     * @param dirty the region that needs repainting
     */
    public void paintMedia(Graphics2D gfx, int layer, Rectangle dirty) {
        // Paint order flips per layer: sprites under animations in the FRONT layer,
        // animations under sprites otherwise. NOTE(review): presumably this encodes the
        // intended z-ordering — confirm against the rendering design before changing.
        if (layer == FRONT) {
            _spritemgr.paint(gfx, layer, dirty);
            _animmgr.paint(gfx, layer, dirty);
        } else {
            _animmgr.paint(gfx, layer, dirty);
            _spritemgr.paint(gfx, layer, dirty);
        }
    }
}
public class CheckArg { /** * Check that the collection is not empty * @ param argument Collection * @ param name The name of the argument * @ throws IllegalArgumentException If collection is null or empty */ public static void isNotEmpty ( Collection < ? > argument , String name ) { } }
isNotNull ( argument , name ) ; if ( argument . isEmpty ( ) ) { throw new IllegalArgumentException ( CommonI18n . argumentMayNotBeEmpty . text ( name ) ) ; }
public class SliceUtf8 {
    /**
     * Gets the substring starting at {@code codePointStart} and extending for
     * {@code codePointLength} code points.
     *
     * Note: This method does not explicitly check for valid UTF-8, and may return
     * incorrect results or throw an exception for invalid UTF-8.
     */
    public static Slice substring(Slice utf8, int codePointStart, int codePointLength) {
        checkArgument(codePointStart >= 0, "codePointStart is negative");
        checkArgument(codePointLength >= 0, "codePointLength is negative");
        // byte offset of the first code point of the substring
        int indexStart = offsetOfCodePoint(utf8, codePointStart);
        if (indexStart < 0) {
            throw new IllegalArgumentException("UTF-8 does not contain " + codePointStart + " code points");
        }
        if (codePointLength == 0) {
            return Slices.EMPTY_SLICE;
        }
        // byte offset of the LAST code point of the substring (hence length - 1),
        // searched relative to indexStart
        int indexEnd = offsetOfCodePoint(utf8, indexStart, codePointLength - 1);
        if (indexEnd < 0) {
            throw new IllegalArgumentException("UTF-8 does not contain " + (codePointStart + codePointLength) + " code points");
        }
        // advance past the last code point to get the exclusive end offset
        indexEnd += lengthOfCodePoint(utf8, indexEnd);
        if (indexEnd > utf8.length()) {
            // the final code point claims more bytes than the slice holds
            throw new InvalidUtf8Exception("UTF-8 is not well formed");
        }
        return utf8.slice(indexStart, indexEnd - indexStart);
    }
}
public class StaticSemanticSpace {
    /**
     * Loads the {@link SemanticSpace} from the binary formatted file, adding its words to
     * {@link #termToIndex} and returning the {@code Matrix} containing the space's vectors.
     *
     * The expected stream layout is: row count (int), column count (int), then for each
     * row a UTF string (the word) followed by {@code cols} doubles (its vector).
     *
     * @param fileStream a file in {@link SSpaceFormat#BINARY binary} format
     */
    private Matrix loadBinary(InputStream fileStream) throws IOException {
        DataInputStream dis = new DataInputStream(fileStream);
        int rows = dis.readInt();
        int cols = dis.readInt();
        // create a dense matrix
        Matrix m = new ArrayMatrix(rows, cols);
        // row buffer reused across iterations; setRow is assumed to copy it —
        // TODO confirm ArrayMatrix.setRow copies rather than aliases the array
        double[] d = new double[cols];
        for (int row = 0; row < rows; ++row) {
            String word = dis.readUTF();
            termToIndex.put(word, row);
            for (int col = 0; col < cols; ++col) {
                d[col] = dis.readDouble();
            }
            m.setRow(row, d);
        }
        return m;
    }
}
public class ZeroLeggedOAuthInterceptor {
    /**
     * Get the OAuthConsumer. Will initialize it lazily from the configured properties
     * (realm, consumerKey, secretKey under a per-interceptor prefix).
     *
     * @return the OAuthConsumer object.
     */
    private synchronized RealmOAuthConsumer getConsumer() {
        // could just inject these, but I kinda prefer pushing this out
        // to the properties file...
        if (consumer == null) {
            OAuthServiceProvider serviceProvider = new OAuthServiceProvider("", "", "");
            // all settings share one per-interceptor property prefix; build it once
            final String prefix = "org.jasig.rest.interceptor.oauth." + id + ".";
            String realm = propertyResolver.getProperty(prefix + "realm");
            String consumerKey = propertyResolver.getProperty(prefix + "consumerKey");
            String secretKey = propertyResolver.getProperty(prefix + "secretKey");
            Assert.notNull(consumerKey, "The property \"" + prefix + "consumerKey\" must be set.");
            Assert.notNull(secretKey, "The property \"" + prefix + "secretKey\" must be set.");
            consumer = new RealmOAuthConsumer(consumerKey, secretKey, realm, serviceProvider);
        }
        return consumer;
    }
}
public class Mutations { /** * Moves positions of mutations by specified offset * @ param offset offset * @ return relocated positions */ public Mutations < S > move ( int offset ) { } }
int [ ] newMutations = new int [ mutations . length ] ; for ( int i = 0 ; i < mutations . length ; ++ i ) newMutations [ i ] = Mutation . move ( mutations [ i ] , offset ) ; return new Mutations < S > ( alphabet , newMutations , true ) ;
public class ObservableListenerHelper {
    /**
     * {@inheritDoc}
     *
     * Removes the first occurrence of the given invalidation listener. The backing
     * storage is polymorphic: a single listener object when size == 1, otherwise an
     * Object[] where — judging from the index bounds used below — invalidation
     * listeners occupy the first {@code invalidationSize} slots (TODO confirm against
     * the add-listener code, which is not visible here).
     */
    @Override
    public void removeListener(InvalidationListener listener) {
        Objects.requireNonNull(listener);
        if (0 < invalidationSize) {
            if (size == 1) {
                // single listener stored directly, not in an array
                if (invalidationSize == 1 && this.listener.equals(listener)) {
                    sentinel = false;
                    this.listener = null;
                    invalidationSize--;
                    size--;
                }
            } else if (size == 2) {
                // exactly two listeners: collapse back to direct storage of the survivor
                Object[] l = (Object[]) this.listener;
                if (listener.equals(l[0])) {
                    sentinel = false;
                    invalidationSize--;
                    size--;
                    this.listener = l[1];
                } else if (invalidationSize == 2 && listener.equals(l[1])) {
                    sentinel = false;
                    invalidationSize--;
                    size--;
                    this.listener = l[0];
                }
            } else {
                // general case: search the invalidation-listener prefix of the array
                Object[] l = (Object[]) this.listener;
                for (int i = 0; i < invalidationSize; i++) {
                    if (listener.equals(l[i])) {
                        if (sentinel) {
                            // sentinel set means the array may be shared with an in-flight
                            // iteration — copy-on-write before mutating it
                            sentinel = false;
                            l = Arrays.copyOf(l, l.length);
                            this.listener = l;
                        }
                        if (i + 1 < size) {
                            // shift the remaining listeners left over the removed slot
                            System.arraycopy(l, i + 1, l, i, size - i - 1);
                        } else {
                            l[i] = null;
                        }
                        invalidationSize--;
                        size--;
                        break;
                    }
                }
            }
        }
    }
}
public class ChangeSetAdapter {
    /**
     * Handle the addition of a node.
     *
     * <p>Intentionally a no-op: adapter classes provide empty default implementations so
     * subclasses only override the events they care about.
     *
     * @param workspaceName the workspace in which the node information should be available; may not be null
     * @param key the unique key for the node; may not be null
     * @param path the path of the node; may not be null
     * @param primaryType the primary type of the node; may not be null
     * @param mixinTypes the mixin types for the node; may not be null but may be empty
     * @param properties the properties of the node; may not be null but may be empty
     */
    protected void addNode(String workspaceName, NodeKey key, Path path, Name primaryType,
            Set<Name> mixinTypes, Properties properties) {
        // no-op by default; subclasses override
    }
}
public class DMNEvaluatorCompiler { /** * Utility method to have a error message is reported if a DMN Variable is missing typeRef . * @ param model used for reporting errors * @ param variable the variable to extract typeRef * @ return the ` variable . typeRef ` or null in case of errors . Errors are reported with standard notification mechanism via MsgUtil . reportMessage */ private static QName variableTypeRefOrErrIfNull ( DMNModelImpl model , InformationItem variable ) { } }
if ( variable . getTypeRef ( ) != null ) { return variable . getTypeRef ( ) ; } else { MsgUtil . reportMessage ( logger , DMNMessage . Severity . ERROR , variable , model , null , null , Msg . MISSING_TYPEREF_FOR_VARIABLE , variable . getName ( ) , variable . getParentDRDElement ( ) . getIdentifierString ( ) ) ; return null ; }
public class ServerMappingController { /** * Updates the src url in the server redirects * @ param model * @ param id * @ param enabled * @ return * @ throws Exception */ @ ExceptionHandler ( Exception . class ) @ RequestMapping ( value = "api/edit/server/{id}" , method = RequestMethod . POST ) public @ ResponseBody ServerRedirect updateServer ( Model model , @ PathVariable int id , @ RequestParam ( required = false ) Boolean enabled ) throws Exception { } }
logger . info ( "updating Server" ) ; if ( enabled != null ) { if ( enabled ) { Client . enableHost ( ServerRedirectService . getInstance ( ) . getRedirect ( id ) . getSrcUrl ( ) ) ; } else { Client . disableHost ( ServerRedirectService . getInstance ( ) . getRedirect ( id ) . getSrcUrl ( ) ) ; } } return ServerRedirectService . getInstance ( ) . getRedirect ( id ) ;
public class HttpStatus { /** * Returns the { @ link HttpStatus } having the given status code . * @ param statusCode * An HTTP status code integer . * @ return The { @ link HttpStatus } having the given status code . */ public static HttpStatus fromStatusCode ( final int statusCode ) { } }
if ( statusCode < 100 || statusCode > 999 ) { throw new IllegalArgumentException ( "Illegal status code " + statusCode ) ; } HttpStatus result = STATUS_CODES . get ( statusCode ) ; if ( result == null ) { return new HttpStatus ( statusCode , "Unknown" ) ; } return result ;
public class FlinkKafkaConsumerBase {
    /**
     * Specifies the consumer to start reading from the earliest offset for all partitions.
     * This lets the consumer ignore any committed group offsets in Zookeeper / Kafka brokers.
     *
     * <p>This method does not affect where partitions are read from when the consumer is
     * restored from a checkpoint or savepoint. When the consumer is restored from a
     * checkpoint or savepoint, only the offsets in the restored state will be used.
     *
     * @return The consumer object, to allow function chaining.
     */
    public FlinkKafkaConsumerBase<T> setStartFromEarliest() {
        this.startupMode = StartupMode.EARLIEST;
        // timestamp- and offset-specific settings are mutually exclusive with EARLIEST,
        // so clear any previously configured values
        this.startupOffsetsTimestamp = null;
        this.specificStartupOffsets = null;
        return this;
    }
}
public class NlsFormatterChoice {
    /**
     * This method parses the {@link Condition#comparatorArgument comparator argument}.
     *
     * Accepted forms: a quoted string (single or double quotes), the literal
     * {@code null}, an ISO-8601 date, a boolean literal, or a double.
     *
     * @param scanner is the {@link CharSequenceScanner}.
     * @return the parsed comparator argument.
     * @throws IllegalArgumentException if no argument characters could be read
     */
    private Object parseComparatorArgument(CharSequenceScanner scanner) {
        // remember the start so the error message can show the offending substring
        int index = scanner.getCurrentIndex();
        Object comparatorArgument;
        char c = scanner.forcePeek();
        if ((c == '"') || (c == '\'')) {
            // quoted string: consume the opening quote, read to the matching close quote
            scanner.next();
            comparatorArgument = scanner.readUntil(c, false, c);
        } else {
            String argument = scanner.readWhile(FILTER_COMPARATOR_ARGUMENT);
            if (argument.length() == 0) {
                throw new IllegalArgumentException(scanner.substring(index, scanner.getCurrentIndex()));
            }
            if ("null".equals(argument)) {
                comparatorArgument = null;
            } else if (argument.matches(Iso8601UtilLimited.PATTERN_STRING_ALL)) {
                comparatorArgument = this.nlsDependencies.getIso8601Util().parseDate(argument);
            } else if (Boolean.TRUE.toString().equals(argument)) {
                comparatorArgument = Boolean.TRUE;
            } else if (Boolean.FALSE.toString().equals(argument)) {
                comparatorArgument = Boolean.FALSE;
            } else {
                // double vs. date?
                comparatorArgument = Double.valueOf(argument);
            }
        }
        return comparatorArgument;
    }
}
public class SecurityCenterClient { /** * Creates a finding . The corresponding source must exist for finding creation to succeed . * < p > Sample code : * < pre > < code > * try ( SecurityCenterClient securityCenterClient = SecurityCenterClient . create ( ) ) { * SourceName parent = SourceName . of ( " [ ORGANIZATION ] " , " [ SOURCE ] " ) ; * String findingId = " " ; * Finding finding = Finding . newBuilder ( ) . build ( ) ; * Finding response = securityCenterClient . createFinding ( parent , findingId , finding ) ; * < / code > < / pre > * @ param parent Resource name of the new finding ' s parent . Its format should be * " organizations / [ organization _ id ] / sources / [ source _ id ] " . * @ param findingId Unique identifier provided by the client within the parent scope . It must be * alphanumeric and less than or equal to 32 characters and greater than 0 characters in * length . * @ param finding The Finding being created . The name and security _ marks will be ignored as they * are both output only fields on this resource . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Finding createFinding ( SourceName parent , String findingId , Finding finding ) { } }
CreateFindingRequest request = CreateFindingRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setFindingId ( findingId ) . setFinding ( finding ) . build ( ) ; return createFinding ( request ) ;
public class StreamFunctionExecutor {
    /**
     * Execute the function for the given input and output.
     *
     * @param input The input
     * @param output The output
     * @throws IOException If an I/O exception occurs
     */
    public void execute(InputStream input, OutputStream output) throws IOException {
        // convenience overload: delegate with no additional context argument
        execute(input, output, null);
    }
}
public class WMultiFileWidgetRenderer {
    /**
     * Paints the given WMultiFileWidget as XML: either a file-upload response fragment,
     * or the full widget element with its configuration attributes, uploaded files and
     * any error diagnostics.
     *
     * @param component the WMultiFileWidget to paint.
     * @param renderContext the RenderContext to paint to.
     */
    @Override
    public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
        WMultiFileWidget widget = (WMultiFileWidget) component;
        XmlStringBuilder xml = renderContext.getWriter();
        // Check if rendering a file upload response; if so nothing else is rendered
        String uploadId = widget.getFileUploadRequestId();
        if (uploadId != null) {
            handleFileUploadRequest(widget, xml, uploadId);
            return;
        }
        boolean readOnly = widget.isReadOnly();
        xml.appendTagOpen(TAG_NAME);
        xml.appendAttribute("id", component.getId());
        xml.appendOptionalAttribute("class", component.getHtmlClass());
        xml.appendOptionalAttribute("track", component.isTracking(), "true");
        xml.appendOptionalAttribute("hidden", widget.isHidden(), "true");
        if (readOnly) {
            xml.appendAttribute("readOnly", "true");
        } else {
            // interactive mode: emit the upload constraints and helper component links
            long maxFileSize = widget.getMaxFileSize();
            int maxFiles = widget.getMaxFiles();
            WComponent dropzone = widget.getDropzone();
            WImageEditor editor = widget.getEditor();
            xml.appendOptionalAttribute("disabled", widget.isDisabled(), "true");
            xml.appendOptionalAttribute("required", widget.isMandatory(), "true");
            xml.appendOptionalAttribute("toolTip", widget.getToolTip());
            xml.appendOptionalAttribute("accessibleText", widget.getAccessibleText());
            xml.appendOptionalAttribute("acceptedMimeTypes", typesToString(widget.getFileTypes()));
            xml.appendOptionalAttribute("maxFileSize", maxFileSize > 0, maxFileSize);
            xml.appendOptionalAttribute("maxFiles", maxFiles > 0, maxFiles);
            if (dropzone != null) {
                xml.appendAttribute("dropzone", dropzone.getId());
            }
            if (editor != null) {
                xml.appendAttribute("editor", editor.getId());
                if (editor.getUseCamera()) {
                    xml.appendAttribute("camera", true);
                }
            }
        }
        if (widget.getColumns() != null) {
            xml.appendAttribute("cols", widget.getColumns());
        }
        if (widget.getFileAjaxAction() != null) {
            xml.appendAttribute("ajax", "true");
        }
        // diagnostics only apply in interactive mode
        List<Diagnostic> diags = readOnly ? null : widget.getDiagnostics(Diagnostic.ERROR);
        if (widget.getFiles().isEmpty()) {
            if (readOnly || diags == null || diags.isEmpty()) {
                // nothing nested to render: close as an empty element and stop
                xml.appendEnd();
                return;
            }
            xml.appendClose();
        } else {
            xml.appendClose();
            // Render files
            int i = 0;
            for (FileWidgetUpload file : widget.getFiles()) {
                FileWidgetRendererUtil.renderFileElement(widget, xml, file, i++);
            }
        }
        if (!readOnly && diags != null && !diags.isEmpty()) {
            DiagnosticRenderUtil.renderDiagnostics(widget, renderContext);
        }
        xml.appendEndTag(TAG_NAME);
    }
}
public class DrlxParseUtil { /** * Mutates expression * such that , if it contains a < pre > nameRef < / pre > , it is replaced and forcibly casted with < pre > ( type ) nameRef < / pre > . * @ param expression a mutated expression */ public static void forceCastForName ( String nameRef , Type type , Expression expression ) { } }
List < NameExpr > allNameExprForName = expression . findAll ( NameExpr . class , n -> n . getNameAsString ( ) . equals ( nameRef ) ) ; for ( NameExpr n : allNameExprForName ) { n . getParentNode ( ) . get ( ) . replace ( n , new EnclosedExpr ( new CastExpr ( type , n ) ) ) ; }
public class GVRWorld {
    /**
     * Add a {@link GVRConstraint} to this physics world.
     *
     * <p>The work is queued onto the physics thread; adding the same constraint twice
     * is a no-op, and both rigid bodies referenced by the constraint must already be
     * in this world.
     *
     * @param gvrConstraint The {@link GVRConstraint} to add.
     * @throws UnsupportedOperationException (on the physics thread) if a referenced
     *         rigid body is not in this world
     */
    public void addConstraint(final GVRConstraint gvrConstraint) {
        mPhysicsContext.runOnPhysicsThread(new Runnable() {
            @Override
            public void run() {
                // already registered: nothing to do
                if (contains(gvrConstraint)) {
                    return;
                }
                // bodyA is mandatory; bodyB is optional but must be present if referenced
                if (!contains(gvrConstraint.mBodyA)
                        || (gvrConstraint.mBodyB != null && !contains(gvrConstraint.mBodyB))) {
                    throw new UnsupportedOperationException("Rigid body not found in the physics world.");
                }
                NativePhysics3DWorld.addConstraint(getNative(), gvrConstraint.getNative());
                // track the native handle so contains()/removal can find the constraint
                mPhysicsObject.put(gvrConstraint.getNative(), gvrConstraint);
            }
        });
    }
}
public class ModeShapeRestClient {
    /**
     * Returns all the node types that are available in the repository from {@code repoUrl}.
     *
     * @return a {@link NodeTypes} instance; never {@code null}
     * @throws RuntimeException if the server does not answer with an OK response
     */
    public NodeTypes getNodeTypes() {
        String url = jsonRestClient.appendToURL(ITEMS_METHOD, NODE_TYPES_SEGMENT);
        JSONRestClient.Response response = jsonRestClient.doGet(url);
        if (!response.isOK()) {
            // surface the server's own response text in the error
            throw new RuntimeException(JdbcI18n.invalidServerResponse.text(url, response.asString()));
        }
        return new NodeTypes(response.json());
    }
}
public class ConfigException { /** * public static ConfigException createLine ( String loc , String msg ) * if ( " " . equals ( loc ) ) { * return new ConfigException ( msg ) ; * String fileName = getFileName ( loc ) ; * int line = getLine ( loc ) ; * String source = ConfigUtilTemp . getSourceLines ( fileName , line ) ; * return new ConfigExceptionLine ( loc + msg + source ) ; * private static String getFileName ( String loc ) * loc = loc . trim ( ) ; * if ( loc . endsWith ( " : " ) ) { * loc = loc . substring ( 0 , loc . length ( ) - 1 ) ; * int p = loc . lastIndexOf ( ' : ' ) ; * if ( p < = 0 ) { * return null ; * else { * return loc . substring ( 0 , p ) ; * private static int getLine ( String loc ) * loc = loc . trim ( ) ; * if ( loc . endsWith ( " : " ) ) { * loc = loc . substring ( 0 , loc . length ( ) - 1 ) ; * int p = loc . lastIndexOf ( ' : ' ) ; * if ( p < = 0 ) { * return 0; * else { * return Integer . parseInt ( loc . substring ( p + 1 ) ) ; * public static RuntimeException create ( Field field , Throwable e ) * return create ( loc ( field ) , e ) ; * public static RuntimeException create ( Method method , Throwable e ) * return create ( loc ( method ) , e ) ; * public static RuntimeException create ( Method method , String msg , Throwable e ) * return new ConfigException ( loc ( method ) + msg , e ) ; * public static RuntimeException create ( Method method , String msg ) * return new ConfigException ( loc ( method ) + msg ) ; */ public static ConfigException createConfig ( Throwable e ) { } }
if ( e instanceof ConfigException ) return ( ConfigException ) e ; else return new ConfigException ( e ) ;
public class TrackerMeanShiftComaniciu2003 {
    /**
     * Computes the difference between two histograms using SAD (sum of absolute
     * differences).
     *
     * This is a change from the paper, which uses Bhattacharyya. Bhattacharyya could give
     * poor performance even with perfect data since two errors can cancel each other out.
     * For example, part of the histogram is too small and another part is too large.
     */
    protected double distanceHistogram(float[] histogramA, float[] histogramB) {
        double total = 0;
        for (int idx = 0; idx < histogramA.length; idx++) {
            // subtract in float, exactly as before, then widen the absolute difference
            float diff = histogramA[idx] - histogramB[idx];
            total += Math.abs(diff);
        }
        return total;
    }
}
public class StandardAtomGenerator { /** * Utility to determine if the specified mass is the major isotope for the given atomic number . * @ param number atomic number * @ param mass atomic mass * @ return the mass is the major mass for the atomic number */ private boolean isMajorIsotope ( int number , int mass ) { } }
try { IIsotope isotope = Isotopes . getInstance ( ) . getMajorIsotope ( number ) ; return isotope != null && isotope . getMassNumber ( ) . equals ( mass ) ; } catch ( IOException e ) { return false ; }
public class CreateHapgRequestMarshaller {
    /**
     * Marshalls the given request object into the supplied protocol marshaller.
     *
     * @param createHapgRequest   the request to marshall; must not be null
     * @param protocolMarshaller  target marshaller receiving the request members
     * @throws SdkClientException when the request is null or marshalling fails
     */
    public void marshall(CreateHapgRequest createHapgRequest, ProtocolMarshaller protocolMarshaller) {
        if (createHapgRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the Label member is marshalled for this request type.
            protocolMarshaller.marshall(createHapgRequest.getLabel(), LABEL_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class First { /** * Initializes the first map for data input . */ private void initFirstMapForGrammarProductions ( ) { } }
for ( Production production : grammar . getProductions ( ) . getList ( ) ) { if ( ! firstGrammar . containsKey ( production . getName ( ) ) ) { firstGrammar . put ( production . getName ( ) , new LinkedHashSet < Terminal > ( ) ) ; } }
public class JSONReader { /** * Method for reading a JSON Array from input and building a < code > Object [ ] < / code > * out of it . Note that if input does NOT contain a * JSON Array , { @ link JSONObjectException } will be thrown . */ public Object [ ] readArray ( ) throws IOException { } }
if ( _parser . isExpectedStartArrayToken ( ) ) { return AnyReader . std . readArrayFromArray ( this , _parser , _collectionBuilder ) ; } if ( _parser . hasToken ( JsonToken . VALUE_NULL ) ) { return null ; } throw JSONObjectException . from ( _parser , "Can not read an array: expect to see START_ARRAY ('['), instead got: " + ValueReader . _tokenDesc ( _parser ) ) ;
public class PnPLepetitEPnP {
    /**
     * Given the control points it computes the weights (barycentric coordinates)
     * for each camera point. This is done by solving the linear equation
     * C*&alpha; = X, where C is the control point matrix, &alpha; is the
     * N-by-numControl matrix containing the solution, and X is the camera point
     * matrix. N is the number of points.
     * <p>
     * C = [controlPts'; ones(1,4)],  X = [cameraPts'; ones(1,N)]
     *
     * @param controlWorldPts control points in world coordinates
     * @param alphas          output: one row of weights per world point
     * @param worldPts        the world points to compute weights for
     */
    protected void computeBarycentricCoordinates(FastQueue<Point3D_F64> controlWorldPts,
                                                 DMatrixRMaj alphas,
                                                 List<Point3D_F64> worldPts) {
        alphas.reshape(worldPts.size(), numControl, false);
        v_temp.reshape(3, 1);
        A_temp.reshape(3, numControl - 1);
        // Build the 3 x (numControl-1) matrix of mean-subtracted control points.
        for (int i = 0; i < numControl - 1; i++) {
            Point3D_F64 c = controlWorldPts.get(i);
            A_temp.set(0, i, c.x - meanWorldPts.x);
            A_temp.set(1, i, c.y - meanWorldPts.y);
            A_temp.set(2, i, c.z - meanWorldPts.z);
        }
        // invert the matrix — pseudo-inverse; A_temp is reshaped (transposed
        // dimensions) so it can hold its own inverse in place.
        solverPinv.setA(A_temp);
        A_temp.reshape(A_temp.numCols, A_temp.numRows);
        solverPinv.invert(A_temp);
        w_temp.reshape(numControl - 1, 1);
        // Solve for each point's first (numControl-1) weights, then derive the
        // last weight so that all weights sum to 1.
        for (int i = 0; i < worldPts.size(); i++) {
            Point3D_F64 p = worldPts.get(i);
            v_temp.data[0] = p.x - meanWorldPts.x;
            v_temp.data[1] = p.y - meanWorldPts.y;
            v_temp.data[2] = p.z - meanWorldPts.z;
            MatrixVectorMult_DDRM.mult(A_temp, v_temp, w_temp);
            int rowIndex = alphas.numCols * i;
            for (int j = 0; j < numControl - 1; j++)
                alphas.data[rowIndex++] = w_temp.data[j];
            // Final weight: 1 minus the sum of the computed weights.
            if (numControl == 4)
                alphas.data[rowIndex] = 1 - w_temp.data[0] - w_temp.data[1] - w_temp.data[2];
            else
                alphas.data[rowIndex] = 1 - w_temp.data[0] - w_temp.data[1];
        }
    }
}
public class PersistenceUnitScanner { /** * Finds all specified ORM files , by name , constrained in location by the persistence unit root and jar files . * @ param ormFileName The name of the ORM file to search for * @ return A List of URLs of resources found by the ClassLoader . Will be an empty List if none are found . * @ throws IOException */ private List < URL > findORMResources ( PersistenceUnitInfo pui , String ormFileName ) throws IOException { } }
final boolean isMetaInfoOrmXML = "META-INF/orm.xml" . equals ( ormFileName ) ; final ArrayList < URL > retArr = new ArrayList < URL > ( ) ; Enumeration < URL > ormEnum = pui . getClassLoader ( ) . getResources ( ormFileName ) ; while ( ormEnum . hasMoreElements ( ) ) { final URL url = ormEnum . nextElement ( ) ; final String urlExtern = url . toExternalForm ( ) ; // ParserUtils . decode ( url . toExternalForm ( ) ) ; if ( ! isMetaInfoOrmXML ) { // If it ' s not " META - INF / orm . xml " , then the mapping files may be present anywhere in the classpath . retArr . add ( url ) ; continue ; } // Check against persistence unit root if ( urlExtern . startsWith ( pui . getPersistenceUnitRootUrl ( ) . toExternalForm ( ) ) ) { retArr . add ( url ) ; continue ; } // Check against Jar files , if any for ( URL jarUrl : pui . getJarFileUrls ( ) ) { final String jarExtern = jarUrl . toExternalForm ( ) ; if ( urlExtern . startsWith ( jarExtern ) ) { retArr . add ( url ) ; continue ; } } } return retArr ;
public class ServerParams { /** * Get the value of the given parameter name belonging to the given module name . If * no such module / parameter name is known , null is returned . Otherwise , the parsed * parameter is returned as an Object . This may be a String , Map , or List depending * on the parameter ' s structure . * @ param moduleName Name of module to get parameter for . * @ param paramName Name of parameter to get value of . * @ return Parameter value as an Object or null if unknown . */ public Object getModuleParam ( String moduleName , String paramName ) { } }
Map < String , Object > moduleParams = getModuleParams ( moduleName ) ; if ( moduleParams == null ) { return null ; } return moduleParams . get ( paramName ) ;
public class AbstractClassicTag { /** * Returns the closest parent form tag , or null if there is none . */ protected Form getNearestForm ( ) { } }
Tag parentTag = getParent ( ) ; while ( parentTag != null ) { if ( parentTag instanceof Form ) return ( Form ) parentTag ; parentTag = parentTag . getParent ( ) ; } return null ;
public class GenericUrl { /** * Constructs the URL based on { @ link URL # URL ( URL , String ) } with this URL representation from * { @ link # toURL ( ) } and a relative url . * < p > Any { @ link MalformedURLException } is wrapped in an { @ link IllegalArgumentException } . * @ return new URL instance * @ since 1.14 */ public final URL toURL ( String relativeUrl ) { } }
try { URL url = toURL ( ) ; return new URL ( url , relativeUrl ) ; } catch ( MalformedURLException e ) { throw new IllegalArgumentException ( e ) ; }
public class EditableCellFocusAction { /** * Provide the custom behaviour of the Action */ @ Override public void actionPerformed ( ActionEvent e ) { } }
invokeOriginalAction ( e ) ; int row = table . getSelectedRow ( ) ; int column = table . getSelectedColumn ( ) ; if ( table . isCellEditable ( row , column ) ) { table . editCellAt ( row , column , e ) ; }
public class SocketJoiner {
    /**
     * Binds the listener socket to the internal interface if one was specified,
     * otherwise binds on all interfaces. The leader won't invoke this.
     *
     * @throws Exception if binding fails and no listener socket was bound before
     */
    private void doBind() throws Exception {
        LOG.debug("Creating listener socket");
        // Open the selector first; without it no accept events can be serviced.
        try {
            m_selector = Selector.open();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        ServerSocketChannel listenerSocket = ServerSocketChannel.open();
        InetSocketAddress inetsockaddr;
        // No internal interface configured -> bind the wildcard address.
        if ((m_internalInterface == null) || (m_internalInterface.length() == 0)) {
            inetsockaddr = new InetSocketAddress(m_internalPort);
        } else {
            inetsockaddr = new InetSocketAddress(m_internalInterface, m_internalPort);
        }
        try {
            hostLog.info("Attempting to bind to internal ip " + inetsockaddr);
            listenerSocket.socket().bind(inetsockaddr);
            // Non-blocking mode is required for selector-based accepts below.
            listenerSocket.configureBlocking(false);
            m_listenerSockets.add(listenerSocket);
        } catch (Exception e) {
            /*
             * If we bound to the leader address, the internal interface address might not
             * bind if it is all interfaces
             */
            // Only fatal when no listener socket at all could be bound.
            if (m_listenerSockets.isEmpty()) {
                LOG.fatal("Failed to bind to " + inetsockaddr);
                CoreUtils.printPortsInUse(hostLog);
                throw e;
            }
        }
        // Register every bound listener for accept readiness on the selector.
        for (ServerSocketChannel ssc : m_listenerSockets) {
            ssc.register(m_selector, SelectionKey.OP_ACCEPT);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Non-Primary Listening on:" + inetsockaddr.toString());
        }
    }
}
public class S3StorageProvider { /** * { @ inheritDoc } */ protected Map < String , String > getAllSpaceProperties ( String spaceId ) { } }
log . debug ( "getAllSpaceProperties(" + spaceId + ")" ) ; // Will throw if bucket does not exist String bucketName = getBucketName ( spaceId ) ; // Retrieve space properties from bucket tags Map < String , String > spaceProperties = new HashMap < > ( ) ; BucketTaggingConfiguration tagConfig = s3Client . getBucketTaggingConfiguration ( bucketName ) ; if ( null != tagConfig ) { for ( TagSet tagSet : tagConfig . getAllTagSets ( ) ) { spaceProperties . putAll ( tagSet . getAllTags ( ) ) ; } } // Handle @ symbol ( change from + ) , to allow for email usernames in ACLs spaceProperties = replaceInMapValues ( spaceProperties , "+" , "@" ) ; // Add space count spaceProperties . put ( PROPERTIES_SPACE_COUNT , getSpaceCount ( spaceId , MAX_ITEM_COUNT ) ) ; return spaceProperties ;
public class Bidi {
    /**
     * Return the index of the character at the start of the nth logical run in
     * this line, as an offset from the start of the line.
     *
     * @param run the index of the run, between 0 and <code>countRuns()</code>
     * @return the start of the run
     * @throws IllegalStateException if this call is not preceded by a successful
     *         call to <code>setPara</code> or <code>setLine</code>
     * @throws IllegalArgumentException if <code>run</code> is not in the range
     *         <code>0 &lt;= run &lt; countRuns()</code>
     */
    public int getRunStart(int run) {
        // Enforce the setPara/setLine precondition documented above.
        verifyValidParaOrLine();
        // Ensure runs/runCount are computed — presumably a lazy computation;
        // confirm against BidiLine.getRuns.
        BidiLine.getRuns(this);
        verifyRange(run, 0, runCount);
        // Map the logical run index to its visual position before indexing runs.
        getLogicalToVisualRunsMap();
        return runs[logicalToVisualRunsMap[run]].start;
    }
}
public class MtasSolrSearchComponent {
    /**
     * (non-Javadoc)
     *
     * @see org.apache.solr.handler.component.SearchComponent#handleResponses(org.
     *      apache.solr.handler.component.ResponseBuilder,
     *      org.apache.solr.handler.component.ShardRequest)
     */
    @Override
    public void handleResponses(ResponseBuilder rb, ShardRequest sreq) {
        // The per-request status object must have been installed in the request
        // context earlier in the request lifecycle.
        MtasSolrStatus solrStatus = Objects.requireNonNull(
                (MtasSolrStatus) rb.req.getContext().get(MtasSolrStatus.class),
                "couldn't find status");
        solrStatus.setStage(rb.stage);
        try {
            if (rb.req.getParams().getBool(PARAM_MTAS, false)) {
                // do nothing
                // NOTE(review): intentionally-empty branch — looks like a
                // placeholder for per-shard response merging; confirm whether
                // logic was meant to go here.
            }
        } catch (ExitableDirectoryReader.ExitingReaderException e) {
            // Query timed out mid-read: record the error on the status rather
            // than propagating.
            solrStatus.setError(e.getMessage());
        }
    }
}
public class ConvolveImageBox {
    /**
     * Performs a vertical 1D convolution of a box kernel across the image.
     *
     * @param input  the original image; not modified
     * @param output where the resulting image is written to; modified
     * @param radius kernel radius (kernel width = 2*radius+1)
     * @param work   optional reusable work arrays
     */
    public static void vertical(GrayS16 input, GrayI16 output, int radius, @Nullable IWorkArrays work) {
        InputSanityCheck.checkSameShape(input, output);
        // Border pixels: use a general convolution with a table (box) kernel,
        // treating values outside the image as zero.
        Kernel1D_S32 kernel = FactoryKernel.table1D_I32(radius);
        ConvolveJustBorder_General_SB.vertical(kernel, ImageBorderValue.wrap(input, 0), output);
        // Inner region: optimized box convolution; concurrent variant when the
        // global concurrency flag is enabled.
        if (BoofConcurrency.USE_CONCURRENT) {
            ImplConvolveBox_MT.vertical(input, output, radius, work);
        } else {
            ImplConvolveBox.vertical(input, output, radius, work);
        }
    }
}
public class NumericUtil {
    /**
     * Compare an object with a number. The object should be a number, or it
     * must be convertible to a {@link BigDecimal} via its string form.
     *
     * @param first  might be a number or a string representing a number
     * @param second must be a number
     * @return 0 if first is numerically equal to second; a negative int if
     *         first is numerically less than second; a positive int if first
     *         is numerically greater than second
     * @throws IllegalArgumentException if first cannot be interpreted as a number
     */
    @SuppressWarnings("unchecked")
    public static int compareNumber(Object first, Number second) {
        // Fast path: identical runtime types compare directly via Comparable.
        if (first instanceof Number && first instanceof Comparable
                && first.getClass().equals(second.getClass())) {
            return ((Comparable<Number>) first).compareTo(second);
        }
        // NOTE: the original body was truncated mid-lambda; this completes the
        // intended fallback — normalize both operands to BigDecimal and compare.
        return toBigDecimal(first).compareTo(toBigDecimal(second));
    }

    /** Converts any object to a BigDecimal via its string form, or fails loudly. */
    private static BigDecimal toBigDecimal(Object number) {
        try {
            return new BigDecimal(number.toString());
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Not a number: " + number, e);
        }
    }
}
public class Questionnaire { /** * syntactic sugar */ public Coding addConcept ( ) { } }
Coding t = new Coding ( ) ; if ( this . concept == null ) this . concept = new ArrayList < Coding > ( ) ; this . concept . add ( t ) ; return t ;
public class NavigationView { /** * Used to restore the bottomsheet state and re - center * button visibility . As well as the { @ link MapView } * position prior to rotation . * @ param savedInstanceState to extract state variables */ public void onRestoreInstanceState ( Bundle savedInstanceState ) { } }
String instanceKey = getContext ( ) . getString ( R . string . navigation_view_instance_state ) ; NavigationViewInstanceState navigationViewInstanceState = savedInstanceState . getParcelable ( instanceKey ) ; recenterBtn . setVisibility ( navigationViewInstanceState . getRecenterButtonVisibility ( ) ) ; wayNameView . setVisibility ( navigationViewInstanceState . isWayNameVisible ( ) ? VISIBLE : INVISIBLE ) ; wayNameView . updateWayNameText ( navigationViewInstanceState . getWayNameText ( ) ) ; resetBottomSheetState ( navigationViewInstanceState . getBottomSheetBehaviorState ( ) ) ; updateInstructionListState ( navigationViewInstanceState . isInstructionViewVisible ( ) ) ; updateInstructionMutedState ( navigationViewInstanceState . isMuted ( ) ) ; mapInstanceState = savedInstanceState . getParcelable ( MAP_INSTANCE_STATE_KEY ) ;
public class HighResolutionClock {
    /**
     * The number of microseconds since 1 Jan 1970 UTC.
     *
     * @return microseconds since the Unix epoch
     */
    public static long epochMicros() {
        final Instant now = Instant.now();
        // Whole seconds contribute 10^6 micros each; the sub-second nanos are
        // truncated down to whole microseconds.
        return now.getEpochSecond() * 1_000_000L + now.getNano() / 1_000L;
    }
}
public class dnsaction { /** * Use this API to delete dnsaction resources of given names . */ public static base_responses delete ( nitro_service client , String actionname [ ] ) throws Exception { } }
base_responses result = null ; if ( actionname != null && actionname . length > 0 ) { dnsaction deleteresources [ ] = new dnsaction [ actionname . length ] ; for ( int i = 0 ; i < actionname . length ; i ++ ) { deleteresources [ i ] = new dnsaction ( ) ; deleteresources [ i ] . actionname = actionname [ i ] ; } result = delete_bulk_request ( client , deleteresources ) ; } return result ;
public class TransportNegotiator { /** * Add an offered remote candidate . The transport candidate can be unusable : * we must check if we can use it . * @ param rc the remote candidate to add . */ private void addRemoteCandidates ( List < TransportCandidate > rc ) { } }
if ( rc != null ) { if ( rc . size ( ) > 0 ) { for ( TransportCandidate aRc : rc ) { addRemoteCandidate ( aRc ) ; } } }
public class DashboardService { /** * Returns the list of dashboards owned by the user . * @ return The list of dashboards owned by the user . * @ throws IOException If the server cannot be reached . * @ throws TokenExpiredException If the token sent along with the request has expired */ public List < Dashboard > getDashboards ( ) throws IOException , TokenExpiredException { } }
String requestUrl = RESOURCE ; ArgusResponse response = getClient ( ) . executeHttpRequest ( ArgusHttpClient . RequestType . GET , requestUrl , null ) ; assertValidResponse ( response , requestUrl ) ; return fromJson ( response . getResult ( ) , new TypeReference < List < Dashboard > > ( ) { } ) ;
public class DropboxClient {
    /**
     * Returns metadata information about the specified resource, checking its
     * hash against the given one with the specified max child entries count.
     * If nothing changed (hash matches) then 304 is returned by the server.
     *
     * @param path      path to the file or directory
     * @param fileLimit max child entries count
     * @param hash      hash to detect whether something changed; may be null
     * @return metadata of the specified resource
     * @see Entry
     */
    public Entry getMetadata(String path, int fileLimit, @Nullable String hash) {
        // Delegates to the full overload; the trailing boolean presumably
        // enables listing of child entries — TODO confirm against the
        // overload's parameter documentation.
        return getMetadata(path, fileLimit, hash, true);
    }
}
public class NormOps_DDRM { /** * The condition p = 2 number of a matrix is used to measure the sensitivity of the linear * system < b > Ax = b < / b > . A value near one indicates that it is a well conditioned matrix . < br > * < br > * & kappa ; < sub > 2 < / sub > = | | A | | < sub > 2 < / sub > | | A < sup > - 1 < / sup > | | < sub > 2 < / sub > * This is also known as the spectral condition number . * @ param A The matrix . * @ return The condition number . */ public static double conditionP2 ( DMatrixRMaj A ) { } }
SingularValueDecomposition_F64 < DMatrixRMaj > svd = DecompositionFactory_DDRM . svd ( A . numRows , A . numCols , false , false , true ) ; svd . decompose ( A ) ; double [ ] singularValues = svd . getSingularValues ( ) ; int n = SingularOps_DDRM . rank ( svd , UtilEjml . TEST_F64 ) ; if ( n == 0 ) return 0 ; double smallest = Double . MAX_VALUE ; double largest = Double . MIN_VALUE ; for ( double s : singularValues ) { if ( s < smallest ) smallest = s ; if ( s > largest ) largest = s ; } return largest / smallest ;
public class PhotosUploadApi { /** * Upload a photo or video to Flickr . * < br > * This method requires authentication with ' write ' permission . * < br > * If the title parameter is null , the filename will be used as the title . * @ param photo ( Required ) the photo or video file to upload . * @ param title ( Optional ) the title of the photo or video . * @ param description ( Optional ) the description of the photo or video . * @ param tags ( Optional ) list of tags to apply to the photo or video . * @ param isPublic ( Optional ) is photo / video visible to everyone . This is the default if none of isPublic , isFriends , or * isFamily is specified . * @ param isFriend ( Optional ) is photo / video visible only to friends . * @ param isFamily ( Optional ) is photo / video visible only to family . * @ param safetyLevel ( Optional ) safety level of the photo or video . * @ param contentType ( Optional ) content type of the upload . * @ param hidden ( Optional ) if true , photo / video will be hidden from public searches . * If false or null , it will be included in public searches . * @ param async if true , the photo / video will be uploaded using the Flickr async API . * @ return object with the results of the upload . Successful synchronous uploads will contain a photo id ; successful * asynchronous uploads will include a ticket id . * @ throws JinxException if required parameters are missing , or if there are any errors . * @ see < a href = " https : / / www . flickr . com / services / api / upload . api . html " > Flickr photo upload documentation < / a > */ public UploadResponse upload ( File photo , String title , String description , List < String > tags , Boolean isPublic , Boolean isFriend , Boolean isFamily , JinxConstants . SafetyLevel safetyLevel , JinxConstants . ContentType contentType , Boolean hidden , Boolean async ) throws JinxException { } }
JinxUtils . validateParams ( photo ) ; byte [ ] photoData = new byte [ ( int ) photo . length ( ) ] ; FileInputStream in = null ; try { in = new FileInputStream ( photo ) ; in . read ( photoData ) ; if ( JinxUtils . isNullOrEmpty ( title ) ) { int index = photo . getName ( ) . indexOf ( '.' ) ; if ( index > 0 ) { title = photo . getName ( ) . substring ( 0 , index ) ; } else { title = photo . getName ( ) ; } } } catch ( Exception e ) { throw new JinxException ( "Unable to load data from photo " + photo . getAbsolutePath ( ) , e ) ; } finally { JinxUtils . close ( in ) ; } return upload ( photoData , title , description , tags , isPublic , isFriend , isFamily , safetyLevel , contentType , hidden , async ) ;
public class FileSystem {
    /**
     * Replies the dirname of the specified file as a file-scheme URL.
     *
     * @param filename the file to parse; may be null
     * @return the dirname of the specified file, or null when the input is
     *         null, the file is an absolute root, or the URL cannot be formed
     * @see #shortBasename(File)
     * @see #largeBasename(File)
     * @see #basename(File)
     * @see #extension(File)
     */
    @Pure
    public static URL dirname(File filename) {
        if (filename == null) {
            return null;
        }
        String parent = fromFileStandardToURLStandard(filename.getParent());
        try {
            if (parent == null || "".equals(parent)) { //$NON-NLS-1$
                // An absolute file with no parent is a filesystem root: no dirname.
                if (filename.isAbsolute()) {
                    return null;
                }
                // Relative file without a parent: dirname is the current directory.
                return new URL(URISchemeType.FILE.name(), "", CURRENT_DIRECTORY); //$NON-NLS-1$
            }
            // Treat Windows specific: a bare drive letter ("C:" or "C|"),
            // optionally preceded by the path separator, needs a trailing separator.
            if (Pattern.matches("^" + URL_PATH_SEPARATOR + "?[a-zA-Z][:|]$", parent)) { //$NON-NLS-1$ //$NON-NLS-2$
                parent += URL_PATH_SEPARATOR;
            }
            return new URL(URISchemeType.FILE.name(), "", parent); //$NON-NLS-1$
        } catch (MalformedURLException exception) {
            // Per the contract above, malformed results yield null instead of throwing.
            return null;
        }
    }
}
public class URLPathEncoder { /** * Encode a URL path using percent - encoding . ' / ' is not encoded . * @ param path * The path to encode . * @ return The encoded path . */ private static String encodePath ( final String path ) { } }
// Accept ' : ' if it is part of a scheme prefix int validColonPrefixLen = 0 ; for ( final String scheme : SCHEME_PREFIXES ) { if ( path . startsWith ( scheme ) ) { validColonPrefixLen = scheme . length ( ) ; break ; } } final byte [ ] pathBytes = path . getBytes ( StandardCharsets . UTF_8 ) ; final StringBuilder encodedPath = new StringBuilder ( pathBytes . length * 3 ) ; for ( int i = 0 ; i < pathBytes . length ; i ++ ) { final byte pathByte = pathBytes [ i ] ; final int b = pathByte & 0xff ; if ( safe [ b ] || ( b == ':' && i < validColonPrefixLen ) ) { encodedPath . append ( ( char ) b ) ; } else { encodedPath . append ( '%' ) ; encodedPath . append ( HEXADECIMAL [ ( b & 0xf0 ) >> 4 ] ) ; encodedPath . append ( HEXADECIMAL [ b & 0x0f ] ) ; } } return encodedPath . toString ( ) ;
public class AddressDivisionBase { /** * Gets the value for the lowest address in the range represented by this address division . * If the value fits in the specified array at the specified index , the same array is returned with the value copied at the specified index . * Otherwise , a new array is allocated and returned with the value copied at the specified index , and the rest of the array contents the same as the original . * You can use { @ link # getBitCount ( ) } to determine the required array length for the bytes . * Since bytes are signed values while addresses are unsigned , values greater than 127 are * represented as the ( negative ) two ' s complement value of the actual value . * You can get the unsigned integer value i from byte b using i = 0xff & amp ; b . * @ return */ @ Override public byte [ ] getBytes ( byte bytes [ ] , int index ) { } }
byte cached [ ] = lowerBytes ; if ( cached == null ) { lowerBytes = cached = getBytesImpl ( true ) ; } return getBytes ( bytes , index , cached ) ;
public class SimpleDocumentDbRepository { /** * delete one document per entity * @ param entity */ @ Override public void delete ( T entity ) { } }
Assert . notNull ( entity , "entity to be deleted should not be null" ) ; final String partitionKeyValue = information . getPartitionKeyFieldValue ( entity ) ; operation . deleteById ( information . getCollectionName ( ) , information . getId ( entity ) , partitionKeyValue == null ? null : new PartitionKey ( partitionKeyValue ) ) ;
public class TextAnalysis {
    /**
     * Splits the source text into tokens by scanning for entropy peaks in the
     * link strength between adjacent character contexts.
     *
     * @param source    the source text
     * @param threshold minimum peak link strength (nats) required to cut a token
     * @return the list of recognized tokens
     */
    public List<String> splitChars(final String source, double threshold) {
        List<String> output = new ArrayList<>();
        int wordStart = 0;
        double aposterioriNatsPrev = 0;
        boolean isIncreasing = false;
        double prevLink = 0;
        for (int i = 1; i < source.length(); i++) {
            // Entropy of the next char given the prefix (a-priori context).
            String priorText = source.substring(0, i);
            TrieNode priorNode = getMaxentPrior(priorText);
            double aprioriNats = entropy(priorNode, priorNode.getParent());
            // Entropy of the previous char given the suffix (a-posteriori context).
            String followingText = source.substring(i - 1, source.length());
            TrieNode followingNode = getMaxentPost(followingText);
            TrieNode godparent = followingNode.godparent();
            double aposterioriNats = entropy(followingNode, godparent);
            // double jointNats = getJointNats ( priorNode , followingNode ) ;
            // Link strength at this boundary: current forward entropy plus the
            // backward entropy measured at the previous position.
            double linkNats = aprioriNats + aposterioriNatsPrev;
            if (isVerbose()) {
                // NOTE(review): "\\n" as a replaceAll *replacement* is an escaped
                // 'n' (yields a literal "n", not backslash-n) — confirm whether
                // Matcher.quoteReplacement was intended for this debug output.
                verbose.println(String.format("%10s\t%10s\t%s",
                        '"' + priorNode.getString().replaceAll("\n", "\\n") + '"',
                        '"' + followingNode.getString().replaceAll("\n", "\\n") + '"',
                        Arrays.asList(aprioriNats, aposterioriNats, linkNats).stream()
                                .map(x -> String.format("%.4f", x))
                                .collect(Collectors.joining("\t"))));
            }
            String word = i < 2 ? "" : source.substring(wordStart, i - 2);
            // Cut a token at a local maximum of link strength that exceeds the
            // threshold, provided the candidate word is long enough.
            if (isIncreasing && linkNats < prevLink && prevLink > threshold && word.length() > 2) {
                wordStart = i - 2;
                output.add(word);
                if (isVerbose())
                    verbose.println(String.format("Recognized token \"%s\"", word));
                prevLink = linkNats;
                aposterioriNatsPrev = aposterioriNats;
                isIncreasing = false;
            } else {
                if (linkNats > prevLink)
                    isIncreasing = true;
                prevLink = linkNats;
                aposterioriNatsPrev = aposterioriNats;
            }
        }
        return output;
    }
}
public class BooleanField { /** * Convert this field ' s binary data to a string . * @ param tempBinary The physical data convert to a string ( must be the raw data class ) . * @ return A display string representing this binary data . */ public String binaryToString ( Object tempBinary ) { } }
String tempString ; boolean bFlag = false ; if ( tempBinary == null ) return Constants . BLANK ; // Special case - unknown value else bFlag = ( ( Boolean ) tempBinary ) . booleanValue ( ) ; if ( bFlag == true ) tempString = YES ; else tempString = NO ; return tempString ;
public class ManagementClientAsync {
    /**
     * Updates an existing rule.
     *
     * @param topicName        name of the topic
     * @param subscriptionName name of the subscription
     * @param ruleDescription  a {@link RuleDescription} object describing the
     *                         attributes with which the rule will be updated
     * @return {@link RuleDescription} of the updated rule
     * @throws IllegalArgumentException if the descriptor is null
     */
    public CompletableFuture<RuleDescription> updateRuleAsync(String topicName, String subscriptionName, RuleDescription ruleDescription) {
        // Delegates to the shared create/update path; the trailing 'true'
        // presumably selects update (if-match) semantics — confirm against
        // putRuleAsync.
        return putRuleAsync(topicName, subscriptionName, ruleDescription, true);
    }
}
public class Configuration { /** * Gets information about why a property was set . Typically this is the * path to the resource objects ( file , URL , etc . ) the property came from , but * it can also indicate that it was set programmatically , or because of the * command line . * @ param name - The property name to get the source of . * @ return null - If the property or its source wasn ' t found . Otherwise , * returns a list of the sources of the resource . The older sources are * the first ones in the list . So for example if a configuration is set from * the command line , and then written out to a file that is read back in the * first entry would indicate that it was set from the command line , while * the second one would indicate the file that the new configuration was read * in from . */ @ InterfaceStability . Unstable public synchronized String [ ] getPropertySources ( String name ) { } }
if ( properties == null ) { // If properties is null , it means a resource was newly added // but the props were cleared so as to load it upon future // requests . So lets force a load by asking a properties list . getProps ( ) ; } // Return a null right away if our properties still // haven ' t loaded or the resource mapping isn ' t defined if ( properties == null || updatingResource == null ) { return null ; } else { String [ ] source = updatingResource . get ( name ) ; if ( source == null ) { return null ; } else { return Arrays . copyOf ( source , source . length ) ; } }
public class CPSpecificationOptionPersistenceImpl { /** * Returns the last cp specification option in the ordered set where CPOptionCategoryId = & # 63 ; . * @ param CPOptionCategoryId the cp option category ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp specification option * @ throws NoSuchCPSpecificationOptionException if a matching cp specification option could not be found */ @ Override public CPSpecificationOption findByCPOptionCategoryId_Last ( long CPOptionCategoryId , OrderByComparator < CPSpecificationOption > orderByComparator ) throws NoSuchCPSpecificationOptionException { } }
CPSpecificationOption cpSpecificationOption = fetchByCPOptionCategoryId_Last ( CPOptionCategoryId , orderByComparator ) ; if ( cpSpecificationOption != null ) { return cpSpecificationOption ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPOptionCategoryId=" ) ; msg . append ( CPOptionCategoryId ) ; msg . append ( "}" ) ; throw new NoSuchCPSpecificationOptionException ( msg . toString ( ) ) ;
public class dnsaaaarec { /** * Use this API to fetch filtered set of dnsaaaarec resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static dnsaaaarec [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
dnsaaaarec obj = new dnsaaaarec ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; dnsaaaarec [ ] response = ( dnsaaaarec [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class PowerMock {
    /**
     * Used to specify expectations on private methods. If possible use the
     * variant that takes only the method name.
     *
     * @param instance  the object whose private method is being stubbed
     * @param method    the private method to expect
     * @param arguments the expected arguments
     * @return the expectation setters for chaining return values / throwables
     * @throws Exception if the expectation cannot be set up
     */
    public static synchronized <T> IExpectationSetters<T> expectPrivate(Object instance, Method method, Object... arguments) throws Exception {
        // Thin synchronized facade over the internal implementation.
        return doExpectPrivate(instance, method, arguments);
    }
}
public class ModuleItem { /** * Remove a child from it - synchronized */ public synchronized void remove ( ModuleItem item ) { } }
if ( item == null || children == null ) return ; // remove from all the parents which have links to it due to aggregate data PmiModule removeInstance = item . getInstance ( ) ; if ( ! ( removeInstance instanceof PmiModuleAggregate ) ) { // recursively remove aggregate data for parents ModuleItem myParent = this ; PmiModule parModule = null ; while ( myParent != null ) { parModule = myParent . getInstance ( ) ; // if parent is aggregate if ( parModule != null && parModule instanceof PmiModuleAggregate ) ( ( PmiModuleAggregate ) parModule ) . remove ( removeInstance ) ; myParent = myParent . getParent ( ) ; } } // remove any children item . _cleanChildren ( ) ; // remove ModuleItem children . remove ( item . getInstance ( ) . getName ( ) ) ; if ( myStatsWithChildren != null ) { updateStatsTree ( ) ; } // bStatsTreeNeedsUpdate = true ; // remove mbean mapping and deactivate any CustomStats mbean // _ cleanMBean ( item ) ; item . getInstance ( ) . cleanup ( ) ; item = null ;
public class JTables { /** * Scroll the given table so that the specified row is visible . * @ param table The table * @ param row The row */ public static void scrollToRow ( JTable table , int row ) { } }
Rectangle visibleRect = table . getVisibleRect ( ) ; Rectangle cellRect = table . getCellRect ( row , 0 , true ) ; Rectangle r = new Rectangle ( visibleRect . x , cellRect . y , visibleRect . width , cellRect . height ) ; table . scrollRectToVisible ( r ) ;
public class EventBus { /** * Bind an { @ link ActEventListener } to an event type extended from { @ link EventObject } . * If either ` eventType ` or the class of ` eventListener ` has ` @ Async ` annotation presented , * it will bind the listener into the async repo . When event get triggered the listener * will be invoked asynchronously . * @ param eventType * the target event type - should be a sub class of { @ link EventObject } * @ param eventListener * an instance of { @ link ActEventListener } or it ' s sub class * @ return this event bus instance */ public EventBus bind ( Class < ? extends EventObject > eventType , ActEventListener eventListener ) { } }
boolean async = isAsync ( eventListener . getClass ( ) ) || isAsync ( eventType ) ; return _bind ( async ? asyncActEventListeners : actEventListeners , eventType , eventListener , 0 ) ;
public class LocaleSelectorImpl {
    /**
     * Sets the locale flags value and notifies registered adapters of the change.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param newLocFlgs the new flags value
     * @generated
     */
    public void setLocFlgs(Integer newLocFlgs) {
        // Capture the previous value so the notification carries old/new (EMF pattern).
        Integer oldLocFlgs = locFlgs;
        locFlgs = newLocFlgs;
        // Only construct and dispatch the notification when adapters are attached.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.LOCALE_SELECTOR__LOC_FLGS, oldLocFlgs, locFlgs));
    }
}
public class P2sVpnGatewaysInner {
    /**
     * Updates virtual wan p2s vpn gateway tags.
     *
     * @param resourceGroupName The resource group name of the P2SVpnGateway.
     * @param gatewayName The name of the gateway.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the P2SVpnGatewayInner object
     */
    public Observable<P2SVpnGatewayInner> beginUpdateTagsAsync(String resourceGroupName, String gatewayName) {
        // Delegate to the ServiceResponse-returning variant and unwrap the body,
        // so callers get just the payload without the HTTP envelope.
        return beginUpdateTagsWithServiceResponseAsync(resourceGroupName, gatewayName).map(new Func1<ServiceResponse<P2SVpnGatewayInner>, P2SVpnGatewayInner>() {
            @Override
            public P2SVpnGatewayInner call(ServiceResponse<P2SVpnGatewayInner> response) {
                return response.body();
            }
        });
    }
}
public class CurvedArrow { /** * Draws an arrow head on the graphics object . The arrow geometry is based * on the point of its head as well as another point , which the arrow is * defined as facing away from . This arrow head has no body . * @ param g * the graphics object to draw upon * @ param head * the point that is the point of the head of the arrow * @ param away * the point opposite from where the arrow is pointing , a point * along the line segment extending from the head backwards from * the head if this were an arrow with a line trailing the head */ private void drawArrow ( Graphics g , Point head , Point away ) { } }
int endX , endY ; double angle = Math . atan2 ( ( double ) ( away . x - head . x ) , ( double ) ( away . y - head . y ) ) ; angle += ARROW_ANGLE ; endX = ( ( int ) ( Math . sin ( angle ) * ARROW_LENGTH ) ) + head . x ; endY = ( ( int ) ( Math . cos ( angle ) * ARROW_LENGTH ) ) + head . y ; g . drawLine ( head . x , head . y , endX , endY ) ; angle -= 2 * ARROW_ANGLE ; endX = ( ( int ) ( Math . sin ( angle ) * ARROW_LENGTH ) ) + head . x ; endY = ( ( int ) ( Math . cos ( angle ) * ARROW_LENGTH ) ) + head . y ; g . drawLine ( head . x , head . y , endX , endY ) ;
public class DragableArea { /** * Make the rectangle visible , for debug purposes . */ public void makeVisible ( ) { } }
CSSClass cls = new CSSClass ( this , "unused" ) ; cls . setStatement ( SVGConstants . CSS_FILL_PROPERTY , SVGConstants . CSS_GREEN_VALUE ) ; cls . setStatement ( SVGConstants . CSS_FILL_OPACITY_PROPERTY , "0.2" ) ; cls . setStatement ( SVGConstants . CSS_CURSOR_PROPERTY , SVGConstants . CSS_POINTER_VALUE ) ; SVGUtil . setAtt ( element , SVGConstants . SVG_STYLE_ATTRIBUTE , cls . inlineCSS ( ) ) ;
public class EnumHelper {
    /**
     * Get the enum value with the passed string ID, case insensitive.
     *
     * @param <ENUMTYPE> The enum type
     * @param aClass     The enum class
     * @param sID        The ID to search
     * @return <code>null</code> if no enum item with the given ID is present.
     */
    @Nullable
    public static <ENUMTYPE extends Enum<ENUMTYPE> & IHasID<String>> ENUMTYPE getFromIDCaseInsensitiveOrNull(@Nonnull final Class<ENUMTYPE> aClass, @Nullable final String sID) {
        // Delegate to the default-taking variant with null as the "not found" value.
        return getFromIDCaseInsensitiveOrDefault(aClass, sID, null);
    }
}
public class HttpISCWriteErrorCallback {
    /**
     * Called by the device side channel when the write had an error.
     * Logs the failure (debug trace plus the HTTP debug log when enabled)
     * and finishes the in-flight error response on the service context.
     *
     * @param vc the virtual connection the write was issued on (may be null)
     * @param t  the error that occurred
     */
    @Override
    public void error(VirtualConnection vc, Throwable t) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "error() called: vc=" + vc + " t=" + t);
        }
        // The VC might be null if the channel was destroyed before the callback completes
        if (vc != null) {
            // The inbound service context was stashed on the VC state map at setup time.
            HttpInboundServiceContextImpl mySC = (HttpInboundServiceContextImpl) vc.getStateMap().get(CallbackIDs.CALLBACK_HTTPISC);
            if (mySC.getHttpConfig().getDebugLog().isEnabled(DebugLog.Level.WARN)) {
                mySC.getHttpConfig().getDebugLog().log(DebugLog.Level.WARN, HttpMessages.MSG_WRITE_FAIL, mySC);
            }
            mySC.logLegacyMessage();
            // sendError() API already set persistent to false
            mySC.finishSendError();
        }
    }
}
public class EJBWrapper {
    /**
     * Adds the default definition for the Object.equals method to the wrapper
     * class being generated. The emitted bytecode implements identity equality:
     * {@code return this == other;}.
     *
     * @param cw            ASM ClassWriter to add the method to.
     * @param implClassName name of the wrapper class being generated.
     */
    private static void addDefaultEqualsMethod(ClassWriter cw, String implClassName) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, INDENT + "adding method : equals (Ljava/lang/Object;)Z");
        // public boolean equals(Object other)
        final String desc = "(Ljava/lang/Object;)Z";
        MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "equals", desc, null, null);
        GeneratorAdapter mg = new GeneratorAdapter(mv, ACC_PUBLIC, "equals", desc);
        mg.visitCode();
        // return this == other;
        mg.loadThis();
        mg.loadArg(0);
        // NOTE(review): the jump is emitted through mv while the rest goes through
        // mg; GeneratorAdapter delegates to mv, so the instruction stream is the
        // same — but the mixed usage is easy to misread. Confirm before changing.
        Label not_equal = new Label();
        mv.visitJumpInsn(IF_ACMPNE, not_equal);
        // References identical: push true and return.
        mg.visitInsn(ICONST_1);
        mg.returnValue();
        // References differ: push false and return.
        mg.visitLabel(not_equal);
        mg.visitInsn(ICONST_0);
        mg.returnValue();
        mg.endMethod();
        mg.visitEnd();
    }
}
public class JThreeStateCheckBox {
    /**
     * Creates new JThreeStateCheckBox: resets the state to OFF, lazily loads
     * the three shared state icons, and configures the component's look.
     *
     * @param text The checkbox description.
     *             NOTE(review): this parameter is never used in the body —
     *             the description is apparently expected to be set elsewhere;
     *             confirm against callers.
     */
    public void init(String text) {
        m_iCurrentState = OFF;
        // Icons are static/shared: load them only once, on first init.
        if (m_iconOff == null) {
            m_iconOff = BaseApplet.getSharedInstance().loadImageIcon(CHECKBOX_OFF);
            m_iconOn = BaseApplet.getSharedInstance().loadImageIcon(CHECKBOX_ON);
            m_iconNull = BaseApplet.getSharedInstance().loadImageIcon(CHECKBOX_NULL);
        }
        this.setIcon(m_iconOff);
        // Flat, transparent rendering with no extra padding.
        this.setBorder(null);
        this.setMargin(JScreenConstants.NO_INSETS);
        this.setOpaque(false);
        // The component listens to its own clicks to cycle the tri-state.
        this.addActionListener(this);
    }
}
public class StreamSourceInputStream { /** * Returns an input stream , freeing the results */ @ Override public InputStream getInputStream ( ) throws IOException { } }
InputStream is = _is ; _is = null ; return is ;
public class IncrementalSemanticAnalysis { /** * Update the semantics using the weighed combination of the semantics of * the co - occurring word and the provided index vector . Note that the index * vector is provided so that the caller can permute it as necessary . * @ param toUpdate the semantics to be updated * @ param cooccurringWord the word that is co - occurring * @ param iv the index vector for the co - occurring word , which has be * permuted as necessary */ @ SuppressWarnings ( "unchecked" ) private void updateSemantics ( SemanticVector toUpdate , String cooccurringWord , TernaryVector iv ) { } }
SemanticVector prevWordSemantics = getSemanticVector ( cooccurringWord ) ; Integer occurrences = wordToOccurrences . get ( cooccurringWord ) ; if ( occurrences == null ) occurrences = 0 ; double semanticWeight = 1d / ( Math . exp ( occurrences / historyDecayRate ) ) ; // The meaning is updated as a combination of the index vector and the // semantics , which is weighted by how many times the co - occurring word // has been seen . The semantics of frequently co - occurring words // receive less weight , i . e . the index vector is weighted more . add ( toUpdate , iv , impactRate * ( 1 - semanticWeight ) ) ; toUpdate . addVector ( prevWordSemantics , impactRate * semanticWeight ) ;
public class dnsparameter { /** * Use this API to fetch all the dnsparameter resources that are configured on netscaler . */ public static dnsparameter get ( nitro_service service ) throws Exception { } }
dnsparameter obj = new dnsparameter ( ) ; dnsparameter [ ] response = ( dnsparameter [ ] ) obj . get_resources ( service ) ; return response [ 0 ] ;
public class BaselineProfile {
    /**
     * Check Bilevel Image: validates the Compression tag of the given IFD,
     * recording a validation error when the value is out of range.
     *
     * @param metadata the metadata (tag set) of the IFD
     * @param n        the IFD number, used to label any validation error
     */
    private void CheckBilevelImage(IfdTags metadata, int n) {
        // Compression
        // NOTE(review): assumes the Compression tag is present in this IFD;
        // metadata.get(...) returning null would NPE here — confirm callers
        // guarantee the tag exists before this check runs.
        long comp = metadata.get(TiffTags.getTagId("Compression")).getFirstNumericValue();
        // The stricter whitelist below was intentionally relaxed to a lower-bound check:
        // if (comp != 1 && comp != 2 && comp != 32773)
        if (comp < 1)
            validation.addError("Invalid Compression", "IFD" + n, comp);
    }
}
public class JcublasLapack {
    /**
     * QR DECOMP: computes the QR decomposition of {@code A} on the GPU via
     * cuSolver (single precision). On return, {@code A} holds Q (built by
     * cusolverDnSorgqr) and, when {@code R} is non-null, {@code R} holds the
     * upper-triangular factor.
     *
     * @param M    number of rows of A
     * @param N    number of columns of A
     * @param A    input matrix; overwritten with Q
     * @param R    output for the R factor, or null if R is not wanted
     * @param INFO single-int array receiving the cuSolver status for the geqrf step
     */
    @Override
    public void sgeqrf(int M, int N, INDArray A, INDArray R, INDArray INFO) {
        INDArray a = A;
        INDArray r = R;
        if (Nd4j.dataType() != DataType.FLOAT)
            log.warn("FLOAT getrf called in DOUBLE environment");
        // cuSolver expects column-major ('f') layout; copy if caller passed row-major.
        if (A.ordering() == 'c')
            a = A.dup('f');
        if (R != null && R.ordering() == 'c')
            r = R.dup('f');
        // tau holds the N Householder scalars produced by geqrf.
        INDArray tau = Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createFloat(N),
                Nd4j.getShapeInfoProvider().createShapeInformation(new long[] {1, N}, A.dataType()).getFirst());
        // Flush any queued ops so device buffers are up to date before the solver runs.
        if (Nd4j.getExecutioner() instanceof GridExecutioner)
            ((GridExecutioner) Nd4j.getExecutioner()).flushQueue();
        // Get context for current thread
        CudaContext ctx = (CudaContext) allocator.getDeviceContext().getContext();
        // setup the solver handles for cuSolver calls
        cusolverDnHandle_t handle = ctx.getSolverHandle();
        cusolverDnContext solverDn = new cusolverDnContext(handle);
        // synchronized on the solver
        synchronized (handle) {
            int result = cusolverDnSetStream(new cusolverDnContext(handle), new CUstream_st(ctx.getOldStream()));
            if (result != 0)
                throw new IllegalStateException("solverSetStream failed");
            // transfer the INDArray into GPU memory
            CublasPointer xAPointer = new CublasPointer(a, ctx);
            CublasPointer xTauPointer = new CublasPointer(tau, ctx);
            // this output - indicates how much memory we'll need for the real operation
            DataBuffer worksizeBuffer = Nd4j.getDataBufferFactory().createInt(1);
            int stat = cusolverDnSgeqrf_bufferSize(solverDn, M, N,
                    (FloatPointer) xAPointer.getDevicePointer(), M,
                    (IntPointer) worksizeBuffer.addressPointer() // we intentionally use host pointer here
            );
            if (stat != CUSOLVER_STATUS_SUCCESS) {
                throw new BlasException("cusolverDnSgeqrf_bufferSize failed", stat);
            }
            int worksize = worksizeBuffer.getInt(0);
            // Now allocate memory for the workspace, the permutation matrix and a return code
            Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType());
            // Do the actual QR decomp
            stat = cusolverDnSgeqrf(solverDn, M, N,
                    (FloatPointer) xAPointer.getDevicePointer(), M,
                    (FloatPointer) xTauPointer.getDevicePointer(),
                    new CudaPointer(workspace).asFloatPointer(), worksize,
                    new CudaPointer(allocator.getPointer(INFO, ctx)).asIntPointer());
            if (stat != CUSOLVER_STATUS_SUCCESS) {
                throw new BlasException("cusolverDnSgeqrf failed", stat);
            }
            // Mark device buffers touched so the allocator tracks them correctly.
            allocator.registerAction(ctx, a);
            // allocator.registerAction(ctx, tau);
            allocator.registerAction(ctx, INFO);
            if (INFO.getInt(0) != 0) {
                throw new BlasException("cusolverDnSgeqrf failed on INFO", INFO.getInt(0));
            }
            // Copy R (upper part of Q) into result
            if (r != null) {
                r.assign(a.get(NDArrayIndex.interval(0, a.columns()), NDArrayIndex.all()));
                // Zero out the strictly-lower triangle so only R remains.
                INDArrayIndex ix[] = new INDArrayIndex[2];
                for (int i = 1; i < Math.min(a.rows(), a.columns()); i++) {
                    ix[0] = NDArrayIndex.point(i);
                    ix[1] = NDArrayIndex.interval(0, i);
                    r.put(ix, 0);
                }
            }
            // Second pass: expand the Householder reflectors in 'a' into the explicit Q.
            stat = cusolverDnSorgqr_bufferSize(solverDn, M, N, N,
                    (FloatPointer) xAPointer.getDevicePointer(), M,
                    (FloatPointer) xTauPointer.getDevicePointer(),
                    (IntPointer) worksizeBuffer.addressPointer());
            worksize = worksizeBuffer.getInt(0);
            workspace = new Workspace(worksize * Nd4j.sizeOfDataType());
            stat = cusolverDnSorgqr(solverDn, M, N, N,
                    (FloatPointer) xAPointer.getDevicePointer(), M,
                    (FloatPointer) xTauPointer.getDevicePointer(),
                    new CudaPointer(workspace).asFloatPointer(), worksize,
                    new CudaPointer(allocator.getPointer(INFO, ctx)).asIntPointer());
            if (stat != CUSOLVER_STATUS_SUCCESS) {
                throw new BlasException("cusolverDnSorgqr failed", stat);
            }
        }
        allocator.registerAction(ctx, a);
        allocator.registerAction(ctx, INFO);
        // allocator.registerAction(ctx, tau);
        // Copy results back into the caller's arrays if we worked on f-ordered dups.
        if (a != A)
            A.assign(a);
        if (r != null && r != R)
            R.assign(r);
        log.info("A: {}", A);
        if (R != null)
            log.info("R: {}", R);
    }
}
public class CPDefinitionLinkPersistenceImpl {
    /**
     * Returns an ordered range of all the cp definition links where CProductId = &#63; and type = &#63;.
     *
     * <p>
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are not primary keys, they are indexes in the result set;
     * <code>0</code> refers to the first result. Setting both to {@link QueryUtil#ALL_POS} returns the
     * full result set. If <code>orderByComparator</code> is specified, the query includes the given
     * ORDER BY logic; if it is absent and pagination is required, the default ORDER BY from
     * {@link CPDefinitionLinkModelImpl} is used. If both are absent, for performance reasons the query
     * has no ORDER BY clause and the result set is sorted by primary key ascending.
     * </p>
     *
     * @param CProductId the c product ID
     * @param type the type
     * @param start the lower bound of the range of cp definition links
     * @param end the upper bound of the range of cp definition links (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching cp definition links
     */
    @Override
    public List<CPDefinitionLink> findByCP_T(long CProductId, String type, int start, int end, OrderByComparator<CPDefinitionLink> orderByComparator) {
        // Delegate to the full variant with retrieveFromCache enabled.
        return findByCP_T(CProductId, type, start, end, orderByComparator, true);
    }
}
public class SolrSearchService {
    /**
     * {@inheritDoc}
     *
     * <p>Convenience overload that delegates with a null index id — presumably
     * selecting the default index; confirm against the three-argument variant.</p>
     */
    public void delete(String site, String id) throws SearchException {
        delete(null, site, id);
    }
}
public class CmsXmlConfigUpdater { /** * Transforms a config file with an XSLT transform . * @ param name file name of the config file * @ param transform file name of the XSLT file * @ throws Exception if something goes wrong */ public void transform ( String name , String transform ) throws Exception { } }
File configFile = new File ( m_configDir , name ) ; File transformFile = new File ( m_xsltDir , transform ) ; try ( InputStream stream = new FileInputStream ( transformFile ) ) { StreamSource source = new StreamSource ( stream ) ; transform ( configFile , source ) ; }
public class BackgroundUtils { /** * Private method . * Returns a { @ link GradientDrawable } with * slightly rounded corners . * @ param color The desired color of the GradientDrawable * @ return A { @ link GradientDrawable } */ private static GradientDrawable getStandardBackground ( int color ) { } }
final GradientDrawable gradientDrawable = new GradientDrawable ( ) ; gradientDrawable . setCornerRadius ( BackgroundUtils . convertToDIP ( 4 ) ) ; gradientDrawable . setColor ( color ) ; return gradientDrawable ;
public class BaseDialogFragment {
    /**
     * Resolves the theme to be used for the dialog.
     *
     * <p>Resolution order: an explicit non-zero {@code getTheme()} wins; otherwise
     * the light/dark choice is taken from the Activity's theme, which can in turn
     * be overridden by the dark/light argument flags (dark takes precedence over
     * light when both are set).
     *
     * @return The theme resource id.
     */
    @StyleRes
    private int resolveTheme() {
        // First check if getTheme() returns some usable theme.
        int theme = getTheme();
        if (theme != 0) {
            return theme;
        }
        // Get the light/dark attribute from the Activity's Theme.
        boolean useLightTheme = isActivityThemeLight();
        // Now check if developer overrides the Activity's Theme with an argument.
        Bundle args = getArguments();
        if (args != null) {
            // Dark flag is checked first, so it wins if both flags are present.
            if (args.getBoolean(BaseDialogBuilder.ARG_USE_DARK_THEME)) {
                // Developer is explicitly using the dark theme.
                useLightTheme = false;
            } else if (args.getBoolean(BaseDialogBuilder.ARG_USE_LIGHT_THEME)) {
                // Developer is explicitly using the light theme.
                useLightTheme = true;
            }
        }
        return useLightTheme ? R.style.SDL_Dialog : R.style.SDL_Dark_Dialog;
    }
}
public class ExecutionRuntimeServices {
    /**
     * Sets the branch id of the current execution. May only be called while no
     * branch id is set — overwriting an existing branch id is rejected.
     *
     * @param brunchId the branch id to record (parameter name keeps the original
     *                 misspelling of "branch" to preserve the public signature)
     * @throws IllegalArgumentException if a branch id is already set
     */
    public void setBranchId(String brunchId) {
        // Guard: a branch id is write-once for an execution.
        Validate.isTrue(StringUtils.isEmpty(getBranchId()), "not allowed to overwrite branch id");
        contextMap.put(BRANCH_ID, brunchId);
    }
}
public class DistributionParameterImpl {
    /**
     * Sets the time unit, marking the feature as explicitly set and notifying
     * registered adapters (EMF unsettable-attribute pattern).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param newTimeUnit the new time unit; null falls back to the default
     * @generated
     */
    public void setTimeUnit(TimeUnit newTimeUnit) {
        TimeUnit oldTimeUnit = timeUnit;
        // Null maps to the EMF default literal rather than being stored directly.
        timeUnit = newTimeUnit == null ? TIME_UNIT_EDEFAULT : newTimeUnit;
        boolean oldTimeUnitESet = timeUnitESet;
        timeUnitESet = true;
        // Notification carries whether this is the first time the feature was set.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.DISTRIBUTION_PARAMETER__TIME_UNIT, oldTimeUnit, timeUnit, !oldTimeUnitESet));
    }
}
public class AmazonSageMakerWaiters {
    /**
     * Builds a NotebookInstanceInService waiter by using custom parameters waiterParameters and other parameters
     * defined in the waiters specification, and then polls until it determines whether the resource entered the desired
     * state or not, where polling criteria is bound by either default polling strategy or custom polling strategy.
     *
     * <p>Default polling: up to 60 attempts with a fixed 30-second delay
     * (i.e. up to ~30 minutes). The waiter succeeds on InService and fails
     * fast when the notebook instance enters the Failed state.
     */
    public Waiter<DescribeNotebookInstanceRequest> notebookInstanceInService() {
        return new WaiterBuilder<DescribeNotebookInstanceRequest, DescribeNotebookInstanceResult>()
                .withSdkFunction(new DescribeNotebookInstanceFunction(client))
                // Success acceptor first, then the terminal-failure acceptor.
                .withAcceptors(new NotebookInstanceInService.IsInServiceMatcher(), new NotebookInstanceInService.IsFailedMatcher())
                .withDefaultPollingStrategy(new PollingStrategy(new MaxAttemptsRetryStrategy(60), new FixedDelayStrategy(30)))
                .withExecutorService(executorService)
                .build();
    }
}
public class ApiOvhEmailexchange {
    /**
     * Create new shared mailbox in exchange server.
     *
     * REST: POST /email/exchange/{organizationName}/service/{exchangeService}/sharedAccount
     *
     * @param mailingFilter      [required] Enable mailing filtering
     * @param lastName           [required] Shared account last name
     * @param hiddenFromGAL      [required] Hide the shared account in Global Address List
     * @param initials           [required] Shared account initials
     * @param quota              [required] Shared account maximum size
     * @param displayName        [required] Shared account display name
     * @param sharedEmailAddress [required] Shared account email address
     * @param firstName          [required] Shared account first name
     * @param organizationName   [required] The internal name of your exchange organization
     * @param exchangeService    [required] The internal name of your exchange service
     * @return the task tracking the account creation
     * @throws IOException if the HTTP call or response conversion fails
     */
    public OvhTask organizationName_service_exchangeService_sharedAccount_POST(String organizationName, String exchangeService, String displayName, String firstName, Boolean hiddenFromGAL, String initials, String lastName, OvhMailingFilterEnum[] mailingFilter, Long quota, String sharedEmailAddress) throws IOException {
        // Path parameters are substituted into the template; body fields go in the map.
        String qPath = "/email/exchange/{organizationName}/service/{exchangeService}/sharedAccount";
        StringBuilder sb = path(qPath, organizationName, exchangeService);
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "displayName", displayName);
        addBody(o, "firstName", firstName);
        addBody(o, "hiddenFromGAL", hiddenFromGAL);
        addBody(o, "initials", initials);
        addBody(o, "lastName", lastName);
        addBody(o, "mailingFilter", mailingFilter);
        addBody(o, "quota", quota);
        addBody(o, "sharedEmailAddress", sharedEmailAddress);
        String resp = exec(qPath, "POST", sb.toString(), o);
        // The API returns an asynchronous task describing the creation progress.
        return convertTo(resp, OvhTask.class);
    }
}
public class NormalizerSerializer { /** * Get a serializer strategy the given normalizer * @ param normalizer the normalizer to find a compatible serializer strategy for * @ return the compatible strategy */ private NormalizerSerializerStrategy getStrategy ( Normalizer normalizer ) { } }
for ( NormalizerSerializerStrategy strategy : strategies ) { if ( strategySupportsNormalizer ( strategy , normalizer . getType ( ) , normalizer . getClass ( ) ) ) { return strategy ; } } throw new RuntimeException ( String . format ( "No serializer strategy found for normalizer of class %s. If this is a custom normalizer, you probably " + "forgot to register a corresponding custom serializer strategy with this serializer." , normalizer . getClass ( ) ) ) ;
public class Matrix { /** * Set a submatrix . * @ param i0 Initial row index * @ param i1 Final row index * @ param j0 Initial column index * @ param j1 Final column index * @ param X A ( i0 : i1 , j0 : j1) * @ throws ArrayIndexOutOfBoundsException Submatrix indices */ public void setMatrix ( int i0 , int i1 , int j0 , int j1 , Matrix X ) { } }
try { for ( int i = i0 ; i <= i1 ; i ++ ) { for ( int j = j0 ; j <= j1 ; j ++ ) { A [ i ] [ j ] = X . get ( i - i0 , j - j0 ) ; } } } catch ( ArrayIndexOutOfBoundsException e ) { throw new ArrayIndexOutOfBoundsException ( "Submatrix indices" ) ; }
public class ExistPolicyIndex {
    /**
     * Creates an XML Document from the policy document string.
     *
     * <p>Security: the parser is hardened against XXE — DOCTYPE declarations and
     * external general/parameter entities are disabled, since the policy text may
     * originate outside this process. Policies containing a DOCTYPE will now be
     * rejected at parse time (flagged here rather than silently allowed).
     *
     * @param document the policy XML as a string
     * @return the parsed DOM document
     * @throws PolicyIndexException if the parser cannot be configured or the
     *                              document is malformed or unreadable
     */
    protected static Document createDocument(String document) throws PolicyIndexException {
        // parse policy document and create dom
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        DocumentBuilder builder;
        try {
            // Harden against XXE (OWASP recommendation): no DOCTYPE, no external entities.
            factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
            factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
            factory.setXIncludeAware(false);
            factory.setExpandEntityReferences(false);
            builder = factory.newDocumentBuilder();
            Document doc = builder.parse(new InputSource(new StringReader(document)));
            return doc;
        } catch (ParserConfigurationException e) {
            throw new PolicyIndexException(e);
        } catch (SAXException e) {
            throw new PolicyIndexException(e);
        } catch (IOException e) {
            throw new PolicyIndexException(e);
        }
    }
}
public class GetStatusPResponse {
    /**
     * <code>optional .alluxio.grpc.file.FileInfo fileInfo = 1;</code>
     *
     * @return the file info message, or the protobuf default instance when the
     *         field has not been set (standard protobuf-generated accessor)
     */
    public alluxio.grpc.FileInfoOrBuilder getFileInfoOrBuilder() {
        return fileInfo_ == null ? alluxio.grpc.FileInfo.getDefaultInstance() : fileInfo_;
    }
}
public class ColumnPath {
    /**
     * Returns true if the field corresponding to fieldID is set (has been
     * assigned a value) and false otherwise (Thrift-generated accessor).
     *
     * @param field the field to check; must not be null
     * @throws IllegalArgumentException if field is null
     * @throws IllegalStateException if the enum value is unknown (should be unreachable)
     */
    public boolean isSet(_Fields field) {
        if (field == null) {
            throw new IllegalArgumentException();
        }
        switch (field) {
            case COLUMN_FAMILY:
                return isSetColumn_family();
            case SUPER_COLUMN:
                return isSetSuper_column();
            case COLUMN:
                return isSetColumn();
        }
        // Defensive: every enum constant is handled above.
        throw new IllegalStateException();
    }
}
public class ExpressionParser { /** * Add mapper to mapper list . * @ param bean * @ return */ public ExpressionParser addMapper ( Object bean ) { } }
mapperList . add ( ( s ) -> ExpressionParser . getValue ( bean , s ) ) ; return this ;