signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DerivedQueryCreator { /** * Builds a full AQL query from a built Disjunction , additional information from PartTree and special parameters * caught by ArangoParameterAccessor * @ param criteria * @ param sort * @ return */ @ Override protected String complete ( final Criteria criteria , final Sort sort ) { } }
if ( tree . isDistinct ( ) && ! tree . isCountProjection ( ) ) { LOGGER . debug ( "Use of 'Distinct' is meaningful only in count queries" ) ; } final StringBuilder query = new StringBuilder ( ) ; final String with = withCollections . stream ( ) . collect ( Collectors . joining ( ", " ) ) ; if ( ! with . isEmpty ( ) ) { query . append ( "WITH " ) . append ( with ) . append ( " " ) ; } query . append ( "FOR " ) . append ( "e" ) . append ( " IN " ) . append ( collectionName ) ; if ( ! criteria . getPredicate ( ) . isEmpty ( ) ) { query . append ( " FILTER " ) . append ( criteria . getPredicate ( ) ) ; } if ( tree . isCountProjection ( ) || tree . isExistsProjection ( ) ) { if ( tree . isDistinct ( ) ) { query . append ( " COLLECT entity = " ) . append ( "e" ) ; } query . append ( " COLLECT WITH COUNT INTO length" ) ; } String sortString = " " + AqlUtils . buildSortClause ( sort , "e" ) ; if ( ( ! this . geoFields . isEmpty ( ) || isUnique != null && isUnique ) && ! tree . isDelete ( ) && ! tree . isCountProjection ( ) && ! tree . isExistsProjection ( ) ) { final String distanceSortKey = " SORT " + Criteria . distance ( uniqueLocation , bind ( getUniquePoint ( ) [ 0 ] ) , bind ( getUniquePoint ( ) [ 1 ] ) ) . getPredicate ( ) ; if ( sort . isUnsorted ( ) ) { sortString = distanceSortKey ; } else { sortString = distanceSortKey + ", " + sortString . substring ( 5 , sortString . length ( ) ) ; } } query . append ( sortString ) ; if ( tree . isLimiting ( ) ) { query . append ( " LIMIT " ) . append ( tree . getMaxResults ( ) ) ; } final Pageable pageable = accessor . getPageable ( ) ; if ( pageable != null && pageable . isPaged ( ) ) { query . append ( " " ) . append ( AqlUtils . buildLimitClause ( pageable ) ) ; } if ( tree . isDelete ( ) ) { query . append ( " REMOVE e IN " ) . append ( collectionName ) ; } else if ( tree . isCountProjection ( ) || tree . isExistsProjection ( ) ) { query . append ( " RETURN length" ) ; } else { query . append ( " RETURN " ) ; if ( this . 
geoFields . isEmpty ( ) ) { query . append ( "e" ) ; } else { query . append ( format ( "MERGE(e, { '_distance': %s })" , Criteria . distance ( uniqueLocation , bind ( getUniquePoint ( ) [ 0 ] ) , bind ( getUniquePoint ( ) [ 1 ] ) ) . getPredicate ( ) ) ) ; } } return query . toString ( ) ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcCondenserTypeEnum createIfcCondenserTypeEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcCondenserTypeEnum result = IfcCondenserTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class CmsPrincipalSelectionList { /** * Returns if the list of principals has principals of other organizational units . < p > * @ return if the list of principals has principals of other organizational units */ public boolean hasPrincipalsInOtherOus ( ) { } }
if ( m_hasPrincipalsInOtherOus == null ) { // lazzy initialization m_hasPrincipalsInOtherOus = Boolean . FALSE ; try { Iterator < CmsPrincipal > itPrincipals = getPrincipals ( true ) . iterator ( ) ; while ( itPrincipals . hasNext ( ) ) { CmsPrincipal principal = itPrincipals . next ( ) ; if ( ! principal . getOuFqn ( ) . equals ( getCms ( ) . getRequestContext ( ) . getCurrentUser ( ) . getOuFqn ( ) ) ) { m_hasPrincipalsInOtherOus = Boolean . TRUE ; break ; } } } catch ( Exception e ) { // ignore } } return m_hasPrincipalsInOtherOus . booleanValue ( ) ;
public class FileTag { /** * read source file * @ throws PageException */ public void actionUpload ( ) throws PageException { } }
FormItem item = getFormItem ( pageContext , filefield ) ; Struct cffile = _actionUpload ( pageContext , securityManager , item , strDestination , nameconflict , accept , strict , mode , attributes , acl , serverPassword ) ; if ( StringUtil . isEmpty ( result ) ) { pageContext . undefinedScope ( ) . set ( KeyConstants . _file , cffile ) ; pageContext . undefinedScope ( ) . set ( "cffile" , cffile ) ; } else { pageContext . setVariable ( result , cffile ) ; }
public class CrystalTransform { /** * helper function to format simple fractions into rationals * @ param coef * @ return */ private String formatCoef ( double coef ) { } }
double tol = 1e-6 ; // rounding tolerance // zero case if ( Math . abs ( coef ) < tol ) { return "0" ; } // integer case long num = Math . round ( coef ) ; if ( Math . abs ( num - coef ) < tol ) { return Long . toString ( num ) ; } // Other small cases for ( int denom = 2 ; denom < 12 ; denom ++ ) { num = Math . round ( coef * denom ) ; if ( num - coef * denom < tol ) { return String . format ( "%d/%d" , num , denom ) ; } } // Give up and use floating point ; return String . format ( "%.3f" , coef ) ;
public class ApiTokenClient { /** * Creates a new data store . * @ param file CSV file that should be uploaded ( N . B . max 50MB ) * @ param name name to use in the Load Impact web - console * @ param fromline Payload from this line ( 1st line is 1 ) . Set to value 2 , if the CSV file starts with a headings line * @ param separator field separator , one of { @ link com . loadimpact . resource . DataStore . Separator } * @ param delimiter surround delimiter for text - strings , one of { @ link com . loadimpact . resource . DataStore . StringDelimiter } * @ return { @ link com . loadimpact . resource . DataStore } */ public DataStore createDataStore ( final File file , final String name , final int fromline , final DataStore . Separator separator , final DataStore . StringDelimiter delimiter ) { } }
return invoke ( DATA_STORES , new RequestClosure < JsonObject > ( ) { @ Override public JsonObject call ( Invocation . Builder request ) { MultiPart form = new FormDataMultiPart ( ) . field ( "name" , name ) . field ( "fromline" , Integer . toString ( fromline ) ) . field ( "separator" , separator . param ( ) ) . field ( "delimiter" , delimiter . param ( ) ) . bodyPart ( new FileDataBodyPart ( "file" , file , new MediaType ( "text" , "csv" ) ) ) ; return request . post ( Entity . entity ( form , form . getMediaType ( ) ) , JsonObject . class ) ; } } , new ResponseClosure < JsonObject , DataStore > ( ) { @ Override public DataStore call ( JsonObject json ) { return new DataStore ( json ) ; } } ) ;
public class JavaCompilerMojo { /** * Checks whether or not an event on the given file should trigger the Java compilation . * @ param file the file * @ return { @ literal true } if the given file is a JAva file and is contained in the Java source directory . */ @ Override public boolean accept ( File file ) { } }
return WatcherUtils . isInDirectory ( file , WatcherUtils . getJavaSource ( basedir ) ) ;
public class Discover { /** * Filter the results release dates to matches that include this value . * @ param year * @ return */ public Discover year ( int year ) { } }
if ( checkYear ( year ) ) { params . add ( Param . YEAR , year ) ; } return this ;
public class ApplicationCache { /** * Adds the label to the applications for the account . * @ param label The label to add */ public void addLabel ( Label label ) { } }
// Add the label to any applications it is associated with List < Long > applicationIds = label . getLinks ( ) . getApplications ( ) ; for ( long applicationId : applicationIds ) { Application application = applications . get ( applicationId ) ; if ( application != null ) labels ( applicationId ) . add ( label ) ; else logger . severe ( String . format ( "Unable to find application for label '%s': %d" , label . getKey ( ) , applicationId ) ) ; }
public class PrimaveraPMFileWriter { /** * Writes an activity to a PM XML file . * @ param mpxj MPXJ Task instance */ private void writeActivity ( Task mpxj ) { } }
ActivityType xml = m_factory . createActivityType ( ) ; m_project . getActivity ( ) . add ( xml ) ; Task parentTask = mpxj . getParentTask ( ) ; Integer parentObjectID = parentTask == null ? null : parentTask . getUniqueID ( ) ; xml . setActualStartDate ( mpxj . getActualStart ( ) ) ; xml . setActualFinishDate ( mpxj . getActualFinish ( ) ) ; xml . setAtCompletionDuration ( getDuration ( mpxj . getDuration ( ) ) ) ; xml . setCalendarObjectId ( getCalendarUniqueID ( mpxj . getCalendar ( ) ) ) ; xml . setDurationPercentComplete ( getPercentage ( mpxj . getPercentageComplete ( ) ) ) ; xml . setDurationType ( DURATION_TYPE_MAP . get ( mpxj . getType ( ) ) ) ; xml . setFinishDate ( mpxj . getFinish ( ) ) ; xml . setGUID ( DatatypeConverter . printUUID ( mpxj . getGUID ( ) ) ) ; xml . setId ( getActivityID ( mpxj ) ) ; xml . setName ( mpxj . getName ( ) ) ; xml . setObjectId ( mpxj . getUniqueID ( ) ) ; xml . setPercentComplete ( getPercentage ( mpxj . getPercentageComplete ( ) ) ) ; xml . setPercentCompleteType ( "Duration" ) ; xml . setPrimaryConstraintType ( CONSTRAINT_TYPE_MAP . get ( mpxj . getConstraintType ( ) ) ) ; xml . setPrimaryConstraintDate ( mpxj . getConstraintDate ( ) ) ; xml . setPlannedDuration ( getDuration ( mpxj . getDuration ( ) ) ) ; xml . setPlannedFinishDate ( mpxj . getFinish ( ) ) ; xml . setPlannedStartDate ( mpxj . getStart ( ) ) ; xml . setProjectObjectId ( PROJECT_OBJECT_ID ) ; xml . setRemainingDuration ( getDuration ( mpxj . getRemainingDuration ( ) ) ) ; xml . setRemainingEarlyFinishDate ( mpxj . getEarlyFinish ( ) ) ; xml . setRemainingEarlyStartDate ( mpxj . getResume ( ) ) ; xml . setRemainingLaborCost ( NumberHelper . DOUBLE_ZERO ) ; xml . setRemainingLaborUnits ( NumberHelper . DOUBLE_ZERO ) ; xml . setRemainingNonLaborCost ( NumberHelper . DOUBLE_ZERO ) ; xml . setRemainingNonLaborUnits ( NumberHelper . DOUBLE_ZERO ) ; xml . setStartDate ( mpxj . getStart ( ) ) ; xml . setStatus ( getActivityStatus ( mpxj ) ) ; xml . 
setType ( extractAndConvertTaskType ( mpxj ) ) ; xml . setWBSObjectId ( parentObjectID ) ; xml . getUDF ( ) . addAll ( writeUDFType ( FieldTypeClass . TASK , mpxj ) ) ; writePredecessors ( mpxj ) ;
public class ParametersAction { /** * Allow an other build of the same project to be scheduled , if it has other parameters . */ public boolean shouldSchedule ( List < Action > actions ) { } }
List < ParametersAction > others = Util . filter ( actions , ParametersAction . class ) ; if ( others . isEmpty ( ) ) { return ! parameters . isEmpty ( ) ; } else { // I don ' t think we need multiple ParametersActions , but let ' s be defensive Set < ParameterValue > params = new HashSet < > ( ) ; for ( ParametersAction other : others ) { params . addAll ( other . parameters ) ; } return ! params . equals ( new HashSet < > ( this . parameters ) ) ; }
public class CRF { /** * Returns the most likely label sequence given the feature sequence by the * forward - backward algorithm . * @ param x a sequence of sparse features taking values in [ 0 , p ) about each * position of original sequence , where p is the number of features . * @ return the most likely label sequence . */ private int [ ] predictForwardBackward ( int [ ] [ ] x ) { } }
int n = x . length ; // length of sequence TrellisNode [ ] [ ] trellis = getTrellis ( x ) ; double [ ] scaling = new double [ n ] ; forward ( trellis , scaling ) ; backward ( trellis ) ; int [ ] label = new int [ n ] ; double [ ] p = new double [ numClasses ] ; for ( int i = 0 ; i < n ; i ++ ) { TrellisNode [ ] ti = trellis [ i ] ; for ( int j = 0 ; j < numClasses ; j ++ ) { TrellisNode tij = ti [ j ] ; p [ j ] = tij . alpha * tij . beta ; } double max = Double . NEGATIVE_INFINITY ; for ( int j = 0 ; j < numClasses ; j ++ ) { if ( max < p [ j ] ) { max = p [ j ] ; label [ i ] = j ; } } } return label ;
public class BmrClient { /** * List the instances belonging to the target instance in the BMR cluster . * @ param clusterId the ID of target BMR cluster . * @ param instanceGroupId the ID of target instance group . * @ return The response containing a list of Instance objects . */ public ListInstancesResponse listInstances ( String clusterId , String instanceGroupId ) { } }
return listInstances ( new ListInstancesRequest ( ) . withClusterId ( clusterId ) . withInstanceGroupId ( instanceGroupId ) ) ;
public class Descriptors { /** * Creates a { @ link FilterDescriptor } for a filter class . * @ param < F > * @ param < C > * @ param filterClass * @ return */ public static < F extends Filter < C > , C extends Enum < C > > FilterDescriptor < F , C > ofFilter ( final Class < F > filterClass ) { } }
return new AnnotationBasedFilterComponentDescriptor < > ( filterClass ) ;
public class XMLSerializer { /** * For maximum efficiency when writing indents the required output is * pre - computed This is internal function that recomputes buffer after user * requested chnages . */ protected void rebuildIndentationBuf ( ) { } }
if ( doIndent == false ) return ; final int maxIndent = 65 ; // hardcoded maximum indentation size in // characters int bufSize = 0 ; offsetNewLine = 0 ; if ( writeLineSepartor ) { offsetNewLine = lineSeparator . length ( ) ; bufSize += offsetNewLine ; } maxIndentLevel = 0 ; if ( writeIndentation ) { indentationJump = indentationString . length ( ) ; maxIndentLevel = maxIndent / indentationJump ; bufSize += maxIndentLevel * indentationJump ; } if ( indentationBuf == null || indentationBuf . length < bufSize ) { indentationBuf = new char [ bufSize + 8 ] ; } int bufPos = 0 ; if ( writeLineSepartor ) { for ( int i = 0 ; i < lineSeparator . length ( ) ; i ++ ) { indentationBuf [ bufPos ++ ] = lineSeparator . charAt ( i ) ; } } if ( writeIndentation ) { for ( int i = 0 ; i < maxIndentLevel ; i ++ ) { for ( int j = 0 ; j < indentationString . length ( ) ; j ++ ) { indentationBuf [ bufPos ++ ] = indentationString . charAt ( j ) ; } } }
public class JobConf { /** * Set the maximum amount of memory any task of this job can use . See * { @ link # MAPRED _ TASK _ MAXVMEM _ PROPERTY } * mapred . task . maxvmem is split into * mapred . job . map . memory . mb * and mapred . job . map . memory . mb , mapred * each of the new key are set * as mapred . task . maxvmem / 1024 * as new values are in MB * @ param vmem Maximum amount of virtual memory in bytes any task of this job * can use . * @ see # getMaxVirtualMemoryForTask ( ) * @ deprecated * Use { @ link # setMemoryForMapTask ( long mem ) } and * Use { @ link # setMemoryForReduceTask ( long mem ) } */ @ Deprecated public void setMaxVirtualMemoryForTask ( long vmem ) { } }
LOG . warn ( "setMaxVirtualMemoryForTask() is deprecated." + "Instead use setMemoryForMapTask() and setMemoryForReduceTask()" ) ; if ( vmem != DISABLED_MEMORY_LIMIT && vmem < 0 ) { setMemoryForMapTask ( DISABLED_MEMORY_LIMIT ) ; setMemoryForReduceTask ( DISABLED_MEMORY_LIMIT ) ; } if ( get ( JobConf . MAPRED_TASK_MAXVMEM_PROPERTY ) == null ) { setMemoryForMapTask ( vmem / ( 1024 * 1024 ) ) ; // Changing bytes to mb setMemoryForReduceTask ( vmem / ( 1024 * 1024 ) ) ; // Changing bytes to mb } else { this . setLong ( JobConf . MAPRED_TASK_MAXVMEM_PROPERTY , vmem ) ; }
public class VirtualNetworkGatewaysInner { /** * Creates or updates a virtual network gateway in the specified resource group . * @ param resourceGroupName The name of the resource group . * @ param virtualNetworkGatewayName The name of the virtual network gateway . * @ param parameters Parameters supplied to create or update virtual network gateway operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the VirtualNetworkGatewayInner object if successful . */ public VirtualNetworkGatewayInner createOrUpdate ( String resourceGroupName , String virtualNetworkGatewayName , VirtualNetworkGatewayInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , virtualNetworkGatewayName , parameters ) . toBlocking ( ) . last ( ) . body ( ) ;
public class Grammar { /** * Adds new rule if the same rule doesn ' t exist already . * @ param reducer Reducer method . * @ param nonterminal Left hand side of the rule . * @ param rhs Strings which are either nonterminal names , terminal names or * anonymous terminals . Anonymous terminals are regular expressions inside * apostrophes . E . g ' [ 0-9 ] + ' */ public void addRule ( ExecutableElement reducer , String nonterminal , String document , boolean synthetic , List < String > rhs ) { } }
Grammar . R rule = new Grammar . R ( nonterminal , rhs , reducer , document , synthetic ) ; if ( ! ruleSet . contains ( rule ) ) { rule . number = ruleNumber ++ ; ruleSet . add ( rule ) ; lhsMap . add ( nonterminal , rule ) ; if ( ! nonterminalMap . containsKey ( nonterminal ) ) { Grammar . NT nt = new Grammar . NT ( nonterminal ) ; nonterminalMap . put ( nonterminal , nt ) ; symbolMap . put ( nonterminal , nt ) ; numberMap . put ( nt . number , nt ) ; } for ( String s : rhs ) { if ( isAnonymousTerminal ( s ) ) { String expression = s . substring ( 1 , s . length ( ) - 1 ) ; addAnonymousTerminal ( expression ) ; } } }
public class ESClient { /** * Sets the refresh indexes . * @ param puProps * the pu props * @ param externalProperties * the external properties */ private void setRefreshIndexes ( Properties puProps , Map < String , Object > externalProperties ) { } }
Object refreshIndexes = null ; /* * Check from properties set while creating emf */ if ( externalProperties . get ( ESConstants . KUNDERA_ES_REFRESH_INDEXES ) != null ) { refreshIndexes = externalProperties . get ( ESConstants . KUNDERA_ES_REFRESH_INDEXES ) ; } /* * Check from PU Properties */ if ( refreshIndexes == null && puProps . get ( ESConstants . KUNDERA_ES_REFRESH_INDEXES ) != null ) { refreshIndexes = puProps . get ( ESConstants . KUNDERA_ES_REFRESH_INDEXES ) ; } if ( refreshIndexes != null ) { if ( refreshIndexes instanceof Boolean ) { this . setRereshIndexes = ( boolean ) refreshIndexes ; } else { this . setRereshIndexes = Boolean . parseBoolean ( ( String ) refreshIndexes ) ; } }
public class ThreadLocalMappedDiagnosticContext { /** * Get the context identified by the < code > key < / code > parameter . */ @ Override public String get ( String key ) { } }
Map < String , String > map = inheritableThreadLocal . get ( ) ; if ( ( map != null ) && ( key != null ) ) { return map . get ( key ) ; } else { return null ; }
public class TangramBuilder { /** * init a { @ link TangramEngine } builder with build - in resource inited , such as registering build - in card and cell . Users use this builder to regiser custom card and cell , then call { @ link InnerBuilder # build ( ) } to create a { @ link TangramEngine } instance . * @ param context activity context * @ return a { @ link TangramEngine } builder */ @ NonNull public static InnerBuilder newInnerBuilder ( @ NonNull final Context context ) { } }
if ( ! TangramBuilder . isInitialized ( ) ) { throw new IllegalStateException ( "Tangram must be init first" ) ; } DefaultResolverRegistry registry = new DefaultResolverRegistry ( ) ; // install default cards & mCells installDefaultRegistry ( registry ) ; return new InnerBuilder ( context , registry ) ;
public class IPRangeCollection { /** * performs a binary search over sorted list * @ param addr * @ return */ public IPRangeNode findFast ( String addr ) { } }
InetAddress iaddr ; try { iaddr = InetAddress . getByName ( addr ) ; } catch ( UnknownHostException ex ) { return null ; } return findFast ( iaddr ) ;
public class BugLoader { /** * Performs an analysis and returns the BugSet created * @ param p * The Project to run the analysis on * @ param progressCallback * the progressCallBack is supposed to be supplied by analyzing * dialog , FindBugs supplies progress information while it runs * the analysis * @ return the bugs found * @ throws InterruptedException * @ throws IOException */ public static BugCollection doAnalysis ( @ Nonnull Project p , FindBugsProgress progressCallback ) throws IOException , InterruptedException { } }
StringWriter stringWriter = new StringWriter ( ) ; BugCollectionBugReporter pcb = new BugCollectionBugReporter ( p , new PrintWriter ( stringWriter , true ) ) ; pcb . setPriorityThreshold ( Priorities . LOW_PRIORITY ) ; IFindBugsEngine fb = createEngine ( p , pcb ) ; fb . setUserPreferences ( getUserPreferences ( ) ) ; fb . setProgressCallback ( progressCallback ) ; fb . setProjectName ( p . getProjectName ( ) ) ; fb . execute ( ) ; String warnings = stringWriter . toString ( ) ; if ( warnings . length ( ) > 0 ) { JTextArea tp = new JTextArea ( warnings ) ; tp . setEditable ( false ) ; JScrollPane pane = new JScrollPane ( tp ) ; pane . setPreferredSize ( new Dimension ( 600 , 400 ) ) ; JOptionPane . showMessageDialog ( MainFrame . getInstance ( ) , pane , "Analysis errors" , JOptionPane . WARNING_MESSAGE ) ; } return pcb . getBugCollection ( ) ;
public class OutputPropertiesFactory { /** * Creates an empty OutputProperties with the property key / value defaults specified by * a property file . The method argument is used to construct a string of * the form output _ [ method ] . properties ( for instance , output _ html . properties ) . * The output _ xml . properties file is always used as the base . * < p > Anything other than ' text ' , ' xml ' , and ' html ' , will * use the output _ xml . properties file . < / p > * @ param method non - null reference to method name . * @ return Properties object that holds the defaults for the given method . */ static public final Properties getDefaultMethodProperties ( String method ) { } }
String fileName = null ; Properties defaultProperties = null ; // According to this article : Double - check locking does not work // http : / / www . javaworld . com / javaworld / jw - 02-2001 / jw - 0209 - toolbox . html try { synchronized ( m_synch_object ) { if ( null == m_xml_properties ) // double check { fileName = PROP_FILE_XML ; m_xml_properties = loadPropertiesFile ( fileName , null ) ; } } if ( method . equals ( Method . XML ) ) { defaultProperties = m_xml_properties ; } else if ( method . equals ( Method . HTML ) ) { if ( null == m_html_properties ) // double check { fileName = PROP_FILE_HTML ; m_html_properties = loadPropertiesFile ( fileName , m_xml_properties ) ; } defaultProperties = m_html_properties ; } else if ( method . equals ( Method . TEXT ) ) { if ( null == m_text_properties ) // double check { fileName = PROP_FILE_TEXT ; m_text_properties = loadPropertiesFile ( fileName , m_xml_properties ) ; if ( null == m_text_properties . getProperty ( OutputKeys . ENCODING ) ) { String mimeEncoding = Encodings . getMimeEncoding ( null ) ; m_text_properties . put ( OutputKeys . ENCODING , mimeEncoding ) ; } } defaultProperties = m_text_properties ; } else if ( method . equals ( Method . UNKNOWN ) ) { if ( null == m_unknown_properties ) // double check { fileName = PROP_FILE_UNKNOWN ; m_unknown_properties = loadPropertiesFile ( fileName , m_xml_properties ) ; } defaultProperties = m_unknown_properties ; } else { // TODO : Calculate res file from name . defaultProperties = m_xml_properties ; } } catch ( IOException ioe ) { throw new WrappedRuntimeException ( Utils . messages . createMessage ( MsgKey . ER_COULD_NOT_LOAD_METHOD_PROPERTY , new Object [ ] { fileName , method } ) , ioe ) ; } // wrap these cached defaultProperties in a new Property object just so // that the caller of this method can ' t modify the default values return new Properties ( defaultProperties ) ;
public class RuntimePermissionUtils { /** * Returns true if the context has access to any given permissions . */ private static boolean hasSelfPermissions ( Context context , String ... permissions ) { } }
for ( String permission : permissions ) { if ( checkSelfPermission ( context , permission ) == PackageManager . PERMISSION_GRANTED ) { return true ; } } return false ;
public class AppServicePlansInner { /** * Gets all selectable sku ' s for a given App Service Plan . * Gets all selectable sku ' s for a given App Service Plan . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of App Service Plan * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the Object object */ public Observable < Object > getServerFarmSkusAsync ( String resourceGroupName , String name ) { } }
return getServerFarmSkusWithServiceResponseAsync ( resourceGroupName , name ) . map ( new Func1 < ServiceResponse < Object > , Object > ( ) { @ Override public Object call ( ServiceResponse < Object > response ) { return response . body ( ) ; } } ) ;
public class Roster { /** * Creates a new roster item . The server will asynchronously update the roster with the subscription status . * There will be no presence subscription request . Consider using * { @ link # createItemAndRequestSubscription ( BareJid , String , String [ ] ) } if you also want to request a presence * subscription from the contact . * @ param jid the XMPP address of the contact ( e . g . johndoe @ jabber . org ) * @ param name the nickname of the user . * @ param groups the list of group names the entry will belong to , or < tt > null < / tt > if the the roster entry won ' t * belong to a group . * @ throws NoResponseException if there was no response from the server . * @ throws XMPPErrorException if an XMPP exception occurs . * @ throws NotLoggedInException If not logged in . * @ throws NotConnectedException * @ throws InterruptedException * @ since 4.4.0 */ public void createItem ( BareJid jid , String name , String [ ] groups ) throws NotLoggedInException , NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
final XMPPConnection connection = getAuthenticatedConnectionOrThrow ( ) ; // Create and send roster entry creation packet . RosterPacket rosterPacket = new RosterPacket ( ) ; rosterPacket . setType ( IQ . Type . set ) ; RosterPacket . Item item = new RosterPacket . Item ( jid , name ) ; if ( groups != null ) { for ( String group : groups ) { if ( group != null && group . trim ( ) . length ( ) > 0 ) { item . addGroupName ( group ) ; } } } rosterPacket . addRosterItem ( item ) ; connection . createStanzaCollectorAndSend ( rosterPacket ) . nextResultOrThrow ( ) ;
public class GrailsASTUtils { /** * Determines if the class or interface represented by the superClass * argument is either the same as , or is a superclass or superinterface of , * the class or interface represented by the specified subClass parameter . * @ param superClass The super class to check * @ param childClass The sub class the check * @ return true if the childClass is either equal to or a sub class of the specified superClass */ public static boolean isAssignableFrom ( ClassNode superClass , ClassNode childClass ) { } }
ClassNode currentSuper = childClass ; while ( currentSuper != null ) { if ( currentSuper . equals ( superClass ) ) { return true ; } currentSuper = currentSuper . getSuperClass ( ) ; } return false ;
public class BeanDefinitionParserUtils { /** * Creates new bean definition from bean class and registers new bean in parser registry . * Returns bean definition holder . * @ param beanId * @ param beanClass * @ param parserContext * @ param shouldFireEvents */ public static BeanDefinitionHolder registerBean ( String beanId , Class < ? > beanClass , ParserContext parserContext , boolean shouldFireEvents ) { } }
return registerBean ( beanId , BeanDefinitionBuilder . genericBeanDefinition ( beanClass ) . getBeanDefinition ( ) , parserContext , shouldFireEvents ) ;
public class AccuracyWeightedEnsemble { /** * Predicts a class for an example . */ public double [ ] getVotesForInstance ( Instance inst ) { } }
DoubleVector combinedVote = new DoubleVector ( ) ; if ( this . trainingWeightSeenByModel > 0.0 ) { for ( int i = 0 ; i < this . ensemble . length ; i ++ ) { if ( this . ensembleWeights [ i ] > 0.0 ) { DoubleVector vote = new DoubleVector ( this . ensemble [ i ] . getVotesForInstance ( inst ) ) ; if ( vote . sumOfValues ( ) > 0.0 ) { vote . normalize ( ) ; // scale weight and prevent overflow vote . scaleValues ( this . ensembleWeights [ i ] / ( 1.0 * this . ensemble . length + 1 ) ) ; combinedVote . addValues ( vote ) ; } } } } combinedVote . normalize ( ) ; return combinedVote . getArrayRef ( ) ;
public class MathBindings { /** * Binding for { @ link java . lang . Math # subtractExact ( int , int ) } * @ param x the first value * @ param y the second value to subtract from the first * @ return the result * @ throws ArithmeticException if the result overflows an int */ public static IntegerBinding subtractExact ( final int x , final ObservableIntegerValue y ) { } }
return createIntegerBinding ( ( ) -> Math . subtractExact ( x , y . get ( ) ) , y ) ;
public class DBInitializerHelper { /** * Initialization script for root node . */ public static String getRootNodeInitializeScript ( String itemTableName , boolean multiDb ) { } }
String singeDbScript = "insert into " + itemTableName + "(ID, PARENT_ID, NAME, CONTAINER_NAME, VERSION, I_CLASS, I_INDEX, " + "N_ORDER_NUM) VALUES('" + Constants . ROOT_PARENT_UUID + "', '" + Constants . ROOT_PARENT_UUID + "', '" + Constants . ROOT_PARENT_NAME + "', '" + Constants . ROOT_PARENT_CONAINER_NAME + "', 0, 0, 0, 0)" ; String multiDbScript = "insert into " + itemTableName + "(ID, PARENT_ID, NAME, VERSION, I_CLASS, I_INDEX, " + "N_ORDER_NUM) VALUES('" + Constants . ROOT_PARENT_UUID + "', '" + Constants . ROOT_PARENT_UUID + "', '" + Constants . ROOT_PARENT_NAME + "', 0, 0, 0, 0)" ; return multiDb ? multiDbScript : singeDbScript ;
public class dnsaaaarec { /** * Use this API to delete dnsaaaarec . */ public static base_response delete ( nitro_service client , dnsaaaarec resource ) throws Exception { } }
dnsaaaarec deleteresource = new dnsaaaarec ( ) ; deleteresource . hostname = resource . hostname ; deleteresource . ipv6address = resource . ipv6address ; return deleteresource . delete_resource ( client ) ;
public class ElasticHashinator { /** * Convenience method for generating a deterministic token distribution for the ring based * on a given partition count and tokens per partition . Each partition will have N tokens * placed randomly on the ring . */ public static byte [ ] getConfigureBytes ( int partitionCount , int tokenCount ) { } }
Preconditions . checkArgument ( partitionCount > 0 ) ; Preconditions . checkArgument ( tokenCount > partitionCount ) ; Buckets buckets = new Buckets ( partitionCount , tokenCount ) ; ElasticHashinator hashinator = new ElasticHashinator ( buckets . getTokens ( ) ) ; return hashinator . getConfigBytes ( ) ;
public class EthiopicChronology { /** * Gets an instance of the EthiopicChronology in the given time zone . * @ param zone the time zone to get the chronology in , null is default * @ param minDaysInFirstWeek minimum number of days in first week of the year ; default is 4 * @ return a chronology in the specified time zone */ public static EthiopicChronology getInstance ( DateTimeZone zone , int minDaysInFirstWeek ) { } }
if ( zone == null ) { zone = DateTimeZone . getDefault ( ) ; } EthiopicChronology chrono ; EthiopicChronology [ ] chronos = cCache . get ( zone ) ; if ( chronos == null ) { chronos = new EthiopicChronology [ 7 ] ; EthiopicChronology [ ] oldChronos = cCache . putIfAbsent ( zone , chronos ) ; if ( oldChronos != null ) { chronos = oldChronos ; } } try { chrono = chronos [ minDaysInFirstWeek - 1 ] ; } catch ( ArrayIndexOutOfBoundsException e ) { throw new IllegalArgumentException ( "Invalid min days in first week: " + minDaysInFirstWeek ) ; } if ( chrono == null ) { synchronized ( chronos ) { chrono = chronos [ minDaysInFirstWeek - 1 ] ; if ( chrono == null ) { if ( zone == DateTimeZone . UTC ) { // First create without a lower limit . chrono = new EthiopicChronology ( null , null , minDaysInFirstWeek ) ; // Impose lower limit and make another EthiopicChronology . DateTime lowerLimit = new DateTime ( 1 , 1 , 1 , 0 , 0 , 0 , 0 , chrono ) ; chrono = new EthiopicChronology ( LimitChronology . getInstance ( chrono , lowerLimit , null ) , null , minDaysInFirstWeek ) ; } else { chrono = getInstance ( DateTimeZone . UTC , minDaysInFirstWeek ) ; chrono = new EthiopicChronology ( ZonedChronology . getInstance ( chrono , zone ) , null , minDaysInFirstWeek ) ; } chronos [ minDaysInFirstWeek - 1 ] = chrono ; } } } return chrono ;
public class TwoDScrollView { /** * < p > Handles scrolling in response to a " home / end " shortcut press . This * method will scroll the view to the top or bottom and give the focus * to the topmost / bottommost component in the new visible area . If no * component is a good candidate for focus , this scrollview reclaims the * focus . < / p > * @ param direction the scroll direction : { @ link android . view . View # FOCUS _ UP } * to go the top of the view or * { @ link android . view . View # FOCUS _ DOWN } to go the bottom * @ return true if the key event is consumed by this method , false otherwise */ public boolean fullScroll ( int direction , boolean horizontal ) { } }
if ( ! horizontal ) { boolean down = direction == View . FOCUS_DOWN ; int height = getHeight ( ) ; mTempRect . top = 0 ; mTempRect . bottom = height ; if ( down ) { int count = getChildCount ( ) ; if ( count > 0 ) { View view = getChildAt ( count - 1 ) ; mTempRect . bottom = view . getBottom ( ) ; mTempRect . top = mTempRect . bottom - height ; } } return scrollAndFocus ( direction , mTempRect . top , mTempRect . bottom , 0 , 0 , 0 ) ; } else { boolean right = direction == View . FOCUS_DOWN ; int width = getWidth ( ) ; mTempRect . left = 0 ; mTempRect . right = width ; if ( right ) { int count = getChildCount ( ) ; if ( count > 0 ) { View view = getChildAt ( count - 1 ) ; mTempRect . right = view . getBottom ( ) ; mTempRect . left = mTempRect . right - width ; } } return scrollAndFocus ( 0 , 0 , 0 , direction , mTempRect . top , mTempRect . bottom ) ; }
public class GetResourceMetricsResult { /** * An array of metric results , , where each array element contains all of the data points for a particular dimension . * @ param metricList * An array of metric results , , where each array element contains all of the data points for a particular * dimension . */ public void setMetricList ( java . util . Collection < MetricKeyDataPoints > metricList ) { } }
if ( metricList == null ) { this . metricList = null ; return ; } this . metricList = new java . util . ArrayList < MetricKeyDataPoints > ( metricList ) ;
public class Binding { /** * < pre > * Role that is assigned to ` members ` . * For example , ` roles / viewer ` , ` roles / editor ` , or ` roles / owner ` . * Required * < / pre > * < code > string role = 1 ; < / code > */ public com . google . protobuf . ByteString getRoleBytes ( ) { } }
java . lang . Object ref = role_ ; if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; role_ = b ; return b ; } else { return ( com . google . protobuf . ByteString ) ref ; }
public class LittleEndianSerializerDataOutputStream { /** * Writes an { @ code int } as specified by * { @ link DataOutputStream # writeInt ( int ) } , except using little - endian byte * order . * @ throws IOException * if an I / O error occurs */ @ Override public void writeInt ( int v ) throws IOException { } }
outputStream . write ( 0xFF & v ) ; outputStream . write ( 0xFF & ( v >> 8 ) ) ; outputStream . write ( 0xFF & ( v >> 16 ) ) ; outputStream . write ( 0xFF & ( v >> 24 ) ) ; bytesWritten += 4 ;
public class NodeUtil { /** * Whether the given name is constant by coding convention . */ static boolean isConstantByConvention ( CodingConvention convention , Node node ) { } }
Node parent = node . getParent ( ) ; if ( parent . isGetProp ( ) && node == parent . getLastChild ( ) ) { return convention . isConstantKey ( node . getString ( ) ) ; } else if ( mayBeObjectLitKey ( node ) ) { return convention . isConstantKey ( node . getString ( ) ) ; } else if ( node . isName ( ) ) { return convention . isConstant ( node . getString ( ) ) ; } return false ;
public class DeveloperUtilitiesServiceProgrammatic { public Clob downloadLayout ( final Object domainObject ) { } }
final ObjectAdapter adapterFor = adapterManager . adapterFor ( domainObject ) ; final ObjectSpecification objectSpec = adapterFor . getSpecification ( ) ; final LayoutJsonExporter exporter = new LayoutJsonExporter ( ) ; final String json = exporter . asJson ( objectSpec ) ; return new Clob ( objectSpec . getShortIdentifier ( ) + ".layout.json" , mimeTypeApplicationJson , json ) ;
public class Record { /** * Returns the { @ code column ' s } byte array data contained in { @ code buffer } . * @ param buffer { @ code byte [ ] } ; must be non - null and of the same length as * { @ link # getRecordSize ( ) } * @ param column Column to read * @ return { @ code byte [ ] } * @ throws InvalidArgument Thrown if { @ code buffer } is null or invalid */ public final byte [ ] readColumn ( byte [ ] buffer , int column ) { } }
if ( buffer == null ) { throw new InvalidArgument ( "buffer" , buffer ) ; } else if ( buffer . length != recordSize ) { final String fmt = "invalid buffer (%d bytes, expected %d)" ; final String msg = format ( fmt , buffer . length , recordSize ) ; throw new InvalidArgument ( msg ) ; } int i = 0 , offset = 0 ; for ( ; i < column ; i ++ ) { offset += columns [ i ] . size ; } Column < ? > c = columns [ i ] ; byte [ ] ret = new byte [ c . size ] ; arraycopy ( buffer , offset , ret , 0 , c . size ) ; return ret ;
public class MainActivity { /** * This animation was intended to keep a pressed state of the Drawable * @ return Animation */ private Animator preparePressedAnimation ( ) { } }
Animator animation = ObjectAnimator . ofFloat ( drawable , CircularProgressDrawable . CIRCLE_SCALE_PROPERTY , drawable . getCircleScale ( ) , 0.65f ) ; animation . setDuration ( 120 ) ; return animation ;
public class CSVTableSaveFile { /** * Returns a more CSV data in UTF - 8 format . Returns null when there is no * more data . May block . * @ return null if there is no more data or a byte array contain some number * of complete CSV lines * @ throws IOException */ public byte [ ] read ( ) throws IOException { } }
if ( m_exception . get ( ) != null ) { throw m_exception . get ( ) ; } byte bytes [ ] = null ; if ( m_activeConverters . get ( ) == 0 ) { bytes = m_available . poll ( ) ; } else { try { bytes = m_available . take ( ) ; } catch ( InterruptedException e ) { throw new IOException ( e ) ; } } if ( bytes != null ) { m_availableBytes . addAndGet ( - 1 * bytes . length ) ; } return bytes ;
public class OptimizeParameters { /** * This reference set is a candidate for parameter - moving if : * - if all call sites are known ( no aliasing ) * - if all definition sites are known ( the possible values are known functions ) * - there is at least one definition */ private boolean isCandidate ( String name , ArrayList < Node > refs ) { } }
if ( ! OptimizeCalls . mayBeOptimizableName ( compiler , name ) ) { return false ; } boolean seenCandidateDefiniton = false ; boolean seenCandidateUse = false ; for ( Node n : refs ) { // TODO ( johnlenz ) : Determine what to do about " . constructor " references . // Currently classes that are super classes or have superclasses aren ' t optimized // if ( parent . isCall ( ) & & n ! = parent . getFirstChild ( ) & & isClassDefiningCall ( parent ) ) { // continue ; // } else if ( ReferenceMap . isCallOrNewTarget ( n ) ) { // TODO ( johnlenz ) : filter . apply when we support it seenCandidateUse = true ; } else if ( isCandidateDefinition ( n ) ) { seenCandidateDefiniton = true ; } else { // If this isn ' t an non - aliasing reference ( typeof , instanceof , etc ) // then there is nothing that can be done . if ( ! OptimizeCalls . isAllowedReference ( n ) ) { // TODO ( johnlenz ) : allow extends clauses . return false ; } } } return seenCandidateDefiniton && seenCandidateUse ;
public class TextToSpeech { /** * List custom words . * Lists all of the words and their translations for the specified custom voice model . The output shows the * translations as they are defined in the model . You must use credentials for the instance of the service that owns a * model to list its words . * * * Note : * * This method is currently a beta release . * * * See also : * * [ Querying all words from a custom * model ] ( https : / / cloud . ibm . com / docs / services / text - to - speech / custom - entries . html # cuWordsQueryModel ) . * @ param listWordsOptions the { @ link ListWordsOptions } containing the options for the call * @ return a { @ link ServiceCall } with a response type of { @ link Words } */ public ServiceCall < Words > listWords ( ListWordsOptions listWordsOptions ) { } }
Validator . notNull ( listWordsOptions , "listWordsOptions cannot be null" ) ; String [ ] pathSegments = { "v1/customizations" , "words" } ; String [ ] pathParameters = { listWordsOptions . customizationId ( ) } ; RequestBuilder builder = RequestBuilder . get ( RequestBuilder . constructHttpUrl ( getEndPoint ( ) , pathSegments , pathParameters ) ) ; Map < String , String > sdkHeaders = SdkCommon . getSdkHeaders ( "text_to_speech" , "v1" , "listWords" ) ; for ( Entry < String , String > header : sdkHeaders . entrySet ( ) ) { builder . header ( header . getKey ( ) , header . getValue ( ) ) ; } builder . header ( "Accept" , "application/json" ) ; return createServiceCall ( builder . build ( ) , ResponseConverterUtils . getObject ( Words . class ) ) ;
public class AgentLoader { /** * Returns process id . Note that Java does not guarantee any format for id , * so this is just a common heuristic . * @ return Current process id */ private static String getPID ( ) { } }
String vmName = ManagementFactory . getRuntimeMXBean ( ) . getName ( ) ; return vmName . substring ( 0 , vmName . indexOf ( "@" ) ) ;
public class WFieldRenderer { /** * Paints the given WField . * @ param component the WField to paint . * @ param renderContext the RenderContext to paint to . */ @ Override public void doRender ( final WComponent component , final WebXmlRenderContext renderContext ) { } }
WField field = ( WField ) component ; XmlStringBuilder xml = renderContext . getWriter ( ) ; int inputWidth = field . getInputWidth ( ) ; xml . appendTagOpen ( "ui:field" ) ; xml . appendAttribute ( "id" , component . getId ( ) ) ; xml . appendOptionalAttribute ( "class" , component . getHtmlClass ( ) ) ; xml . appendOptionalAttribute ( "track" , component . isTracking ( ) , "true" ) ; xml . appendOptionalAttribute ( "hidden" , field . isHidden ( ) , "true" ) ; xml . appendOptionalAttribute ( "inputWidth" , inputWidth > 0 , inputWidth ) ; xml . appendClose ( ) ; // Label WLabel label = field . getLabel ( ) ; if ( label != null ) { label . paint ( renderContext ) ; } // Field if ( field . getField ( ) != null ) { xml . appendTag ( "ui:input" ) ; field . getField ( ) . paint ( renderContext ) ; if ( field . getErrorIndicator ( ) != null ) { field . getErrorIndicator ( ) . paint ( renderContext ) ; } if ( field . getWarningIndicator ( ) != null ) { field . getWarningIndicator ( ) . paint ( renderContext ) ; } xml . appendEndTag ( "ui:input" ) ; } xml . appendEndTag ( "ui:field" ) ;
public class ArquillianDescriptorImpl { /** * / * ( non - Javadoc ) * @ see org . jboss . arquillian . impl . configuration . api . ArquillianDescriptor # getContainers ( ) */ @ Override public List < ContainerDef > getContainers ( ) { } }
List < ContainerDef > containers = new ArrayList < ContainerDef > ( ) ; for ( Node container : model . get ( "container" ) ) { containers . add ( new ContainerDefImpl ( getDescriptorName ( ) , model , container ) ) ; } return containers ;
public class JAXBAnnotationsHelper { /** * Puts definitions for XML attribute . * @ param member annotations provider * @ param property property instance to be updated */ private static void applyAttribute ( Annotated member , Schema property ) { } }
final XmlAttribute attribute = member . getAnnotation ( XmlAttribute . class ) ; if ( attribute != null ) { final XML xml = getXml ( property ) ; xml . setAttribute ( true ) ; setName ( attribute . namespace ( ) , attribute . name ( ) , property ) ; }
public class CacheProviderWrapper { /** * This method is used only by default cache provider ( cache . java ) . Do nothing . */ @ Override public void addToTimeLimitDaemon ( Object id , long expirationTime , int inactivity ) { } }
final String methodName = "addToTimeLimitDaemon()" ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " cacheName=" + cacheName + " ERROR because it should not be called" ) ; }
public class Stream { /** * Creates a new Stream over the elements of the supplied { @ code Iterable } . If the supplied * { @ code Iterable } is a { @ code Stream } , it will be returned unchanged . If the supplied * { @ code Iterable } implements { @ code Closeable } , method { @ link Closeable # close ( ) } will be * called when the { @ code Stream } is closed . * @ param iterable * an { @ code Iterable } of non - null elements , possibly empty but not null * @ param < T > * the type of element * @ return a { @ code Stream } over the elements in the { @ code Iterable } */ @ SuppressWarnings ( "unchecked" ) public static < T > Stream < T > create ( final Iterable < ? extends T > iterable ) { } }
if ( iterable instanceof Stream ) { return ( Stream < T > ) iterable ; } else if ( iterable instanceof ImmutableCollection < ? > && ( ( ImmutableCollection < ? extends T > ) iterable ) . isEmpty ( ) ) { return new EmptyStream < T > ( ) ; } else { return new IterableStream < T > ( iterable ) ; }
public class ViewHelper { /** * Equivalent to calling View . setTag * @ param cacheView The cache of views to get the view from * @ param viewId The id of the view whose tag should change * @ param tag An Object to tag the view with */ public static void setTag ( EfficientCacheView cacheView , int viewId , Object tag ) { } }
View view = cacheView . findViewByIdEfficient ( viewId ) ; if ( view != null ) { view . setTag ( tag ) ; }
public class Style { /** * Convert a dash array to string . * @ param dash the dash to convert . * @ param shift the shift . * @ return the string representation . */ public static String dashToString ( float [ ] dash , Float shift ) { } }
StringBuilder sb = new StringBuilder ( ) ; if ( shift != null ) sb . append ( shift ) ; for ( int i = 0 ; i < dash . length ; i ++ ) { if ( shift != null || i > 0 ) { sb . append ( "," ) ; } sb . append ( ( int ) dash [ i ] ) ; } return sb . toString ( ) ;
public class PrimaryBackupServiceContext { /** * Sets the current timestamp . * @ param timestamp the updated timestamp * @ return the current timestamp */ public long setTimestamp ( long timestamp ) { } }
this . currentTimestamp = timestamp ; service . tick ( WallClockTimestamp . from ( timestamp ) ) ; return currentTimestamp ;
public class CommerceAccountOrganizationRelLocalServiceBaseImpl { /** * Returns a range of all the commerce account organization rels . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . account . model . impl . CommerceAccountOrganizationRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of commerce account organization rels * @ param end the upper bound of the range of commerce account organization rels ( not inclusive ) * @ return the range of commerce account organization rels */ @ Override public List < CommerceAccountOrganizationRel > getCommerceAccountOrganizationRels ( int start , int end ) { } }
return commerceAccountOrganizationRelPersistence . findAll ( start , end ) ;
public class KiteConnect { /** * Gets account balance and cash margin details for a particular segment . * Example for segment can be equity or commodity . * @ param segment can be equity or commodity . * @ return Margins object . * @ throws KiteException is thrown for all Kite trade related errors . * @ throws JSONException is thrown when there is exception while parsing response . * @ throws IOException is thrown when there is connection error . */ public Margin getMargins ( String segment ) throws KiteException , JSONException , IOException { } }
String url = routes . get ( "user.margins.segment" ) . replace ( ":segment" , segment ) ; JSONObject response = new KiteRequestHandler ( proxy ) . getRequest ( url , apiKey , accessToken ) ; return gson . fromJson ( String . valueOf ( response . get ( "data" ) ) , Margin . class ) ;
public class Table { /** * / * Add a new Cell in the current row . * Adds to the table after this call and before next call to newRow , * newCell or newHeader are added to the cell . * @ return This table for call chaining */ public Table addCell ( Object o , String attributes ) { } }
addCell ( o ) ; cell . attribute ( attributes ) ; return this ;
public class QrCodeUtil { /** * 生成二维码或条形码图片 < br > * 只有二维码时QrConfig中的图片才有效 * @ param content 文本内容 * @ param format 格式 , 可选二维码 、 条形码等 * @ param config 二维码配置 , 包括长 、 宽 、 边距 、 颜色等 * @ return 二维码图片 ( 黑白 ) * @ since 4.1.14 */ public static BufferedImage generate ( String content , BarcodeFormat format , QrConfig config ) { } }
final BitMatrix bitMatrix = encode ( content , format , config ) ; final BufferedImage image = toImage ( bitMatrix , config . foreColor , config . backColor ) ; final Image logoImg = config . img ; if ( null != logoImg && BarcodeFormat . QR_CODE == format ) { // 只有二维码可以贴图 final int qrWidth = image . getWidth ( ) ; final int qrHeight = image . getHeight ( ) ; int width ; int height ; // 按照最短的边做比例缩放 if ( qrWidth < qrHeight ) { width = qrWidth / config . ratio ; height = logoImg . getHeight ( null ) * width / logoImg . getWidth ( null ) ; } else { height = qrHeight / config . ratio ; width = logoImg . getWidth ( null ) * height / logoImg . getHeight ( null ) ; } Img . from ( image ) . pressImage ( Img . from ( logoImg ) . round ( 0.3 ) . getImg ( ) , // 圆角 new Rectangle ( width , height ) , 1 ) ; } return image ;
public class CodedConstant { /** * Given a field type , return the wire type . * @ param type the type * @ param isPacked the is packed * @ return the wire format for field type * @ returns One of the { @ code WIRETYPE _ } constants defined in { @ link WireFormat } . */ static int getWireFormatForFieldType ( final WireFormat . FieldType type , boolean isPacked ) { } }
if ( isPacked ) { return WireFormat . WIRETYPE_LENGTH_DELIMITED ; } else { return type . getWireType ( ) ; }
public class ActionResponseReflector { protected String toJsonBySwitchedJsonEngine ( JsonManager jsonManager , Object jsonResult , Supplier < RealJsonEngine > jsonEngineSwitcher ) { } }
final RealJsonEngine switchedEngine = jsonEngineSwitcher . get ( ) ; // application ' s callback if ( switchedEngine == null ) { // check for user method throw new IllegalStateException ( "The jsonEngineSwitcher cannot return null: " + jsonEngineSwitcher ) ; } return switchedEngine . toJson ( jsonResult ) ;
public class MariaDbStatement { /** * Adds the given SQL command to the current list of commands for this < code > Statement < / code > * object . The send in this list can be executed as a batch by calling the method * < code > executeBatch < / code > . * @ param sql typically this is a SQL < code > INSERT < / code > or < code > UPDATE < / code > statement * @ throws SQLException if a database access error occurs , this method is called on a closed * < code > Statement < / code > or the driver does not support batch updates * @ see # executeBatch * @ see DatabaseMetaData # supportsBatchUpdates */ public void addBatch ( final String sql ) throws SQLException { } }
if ( batchQueries == null ) { batchQueries = new ArrayList < > ( ) ; } if ( sql == null ) { throw ExceptionMapper . getSqlException ( "null cannot be set to addBatch( String sql)" ) ; } batchQueries . add ( sql ) ;
public class IdentityPatchRunner { /** * Rollback a patch . * @ param patchID the patch id * @ param context the patch context * @ throws PatchingException */ private void rollback ( final String patchID , final IdentityPatchContext context ) throws PatchingException { } }
try { // Load the patch history final PatchingTaskContext . Mode mode = context . getMode ( ) ; final Patch originalPatch = loadPatchInformation ( patchID , installedImage ) ; final RollbackPatch rollbackPatch = loadRollbackInformation ( patchID , installedImage ) ; final Patch . PatchType patchType = rollbackPatch . getIdentity ( ) . getPatchType ( ) ; final InstalledIdentity history = rollbackPatch . getIdentityState ( ) ; // Process originals by type first final LinkedHashMap < String , PatchElement > originalLayers = new LinkedHashMap < String , PatchElement > ( ) ; final LinkedHashMap < String , PatchElement > originalAddOns = new LinkedHashMap < String , PatchElement > ( ) ; for ( final PatchElement patchElement : originalPatch . getElements ( ) ) { final PatchElementProvider provider = patchElement . getProvider ( ) ; final String layerName = provider . getName ( ) ; final LayerType layerType = provider . getLayerType ( ) ; final Map < String , PatchElement > originals ; switch ( layerType ) { case Layer : originals = originalLayers ; break ; case AddOn : originals = originalAddOns ; break ; default : throw new IllegalStateException ( ) ; } if ( ! originals . containsKey ( layerName ) ) { originals . put ( layerName , patchElement ) ; } else { throw PatchLogger . ROOT_LOGGER . installationDuplicateLayer ( layerType . toString ( ) , layerName ) ; } } // Process the rollback xml for ( final PatchElement patchElement : rollbackPatch . getElements ( ) ) { final String elementPatchId = patchElement . getId ( ) ; final PatchElementProvider provider = patchElement . getProvider ( ) ; final String layerName = provider . getName ( ) ; final LayerType layerType = provider . getLayerType ( ) ; final LinkedHashMap < String , PatchElement > originals ; switch ( layerType ) { case Layer : originals = originalLayers ; break ; case AddOn : originals = originalAddOns ; break ; default : throw new IllegalStateException ( ) ; } final PatchElement original = originals . 
remove ( layerName ) ; if ( original == null ) { throw PatchLogger . ROOT_LOGGER . noSuchLayer ( layerName ) ; } final IdentityPatchContext . PatchEntry entry = context . resolveForElement ( patchElement ) ; // Create the rollback PatchingTasks . rollback ( elementPatchId , original . getModifications ( ) , patchElement . getModifications ( ) , entry , ContentItemFilter . ALL_BUT_MISC , mode ) ; entry . rollback ( original . getId ( ) ) ; // We need to restore the previous state final Patch . PatchType elementPatchType = provider . getPatchType ( ) ; final PatchableTarget . TargetInfo info ; if ( layerType == LayerType . AddOn ) { info = history . getAddOn ( layerName ) . loadTargetInfo ( ) ; } else { info = history . getLayer ( layerName ) . loadTargetInfo ( ) ; } if ( mode == ROLLBACK ) { restoreFromHistory ( entry , elementPatchId , elementPatchType , info ) ; } } if ( ! originalLayers . isEmpty ( ) || ! originalAddOns . isEmpty ( ) ) { throw PatchLogger . ROOT_LOGGER . invalidRollbackInformation ( ) ; } // Rollback the patch final IdentityPatchContext . PatchEntry identity = context . getIdentityEntry ( ) ; PatchingTasks . rollback ( patchID , originalPatch . getModifications ( ) , rollbackPatch . getModifications ( ) , identity , ContentItemFilter . MISC_ONLY , mode ) ; identity . rollback ( patchID ) ; // Restore previous state if ( mode == ROLLBACK ) { final PatchableTarget . TargetInfo identityHistory = history . getIdentity ( ) . loadTargetInfo ( ) ; restoreFromHistory ( identity , rollbackPatch . getPatchId ( ) , patchType , identityHistory ) ; if ( patchType == Patch . PatchType . CUMULATIVE ) { reenableNotOverridenModules ( rollbackPatch , context ) ; } } if ( patchType == Patch . PatchType . CUMULATIVE ) { final Identity . IdentityUpgrade upgrade = rollbackPatch . getIdentity ( ) . forType ( Patch . PatchType . CUMULATIVE , Identity . IdentityUpgrade . class ) ; identity . setResultingVersion ( upgrade . 
getResultingVersion ( ) ) ; } } catch ( Exception e ) { throw rethrowException ( e ) ; }
public class Bytes { /** * Creates a Bytes object by copying the data of the given ByteBuffer . * @ param bb Data will be read from this ByteBuffer in such a way that its position is not * changed . */ public static final Bytes of ( ByteBuffer bb ) { } }
Objects . requireNonNull ( bb ) ; if ( bb . remaining ( ) == 0 ) { return EMPTY ; } byte [ ] data ; if ( bb . hasArray ( ) ) { data = Arrays . copyOfRange ( bb . array ( ) , bb . position ( ) + bb . arrayOffset ( ) , bb . limit ( ) + bb . arrayOffset ( ) ) ; } else { data = new byte [ bb . remaining ( ) ] ; // duplicate so that it does not change position bb . duplicate ( ) . get ( data ) ; } return new Bytes ( data ) ;
public class TableInfo { /** * Adds the element collection metadata . * @ param elementCollectionMetadata * the element collection metadata */ public void addElementCollectionMetadata ( CollectionColumnInfo elementCollectionMetadata ) { } }
if ( this . elementCollectionMetadatas == null ) { this . elementCollectionMetadatas = new ArrayList < CollectionColumnInfo > ( ) ; } if ( ! elementCollectionMetadatas . contains ( elementCollectionMetadata ) ) { elementCollectionMetadatas . add ( elementCollectionMetadata ) ; }
public class ServerPluginRepository { /** * Move the plugins recently downloaded to extensions / plugins . */ private void moveDownloadedPlugins ( ) { } }
if ( fs . getDownloadedPluginsDir ( ) . exists ( ) ) { for ( File sourceFile : listJarFiles ( fs . getDownloadedPluginsDir ( ) ) ) { overrideAndRegisterPlugin ( sourceFile ) ; } }
public class TCPMasterConnection { /** * Tests if this < tt > TCPMasterConnection < / tt > is connected . * @ return < tt > true < / tt > if connected , < tt > false < / tt > otherwise . */ public synchronized boolean isConnected ( ) { } }
if ( connected && socket != null ) { if ( ! socket . isConnected ( ) || socket . isClosed ( ) || socket . isInputShutdown ( ) || socket . isOutputShutdown ( ) ) { try { socket . close ( ) ; } catch ( IOException e ) { logger . error ( "Socket exception" , e ) ; } finally { connected = false ; } } else { /* * When useUrgentData is set , a byte of urgent data * will be sent to the server to test the connection . If * the connection is actually broken , an IException will * occur and the connection will be closed . * Note : RFC 6093 has decreed that we stop using urgent * data . */ if ( useUrgentData ) { try { socket . sendUrgentData ( 0 ) ; ModbusUtil . sleep ( 5 ) ; } catch ( IOException e ) { connected = false ; try { socket . close ( ) ; } catch ( IOException e1 ) { // Do nothing . } } } } } return connected ;
public class ModelBuilder3D { /** * Translates the template ring system to new coordinates . * @ param originalCoord original coordinates of the placed ring atom from template * @ param newCoord new coordinates from branch placement * @ param ac AtomContainer contains atoms of ring system */ private void translateStructure ( Point3d originalCoord , Point3d newCoord , IAtomContainer ac ) { } }
Point3d transVector = new Point3d ( originalCoord ) ; transVector . sub ( newCoord ) ; for ( int i = 0 ; i < ac . getAtomCount ( ) ; i ++ ) { if ( ! ( ac . getAtom ( i ) . getFlag ( CDKConstants . ISPLACED ) ) ) { ac . getAtom ( i ) . getPoint3d ( ) . sub ( transVector ) ; // ac . getAtomAt ( i ) . setFlag ( CDKConstants . ISPLACED , true ) ; } }
public class JsonStringToJsonIntermediateConverter { /** * Parses a provided JsonObject input using the provided JsonArray schema into * a JsonObject . * @ param record * @ param schema * @ return * @ throws DataConversionException */ private JsonObject parse ( JsonObject record , JsonSchema schema ) throws DataConversionException { } }
JsonObject output = new JsonObject ( ) ; for ( int i = 0 ; i < schema . fieldsCount ( ) ; i ++ ) { JsonSchema schemaElement = schema . getFieldSchemaAt ( i ) ; String columnKey = schemaElement . getColumnName ( ) ; JsonElement parsed ; if ( ! record . has ( columnKey ) ) { output . add ( columnKey , JsonNull . INSTANCE ) ; continue ; } JsonElement columnValue = record . get ( columnKey ) ; switch ( schemaElement . getType ( ) ) { case UNION : parsed = parseUnionType ( schemaElement , columnValue ) ; break ; case ENUM : parsed = parseEnumType ( schemaElement , columnValue ) ; break ; default : if ( columnValue . isJsonArray ( ) ) { parsed = parseJsonArrayType ( schemaElement , columnValue ) ; } else if ( columnValue . isJsonObject ( ) ) { parsed = parseJsonObjectType ( schemaElement , columnValue ) ; } else { parsed = parsePrimitiveType ( schemaElement , columnValue ) ; } } output . add ( columnKey , parsed ) ; } return output ;
public class ExtensionNamespacesManager { /** * Get the index for a namespace entry in the extension namespace Vector , - 1 if * no such entry yet exists . */ public int namespaceIndex ( String namespace , Vector extensions ) { } }
for ( int i = 0 ; i < extensions . size ( ) ; i ++ ) { if ( ( ( ExtensionNamespaceSupport ) extensions . get ( i ) ) . getNamespace ( ) . equals ( namespace ) ) return i ; } return - 1 ;
public class ConfigurationDialog { /** * / * Panel to choose underline Colors * @ since 4.2 */ JPanel getUnderlineColorPanel ( List < Rule > rules ) { } }
JPanel panel = new JPanel ( ) ; panel . setLayout ( new GridBagLayout ( ) ) ; GridBagConstraints cons = new GridBagConstraints ( ) ; cons . gridx = 0 ; cons . gridy = 0 ; cons . weightx = 0.0f ; cons . fill = GridBagConstraints . NONE ; cons . anchor = GridBagConstraints . NORTHWEST ; List < String > categories = new ArrayList < String > ( ) ; for ( Rule rule : rules ) { String category = rule . getCategory ( ) . getName ( ) ; boolean contain = false ; for ( String c : categories ) { if ( c . equals ( category ) ) { contain = true ; break ; } } if ( ! contain ) { categories . add ( category ) ; } } List < JLabel > categorieLabel = new ArrayList < JLabel > ( ) ; List < JLabel > underlineLabel = new ArrayList < JLabel > ( ) ; List < JButton > changeButton = new ArrayList < JButton > ( ) ; List < JButton > defaultButton = new ArrayList < JButton > ( ) ; for ( int nCat = 0 ; nCat < categories . size ( ) ; nCat ++ ) { categorieLabel . add ( new JLabel ( categories . get ( nCat ) + " " ) ) ; underlineLabel . add ( new JLabel ( " \u2588\u2588\u2588 " ) ) ; // \ u2587 is smaller underlineLabel . get ( nCat ) . setForeground ( config . getUnderlineColor ( categories . get ( nCat ) ) ) ; underlineLabel . get ( nCat ) . setBackground ( config . getUnderlineColor ( categories . get ( nCat ) ) ) ; JLabel uLabel = underlineLabel . get ( nCat ) ; String cLabel = categories . get ( nCat ) ; panel . add ( categorieLabel . get ( nCat ) , cons ) ; cons . gridx ++ ; panel . add ( underlineLabel . get ( nCat ) , cons ) ; changeButton . add ( new JButton ( messages . getString ( "guiUColorChange" ) ) ) ; changeButton . get ( nCat ) . addActionListener ( new ActionListener ( ) { @ Override public void actionPerformed ( ActionEvent e ) { Color oldColor = uLabel . getForeground ( ) ; Color newColor = JColorChooser . showDialog ( null , messages . getString ( "guiUColorDialogHeader" ) , oldColor ) ; if ( newColor != null && newColor != oldColor ) { uLabel . 
setForeground ( newColor ) ; config . setUnderlineColor ( cLabel , newColor ) ; } } } ) ; cons . gridx ++ ; panel . add ( changeButton . get ( nCat ) , cons ) ; defaultButton . add ( new JButton ( messages . getString ( "guiUColorDefault" ) ) ) ; defaultButton . get ( nCat ) . addActionListener ( new ActionListener ( ) { @ Override public void actionPerformed ( ActionEvent e ) { config . setDefaultUnderlineColor ( cLabel ) ; uLabel . setForeground ( config . getUnderlineColor ( cLabel ) ) ; } } ) ; cons . gridx ++ ; panel . add ( defaultButton . get ( nCat ) , cons ) ; cons . gridx = 0 ; cons . gridy ++ ; } return panel ;
public class MuxServer { /** * Grabs the channel for writing . * @ param channel the channel * @ return true if the channel has permission to write . */ OutputStream writeChannel ( int channel ) throws IOException { } }
// Spin until we either acquire the shared write lock or the output stream is gone.
// Each failed attempt waits up to 5s for the current holder to release the lock.
while (os != null) {
    boolean canWrite = false;
    synchronized (WRITE_LOCK) {
        if (!isWriteLocked) {
            isWriteLocked = true;
            canWrite = true;
        } else {
            try {
                WRITE_LOCK.wait(5000);
            } catch (InterruptedException e) {
                // Fix: the original swallowed the interrupt and kept spinning.
                // Restore the thread's interrupt status and surface the abort
                // as an I/O failure (InterruptedIOException IS-A IOException,
                // so the declared contract is unchanged).
                Thread.currentThread().interrupt();
                throw new java.io.InterruptedIOException(
                        "interrupted while waiting for write lock on channel " + channel);
            }
        }
    }
    if (canWrite) {
        // Frame header: 'C' marker followed by the 16-bit channel id, high byte first.
        os.write('C');
        os.write(channel >> 8);
        os.write(channel);
        return os;
    }
}
// Stream was closed/cleared before we could acquire the lock.
return null;
public class Currency { /** * Registers a new currency for the provided locale . The returned object * is a key that can be used to unregister this currency object . * < p > Because ICU may choose to cache Currency objects internally , this must * be called at application startup , prior to any calls to * Currency . getInstance to avoid undefined behavior . * @ param currency the currency to register * @ param locale the ulocale under which to register the currency * @ return a registry key that can be used to unregister this currency * @ see # unregister * @ hide unsupported on Android */ public static Object registerInstance ( Currency currency , ULocale locale ) { } }
// Pure delegation: the service shim owns the locale -> currency registry,
// so registration (and the returned unregister key) is handled there.
return getShim().registerInstance(currency, locale);
public class AWSServiceCatalogClient { /** * Lists all provisioning artifacts ( also known as versions ) for the specified product . * @ param listProvisioningArtifactsRequest * @ return Result of the ListProvisioningArtifacts operation returned by the service . * @ throws ResourceNotFoundException * The specified resource was not found . * @ throws InvalidParametersException * One or more parameters provided to the operation are not valid . * @ sample AWSServiceCatalog . ListProvisioningArtifacts * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / servicecatalog - 2015-12-10 / ListProvisioningArtifacts " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ListProvisioningArtifactsResult listProvisioningArtifacts ( ListProvisioningArtifactsRequest request ) { } }
// Generated AWS SDK dispatch: run the request-handler/validation chain,
// then execute the marshalled ListProvisioningArtifacts operation.
request = beforeClientExecution(request);
return executeListProvisioningArtifacts(request);
public class N { /** * Distinct by the value mapped from < code > keyMapper < / code > . * Mostly it ' s designed for one - step operation to complete the operation in one step . * < code > java . util . stream . Stream < / code > is preferred for multiple phases operation . * @ param a * @ param fromIndex * @ param toIndex * @ param keyMapper don ' t change value of the input parameter . * @ return */ public static < T , E extends Exception > List < T > distinctBy ( final T [ ] a , final int fromIndex , final int toIndex , final Try . Function < ? super T , ? , E > keyMapper ) throws E { } }
// Validate the requested range against the array length first.
checkFromToIndex(fromIndex, toIndex, len(a));

if (N.isNullOrEmpty(a)) {
    return new ArrayList<>();
}

// Keys already seen; Set.add tells us whether an element's key is new.
final Set<Object> seenKeys = new HashSet<>();
final List<T> distinctElements = new ArrayList<>();

for (int idx = fromIndex; idx < toIndex; idx++) {
    final T element = a[idx];

    // First occurrence of a key wins; later duplicates are dropped.
    if (seenKeys.add(hashKey(keyMapper.apply(element)))) {
        distinctElements.add(element);
    }
}

return distinctElements;
public class ConfigProcessor { /** * Process user configuration of " version " . If set to GCLOUD _ CONFIG then allow gcloud to generate * a version . If set but not a keyword then just return the set value . */ String processVersion ( String version ) { } }
// A missing/blank version and the APPENGINE_CONFIG keyword are both invalid here.
if (version == null || version.trim().isEmpty() || version.equals(APPENGINE_CONFIG)) {
    throw new IllegalArgumentException(VERSION_ERROR);
}

// GCLOUD_CONFIG means "let gcloud generate a version", signalled by returning null;
// any other value is passed through verbatim.
return version.equals(GCLOUD_CONFIG) ? null : version;
public class HttpHealthCheckService { /** * Creates a new response which is sent when the { @ link Server } is unhealthy . */ protected AggregatedHttpMessage newUnhealthyResponse ( @ SuppressWarnings ( "UnusedParameters" ) ServiceRequestContext ctx ) { } }
return AggregatedHttpMessage . of ( HttpStatus . SERVICE_UNAVAILABLE , MediaType . PLAIN_TEXT_UTF_8 , RES_NOT_OK ) ;
public class NewChunk { /** * Slow - path append data */ private void append2slowUUID ( ) { } }
// Slow-path append for UUID data: switch the chunk to 128-bit storage
// (_ms holds the low 64 bits as longs, _ds the high 64 bits) and grow capacity.
if (_id != null) cancel_sparse();
if (_ds == null && _ms != null) {
    // This can happen for columns with all NAs and then a UUID:
    // retrofit the existing mantissa storage to longs and back-fill every
    // existing slot with the UUID NA sentinel (low/high halves).
    _xs = null;
    _ms.switchToLongs();
    _ds = MemoryManager.malloc8d(_sparseLen);
    Arrays.fill(_ms._vals8, C16Chunk._LO_NA);
    Arrays.fill(_ds, Double.longBitsToDouble(C16Chunk._HI_NA));
}
if (_ms != null && _sparseLen > 0) {
    // Grow both halves (and the sparse id array, if present) by doubling.
    _ds = MemoryManager.arrayCopyOf(_ds, _sparseLen * 2);
    _ms.resize(_sparseLen * 2);
    if (_id != null) _id = Arrays.copyOf(_id, _sparseLen * 2);
} else {
    // Nothing stored yet: allocate fresh long-based storage with a small
    // initial capacity of 4 entries.
    _ms = new Mantissas(4);
    _xs = null;
    _ms.switchToLongs();
    _ds = new double[4];
}
public class TypeDesc { /** * Acquire a TypeDesc from any class name , including primitives and arrays . * Primitive and array syntax matches Java declarations . */ public static TypeDesc forClass ( final String name ) throws IllegalArgumentException { } }
// Resolve a Java source-style type name ("int", "java.lang.String", "long[][]")
// to a TypeDesc, consulting/po­pulating the name cache.
if (name.length() < 1) {
    throw invalidName(name);
}

// TODO: Support generics in name.

TypeDesc type = cNamesToInstances.get(name);
if (type != null) {
    return type;
}

int index1 = name.lastIndexOf('[');
int index2 = name.lastIndexOf(']');

if (index2 >= 0) {
    // Array syntax: must end with "[]" exactly (']' last, '[' immediately before).
    if (index2 + 1 != name.length() || index1 + 1 != index2) {
        throw invalidName(name);
    }
    try {
        // Strip one "[]" pair and recurse for the component type.
        type = forClass(name.substring(0, index1)).toArrayType();
    } catch (IllegalArgumentException e) {
        throw invalidName(name);
    }
} else if (index1 >= 0) {
    // '[' without a matching trailing ']' is malformed.
    throw invalidName(name);
} else {
    // Non-array: first try the primitive keywords (dispatch on first char),
    // otherwise build an ObjectType. The labeled block lets a primitive match
    // skip the ObjectType fallback.
    setType: {
        switch (name.charAt(0)) {
        case 'v':
            if (name.equals("void")) { type = VOID; break setType; }
            break;
        case 'b':
            if (name.equals("boolean")) { type = BOOLEAN; break setType; }
            else if (name.equals("byte")) { type = BYTE; break setType; }
            break;
        case 'c':
            if (name.equals("char")) { type = CHAR; break setType; }
            break;
        case 's':
            if (name.equals("short")) { type = SHORT; break setType; }
            break;
        case 'i':
            if (name.equals("int")) { type = INT; break setType; }
            break;
        case 'l':
            if (name.equals("long")) { type = LONG; break setType; }
            break;
        case 'f':
            if (name.equals("float")) { type = FLOAT; break setType; }
            break;
        case 'd':
            if (name.equals("double")) { type = DOUBLE; break setType; }
            break;
        }
        String desc = generateDescriptor(name);
        // Names may arrive with '/' separators; normalize to '.' for the class name.
        if (name.indexOf('/') < 0) {
            type = new ObjectType(desc, name);
        } else {
            type = new ObjectType(desc, name.replace('/', '.'));
        }
        type = intern(type);
    }
}

// Cache for subsequent lookups (array results are cached too).
cNamesToInstances.put(name, type);
return type;
public class RCSwitch { /** * Send a set of bits * @ param bitSet Bits ( 000001010100010001) * @ param length Length of the bit string ( 24) */ public void send ( final BitSet bitSet , int length ) { } }
// Without a configured transmitter pin there is nothing to drive.
if (transmitterPin == null) {
    return;
}

for (int repeat = 0; repeat < repeatTransmit; repeat++) {
    // Emit the protocol's pulse train for each bit, most significant first
    // in bit-set order, then the synchronization gap.
    for (int bit = 0; bit < length; bit++) {
        transmit(bitSet.get(bit) ? protocol.getOneBit() : protocol.getZeroBit());
    }
    sendSync();
}

// Leave the transmitter line low after the final repeat.
transmitterPin.low();
public class UIData { /** * < p > Override behavior from { @ link * UIComponentBase # invokeOnComponent } to provide special care for * positioning the data properly before finding the component and * invoking the callback on it . If the argument * < code > clientId < / code > is equal to < code > this . getClientId ( ) < / code > * simply invoke the < code > contextCallback < / code > , passing the * < code > context < / code > argument and < b > this < / b > as arguments , and * return < code > true . < / code > If the argument < code > clientId < / code > * is not equal to < code > this . getClientId ( ) < / code > , inspect each of * the facet children of this < code > UIData < / code > instance and for * each one , compare its < code > clientId < / code > with the argument * < code > clientId < / code > . If there is a match , invoke the * < code > contextCallback < / code > , passing the < code > context < / code > * argument and < b > this < / b > as arguments , and return * < code > true < / code > . Otherwise , attempt to extract a rowIndex from * the < code > clientId < / code > . For example , if the argument * < code > clientId < / code > was < code > form : data : 3 : customerHeader < / code > * the rowIndex would be < code > 3 < / code > . Let this value be called * < code > newIndex < / code > . The current rowIndex of this instance must * be saved aside and restored before returning in all cases , * regardless of the outcome of the search or if any exceptions are * thrown in the process . < / p > * < p > The implementation of this method must never return < code > true < / code > * if setting the rowIndex of this instance to be equal to * < code > newIndex < / code > causes this instance to return < code > false < / code > * from { @ link # isRowAvailable } . 
< / p > * @ throws NullPointerException { @ inheritDoc } * @ throws FacesException { @ inheritDoc } Also throws < code > FacesException < / code > * if any exception is thrown when deriving the * rowIndex from the argument < code > clientId < / code > . * @ since 1.2 */ public boolean invokeOnComponent ( FacesContext context , String clientId , ContextCallback callback ) throws FacesException { } }
if (null == context || null == clientId || null == callback) {
    throw new NullPointerException();
}

String myId = super.getClientId(context);
boolean found = false;

// Case 1: the target is this UIData itself.
if (clientId.equals(myId)) {
    try {
        this.pushComponentToEL(context, compositeParent);
        callback.invokeContextCallback(context, this);
        return true;
    } catch (Exception e) {
        throw new FacesException(e);
    } finally {
        this.popComponentFromEL(context);
    }
}

// Case 2: check the facets, if any, of UIData.
if (this.getFacetCount() > 0) {
    for (UIComponent c : this.getFacets().values()) {
        if (clientId.equals(c.getClientId(context))) {
            callback.invokeContextCallback(context, c);
            return true;
        }
    }
}

// Case 3: check column-level facets, if any (headers/footers are row-independent).
if (this.getChildCount() > 0) {
    for (UIComponent column : this.getChildren()) {
        if (column instanceof UIColumn) {
            if (column.getFacetCount() > 0) {
                for (UIComponent facet : column.getFacets().values()) {
                    if (facet.invokeOnComponent(context, clientId, callback)) {
                        return true;
                    }
                }
            }
        }
    }
}

/*
 * Check if we are looking for a component that is part of the
 * actual skeleton.
 */
if (this.getChildCount() > 0) {
    for (UIComponent column : this.getChildren()) {
        if (column instanceof UIColumn) {
            if (column.invokeOnComponent(context, clientId, callback)) {
                return true;
            }
        }
    }
}

// Case 4: the target lives inside a specific row. Derive the row index from
// the clientId, position this UIData on that row, and delegate. The current
// rowIndex is saved and restored in the finally block regardless of outcome.
int lastSep, newRow, savedRowIndex = this.getRowIndex();
try {
    char sepChar = UINamingContainer.getSeparatorChar(context);
    // If we need to strip out the rowIndex from our id
    // PENDING (edburns): is this safe with respect to I18N?
    if (myId.endsWith(sepChar + Integer.toString(savedRowIndex, 10))) {
        lastSep = myId.lastIndexOf(sepChar);
        assert (-1 != lastSep);
        myId = myId.substring(0, lastSep);
    }

    // myId will be something like form:outerData for a non-nested table,
    // and form:outerData:3:data for a nested table.
    // clientId will be something like form:outerData:3:outerColumn
    // for a non-nested table. clientId will be something like
    // outerData:3:data:3:input for a nested table.
    if (clientId.startsWith(myId)) {
        int preRowIndexSep, postRowIndexSep;

        // Locate the separators bracketing the row index segment of clientId.
        if (-1 != (preRowIndexSep = clientId.indexOf(sepChar, myId.length()))) {
            // Check the length
            if (++preRowIndexSep < clientId.length()) {
                if (-1 != (postRowIndexSep = clientId.indexOf(sepChar, preRowIndexSep + 1))) {
                    try {
                        newRow = Integer.valueOf(clientId.substring(preRowIndexSep, postRowIndexSep)).intValue();
                    } catch (NumberFormatException ex) {
                        // PENDING (edburns): I18N
                        String message = "Trying to extract rowIndex from clientId \'" + clientId + "\' " + ex.getMessage();
                        throw new NumberFormatException(message);
                    }
                    this.setRowIndex(newRow);
                    // Per contract: never return true if the derived row is unavailable.
                    if (this.isRowAvailable()) {
                        found = super.invokeOnComponent(context, clientId, callback);
                    }
                }
            }
        }
    }
} catch (FacesException fe) {
    throw fe;
} catch (Exception e) {
    throw new FacesException(e);
} finally {
    // Always restore the original row position.
    this.setRowIndex(savedRowIndex);
}
return found;
public class DefaultQueryParamsParser { /** * < strong > Important : < / strong > Grouping itself is not specified by JSON API itself , but the * keyword and format it reserved for today and future use in Katharsis . * Group params can be send with following format : < br > * < strong > group [ ResourceType ] = " property ( . property ) * " < / strong > * Examples of accepted grouping of resources : * < ul > * < li > { @ code GET / tasks / ? group [ tasks ] = name } < / li > * < li > { @ code GET / project / ? group [ users ] = name . firstName & include [ projects ] = team } < / li > * < / ul > * @ param context parser context providing access to the request ' s raw query parameters * @ return { @ link Map } Map of grouping params passed to request grouped by type of resource */ protected TypedParams < GroupingParams > parseGroupingParameters ( final QueryParamsParserContext context ) { } }
// Collect the raw "group[...]" query parameters and bucket them by resource type.
final String groupingKey = RestrictedQueryParamsMembers.group.name();
final Map<String, Set<String>> rawGrouping = filterQueryParamsByKey(context, groupingKey);
final Map<String, Set<String>> paramsPerType = new LinkedHashMap<>();

for (Map.Entry<String, Set<String>> entry : rawGrouping.entrySet()) {
    final List<String> propertyList = buildPropertyListFromEntry(entry, groupingKey);

    // Only a single bracket level (the resource type) is allowed under "group".
    if (propertyList.size() > 1) {
        throw new ParametersDeserializationException("Exceeded maximum level of nesting of 'group' parameter "
                + "(1) eg. group[tasks][name] <-- #2 level and more are not allowed");
    }

    final String resourceType = propertyList.get(0);
    Set<String> resourceParams = paramsPerType.get(resourceType);
    if (resourceParams == null) {
        resourceParams = new LinkedHashSet<>();
        paramsPerType.put(resourceType, resourceParams);
    }
    resourceParams.addAll(entry.getValue());
}

// Wrap each bucket in an immutable GroupingParams, then freeze the outer map.
final Map<String, GroupingParams> groupingByType = new LinkedHashMap<>();
for (Map.Entry<String, Set<String>> typed : paramsPerType.entrySet()) {
    groupingByType.put(typed.getKey(), new GroupingParams(Collections.unmodifiableSet(typed.getValue())));
}
return new TypedParams<>(Collections.unmodifiableMap(groupingByType));
public class TheMovieDbApi { /** * Get the primary information about a TV series by id . * @ param tvID tvID * @ param language language * @ param appendToResponse appendToResponse * @ return * @ throws com . omertron . themoviedbapi . MovieDbException */ public TVInfo getTVInfo ( int tvID , String language , String ... appendToResponse ) throws MovieDbException { } }
return tmdbTv . getTVInfo ( tvID , language , appendToResponse ) ;
public class BitUtil { /** * Returns the number of set bits in an array of longs . */ public static long pop_array ( long [ ] arr , int wordOffset , int numWords ) { } }
long popCount = 0 ; for ( int i = wordOffset , end = wordOffset + numWords ; i < end ; ++ i ) { popCount += Long . bitCount ( arr [ i ] ) ; } return popCount ;
public class ObjectManager { /** * @ return long the current Log filesize in bytes . * @ throws ObjectManagerException */ public long getLogFileSize ( ) throws ObjectManagerException { } }
// Entry/exit tracing brackets the single delegated read of the log file size.
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
    trace.entry(this, cclass, "getLogFileSize");

// The log output owns the authoritative file size, in bytes.
long logFileSize = objectManagerState.logOutput.getLogFileSize();

if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
    // Fix: use Long.valueOf (cached boxing) instead of the deprecated
    // Long(long) constructor; the traced value is unchanged.
    trace.exit(this, cclass, "getLogFileSize", new Object[] { Long.valueOf(logFileSize) });

return logFileSize;
public class CmsSolrIndex { /** * Checks if the query should be executed using the debug mode where the security restrictions do not apply . * @ param cms the current context . * @ param query the query to execute . * @ return a flag , indicating , if the query should be performed in debug mode . */ private boolean isDebug ( CmsObject cms , CmsSolrQuery query ) { } }
// Pull (and strip) the debug-secret parameter from the query; the first value wins.
final String[] secretValues = query.remove(REQUEST_PARAM_DEBUG_SECRET);
final String providedSecret = (secretValues == null) || (secretValues.length < 1) ? null : secretValues[0];

// Debug mode requires a non-blank secret AND a configured secret file.
if ((null == providedSecret) || providedSecret.trim().isEmpty() || (null == m_handlerDebugSecretFile)) {
    return false;
}

try {
    CmsFile secretFile = cms.readFile(m_handlerDebugSecretFile);
    String expectedSecret = new String(secretFile.getContents(), CmsFileUtil.getEncoding(cms, secretFile));
    // Compare trimmed secrets so surrounding whitespace in the file is harmless.
    return expectedSecret.trim().equals(providedSecret.trim());
} catch (Exception e) {
    // Unreadable secret file: log and fall through to "no debug mode".
    LOG.info("Failed to read secret file for index \"" + getName() + "\" at path \"" + m_handlerDebugSecretFile + "\".");
}
return false;
public class RelationalOperations { /** * Returns true if pt _ a touches env _ b . */ private static boolean pointTouchesEnvelope_ ( Point2D pt_a , Envelope2D env_b , double tolerance , ProgressTracker progress_tracker ) { } }
// Degenerate envelope (both extents within tolerance) behaves as a point,
// and points cannot touch points.
if (env_b.getHeight() <= tolerance && env_b.getWidth() <= tolerance)
    return false; // when treated as a point, points cannot touch points

Envelope2D env_b_inflated = new Envelope2D(), env_b_deflated = new Envelope2D();
env_b_inflated.setCoords(env_b);
env_b_inflated.inflate(tolerance, tolerance);

// Outside the tolerance-grown envelope: no contact at all.
if (!env_b_inflated.contains(pt_a))
    return false;

if (env_b.getHeight() <= tolerance || env_b.getWidth() <= tolerance) {
    // Envelope degenerates to a (thick) segment in one direction: shrink only
    // the non-degenerate axis by tolerance; points strictly inside that span
    // touch the interior, not the boundary.
    env_b_deflated.setCoords(env_b);
    if (env_b.getHeight() > tolerance)
        env_b_deflated.inflate(0, -tolerance);
    else
        env_b_deflated.inflate(-tolerance, 0);

    if (env_b.getHeight() > tolerance) {
        if (pt_a.y > env_b_deflated.ymin && pt_a.y < env_b_deflated.ymax)
            return false;
    } else {
        if (pt_a.x > env_b_deflated.xmin && pt_a.x < env_b_deflated.xmax)
            return false;
    }
    return true;
}

// Full 2D envelope: the point touches iff it lies within tolerance of the
// boundary, i.e. inside the inflated envelope but not strictly inside the
// deflated one.
env_b_deflated.setCoords(env_b);
env_b_deflated.inflate(-tolerance, -tolerance);

if (env_b_deflated.containsExclusive(pt_a))
    return false;

return true;
public class JacksonSingleton { /** * Gets the JSONP response for the given callback and value . * @ param callback the callback name * @ param data the data to transform to json * @ return the String built as follows : " callback ( json ( data ) ) " */ public String toJsonP ( final String callback , final Object data ) { } }
synchronized ( lock ) { try { return callback + "(" + stringify ( ( JsonNode ) mapper . valueToTree ( data ) ) + ");" ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
public class CloudantService { /** * { @ inheritDoc } */ @ Override public Object createResource ( ResourceInfo info ) throws Exception { } }
// A null ResourceInfo means a direct JNDI lookup, which this resource forbids.
if (info != null) {
    return createResource(null, false, info.getAuth(), info.getLoginPropertyList());
}
throw new UnsupportedOperationException(Tr.formatMessage(tc, "direct.lookup.CWWKD0301E", cloudantConfigIdentifier));
public class LinearEquationSystem { /** * Method for total pivot search , searches for x , y in { k , . . . n } , so that * { @ code | a _ xy | > | a _ ij | } * @ param k search starts at entry ( k , k ) * @ return the position of the found pivot element */ private IntIntPair totalPivotSearch ( int k ) { } }
// Scan the remaining (k..n) submatrix — honouring the current row/column
// permutation arrays — for the entry with the largest absolute value.
double largest = 0;
int pivotRow = k;
int pivotCol = k;

for (int r = k; r < coeff.length; r++) {
    for (int c = k; c < coeff[0].length; c++) {
        final double magnitude = Math.abs(coeff[row[r]][col[c]]);
        // Strictly greater keeps the first (lowest-index) maximum on ties.
        if (largest < magnitude) {
            largest = magnitude;
            pivotRow = r;
            pivotCol = c;
        }
    }
}

return new IntIntPair(pivotRow, pivotCol);
public class MetadataManager { /** * Read ClassDescriptors from the given InputStream . * @ see # mergeDescriptorRepository */ public DescriptorRepository readDescriptorRepository ( InputStream inst ) { } }
try {
    // Parsing is fully delegated to the persistor; any failure (parse or I/O)
    // is rewrapped as a MetadataException below.
    return new RepositoryPersistor().readDescriptorRepository(inst);
} catch (Exception e) {
    throw new MetadataException("Can not read repository " + inst, e);
}
public class AbstractResourceAdapterDeployer { /** * Create a recovery module * @ param mcf The ManagedConnectionFactory * @ param cd The connection definition * @ return The recovery module , or < code > null < / code > if no recovery * @ exception Throwable In case on an error */ private org . ironjacamar . core . api . deploymentrepository . Recovery createRecovery ( javax . resource . spi . ManagedConnectionFactory mcf , ConnectionDefinition cd ) throws Throwable { } }
// Assemble an XA recovery module for this connection definition, or return
// null when recovery is impossible/disabled (no registry, no subject factory,
// explicit no-recovery, or no resolvable security domain).
Boolean padXid = Defaults.PAD_XID;
Boolean isSameRMOverride = Defaults.IS_SAME_RM_OVERRIDE;
Boolean wrapXAResource = Defaults.WRAP_XA_RESOURCE;
String securityDomain = null;
RecoveryPlugin plugin = null;
Collection<org.ironjacamar.core.api.deploymentrepository.ConfigProperty> dcps = null;

if (transactionIntegration.getRecoveryRegistry() == null)
    return null;

if (subjectFactory == null)
    return null;

if (cd.getRecovery() != null && cd.getRecovery().isNoRecovery())
    return null;

// Check security domain: recovery-specific credential first, then the
// connection definition's own security settings.
if (cd.getRecovery() != null && cd.getRecovery().getCredential() != null)
    securityDomain = cd.getRecovery().getCredential().getSecurityDomain();

if (securityDomain == null && cd.getSecurity() != null)
    securityDomain = cd.getSecurity().getSecurityDomain();

if (securityDomain == null)
    return null;

// Optional custom recovery plugin: instantiate it from the RA's classloader
// and inject its declared config properties.
if (cd.getRecovery() != null && cd.getRecovery().getPlugin() != null) {
    Extension extension = cd.getRecovery().getPlugin();
    Collection<org.ironjacamar.common.api.metadata.spec.ConfigProperty> configProperties =
        new ArrayList<org.ironjacamar.common.api.metadata.spec.ConfigProperty>();

    for (Map.Entry<String, String> property : extension.getConfigPropertiesMap().entrySet()) {
        org.ironjacamar.common.api.metadata.spec.ConfigProperty c =
            new org.ironjacamar.common.metadata.spec.ConfigPropertyImpl(null,
                new XsdString(property.getKey(), null),
                XsdString.NULL_XSDSTRING,
                new XsdString(property.getValue(), null),
                Boolean.FALSE, Boolean.FALSE, Boolean.FALSE,
                null, false, null, null, null, null);
        configProperties.add(c);
    }

    Class<?> clz = Class.forName(extension.getClassName(), true, mcf.getClass().getClassLoader());
    plugin = (RecoveryPlugin) clz.newInstance();
    dcps = injectConfigProperties(plugin, configProperties, null, plugin.getClass().getClassLoader());
}

// Fall back to the default plugin / empty property list when none configured.
if (plugin == null)
    plugin = new DefaultRecoveryPlugin();

if (dcps == null)
    dcps = new ArrayList<>(1);

// XA pool settings override the defaults when explicitly configured.
if (cd.getPool() != null) {
    org.ironjacamar.common.api.metadata.common.XaPool xaPool =
        (org.ironjacamar.common.api.metadata.common.XaPool) cd.getPool();

    if (xaPool.isPadXid() != null)
        padXid = xaPool.isPadXid();

    if (xaPool.isIsSameRmOverride() != null)
        isSameRMOverride = xaPool.isIsSameRmOverride();

    if (xaPool.isWrapXaResource() != null)
        wrapXAResource = xaPool.isWrapXaResource();
}

XAResourceRecovery r = transactionIntegration.createXAResourceRecovery(mcf, padXid, isSameRMOverride,
    wrapXAResource, securityDomain, subjectFactory, plugin, null);

return new RecoveryImpl(plugin.getClass().getName(), dcps, r, cd.getJndiName(),
    transactionIntegration.getRecoveryRegistry());
public class InputParallelismMarshaller { /** * Marshall the given parameter object . */ public void marshall ( InputParallelism inputParallelism , ProtocolMarshaller protocolMarshaller ) { } }
// Generated AWS SDK marshaller: null input is a caller error, and any failure
// while writing the single "Count" member is rewrapped as an SdkClientException.
if (inputParallelism == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    protocolMarshaller.marshall(inputParallelism.getCount(), COUNT_BINDING);
} catch (Exception e) {
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class MavenImportUtils { /** * Force the pom file of a project to be " simple " . * @ param projectDir the folder in which the pom file is located . * @ param monitor the progress monitor . * @ throws IOException if the pom file cannot be changed . */ static void forceSimplePom ( File projectDir , IProgressMonitor monitor ) throws IOException { } }
// Rewrite the project's pom so the build extension is disabled ("simple" pom):
// back up the original, strip every <extensions>true</extensions> element,
// and write the result back. Four units of progress are reported.
final File pomFile = new File(projectDir, POM_FILE);
if (pomFile.exists()) {
    final SubMonitor submon = SubMonitor.convert(monitor, 4);
    // Replace any stale backup with a fresh copy of the current pom.
    final File savedPomFile = new File(projectDir, POM_BACKUP_FILE);
    if (savedPomFile.exists()) {
        savedPomFile.delete();
    }
    submon.worked(1);
    Files.copy(pomFile, savedPomFile);
    submon.worked(1);
    // Filter the pom line by line; the regex only matches when the whole
    // element sits on a single line — multi-line forms would survive (NOTE(review):
    // presumably acceptable for generated poms; confirm against the generator).
    final StringBuilder content = new StringBuilder();
    try (BufferedReader stream = new BufferedReader(new FileReader(pomFile))) {
        String line = stream.readLine();
        while (line != null) {
            line = line.replaceAll("<extensions>\\s*true\\s*</extensions>", ""); //$NON-NLS-1$ //$NON-NLS-2$
            content.append(line).append("\n"); //$NON-NLS-1$
            line = stream.readLine();
        }
    }
    submon.worked(1);
    Files.write(content.toString().getBytes(), pomFile);
    submon.worked(1);
}
public class RemoteTaskRunner { /** * Schedule a task that will , at some point in the future , clean up znodes and issue failures for " tasksToFail " * if they are being run by " worker " . */ private void scheduleTasksCleanupForWorker ( final String worker , final List < String > tasksToFail ) { } }
// This method is only called from the PathChildrenCache event handler, so this may look like a race,
// but is actually not.
cancelWorkerCleanup(worker);

// After the configured timeout, remove the worker's znodes and fail its tasks.
final ListenableScheduledFuture<?> cleanupTask = cleanupExec.schedule(
    new Runnable() {
        @Override
        public void run() {
            log.info("Running scheduled cleanup for Worker[%s]", worker);
            try {
                for (String assignedTask : tasksToFail) {
                    String taskPath = JOINER.join(indexerZkConfig.getTasksPath(), worker, assignedTask);
                    String statusPath = JOINER.join(indexerZkConfig.getStatusPath(), worker, assignedTask);
                    // Delete the task's assignment and status znodes, if still present.
                    if (cf.checkExists().forPath(taskPath) != null) {
                        cf.delete().guaranteed().forPath(taskPath);
                    }
                    if (cf.checkExists().forPath(statusPath) != null) {
                        cf.delete().guaranteed().forPath(statusPath);
                    }
                    log.info("Failing task[%s]", assignedTask);
                    // Mark the task failed locally and notify listeners; a task we
                    // aren't tracking is only worth a warning.
                    RemoteTaskRunnerWorkItem taskRunnerWorkItem = runningTasks.remove(assignedTask);
                    if (taskRunnerWorkItem != null) {
                        taskRunnerWorkItem.setResult(TaskStatus.failure(assignedTask));
                        TaskRunnerUtils.notifyStatusChanged(listeners, assignedTask, TaskStatus.failure(assignedTask));
                    } else {
                        log.warn("RemoteTaskRunner has no knowledge of task[%s]", assignedTask);
                    }
                }
                // worker is gone, remove worker task status announcements path.
                String workerStatusPath = JOINER.join(indexerZkConfig.getStatusPath(), worker);
                if (cf.checkExists().forPath(workerStatusPath) != null) {
                    cf.delete().guaranteed().forPath(JOINER.join(indexerZkConfig.getStatusPath(), worker));
                }
            } catch (Exception e) {
                log.makeAlert("Exception while cleaning up worker[%s]", worker).emit();
                throw new RuntimeException(e);
            }
        }
    },
    config.getTaskCleanupTimeout().toStandardDuration().getMillis(),
    TimeUnit.MILLISECONDS
);

removedWorkerCleanups.put(worker, cleanupTask);

// Remove this entry from removedWorkerCleanups when done, if it's actually the one in there.
Futures.addCallback(
    cleanupTask,
    new FutureCallback<Object>() {
        @Override
        public void onSuccess(Object result) {
            removedWorkerCleanups.remove(worker, cleanupTask);
        }

        @Override
        public void onFailure(Throwable t) {
            removedWorkerCleanups.remove(worker, cleanupTask);
        }
    }
);
public class IfcLightDistributionDataImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < Double > getSecondaryPlaneAngle ( ) { } }
// Generated EMF accessor: read the feature value, resolving proxies (true).
return (EList<Double>) eGet(Ifc4Package.Literals.IFC_LIGHT_DISTRIBUTION_DATA__SECONDARY_PLANE_ANGLE, true);
public class MoreCollectors { /** * Returns a { @ code Collector } which performs the bitwise - and operation of a * long - valued function applied to the input elements . If no elements are * present , the result is empty { @ link OptionalLong } . * This method returns a * < a href = " package - summary . html # ShortCircuitReduction " > short - circuiting * collector < / a > : it may not process all the elements if the result is zero . * @ param < T > the type of the input elements * @ param mapper a function extracting the property to be processed * @ return a { @ code Collector } that produces the bitwise - and operation of a * derived property * @ since 0.4.0 */ public static < T > Collector < T , ? , OptionalLong > andingLong ( ToLongFunction < T > mapper ) { } }
// Short-circuiting collector: PrimitiveBox carries the running AND in .l and a
// "seen any element" flag in .b; collection stops early once the AND hits 0
// (ANDing further values can never change it).
return new CancellableCollectorImpl<>(
    PrimitiveBox::new,
    // Accumulator: first element seeds the value, later ones are ANDed in.
    (acc, t) -> {
        if (!acc.b) {
            acc.l = mapper.applyAsLong(t);
            acc.b = true;
        } else {
            acc.l &= mapper.applyAsLong(t);
        }
    },
    // Combiner: an empty side is ignored; otherwise AND the two partials.
    (acc1, acc2) -> {
        if (!acc1.b)
            return acc2;
        if (!acc2.b)
            return acc1;
        acc1.l &= acc2.l;
        return acc1;
    },
    // Finisher: empty box -> empty OptionalLong.
    PrimitiveBox::asLong,
    // Cancellation predicate: short-circuit once the running AND is zero.
    acc -> acc.b && acc.l == 0,
    UNORDERED_CHARACTERISTICS);
public class SVGLogoPaneController { /** * Method sets the text displayed next to the logo . * @ param text */ public void setText ( final String text ) { } }
this . text . setText ( text ) ; this . text . setFont ( Font . font ( this . text . getFont ( ) . getFamily ( ) , FontWeight . BOLD , size / 2 ) ) ; stack . requestLayout ( ) ;
public class HttpServiceSchedulerClient { /** * Construct the endpoint to send http request for a particular command * Make sure the construction matches server sides . * @ param schedulerEndpoint The scheduler http endpoint * @ param command The command to request * @ return The http endpoint for particular command */ protected String getCommandEndpoint ( String schedulerEndpoint , Command command ) { } }
// Currently the server side receives command request in lower case return String . format ( "http://%s/%s" , schedulerEndpoint , command . name ( ) . toLowerCase ( ) ) ;
public class GVRCamera { /** * Add a post - effect to this camera ' s render chain . * Post - effects are GL shaders , applied to the texture ( hardware bitmap ) * containing the rendered scene graph . Each post - effect combines a shader * selector with a set of parameters : This lets you pass different * parameters to the shaders for each eye . * @ param postEffectData * Post - effect to append to this camera ' s render chain */ public void addPostEffect ( GVRMaterial postEffectData ) { } }
GVRContext ctx = getGVRContext();
if (mPostEffects == null) {
    // First post-effect: create the render-data holder, give it a minimal
    // full-screen mesh, and attach it to the native camera before configuring
    // culling (NOTE(review): setPostEffect is called before setCullFace here —
    // presumably the native side reads cull state lazily; confirm ordering).
    mPostEffects = new GVRRenderData(ctx, postEffectData);
    GVRMesh dummyMesh = new GVRMesh(getGVRContext(), "float3 a_position float2 a_texcoord");
    mPostEffects.setMesh(dummyMesh);
    NativeCamera.setPostEffect(getNative(), mPostEffects.getNative());
    mPostEffects.setCullFace(GVRRenderPass.GVRCullFaceEnum.None);
} else {
    // Subsequent post-effects are appended as additional render passes.
    GVRRenderPass rpass = new GVRRenderPass(ctx, postEffectData);
    rpass.setCullFace(GVRRenderPass.GVRCullFaceEnum.None);
    mPostEffects.addPass(rpass);
}