signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ResourceIndexImpl {

    /**
     * {@inheritDoc}
     *
     * <p>Delegates the triple addition to the underlying writer; when
     * {@code flush} is true the writer is asked to flush as part of the add.
     *
     * @param triple the triple to add to the index
     * @param flush  whether the writer should flush after adding
     * @throws IOException     on I/O failure in the underlying writer
     * @throws TrippiException on triple-store failure
     */
    public void add(Triple triple, boolean flush) throws IOException, TrippiException {
        _writer.add(triple, flush);
    }
}
public class NumberExpression { /** * Create a cast to String expression * @ see java . lang . Object # toString ( ) * @ return string representation */ public StringExpression stringValue ( ) { } }
if ( stringCast == null ) { stringCast = Expressions . stringOperation ( Ops . STRING_CAST , mixin ) ; } return stringCast ;
public class BasicSqlQueryParser {

    /**
     * Remove all leading and trailing single-quotes, double-quotes, or square brackets
     * from the supplied text. If multiple, properly-paired quotes or brackets are found,
     * they will all be removed.
     *
     * @param text the input text; may not be null
     * @param position the position of the text; may not be null
     * @return the text without leading and trailing brackets and quotes, or
     *         <code>text</code> if there were no square brackets or quotes
     */
    protected String removeBracketsAndQuotes(String text, Position position) {
        // Delegates to the three-argument overload; the 'true' flag presumably
        // enables repeated stripping of nested pairs — confirm against the overload.
        return removeBracketsAndQuotes(text, true, position);
    }
}
public class ModuleDotGraph { /** * Returns a Graph containing only requires transitive edges * with transitive reduction . */ public Graph < String > requiresTransitiveGraph ( Configuration cf , Set < String > roots ) { } }
Deque < String > deque = new ArrayDeque < > ( roots ) ; Set < String > visited = new HashSet < > ( ) ; Graph . Builder < String > builder = new Graph . Builder < > ( ) ; while ( deque . peek ( ) != null ) { String mn = deque . pop ( ) ; if ( visited . contains ( mn ) ) continue ; visited . add ( mn ) ; builder . addNode ( mn ) ; cf . findModule ( mn ) . get ( ) . reference ( ) . descriptor ( ) . requires ( ) . stream ( ) . filter ( d -> d . modifiers ( ) . contains ( TRANSITIVE ) || d . name ( ) . equals ( "java.base" ) ) . map ( Requires :: name ) . forEach ( d -> { deque . add ( d ) ; builder . addEdge ( mn , d ) ; } ) ; } return builder . build ( ) . reduce ( ) ;
public class InMemoryMessageStore {

    /**
     * (non-Javadoc)
     * @see net.timewalker.ffmq4.local.destination.store.impl.AbstractMessageStore#storeMessage(net.timewalker.ffmq4.common.message.AbstractMessage, int)
     */
    @Override
    protected int storeMessage(AbstractMessage message, int previousHandle) throws JMSException {
        // Delegate storage to the linked data store.
        // NOTE(review): the cast assumes dataStore is always an
        // InMemoryLinkedDataStore here — confirm where dataStore is assigned.
        return ((InMemoryLinkedDataStore) dataStore).store(message, previousHandle);
    }
}
public class ManagedChannelImpl {

    /**
     * Terminates the channel once shutdown has been requested and all
     * subchannels and OOB channels have gone away; otherwise does nothing.
     *
     * <p>Must be run from syncContext.
     */
    private void maybeTerminateChannel() {
        if (terminated) {
            return;
        }
        // Terminate only after shutdown was requested AND every child channel is gone.
        if (shutdown.get() && subchannels.isEmpty() && oobChannels.isEmpty()) {
            channelLogger.log(ChannelLogLevel.INFO, "Terminated");
            channelz.removeRootChannel(this);
            terminated = true;
            // Wake up anyone blocked in awaitTermination().
            terminatedLatch.countDown();
            executorPool.returnObject(executor);
            balancerRpcExecutorHolder.release();
            // Release the transport factory so that it can deallocate any resources.
            transportFactory.close();
        }
    }
}
public class AutoRegisterActionServlet {

    /**
     * Last chance to handle an unhandled action URI.
     *
     * <p>The default implementation handles nothing; subclasses override this
     * to forward somewhere or write to the response themselves.
     *
     * @return <code>true</code> if this method handled it (by forwarding somewhere
     *         or writing to the response).
     */
    protected boolean processUnhandledAction(HttpServletRequest request, HttpServletResponse response, String uri)
            throws IOException, ServletException {
        return false;
    }
}
public class CeSymm {

    /**
     * Analyze the symmetries of the input Atom array using the provided
     * parameters.
     *
     * @param atoms
     *            representative Atom array of the Structure
     * @param params
     *            CeSymmParameters bean
     * @return CeSymmResult
     * @throws StructureException
     * @throws IllegalArgumentException if the Atom array is empty
     */
    public static CeSymmResult analyze(Atom[] atoms, CESymmParameters params) throws StructureException {
        if (atoms.length < 1)
            throw new IllegalArgumentException("Empty Atom array given.");
        // If the SSE information is needed, we calculate it if the user did not
        if (params.getSSEThreshold() > 0) {
            Structure s = atoms[0].getGroup().getChain().getStructure();
            if (SecStrucTools.getSecStrucInfo(s).isEmpty()) {
                logger.info("Calculating Secondary Structure...");
                SecStrucCalc ssp = new SecStrucCalc();
                ssp.calculate(s, true);
            }
        }
        // Run the iterative symmetry detection on the atoms.
        CeSymmIterative iter = new CeSymmIterative(params);
        CeSymmResult result = iter.execute(atoms);
        if (result.isRefined()) {
            // Optimize the global alignment freely once more (final step)
            if (params.getOptimization() && result.getSymmLevels() > 1) {
                try {
                    SymmOptimizer optimizer = new SymmOptimizer(result);
                    MultipleAlignment optimized = optimizer.optimize();
                    // Set the optimized MultipleAlignment and the axes
                    result.setMultipleAlignment(optimized);
                } catch (RefinerFailedException e) {
                    // Optimization is best-effort: keep the unoptimized result.
                    logger.info("Final optimization failed:" + e.getMessage());
                }
            }
            result.getMultipleAlignment().getEnsemble().setStructureIdentifiers(result.getRepeatsID());
        }
        return result;
    }
}
public class PortMapping {

    /**
     * Return the content of the mapping as an array with all specifications as given.
     *
     * @return port mappings as JSON array or null if no mappings exist
     */
    public JsonArray toJson() {
        Map<String, Integer> portMap = getContainerPortToHostPortMap();
        if (portMap.isEmpty()) {
            return null;
        }
        JsonArray ret = new JsonArray();
        Map<String, String> bindToMap = getBindToHostMap();
        for (Map.Entry<String, Integer> entry : portMap.entrySet()) {
            JsonObject mapping = new JsonObject();
            String containerPortSpec = entry.getKey();
            // The key is expected to match PROTOCOL_SPLIT_PATTERN: group(1) is the
            // container port, group(2) the optional protocol part.
            Matcher matcher = PROTOCOL_SPLIT_PATTERN.matcher(entry.getKey());
            if (!matcher.matches()) {
                throw new IllegalStateException("Internal error: " + entry.getKey()
                        + " doesn't contain protocol part and doesn't match " + PROTOCOL_SPLIT_PATTERN);
            }
            mapping.addProperty("containerPort", Integer.parseInt(matcher.group(1)));
            if (matcher.group(2) != null) {
                mapping.addProperty("protocol", matcher.group(2));
            }
            // hostPort and hostIP are optional parts of the specification.
            Integer hostPort = entry.getValue();
            if (hostPort != null) {
                mapping.addProperty("hostPort", hostPort);
            }
            if (bindToMap.containsKey(containerPortSpec)) {
                mapping.addProperty("hostIP", bindToMap.get(containerPortSpec));
            }
            ret.add(mapping);
        }
        return ret;
    }
}
public class SearchCommService {

    /**
     * Execute a search by criterion command.
     *
     * @param criterion criterion
     * @param mapWidget map widget
     * @param onFinished callback
     * @param onError
     *            callback to execute in case of error, optional use null if you
     *            don't need it
     */
    public static void searchByCriterion(final Criterion criterion, final MapWidget mapWidget,
            final DataCallback<Map<VectorLayer, List<Feature>>> onFinished, final Runnable onError) {
        // Build the search request from the criterion and the current map state.
        FeatureSearchRequest request = new FeatureSearchRequest();
        request.setMapCrs(mapWidget.getMapModel().getCrs());
        request.setCriterion(criterion);
        request.setLayerFilters(getLayerFiltersForCriterion(criterion, mapWidget.getMapModel()));
        request.setFeatureIncludes(featureIncludes);
        request.setMax(searchResultSize);
        GwtCommand commandRequest = new GwtCommand(FeatureSearchRequest.COMMAND);
        commandRequest.setCommandRequest(request);
        // Dispatch asynchronously; on success convert the DTO features back to
        // model objects before invoking the caller's callback.
        GwtCommandDispatcher.getInstance().execute(commandRequest,
                new AbstractCommandCallback<FeatureSearchResponse>() {

            public void execute(FeatureSearchResponse response) {
                onFinished.execute(convertFromDto(response.getFeatureMap(), mapWidget.getMapModel()));
            }

            @Override
            public void onCommunicationException(Throwable error) {
                // Fall back to default handling when no error callback was supplied.
                if (null != onError) {
                    onError.run();
                } else {
                    super.onCommunicationException(error);
                }
            }
        });
    }
}
public class StreamableHashMap { /** * Writes our custom streamable fields . */ public void writeObject ( ObjectOutputStream out ) throws IOException { } }
int ecount = size ( ) ; out . writeInt ( ecount ) ; for ( Map . Entry < K , V > entry : entrySet ( ) ) { out . writeObject ( entry . getKey ( ) ) ; out . writeObject ( entry . getValue ( ) ) ; }
public class Task {

    /**
     * Adds an Task-based continuation to this task that will be scheduled using the executor,
     * returning a new task that completes after the task returned by the continuation has completed.
     */
    public <TContinuationResult> Task<TContinuationResult> continueWithTask(
            final Continuation<TResult, Task<TContinuationResult>> continuation,
            final Executor executor, final CancellationToken ct) {
        boolean completed;
        final bolts.TaskCompletionSource<TContinuationResult> tcs = new bolts.TaskCompletionSource<>();
        synchronized (lock) {
            completed = this.isCompleted();
            if (!completed) {
                // Not done yet: register the continuation to fire on completion.
                this.continuations.add(new Continuation<TResult, Void>() {
                    @Override
                    public Void then(Task<TResult> task) {
                        completeAfterTask(tcs, continuation, task, executor, ct);
                        return null;
                    }
                });
            }
        }
        // Already completed: schedule the continuation now, outside the lock
        // to avoid running alien code while holding it.
        if (completed) {
            completeAfterTask(tcs, continuation, this, executor, ct);
        }
        return tcs.getTask();
    }
}
public class DefaultCommunicationClientImpl {

    /**
     * Submits an asynchronous task directly to the executor.
     *
     * @param call the task to run
     * @return a Future tracking the submitted task
     */
    public Future submit(Runnable call) {
        // NOTE(review): this asserts 'factory' is non-null but then uses
        // 'executor' — verify whether the guard was meant for 'executor'.
        Assert.notNull(this.factory, "No factory specified");
        return executor.submit(call);
    }
}
public class DistributedStoreManager { protected void sleepAfterWrite ( StoreTransaction txh , MaskedTimestamp mustPass ) throws BackendException { } }
assert mustPass . getDeletionTime ( times ) < mustPass . getAdditionTime ( times ) ; try { times . sleepPast ( mustPass . getAdditionTimeInstant ( times ) ) ; } catch ( InterruptedException e ) { throw new PermanentBackendException ( "Unexpected interrupt" , e ) ; }
public class DFSOutputStream {

    /**
     * Closes this output stream and releases any system
     * resources associated with this stream.
     *
     * <p>Flushes buffered data, marks the last packet of the current block,
     * waits for all acks, shuts down the streamer threads and sockets, and
     * finally tells the namenode the file is complete. The stream is marked
     * closed even if an exception escapes (see the finally block).
     */
    private synchronized void closeInternal() throws IOException {
        dfsClient.checkOpen();
        isClosed();
        try {
            eventStartWrite();
            flushBuffer(true, false); // flush from all upper layers
            eventCloseAfterFlushBuffer();
            // Mark that this packet is the last packet in block.
            // If there are no outstanding packets and the last packet
            // was not the last one in the current block, then create a
            // packet with empty payload.
            synchronized (dataQueue) {
                if (currentPacket == null && bytesCurBlock != 0) {
                    WritePacketClientProfile pktProfile = null;
                    if (getProfileData() != null) {
                        pktProfile = getProfileData().getWritePacketClientProfile();
                    }
                    currentPacket = DFSOutputStreamPacketFactory.getPacket(DFSOutputStream.this,
                            ifPacketIncludeVersion(), getPacketVersion(), packetSize,
                            chunksPerPacket, bytesCurBlock, pktProfile);
                }
                if (currentPacket != null) {
                    currentPacket.lastPacketInBlock = true;
                }
            }
            flushInternal(); // flush all data to Datanodes
            isClosed(); // check to see if flushInternal had any exceptions
            closed = true; // allow closeThreads() to showdown threads
            closeThreads();
            // Tear down the block streams and any open sockets.
            synchronized (dataQueue) {
                if (blockStream != null) {
                    blockStream.writeInt(0); // indicate end-of-block to datanode
                    blockStream.close();
                    blockReplyStream.close();
                }
                if (s != null) {
                    for (int i = 0; i < s.length; i++) {
                        s[i].close();
                    }
                    s = null;
                }
            }
            streamer = null;
            blockStream = null;
            blockReplyStream = null;
            eventCloseReceivedAck();
            // Tell the namenode the file is complete.
            dfsClient.closeFile(src, lastBlkOffset, getLastBlock());
            eventEndClose();
        } finally {
            closed = true;
        }
    }
}
public class ClassPathScanHandler { /** * 扫描包 * @ param basePackage 基础包 * @ param recursive 是否递归搜索子包 * @ return Set */ public Set < Class < ? > > getPackageAllClasses ( String basePackage , boolean recursive ) { } }
Set < Class < ? > > classes = new LinkedHashSet < Class < ? > > ( ) ; String packageName = basePackage ; if ( packageName . endsWith ( "." ) ) { packageName = packageName . substring ( 0 , packageName . lastIndexOf ( '.' ) ) ; } String package2Path = packageName . replace ( '.' , '/' ) ; Enumeration < URL > dirs ; try { dirs = Thread . currentThread ( ) . getContextClassLoader ( ) . getResources ( package2Path ) ; while ( dirs . hasMoreElements ( ) ) { URL url = dirs . nextElement ( ) ; String protocol = url . getProtocol ( ) ; if ( "file" . equals ( protocol ) ) { String filePath = URLDecoder . decode ( url . getFile ( ) , "UTF-8" ) ; doScanPackageClassesByFile ( classes , packageName , filePath , recursive ) ; } else if ( "jar" . equals ( protocol ) ) { doScanPackageClassesByJar ( packageName , url , recursive , classes ) ; } } } catch ( IOException e ) { throw new LtsRuntimeException ( e ) ; } return classes ;
public class DatastoreUtils { /** * Converts the given list of model objects to an array of native Entity objects . * @ param entities * the model objects to convert . * @ param entityManager * the entity manager * @ param intent * the intent of marshalling * @ return the equivalent Entity array */ static Entity [ ] toNativeEntities ( List < ? > entities , DefaultEntityManager entityManager , Marshaller . Intent intent ) { } }
Entity [ ] nativeEntities = new Entity [ entities . size ( ) ] ; for ( int i = 0 ; i < entities . size ( ) ; i ++ ) { nativeEntities [ i ] = ( Entity ) Marshaller . marshal ( entityManager , entities . get ( i ) , intent ) ; } return nativeEntities ;
public class RestEventManager {

    /**
     * {@inheritDoc}
     */
    @Override
    public Collection<Response> getResponses(String earliest) {
        // A generic type token lets the client deserialize the collection payload.
        GenericType<Collection<Response>> responseType = new GenericType<Collection<Response>>() {};
        // GET api/v1.0/responses?earliest=<earliest> with the client-application id header.
        Collection<Response> responses = target.path("api")
                .path("v1.0")
                .path("responses")
                .queryParam("earliest", earliest)
                .request()
                .header(clientApplicationIdName, clientApplicationIdValue)
                .get(responseType); // make request
        return responses;
    }
}
public class DefaultGroovyMethods { /** * Implements the setAt ( int idx ) method for primitive type arrays . * @ param self an object * @ param idx the index of interest * @ param newValue the new value to be put into the index of interest * @ return the added value * @ since 1.5.0 */ protected static Object primitiveArrayPut ( Object self , int idx , Object newValue ) { } }
Array . set ( self , normaliseIndex ( idx , Array . getLength ( self ) ) , newValue ) ; return newValue ;
public class FullyQualifiedNameImpl {

    /**
     * Sets the fully qualified name and, when notification is required, emits
     * a SET notification carrying the old and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setFQName(String newFQName) {
        String oldFQName = fqName;
        fqName = newFQName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    AfplibPackage.FULLY_QUALIFIED_NAME__FQ_NAME, oldFQName, fqName));
    }
}
public class DULogEntry { /** * Removes the given attribute from the set of managed attributes . * @ param attribute Attribute to remove . * @ return if the set of managed attributes was modified ; < br > * < code > false < / code > otherwise . * @ throws LockingException if the corresponding field is locked < br > * and the given attribute is not already contained in the set of * managed attributes . */ public boolean removeDataAttribute ( DataAttribute attribute ) throws LockingException { } }
if ( isFieldLocked ( EntryField . DATA ) ) { if ( dataUsage . containsKey ( attribute ) ) { throw new LockingException ( EntryField . DATA ) ; } return false ; } else { return dataUsage . remove ( attribute ) != null ; }
public class TwitterImpl {

    /**
     * Saved Searches Resources
     */
    @Override
    public ResponseList<SavedSearch> getSavedSearches() throws TwitterException {
        // GET saved_searches/list.json and map the HTTP response to model objects.
        return factory.createSavedSearchList(get(conf.getRestBaseURL() + "saved_searches/list.json"));
    }
}
public class hqlParser {

    /**
     * ANTLR-generated rule method for:
     * hql.g:356:1: ascendingOrDescending : ( (a= 'asc' | a= 'ascending' ) -> ^( ASCENDING[$a.getText()] ) | (d= 'desc' | d= 'descending' ) -> ^( DESCENDING[$d.getText()] ) );
     *
     * <p>NOTE(review): generated parser code — do not hand-edit; regenerate
     * from the grammar (hql.g) instead.
     */
    public final hqlParser.ascendingOrDescending_return ascendingOrDescending() throws RecognitionException {
        hqlParser.ascendingOrDescending_return retval = new hqlParser.ascendingOrDescending_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        Token a = null;
        Token d = null;
        CommonTree a_tree = null;
        CommonTree d_tree = null;
        RewriteRuleTokenStream stream_134 = new RewriteRuleTokenStream(adaptor, "token 134");
        RewriteRuleTokenStream stream_133 = new RewriteRuleTokenStream(adaptor, "token 133");
        RewriteRuleTokenStream stream_DESCENDING = new RewriteRuleTokenStream(adaptor, "token DESCENDING");
        RewriteRuleTokenStream stream_ASCENDING = new RewriteRuleTokenStream(adaptor, "token ASCENDING");
        try {
            // hql.g:357:2: ( (a= 'asc' | a= 'ascending' ) -> ^( ASCENDING[$a.getText()] ) | (d= 'desc' | d= 'descending' ) -> ^( DESCENDING[$d.getText()] ) )
            int alt48 = 2;
            int LA48_0 = input.LA(1);
            if ((LA48_0 == ASCENDING || LA48_0 == 133)) {
                alt48 = 1;
            } else if ((LA48_0 == DESCENDING || LA48_0 == 134)) {
                alt48 = 2;
            } else {
                NoViableAltException nvae = new NoViableAltException("", 48, 0, input);
                throw nvae;
            }
            switch (alt48) {
                case 1:
                    // hql.g:357:4: (a= 'asc' | a= 'ascending' )
                    {
                        // hql.g:357:4: (a= 'asc' | a= 'ascending' )
                        int alt46 = 2;
                        int LA46_0 = input.LA(1);
                        if ((LA46_0 == ASCENDING)) {
                            alt46 = 1;
                        } else if ((LA46_0 == 133)) {
                            alt46 = 2;
                        } else {
                            NoViableAltException nvae = new NoViableAltException("", 46, 0, input);
                            throw nvae;
                        }
                        switch (alt46) {
                            case 1:
                                // hql.g:357:6: a= 'asc'
                                {
                                    a = (Token) match(input, ASCENDING, FOLLOW_ASCENDING_in_ascendingOrDescending1717);
                                    stream_ASCENDING.add(a);
                                }
                                break;
                            case 2:
                                // hql.g:357:16: a= 'ascending'
                                {
                                    a = (Token) match(input, 133, FOLLOW_133_in_ascendingOrDescending1723);
                                    stream_133.add(a);
                                }
                                break;
                        }
                        // AST REWRITE
                        // elements:
                        // token labels:
                        // rule labels: retval
                        // token list labels:
                        // rule list labels:
                        // wildcard labels:
                        retval.tree = root_0;
                        RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.getTree() : null);
                        root_0 = (CommonTree) adaptor.nil();
                        // 358:3: -> ^( ASCENDING[$a.getText()] )
                        {
                            // hql.g:358:6: ^( ASCENDING[$a.getText()] )
                            {
                                CommonTree root_1 = (CommonTree) adaptor.nil();
                                root_1 = (CommonTree) adaptor.becomeRoot(adaptor.create(ASCENDING, a.getText()), root_1);
                                adaptor.addChild(root_0, root_1);
                            }
                        }
                        retval.tree = root_0;
                    }
                    break;
                case 2:
                    // hql.g:359:4: (d= 'desc' | d= 'descending' )
                    {
                        // hql.g:359:4: (d= 'desc' | d= 'descending' )
                        int alt47 = 2;
                        int LA47_0 = input.LA(1);
                        if ((LA47_0 == DESCENDING)) {
                            alt47 = 1;
                        } else if ((LA47_0 == 134)) {
                            alt47 = 2;
                        } else {
                            NoViableAltException nvae = new NoViableAltException("", 47, 0, input);
                            throw nvae;
                        }
                        switch (alt47) {
                            case 1:
                                // hql.g:359:6: d= 'desc'
                                {
                                    d = (Token) match(input, DESCENDING, FOLLOW_DESCENDING_in_ascendingOrDescending1743);
                                    stream_DESCENDING.add(d);
                                }
                                break;
                            case 2:
                                // hql.g:359:17: d= 'descending'
                                {
                                    d = (Token) match(input, 134, FOLLOW_134_in_ascendingOrDescending1749);
                                    stream_134.add(d);
                                }
                                break;
                        }
                        // AST REWRITE
                        // elements:
                        // token labels:
                        // rule labels: retval
                        // token list labels:
                        // rule list labels:
                        // wildcard labels:
                        retval.tree = root_0;
                        RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.getTree() : null);
                        root_0 = (CommonTree) adaptor.nil();
                        // 360:3: -> ^( DESCENDING[$d.getText()] )
                        {
                            // hql.g:360:6: ^( DESCENDING[$d.getText()] )
                            {
                                CommonTree root_1 = (CommonTree) adaptor.nil();
                                root_1 = (CommonTree) adaptor.becomeRoot(adaptor.create(DESCENDING, d.getText()), root_1);
                                adaptor.addChild(root_0, root_1);
                            }
                        }
                        retval.tree = root_0;
                    }
                    break;
            }
            retval.stop = input.LT(-1);
            retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
            retval.tree = (CommonTree) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
            // do for sure before leaving
        }
        return retval;
    }
}
public class Operators {

    /**
     * Applies a binary operator to two double operands.
     *
     * <p>Returns Object covering both Double and Boolean return types. For
     * PLUS, MINUS, STAR and POWER, a result that would overflow breaks out of
     * the switch so the exact BigDecimal fallback below computes it instead.
     *
     * @param lhs  the left operand
     * @param rhs  the right operand
     * @param kind the operator token kind
     * @return the result of the operation
     * @throws UtilEvalError on an attempt to shift floating-point values
     */
    static Object doubleBinaryOperation(double lhs, double rhs, int kind) throws UtilEvalError {
        switch (kind) {
            // arithmetic — each guard 'break's on potential overflow so the
            // BigDecimal fallback after the switch can take over
            case PLUS:
                if (lhs > 0d && (Double.MAX_VALUE - lhs) < rhs)
                    break;
                return lhs + rhs;
            case MINUS:
                if (lhs < 0d && (-Double.MAX_VALUE - lhs) > -rhs)
                    break;
                return lhs - rhs;
            case STAR:
                if (lhs != 0 && Double.MAX_VALUE / lhs < rhs)
                    break;
                return lhs * rhs;
            case SLASH:
                return lhs / rhs;
            case MOD:
            case MODX:
                return lhs % rhs;
            case POWER:
            case POWERX:
                double check = Math.pow(lhs, rhs);
                if (Double.isInfinite(check))
                    break;
                return check;
            // can't shift floating-point values
            case LSHIFT:
            case LSHIFTX:
            case RSIGNEDSHIFT:
            case RSIGNEDSHIFTX:
            case RUNSIGNEDSHIFT:
            case RUNSIGNEDSHIFTX:
                throw new UtilEvalError("Can't shift floatingpoint values");
        }
        // Reached via an overflow 'break' above, or an unlisted operator kind.
        if (OVERFLOW_OPS.contains(kind))
            return bigDecimalBinaryOperation(BigDecimal.valueOf(lhs), BigDecimal.valueOf(rhs), kind);
        throw new InterpreterError("Unimplemented binary double operator");
    }
}
public class Shape { /** * Check if the shape passed is entirely contained within * this shape . * @ param other The other shape to test against this one * @ return True if the other shape supplied is entirely contained * within this one . */ public boolean contains ( Shape other ) { } }
if ( other . intersects ( this ) ) { return false ; } for ( int i = 0 ; i < other . getPointCount ( ) ; i ++ ) { float [ ] pt = other . getPoint ( i ) ; if ( ! contains ( pt [ 0 ] , pt [ 1 ] ) ) { return false ; } } return true ;
public class MapsInner {

    /**
     * Creates or updates an integration account map.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param mapName The integration account map name.
     * @param map The integration account map.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the IntegrationAccountMapInner object if successful.
     */
    public IntegrationAccountMapInner createOrUpdate(String resourceGroupName, String integrationAccountName,
            String mapName, IntegrationAccountMapInner map) {
        // Synchronous facade: block on the async call and unwrap the response body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, integrationAccountName, mapName, map)
                .toBlocking().single().body();
    }
}
public class UnionNodeImpl {

    /**
     * Lifts bindings shared by the children's construction nodes above the union.
     *
     * <p>Has at least two children.
     */
    private IQTree liftBindingFromLiftedChildren(ImmutableList<IQTree> liftedChildren,
            VariableGenerator variableGenerator, IQProperties currentIQProperties) {
        /*
         * Cannot lift anything if some children do not have a construction node
         */
        if (liftedChildren.stream().anyMatch(c -> !(c.getRootNode() instanceof ConstructionNode)))
            return iqFactory.createNaryIQTree(this, liftedChildren, currentIQProperties.declareLifted());
        // Merge the substitutions of all the children's construction nodes.
        ImmutableSubstitution<ImmutableTerm> mergedSubstitution = mergeChildSubstitutions(projectedVariables,
                liftedChildren.stream()
                        .map(c -> (ConstructionNode) c.getRootNode())
                        .map(ConstructionNode::getSubstitution)
                        .collect(ImmutableCollectors.toList()),
                variableGenerator);
        // Nothing shared between the children: keep the tree as-is.
        if (mergedSubstitution.isEmpty()) {
            return iqFactory.createNaryIQTree(this, liftedChildren, currentIQProperties.declareLifted());
        }
        // Build a new construction node above the union carrying the merged
        // substitution, and update each child accordingly.
        ConstructionNode newRootNode = iqFactory.createConstructionNode(projectedVariables, mergedSubstitution);
        ImmutableSet<Variable> unionVariables = newRootNode.getChildVariables();
        UnionNode newUnionNode = iqFactory.createUnionNode(unionVariables);
        NaryIQTree unionIQ = iqFactory.createNaryIQTree(newUnionNode,
                liftedChildren.stream()
                        .map(c -> (UnaryIQTree) c)
                        .map(c -> updateChild(c, mergedSubstitution, unionVariables))
                        .collect(ImmutableCollectors.toList()));
        return iqFactory.createUnaryIQTree(newRootNode, unionIQ);
    }
}
public class VisitorHelper { /** * Return the type descriptor for the given type name . * @ param typeName The full qualified name of the type ( e . g . java . lang . Object ) . */ TypeCache . CachedType resolveType ( String fullQualifiedName , TypeCache . CachedType < ? extends ClassFileDescriptor > dependentType ) { } }
TypeCache . CachedType cachedType = getTypeResolver ( ) . resolve ( fullQualifiedName , scannerContext ) ; if ( ! dependentType . equals ( cachedType ) ) { dependentType . addDependency ( cachedType . getTypeDescriptor ( ) ) ; } return cachedType ;
public class InMemoryRateLimiterRegistry {

    /**
     * {@inheritDoc}
     */
    @Override
    public RateLimiter rateLimiter(String name, String configName) {
        // Lazily create and cache the limiter under 'name'; a missing named
        // configuration surfaces as a ConfigurationNotFoundException.
        return computeIfAbsent(name, () -> RateLimiter.of(name,
                getConfiguration(configName)
                        .orElseThrow(() -> new ConfigurationNotFoundException(configName))));
    }
}
public class CmsConfigurationManager {

    /**
     * Creates the XML document build from the provided configuration.<p>
     *
     * @param configuration the configuration to build the XML for
     * @return the XML document build from the provided configuration
     */
    public Document generateXml(I_CmsXmlConfiguration configuration) {
        // create a new document
        Document result = DocumentHelper.createDocument();
        // set the document type (DTD location is supplied by the configuration)
        DOMDocumentType docType = new DOMDocumentType();
        docType.setElementName(N_ROOT);
        docType.setSystemID(configuration.getDtdUrlPrefix() + configuration.getDtdFilename());
        result.setDocType(docType);
        Element root = result.addElement(N_ROOT);
        // start the XML generation
        configuration.generateXml(root);
        // return the resulting document
        return result;
    }
}
public class StrSubstitutor { /** * Replaces all the occurrences of variables with their matching values * from the resolver using the given source builder as a template . * The builder is not altered by this method . * @ param source the builder to use as a template , not changed , null returns null * @ return the result of the replace operation */ public String replace ( final StrBuilder source ) { } }
if ( source == null ) { return null ; } final StrBuilder buf = new StrBuilder ( source . length ( ) ) . append ( source ) ; substitute ( buf , 0 , buf . length ( ) ) ; return buf . toString ( ) ;
public class DiagnosticsInner {

    /**
     * Get Diagnostics Category.
     * Get Diagnostics Category.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param siteName Site Name
     * @param diagnosticCategory Diagnostic Category
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the DiagnosticCategoryInner object
     */
    public Observable<ServiceResponse<DiagnosticCategoryInner>> getSiteDiagnosticCategoryWithServiceResponseAsync(
            String resourceGroupName, String siteName, String diagnosticCategory) {
        // Fail fast on any missing required parameter before issuing the request.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (siteName == null) {
            throw new IllegalArgumentException("Parameter siteName is required and cannot be null.");
        }
        if (diagnosticCategory == null) {
            throw new IllegalArgumentException("Parameter diagnosticCategory is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call and map the raw retrofit response to a typed
        // service response, funnelling any mapping failure into the Observable.
        return service.getSiteDiagnosticCategory(resourceGroupName, siteName, diagnosticCategory,
                this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(),
                this.client.userAgent())
                .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DiagnosticCategoryInner>>>() {
                    @Override
                    public Observable<ServiceResponse<DiagnosticCategoryInner>> call(Response<ResponseBody> response) {
                        try {
                            ServiceResponse<DiagnosticCategoryInner> clientResponse = getSiteDiagnosticCategoryDelegate(response);
                            return Observable.just(clientResponse);
                        } catch (Throwable t) {
                            return Observable.error(t);
                        }
                    }
                });
    }
}
public class ServerFilter {

    /**
     * Method allow to find servers with specified power state of target servers
     *
     * @param states is a list target server power states
     * @return {@link GroupFilter}
     */
    public ServerFilter powerStates(PowerState... states) {
        allItemsNotNull(states, "Power states");
        // AND a predicate matching servers whose details' power state code is
        // among the codes of the supplied states.
        predicate = predicate.and(combine(s -> s.getDetails().getPowerState(),
                in(map(states, PowerState::getCode))));
        return this;
    }
}
public class DirectoryScanner { /** * Util method that allow to scan all files in a folder . Works in cooperation with { @ link # setFileFilter ( FileFilter ) } or * { @ link # setFileExtensionToFilter ( String . . . ) } * @ param directoryToScan * @ return */ public ArrayList < File > scanAllFiles ( File directoryToScan ) { } }
CumulativeFileScannerObserver fileScanner = new CumulativeFileScannerObserver ( ) ; DirectoryScanner directoryScanner = new DirectoryScanner ( fileScanner ) ; for ( FileScannerObserver fileScannerObserver : fileScanners ) { directoryScanner . addFileScanner ( fileScannerObserver ) ; } directoryScanner . setFileFilter ( this . fileFilter ) ; directoryScanner . scanDirectory ( directoryToScan ) ; return fileScanner . getResults ( ) ;
public class SourceParams {

    /**
     * Create parameters necessary to create a SEPA debit source
     *
     * @param name The full name of the account holder.
     * @param iban The IBAN number for the bank account that you wish to debit.
     * @param email The full email address of the owner (optional).
     * @param addressLine1 The first line of the owner's address (optional).
     * @param city The city of the owner's address.
     * @param postalCode The postal code of the owner's address.
     * @param country The ISO-3166 2-letter country code of the owner's address.
     * @return a {@link SourceParams} object that can be used to create a SEPA debit source
     * @see <a href="https://stripe.com/docs/sources/sepa-debit">https://stripe.com/docs/sources/sepa-debit</a>
     */
    @NonNull
    public static SourceParams createSepaDebitParams(
            @NonNull String name,
            @NonNull String iban,
            @Nullable String email,
            @Nullable String addressLine1,
            @Nullable String city,
            @Nullable String postalCode,
            @Nullable @Size(2) String country) {
        // SEPA debit sources are always denominated in euros.
        final SourceParams params = new SourceParams().setType(Source.SEPA_DEBIT).setCurrency(Source.EURO);
        // NOTE(review): locals declared as AbstractMap — the Map interface would
        // be the idiomatic declared type; confirm java.util.Map is imported
        // before changing.
        final AbstractMap<String, Object> address = new HashMap<>();
        address.put(FIELD_LINE_1, addressLine1);
        address.put(FIELD_CITY, city);
        address.put(FIELD_POSTAL_CODE, postalCode);
        address.put(FIELD_COUNTRY, country);
        final AbstractMap<String, Object> ownerMap = new HashMap<>();
        ownerMap.put(FIELD_NAME, name);
        ownerMap.put(FIELD_EMAIL, email);
        ownerMap.put(FIELD_ADDRESS, address);
        return params.setOwner(ownerMap).setApiParameterMap(createSimpleMap(FIELD_IBAN, iban));
    }
}
public class ACE { /** * A GUID ( 16 bytes ) that identifies the type of child object that can inherit the ACE . Inheritance is also * controlled by the inheritance flags in the ACE _ HEADER , as well as by any protection against inheritance placed on * the child objects . This field is valid only if the ACE _ INHERITED _ OBJECT _ TYPE _ PRESENT bit is set in the Flags * member . Otherwise , the InheritedObjectType field is ignored . * @ return InheritedObjectType ; null if not available . */ public byte [ ] getInheritedObjectType ( ) { } }
return this . inheritedObjectType == null || this . inheritedObjectType . length == 0 ? null : Arrays . copyOf ( this . inheritedObjectType , this . inheritedObjectType . length ) ;
public class ProjectApi { /** * Get a Stream of project users . This Stream includes all project members and all users assigned to project parent groups . * < pre > < code > GET / projects / : id / users < / code > < / pre > * @ param projectIdOrPath projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance , required * @ return a Stream of the users belonging to the specified project and its parent groups * @ throws GitLabApiException if any exception occurs */ public Stream < ProjectUser > getProjectUsersStream ( Object projectIdOrPath ) throws GitLabApiException { } }
return ( getProjectUsers ( projectIdOrPath , null , getDefaultPerPage ( ) ) . stream ( ) ) ;
public class OptimizerNode { /** * Computes all the interesting properties that are relevant to this node . The interesting * properties are a union of the interesting properties on each outgoing connection . * However , if two interesting properties on the outgoing connections overlap , * the interesting properties will occur only once in this set . For that , this * method deduplicates and merges the interesting properties . * This method returns copies of the original interesting properties objects and * leaves the original objects , contained by the connections , unchanged . */ public void computeUnionOfInterestingPropertiesFromSuccessors ( ) { } }
List < DagConnection > conns = getOutgoingConnections ( ) ; if ( conns . size ( ) == 0 ) { // no incoming , we have none ourselves this . intProps = new InterestingProperties ( ) ; } else { this . intProps = conns . get ( 0 ) . getInterestingProperties ( ) . clone ( ) ; for ( int i = 1 ; i < conns . size ( ) ; i ++ ) { this . intProps . addInterestingProperties ( conns . get ( i ) . getInterestingProperties ( ) ) ; } } this . intProps . dropTrivials ( ) ;
public class DANameFactory { /** * Crée un objet DAName à partir d ' un DATypeKind représentant un type primitif * TOIMPROVE : DAName for each DATypeKind with flag primitive = true can be * cached into a Map and used as constants * @ param kind un { @ link DATypeKind } primitif * @ return a { @ link fr . javatronic . damapping . processor . model . impl . DANameImpl } * @ throws IllegalArgumentException si { @ code kink . isPrimitive ( ) } retourne false */ @ Nonnull public static DAName fromPrimitiveKind ( @ Nonnull DATypeKind kind ) { } }
checkArgument ( kind . isPrimitive ( ) ) ; return from ( kind . name ( ) . toLowerCase ( Locale . US ) ) ;
public class AbstractPropositionDefinitionCheckedVisitor { /** * Processes a collection of proposition definitions . * @ param propositionDefinitions * a { @ link Collection < PropositionDefinition > } . * @ throws ProtempaException * if an error occurs . * @ see org . protempa . PropositionDefinitionCheckedVisitor # visit ( java . util . Collection ) */ @ Override public void visit ( Collection < ? extends PropositionDefinition > propositionDefinitions ) throws ProtempaException { } }
for ( PropositionDefinition def : propositionDefinitions ) { def . acceptChecked ( this ) ; }
public class Table {
    /**
     * Moves the data from table to table.
     * The colindex argument is the index of the column that was added or removed.
     * The adjust argument is {-1 | 0 | +1}.
     */
    void moveData(Session session, Table from, int colindex, int adjust) { } }
// Review notes (grounded in the code below):
// - adjust >= 0 with a valid colindex: the target column's default value seeds the new
//   slot; for adjust == 0 each row's old value is instead converted to the new column's
//   type before copying.
// - Every source row is re-materialized into this table's store and re-indexed; any
//   existing rowAction on the source row is duplicated onto the new row.
// - On any failure the persistent store is released and the error rethrown, wrapped as
//   HsqlException if it is not one already.
// - NOTE(review): timeToLive is carried over from the source table after the copy.
Object colvalue = null;
ColumnSchema column = null;
if (adjust >= 0 && colindex != -1) {
    column = getColumn(colindex);
    colvalue = column.getDefaultValue(session);
}
PersistentStore store = session.sessionData.getRowStore(this);
try {
    RowIterator it = from.rowIterator(session);
    while (it.hasNext()) {
        Row row = it.getNextRow();
        Object[] o = row.getData();
        Object[] data = getEmptyRowData();
        if (adjust == 0 && colindex != -1) {
            // Column retyped in place: convert the old value to the new column's type.
            colvalue = column.getDataType().convertToType(session, o[colindex], from.getColumn(colindex).getDataType());
        }
        ArrayUtil.copyAdjustArray(o, data, colvalue, colindex, adjust);
        systemSetIdentityColumn(session, data);
        enforceRowConstraints(session, data);
        // get object without RowAction
        Row newrow = (Row) store.getNewCachedObject(null, data);
        if (row.rowAction != null) {
            newrow.rowAction = row.rowAction.duplicate(newrow.getPos());
        }
        store.indexRow(null, newrow);
    }
    this.timeToLive = from.timeToLive;
} catch (Throwable t) {
    store.release();
    if (t instanceof HsqlException) {
        throw (HsqlException) t;
    }
    throw new HsqlException(t, "", 0);
}
public class Timer {
    /**
     * Creates a work item and puts it on the work queue for requesting a timeout to be
     * started.
     *
     * @param timeoutTime how long this timeout is for, in milliseconds
     * @param _callback the routine to be called when/if the timeout triggers
     * @param _future attachment to be passed to the callback routine
     * @return the work item that was queued to the timer task; needed if the timeout is
     *         to be cancelled later
     */
    public TimerWorkItem createTimeoutRequest(long timeoutTime, TimerCallback _callback, IAbstractAsyncFuture _future) { } }
// Review notes:
// - The item captures the future's current reuse count so a recycled future can be
//   distinguished from the one this timeout was created for.
// - The item goes to whichever of the two request queues the Timer thread is NOT
//   currently draining (queueToUse); with a single-queue configuration (numQueues == 1)
//   queue 1 is always used. Only the add itself happens under the per-queue lock.
TimerWorkItem wi = new TimerWorkItem(timeoutTime, _callback, _future, _future.getReuseCount());
_future.setTimeoutWorkItem(wi);
// put this to the Timer's work queue. Use the queue that the Timer
// thread is not using.
if ((this.queueToUse == 1) || (numQueues == 1)) {
    synchronized (this.requestQueue1) {
        // add the request to the Timer work queue
        this.requestQueue1.add(wi);
    }
} else {
    synchronized (this.requestQueue2) {
        // add the request to the Timer work queue
        this.requestQueue2.add(wi);
    }
}
return wi;
public class UpdateTrafficAdjustments { /** * Runs the example . * @ param adManagerServices the services factory . * @ param session the session . * @ param adjustmentId the ID of the adjustment to update . * @ throws ApiException if the API request failed with one or more service errors . * @ throws RemoteException if the API request failed due to other errors . */ public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session , long adjustmentId ) throws RemoteException { } }
// Get the adjustment service . AdjustmentServiceInterface adjustmentService = adManagerServices . get ( session , AdjustmentServiceInterface . class ) ; // Create a statement to only select a single traffic forecast adjustment by ID . StatementBuilder statementBuilder = new StatementBuilder ( ) . where ( "id = :id" ) . orderBy ( "id ASC" ) . limit ( 1 ) . withBindVariableValue ( "id" , adjustmentId ) ; // Get the forecast adjustment . TrafficForecastAdjustmentPage page = adjustmentService . getTrafficAdjustmentsByStatement ( statementBuilder . toStatement ( ) ) ; TrafficForecastAdjustment adjustment = Iterables . getOnlyElement ( Arrays . asList ( page . getResults ( ) ) ) ; // Create a new historical adjustment segment for New Years Day . HistoricalAdjustment newYearsAdjustment = new HistoricalAdjustment ( ) ; Date lastNewYearsDay = new Date ( new org . joda . time . DateTime ( ) . getYear ( ) , 1 , 1 ) ; DateRange referenceDateRange = new DateRange ( ) ; referenceDateRange . setStartDate ( lastNewYearsDay ) ; referenceDateRange . setEndDate ( lastNewYearsDay ) ; newYearsAdjustment . setReferenceDateRange ( referenceDateRange ) ; Date nextNewYearsDay = new Date ( new org . joda . time . DateTime ( ) . plusYears ( 1 ) . getYear ( ) , 1 , 1 ) ; DateRange targetDateRange = new DateRange ( ) ; targetDateRange . setStartDate ( nextNewYearsDay ) ; targetDateRange . setEndDate ( nextNewYearsDay ) ; newYearsAdjustment . setTargetDateRange ( targetDateRange ) ; newYearsAdjustment . setMilliPercentMultiplier ( 110000L ) ; TrafficForecastAdjustmentSegment segment = new TrafficForecastAdjustmentSegment ( ) ; segment . setBasisType ( BasisType . HISTORICAL ) ; segment . setHistoricalAdjustment ( newYearsAdjustment ) ; // Add the historical segment to the adjustment List < TrafficForecastAdjustmentSegment > segments = Arrays . asList ( adjustment . getForecastAdjustmentSegments ( ) ) ; segments . add ( segment ) ; adjustment . setForecastAdjustmentSegments ( segments . 
toArray ( new TrafficForecastAdjustmentSegment [ ] { } ) ) ; // Update the traffic adjustment on the server . TrafficForecastAdjustment [ ] adjustments = adjustmentService . updateTrafficAdjustments ( new TrafficForecastAdjustment [ ] { adjustment } ) ; for ( TrafficForecastAdjustment updatedAdjustment : adjustments ) { System . out . printf ( "Traffic forecast adjustment with ID %d and %d segments was updated.%n" , updatedAdjustment . getId ( ) , updatedAdjustment . getForecastAdjustmentSegments ( ) . length ) ; }
public class SolverWorldModelInterface { /** * Deletes an Identifier and all of its Attributes from the world model . All * data for the Identifier will be removed permanently . Callers may also * wish to consider expiring the Identifier instead . * @ param identifier * the Identifier to delete * @ return { @ code true } if the message was sent successfully */ public boolean deleteId ( final String identifier ) { } }
if ( identifier == null ) { log . error ( "Unable to delete a null Identifier value." ) ; return false ; } if ( this . originString == null ) { log . error ( "Origin has not been set. Cannot delete Identifiers without a valid origin." ) ; return false ; } DeleteIdentifierMessage message = new DeleteIdentifierMessage ( ) ; message . setOrigin ( this . originString ) ; message . setId ( identifier ) ; this . session . write ( message ) ; log . debug ( "Sent {}" , message ) ; return true ;
public class AbstractJanitor { /** * Dump a thread * @ param t The thread * @ return The stack trace */ private String dumpQueuedThread ( Thread t ) { } }
StringBuilder sb = new StringBuilder ( ) ; // Header sb = sb . append ( "Queued thread: " ) ; sb = sb . append ( t . getName ( ) ) ; sb = sb . append ( newLine ) ; // Body StackTraceElement [ ] stes = SecurityActions . getStackTrace ( t ) ; if ( stes != null ) { for ( StackTraceElement ste : stes ) { sb = sb . append ( " " ) ; sb = sb . append ( ste . getClassName ( ) ) ; sb = sb . append ( ":" ) ; sb = sb . append ( ste . getMethodName ( ) ) ; sb = sb . append ( ":" ) ; sb = sb . append ( ste . getLineNumber ( ) ) ; sb = sb . append ( newLine ) ; } } return sb . toString ( ) ;
public class CronDescriptor {
    /**
     * Provide description for hours, minutes and seconds.
     *
     * @param fields the fields to describe
     * @return the description String
     */
    public String describeHHmmss(final Map<CronFieldName, CronField> fields) { } }
// Review notes:
// - Missing HOUR/MINUTE/SECOND entries are passed to the strategy as null expressions.
// - NOTE(review): a key mapped to an explicit null value would still NPE on
//   getExpression() since containsKey is true — presumably the map never holds null
//   values; confirm with callers.
return DescriptionStrategyFactory.hhMMssInstance(
        resourceBundle,
        fields.containsKey(CronFieldName.HOUR) ? fields.get(CronFieldName.HOUR).getExpression() : null,
        fields.containsKey(CronFieldName.MINUTE) ? fields.get(CronFieldName.MINUTE).getExpression() : null,
        fields.containsKey(CronFieldName.SECOND) ? fields.get(CronFieldName.SECOND).getExpression() : null
).describe();
public class KriptonRequestBodyConverter { /** * / * ( non - Javadoc ) * @ see retrofit2 . Converter # convert ( java . lang . Object ) */ @ Override public RequestBody convert ( T value ) throws IOException { } }
Buffer buffer = new Buffer ( ) ; try { binderContext . serialize ( value , buffer . outputStream ( ) ) ; return RequestBody . create ( MEDIA_TYPE , buffer . readByteString ( ) ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; return null ; } finally { buffer . close ( ) ; }
public class NotifyQuery {
    /**
     * Executes the notify query: builds a key cursor from the bound arguments and
     * notifies the table owner for that key, then completes the result.
     * (The original javadoc held commented-out partitionHash code; preserved intent:
     * partition hash would delegate to _keyExpr when present, else return -1.)
     */
    @Override
    public void exec(Result<Object> result, Object[] args) { } }
TableKelp tableKelp = _table.getTableKelp();
RowCursor minCursor = tableKelp.cursor();
RowCursor maxCursor = tableKelp.cursor();
minCursor.clear();
maxCursor.setKeyMax();
_whereKraken.fillMinCursor(minCursor, args);
// NOTE(review): fillMaxCursor is passed minCursor, not maxCursor — maxCursor only ever
// holds setKeyMax() and is otherwise unused. This looks like a copy/paste bug, but only
// minCursor.getKey() is consumed below, so fixing it would change the notified key.
// Confirm intent before changing.
_whereKraken.fillMaxCursor(minCursor, args);
// QueryKelp whereKelp = _whereExpr.bind(args);
// XXX: binding should be with unique
EnvKelp whereKelp = new EnvKelp(_whereKelp, args);
// tableKelp.findOne(minCursor, maxCursor, whereKelp,
//     new FindDeleteResult(result));
_table.notifyOwner(minCursor.getKey());
// result.completed(null);
result.ok(null);
public class DescribeAppsRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeAppsRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller to write the bound fields to
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeAppsRequest describeAppsRequest, ProtocolMarshaller protocolMarshaller) { } }
// Generated-style marshaller: validate, then bind each request field to its
// protocol binding; any failure is wrapped, with cause, as SdkClientException.
if (describeAppsRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    protocolMarshaller.marshall(describeAppsRequest.getStackId(), STACKID_BINDING);
    protocolMarshaller.marshall(describeAppsRequest.getAppIds(), APPIDS_BINDING);
} catch (Exception e) {
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class OrderManager {
    /**
     * Execute a new Order.
     *
     * @param order the order to place
     * @return whether the order was placed; see note below
     * @throws Exception on failure
     */
    private boolean placeOrderOrderOnAPI(final BitfinexNewOrder order) throws Exception { } }
// NOTE(review): the implementation below is visibly truncated in this chunk — the
// consumer lambda and the method body are cut off mid-stream. What is visible: a
// one-shot latch plus a callback that counts the latch down when a submitted order's
// client id matches this order's client id (presumably awaited later — confirm against
// the full source).
final CountDownLatch waitLatch = new CountDownLatch(1);
final Consumer<BitfinexSubmittedOrder> ordercallback = (o) -> {
    if (Objects.equals(o.getClientId(), order.getClientId())) {
        waitLatch.countDown();
    }
public class JournalRecovery { /** * Start the recovery process * @ return the new block count of the store , or - 1 if unchanged * @ throws JournalException */ public int recover ( ) throws JournalException { } }
int newBlockCount = - 1 ; log . warn ( "[" + baseName + "] Recovery required for data store : found " + journalFiles . length + " journal file(s)" ) ; for ( int i = 0 ; i < journalFiles . length ; i ++ ) newBlockCount = recoverFromJournalFile ( journalFiles [ i ] ) ; return newBlockCount ;
public class Log4j2Helper {
    /**
     * Replaces the built-in
     * {@link org.apache.logging.log4j.core.config.PropertiesPlugin properties plugin}
     * registered in the given log4j2 context with a custom properties plugin class,
     * then forces the context to reconfigure (used by log4j2).
     *
     * @param pluginClazz the plugin class to install
     * @param context the context whose configuration should be replaced; can be
     *                obtained via LogManager.getContext()
     */
    public static void reconfigLog4j2(Class<?> pluginClazz, org.apache.logging.log4j.spi.LoggerContext context) { } }
// NOTE(review): this is a reflection hack into log4j2 internals — the private field
// names "configuration", "pluginManager" and "pluginClass" are version-dependent;
// confirm against the log4j2 version actually on the classpath.
Configuration configuration = ReflectUtil.getFieldValue(context, "configuration");
PluginManager manager = ReflectUtil.getFieldValue(configuration, "pluginManager");
PluginType<?> pluginType = manager.getPluginType("properties");
// Swap the registered plugin class for the caller-supplied one.
ReflectUtil.setFieldValue(pluginType, "pluginClass", pluginClazz);
LoggerContext loggerContext = (LoggerContext) context;
// Rebuild the configuration so the replacement plugin takes effect.
loggerContext.reconfigure();
public class TextToSpeech {
    /**
     * Add a custom word.
     *
     * Adds a single word and its translation to the specified custom voice model. Adding
     * a new translation for a word that already exists in a custom model overwrites the
     * word's existing translation. A custom model can contain no more than 20,000
     * entries. You must use credentials for the instance of the service that owns a
     * model to add a word to it.
     *
     * You can define sounds-like or phonetic translations for words, in standard IPA
     * representation or in the proprietary IBM Symbolic Phonetic Representation (SPR).
     *
     * **Note:** This method is currently a beta release.
     *
     * See: "Adding a single word to a custom model" and "Understanding customization"
     * in the IBM Cloud Text to Speech documentation.
     *
     * @param addWordOptions the {@link AddWordOptions} containing the options for the call
     * @return a {@link ServiceCall} with a response type of Void
     */
    public ServiceCall<Void> addWord(AddWordOptions addWordOptions) { } }
Validator.notNull(addWordOptions, "addWordOptions cannot be null");
// PUT /v1/customizations/{customization_id}/words/{word}
String[] pathSegments = { "v1/customizations", "words" };
String[] pathParameters = { addWordOptions.customizationId(), addWordOptions.word() };
RequestBuilder builder = RequestBuilder.put(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments, pathParameters));
// Attach the SDK analytics headers for this operation.
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("text_to_speech", "v1", "addWord");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
    builder.header(header.getKey(), header.getValue());
}
// Body: translation is required; part_of_speech is optional.
final JsonObject contentJson = new JsonObject();
contentJson.addProperty("translation", addWordOptions.translation());
if (addWordOptions.partOfSpeech() != null) {
    contentJson.addProperty("part_of_speech", addWordOptions.partOfSpeech());
}
builder.bodyJson(contentJson);
return createServiceCall(builder.build(), ResponseConverterUtils.getVoid());
public class CollisionCategoryConfig { /** * Create the collision category data from node ( should only be used to display names , as real content is * < code > null < / code > , mainly UI specific to not have dependency on { @ link MapTileCollision } ) . * @ param root The node root reference ( must not be < code > null < / code > ) . * @ return The collisions category data . * @ throws LionEngineException If unable to read node . */ public static Collection < CollisionCategory > imports ( Xml root ) { } }
Check . notNull ( root ) ; final Collection < Xml > childrenCategory = root . getChildren ( NODE_CATEGORY ) ; final Collection < CollisionCategory > categories = new ArrayList < > ( childrenCategory . size ( ) ) ; for ( final Xml node : childrenCategory ) { final Collection < Xml > childrenGroup = node . getChildren ( TileGroupsConfig . NODE_GROUP ) ; final Collection < CollisionGroup > groups = new ArrayList < > ( childrenGroup . size ( ) ) ; for ( final Xml group : childrenGroup ) { final String name = group . getText ( ) ; groups . add ( new CollisionGroup ( name , new ArrayList < CollisionFormula > ( 0 ) ) ) ; } final String name = node . readString ( ATT_NAME ) ; final Axis axis = Axis . valueOf ( node . readString ( ATT_AXIS ) ) ; final int x = node . readInteger ( ATT_X ) ; final int y = node . readInteger ( ATT_Y ) ; final boolean glue = node . readBoolean ( true , ATT_GLUE ) ; final CollisionCategory category = new CollisionCategory ( name , axis , x , y , glue , groups ) ; categories . add ( category ) ; } return categories ;
public class Parcels { /** * Wraps the input ` @ Parcel ` annotated class with a ` Parcelable ` wrapper . * @ throws ParcelerRuntimeException if there was an error looking up the wrapped Parceler $ Parcels class . * @ param inputType specific type to parcel * @ param input Parcel * @ return Parcelable wrapper */ @ SuppressWarnings ( "unchecked" ) public static < T > Parcelable wrap ( Class < ? extends T > inputType , T input ) { } }
if ( input == null ) { return null ; } ParcelableFactory parcelableFactory = REPOSITORY . get ( inputType ) ; return parcelableFactory . buildParcelable ( input ) ;
public class AwsSecurityFindingFilters { /** * The parent process ID . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setProcessParentPid ( java . util . Collection ) } or { @ link # withProcessParentPid ( java . util . Collection ) } if you * want to override the existing values . * @ param processParentPid * The parent process ID . * @ return Returns a reference to this object so that method calls can be chained together . */ public AwsSecurityFindingFilters withProcessParentPid ( NumberFilter ... processParentPid ) { } }
if ( this . processParentPid == null ) { setProcessParentPid ( new java . util . ArrayList < NumberFilter > ( processParentPid . length ) ) ; } for ( NumberFilter ele : processParentPid ) { this . processParentPid . add ( ele ) ; } return this ;
public class VersionHistoryImpl {
    /**
     * {@inheritDoc}
     */
    public void addVersionLabel(String versionName, String label, boolean moveLabel) throws VersionException, RepositoryException { } }
// Review notes:
// - The label is resolved to an internal QName and compared against the existing label
//   properties; if it already exists, it is either moved (removed first) or the call
//   fails with VersionException, depending on moveLabel.
// - The new label is stored as a REFERENCE property pointing at the version node and
//   persisted immediately through the transact manager.
// - NOTE(review): the exception message "is already exists" is ungrammatical; left
//   unchanged here since it is a runtime string callers might match on.
checkValid();
JCRName jcrLabelName = locationFactory.parseJCRName(label);
InternalQName labelQName = jcrLabelName.getInternalName();
NodeData labels = getVersionLabelsData();
List<PropertyData> labelsList = dataManager.getChildPropertiesData(labels);
for (PropertyData prop : labelsList) {
    if (prop.getQPath().getName().equals(labelQName)) {
        // label is found
        if (moveLabel) {
            removeVersionLabel(label);
            break;
        }
        throw new VersionException("Label " + label + " is already exists and moveLabel=false");
    }
}
NodeData versionData = getVersionData(versionName);
SessionChangesLog changesLog = new SessionChangesLog(session);
PropertyData labelData = TransientPropertyData.createPropertyData(labels, labelQName, PropertyType.REFERENCE, false, new TransientValueData(versionData.getIdentifier()));
changesLog.add(ItemState.createAddedState(labelData));
dataManager.getTransactManager().save(changesLog);
public class ProcessTask {
    /**
     * Implements the callable interface.
     *
     * @return this object
     * @throws Exception if there is an exception; note that any UpdateExceptions are
     *         simply recorded in the task's exception field rather than thrown
     */
    @Override
    public ProcessTask call() throws Exception { } }
try {
    processFiles();
} catch (UpdateException ex) {
    // Record rather than propagate: callers inspect the collected exception after
    // the task completes. Other exception types still propagate.
    this.exception = ex;
} finally {
    // Always clean up settings, even on failure.
    settings.cleanup(false);
}
return this;
public class SyncMapPermission { /** * Create a SyncMapPermissionUpdater to execute update . * @ param pathServiceSid Sync Service Instance SID . * @ param pathMapSid Sync Map SID or unique name . * @ param pathIdentity Identity of the user to whom the Sync Map Permission * applies . * @ param read Read access . * @ param write Write access . * @ param manage Manage access . * @ return SyncMapPermissionUpdater capable of executing the update */ public static SyncMapPermissionUpdater updater ( final String pathServiceSid , final String pathMapSid , final String pathIdentity , final Boolean read , final Boolean write , final Boolean manage ) { } }
return new SyncMapPermissionUpdater ( pathServiceSid , pathMapSid , pathIdentity , read , write , manage ) ;
public class FilenameInputFormat {
    /**
     * Return a RecordReader which returns exactly one record: the file path from the
     * InputSplit (as both key and value).
     */
    public RecordReader<Text, Text> getRecordReader(InputSplit genericSplit, JobConf job, Reporter reporter) throws IOException { } }
reporter.setStatus(genericSplit.toString());
FileSplit split = (FileSplit) genericSplit;
final Path file = split.getPath();
// Single-record reader: the 'done' flag makes next() yield the path once, then stop.
// getPos/getProgress are constant since there is effectively nothing to read.
return new RecordReader<Text, Text>() {
    boolean done = false;
    public void close() { }
    public Text createKey() { return new Text(); }
    public Text createValue() { return new Text(); }
    public long getPos() { return 0; }
    public float getProgress() { return 0.0f; }
    public boolean next(Text key, Text value) {
        if (done) return false;
        // Both key and value carry the split's file path.
        key.set(file.toString());
        value.set(file.toString());
        done = true;
        return true;
    }
};
public class HttpUtil { /** * parse the given http query string * @ param queryString the standard http query string * @ param hasPath whether the query string contains uri * @ return the parsed json object . if the given query string is empty then an empty json object is returned . */ public static JSONObject parseQueryString ( String queryString , boolean hasPath ) { } }
JSONObject uriParameters = new JSONObject ( ) ; if ( queryString == null ) return uriParameters ; QueryStringDecoder queryStringDecoder = new QueryStringDecoder ( queryString , hasPath ) ; Map < String , List < String > > parameters = queryStringDecoder . parameters ( ) ; parameters . forEach ( ( key , values ) -> { if ( values == null || values . isEmpty ( ) ) { LOG . debug ( "空参数统一对应空字符串" ) ; uriParameters . put ( key , "" ) ; } else if ( values . size ( ) == 1 ) uriParameters . put ( key , values . get ( 0 ) ) ; else uriParameters . put ( key , values ) ; } ) ; return uriParameters ;
public class MD5Utils { /** * Gets the MD5 hashcode of an input stream . */ public static byte [ ] md5 ( InputStream in ) throws IOException { } }
MD5InputStream md5in = new MD5InputStream ( in ) ; byte [ ] trashBuffer = BufferManager . getBytes ( ) ; try { while ( md5in . read ( trashBuffer , 0 , BufferManager . BUFFER_SIZE ) != - 1 ) { // Intentional empty block } } finally { BufferManager . release ( trashBuffer , false ) ; } return md5in . hash ( ) ;
public class BackupEnginesInner { /** * The backup management servers registered to a Recovery Services vault . This returns a pageable list of servers . * @ param vaultName The name of the Recovery Services vault . * @ param resourceGroupName The name of the resource group associated with the Recovery Services vault . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; BackupEngineBaseResourceInner & gt ; object if successful . */ public PagedList < BackupEngineBaseResourceInner > get ( final String vaultName , final String resourceGroupName ) { } }
ServiceResponse < Page < BackupEngineBaseResourceInner > > response = getSinglePageAsync ( vaultName , resourceGroupName ) . toBlocking ( ) . single ( ) ; return new PagedList < BackupEngineBaseResourceInner > ( response . body ( ) ) { @ Override public Page < BackupEngineBaseResourceInner > nextPage ( String nextPageLink ) { return getNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class AccountsInner {
    /**
     * Creates the specified Data Lake Analytics account. This supplies the user with
     * computation services for Data Lake Analytics workloads.
     *
     * @param resourceGroupName the name of the Azure resource group that contains the
     *        Data Lake Analytics account the account will be associated with
     * @param name the name of the Data Lake Analytics account to create
     * @param parameters parameters supplied to the create Data Lake Analytics account operation
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<DataLakeAnalyticsAccountInner> beginCreateAsync(String resourceGroupName, String name, DataLakeAnalyticsAccountInner parameters, final ServiceCallback<DataLakeAnalyticsAccountInner> serviceCallback) { } }
// Thin adapter: wires the observable-returning overload to the callback-based API.
return ServiceFuture.fromResponse(beginCreateWithServiceResponseAsync(resourceGroupName, name, parameters), serviceCallback);
public class StateCompiler {
    /**
     * Enumerates all the fields in this state interface.
     *
     * @param clazz a subclass of AccumulatorState
     * @param fieldTypes a map of field name and Type
     * @return list of state fields. Ordering is guaranteed to be stable, and has all
     *         primitive fields at the beginning.
     */
    private static List<StateField> enumerateFields(Class<?> clazz, Map<String, Type> fieldTypes) { } }
// Review notes:
// - Fields are discovered from getter naming conventions: "getX" (any supported type,
//   except the reserved getEstimatedSize) and "isX" (boolean only).
// - NOTE(review): a method named both ways, e.g. "isX" also starting with "is" but
//   matched by neither branch, is not possible here, but a getter matched by "get"
//   whose type is unsupported fails fast via checkArgument.
ImmutableList.Builder<StateField> builder = ImmutableList.builder();
final Set<Class<?>> primitiveClasses = ImmutableSet.of(byte.class, boolean.class, long.class, double.class, int.class);
Set<Class<?>> supportedClasses = getSupportedFieldTypes();
for (Method method : clazz.getMethods()) {
    if (method.getName().equals("getEstimatedSize")) {
        continue;
    }
    if (method.getName().startsWith("get")) {
        Class<?> type = method.getReturnType();
        checkArgument(supportedClasses.contains(type), type.getName() + " is not supported");
        String name = method.getName().substring(3);
        builder.add(new StateField(name, type, getInitialValue(method), method.getName(), Optional.ofNullable(fieldTypes.get(name))));
    }
    if (method.getName().startsWith("is")) {
        Class<?> type = method.getReturnType();
        checkArgument(type == boolean.class, "Only boolean is support for 'is' methods");
        String name = method.getName().substring(2);
        builder.add(new StateField(name, type, getInitialValue(method), method.getName(), Optional.of(BOOLEAN)));
    }
}
// We need this ordering because the serializer and deserializer are on different machines, and so the ordering of fields must be stable
Ordering<StateField> ordering = new Ordering<StateField>() {
    @Override
    public int compare(StateField left, StateField right) {
        if (primitiveClasses.contains(left.getType()) && !primitiveClasses.contains(right.getType())) {
            return -1;
        }
        if (primitiveClasses.contains(right.getType()) && !primitiveClasses.contains(left.getType())) {
            return 1;
        }
        // If they're in the same category, just sort by name
        return left.getName().compareTo(right.getName());
    }
};
List<StateField> fields = ordering.sortedCopy(builder.build());
checkInterface(clazz, fields);
return fields;
public class CmsJspImageBean {
    /**
     * Returns a lazy initialized Map that provides access to ratio scaled instances of
     * this image bean.<p>
     *
     * @return a lazy initialized Map that provides access to ratio scaled instances of
     *         this image bean
     */
    public Map<String, CmsJspImageBean> getScaleRatio() { } }
// NOTE(review): unsynchronized lazy init — racing threads may each create a lazy map.
// Presumably benign if the transformer is stateless; confirm if shared across threads.
if (m_scaleRatio == null) {
    m_scaleRatio = CmsCollectionsGenericWrapper.createLazyMap(new CmsScaleRatioTransformer());
}
return m_scaleRatio;
public class EntryStream {

    /**
     * Returns a {@link StreamEx} consisting of the results of applying the
     * given function to the keys and values of this stream.
     *
     * <p>This is an <a href="package-summary.html#StreamOps">intermediate</a>
     * operation.
     *
     * @param <R> The element type of the new stream
     * @param mapper a <a href="package-summary.html#NonInterference">non-interfering</a>,
     *        <a href="package-summary.html#Statelessness">stateless</a> function to apply
     *        to the key and value of each {@link Entry} in this stream
     * @return the new stream
     */
    public <R> StreamEx<R> mapKeyValue(BiFunction<? super K, ? super V, ? extends R> mapper) {
        // toFunction adapts the (key, value) BiFunction into an Entry -> R function;
        // the explicit <R> witness keeps inference stable across javac versions.
        return this.<R>map(toFunction(mapper));
    }
}
public class Matrix4x3d { /** * Apply a translation to this matrix by translating by the given number of * units in x , y and z . * If < code > M < / code > is < code > this < / code > matrix and < code > T < / code > the translation * matrix , then the new matrix will be < code > M * T < / code > . So when * transforming a vector < code > v < / code > with the new matrix by using * < code > M * T * v < / code > , the translation will be applied first ! * In order to set the matrix to a translation transformation without post - multiplying * it , use { @ link # translation ( Vector3dc ) } . * @ see # translation ( Vector3dc ) * @ param offset * the number of units in x , y and z by which to translate * @ return this */ public Matrix4x3d translate ( Vector3dc offset ) { } }
return translate ( offset . x ( ) , offset . y ( ) , offset . z ( ) ) ;
public class JobClient {

    /**
     * Get a {@link BufferedReader} to read the taskLog by lines.
     *
     * <p>The connection is opened on a background daemon thread so that a hung
     * remote endpoint cannot block the caller indefinitely; the wait is capped
     * at 5 minutes.
     *
     * @param taskLogUrl The URL of the taskLog location
     * @return A {@link BufferedReader} to read the taskLog by lines
     * @throws IOException if the reader cannot be obtained within 5 minutes,
     *         if the opener thread fails, or if the wait is interrupted
     */
    private static BufferedReader getTaskLogReader(final URL taskLogUrl) throws IOException {
        FutureTask<BufferedReader> task = new FutureTask<BufferedReader>(new Callable<BufferedReader>() {
            @Override
            public BufferedReader call() throws IOException {
                URLConnection connection = taskLogUrl.openConnection();
                return new BufferedReader(new InputStreamReader(connection.getInputStream()));
            }
        });
        Thread thread = new Thread(task);
        thread.setDaemon(true);
        thread.start();
        BufferedReader result;
        try {
            // Fails if we cannot open the input stream for 5 minutes.
            // This prevents JobClient from hanging.
            result = task.get(5, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            // BUGFIX: restore the interrupt status so callers can still observe
            // the interruption; also stop the opener task.
            Thread.currentThread().interrupt();
            task.cancel(true);
            throw new IOException(e);
        } catch (TimeoutException e) {
            // BUGFIX: cancel the opener so the daemon thread does not linger
            // on a stuck connection after we have given up.
            task.cancel(true);
            throw new IOException(e);
        } catch (ExecutionException e) {
            throw new IOException(e);
        }
        if (result == null) {
            throw new IOException("Failed to open input stream for " + taskLogUrl);
        }
        return result;
    }
}
public class ServletRESTRequestWithParams { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . rest . handler . RESTRequest # getContentType ( ) */ @ Override public String getContentType ( ) { } }
ServletRESTRequestImpl ret = castRequest ( ) ; if ( ret != null ) return ret . getContentType ( ) ; return null ;
public class ApplicationMetadata { /** * Sets the clazz to pu map . * @ param map * the map */ public void setClazzToPuMap ( Map < String , List < String > > map ) { } }
if ( clazzToPuMap == null ) { this . clazzToPuMap = map ; } else { clazzToPuMap . putAll ( map ) ; }
public class ProcessClosurePrimitives { /** * Verifies that a provide method call has exactly one argument , and that it ' s a string literal * and that the contents of the string are valid JS tokens . Reports a compile error if it doesn ' t . * @ return Whether the argument checked out okay */ private boolean verifyProvide ( Node methodName , Node arg ) { } }
if ( ! verifyLastArgumentIsString ( methodName , arg ) ) { return false ; } if ( ! NodeUtil . isValidQualifiedName ( compiler . getOptions ( ) . getLanguageIn ( ) . toFeatureSet ( ) , arg . getString ( ) ) ) { compiler . report ( JSError . make ( arg , INVALID_PROVIDE_ERROR , arg . getString ( ) , compiler . getOptions ( ) . getLanguageIn ( ) . toString ( ) ) ) ; return false ; } return true ;
public class SortedCellTable { /** * Sets the table ' s data provider list and sorts the table based on the * column given in { @ link SortedCellTable # setInitialSortColumn ( Column ) } * @ param list */ public void setList ( List < T > list ) { } }
dataProvider . getList ( ) . clear ( ) ; if ( list != null ) { for ( T t : list ) { dataProvider . getList ( ) . add ( t ) ; } } // Do a first - time sort based on which column was set in // setInitialSortColumn ( ) if ( initialSortColumn != null ) { Collections . sort ( dataProvider . getList ( ) , new Comparator < T > ( ) { @ Override public int compare ( T o1 , T o2 ) { return ( defaultSortOrderMap . get ( initialSortColumn ) ? 1 : - 1 ) * comparators . get ( initialSortColumn ) . compare ( o1 , o2 ) ; } } ) ; // Might as well get the little arrow on the header to make it // official getColumnSortList ( ) . push ( new ColumnSortList . ColumnSortInfo ( initialSortColumn , defaultSortOrderMap . get ( initialSortColumn ) ) ) ; currentlySortedColumn = initialSortColumn ; }
public class CmsImportExportUserDialog { /** * Get selected roles list . < p > * @ param parent layout * @ param importCase boolean * @ return List of roles */ private List < CmsRole > getRolesList ( VerticalLayout parent , boolean importCase ) { } }
List < CmsRole > res = new ArrayList < CmsRole > ( ) ; CmsEditableGroup editableGroup = importCase ? m_importRolesGroup : m_exportRolesGroup ; for ( I_CmsEditableGroupRow row : editableGroup . getRows ( ) ) { res . add ( ( ( ComboBox < CmsRole > ) row . getComponent ( ) ) . getValue ( ) ) ; } return res ;
public class AbstractHttpRequest { /** * Creates an HTTP connection . * Optionally checks for proxy parameters and creates a proxied connection * using the system properties : " hellosign . proxy . url " - the URL of the HTTP * proxy " hellosign . proxy . port " - the port of the HTTP proxy * @ param url String URL to connect to * @ return HttpUrlConnection the ( proxied ) connection to the URL * @ throws MalformedURLException thrown if the URL is invalid * @ throws IOException thrown if IO cannot be established with the URL */ protected static HttpURLConnection getProxiedConnection ( String url ) throws MalformedURLException , IOException { } }
HttpURLConnection conn = null ; Proxy proxy = null ; String proxyUrlStr = System . getProperty ( "hellosign.proxy.url" ) ; String proxyPortStr = System . getProperty ( "hellosign.proxy.port" ) ; Integer proxyPort = 80 ; // Default to port 80 if ( proxyPortStr != null ) { proxyPort = Integer . parseInt ( proxyPortStr ) ; } if ( proxyUrlStr != null ) { proxy = new Proxy ( Proxy . Type . HTTP , new InetSocketAddress ( proxyUrlStr , proxyPort ) ) ; } if ( proxy == null ) { conn = ( HttpURLConnection ) new URL ( url ) . openConnection ( ) ; } else { conn = ( HttpURLConnection ) new URL ( url ) . openConnection ( proxy ) ; } return conn ;
public class JSDocInfoBuilder { /** * Records a type for { @ code @ this } annotation . * @ return { @ code true } if the type was recorded and * { @ code false } if it is invalid or if it collided with { @ code @ enum } or * { @ code @ type } annotations */ public boolean recordThisType ( JSTypeExpression type ) { } }
if ( type != null && ! hasAnySingletonTypeTags ( ) && ! currentInfo . hasThisType ( ) ) { currentInfo . setThisType ( type ) ; populated = true ; return true ; } else { return false ; }
public class ConnectionPool {

    /**
     * Signals that a new segment should be activated.
     *
     * <p>When a signal queue exists, the signal is delivered best-effort (it
     * may be dropped, see below). Otherwise the pool runs with a single,
     * initially deactivated segment, which is activated here on first demand.
     */
    final void signalActivateSegment() throws SQLException {
        /* Note that 'offer' does so only if it can be done immediately without
        exceeding the queue capacity. The queue capacity is '1', so this means
        that signals may be ignored. Either way, this message will never block. */
        if (segmentSignalQueue != null) {
            segmentSignalQueue.offer(ACTIVATE_SEGMENT_SIGNAL);
        } else if (activeSegments == 0) {
            // Single segment, no monitor, started with the one segment deactivated...
            // Double-checked locking: re-test under the lock so only one thread
            // performs the activation.
            synchronized (this) {
                if (activeSegments == 0) {
                    // ...and DCL works here because activeSegments is volatile.
                    segments[0].activate();
                    activeSegments = 1;
                }
            }
        }
    }
}
public class ArbitrateConfigUtils { /** * 获取对应Node的zk集群列表配置 */ public static List < String > getServerAddrs ( ) { } }
Node node = ArbitrateConfigRegistry . getConfig ( ) . currentNode ( ) ; if ( node != null ) { String addr = StringUtils . join ( node . getParameters ( ) . getZkCluster ( ) . getServerList ( ) , ',' ) ; return Arrays . asList ( addr ) ; } else { return new ArrayList < String > ( ) ; }
public class AtomixCluster { /** * Builds a member location provider . */ @ SuppressWarnings ( "unchecked" ) protected static NodeDiscoveryProvider buildLocationProvider ( ClusterConfig config ) { } }
NodeDiscoveryConfig discoveryProviderConfig = config . getDiscoveryConfig ( ) ; if ( discoveryProviderConfig != null ) { return discoveryProviderConfig . getType ( ) . newProvider ( discoveryProviderConfig ) ; } if ( config . getMulticastConfig ( ) . isEnabled ( ) ) { return new MulticastDiscoveryProvider ( new MulticastDiscoveryConfig ( ) ) ; } else { return new BootstrapDiscoveryProvider ( Collections . emptyList ( ) ) ; }
public class GVRSensor {

    /**
     * Send a sensor event to the owner of this sensor.
     *
     * @param collision the pick result that triggered this event
     * @param over whether the cursor is over the picked object for this event
     */
    private void sendSensorEvent(GVRPicker.GVRPickedObject collision, boolean over) {
        // Events are pooled: obtain() here, recycle() at the end.
        SensorEvent event = SensorEvent.obtain();
        final IEventReceiver ownerCopy = getOwnerObject();
        GVRCursorController controller;
        // If this collision carries no picker, reuse the one from the last
        // recorded hit; otherwise, on an "over" event, remember this collision
        // as the latest hit for future backfills.
        if (collision.picker == null) {
            collision.picker = mHit.picker;
        } else if (over) {
            mHit = collision;
        }
        controller = collision.picker.getController();
        if (controller != null) {
            event.setCursorController(controller);
        }
        event.setActive(collision.touched);
        event.setPickedObject(collision);
        event.setOver(over);
        // Deliver to the owner's ISensorEvents listeners, then return the
        // event to the pool.
        getGVRContext().getEventManager().sendEvent(ownerCopy, ISensorEvents.class, "onSensorEvent", event);
        event.recycle();
    }
}
public class WorkflowsInner { /** * Updates a workflow . * @ param resourceGroupName The resource group name . * @ param workflowName The workflow name . * @ param workflow The workflow . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the WorkflowInner object */ public Observable < WorkflowInner > updateAsync ( String resourceGroupName , String workflowName , WorkflowInner workflow ) { } }
return updateWithServiceResponseAsync ( resourceGroupName , workflowName , workflow ) . map ( new Func1 < ServiceResponse < WorkflowInner > , WorkflowInner > ( ) { @ Override public WorkflowInner call ( ServiceResponse < WorkflowInner > response ) { return response . body ( ) ; } } ) ;
public class InetAddressUtils { /** * Gets by name . * @ param urlAddr the host * @ return the by name */ public static InetAddress getByName ( final String urlAddr ) { } }
try { val url = new URL ( urlAddr ) ; return InetAddress . getByName ( url . getHost ( ) ) ; } catch ( final Exception e ) { LOGGER . trace ( "Host name could not be determined automatically." , e ) ; } return null ;
public class HttpRule { /** * < pre > * The custom pattern is used for specifying an HTTP method that is not * included in the ` pattern ` field , such as HEAD , or " * " to leave the * HTTP method unspecified for this rule . The wild - card rule is useful * for services that provide content to Web ( HTML ) clients . * < / pre > * < code > . google . api . CustomHttpPattern custom = 8 ; < / code > */ public com . google . api . CustomHttpPattern getCustom ( ) { } }
if ( patternCase_ == 8 ) { return ( com . google . api . CustomHttpPattern ) pattern_ ; } return com . google . api . CustomHttpPattern . getDefaultInstance ( ) ;
public class JBBPMapper { /** * Map a structure to a class instance . * @ param rootStructure a structure to be mapped , must not be null * @ param mappingClassInstance a class instance to be destination for map * operations , must not be null * @ param customFieldProcessor a custom field processor to provide custom * values , it can be null if there is not any mapping field desires the * processor * @ return the processed class instance , the same which was the argument for * the method . * @ throws JBBPMapperException for any error */ public static Object map ( final JBBPFieldStruct rootStructure , final Object mappingClassInstance , final JBBPMapperCustomFieldProcessor customFieldProcessor ) { } }
return map ( rootStructure , mappingClassInstance , customFieldProcessor , 0 ) ;
public class SearchOperationsImpl { /** * Extract feature string ( encoded in base 64 ) given an image file or url . * @ param uploadSearchParams the upload search parameters , must contain a image file or a url * @ return the feature response string result */ @ Override public FeatureResponseResult extractFeature ( UploadSearchParams uploadSearchParams ) { } }
try { ViSearchHttpResponse response = getPostImageSearchHttpResponse ( uploadSearchParams , ENDPOINT_EXTRACT_FEATURE ) ; return getFeatureResponseResult ( response ) ; } catch ( InternalViSearchException e ) { return new FeatureResponseResult ( e . getMessage ( ) , e . getCause ( ) , e . getServerRawResponse ( ) ) ; }
public class LDAPController { /** * Gets the form to update a mapping */ @ RequestMapping ( value = "ldap-mapping/{id}/update" , method = RequestMethod . GET ) public Form getMappingUpdateForm ( @ PathVariable ID id ) { } }
securityService . checkGlobalFunction ( AccountGroupManagement . class ) ; return accountGroupMappingService . getMapping ( LDAPExtensionFeature . LDAP_GROUP_MAPPING , id ) . asForm ( accountService . getAccountGroups ( ) ) ;
public class UIComponentClassicTagBase { /** * The pageContext ' s request scope map is used to hold a stack of JSP tag objects seen so far , so that a new tag can * find the parent tag that encloses it . Access to the parent tag is used to find the parent UIComponent for the * component associated with this tag plus some other uses . */ private void popTag ( ) { } }
Stack < UIComponentClassicTagBase > stack = getStack ( pageContext ) ; int size = stack . size ( ) ; stack . remove ( size - 1 ) ; if ( size <= 1 ) { pageContext . removeAttribute ( COMPONENT_STACK_ATTR , PageContext . REQUEST_SCOPE ) ; }
public class OsiamConnector { /** * Create a client . * @ param client the client to create * @ param accessToken the access token used to access the service * @ return The created client * @ throws UnauthorizedException if the accessToken is not valid * @ throws ConnectionInitializationException if the connection to the given OSIAM service could not be initialized * @ throws ClientAlreadyExistsException if the client with the clientId already exists * @ throws IllegalStateException if OSIAM ' s endpoint ( s ) are not properly configured */ public Client createClient ( Client client , AccessToken accessToken ) { } }
return getAuthService ( ) . createClient ( client , accessToken ) ;
public class ImmediateExpressions { /** * Delegates to { @ link org . hamcrest . Matchers # not ( org . hamcrest . Matcher ) } . */ public static < S > Matcher < S > not ( Matcher < S > matcher ) { } }
return Matchers . not ( matcher ) ;
public class EndpointUtils { /** * Get the list of all eureka service urls from properties file for the eureka client to talk to . * @ param clientConfig the clientConfig to use * @ param instanceZone The zone in which the client resides * @ param preferSameZone true if we have to prefer the same zone as the client , false otherwise * @ return an ( ordered ) map of zone - > list of urls mappings , with the preferred zone first in iteration order */ public static Map < String , List < String > > getServiceUrlsMapFromConfig ( EurekaClientConfig clientConfig , String instanceZone , boolean preferSameZone ) { } }
Map < String , List < String > > orderedUrls = new LinkedHashMap < > ( ) ; String region = getRegion ( clientConfig ) ; String [ ] availZones = clientConfig . getAvailabilityZones ( clientConfig . getRegion ( ) ) ; if ( availZones == null || availZones . length == 0 ) { availZones = new String [ 1 ] ; availZones [ 0 ] = DEFAULT_ZONE ; } logger . debug ( "The availability zone for the given region {} are {}" , region , availZones ) ; int myZoneOffset = getZoneOffset ( instanceZone , preferSameZone , availZones ) ; String zone = availZones [ myZoneOffset ] ; List < String > serviceUrls = clientConfig . getEurekaServerServiceUrls ( zone ) ; if ( serviceUrls != null ) { orderedUrls . put ( zone , serviceUrls ) ; } int currentOffset = myZoneOffset == ( availZones . length - 1 ) ? 0 : ( myZoneOffset + 1 ) ; while ( currentOffset != myZoneOffset ) { zone = availZones [ currentOffset ] ; serviceUrls = clientConfig . getEurekaServerServiceUrls ( zone ) ; if ( serviceUrls != null ) { orderedUrls . put ( zone , serviceUrls ) ; } if ( currentOffset == ( availZones . length - 1 ) ) { currentOffset = 0 ; } else { currentOffset ++ ; } } if ( orderedUrls . size ( ) < 1 ) { throw new IllegalArgumentException ( "DiscoveryClient: invalid serviceUrl specified!" ) ; } return orderedUrls ;
public class DatabaseAdminClientSnippets {

    /**
     * Creates a database with the sample Singers/Albums schema and blocks
     * until the create operation completes.
     *
     * [VARIABLE my_database_id]
     *
     * @param instanceId the instance to create the database in
     * @param databaseId the id of the database to create
     * @return the created database
     */
    public Database createDatabase(String instanceId, String databaseId) {
        // [START createDatabase]
        OperationFuture<Database, CreateDatabaseMetadata> op = dbAdminClient.createDatabase(
            instanceId,
            databaseId,
            Arrays.asList(
                "CREATE TABLE Singers (\n"
                    + " SingerId INT64 NOT NULL,\n"
                    + " FirstName STRING(1024),\n"
                    + " LastName STRING(1024),\n"
                    + " SingerInfo BYTES(MAX)\n"
                    + ") PRIMARY KEY (SingerId)",
                "CREATE TABLE Albums (\n"
                    + " SingerId INT64 NOT NULL,\n"
                    + " AlbumId INT64 NOT NULL,\n"
                    + " AlbumTitle STRING(MAX)\n"
                    + ") PRIMARY KEY (SingerId, AlbumId),\n"
                    + " INTERLEAVE IN PARENT Singers ON DELETE CASCADE"));
        Database db;
        try {
            // Block until the long-running create operation finishes.
            db = op.get();
        } catch (ExecutionException e) {
            // The admin client wraps failures in SpannerException; unwrap it.
            throw (SpannerException) e.getCause();
        } catch (InterruptedException e) {
            // Re-raises as an unchecked exception with interrupt status handled.
            throw SpannerExceptionFactory.propagateInterrupt(e);
        }
        // [END createDatabase]
        return db;
    }
}
public class SoapServlet { /** * Original API ( Defaults to using MessageFactory . newInstance ( ) , i . e . SOAP * 1.1) * @ param code * @ param message * @ return Soap fault as string * @ throws SOAPException * @ throws TransformerException */ protected String createSoapFaultResponse ( String code , String message ) throws SOAPException , TransformerException { } }
return createSoapFaultResponse ( SOAPConstants . SOAP_1_1_PROTOCOL , code , message ) ;
public class IbvCQ { /** * - - - - - oo - verbs */ public SVCPollCq poll ( IbvWC [ ] wcList , int ne ) throws IOException { } }
return verbs . pollCQ ( this , wcList , ne ) ;
public class TemplateClassCache { /** * Retrieve java source and bytecode if template content has not changed * @ param tc */ public void loadTemplateClass ( TemplateClass tc ) { } }
if ( ! readEnabled ( ) ) { return ; } InputStream is = null ; try { File f = getCacheFile ( tc ) ; if ( ! f . exists ( ) || ! f . canRead ( ) ) return ; is = new BufferedInputStream ( new FileInputStream ( f ) ) ; // - - - check hash int offset = 0 ; int read ; StringBuilder hash = new StringBuilder ( ) ; while ( ( read = is . read ( ) ) != 0 ) { if ( read == - 1 ) { logger . error ( "Failed to read cache file for template class: %s" , tc ) ; return ; } hash . append ( ( char ) read ) ; offset ++ ; } // check hash only in non precompiled mode if ( ! conf . loadPrecompiled ( ) ) { String curHash = hash ( tc ) ; if ( ! curHash . equals ( hash . toString ( ) ) ) { if ( logger . isTraceEnabled ( ) ) { logger . trace ( "Bytecode too old (%s != %s)" , hash , curHash ) ; } return ; } } // - - - load java source read = - 1 ; StringBuilder source = new StringBuilder ( ) ; while ( ( read = is . read ( ) ) != 0 ) { source . append ( ( char ) read ) ; offset ++ ; } if ( source . length ( ) != 0 ) { String s = source . toString ( ) ; String [ ] sa = s . split ( "__INCLUDED_TAG_TYPES__" ) ; tc . javaSource = sa [ 0 ] ; s = sa [ 1 ] ; sa = s . split ( "__INCULDED_TEMPLATE_CLASS_NAME_LIST__" ) ; tc . deserializeIncludeTagTypes ( sa [ 0 ] ) ; s = sa [ 1 ] ; sa = s . split ( "__IMPORT_PATH_LIST__" ) ; tc . setIncludeTemplateClassNames ( sa [ 0 ] ) ; if ( sa . length > 1 ) { s = sa [ 1 ] ; sa = s . split ( ";" ) ; Set < String > importPaths = new HashSet < String > ( ) ; for ( String path : sa ) { if ( "java.lang" . equals ( path ) ) continue ; importPaths . add ( path ) ; } tc . replaceImportPath ( importPaths ) ; } } // else it must be an inner class // - - - load byte code byte [ ] byteCode = new byte [ ( int ) f . length ( ) - ( offset + 2 ) ] ; is . read ( byteCode ) ; tc . loadCachedByteCode ( byteCode ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } finally { if ( is != null ) try { is . close ( ) ; } catch ( IOException e ) { logger . error ( e . 
getMessage ( ) , e ) ; } }
public class Style {

    /**
     * Convert a comma-separated string to a dash pattern.
     *
     * @param dashPattern the string to convert (e.g. {@code "5, 3"}); may be null
     * @return the dash array, or null if the input is null, blank, has fewer
     *     than two entries, or contains a non-numeric entry
     */
    public static float[] dashFromString(String dashPattern) {
        // BUGFIX: guard against null input (previously threw NullPointerException).
        if (dashPattern == null || dashPattern.trim().isEmpty()) {
            return null;
        }
        String[] split = dashPattern.split(",");
        if (split.length < 2) {
            // A dash pattern needs at least an "on" and an "off" length.
            return null;
        }
        float[] dash = new float[split.length];
        for (int i = 0; i < split.length; i++) {
            try {
                dash[i] = Float.parseFloat(split[i].trim());
            } catch (NumberFormatException e) {
                // Not a parsable dash pattern.
                return null;
            }
        }
        return dash;
    }
}
public class DefaultAlertService { /** * ~ Methods * * * * * */ private void _initializeObjectMapper ( ) { } }
SimpleModule module = new SimpleModule ( ) ; module . addSerializer ( Alert . class , new Alert . Serializer ( ) ) ; module . addSerializer ( Trigger . class , new Trigger . Serializer ( ) ) ; module . addSerializer ( Notification . class , new Notification . Serializer ( ) ) ; module . addSerializer ( PrincipalUser . class , new Alert . PrincipalUserSerializer ( ) ) ; module . addDeserializer ( Alert . class , new Alert . Deserializer ( ) ) ; _mapper . registerModule ( module ) ;
public class URLTool { /** * Parse a query string into a map of key - value pairs . * @ param query query string to be parsed * @ return a mapping of parameter names to values suitable e . g . to pass into { @ link EscapeTool # url ( Map ) } */ public Map < String , List < String > > parseQuery ( String query ) { } }
Map < String , List < String > > queryParams = new LinkedHashMap < > ( ) ; if ( query != null ) { for ( NameValuePair params : URLEncodedUtils . parse ( query , StandardCharsets . UTF_8 ) ) { String name = params . getName ( ) ; List < String > values = queryParams . get ( name ) ; if ( values == null ) { values = new ArrayList < > ( ) ; queryParams . put ( name , values ) ; } values . add ( params . getValue ( ) ) ; } } return queryParams ;
public class CMAESUtils { /** * tql2 */ private static double specificShift ( int idx , int n , double [ ] d , double [ ] e ) { } }
double g = d [ idx ] ; double p = ( d [ idx + 1 ] - g ) / ( 2.0 * e [ idx ] ) ; double r = hypot ( p , 1.0 ) ; if ( p < 0 ) { r = - r ; } d [ idx ] = e [ idx ] / ( p + r ) ; d [ idx + 1 ] = e [ idx ] * ( p + r ) ; double h = g - d [ idx ] ; for ( int i = idx + 2 ; i < n ; i ++ ) { d [ i ] -= h ; } return h ;
public class WildcardFileFilter { /** * Checks to see if the filename matches one of the wildcards . * @ param file the file to check * @ return true if the filename matches one of the wildcards */ @ Override public boolean accept ( File file ) { } }
String name = file . getName ( ) ; if ( FilenameUtils . wildcardMatch ( name , wildcards , caseSensitivity ) ) { return true ; } return false ;
public class SnapshotDaemon { /** * Make this SnapshotDaemon responsible for generating snapshots */ public ListenableFuture < Void > mayGoActiveOrInactive ( final SnapshotSchedule schedule ) { } }
return m_es . submit ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { makeActivePrivate ( schedule ) ; return null ; } } ) ;