signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class AggressiveInlineAliases {
    /**
     * Inlines some references to an alias with its value. This handles cases where the alias is not
     * declared at initialization. It does nothing if the alias is reassigned after being initialized,
     * unless the reassignment occurs because of an enclosing function or a loop.
     *
     * @param alias an alias of some variable, which may not be well-defined
     * @param namespace the GlobalNamespace, which will be updated with all new nodes created
     * @param aliasRefs all references to the alias in its scope
     * @param aliasLhsNode the lhs name of the alias when it is first initialized
     * @return whether all references to the alias were inlined
     */
    private boolean partiallyInlineAlias(Ref alias, GlobalNamespace namespace, ReferenceCollection aliasRefs, Node aliasLhsNode) {
        BasicBlock aliasBlock = null;
        // This initial iteration through all the alias references does two things:
        // a) Find the control flow block in which the alias is assigned.
        // b) See if the alias var is assigned to in multiple places, and return if that's the case.
        // NOTE: we still may inline if the alias is assigned in a loop or inner function and that
        // assignment statement is potentially executed multiple times.
        // This is more aggressive than what "inlineAliasIfPossible" does.
        for (Reference aliasRef : aliasRefs) {
            Node aliasRefNode = aliasRef.getNode();
            if (aliasRefNode == aliasLhsNode) {
                aliasBlock = aliasRef.getBasicBlock();
                continue;
            } else if (aliasRef.isLvalue()) {
                // Don't replace any references if the alias is reassigned
                return false;
            }
        }
        // NOTE(review): aliasBlock stays null if aliasLhsNode is not among aliasRefs;
        // the second loop would then NPE on provablyExecutesBefore — presumably callers
        // guarantee the initializing reference is present. TODO confirm.
        Set<AstChange> newNodes = new LinkedHashSet<>();
        boolean alreadySeenInitialAlias = false;
        boolean foundNonReplaceableAlias = false;
        // Do a second iteration through all the alias references, and replace any inlinable references.
        for (Reference aliasRef : aliasRefs) {
            Node aliasRefNode = aliasRef.getNode();
            if (aliasRefNode == aliasLhsNode) {
                alreadySeenInitialAlias = true;
                continue;
            } else if (aliasRef.isDeclaration()) {
                // Ignore any alias declarations, e.g. "var alias;", since there's nothing to inline.
                continue;
            }
            BasicBlock refBlock = aliasRef.getBasicBlock();
            if ((refBlock != aliasBlock && aliasBlock.provablyExecutesBefore(refBlock))
                    || (refBlock == aliasBlock && alreadySeenInitialAlias)) {
                // Replace the alias only when the aliasing assignment provably executes before the
                // reference (earlier block, or earlier within the same block) and, per the first
                // loop, the alias is never reassigned.
                codeChanged = true; // NOTE(review): field on the enclosing class, not visible in this excerpt
                newNodes.add(replaceAliasReference(alias, aliasRef));
            } else {
                foundNonReplaceableAlias = true;
            }
        }
        // We removed all references to the alias, so remove the original aliasing assignment.
        if (!foundNonReplaceableAlias) {
            tryReplacingAliasingAssignment(alias, aliasLhsNode);
        }
        if (codeChanged) {
            // Inlining the variable may have introduced new references
            // to descendants of {@code name}. So those need to be collected now.
            namespace.scanNewNodes(newNodes);
        }
        return !foundNonReplaceableAlias;
    }
}
public class ReflectUtil {
    /**
     * Finds a method by name and parameter types.
     * Thin delegate: lookup semantics (declared vs. public members, superclass
     * traversal, null on miss vs. throw) are defined by {@code findMethod}.
     *
     * @param declaringType the class to search
     * @param methodName the name of the method to look for
     * @param parameterTypes the types of the parameters
     * @return whatever {@code findMethod} produces for these arguments
     */
    public static Method getMethod(Class<?> declaringType, String methodName, Class<?>... parameterTypes) {
        return findMethod(declaringType, methodName, parameterTypes);
    }
}
public class JmxClient { /** * Get multiple attributes at once from the server . */
public List < Attribute > getAttributes ( String domain , String beanName , String [ ] attributes ) throws Exception { } } | checkClientConnected ( ) ; return getAttributes ( ObjectNameUtil . makeObjectName ( domain , beanName ) , attributes ) ; |
public class ParallelismConfigurationUpdateMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ParallelismConfigurationUpdate parallelismConfigurationUpdate , ProtocolMarshaller protocolMarshaller ) { } } | if ( parallelismConfigurationUpdate == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( parallelismConfigurationUpdate . getConfigurationTypeUpdate ( ) , CONFIGURATIONTYPEUPDATE_BINDING ) ; protocolMarshaller . marshall ( parallelismConfigurationUpdate . getParallelismUpdate ( ) , PARALLELISMUPDATE_BINDING ) ; protocolMarshaller . marshall ( parallelismConfigurationUpdate . getParallelismPerKPUUpdate ( ) , PARALLELISMPERKPUUPDATE_BINDING ) ; protocolMarshaller . marshall ( parallelismConfigurationUpdate . getAutoScalingEnabledUpdate ( ) , AUTOSCALINGENABLEDUPDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Messenger { /** * Video Calling to user
* @ param uid user you want to call
* @ return command to execute */
@ ObjectiveCName ( "doVideoCallWithUid:" ) public Command < Long > doVideoCall ( int uid ) { } } | return modules . getCallsModule ( ) . makeCall ( Peer . user ( uid ) , true ) ; |
public class MapUtil { /** * 将map转成字符串
* @ param < K > 键类型
* @ param < V > 值类型
* @ param map Map
* @ param separator entry之间的连接符
* @ param keyValueSeparator kv之间的连接符
* @ param isIgnoreNull 是否忽略null的键和值
* @ return 连接后的字符串
* @ since 3.1.1 */
public static < K , V > String join ( Map < K , V > map , String separator , String keyValueSeparator , boolean isIgnoreNull ) { } } | final StringBuilder strBuilder = StrUtil . builder ( ) ; boolean isFirst = true ; for ( Entry < K , V > entry : map . entrySet ( ) ) { if ( false == isIgnoreNull || entry . getKey ( ) != null && entry . getValue ( ) != null ) { if ( isFirst ) { isFirst = false ; } else { strBuilder . append ( separator ) ; } strBuilder . append ( Convert . toStr ( entry . getKey ( ) ) ) . append ( keyValueSeparator ) . append ( Convert . toStr ( entry . getValue ( ) ) ) ; } } return strBuilder . toString ( ) ; |
public class HiltItemStack { /** * Sets the lore of this ItemStack . Use null to remove lore .
* @ param lore lore to give the item
* @ return this HiltItemStack with new lore */
public HiltItemStack setLore ( List < String > lore ) { } } | createItemMeta ( ) ; ItemMeta itemMeta = getItemMeta ( ) ; itemMeta . setLore ( lore ) ; setItemMeta ( itemMeta ) ; return this ; |
public class IntCounter { /** * Sets the current count for each of the given keys . This will wipe out
* any existing counts for these keys .
* To add to the counts of a collection of objects instead of replacing them ,
* use { @ link # incrementCounts ( Collection , int ) } . */
public void setCounts ( Collection < E > keys , int count ) { } } | for ( E key : keys ) { setCount ( key , count ) ; } |
public class OptionalBoolean { /** * State transitions .
* - if the states are the same then no change
* - if either state is unset then change to the other state
* - if the states are conflicting then set to the conflicting state
* @ param other object from which to merge state */
public void mergeWith ( OptionalBoolean other ) { } } | if ( state == other . state ) { // no change in state
} else if ( state == State . UNSET ) { state = other . state ; } else if ( other . state == State . UNSET ) { // no change in state
} else { state = State . CONFLICTING ; } |
public class ResourceManager { /** * Retrieve resource with specified basename .
* @ param baseName the basename
* @ param classLoader the classLoader to load resources from
* @ return the Resources */
public static Resources getBaseResources ( String baseName , ClassLoader classLoader ) { } } | synchronized ( ResourceManager . class ) { Resources resources = getCachedResource ( baseName ) ; if ( null == resources ) { resources = new Resources ( baseName , classLoader ) ; putCachedResource ( baseName , resources ) ; } return resources ; } |
public class GHUtility { /** * This method could throw an exception if problems like index out of bounds etc */
public static List < String > getProblems ( Graph g ) { } } | List < String > problems = new ArrayList < > ( ) ; int nodes = g . getNodes ( ) ; int nodeIndex = 0 ; NodeAccess na = g . getNodeAccess ( ) ; try { EdgeExplorer explorer = g . createEdgeExplorer ( ) ; for ( ; nodeIndex < nodes ; nodeIndex ++ ) { double lat = na . getLatitude ( nodeIndex ) ; if ( lat > 90 || lat < - 90 ) problems . add ( "latitude is not within its bounds " + lat ) ; double lon = na . getLongitude ( nodeIndex ) ; if ( lon > 180 || lon < - 180 ) problems . add ( "longitude is not within its bounds " + lon ) ; EdgeIterator iter = explorer . setBaseNode ( nodeIndex ) ; while ( iter . next ( ) ) { if ( iter . getAdjNode ( ) >= nodes ) { problems . add ( "edge of " + nodeIndex + " has a node " + iter . getAdjNode ( ) + " greater or equal to getNodes" ) ; } if ( iter . getAdjNode ( ) < 0 ) { problems . add ( "edge of " + nodeIndex + " has a negative node " + iter . getAdjNode ( ) ) ; } } } } catch ( Exception ex ) { throw new RuntimeException ( "problem with node " + nodeIndex , ex ) ; } // for ( int i = 0 ; i < nodes ; i + + ) {
// new BreadthFirstSearch ( ) . start ( g , i ) ;
return problems ; |
public class DirectoryScannerActor { /** * Answer the actor as the { @ code protocol } or { @ code null } .
* @ param protocol the { @ code Class < T > } of the protocol that the actor must support
* @ param address the { @ code Address } of the actor to find
* @ param T the protocol type
* @ return T */
private < T > T internalActorOf ( final Class < T > protocol , final Address address ) { } } | final Actor actor = directory . actorOf ( address ) ; try { if ( actor != null ) { return stage ( ) . actorAs ( actor , protocol ) ; } else { logger ( ) . log ( "Actor with address: " + address + " not found; protocol is: " + protocol . getName ( ) ) ; } } catch ( Exception e ) { logger ( ) . log ( "Error providing protocol: " + protocol . getName ( ) + " for actor with address: " + address , e ) ; } return null ; |
public class ReactiveSubscriber {
    /**
     * <pre>
     * {@code
     * ReactiveSubscriber<Integer> sub = Spouts.reactiveSubscriber();
     * Flux.just(1,2,3).forEachAsync(sub);
     * sub.stream().forEach(System.out::println);
     * // note JDK Stream based terminal operations may block the current thread
     * // see ReactiveSeq#collectStream ReactiveSeq#foldAll for non-blocking alternatives
     * }
     * </pre>
     *
     * @return A push-based asynchronous event driven Observable-style Stream that implements
     *         Backpressure via the reactive-streams API
     */
    public ReactiveSeq<T> reactiveStream() {
        streamCreated = true; // flag read elsewhere — presumably guards double subscription; TODO confirm
        // Build the live stream up front. Note this runs even when the source has already
        // completed or errored; the checks below then take precedence over `result`.
        ReactiveSeq<T> result = Spouts.reactiveStream(getAction());
        if (complete)
            return ReactiveSeq.fromIterable(buffer); // source already finished: replay buffered events only
        if (error != null)
            throw ExceptionSoftener.throwSoftenedException(error); // rethrow captured failure unchecked
        if (buffer.size() > 0) {
            // Events arrived before the consumer attached: emit the backlog first, then go live.
            return Spouts.concat(Spouts.fromIterable(buffer), result);
        }
        return result;
    }
}
public class SqlDateTimeUtils { /** * Returns the epoch days since 1970-01-01. */
public static int strToDate ( String dateStr , String fromFormat ) { } } | // It is OK to use UTC , we just want get the epoch days
// TODO use offset , better performance
long ts = parseToTimeMillis ( dateStr , fromFormat , TimeZone . getTimeZone ( "UTC" ) ) ; ZoneId zoneId = ZoneId . of ( "UTC" ) ; Instant instant = Instant . ofEpochMilli ( ts ) ; ZonedDateTime zdt = ZonedDateTime . ofInstant ( instant , zoneId ) ; return DateTimeUtils . ymdToUnixDate ( zdt . getYear ( ) , zdt . getMonthValue ( ) , zdt . getDayOfMonth ( ) ) ; |
public class ScalingPlanResource { /** * The scaling policies .
* @ param scalingPolicies
* The scaling policies . */
public void setScalingPolicies ( java . util . Collection < ScalingPolicy > scalingPolicies ) { } } | if ( scalingPolicies == null ) { this . scalingPolicies = null ; return ; } this . scalingPolicies = new java . util . ArrayList < ScalingPolicy > ( scalingPolicies ) ; |
public class MavenJDOMWriter { /** * Method updateScm .
* @ param value
* @ param element
* @ param counter
* @ param xmlTag */
protected void updateScm ( Scm value , String xmlTag , Counter counter , Element element ) { } } | boolean shouldExist = value != null ; Element root = updateElement ( counter , element , xmlTag , shouldExist ) ; if ( shouldExist ) { Counter innerCount = new Counter ( counter . getDepth ( ) + 1 ) ; findAndReplaceSimpleElement ( innerCount , root , "connection" , value . getConnection ( ) , null ) ; findAndReplaceSimpleElement ( innerCount , root , "developerConnection" , value . getDeveloperConnection ( ) , null ) ; findAndReplaceSimpleElement ( innerCount , root , "tag" , value . getTag ( ) , "HEAD" ) ; findAndReplaceSimpleElement ( innerCount , root , "url" , value . getUrl ( ) , null ) ; } |
public class DTDNmTokensAttr {
    /**
     * Method called by the validator to let the attribute do necessary
     * normalization and/or validation for the value.
     * Validates that the value is a non-empty, space-separated list of NMTOKENs;
     * when {@code normalize} is true, also trims leading/trailing space and
     * collapses internal runs of space into single spaces.
     *
     * @param v the active validator (used for error reporting)
     * @param cbuf buffer holding the attribute value
     * @param start index of the first value character (inclusive)
     * @param end index past the last value character (exclusive on entry)
     * @param normalize whether to produce a normalized copy of the value
     * @return the normalized value, or null when no normalization was requested/needed;
     *         error reports are routed through the validator helpers
     */
    @Override
    public String validate(DTDValidatorBase v, char[] cbuf, int start, int end, boolean normalize) throws XMLStreamException {
        // int origStart = start;
        /* First things first; let's ensure value is not empty (all
         * white space)...
         */
        while (start < end && WstxInputData.isSpaceChar(cbuf[start])) {
            ++start;
        }
        // Empty value?
        if (start >= end) {
            return reportValidationProblem(v, "Empty NMTOKENS value");
        }
        /* Then, let's have separate handling for normalizing and
         * non-normalizing case, since latter is trivially easy case:
         */
        if (!normalize) {
            // Validation only: every char must be a space or a name char.
            for (; start < end; ++start) {
                char c = cbuf[start];
                if (!WstxInputData.isSpaceChar(c) && !WstxInputData.isNameChar(c, mCfgNsAware, mCfgXml11)) {
                    return reportInvalidChar(v, c, "not valid as NMTOKENS character");
                }
            }
            return null; // ok, all good
        }
        // boolean trimmed = (origStart != start);
        // origStart = start;
        --end; // so that it now points to the last char
        // Wouldn't absolutely have to trim trailing... but is easy to do
        while (end > start && WstxInputData.isSpaceChar(cbuf[end])) {
            --end; // trimmed = true;
        }
        /* Ok, now, need to check we only have valid chars, and maybe
         * also coalesce multiple spaces, if any.
         */
        StringBuilder sb = null;
        while (start <= end) {
            // Scan one token: i stops at the first space (or past end).
            int i = start;
            for (; i <= end; ++i) {
                char c = cbuf[i];
                if (WstxInputData.isSpaceChar(c)) {
                    break;
                }
                if (!WstxInputData.isNameChar(c, mCfgNsAware, mCfgXml11)) {
                    return reportInvalidChar(v, c, "not valid as an NMTOKENS character");
                }
            }
            if (sb == null) {
                sb = new StringBuilder(end - start + 1);
            } else {
                sb.append(' '); // single space between coalesced tokens
            }
            sb.append(cbuf, start, (i - start));
            start = i + 1;
            // Ok, any white space to skip?
            while (start <= end && WstxInputData.isSpaceChar(cbuf[start])) {
                ++start;
            }
        }
        /* 27-Nov-2005, TSa: Could actually optimize trimming, and often
         * avoid using StringBuilder... but let's only do it if it turns
         * out dealing with NMTOKENS normalization shows up on profiling...
         */
        return sb.toString();
    }
}
public class FilterStreamUnpacker {
    /**
     * Finds the underlying {@link OutputStream} of a {@link FilterOutputStream}. Note this is not
     * always possible due to security restrictions of the JVM.
     *
     * @param os the filter stream to unpack
     * @return the stream wrapped by {@code os}
     * @throws IllegalAccessException if security policies of the JVM (a SecurityManager, or on
     *         JDK 9+ module encapsulation) prevent unpacking of the {@link FilterOutputStream}
     */
    public static OutputStream unpackFilterOutputStream(FilterOutputStream os) throws IllegalAccessException {
        try {
            Field field = FilterOutputStream.class.getDeclaredField("out");
            field.setAccessible(true);
            return (OutputStream) field.get(os);
        } catch (NoSuchFieldException nsfe) {
            // "out" is a documented protected field of FilterOutputStream; its absence
            // would mean a broken JRE, so surface it as an unchecked error.
            throw new RuntimeException(nsfe);
        } catch (java.lang.reflect.InaccessibleObjectException ioe) {
            // JDK 9+ strong encapsulation: setAccessible throws this *unchecked* exception,
            // which previously escaped past the documented contract. Translate it to the
            // checked IllegalAccessException this method declares.
            IllegalAccessException iae =
                    new IllegalAccessException("JVM denies reflective access to FilterOutputStream.out: " + ioe.getMessage());
            iae.initCause(ioe);
            throw iae;
        }
    }
}
public class SecurityUtils {
    /**
     * Removes the certificate checks of HTTPS traffic on a HTTP client. Use
     * with caution!
     *
     * @param httpClient the client whose scheme registry is rewritten
     * @throws IllegalStateException if building or registering the SSL scheme fails
     * @deprecated use {@code createUnsafeSSLConnectionSocketFactory()} in conjunction
     *             with {@code HttpClients#custom()} instead.
     */
    @Deprecated
    public static void removeSshCertificateChecks(final HttpClient httpClient) throws IllegalStateException {
        try {
            // prepare a SSL context which doesn't validate certificates
            final SSLContext sslContext = SSLContext.getInstance("SSL");
            final TrustManager trustManager = new NaiveTrustManager();
            sslContext.init(null, new TrustManager[] { trustManager }, new SecureRandom());
            final org.apache.http.conn.ssl.SSLSocketFactory schemeSocketFactory = new org.apache.http.conn.ssl.SSLSocketFactory(sslContext);
            final org.apache.http.conn.scheme.Scheme sslScheme = new org.apache.http.conn.scheme.Scheme("https", 443, schemeSocketFactory);
            // Register the non-validating scheme on the client's existing registry so
            // all subsequent HTTPS connections skip certificate validation.
            final org.apache.http.conn.scheme.SchemeRegistry registry = httpClient.getConnectionManager().getSchemeRegistry();
            registry.register(sslScheme);
        } catch (final Exception e) {
            // Any failure (no SSL provider, init error) is surfaced as the documented IllegalStateException.
            throw new IllegalStateException(e);
        }
    }
}
public class RepositoryResourceImpl { /** * { @ inheritDoc } */
@ Override public Collection < String > getAppliesToMinimumVersions ( ) { } } | Collection < String > versions = new HashSet < String > ( ) ; try { List < AppliesToFilterInfo > entries = generateAppliesToFilterInfoList ( false ) ; if ( entries != null ) { for ( AppliesToFilterInfo appliesToFilterInfo : entries ) { FilterVersion minVersion = appliesToFilterInfo . getMinVersion ( ) ; if ( minVersion != null ) { versions . add ( minVersion . toString ( ) ) ; } } } } catch ( RepositoryResourceCreationException e ) { // Impossible as we don ' t validate the applies to
} return versions ; |
public class VerbPlusPreps { /** * that should be linking them , otherwise returns null */
public Prep findWord ( String word ) { } } | if ( prepsMap . containsKey ( "*" ) ) { word = "*" ; } return prepsMap . get ( word ) ; |
public class BELScriptWalker {
    /**
     * ANTLR-generated rule action.
     * BELScriptWalker.g:157:1 : set_annotation : 'SET' an=OBJECT_IDENT '=' (qv=QUOTED_VALUE | list=VALUE_LIST | oi=OBJECT_IDENT);
     * Parses a SET statement and, depending on the annotation name, records a
     * user annotation, a Citation, or an Evidence line into the walker's context.
     */
    public final BELScriptWalker.set_annotation_return set_annotation() throws RecognitionException {
        BELScriptWalker.set_annotation_return retval = new BELScriptWalker.set_annotation_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        CommonTree _first_0 = null;
        CommonTree _last = null;
        CommonTree an = null;     // annotation name token
        CommonTree qv = null;     // quoted single value, when present
        CommonTree list = null;   // list-form value, when present
        CommonTree oi = null;     // bare identifier value, when present
        CommonTree string_literal18 = null;
        CommonTree char_literal19 = null;
        CommonTree an_tree = null;
        CommonTree qv_tree = null;
        CommonTree list_tree = null;
        CommonTree oi_tree = null;
        CommonTree string_literal18_tree = null;
        CommonTree char_literal19_tree = null;
        try {
            // BELScriptWalker.g:157:15: ('SET' an=OBJECT_IDENT '=' (qv=QUOTED_VALUE | list=VALUE_LIST | oi=OBJECT_IDENT))
            // BELScriptWalker.g:158:5: 'SET' an=OBJECT_IDENT '=' (qv=QUOTED_VALUE | list=VALUE_LIST | oi=OBJECT_IDENT)
            {
                root_0 = (CommonTree) adaptor.nil();
                // match 'SET'
                _last = (CommonTree) input.LT(1);
                string_literal18 = (CommonTree) match(input, 24, FOLLOW_24_in_set_annotation219);
                string_literal18_tree = (CommonTree) adaptor.dupNode(string_literal18);
                adaptor.addChild(root_0, string_literal18_tree);
                // match the annotation name
                _last = (CommonTree) input.LT(1);
                an = (CommonTree) match(input, OBJECT_IDENT, FOLLOW_OBJECT_IDENT_in_set_annotation223);
                an_tree = (CommonTree) adaptor.dupNode(an);
                adaptor.addChild(root_0, an_tree);
                // match '='
                _last = (CommonTree) input.LT(1);
                char_literal19 = (CommonTree) match(input, 25, FOLLOW_25_in_set_annotation225);
                char_literal19_tree = (CommonTree) adaptor.dupNode(char_literal19);
                adaptor.addChild(root_0, char_literal19_tree);
                // BELScriptWalker.g:158:31: (qv=QUOTED_VALUE | list=VALUE_LIST | oi=OBJECT_IDENT)
                int alt5 = 3;
                switch (input.LA(1)) {
                    case QUOTED_VALUE: {
                        alt5 = 1;
                    }
                    break;
                    case VALUE_LIST: {
                        alt5 = 2;
                    }
                    break;
                    case OBJECT_IDENT: {
                        alt5 = 3;
                    }
                    break;
                    default:
                        NoViableAltException nvae = new NoViableAltException("", 5, 0, input);
                        throw nvae;
                }
                switch (alt5) {
                    case 1:
                        // BELScriptWalker.g:158:32: qv=QUOTED_VALUE
                    {
                        _last = (CommonTree) input.LT(1);
                        qv = (CommonTree) match(input, QUOTED_VALUE, FOLLOW_QUOTED_VALUE_in_set_annotation230);
                        qv_tree = (CommonTree) adaptor.dupNode(qv);
                        adaptor.addChild(root_0, qv_tree);
                    }
                    break;
                    case 2:
                        // BELScriptWalker.g:158:50: list=VALUE_LIST
                    {
                        _last = (CommonTree) input.LT(1);
                        list = (CommonTree) match(input, VALUE_LIST, FOLLOW_VALUE_LIST_in_set_annotation236);
                        list_tree = (CommonTree) adaptor.dupNode(list);
                        adaptor.addChild(root_0, list_tree);
                    }
                    break;
                    case 3:
                        // BELScriptWalker.g:158:68: oi=OBJECT_IDENT
                    {
                        _last = (CommonTree) input.LT(1);
                        oi = (CommonTree) match(input, OBJECT_IDENT, FOLLOW_OBJECT_IDENT_in_set_annotation242);
                        oi_tree = (CommonTree) adaptor.dupNode(oi);
                        adaptor.addChild(root_0, oi_tree);
                    }
                    break;
                }
                final String name = an.getText();
                BELAnnotationDefinition ad = definedAnnotations.get(name);
                if (ad != null) {
                    // read annotation value
                    final BELAnnotation annotation;
                    if (qv != null) {
                        annotation = new BELAnnotation(ad, qv.getText());
                    } else if (oi != null) {
                        annotation = new BELAnnotation(ad, oi.getText());
                    } else {
                        if (list == null) {
                            throw new IllegalStateException("Did not understand annotation value, expecting annotation list form.");
                        }
                        // strip surrounding braces/brackets, then split the list record
                        String listvalues = list.getText();
                        listvalues = listvalues.substring(1, listvalues.length() - 1);
                        annotation = new BELAnnotation(ad, Arrays.asList(ParserUtil.parseListRecord(listvalues)));
                    }
                    if (activeStatementGroup != null) {
                        // add to local statement group scope
                        sgAnnotationContext.put(name, annotation);
                    } else {
                        // add to main statement group scope
                        annotationContext.put(name, annotation);
                    }
                } else if (!name.equals("Citation") && !name.equals("Evidence")) {
                    // throw if annotation is not defined and it's not the intrinsics: Citation or EvidenceLine
                    addError(new DefineAnnotationBeforeUsageException(an.getLine(), an.getCharPositionInLine()));
                }
                if (name.equals("Citation")) {
                    // redefinition of citation so clear out citation context
                    citationContext = null;
                    if (list == null) {
                        addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                    } else {
                        String listvalues = list.getText();
                        String[] tokens = ParserUtil.parseListRecord(listvalues);
                        String type = null;
                        String cname = null;
                        String reference = null;
                        Date publicationDate = null;
                        String[] authors = null;
                        String comment = null;
                        // (required) parse type
                        if (tokens.length > 0 && tokens[0] != null) {
                            type = tokens[0];
                            if (!("Book".equals(type) || "Journal".equals(type) || "Online Resource".equals(type) || "Other".equals(type) || "PubMed".equals(type))) {
                                addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                            }
                        } else {
                            addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                        }
                        // (required) parse name
                        if (tokens.length > 1 && tokens[1] != null) {
                            if ("".equals(tokens[1].trim())) {
                                addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                            } else {
                                cname = tokens[1];
                            }
                        } else {
                            addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                        }
                        // (required) parse reference
                        if (tokens.length > 2 && tokens[2] != null) {
                            if ("".equals(tokens[2].trim())) {
                                addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                            } else {
                                reference = tokens[2];
                            }
                        }
                        // (optional) parse date of publication
                        if (tokens.length > 3 && tokens[3] != null) {
                            if (!"".equals(tokens[3].trim())) {
                                try {
                                    publicationDate = iso8601DateFormat.parse(tokens[3]);
                                } catch (ParseException e) {
                                    addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                                }
                                if (publicationDate == null) {
                                    addError(new InvalidCitationException(an.getLine(), an.getCharPositionInLine()));
                                }
                            }
                        }
                        // (optional) parse authors
                        if (tokens.length > 4 && tokens[4] != null) {
                            authors = ParserUtil.parseValueSeparated(tokens[4]);
                        }
                        // (optional) parse comments
                        if (tokens.length > 5 && tokens[5] != null) {
                            comment = tokens[5];
                        }
                        citationContext = new BELCitation(type, cname, publicationDate, reference, authors == null ? null : Arrays.asList(authors), comment);
                    }
                } else if (name.equals("Evidence")) {
                    // redefinition of evidence so clear out evidence context
                    evidenceContext = null;
                    if (qv == null || "".equals(qv.getText().trim())) {
                        addError(new InvalidEvidenceException(an.getLine(), an.getCharPositionInLine()));
                    } else {
                        evidenceContext = new BELEvidence(qv.getText());
                    }
                }
            }
            retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0);
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
        }
        return retval;
    }
}
public class URLField {
    /**
     * Set up the default screen control for this field.
     * Builds the standard (single-line) field control via the superclass, then adds
     * a companion URL button/box component to its right; the field control is what
     * is returned.
     *
     * @param itsLocation Location of this component on screen (ie., GridBagConstraint).
     * @param targetScreen Where to place this component (ie., Parent screen or GridBagLayout).
     * @param converter The converter to set the screenfield to.
     * @param iDisplayFieldDesc Display the label? (optional).
     * @param properties extra creation properties (replaced internally for the companion component)
     * @return Return the component or ScreenField that is created for this field.
     */
    public ScreenComponent setupDefaultView(ScreenLoc itsLocation, ComponentParent targetScreen, Convert converter, int iDisplayFieldDesc, Map<String, Object> properties) {
        // Cap the display width so the URL shows as a single line.
        if (converter.getMaxLength() > ScreenConstants.MAX_SINGLE_CHARS)
            converter = new FieldLengthConverter((Converter) converter, ScreenConstants.MAX_SINGLE_CHARS); // Show as a single line.
        ScreenComponent sScreenField = super.setupDefaultView(itsLocation, targetScreen, converter, iDisplayFieldDesc, properties);
        // NOTE: the incoming properties map is deliberately replaced here; the new map
        // configures the companion URL component only.
        properties = new HashMap<String, Object>();
        properties.put(ScreenModel.FIELD, this);
        properties.put(ScreenModel.COMMAND, ScreenModel.URL);
        properties.put(ScreenModel.IMAGE, ScreenModel.URL);
        // Companion "canned box" placed immediately to the right of the field.
        ScreenComponent pSScreenField = createScreenComponent(ScreenModel.CANNED_BOX, targetScreen.getNextLocation(ScreenConstants.RIGHT_OF_LAST, ScreenConstants.DONT_SET_ANCHOR), targetScreen, converter, iDisplayFieldDesc, properties);
        pSScreenField.setRequestFocusEnabled(false); // button should not steal keyboard focus
        return sScreenField;
    }
}
public class CPDefinitionLinkPersistenceImpl { /** * Returns the last cp definition link in the ordered set where CPDefinitionId = & # 63 ; and type = & # 63 ; .
* @ param CPDefinitionId the cp definition ID
* @ param type the type
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching cp definition link
* @ throws NoSuchCPDefinitionLinkException if a matching cp definition link could not be found */
@ Override public CPDefinitionLink findByCPD_T_Last ( long CPDefinitionId , String type , OrderByComparator < CPDefinitionLink > orderByComparator ) throws NoSuchCPDefinitionLinkException { } } | CPDefinitionLink cpDefinitionLink = fetchByCPD_T_Last ( CPDefinitionId , type , orderByComparator ) ; if ( cpDefinitionLink != null ) { return cpDefinitionLink ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPDefinitionId=" ) ; msg . append ( CPDefinitionId ) ; msg . append ( ", type=" ) ; msg . append ( type ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionLinkException ( msg . toString ( ) ) ; |
public class DataService { /** * Method to download the file for the given entity id in asynchronous fashion
* @ param entity
* the entity
* @ param callbackHandler
* the callback handler
* @ throws FMSException
* throws FMSException */
public < T extends IEntity > void downloadPDFAsync ( T entity , CallbackHandler callbackHandler ) throws FMSException { } } | if ( ! isAvailableAsPDF ( entity ) ) { throw new FMSException ( "Following entity: " + entity . getClass ( ) . getSimpleName ( ) + " cannot be exported as PDF (Async) " ) ; } IntuitMessage intuitMessage = prepareDownloadPDF ( entity ) ; // set callback handler
intuitMessage . getRequestElements ( ) . setCallbackHandler ( callbackHandler ) ; // execute async interceptors
executeAsyncInterceptors ( intuitMessage ) ; |
public class IdentityPatchRunner {
    /**
     * Apply a patch.
     *
     * @param patchResolver the patch metadata resolver
     * @param contentProvider the patch content provider
     * @param contentPolicy the content verification policy
     * @param modification the installation modification
     * @return the result of applying the patch
     * @throws PatchingException for any error
     */
    public PatchingResult applyPatch(final PatchMetadataResolver patchResolver, final PatchContentProvider contentProvider, final ContentVerificationPolicy contentPolicy, final InstallationManager.InstallationModification modification) throws PatchingException {
        try {
            // Check if we can apply this patch
            final Patch patch = patchResolver.resolvePatch(modification.getName(), modification.getVersion());
            if (patch == null) {
                throw PatchLogger.ROOT_LOGGER.failedToResolvePatch(modification.getName(), modification.getVersion());
            }
            final String patchId = patch.getPatchId();
            final Identity identity = patch.getIdentity();
            final String appliesTo = identity.getVersion();
            // The patch must target exactly the currently installed version.
            if (!appliesTo.equals(modification.getVersion())) {
                throw PatchLogger.ROOT_LOGGER.doesNotApply(appliesTo, modification.getVersion());
            }
            // Cannot apply the same patch twice
            if (modification.isApplied(patchId)) {
                throw PatchLogger.ROOT_LOGGER.alreadyApplied(patchId);
            }
            // See if the prerequisites are met
            checkUpgradeConditions(identity, modification);
            // Apply the patch; the history dir doubles as the rollback backup location.
            final File backup = installedImage.getPatchHistoryDir(patchId);
            final IdentityPatchContext context = new IdentityPatchContext(backup, contentProvider, contentPolicy, modification, APPLY, installedImage);
            try {
                return applyPatch(patchId, patch, context);
            } catch (Exception e) {
                PatchLogger.ROOT_LOGGER.debugf(e, "failed to apply patch %s", patchId);
                // rethrowException presumably converts to PatchingException — preserves the cause
                throw rethrowException(e);
            } finally {
                // always release per-attempt context state, success or failure
                context.cleanup();
            }
        } finally {
            // drop any cached/extracted patch content regardless of outcome
            contentProvider.cleanup();
        }
    }
}
public class PersonGroupPersonsImpl {
    /**
     * Update a person persisted face's userData field.
     *
     * @param personGroupId Id referencing a particular person group.
     * @param personId Id referencing a particular person.
     * @param persistedFaceId Id referencing a particular persistedFaceId of an existing face.
     * @param userData User-provided data attached to the face. The size limit is 1KB.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> updateFaceWithServiceResponseAsync(String personGroupId, UUID personId, UUID persistedFaceId, String userData) {
        // Fail fast on required parameters before issuing the network call.
        if (this.client.azureRegion() == null) {
            throw new IllegalArgumentException("Parameter this.client.azureRegion() is required and cannot be null.");
        }
        if (personGroupId == null) {
            throw new IllegalArgumentException("Parameter personGroupId is required and cannot be null.");
        }
        if (personId == null) {
            throw new IllegalArgumentException("Parameter personId is required and cannot be null.");
        }
        if (persistedFaceId == null) {
            throw new IllegalArgumentException("Parameter persistedFaceId is required and cannot be null.");
        }
        UpdatePersonFaceRequest bodyParameter = new UpdatePersonFaceRequest();
        bodyParameter.withUserData(userData);
        // Auto-generated "parameterized host" string for the service layer; the
        // "{placeholder}, value" pairing is presumably what the Retrofit host
        // interceptor expects — NOTE(review): generated code, do not hand-edit.
        String parameterizedHost = Joiner.on(", ").join("{AzureRegion}", this.client.azureRegion());
        return service.updateFace(personGroupId, personId, persistedFaceId, this.client.acceptLanguage(), bodyParameter, parameterizedHost, this.client.userAgent())
                .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                    @Override
                    public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                        try {
                            // Map the raw HTTP response to the typed (void) service response.
                            ServiceResponse<Void> clientResponse = updateFaceDelegate(response);
                            return Observable.just(clientResponse);
                        } catch (Throwable t) {
                            return Observable.error(t);
                        }
                    }
                });
    }
}
public class BinFileSearcher { /** * Search for a sequence of bytes from the file within the specified size
* range starting at the specified position .
* @ param f
* @ param searchBytes
* a sequence of bytes you want to find
* @ param startPosition
* ' 0 ' means the beginning of the file
* @ param maxSizeToRead
* max size to read . ' - 1 ' means read until the end .
* @ return */
public List < Long > searchPartially ( File f , byte [ ] searchBytes , long startPosition , long maxSizeToRead ) { } } | if ( USE_NIO ) { return searchPartiallyUsingNIO ( f , searchBytes , startPosition , maxSizeToRead , null ) ; } else { return searchPartiallyUsingLegacy ( f , searchBytes , startPosition , maxSizeToRead , null ) ; } |
public class ExamplesUtil { /** * Generates examples for integer properties - if there are enums , it uses first enum value , returns 0 otherwise .
* @ param enumValues the enum values
* @ return example */
public static Integer generateIntegerExample ( List < Integer > enumValues ) { } } | if ( enumValues == null || enumValues . isEmpty ( ) ) { return 0 ; } else { return enumValues . get ( 0 ) ; } |
public class ModelResourceStructure { /** * < p > applyRowCountHeader . < / p >
* @ param headerParams a { @ link javax . ws . rs . core . MultivaluedMap } object .
* @ param query a { @ link io . ebean . Query } object .
* @ param rowCount a { @ link io . ebean . FutureRowCount } object . */
protected void applyRowCountHeader ( MultivaluedMap < String , Object > headerParams , Query query , FutureRowCount rowCount ) { } } | ModelInterceptor . applyRowCountHeader ( headerParams , query , rowCount ) ; |
public class HebrewCalendar { /** * Add a signed amount to a specified field , using this calendar ' s rules .
* For example , to add three days to the current date , you can call
* < code > add ( Calendar . DATE , 3 ) < / code > .
* When adding to certain fields , the values of other fields may conflict and
* need to be changed . For example , when adding one to the { @ link # MONTH MONTH } field
* for the date " 30 Av 5758 " , the { @ link # DAY _ OF _ MONTH DAY _ OF _ MONTH } field
* must be adjusted so that the result is " 29 Elul 5758 " rather than the invalid
* " 30 Elul 5758 " .
* This method is able to add to
* all fields except for { @ link # ERA ERA } , { @ link # DST _ OFFSET DST _ OFFSET } ,
* and { @ link # ZONE _ OFFSET ZONE _ OFFSET } .
* < b > Note : < / b > You should always use { @ link # roll roll } and add rather
* than attempting to perform arithmetic operations directly on the fields
* of a < tt > HebrewCalendar < / tt > . Since the { @ link # MONTH MONTH } field behaves
* discontinuously in non - leap years , simple arithmetic can give invalid results .
* @ param field the time field .
* @ param amount the amount to add to the field .
* @ exception IllegalArgumentException if the field is invalid or refers
* to a field that cannot be handled by this method . */
public void add ( int field , int amount ) { } } | switch ( field ) { case MONTH : { // We can ' t just do a set ( MONTH , get ( MONTH ) + amount ) . The
// reason is ADAR _ 1 . Suppose amount is + 2 and we land in
// ADAR _ 1 - - then we have to bump to ADAR _ 2 aka ADAR . But
// if amount is - 2 and we land in ADAR _ 1 , then we have to
// bump the other way - - down to SHEVAT . - Alan 11/00
int month = get ( MONTH ) ; int year = get ( YEAR ) ; boolean acrossAdar1 ; if ( amount > 0 ) { acrossAdar1 = ( month < ADAR_1 ) ; // started before ADAR _ 1?
month += amount ; for ( ; ; ) { if ( acrossAdar1 && month >= ADAR_1 && ! isLeapYear ( year ) ) { ++ month ; } if ( month <= ELUL ) { break ; } month -= ELUL + 1 ; ++ year ; acrossAdar1 = true ; } } else { acrossAdar1 = ( month > ADAR_1 ) ; // started after ADAR _ 1?
month += amount ; for ( ; ; ) { if ( acrossAdar1 && month <= ADAR_1 && ! isLeapYear ( year ) ) { -- month ; } if ( month >= 0 ) { break ; } month += ELUL + 1 ; -- year ; acrossAdar1 = true ; } } set ( MONTH , month ) ; set ( YEAR , year ) ; pinField ( DAY_OF_MONTH ) ; break ; } default : super . add ( field , amount ) ; break ; } |
public class ByteBufferOutputStream { /** * Write the content from the passed byte buffer to this output stream .
* @ param aSrcBuffer
* The buffer to use . May not be < code > null < / code > . */
public void write ( @ Nonnull final ByteBuffer aSrcBuffer ) { } } | ValueEnforcer . notNull ( aSrcBuffer , "SourceBuffer" ) ; if ( m_bCanGrow && aSrcBuffer . remaining ( ) > m_aBuffer . remaining ( ) ) _growBy ( aSrcBuffer . remaining ( ) ) ; m_aBuffer . put ( aSrcBuffer ) ; |
public class BigDecimal { /** * Match the scales of two { @ code BigDecimal } s to align their
* least significant digits .
* < p > If the scales of val [ 0 ] and val [ 1 ] differ , rescale
* ( non - destructively ) the lower - scaled { @ code BigDecimal } so
* they match . That is , the lower - scaled reference will be
* replaced by a reference to a new object with the same scale as
* the other { @ code BigDecimal } .
* @ param val array of two elements referring to the two
* { @ code BigDecimal } s to be aligned . */
private static void matchScale ( BigDecimal [ ] val ) { } } | if ( val [ 0 ] . scale == val [ 1 ] . scale ) { return ; } else if ( val [ 0 ] . scale < val [ 1 ] . scale ) { val [ 0 ] = val [ 0 ] . setScale ( val [ 1 ] . scale , ROUND_UNNECESSARY ) ; } else if ( val [ 1 ] . scale < val [ 0 ] . scale ) { val [ 1 ] = val [ 1 ] . setScale ( val [ 0 ] . scale , ROUND_UNNECESSARY ) ; } |
public class ShardingCTLBackendHandlerFactory { /** * Create new instance of sharding CTL backend handler .
* @ param sql SQL to be executed
* @ param backendConnection backend connection
* @ return instance of sharding CTL backend handler */
public static TextProtocolBackendHandler newInstance ( final String sql , final BackendConnection backendConnection ) { } } | if ( sql . toUpperCase ( ) . startsWith ( SCTL_SET ) ) { return new ShardingCTLSetBackendHandler ( sql , backendConnection ) ; } if ( sql . toUpperCase ( ) . startsWith ( SCTL_SHOW ) ) { return new ShardingCTLShowBackendHandler ( sql , backendConnection ) ; } if ( sql . toUpperCase ( ) . startsWith ( SCTL_EXPLAIN ) ) { return new ShardingCTLExplainBackendHandler ( sql , backendConnection ) ; } throw new IllegalArgumentException ( sql ) ; |
public class BugLinkStrategyFactory { /** * Register a strategy with the factory so that it can be used .
* @ param type The Bug Links type the strategy is to be used for .
* @ param priority The priority of the strategy . ( 1 - 10 with 1 being the Highest and 10 being the lowest )
* @ param strategyClass The strategies class .
* @ param serverUrl The server url that the strategy should be used for . */
public void registerStrategy ( final BugLinkType type , final Integer priority , final Class < ? extends BaseBugLinkStrategy < ? > > strategyClass , final String serverUrl ) { } } | registerStrategy ( type , priority , strategyClass , Arrays . asList ( serverUrl ) ) ; |
public class WebApplicationHandler { /** * Add a mapping from a pathSpec to a Filter .
* @ param pathSpec The path specification
* @ param filterName The name of the filter ( must already be added or defined )
* @ param dispatches An integer formed by the logical OR of FilterHolder . _ _ REQUEST ,
* FilterHolder . _ _ FORWARD , FilterHolder . _ _ INCLUDE and / or FilterHolder . _ _ ERROR .
* @ return The holder of the filter instance . */
public FilterHolder addFilterPathMapping ( String pathSpec , String filterName , int dispatches ) { } } | FilterHolder holder = ( FilterHolder ) _filterMap . get ( filterName ) ; if ( holder == null ) throw new IllegalArgumentException ( "unknown filter: " + filterName ) ; FilterMapping mapping = new FilterMapping ( pathSpec , holder , dispatches ) ; _pathFilters . add ( mapping ) ; return holder ; |
public class ServerAdvisorsInner { /** * Gets a list of server advisors .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the AdvisorListResultInner object if successful . */
public AdvisorListResultInner listByServer ( String resourceGroupName , String serverName ) { } } | return listByServerWithServiceResponseAsync ( resourceGroupName , serverName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class Vector3d { /** * Returns the squared of the Euclidean distance between this vector and
* vector v .
* @ return squared distance between this vector and v */
public double distanceSquared ( Vector3d v ) { } } | double dx = x - v . x ; double dy = y - v . y ; double dz = z - v . z ; return dx * dx + dy * dy + dz * dz ; |
public class GBMModel { /** * Bulk scoring API for one row . Chunks are all compatible with the model ,
* and expect the last Chunks are for the final distribution and prediction .
* Default method is to just load the data into the tmp array , then call
* subclass scoring logic . */
@ Override protected double [ ] score0 ( double data [ /* ncols */
] , double preds [ /* nclasses + 1 */
] , double offset , int ntrees ) { } } | super . score0 ( data , preds , offset , ntrees ) ; // These are f _ k ( x ) in Algorithm 10.4
return score0Probabilities ( preds , offset ) ; |
public class ReidSolomonCodes { /** * Use Forney algorithm to compute correction values .
* @ param message ( Input / Output ) The message which is to be corrected . Just the message . ECC not required .
* @ param length _ msg _ ecc ( Input ) length of message and ecc code
* @ param errorLocations ( Input ) locations of bytes in message with errors . */
void correctErrors ( GrowQueue_I8 message , int length_msg_ecc , GrowQueue_I8 syndromes , GrowQueue_I8 errorLocator , GrowQueue_I32 errorLocations ) { } } | GrowQueue_I8 err_eval = new GrowQueue_I8 ( ) ; // TODO avoid new
findErrorEvaluator ( syndromes , errorLocator , err_eval ) ; // Compute error positions
GrowQueue_I8 X = GrowQueue_I8 . zeros ( errorLocations . size ) ; // TODO avoid new
for ( int i = 0 ; i < errorLocations . size ; i ++ ) { int coef_pos = ( length_msg_ecc - errorLocations . data [ i ] - 1 ) ; X . data [ i ] = ( byte ) math . power ( 2 , coef_pos ) ; // The commented out code below replicates exactly how the reference code works . This code above
// seems to work just as well and passes all the unit tests
// int coef _ pos = math . max _ value - ( length _ msg _ ecc - errorLocations . data [ i ] - 1 ) ;
// X . data [ i ] = ( byte ) math . power _ n ( 2 , - coef _ pos ) ;
} GrowQueue_I8 err_loc_prime_tmp = new GrowQueue_I8 ( X . size ) ; // storage for error magnitude polynomial
for ( int i = 0 ; i < X . size ; i ++ ) { int Xi = X . data [ i ] & 0xFF ; int Xi_inv = math . inverse ( Xi ) ; // Compute the polynomial derivative
err_loc_prime_tmp . size = 0 ; for ( int j = 0 ; j < X . size ; j ++ ) { if ( i == j ) continue ; err_loc_prime_tmp . data [ err_loc_prime_tmp . size ++ ] = ( byte ) GaliosFieldOps . subtract ( 1 , math . multiply ( Xi_inv , X . data [ j ] & 0xFF ) ) ; } // compute the product , which is the denominator of Forney algorithm ( errata locator derivative )
int err_loc_prime = 1 ; for ( int j = 0 ; j < err_loc_prime_tmp . size ; j ++ ) { err_loc_prime = math . multiply ( err_loc_prime , err_loc_prime_tmp . data [ j ] & 0xFF ) ; } int y = math . polyEval_S ( err_eval , Xi_inv ) ; y = math . multiply ( math . power ( Xi , 1 ) , y ) ; // Compute the magnitude
int magnitude = math . divide ( y , err_loc_prime ) ; // only apply a correction if it ' s part of the message and not the ECC
int loc = errorLocations . get ( i ) ; if ( loc < message . size ) message . data [ loc ] = ( byte ) ( ( message . data [ loc ] & 0xFF ) ^ magnitude ) ; } |
public class SibRaTransactionalDispatcher { /** * Returns the < code > XAResource < / code > used to deliver the messages under .
* @ return the < code > XAResource < / code >
* @ throws ResourceException
* if the < code > XAResource < / code > could not be created */
private SIXAResource getXaResource ( ) throws ResourceException { } } | final String methodName = "getXaResource" ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName ) ; } if ( _xaResource == null ) { try { _xaResource = _connection . getSIXAResource ( ) ; } catch ( final SIException exception ) { FFDCFilter . processException ( exception , CLASS_NAME + "." + methodName , FFDC_PROBE_1 , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEventEnabled ( ) ) { SibTr . exception ( this , TRACE , exception ) ; } throw new ResourceException ( NLS . getFormattedMessage ( ( "XARESOURCE_EXCEPTION_CWSIV0650" ) , new Object [ ] { exception , _connection } , null ) , exception ) ; } catch ( final SIErrorException exception ) { FFDCFilter . processException ( exception , CLASS_NAME + "." + methodName , FFDC_PROBE_3 , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEventEnabled ( ) ) { SibTr . exception ( this , TRACE , exception ) ; } throw new ResourceException ( NLS . getFormattedMessage ( ( "XARESOURCE_EXCEPTION_CWSIV0650" ) , new Object [ ] { exception , _connection } , null ) , exception ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName , _xaResource ) ; } return _xaResource ; |
public class DatabasesInner { /** * Resumes a database .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param databaseName The name of the database to be resumed .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the DatabaseInner object */
public Observable < DatabaseInner > beginResumeAsync ( String resourceGroupName , String serverName , String databaseName ) { } } | return beginResumeWithServiceResponseAsync ( resourceGroupName , serverName , databaseName ) . map ( new Func1 < ServiceResponse < DatabaseInner > , DatabaseInner > ( ) { @ Override public DatabaseInner call ( ServiceResponse < DatabaseInner > response ) { return response . body ( ) ; } } ) ; |
public class FixBondOrdersTool { /** * Function to set up an array of integers corresponding to indicate how many free valencies need fulfilling for each atom through ring bonds .
* @ param { @ link IAtomContainer } molecule
* @ param atomsToCheck
* @ param M
* @ return The List of free valencies available for extra ring bonding */
private List < Integer > getFreeValenciesForRingGroup ( IAtomContainer molecule , List < Integer > atomsToCheck , Matrix M , IRingSet rs ) { } } | List < Integer > fvtc = new ArrayList < Integer > ( ) ; for ( int i = 0 ; i < atomsToCheck . size ( ) ; i ++ ) { int j = atomsToCheck . get ( i ) ; // Put in an implicit hydrogen atom for Planar3 C - atoms in 5 - membered rings ( it doesn ' t get put in by the Smiles parser )
if ( ( "C" . equals ( molecule . getAtom ( j ) . getSymbol ( ) ) ) && ( molecule . getAtom ( j ) . getHybridization ( ) == Hybridization . PLANAR3 ) ) { // Check that ring containing the atom is five - membered
for ( IAtomContainer ac : rs . atomContainers ( ) ) { if ( ac . contains ( molecule . getAtom ( j ) ) ) { if ( ( int ) molecule . getBondOrderSum ( molecule . getAtom ( j ) ) == 2 && ac . getAtomCount ( ) == 5 ) { molecule . getAtom ( j ) . setImplicitHydrogenCount ( 1 ) ; break ; } } } } int implicitH = 0 ; if ( molecule . getAtom ( j ) . getImplicitHydrogenCount ( ) == null ) { CDKHydrogenAdder ha = CDKHydrogenAdder . getInstance ( molecule . getBuilder ( ) ) ; try { ha . addImplicitHydrogens ( molecule , molecule . getAtom ( j ) ) ; implicitH = molecule . getAtom ( j ) . getImplicitHydrogenCount ( ) ; } catch ( CDKException e ) { // No need to do anything because implicitH already set to 0
} } else { implicitH = molecule . getAtom ( j ) . getImplicitHydrogenCount ( ) ; } fvtc . add ( molecule . getAtom ( j ) . getValency ( ) - ( implicitH + ( int ) molecule . getBondOrderSum ( molecule . getAtom ( j ) ) ) + M . sumOfRow ( i ) ) ; } return fvtc ; |
public class AbstractAnalyticsService { /** * This method expands a tree into the collapsed set of endpoints .
* @ param root The tree
* @ return The list of endpoints */
protected static List < EndpointInfo > extractEndpointInfo ( EndpointPart root ) { } } | List < EndpointInfo > endpoints = new ArrayList < EndpointInfo > ( ) ; root . extractEndpointInfo ( endpoints , "" ) ; return endpoints ; |
public class ParameterResolverImpl { /** * Validate that application ids and configuration names are unique over all providers . */
private void validateParameterProviders ( ) { } } | Set < String > parameterNames = new HashSet < > ( ) ; for ( ParameterProvider provider : this . parameterProviders ) { Set < String > applicationIdsOfThisProvider = new HashSet < > ( ) ; for ( Parameter < ? > parameter : provider . getParameters ( ) ) { if ( StringUtils . isNotEmpty ( parameter . getApplicationId ( ) ) ) { applicationIdsOfThisProvider . add ( parameter . getApplicationId ( ) ) ; } if ( parameterNames . contains ( parameter . getName ( ) ) ) { log . warn ( "Parameter name is not unique: {} (application: {})" , parameter . getName ( ) , parameter . getApplicationId ( ) ) ; } else { parameterNames . add ( parameter . getName ( ) ) ; } } if ( applicationIdsOfThisProvider . size ( ) > 1 ) { // NOPMD
log . warn ( "Parameter provider {} defines parameters with multiple application Ids: {}" , provider , applicationIdsOfThisProvider . toArray ( new String [ applicationIdsOfThisProvider . size ( ) ] ) ) ; } } |
public class PrcWebstorePage { /** * < p > Find chooseable specifics in given list by ID . < / p >
* @ param pListChSpecifics List Ch - Specifics
* @ param pId Id
* @ return chooseable specifics or null */
public final ChooseableSpecifics findChSpecificsById ( final List < ChooseableSpecifics > pListChSpecifics , final Long pId ) { } } | for ( ChooseableSpecifics chs : pListChSpecifics ) { if ( chs . getItsId ( ) . equals ( pId ) ) { return chs ; } } return null ; |
public class JDBC4ResultSet { /** * Deprecated . use getCharacterStream instead */
@ Override @ Deprecated public InputStream getUnicodeStream ( String columnLabel ) throws SQLException { } } | return getUnicodeStream ( findColumn ( columnLabel ) ) ; |
public class ReviewsImpl { /** * Get the Job Details for a Job Id .
* @ param teamName Your Team Name .
* @ param jobId Id of the job .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the Job object */
public Observable < ServiceResponse < Job > > getJobDetailsWithServiceResponseAsync ( String teamName , String jobId ) { } } | if ( this . client . baseUrl ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.baseUrl() is required and cannot be null." ) ; } if ( teamName == null ) { throw new IllegalArgumentException ( "Parameter teamName is required and cannot be null." ) ; } if ( jobId == null ) { throw new IllegalArgumentException ( "Parameter jobId is required and cannot be null." ) ; } String parameterizedHost = Joiner . on ( ", " ) . join ( "{baseUrl}" , this . client . baseUrl ( ) ) ; return service . getJobDetails ( teamName , jobId , this . client . acceptLanguage ( ) , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Job > > > ( ) { @ Override public Observable < ServiceResponse < Job > > call ( Response < ResponseBody > response ) { try { ServiceResponse < Job > clientResponse = getJobDetailsDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; |
public class CmsShellCommands { /** * Purges the jsp repository . < p >
* @ throws Exception if something goes wrong
* @ see org . opencms . flex . CmsFlexCache # cmsEvent ( org . opencms . main . CmsEvent ) */
public void purgeJspRepository ( ) throws Exception { } } | OpenCms . fireCmsEvent ( new CmsEvent ( I_CmsEventListener . EVENT_FLEX_PURGE_JSP_REPOSITORY , new HashMap < String , Object > ( ) ) ) ; |
public class XPopupBox { /** * Print this field ' s data in XML format .
* @ return true if default params were found for this form .
* @ param out The http output stream .
* @ exception DBException File exception . */
public boolean printData ( PrintWriter out , int iPrintOptions ) { } } | boolean bFieldsFound = false ; String strFieldName = this . getScreenField ( ) . getSFieldParam ( ) ; String string = DBConstants . BLANK ; int iIndex = 0 ; Convert converter = this . getScreenField ( ) . getConverter ( ) ; if ( m_vDisplays == null ) { String strField = null ; if ( converter != null ) if ( converter . getField ( ) != null ) strField = converter . getField ( ) . getString ( ) ; this . scanTableItems ( ) ; if ( converter != null ) if ( converter . getField ( ) != null ) converter . getField ( ) . setString ( strField ) ; } if ( converter != null ) { // This is required for the display or popup fields because displayField ( ) is never called to get the value .
iIndex = converter . convertFieldToIndex ( ) ; if ( iIndex == - 1 ) { return super . printData ( out , iPrintOptions ) ; } try { string = ( String ) m_vDisplays . get ( iIndex ) ; } catch ( ArrayIndexOutOfBoundsException ex ) { string = DBConstants . BLANK ; } } String strFieldData = string ; // this . getScreenField ( ) . getSFieldValue ( true , false ) ;
if ( this . getScreenField ( ) . getConverter ( ) != null ) strFieldData = Utility . encodeXML ( strFieldData ) ; out . println ( " " + Utility . startTag ( strFieldName ) + strFieldData + Utility . endTag ( strFieldName ) ) ; return bFieldsFound ; |
public class CollationBuilder { /** * Adds the mapping and its canonical closure .
* Takes ce32 = dataBuilder . encodeCEs ( . . . ) so that the data builder
* need not re - encode the CEs multiple times . */
private int addWithClosure ( CharSequence nfdPrefix , CharSequence nfdString , long [ ] newCEs , int newCEsLength , int ce32 ) { } } | // Map from the NFD input to the CEs .
ce32 = addIfDifferent ( nfdPrefix , nfdString , newCEs , newCEsLength , ce32 ) ; ce32 = addOnlyClosure ( nfdPrefix , nfdString , newCEs , newCEsLength , ce32 ) ; addTailComposites ( nfdPrefix , nfdString ) ; return ce32 ; |
public class WindowedStream { /** * Applies the given window function to each window . The window function is called for each
* evaluation of the window for each key individually . The output of the window function is
* interpreted as a regular non - windowed stream .
* < p > Note that this function requires that all data in the windows is buffered until the window
* is evaluated , as the function provides no means of incremental aggregation .
* @ param function The window function .
* @ return The data stream that is the result of applying the window function to the window . */
@ PublicEvolving public < R > SingleOutputStreamOperator < R > process ( ProcessWindowFunction < T , R , K , W > function ) { } } | TypeInformation < R > resultType = getProcessWindowFunctionReturnType ( function , getInputType ( ) , null ) ; return process ( function , resultType ) ; |
public class ConfigurationConversionService { /** * Returns a set of { @ code io . motown . vas . viewmodel . model . ChargingCapability } s based on the EVSEs .
* @ param evses list of EVSEs .
* @ return set of ChargingCapability . */
public Set < ChargingCapability > getChargingCapabilitiesFromEvses ( Set < Evse > evses ) { } } | Set < ChargingCapability > chargingCapabilities = new HashSet < > ( ) ; for ( Evse evse : evses ) { for ( Connector connector : evse . getConnectors ( ) ) { chargingCapabilities . add ( ChargingCapability . fromConnector ( connector ) ) ; } } return chargingCapabilities ; |
public class ModelsImpl { /** * Gets information about the closedlist models .
* @ param appId The application ID .
* @ param versionId The version ID .
* @ param listClosedListsOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; ClosedListEntityExtractor & gt ; object */
public Observable < List < ClosedListEntityExtractor > > listClosedListsAsync ( UUID appId , String versionId , ListClosedListsOptionalParameter listClosedListsOptionalParameter ) { } } | return listClosedListsWithServiceResponseAsync ( appId , versionId , listClosedListsOptionalParameter ) . map ( new Func1 < ServiceResponse < List < ClosedListEntityExtractor > > , List < ClosedListEntityExtractor > > ( ) { @ Override public List < ClosedListEntityExtractor > call ( ServiceResponse < List < ClosedListEntityExtractor > > response ) { return response . body ( ) ; } } ) ; |
public class DirectoryConnectSettings { /** * A list of one or more IP addresses of DNS servers or domain controllers in the on - premises directory .
* @ return A list of one or more IP addresses of DNS servers or domain controllers in the on - premises directory . */
public java . util . List < String > getCustomerDnsIps ( ) { } } | if ( customerDnsIps == null ) { customerDnsIps = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return customerDnsIps ; |
public class SocksSocketImpl { /** * Provides the authentication machanism required by the proxy . */
private boolean authenticate ( byte method , InputStream in , BufferedOutputStream out ) throws IOException { } } | return authenticate ( method , in , out , 0L ) ; |
public class NeuralNetworkParser { /** * 获取词性
* @ param postags 词性列表
* @ param id 词性下标
* @ return 词性 */
int POSTAG ( final List < Integer > postags , int id ) { } } | return ( ( id != - 1 ) ? ( postags . get ( id ) + kPostagInFeaturespace ) : kNilPostag ) ; |
public class ConfiguratorOnCreation { /** * Updates the configuration file of an agent .
* @ param parameters
* @ param ssh
* @ param tmpDir
* @ param keyToNewValue
* @ throws IOException */
void updateAgentConfigurationFile ( TargetHandlerParameters parameters , SSHClient ssh , File tmpDir , Map < String , String > keyToNewValue ) throws IOException { } } | this . logger . fine ( "Updating agent parameters on remote host..." ) ; // Update the agent ' s configuration file
String agentConfigDir = Utils . getValue ( parameters . getTargetProperties ( ) , SCP_AGENT_CONFIG_DIR , DEFAULT_SCP_AGENT_CONFIG_DIR ) ; File localAgentConfig = new File ( tmpDir , Constants . KARAF_CFG_FILE_AGENT ) ; File remoteAgentConfig = new File ( agentConfigDir , Constants . KARAF_CFG_FILE_AGENT ) ; // Download remote agent config file . . .
ssh . newSCPFileTransfer ( ) . download ( remoteAgentConfig . getCanonicalPath ( ) , new FileSystemFile ( tmpDir ) ) ; // Replace " parameters " property to point on the user data file . . .
String config = Utils . readFileContent ( localAgentConfig ) ; config = Utils . updateProperties ( config , keyToNewValue ) ; Utils . writeStringInto ( config , localAgentConfig ) ; // Then upload agent config file back
ssh . newSCPFileTransfer ( ) . upload ( new FileSystemFile ( localAgentConfig ) , agentConfigDir ) ; |
public class TypeDefinitionRegistry { /** * Returns true if the specified type exists in the registry and is an abstract ( Interface or Union ) type
* @ param type the type to check
* @ return true if its abstract */
public boolean isInterfaceOrUnion ( Type type ) { } } | Optional < TypeDefinition > typeDefinition = getType ( type ) ; if ( typeDefinition . isPresent ( ) ) { TypeDefinition definition = typeDefinition . get ( ) ; return definition instanceof UnionTypeDefinition || definition instanceof InterfaceTypeDefinition ; } return false ; |
public class UIContextImpl { /** * { @ inheritDoc } */
@ Override public Map < Object , Object > getRequestScratchMap ( final WComponent component ) { } } | if ( requestScratchMap == null ) { requestScratchMap = new HashMap < > ( ) ; } Map < Object , Object > componentScratchMap = requestScratchMap . get ( component ) ; if ( componentScratchMap == null ) { componentScratchMap = new HashMap < > ( 2 ) ; requestScratchMap . put ( component , componentScratchMap ) ; } return componentScratchMap ; |
public class FailoverGroupsInner { /** * Fails over from the current primary server to this server . This operation might result in data loss .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server containing the failover group .
* @ param failoverGroupName The name of the failover group .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < FailoverGroupInner > forceFailoverAllowDataLossAsync ( String resourceGroupName , String serverName , String failoverGroupName , final ServiceCallback < FailoverGroupInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( forceFailoverAllowDataLossWithServiceResponseAsync ( resourceGroupName , serverName , failoverGroupName ) , serviceCallback ) ; |
public class ResolveSpringHibernateJPADataSourceRuleProvider { /** * < bean id = " jpaVendorAdapter2 " class = " org . springframework . orm . jpa . vendor . HibernateJpaVendorAdapter " > < property name = " dataSource " value = " HSQL " / >
* < / bean > */
private String extractHibernateJpaVendorDatabase ( Document doc , Element bean ) { } } | for ( Element jpaVendorAdapterProperty : $ ( bean ) . children ( "property" ) . filter ( attr ( "name" , "jpaVendorAdapter" ) ) . get ( ) ) { String propertyRef = $ ( jpaVendorAdapterProperty ) . attr ( "ref" ) ; if ( StringUtils . isNotBlank ( propertyRef ) ) { // look for the properties referenced by a local bean . .
for ( Element jpaVendorAdapter : findLocalBeanById ( doc , propertyRef ) ) { // check attribute on element
String propAttrValue = $ ( jpaVendorAdapter ) . attr ( "database" ) ; if ( StringUtils . isNotBlank ( propAttrValue ) ) { return propAttrValue ; } // now look for the property " dataSource " off of that bean .
for ( Element p : $ ( jpaVendorAdapter ) . children ( "property" ) . filter ( attr ( "name" , "database" ) ) . get ( ) ) { String value = $ ( p ) . attr ( "value" ) ; if ( StringUtils . isNotBlank ( value ) ) { return value ; } } } } } return null ; |
public class SpeechClient { /** * Performs asynchronous speech recognition : receive results via the google . longrunning . Operations
* interface . Returns either an ` Operation . error ` or an ` Operation . response ` which contains a
* ` LongRunningRecognizeResponse ` message .
* < p > Sample code :
* < pre > < code >
* try ( SpeechClient speechClient = SpeechClient . create ( ) ) {
* RecognitionConfig . AudioEncoding encoding = RecognitionConfig . AudioEncoding . FLAC ;
* int sampleRateHertz = 44100;
* String languageCode = " en - US " ;
* RecognitionConfig config = RecognitionConfig . newBuilder ( )
* . setEncoding ( encoding )
* . setSampleRateHertz ( sampleRateHertz )
* . setLanguageCode ( languageCode )
* . build ( ) ;
* String uri = " gs : / / bucket _ name / file _ name . flac " ;
* RecognitionAudio audio = RecognitionAudio . newBuilder ( )
* . setUri ( uri )
* . build ( ) ;
* LongRunningRecognizeResponse response = speechClient . longRunningRecognizeAsync ( config , audio ) . get ( ) ;
* < / code > < / pre >
* @ param config & # 42 ; Required & # 42 ; Provides information to the recognizer that specifies how to
* process the request .
* @ param audio & # 42 ; Required & # 42 ; The audio data to be recognized .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < LongRunningRecognizeResponse , LongRunningRecognizeMetadata > longRunningRecognizeAsync ( RecognitionConfig config , RecognitionAudio audio ) { } } | LongRunningRecognizeRequest request = LongRunningRecognizeRequest . newBuilder ( ) . setConfig ( config ) . setAudio ( audio ) . build ( ) ; return longRunningRecognizeAsync ( request ) ; |
public class JDBCDateTimeTimestampPositionParser { /** * Parses a date / time into a PROTEMPA position . For types date , time and
* timestamp it calls the corresponding method in { @ link ResultSet } . For
* other types , it calls { @ link ResultSet # getTimestamp ( int ) } and lets the
* JDBC driver attempt to parse a date .
* @ param resultSet a { @ link ResultSet } . Cannot be < code > null < / code > .
* @ param columnIndex the index of the column to retrieve from the result
* set .
* @ param colType the SQL type of the column as a { @ link Types } .
* @ return a PROTEMPA position or < code > null < / code > if the column in the
* database is < code > NULL < / code > .
* @ throws SQLException if there is an error accessing the result set or the
* column cannot be parsed into a date . */
@ Override public Long toPosition ( ResultSet resultSet , int columnIndex , int colType ) throws SQLException { } } | Date result ; switch ( colType ) { case Types . DATE : result = resultSet . getDate ( columnIndex ) ; break ; case Types . TIME : result = resultSet . getTime ( columnIndex ) ; break ; default : /* * We ' ll end up here for Types . TIMESTAMP and non - date SQL data
* types . For the latter , we ' ll let the JDBC driver try to
* parse a date . */
result = resultSet . getTimestamp ( columnIndex ) ; } if ( result != null ) { return AbsoluteTimeGranularityUtil . asPosition ( result ) ; } else { return this . defaultDate ; } |
public class UtilFile {
    /**
     * Get all files existing in the path with the specified name, searching the
     * directory tree recursively via {@code getFilesByNameRecursive}.
     *
     * @param path The path to check (must not be <code>null</code>).
     * @param name The file name (must not be <code>null</code>).
     * @return The files list (empty list if none found).
     * @throws LionEngineException If invalid arguments.
     */
    public static List<File> getFilesByName(File path, String name) {
        // Fail fast on null arguments before walking the tree.
        Check.notNull(path);
        Check.notNull(name);
        final List<File> filesList = new ArrayList<>(1);
        // The recursive helper accumulates matches into filesList in place.
        getFilesByNameRecursive(filesList, path, name);
        return filesList;
    }
}
public class LimitParameter { /** * Ensures that the limit is in the allowed range .
* @ param limit the parsed limit value to check the range of
* @ throws InvalidParameterException if the limit value is less outside of the allowed range */
private void checkLimitRange ( int limit ) { } } | int minLimit = offsetPaginationConfiguration . allowZeroLimit ( ) ? 0 : 1 ; if ( limit < minLimit || limit > offsetPaginationConfiguration . maxLimit ( ) ) { throw InvalidParameterException . create ( "Invalid value for '" + this . getParameterFieldName ( ) + "': " + limit + " - value between " + minLimit + " and " + offsetPaginationConfiguration . maxLimit ( ) + " is required." ) ; } |
public class SQLiteQueryBuilder { /** * Append a chunk to the WHERE clause of the query . All chunks appended are surrounded
* by parenthesis and ANDed with the selection passed to { @ link # query } . The final
* WHERE clause looks like :
* WHERE ( & lt ; append chunk 1 > & lt ; append chunk2 > ) AND ( & lt ; query ( ) selection parameter > )
* @ param inWhere the chunk of text to append to the WHERE clause . */
public void appendWhere ( CharSequence inWhere ) { } } | if ( mWhereClause == null ) { mWhereClause = new StringBuilder ( inWhere . length ( ) + 16 ) ; } if ( mWhereClause . length ( ) == 0 ) { mWhereClause . append ( '(' ) ; } mWhereClause . append ( inWhere ) ; |
public class JsonConfig { /** * Returns a set of default excludes with user - defined excludes . < br >
* [ Java - & gt ; JSON ] */
public Collection getMergedExcludes ( ) { } } | Collection exclusions = new HashSet ( ) ; for ( int i = 0 ; i < excludes . length ; i ++ ) { String exclusion = excludes [ i ] ; if ( ! StringUtils . isBlank ( excludes [ i ] ) ) { exclusions . add ( exclusion . trim ( ) ) ; } } if ( ! ignoreDefaultExcludes ) { for ( int i = 0 ; i < DEFAULT_EXCLUDES . length ; i ++ ) { if ( ! exclusions . contains ( DEFAULT_EXCLUDES [ i ] ) ) { exclusions . add ( DEFAULT_EXCLUDES [ i ] ) ; } } } return exclusions ; |
public class ConnectorImpl { /** * If not already created , a new < code > license < / code > element with the given value will be created .
* Otherwise , the existing < code > license < / code > element will be returned .
* @ return a new or existing instance of < code > License < Connector < T > > < / code > */
public License < Connector < T > > getOrCreateLicense ( ) { } } | Node node = childNode . getOrCreate ( "license" ) ; License < Connector < T > > license = new LicenseImpl < Connector < T > > ( this , "license" , childNode , node ) ; return license ; |
public class KuznechikMath {
    /**
     * Multiplies two field elements in GF(2^8) by shift-and-add (Russian peasant)
     * multiplication, reducing with the constant 0xC3 whenever the high bit is
     * shifted out. Deliberately NOT constant time: the loop count depends on the
     * value of {@code y}.
     *
     * @param x first factor, treated as an unsigned byte
     * @param y second factor, treated as an unsigned byte
     * @return the GF(2^8) product of {@code x} and {@code y}
     */
    public static byte kuz_mul_gf256(byte x, byte y) {
        int multiplicand = x & 0xFF; // running value of x, kept as an unsigned int
        int multiplier = y & 0xFF;   // remaining bits of y
        int product = 0;
        while (multiplier != 0) {
            // Add (XOR) the current multiplicand when the low multiplier bit is set.
            if ((multiplier & 1) != 0) {
                product ^= multiplicand;
            }
            // Double the multiplicand; if the high bit falls off, reduce modulo the
            // field polynomial (0xC3 is its low byte).
            multiplicand = ((multiplicand << 1) ^ ((multiplicand & 0x80) != 0 ? 0xC3 : 0x00)) & 0xFF;
            multiplier >>= 1;
        }
        return (byte) product;
    }
}
public class ChineseCalendar {
    /**
     * Return the closest new moon to the given date, searching either
     * forward or backward in time.
     *
     * @param days days after January 1, 1970 0:00 Asia/Shanghai
     * @param after if true, search for a new moon on or after the given
     *        date; otherwise, search for a new moon before it
     * @return days after January 1, 1970 0:00 Asia/Shanghai of the nearest
     *         new moon after or before <code>days</code>
     */
    private int newMoonNear(int days, boolean after) {
        // Position the shared astronomer at the requested day (epoch millis).
        astro.setTime(daysToMillis(days));
        // Ask for the nearest NEW_MOON event in the requested direction.
        long newMoon = astro.getMoonTime(CalendarAstronomer.NEW_MOON, after);
        // Convert back from millis to the day-count representation used here.
        return millisToDays(newMoon);
    }
}
public class JMMap {
    /**
     * Returns a copy of the map ordered by the supplied key comparator.
     * Note: keys that the comparator considers equal collapse into a single entry,
     * as per {@link TreeMap} semantics.
     *
     * @param <K> the key type
     * @param <V> the value type
     * @param map the map to copy
     * @param comparator the key ordering to apply
     * @return a {@link TreeMap} containing the entries of {@code map}, sorted by key
     */
    public static <K, V> Map<K, V> sort(Map<K, V> map, Comparator<K> comparator) {
        final TreeMap<K, V> ordered = new TreeMap<>(comparator);
        ordered.putAll(map);
        return ordered;
    }
}
public class ServiceInstanceQueryHelper { /** * Filter the ModelServiceInstance list against the ServiceInstanceQuery .
* @ param query
* the ServiceInstanceQuery matchers .
* @ param list
* the ModelServiceInstance list .
* @ return
* the matched ModelServiceInstance list . */
public static List < ModelServiceInstance > filter ( ServiceInstanceQuery query , List < ModelServiceInstance > list ) { } } | if ( list == null || list . size ( ) == 0 ) { return Collections . emptyList ( ) ; } List < QueryCriterion > criteria = query . getCriteria ( ) ; if ( criteria == null || criteria . size ( ) == 0 ) { return list ; } List < ModelServiceInstance > instances = new ArrayList < ModelServiceInstance > ( ) ; for ( ModelServiceInstance instance : list ) { boolean passed = true ; for ( QueryCriterion criterion : criteria ) { if ( criterion . isMatch ( instance . getMetadata ( ) ) == false ) { passed = false ; break ; } } if ( passed ) { instances . add ( instance ) ; } } return instances ; |
public class PlainOldFixture {
    /**
     * {@inheritDoc}
     * <p>
     * Builds the message used to check {@code name} on the target: it aggregates any
     * matching invocations, a getter (if one exists), and a readable field (if one
     * exists). If nothing matches on the target and a system-under-test is available,
     * the lookup recurses into a fixture wrapping the system-under-test.
     */
    protected Message getCheckMessage(String name) {
        final Class type = target.getClass();
        InvocationMessage invocationMessage = getInvocations(name);
        // Prefer a getter method when the target exposes one for this name.
        Method getterMethod = getGetter(type, name);
        if (getterMethod != null) invocationMessage.addMessage(new StaticInvocation(target, getterMethod));
        // Also allow direct field access as a fallback reader.
        Field field = getField(type, name);
        if (field != null) invocationMessage.addMessage(new FieldReader(target, field));
        if (!invocationMessage.isEmpty()) return invocationMessage;
        // Nothing found on the target itself; delegate to the system-under-test, if any.
        if (getSystemUnderTest() == null) return null;
        PlainOldFixture fixture = new PlainOldFixture(getSystemUnderTest());
        return fixture.getCheckMessage(name);
    }
}
public class StreamUtil { /** * generate a String of UTF - 8 characters ( or hex - digits if byteStream isn ' t UTF - 8 chars ) from byteStream ,
* truncating to maxLen ( with " . . . " added if the result is truncated )
* @ param byteStream the source of bytes to be converted to a UTF - 8 String
* @ param maxLen the point at which to truncate the string ( - 1 means don ' t truncate ) in which case " . . . " is appended
* @ return the String read from the stream */
public static String toUtf8StringOrHex ( ByteArrayOutputStream byteStream , int maxLen ) { } } | if ( maxLen == - 1 ) { maxLen = Integer . MAX_VALUE ; } String result ; try { result = byteStream . toString ( "UTF-8" ) ; } catch ( Exception notUtf8 ) { result = Hex . encodeHexString ( byteStream . toByteArray ( ) ) ; } final int resultLen = result != null ? result . length ( ) : 0 ; final String suffix = resultLen > maxLen ? "..." : "" ; return resultLen == 0 ? "" : result . substring ( 0 , Math . min ( resultLen , maxLen ) ) + suffix ; |
public class ListMultipartUploadsResult { /** * A list of in - progress multipart uploads .
* @ param uploadsList
* A list of in - progress multipart uploads . */
public void setUploadsList ( java . util . Collection < UploadListElement > uploadsList ) { } } | if ( uploadsList == null ) { this . uploadsList = null ; return ; } this . uploadsList = new java . util . ArrayList < UploadListElement > ( uploadsList ) ; |
public class InjectorConfiguration {
    /**
     * Returns a factory (provider) class registered for the given class.
     *
     * @param classDefinition the class that is to be instantiated.
     * @param <T> the type of the class to be instantiated.
     * @return the registered provider class for the specified class, or {@code null}
     *         when no factory is registered for it (presumably a map lookup miss —
     *         depends on {@code factoryClasses.getRootValue()} semantics; verify).
     */
    @Nullable
    public <T> Class<Provider<T>> getFactoryClass(Class<T> classDefinition) {
        // Raw cast bridges the untyped registry value to the generic return type.
        // noinspection unchecked
        return (Class) factoryClasses.getRootValue().get(classDefinition);
    }
}
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcWindowStyleOperationEnumToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class CStruct {
    /**
     * Prints a pretty (jsonCompact) string representation of the message by serializing
     * it into an in-memory buffer and decoding that buffer as UTF-8.
     *
     * @param message The message to stringify.
     * @param <Message> The contained message type.
     * @return The resulting string.
     */
    protected static <Message extends PMessage<Message, CField>> String asString(Message message) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // PRETTY_SERIALIZER writes the textual form of the message into the buffer.
        PRETTY_SERIALIZER.serialize(baos, message);
        return new String(baos.toByteArray(), UTF_8);
    }
}
public class ExtSSHExec {
    /**
     * Writes a string to a file. If the destination file exists, it is either
     * appended to or overwritten depending on the "append" value.
     *
     * <p>Improvements over the previous version: the writer is closed via
     * try-with-resources (the old code used an explicit finally block), and the
     * string is written in a single call instead of being copied chunk-by-chunk
     * through a StringReader and an intermediate char buffer — the payload is
     * already fully in memory, so the copy loop bought nothing. Closing the
     * writer also flushes it, so the explicit flush() is gone too.
     *
     * <p>NOTE(review): FileWriter uses the platform default charset, as before —
     * kept for behavioral compatibility.
     *
     * @param from   string to write
     * @param append if true, append to existing file, else overwrite
     * @param to     file to write to
     * @exception IOException on io error
     */
    private void writeToFile(String from, boolean append, File to) throws IOException {
        try (FileWriter out = new FileWriter(to.getAbsolutePath(), append)) {
            out.write(from);
        }
    }
}
public class FactoryTrackerObjectQuad {
    /**
     * Create an instance of {@link SparseFlowObjectTracker Sparse Flow Object Tracker} for the
     * {@link TrackerObjectQuad} interface.
     *
     * @param config Configuration for the tracker. Null for default.
     * @param imageType Image input type.
     * @param derivType Image derivative type. Null for default.
     * @param <T> Image input type
     * @param <D> Image derivative type
     * @return TrackerObjectQuad
     */
    public static <T extends ImageGray<T>, D extends ImageGray<D>> TrackerObjectQuad<T> sparseFlow(SfotConfig config, Class<T> imageType, Class<D> derivType) {
        // Null derivType: use the library's default derivative type for this image type.
        if (derivType == null)
            derivType = GImageDerivativeOps.getDerivativeType(imageType);
        // Null config: use defaults.
        if (config == null)
            config = new SfotConfig();
        ImageGradient<T, D> gradient = FactoryDerivative.sobel(imageType, derivType);
        SparseFlowObjectTracker<T, D> tracker = new SparseFlowObjectTracker<>(config, imageType, derivType, gradient);
        // Wrap the sparse-flow tracker in an adapter exposing the TrackerObjectQuad interface.
        return new Sfot_to_TrackObjectQuad<>(tracker, imageType);
    }
}
public class MsgDestEncodingUtilsImpl {
    /**
     * @see com.ibm.ws.sib.api.jms.MessageDestEncodingUtils#getDestinationFromMsgRepresentation(byte[])
     *
     * Inflates the efficient byte[] representation from the message into a
     * JmsDestination object. The first byte encodes the destination kind
     * (topic vs queue); the remaining bytes carry the basic properties
     * (DeliveryMode, Priority, TTL) followed by the other properties.
     *
     * Throws a JMSException if there are problems during the deserialization
     * process, for example if the parameter is null.
     */
    public final JmsDestination getDestinationFromMsgRepresentation(byte[] msgForm) throws JMSException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getDestinationFromMsgRepresentation");
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "msgForm: " + SibTr.formatBytes(msgForm));
        JmsDestination newDest = null;
        if (msgForm == null) {
            // Error case: a null encoded form cannot be inflated.
            throw (JMSException) JmsErrorUtils.newThrowable(JMSException.class, "INTERNAL_INVALID_VALUE_CWSIA0361", new Object[] { "null", "getDestinationFromMsgRepresentation(byte[])" }, tc);
        }
        // The first half-byte in the message form indicates the Destination type.
        if ((msgForm[0] & TOPIC_TYPE) == TOPIC_TYPE) {
            newDest = new JmsTopicImpl();
        } else {
            newDest = new JmsQueueImpl();
        }
        // Decode the basic properties (i.e. DeliveryMode, Priority & TTL) onto the new
        // Destination; the helper returns the offset where the remaining data starts.
        int offset = decodeBasicProperties(newDest, msgForm);
        // Now decode the rest of the byte[] onto this new Destination.
        decodeOtherProperties(newDest, msgForm, offset);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getDestinationFromMsgRepresentation", newDest);
        return newDest;
    }
}
public class ZooModel {
    /**
     * Returns a pretrained model for the given pretrained-weights flavor, if available.
     * The weights file is downloaded into the local DL4J cache directory on first use and
     * reused afterwards; if a non-zero checksum is published for the file, the cached copy
     * is verified (and deleted on mismatch) before the model is restored.
     *
     * @param pretrainedType which set of pretrained weights to load
     * @param <M> the concrete model type (MultiLayerNetwork or ComputationGraph)
     * @return the restored pretrained model
     * @throws IOException if the download or deserialization fails
     * @throws UnsupportedOperationException if no pretrained weights exist for this model,
     *         or the model type is not MultiLayerNetwork/ComputationGraph
     * @throws IllegalStateException if the downloaded file fails checksum verification
     */
    public <M extends Model> M initPretrained(PretrainedType pretrainedType) throws IOException {
        String remoteUrl = pretrainedUrl(pretrainedType);
        if (remoteUrl == null)
            throw new UnsupportedOperationException("Pretrained " + pretrainedType + " weights are not available for this model.");
        // Cache path: <DL4J zoo-model dir>/<model name>/<file name from the URL>.
        String localFilename = new File(remoteUrl).getName();
        File rootCacheDir = DL4JResources.getDirectory(ResourceType.ZOO_MODEL, modelName());
        File cachedFile = new File(rootCacheDir, localFilename);
        if (!cachedFile.exists()) {
            log.info("Downloading model to " + cachedFile.toString());
            FileUtils.copyURLToFile(new URL(remoteUrl), cachedFile);
        } else {
            log.info("Using cached model at " + cachedFile.toString());
        }
        // A published checksum of 0 means "no checksum available"; skip verification then.
        long expectedChecksum = pretrainedChecksum(pretrainedType);
        if (expectedChecksum != 0L) {
            log.info("Verifying download...");
            Checksum adler = new Adler32();
            FileUtils.checksum(cachedFile, adler);
            long localChecksum = adler.getValue();
            log.info("Checksum local is " + localChecksum + ", expecting " + expectedChecksum);
            if (expectedChecksum != localChecksum) {
                // Remove the corrupt download so the next attempt re-fetches it.
                log.error("Checksums do not match. Cleaning up files and failing...");
                cachedFile.delete();
                throw new IllegalStateException("Pretrained model file failed checksum. If this error persists, please open an issue at https://github.com/deeplearning4j/deeplearning4j.");
            }
        }
        // Restore using the deserializer matching this zoo model's declared type.
        if (modelType() == MultiLayerNetwork.class) {
            return (M) ModelSerializer.restoreMultiLayerNetwork(cachedFile);
        } else if (modelType() == ComputationGraph.class) {
            return (M) ModelSerializer.restoreComputationGraph(cachedFile);
        } else {
            throw new UnsupportedOperationException("Pretrained models are only supported for MultiLayerNetwork and ComputationGraph.");
        }
    }
}
public class MainScene {
    /**
     * Called on the first {@link org.gearvrf.GVRMain#onStep} to run first-rendering setup:
     * captures the camera rig's initial yaw as the front-facing rotation and triggers the
     * scene root's own first-step handling (the first layout pass).
     *
     * @return true if this call performed the first-step work, false on every later call
     */
    private boolean onFirstStep() {
        if (!mFirstStepDone) {
            Log.d(TAG, "onFirstStep()");
            // Latch immediately so this branch runs exactly once.
            mFirstStepDone = true;
            float yaw = getMainCameraRigYaw();
            Log.d(TAG, "Update frontFacingRotation at start: %f", yaw);
            updateFrontFacingRotation(yaw);
            // Now we will do our first layout.
            mSceneRootWidget.onFirstStep();
            return true;
        }
        return false;
    }
}
public class OverdueListener {
    /**
     * Optimization: don't bother running the Overdue machinery if it's disabled.
     * Returns true only when the tenant's overdue config exists, has account-level
     * states, and at least one of those states carries a condition evaluation.
     * A config lookup failure is logged and treated the same as "no config".
     */
    private boolean shouldInsertNotification(final InternalTenantContext internalTenantContext) {
        OverdueConfig overdueConfig;
        try {
            overdueConfig = overdueConfigCache.getOverdueConfig(internalTenantContext);
        } catch (final OverdueApiException e) {
            // Best-effort: a broken config means overdue handling is effectively off.
            log.warn("Failed to extract overdue config for tenantRecordId='{}'", internalTenantContext.getTenantRecordId());
            overdueConfig = null;
        }
        if (overdueConfig == null || overdueConfig.getOverdueStatesAccount() == null || overdueConfig.getOverdueStatesAccount().getStates() == null) {
            return false;
        }
        // Any state with a condition evaluation means overdue processing can trigger.
        for (final DefaultOverdueState state : ((DefaultOverdueConfig) overdueConfig).getOverdueStatesAccount().getStates()) {
            if (state.getConditionEvaluation() != null) {
                return true;
            }
        }
        return false;
    }
}
public class Bucket {
    /**
     * Returns a list of at most the provided number of stored objects, rendered as an
     * S3-style {@code ListBucketResult} XML document.
     *
     * @param output the xml structured output the list of objects should be written to
     * @param limit controls the maximum number of objects returned
     * @param marker the key to start with when listing objects in a bucket
     * @param prefix limits the response to keys that begin with the specified prefix
     */
    public void outputObjects(XMLStructuredOutput output, int limit, @Nullable String marker, @Nullable String prefix) {
        // The visitor applies limit/marker/prefix while walking the bucket's file tree.
        ListFileTreeVisitor visitor = new ListFileTreeVisitor(output, limit, marker, prefix);
        output.beginOutput("ListBucketResult", Attribute.set("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"));
        output.property("Name", getName());
        output.property("MaxKeys", limit);
        output.property("Marker", marker);
        output.property("Prefix", prefix);
        try {
            Files.walkFileTree(file.toPath(), visitor);
        } catch (IOException e) {
            // Walk errors are routed through the central exception handler; the
            // partially written listing is still closed below.
            Exceptions.handle(e);
        }
        // Truncated when more entries were seen than the (positive) limit allows.
        output.property("IsTruncated", limit > 0 && visitor.getCount() > limit);
        output.endOutput();
    }
}
public class Iterators {
    /**
     * Returns an {@link Iterable} that walks the given list from its last element to its
     * first. The view is lazy: each call to {@code iterator()} reads the list's current
     * size, and {@link Iterator#remove()} removes from the underlying list.
     *
     * @since 1.150
     */
    public static <T> Iterable<T> reverse(final List<T> lst) {
        return new Iterable<T>() {
            public Iterator<T> iterator() {
                // A ListIterator positioned past the end walks backwards via previous().
                final ListIterator<T> cursor = lst.listIterator(lst.size());
                return new Iterator<T>() {
                    public boolean hasNext() {
                        return cursor.hasPrevious();
                    }
                    public T next() {
                        return cursor.previous();
                    }
                    public void remove() {
                        cursor.remove();
                    }
                };
            }
        };
    }
}
public class Util {
    /**
     * Returns debugging info about the types of a list of objects: a comma-separated
     * string with one {@code <class name>=<value>} entry per element, or the literal
     * {@code null} for null entries.
     *
     * @param list the objects to describe
     * @return the joined description (empty string for an empty list)
     */
    private static String getTypeInfo(List<Object> list) {
        StringBuilder info = new StringBuilder();
        for (Object item : list) {
            // Separate entries after the first one.
            if (info.length() > 0) {
                info.append(", ");
            }
            if (item == null) {
                info.append("null");
            } else {
                info.append(item.getClass().getName()).append("=").append(item);
            }
        }
        return info.toString();
    }
}
public class TypeUtility {
    /**
     * Convert a WSDL-style
     * {@link org.fcrepo.server.types.gen.FieldSearchResult FieldSearchResult} to a
     * local {@link FieldSearchResult}. List-session fields default to zero sizes,
     * an epoch expiration date and a null token when the generated result carries
     * no list session.
     */
    public static FieldSearchResult convertGenFieldSearchResultToFieldSearchResult(org.fcrepo.server.types.gen.FieldSearchResult fsr) {
        // Defaults used when the generated result has no list session attached.
        long completeListSize = 0;
        long cursor = 0;
        Date expirationDate = new Date(0);
        String token = null;
        ListSession listSession = fsr.getListSession().getValue();
        if (listSession != null) {
            completeListSize = listSession.getCompleteListSize().longValue();
            cursor = listSession.getCursor().longValue();
            expirationDate = DateUtility.convertStringToDate(listSession.getExpirationDate().getValue());
            token = listSession.getToken();
        }
        // Convert the generated object-fields entries into their local counterparts.
        List<org.fcrepo.server.search.ObjectFields> objectFields = convertGenObjectFieldsArrayToObjectFieldsList(fsr.getResultList().getObjectFields().toArray(new org.fcrepo.server.types.gen.ObjectFields[0]));
        return new BasicFieldSearchResult(completeListSize, cursor, expirationDate, token, objectFields);
    }
}
public class EquDirective {
    /**
     * {@inheritDoc}
     * <p>
     * Evaluates the directive's expression and replaces the token span
     * [replacingFromTokenIndex, replaceToTokenIndex] with a single token holding
     * the result. LocalDateTime results are rendered with a time-only, date-only,
     * or combined formatter: a LocalDate.MIN date part appears to act as a
     * "time-only" sentinel and a LocalTime.MIN time part as a "date-only"
     * sentinel — verify against how Equ builds its results. Any evaluation error
     * is logged and rethrown as a ParseException.
     */
    @Override
    public Token replaceToken(final Token[] tokens, final int replacingFromTokenIndex, final int replaceToTokenIndex) throws ParseException, IOException {
        final Equ equ = Equ.getInstance(true);
        String resultAsATokenValue = null;
        try {
            Object equResult;
            equResult = equ.evaluate(data);
            if (equResult instanceof LocalDateTime) {
                final LocalDateTime ldt = (LocalDateTime) equResult;
                // Date part is the MIN sentinel: format as time only.
                if (ldt.toLocalDate().equals(LocalDate.MIN))
                    resultAsATokenValue = TemporalHelper.getOutputTF().format(ldt);
                // Time part is the MIN sentinel (midnight): format as date only.
                else if (ldt.toLocalTime().equals(LocalTime.MIN))
                    resultAsATokenValue = TemporalHelper.getOutputDF().format(ldt);
                else
                    resultAsATokenValue = TemporalHelper.getOutputDTF().format(ldt);
            } else
                resultAsATokenValue = equResult.toString();
        } catch (final Exception e) {
            log
            ger.error("{}", e.getMessage(), e);
            throw new ParseException(e.getMessage(), 0);
        }
        // The replacement token spans from the start of the first replaced token
        // to the end of the last one.
        return new Token(tokens[replacingFromTokenIndex].charCommand(), resultAsATokenValue, tokens[replacingFromTokenIndex].getInputStartX(), tokens[replaceToTokenIndex].getInputEndX(), true);
    }
}
public class BboxService { /** * Translate a bounding box by applying a new center point .
* @ param bbox
* The original bounding box to translate . This one will remain untouched .
* @ param center
* The new center point .
* @ return The result as a new bounding box . */
public static Bbox setCenterPoint ( Bbox bbox , Coordinate center ) { } } | double x = center . getX ( ) - 0.5 * bbox . getWidth ( ) ; double y = center . getY ( ) - 0.5 * bbox . getHeight ( ) ; return new Bbox ( x , y , bbox . getWidth ( ) , bbox . getHeight ( ) ) ; |
public class GetGameSessionLogUrlRequestMarshaller {
    /**
     * Marshall the given parameter object: writes the request's game session id through
     * the protocol marshaller using its field binding.
     *
     * @throws SdkClientException if the request is null or the marshalling itself fails
     */
    public void marshall(GetGameSessionLogUrlRequest getGameSessionLogUrlRequest, ProtocolMarshaller protocolMarshaller) {
        if (getGameSessionLogUrlRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getGameSessionLogUrlRequest.getGameSessionId(), GAMESESSIONID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class HostConnectionPoolPartition { /** * Refresh the partition */
public synchronized void refresh ( ) { } } | List < HostConnectionPool < CL > > pools = Lists . newArrayList ( ) ; for ( HostConnectionPool < CL > pool : this . pools ) { if ( ! pool . isReconnecting ( ) ) { pools . add ( pool ) ; } } this . activePools . set ( strategy . sortAndfilterPartition ( pools , prioritize ) ) ; |
public class SeqServerGroup {
    /**
     * Create a new instance of a server group object by parsing the group name.
     * The name is split on dashes: d1 and d2 mark the first two dash positions
     * (presumably delimiting app/stack/detail segments — verify against the
     * constructor), and dN marks the start of a trailing sequence suffix when
     * {@code isSequence} confirms one; otherwise dN is the end of the string.
     */
    public static SeqServerGroup parse(String asg) {
        int d1 = asg.indexOf('-');
        int d2 = asg.indexOf('-', d1 + 1);
        int dN = asg.lastIndexOf('-');
        // No trailing sequence: treat the whole name as sequence-less.
        if (dN < 0 || !isSequence(asg, dN)) {
            dN = asg.length();
        }
        return new SeqServerGroup(asg, d1, d2, dN);
    }
}
public class ServerVulnerabilityAssessmentsInner {
    /**
     * Lists the vulnerability assessment policies associated with a server by fetching
     * the next page of results and unwrapping the service response into its page body.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ServerVulnerabilityAssessmentInner&gt; object
     */
    public Observable<Page<ServerVulnerabilityAssessmentInner>> listByServerNextAsync(final String nextPageLink) {
        // Delegate to the service-response variant and strip the response envelope.
        return listByServerNextWithServiceResponseAsync(nextPageLink).map(new Func1<ServiceResponse<Page<ServerVulnerabilityAssessmentInner>>, Page<ServerVulnerabilityAssessmentInner>>() {
            @Override
            public Page<ServerVulnerabilityAssessmentInner> call(ServiceResponse<Page<ServerVulnerabilityAssessmentInner>> response) {
                return response.body();
            }
        });
    }
}
public class IntHashMap {
    /**
     * Save the state of the <tt>IntHashMap</tt> instance to a stream (i.e.,
     * serialize it).
     *
     * @serialData The <i>capacity</i> of the IntHashMap (the length of the
     *             bucket array) is emitted (int), followed by the
     *             <i>size</i> of the IntHashMap (the number of key-value
     *             mappings), followed by the key (int) and value (Object)
     *             for each key-value mapping represented by the IntHashMap.
     *             The key-value mappings are emitted in no particular order.
     */
    private void writeObject(java.io.ObjectOutputStream s) throws IOException {
        // Write out the threshold, loadfactor, and any hidden stuff.
        s.defaultWriteObject();
        // Write out number of buckets.
        s.writeInt(table.length);
        // Write out size (number of Mappings).
        s.writeInt(count);
        // Write out keys and values (alternating). Buckets are walked from the
        // last slot to the first, following each collision chain; the emitted
        // order is an implementation detail callers must not rely on, but it
        // must stay in sync with the matching readObject.
        for (int index = table.length - 1; index >= 0; index--) {
            Entry entry = table[index];
            while (entry != null) {
                s.writeInt(entry.key);
                s.writeObject(entry.value);
                entry = entry.next;
            }
        }
    }
}
public class StringBinderAdapter {
    /**
     * Converts {@code object}, declared as type {@code input}, to its String
     * representation by delegating to the underlying binder's
     * {@code convertTo(input, String.class, object)}.
     *
     * @param input  the declared source type of {@code object}
     * @param object the value to convert
     * @param <S> the source type
     * @return the binder's String conversion of the value
     */
    public <S> String convertToString(Class<S> input, Object object) {
        return binder.convertTo(input, String.class, object);
    }
}
public class JsonObject { /** * 添加字段错误信息
* @ param key
* @ param value */
@ SuppressWarnings ( "unchecked" ) public void addFieldMsg ( String key , String value ) { } } | if ( key == null ) { return ; } Map < String , String > fieldMsgs = null ; // 字段错误信息
if ( ! this . statusInfo . containsKey ( WebResponseConstant . MESSAGE_FIELD ) ) { fieldMsgs = new HashMap < String , String > ( ) ; this . statusInfo . put ( WebResponseConstant . MESSAGE_FIELD , fieldMsgs ) ; } fieldMsgs = ( Map < String , String > ) ( this . statusInfo . get ( WebResponseConstant . MESSAGE_FIELD ) ) ; fieldMsgs . put ( key , value ) ; |
public class JumblrClient { /** * Delete a given post
* @ param blogName the name of the blog the post is in
* @ param postId the id of the post to delete */
public void postDelete ( String blogName , Long postId ) { } } | Map < String , String > map = new HashMap < String , String > ( ) ; map . put ( "id" , postId . toString ( ) ) ; requestBuilder . post ( JumblrClient . blogPath ( blogName , "/post/delete" ) , map ) ; |
public class Router { /** * Gets internal { @ link EndpointRouter } implementation for given protocol .
* @ param protocol Protocol for obtaining the router .
* @ return The { @ link EndpointRouter } implementation . */
public Optional < EndpointRouter > getRouter ( EndpointRouter . Protocol protocol ) { } } | return endpointRouters . stream ( ) . filter ( r -> r . getProtocol ( ) == protocol ) . findFirst ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.