signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SchemaUtility { /** * private static void generateMethodDeclaration ( SQLiteDatabaseSchema schema , Builder classBuilder , boolean async ) { * for ( ExecutableElement item : schema . transactions ) { * Set < String > daoNames = new HashSet < String > ( ) ; * daoNames . addAll ( schema . getDaoNameSet ( ) ) ; * MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( item . getSimpleName ( ) . toString ( ) ) * . addModifiers ( Modifier . PUBLIC ) ; * if ( async ) { * methodBuilder . returns ( ParameterizedTypeName . get ( Future . class , Boolean . TYPE ) ) ; * } else { * methodBuilder . returns ( Boolean . TYPE ) ; * methodBuilder . addJavadoc ( " Executes $ L { @ link $ L } \ n " , ( async ? " in async mode " : " " ) , item . getSimpleName ( ) . toString ( ) ) ; * for ( VariableElement p : item . getParameters ( ) ) { * schema . contains ( name ) * if ( ! daoNames . contains ( TypeUtility . typeName ( p . asType ( ) ) . toString ( ) ) ) { * methodBuilder . addParameter ( TypeUtility . typeName ( p . asType ( ) ) , p . getSimpleName ( ) . toString ( ) ) ; * classBuilder . addMethod ( methodBuilder . build ( ) ) ; */ public static void generateTransaction ( TypeSpec . Builder classBuilder , SQLiteDatabaseSchema schema , boolean onlyInterface ) { } }
for ( ExecutableElement item : schema . transactions ) { Set < String > daoNames = new HashSet < String > ( ) ; daoNames . addAll ( schema . getDaoNameSet ( ) ) ; MethodSpec . Builder methodBuilderSync = MethodSpec . methodBuilder ( item . getSimpleName ( ) . toString ( ) ) . addModifiers ( Modifier . PUBLIC ) . returns ( Boolean . TYPE ) ; MethodSpec . Builder methodBuilderAsync = MethodSpec . methodBuilder ( item . getSimpleName ( ) . toString ( ) + "Async" ) . addModifiers ( Modifier . PUBLIC ) . returns ( ParameterizedTypeName . get ( Future . class , Boolean . class ) ) ; if ( ! onlyInterface ) { methodBuilderSync . addAnnotation ( Override . class ) ; methodBuilderAsync . addAnnotation ( Override . class ) ; } else { methodBuilderSync . addModifiers ( Modifier . ABSTRACT ) ; methodBuilderAsync . addModifiers ( Modifier . ABSTRACT ) ; } methodBuilderSync . addJavadoc ( "Executes method {@link $L}\n\n@return <code>true</code> if transaction was done succefull.\n" , item . getSimpleName ( ) . toString ( ) ) ; methodBuilderAsync . addJavadoc ( "Executes method {@link $L} in async mode\n\n@return a <code>Future</code> with true if transaction was done succefull.\n" , item . getSimpleName ( ) . toString ( ) ) ; { for ( VariableElement p : item . getParameters ( ) ) { // schema . contains ( name ) if ( ! daoNames . contains ( TypeUtility . typeName ( p . asType ( ) ) . toString ( ) ) ) { methodBuilderSync . addParameter ( TypeUtility . typeName ( p . asType ( ) ) , p . getSimpleName ( ) . toString ( ) ) ; methodBuilderAsync . addParameter ( TypeUtility . typeName ( p . asType ( ) ) , p . getSimpleName ( ) . toString ( ) ) ; } } } if ( ! onlyInterface ) { boolean managedTransationStatus = false ; if ( TypeUtility . isEquals ( ClassName . get ( TransactionResult . class ) , TypeUtility . typeName ( item . getReturnType ( ) ) ) ) { managedTransationStatus = true ; } methodBuilderSync . addCode ( "return $L.this.execute(daoFactory -> {" , schema . 
getGeneratedClassName ( ) ) ; methodBuilderSync . addCode ( "\n$>" ) ; if ( managedTransationStatus ) { methodBuilderSync . addCode ( "return " ) ; } methodBuilderAsync . addCode ( "return $L.this.executeAsync(daoFactory -> { " , schema . getGeneratedClassName ( ) ) ; methodBuilderAsync . addCode ( "\n$>if (daoFactory.$L(" , item . getSimpleName ( ) . toString ( ) ) ; String s = "" ; for ( VariableElement p : item . getParameters ( ) ) { // schema . contains ( name ) if ( ! daoNames . contains ( TypeUtility . typeName ( p . asType ( ) ) . toString ( ) ) ) { methodBuilderAsync . addCode ( s + p . getSimpleName ( ) . toString ( ) ) ; s = ", " ; } } methodBuilderAsync . addCode ( ")==true) { return $T.COMMIT; } else { return $T.ROLLBACK; }" , TransactionResult . class , TransactionResult . class ) ; methodBuilderAsync . addCode ( "$<\n});\n" ) ; methodBuilderSync . addCode ( schema . getElement ( ) . getSimpleName ( ) + "." + item . getSimpleName ( ) . toString ( ) + "(" ) ; { String separator = "" ; for ( VariableElement p : item . getParameters ( ) ) { methodBuilderSync . addCode ( separator ) ; // schema . contains ( name ) if ( daoNames . contains ( TypeUtility . typeName ( p . asType ( ) ) . toString ( ) ) ) { methodBuilderSync . addCode ( "daoFactory.get" + TypeUtility . simpleName ( TypeUtility . typeName ( p . asType ( ) ) ) + "()" ) ; } else { methodBuilderSync . addCode ( "" + p . getSimpleName ( ) ) ; } separator = ", " ; } } methodBuilderSync . addCode ( ");" ) ; if ( ! managedTransationStatus ) { methodBuilderSync . addCode ( "\nreturn TransactionResult.COMMIT;" ) ; } methodBuilderSync . addCode ( "$<\n" ) ; methodBuilderSync . addCode ( "});\n" ) ; } classBuilder . addMethod ( methodBuilderSync . build ( ) ) ; classBuilder . addMethod ( methodBuilderAsync . build ( ) ) ; }
public class FileUtils { /** * Creates a File that is unique in the specified directory . If the specified * filename exists in the directory , " - # " will be appended to the filename until * a unique filename can be created . * @ param directory the directory to create the file in * @ param filename the base filename with extension * @ return a File that is unique in the specified directory * @ throws IOException if any error occurs during file creation */ public static File createUniqueFile ( File directory , String filename ) throws IOException { } }
File uniqueFile = new File ( directory , filename ) ; if ( uniqueFile . createNewFile ( ) ) { return ( uniqueFile ) ; } String extension = "" ; int dotIndex = filename . lastIndexOf ( '.' ) ; if ( dotIndex >= 0 ) { extension = filename . substring ( dotIndex ) ; filename = filename . substring ( 0 , dotIndex ) ; } int fileNumber = 0 ; while ( ! uniqueFile . createNewFile ( ) ) { fileNumber ++ ; String numberedFilename = String . format ( "%s-%d%s" , filename , fileNumber , extension ) ; uniqueFile = new File ( directory , numberedFilename ) ; } return ( uniqueFile ) ;
public class KinesisFirehoseOutputUpdateMarshaller { /** * Marshall the given parameter object . */ public void marshall ( KinesisFirehoseOutputUpdate kinesisFirehoseOutputUpdate , ProtocolMarshaller protocolMarshaller ) { } }
if ( kinesisFirehoseOutputUpdate == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( kinesisFirehoseOutputUpdate . getResourceARNUpdate ( ) , RESOURCEARNUPDATE_BINDING ) ; protocolMarshaller . marshall ( kinesisFirehoseOutputUpdate . getRoleARNUpdate ( ) , ROLEARNUPDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Router { /** * Route a request . * @ param method The request method . E . g . { @ code GET , PUT , POST , DELETE } , etc . * @ param path The request path . E . g . { @ code / foo / baz / bar } . * @ param result A { @ link Result } for storing the routing result , target and captured parameters . * The { @ link Result } should have enough capacity to store all captured parameters . * See { @ link # result ( ) } . * @ return Routing status . { @ link Status # SUCCESS } if an endpoint and matching method was found . * { @ link Status # NOT _ FOUND } if the endpoint could not be found , { @ link Status # METHOD _ NOT _ ALLOWED } * if the endpoint was found but the method did not match . */ public Status route ( final CharSequence method , final CharSequence path , final Result < T > result ) { } }
result . captor . optionalTrailingSlash ( optionalTrailingSlash ) ; final RouteTarget < T > route = trie . lookup ( path , result . captor ) ; if ( route == null ) { return result . notFound ( ) . status ( ) ; } final Target < T > target = route . lookup ( method ) ; if ( target == null ) { return result . notAllowed ( route ) . status ( ) ; } return result . success ( path , route , target ) . status ( ) ;
public class CalendarPath { /** * Method to construct the between expression for calendar * @ param startValue the start value * @ param endValue the end value * @ return Expression */ public Expression < Calendar > between ( Calendar startValue , Calendar endValue ) { } }
String valueString = "'" + getCalendarAsString ( startValue ) . concat ( "Z" ) + "' AND '" + getCalendarAsString ( endValue ) + "'" ; return new Expression < Calendar > ( this , Operation . between , valueString ) ;
public class CommerceNotificationAttachmentLocalServiceBaseImpl { /** * Returns the commerce notification attachment matching the UUID and group . * @ param uuid the commerce notification attachment ' s UUID * @ param groupId the primary key of the group * @ return the matching commerce notification attachment , or < code > null < / code > if a matching commerce notification attachment could not be found */ @ Override public CommerceNotificationAttachment fetchCommerceNotificationAttachmentByUuidAndGroupId ( String uuid , long groupId ) { } }
return commerceNotificationAttachmentPersistence . fetchByUUID_G ( uuid , groupId ) ;
public class AppServiceEnvironmentsInner { /** * Create or update a multi - role pool . * Create or update a multi - role pool . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service Environment . * @ param multiRolePoolEnvelope Properties of the multi - role pool . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the WorkerPoolResourceInner object if successful . */ public WorkerPoolResourceInner updateMultiRolePool ( String resourceGroupName , String name , WorkerPoolResourceInner multiRolePoolEnvelope ) { } }
return updateMultiRolePoolWithServiceResponseAsync ( resourceGroupName , name , multiRolePoolEnvelope ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Assert { /** * Asserts that condition is < code > false < / code > */ public static void assertFalse ( boolean condition , String format , Object ... args ) { } }
if ( imp != null && condition ) { imp . assertFailed ( StringUtils . format ( format , args ) ) ; }
public class ZooSchemaImpl { /** * Define a new database class schema based on the given Java class . * @ param cls The Java class for which a schema should be defined * @ return New schema object * @ see ZooSchema # addClass ( Class ) */ @ Override public ZooClass addClass ( Class < ? > cls ) { } }
DBTracer . logCall ( this , cls ) ; checkValidity ( true ) ; return sm . createSchema ( null , cls ) ;
public class StorageKeyCloudBlobProvider { /** * Returns an instance of { @ link CloudBlobClient } based on available authentication mechanism . * @ return an instance of { @ link CloudBlobClient } . * @ throws IOException */ @ Override public CloudBlobClient getCloudBlobClient ( ) throws IOException { } }
String connectionString = String . format ( AZURE_STORAGE_CONNECTION_STRING_FORMAT , this . azureStorageAccountName , this . azureStorageAccountKey ) ; try { return CloudStorageAccount . parse ( connectionString ) . createCloudBlobClient ( ) ; } catch ( URISyntaxException | InvalidKeyException e ) { throw new IOException ( "Failed to create a Cloud Storage Account." , e ) ; }
public class GwtRunner { /** * Convert a list of { @ link JSError } instances to a JS array containing plain objects . */ private static JavaScriptObject [ ] toNativeErrorArray ( List < JSError > errors ) { } }
JavaScriptObject [ ] out = new JavaScriptObject [ errors . size ( ) ] ; for ( int i = 0 ; i < errors . size ( ) ; ++ i ) { JSError error = errors . get ( i ) ; DiagnosticType type = error . getType ( ) ; out [ i ] = createError ( error . sourceName , error . description , type != null ? type . key : null , error . lineNumber , error . getCharno ( ) ) ; } return out ;
public class JpaBitLogStore { /** * ( non - Javadoc ) * @ see org . duracloud . mill . bitlog . BitLogStore # delete ( java . lang . String , * java . lang . String , java . lang . String ) */ @ Override @ Transactional ( MillJpaRepoConfig . TRANSACTION_MANAGER_BEAN ) public void delete ( String account , String storeId , String spaceId ) { } }
int deleted = 0 ; while ( ( deleted = bitLogItemRepo . deleteFirst50000ByAccountAndStoreIdAndSpaceId ( account , storeId , spaceId ) ) > 0 ) { log . info ( "deleted {} bit log items where account = {}, store_id = {}, space_id = {}" , deleted , account , storeId , spaceId ) ; this . bitLogItemRepo . flush ( ) ; }
public class CodecSearchTree { /** * Search mtas tree . * @ param startPosition the start position * @ param endPosition the end position * @ param in the in * @ param ref the ref * @ param objectRefApproxOffset the object ref approx offset * @ return the array list * @ throws IOException Signals that an I / O exception has occurred . */ public static ArrayList < MtasTreeHit < ? > > searchMtasTree ( int startPosition , int endPosition , IndexInput in , long ref , long objectRefApproxOffset ) throws IOException { } }
int boundary = 1000 + 10 * ( endPosition - startPosition ) ; ArrayList < MtasTreeHit < ? > > list = new ArrayList < MtasTreeHit < ? > > ( ) ; ArrayList < MtasTreeItem > checkList = new ArrayList < MtasTreeItem > ( ) ; AtomicBoolean isSinglePoint = new AtomicBoolean ( false ) ; AtomicBoolean isStoreAdditionalId = new AtomicBoolean ( false ) ; AtomicLong nodeRefApproxOffset = new AtomicLong ( - 1 ) ; checkList . add ( getMtasTreeItem ( ref , isSinglePoint , isStoreAdditionalId , nodeRefApproxOffset , in , objectRefApproxOffset ) ) ; ArrayList < Long > history = new ArrayList < Long > ( ) ; do { MtasTreeItem checkItem = checkList . remove ( checkList . size ( ) - 1 ) ; searchMtasTree ( checkItem , startPosition , endPosition , in , isSinglePoint , isStoreAdditionalId , objectRefApproxOffset , list , nodeRefApproxOffset , checkList ) ; history . add ( checkItem . ref ) ; if ( history . size ( ) > boundary ) { throw new IOException ( "Too many items collected from tree" ) ; } } while ( checkList . size ( ) > 0 ) ; return list ;
public class CmsJspImageBean { /** * Returns the largest width value form the source set . < p > * In case no source set entries have been added before , the map is not initialized and < code > 0 < / code > is returned . * @ return the largest width value form the source set */ public int getSrcSetMaxWidth ( ) { } }
int result = 0 ; if ( ( m_srcSet != null ) && ( m_srcSet . size ( ) > 0 ) ) { result = m_srcSet . lastKey ( ) . intValue ( ) ; } return result ;
public class FieldWithIdComparator { /** * Compares two fields given by their names . * @ param objA The name of the first field * @ param objB The name of the second field * @ return * @ see java . util . Comparator # compare ( java . lang . Object , java . lang . Object ) */ public int compare ( Object objA , Object objB ) { } }
String idAStr = ( ( FieldDescriptorDef ) _fields . get ( objA ) ) . getProperty ( "id" ) ; String idBStr = ( ( FieldDescriptorDef ) _fields . get ( objB ) ) . getProperty ( "id" ) ; int idA ; int idB ; try { idA = Integer . parseInt ( idAStr ) ; } catch ( Exception ex ) { return 1 ; } try { idB = Integer . parseInt ( idBStr ) ; } catch ( Exception ex ) { return - 1 ; } return idA < idB ? - 1 : ( idA > idB ? 1 : 0 ) ;
public class VirtualMachineScaleSetsInner { /** * Create or update a VM scale set . * @ param resourceGroupName The name of the resource group . * @ param vmScaleSetName The name of the VM scale set to create or update . * @ param parameters The scale set object . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < VirtualMachineScaleSetInner > createOrUpdateAsync ( String resourceGroupName , String vmScaleSetName , VirtualMachineScaleSetInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , vmScaleSetName , parameters ) . map ( new Func1 < ServiceResponse < VirtualMachineScaleSetInner > , VirtualMachineScaleSetInner > ( ) { @ Override public VirtualMachineScaleSetInner call ( ServiceResponse < VirtualMachineScaleSetInner > response ) { return response . body ( ) ; } } ) ;
public class ModelsImpl { /** * Updates the name and explicit list of a Pattern . Any entity model . * @ param appId The application ID . * @ param versionId The version ID . * @ param entityId The Pattern . Any entity extractor ID . * @ param patternAnyUpdateObject An object containing the explicit list of the Pattern . Any entity . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < OperationStatus > updatePatternAnyEntityModelAsync ( UUID appId , String versionId , UUID entityId , PatternAnyModelUpdateObject patternAnyUpdateObject , final ServiceCallback < OperationStatus > serviceCallback ) { } }
return ServiceFuture . fromResponse ( updatePatternAnyEntityModelWithServiceResponseAsync ( appId , versionId , entityId , patternAnyUpdateObject ) , serviceCallback ) ;
public class JSONFormat { /** * Checks if the JSONFormat in parameter is the same as this , omitting ids and jcell . jvalue . value content * @ param jf the format to compare * @ return true if this and jf are the same , omits ids */ public boolean sameJSONFormat ( JSONFormat jf ) { } }
Map < String , String > featLinks = new HashMap < > ( ) ; if ( ! this . name . equals ( jf . name ) || ! this . creator . equals ( jf . creator ) || ! this . license . equals ( jf . license ) || ! this . source . equals ( jf . source ) ) { return false ; } else if ( ! this . sameFeatures ( jf , featLinks ) ) { System . out . println ( "\nDifferences in features" ) ; return false ; } else if ( ! this . sameProducts ( jf , featLinks , false ) ) { System . out . println ( "\nDifferences in products" ) ; return false ; } return true ;
public class SoundCloudArtworkHelper { /** * Retrieve the artwork url of a track pointing to the requested size . * By default , { @ link fr . tvbarthel . cheerleader . library . client . SoundCloudTrack # getArtworkUrl ( ) } * points to the { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # LARGE } * Available size are : * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # MINI } * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # TINY } * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # SMALL } * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # BADGE } * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # LARGE } * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # XLARGE } * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # XXLARGE } * { @ link fr . tvbarthel . cheerleader . library . helpers . SoundCloudArtworkHelper # XXXLARGE } * @ param track track from which artwork url should be returned . * @ param size wished size . * @ return artwork url or null if no artwork are available . */ public static String getArtworkUrl ( SoundCloudTrack track , String size ) { } }
String defaultUrl = track . getArtworkUrl ( ) ; if ( defaultUrl == null ) { return null ; } switch ( size ) { case MINI : case TINY : case SMALL : case BADGE : case LARGE : case XLARGE : case XXLARGE : case XXXLARGE : return defaultUrl . replace ( LARGE , size ) ; default : return defaultUrl ; }
public class EventDimensions { /** * Custom attributes that your app reports to Amazon Pinpoint . You can use these attributes as selection criteria * when you create an event filter . * @ param attributes * Custom attributes that your app reports to Amazon Pinpoint . You can use these attributes as selection * criteria when you create an event filter . * @ return Returns a reference to this object so that method calls can be chained together . */ public EventDimensions withAttributes ( java . util . Map < String , AttributeDimension > attributes ) { } }
setAttributes ( attributes ) ; return this ;
public class AuthenticateApi { /** * Remove entries in the authentication cache * @ param req * @ param res * @ param config */ private void removeEntryFromAuthCache ( HttpServletRequest req , HttpServletResponse res , WebAppSecurityConfig config ) { } }
/* * TODO : we need to optimize this method . . . if the authCacheService . remove ( ) method * return true for successfully removed the entry in the authentication cache , then we * do not have to call the second method for token . See defect 66015 */ removeEntryFromAuthCacheForUser ( req , res ) ; removeEntryFromAuthCacheForToken ( req , res , config ) ;
public class ProxyProvider { /** * Should proxy the given address * @ param address the address to test * @ return true if of type { @ link InetSocketAddress } and hostname candidate to proxy */ public boolean shouldProxy ( SocketAddress address ) { } }
SocketAddress addr = address ; if ( address instanceof TcpUtils . SocketAddressSupplier ) { addr = ( ( TcpUtils . SocketAddressSupplier ) address ) . get ( ) ; } return addr instanceof InetSocketAddress && shouldProxy ( ( ( InetSocketAddress ) addr ) . getHostString ( ) ) ;
public class ReadModifyWriteRow { /** * Adds ` amount ` be added to the existing value . If the targeted cell is unset , it will be treated * as containing a zero . Otherwise , the targeted cell must contain an 8 - byte value ( interpreted as * a 64 - bit big - endian signed integer ) , or the entire request will fail . */ public ReadModifyWriteRow increment ( @ Nonnull String familyName , @ Nonnull ByteString qualifier , long amount ) { } }
Validations . validateFamily ( familyName ) ; Preconditions . checkNotNull ( qualifier , "Qualifier can't be null" ) ; ReadModifyWriteRule rule = ReadModifyWriteRule . newBuilder ( ) . setFamilyName ( familyName ) . setColumnQualifier ( qualifier ) . setIncrementAmount ( amount ) . build ( ) ; builder . addRules ( rule ) ; return this ;
public class SchemaResource { /** * - - - - - public static methods - - - - - */ public static ResultStream getSchemaOverviewResult ( ) throws FrameworkException { } }
final List < GraphObjectMap > resultList = new LinkedList < > ( ) ; final ConfigurationProvider config = StructrApp . getConfiguration ( ) ; // extract types from ModuleService final Set < String > nodeEntityKeys = config . getNodeEntities ( ) . keySet ( ) ; final Set < String > relEntityKeys = config . getRelationshipEntities ( ) . keySet ( ) ; Set < String > entityKeys = new HashSet < > ( ) ; entityKeys . addAll ( nodeEntityKeys ) ; entityKeys . addAll ( relEntityKeys ) ; for ( String rawType : entityKeys ) { // create & add schema information Class type = SchemaHelper . getEntityClassForRawType ( rawType ) ; GraphObjectMap schema = new GraphObjectMap ( ) ; resultList . add ( schema ) ; if ( type != null ) { String url = "/" . concat ( rawType ) ; final boolean isRel = AbstractRelationship . class . isAssignableFrom ( type ) ; schema . setProperty ( urlProperty , url ) ; schema . setProperty ( typeProperty , type . getSimpleName ( ) ) ; schema . setProperty ( nameProperty , type . getSimpleName ( ) ) ; schema . setProperty ( classNameProperty , type . getName ( ) ) ; schema . setProperty ( extendsClassNameProperty , type . getSuperclass ( ) . getName ( ) ) ; schema . setProperty ( isRelProperty , isRel ) ; schema . setProperty ( flagsProperty , SecurityContext . getResourceFlags ( rawType ) ) ; if ( ! isRel ) { final List < GraphObjectMap > relatedTo = new LinkedList < > ( ) ; final List < GraphObjectMap > relatedFrom = new LinkedList < > ( ) ; for ( final PropertyKey key : config . getPropertySet ( type , PropertyView . All ) ) { if ( key instanceof RelationProperty ) { final RelationProperty relationProperty = ( RelationProperty ) key ; final Relation relation = relationProperty . getRelation ( ) ; if ( ! relation . isHidden ( ) ) { switch ( relation . getDirectionForType ( type ) ) { case OUTGOING : relatedTo . add ( relationPropertyToMap ( config , relationProperty ) ) ; break ; case INCOMING : relatedFrom . 
add ( relationPropertyToMap ( config , relationProperty ) ) ; break ; case BOTH : relatedTo . add ( relationPropertyToMap ( config , relationProperty ) ) ; relatedFrom . add ( relationPropertyToMap ( config , relationProperty ) ) ; break ; } } } } if ( ! relatedTo . isEmpty ( ) ) { schema . setProperty ( relatedToProperty , relatedTo ) ; } if ( ! relatedFrom . isEmpty ( ) ) { schema . setProperty ( relatedFromProperty , relatedFrom ) ; } } } } return new PagingIterable < > ( resultList ) ;
public class StringGroovyMethods { /** * TODO expose this for stream based scenarios ? */ private static int findMinimumLeadingSpaces ( String line , int count ) { } }
int length = line . length ( ) ; int index = 0 ; while ( index < length && index < count && Character . isWhitespace ( line . charAt ( index ) ) ) index ++ ; return index ;
public class RoomNumber { /** * setter for building - sets * @ generated * @ param v value to set into the feature */ public void setBuilding ( String v ) { } }
if ( RoomNumber_Type . featOkTst && ( ( RoomNumber_Type ) jcasType ) . casFeat_building == null ) jcasType . jcas . throwFeatMissing ( "building" , "org.apache.uima.examples.RoomNumber" ) ; jcasType . ll_cas . ll_setStringValue ( addr , ( ( RoomNumber_Type ) jcasType ) . casFeatCode_building , v ) ;
public class HiveRegisterUtils { /** * Register the given { @ link Path } s . * @ param paths The { @ link Path } s to be registered . * @ param state A { @ link State } which will be used to instantiate a { @ link HiveRegister } and a * { @ link HiveRegistrationPolicy } for registering the given The { @ link Path } s . */ public static void register ( Iterable < String > paths , State state ) throws IOException { } }
try ( HiveRegister hiveRegister = HiveRegister . get ( state ) ) { HiveRegistrationPolicy policy = HiveRegistrationPolicyBase . getPolicy ( state ) ; for ( String path : paths ) { for ( HiveSpec spec : policy . getHiveSpecs ( new Path ( path ) ) ) { hiveRegister . register ( spec ) ; } } }
public class _ARouter { /** * Extract the default group from path . */ private String extractGroup ( String path ) { } }
if ( TextUtils . isEmpty ( path ) || ! path . startsWith ( "/" ) ) { throw new HandlerException ( Consts . TAG + "Extract the default group failed, the path must be start with '/' and contain more than 2 '/'!" ) ; } try { String defaultGroup = path . substring ( 1 , path . indexOf ( "/" , 1 ) ) ; if ( TextUtils . isEmpty ( defaultGroup ) ) { throw new HandlerException ( Consts . TAG + "Extract the default group failed! There's nothing between 2 '/'!" ) ; } else { return defaultGroup ; } } catch ( Exception e ) { logger . warning ( Consts . TAG , "Failed to extract default group! " + e . getMessage ( ) ) ; return null ; }
public class CmsDriverManager { /** * Writes an access control entries to a given resource . < p > * @ param dbc the current database context * @ param resource the resource * @ param ace the entry to write * @ throws CmsException if something goes wrong */ public void writeAccessControlEntry ( CmsDbContext dbc , CmsResource resource , CmsAccessControlEntry ace ) throws CmsException { } }
// write the new ace getUserDriver ( dbc ) . writeAccessControlEntry ( dbc , dbc . currentProject ( ) , ace ) ; // log it log ( dbc , new CmsLogEntry ( dbc , resource . getStructureId ( ) , CmsLogEntryType . RESOURCE_PERMISSIONS , new String [ ] { resource . getRootPath ( ) } ) , false ) ; // update the " last modified " information setDateLastModified ( dbc , resource , resource . getDateLastModified ( ) ) ; // clear the cache m_monitor . clearAccessControlListCache ( ) ; // fire a resource modification event Map < String , Object > data = new HashMap < String , Object > ( 2 ) ; data . put ( I_CmsEventListener . KEY_RESOURCE , resource ) ; data . put ( I_CmsEventListener . KEY_CHANGE , new Integer ( CHANGED_ACCESSCONTROL ) ) ; OpenCms . fireCmsEvent ( new CmsEvent ( I_CmsEventListener . EVENT_RESOURCE_MODIFIED , data ) ) ;
public class MonitoringProxyActivator { /** * Create the { @ code Manifest } for the boot proxy jar . * @ return the boot proxy jar { @ code Manifest } */ Manifest createBootJarManifest ( ) { } }
Manifest manifest = new Manifest ( ) ; Attributes manifestAttributes = manifest . getMainAttributes ( ) ; manifestAttributes . putValue ( Attributes . Name . MANIFEST_VERSION . toString ( ) , "1.0" ) ; manifestAttributes . putValue ( "Created-By" , "Liberty Monitoring Extender" ) ; manifestAttributes . putValue ( "Created-Time" , DateFormat . getInstance ( ) . format ( new Date ( ) ) ) ; manifestAttributes . putValue ( MONITORING_VERSION_MANIFEST_HEADER , getCurrentVersion ( ) ) ; return manifest ;
public class CaseInsensitiveIntMap { /** * Clear the hashmap . */ public void clear ( ) { } }
for ( int i = 0 ; i < _values . length ; i ++ ) { _keys [ i ] = null ; _values [ i ] = 0 ; } _size = 0 ;
public class WebElementCreator { /** * Waits for { @ code WebElement } objects to be created * @ return true if successfully created before timout */ private boolean waitForWebElementsToBeCreated ( ) { } }
final long endTime = SystemClock . uptimeMillis ( ) + 5000 ; while ( SystemClock . uptimeMillis ( ) < endTime ) { if ( isFinished ) { return true ; } sleeper . sleepMini ( ) ; } return false ;
public class BasicSceneGenerator { /** * { @ inheritDoc } */ @ Override public List < IGeneratorParameter < ? > > getParameters ( ) { } }
return Arrays . asList ( new IGeneratorParameter < ? > [ ] { backgroundColor , foregroundColor , margin , useAntiAliasing , fontStyle , fontName , zoomFactor , scale , bondLength , fitToScreen , showMoleculeTitle , showTooltip , arrowHeadWidth , new ShowReactionTitle ( ) } ) ;
public class CmsHelpTemplateBean { /** * Attaches the resource name to the request as parameter . < p > * @ param resourceName a name of a resource * @ return The given resource name with additional request parameter concatenations of the * current request on this < code > CmsDialog < / code > */ private String attachRequestString ( String resourceName ) { } }
StringBuffer result = new StringBuffer ( resourceName ) ; boolean firstParam = true ; if ( resourceName . indexOf ( '?' ) == - 1 ) { // no params in uri yet ? result . append ( '?' ) ; } else { firstParam = false ; } @ SuppressWarnings ( "unchecked" ) Map < String , String [ ] > params = getJsp ( ) . getRequest ( ) . getParameterMap ( ) ; Iterator < Map . Entry < String , String [ ] > > it = params . entrySet ( ) . iterator ( ) ; String [ ] values = null ; while ( it . hasNext ( ) ) { if ( values == null ) { // first iteration : check if params before so an & has to be used . if ( ! firstParam ) { result . append ( '&' ) ; } } else { result . append ( "&" ) ; } Map . Entry < String , String [ ] > entry = it . next ( ) ; result . append ( entry . getKey ( ) . toString ( ) ) . append ( '=' ) ; values = entry . getValue ( ) ; for ( int i = 0 ; i < values . length ; i ++ ) { result . append ( values [ i ] ) ; if ( ( i + 1 ) < values . length ) { result . append ( ',' ) ; } } } return result . toString ( ) ;
public class TimeSeriesUtils { /** * Reshape time series mask arrays . This should match the assumptions ( f order , etc ) in RnnOutputLayer * @ param timeSeriesMaskAsVector Mask array to reshape to a column vector * @ return Mask array as a column vector */ public static INDArray reshapeVectorToTimeSeriesMask ( INDArray timeSeriesMaskAsVector , int minibatchSize ) { } }
if ( ! timeSeriesMaskAsVector . isVector ( ) ) throw new IllegalArgumentException ( "Cannot reshape mask: expected vector" ) ; val timeSeriesLength = timeSeriesMaskAsVector . length ( ) / minibatchSize ; return timeSeriesMaskAsVector . reshape ( 'f' , minibatchSize , timeSeriesLength ) ;
public class FileUtils { /** * Verifica que el directorio existe , que es un directorio y que tenemos * permisos de lectura y escritura . En caso de error devuelve false y escribe un log * @ param dir * @ return */ public static boolean verifyDir ( File dir , Logger logger ) { } }
if ( dir == null ) { logger . error ( "El directorio es nulo." ) ; return false ; } String fileName = dir . getAbsolutePath ( ) ; if ( fileName == null ) { return false ; } if ( ! dir . exists ( ) ) { logger . error ( "El path '" + fileName + "' no existe." ) ; return false ; } if ( ! dir . isDirectory ( ) ) { logger . error ( "El path '" + fileName + "' no es un directorio." ) ; return false ; } if ( ! dir . canRead ( ) ) { logger . error ( "No tenemos permisos de lectura en el path '" + fileName + "'." ) ; return false ; } if ( ! dir . canWrite ( ) ) { logger . error ( "No tenemos permisos de escritura en el path '" + fileName + "'." ) ; return false ; } return true ;
public class WordConfTools { /** * 强制覆盖默认配置 * @ param confFile 配置文件路径 */ public static void forceOverride ( String confFile ) { } }
File file = new File ( confFile ) ; try ( InputStream in = new FileInputStream ( file ) ) { LOGGER . info ( "使用配置文件 " + file . getAbsolutePath ( ) + " 强制覆盖默认配置" ) ; loadConf ( in ) ; } catch ( Exception ex ) { LOGGER . error ( "强制覆盖默认配置失败:" , ex ) ; } int i = 1 ; for ( String key : conf . keySet ( ) ) { LOGGER . info ( ( i ++ ) + "、" + key + "=" + conf . get ( key ) ) ; }
public class StringUtils {

    /**
     * Compares two strings for equality ignoring whitespace, but preserving whitespace
     * between double-quotes.
     * The two inputs MUST BE valid DRL/Java syntax (this validation is NOT performed by
     * this method, this method assumes they are).
     * Null check: if either of the inputs is null, this method will return true IFF both are null.
     * Empty check: if either of the inputs is an empty string, it will be considered as a
     * whitespace during code-aware comparison.
     */
    public static boolean codeAwareEqualsIgnoreSpaces(String in1, String in2) {
        if (in1 == null || in2 == null) {
            return in1 == null && in2 == null;
        }
        if (in1.isEmpty() && in2.isEmpty()) {
            return true;
        }
        // Normalize a single empty input to one whitespace so it compares equal to
        // pure-whitespace input on the other side.
        if (in1.length() == 0) {
            in1 = " ";
        }
        if (in2.length() == 0) {
            in2 = " ";
        }
        // Two cursors walk the strings in lock-step; quotedN is non-null while the
        // corresponding cursor is inside a quoted region (holds the quote character).
        int idx1 = 0;
        Character quoted1 = null;
        int idx2 = 0;
        Character quoted2 = null;
        boolean equals = true;
        while (equals) {
            // Skip whitespace on each side, but only while OUTSIDE quotes —
            // whitespace inside quoted text is significant.
            while (idx1 < (in1.length()) && (quoted1 == null) && isWhitespace(in1.charAt(idx1))) {
                idx1++;
            }
            while (idx2 < (in2.length()) && (quoted2 == null) && isWhitespace(in2.charAt(idx2))) {
                idx2++;
            }
            if (idx1 >= in1.length() || idx2 >= in2.length()) {
                // considered equals if equals check succeeded and both indexes reached end of respective string.
                equals = equals && idx1 == in1.length() && idx2 == in2.length();
                break;
            }
            // Track quote state for side 1: a matching unescaped quote closes the region.
            if (in1.charAt(idx1) == '"' || in1.charAt(idx1) == '\'') {
                if (quoted1 == null) {
                    quoted1 = in1.charAt(idx1);
                } else if (quoted1 != null && quoted1.equals(in1.charAt(idx1))) {
                    if (in1.charAt(idx1 - 1) != '\\') {
                        quoted1 = null;
                    }
                }
            }
            // Same quote tracking for side 2.
            if (in2.charAt(idx2) == '"' || in2.charAt(idx2) == '\'') {
                if (quoted2 == null) {
                    quoted2 = in2.charAt(idx2);
                } else if (quoted2 != null && quoted2.equals(in2.charAt(idx2))) {
                    if (in2.charAt(idx2 - 1) != '\\') {
                        quoted2 = null;
                    }
                }
            }
            // Compare the current (non-skipped) characters and advance both cursors.
            equals &= in1.charAt(idx1) == in2.charAt(idx2);
            idx1++;
            idx2++;
        }
        return equals;
    }
}
public class FileSystemView { /** * Reads attributes of the file located by the given path in this view as an object . */ public < A extends BasicFileAttributes > A readAttributes ( JimfsPath path , Class < A > type , Set < ? super LinkOption > options ) throws IOException { } }
File file = lookUpWithLock ( path , options ) . requireExists ( path ) . file ( ) ; return store . readAttributes ( file , type ) ;
public class HadoopUtils { /** * Calls { @ link # getOptionallyThrottledFileSystem ( FileSystem , int ) } parsing the qps from the input { @ link State } * at key { @ link # MAX _ FILESYSTEM _ QPS } . * @ throws IOException */ public static FileSystem getOptionallyThrottledFileSystem ( FileSystem fs , State state ) throws IOException { } }
DeprecationUtils . renameDeprecatedKeys ( state , MAX_FILESYSTEM_QPS , DEPRECATED_KEYS ) ; if ( state . contains ( MAX_FILESYSTEM_QPS ) ) { return getOptionallyThrottledFileSystem ( fs , state . getPropAsInt ( MAX_FILESYSTEM_QPS ) ) ; } return fs ;
public class PtoPOutputHandler {

    /**
     * Restores the GD source streams.
     *
     * Reconstitutes the stream set, optionally flushes it (unless starting in
     * recovery mode), then re-adds every transmission item to its stream and
     * re-locks the messages that fall inside the streams' send windows.
     */
    public void reconstitutePtoPSourceStreams(StreamSet streamSet, int startMode) throws SIRollbackException, SIConnectionLostException, SIResourceException, SIErrorException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "reconstitutePtoPSourceStreams", new Object[] { streamSet, new Integer(startMode) });
        sourceStreamManager.reconstituteStreamSet(streamSet);
        // Don't do flush if we are asked to start in recovery mode
        if (((startMode & JsConstants.ME_START_FLUSH) == JsConstants.ME_START_FLUSH)
            && ((startMode & JsConstants.ME_START_RECOVERY) == 0)) {
            this.sendFlushedMessage(null, streamSet.getStreamID());
            // Now change streamID of streamSet
            streamSet.setStreamID(new SIBUuid12());
            // This calls requestUpdate on the StreamSet Item which will
            // cause a callback to the streamSet.getPersistentData() by msgstore
            Transaction tran = messageProcessor.getTXManager().createAutoCommitTransaction();
            try {
                streamSet.requestUpdate(tran);
            } catch (MessageStoreException e) {
                // MessageStoreException shouldn't occur so FFDC.
                FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.PtoPOutputHandler.reconstitutePtoPSourceStreams", "1:1883:1.241", this);
                SibTr.exception(tc, e);
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                    SibTr.exit(tc, "reconstitutePtoPSourceStreams", "SIStoreException");
                throw new SIResourceException(e);
            }
        }
        NonLockingCursor cursor = null;
        try {
            // Walk every MessageItem on the transmission stream, including
            // unavailable (in-doubt) ones.
            cursor = transmissionItemStream.newNonLockingItemCursor(new ClassEqualsFilter(MessageItem.class));
            cursor.allowUnavailableItems();
            MessageItem msg = (MessageItem) cursor.next();
            while (msg != null) {
                // Change streamID in message to streamID of StreamSet
                // If we are restoring from a stale backup this will
                // and the restoreMessage method puts it on a new SourceStream
                // NOTE(review): this is a reference (!=) comparison of SIBUuid12
                // objects, not equals(); looks like it may reset the uuid even
                // when the ids are logically equal — confirm intent.
                if (msg.getGuaranteedStreamUuid() != streamSet.getStreamID()) {
                    msg.setGuaranteedStreamUuid(streamSet.getStreamID());
                }
                // add all messages back in to the streams
                if (!(msg.isAdding() || msg.isRemoving())) {
                    // commit those which are not in doubt
                    sourceStreamManager.restoreMessage(msg, true);
                } else {
                    // add to stream in uncommitted state
                    sourceStreamManager.restoreMessage(msg, false);
                }
                msg = (MessageItem) cursor.next();
            }
            // Consolidate all streams which may have been reconstituted
            // This is necessary as messages may have been added out of order
            // This will return a list of the messageStoreIds of all the
            // messages which need to be locked because they are inside the
            // sendWindows of the streams which they were added to and so may
            // already have been sent
            List msgsToLock = sourceStreamManager.consolidateStreams(startMode);
            // Run through locking all messages in list so they won't expire.
            // watch out as list is Longs msgstore ids
            long msgId = 0;
            MessageItem msgItem = null;
            for (int i = 0; i < msgsToLock.size(); i++) {
                msgId = ((Long) msgsToLock.get(i)).longValue();
                msgItem = (MessageItem) transmissionItemStream.findById(msgId);
                if (msgItem != null)
                    msgItem.lockItemIfAvailable(lockID);
                else {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(tc, "message wasn't there when we tried to lock it");
                }
            }
        } catch (MessageStoreException e) {
            // MessageStoreException shouldn't occur so FFDC.
            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.PtoPOutputHandler.reconstitutePtoPSourceStreams", "1:1956:1.241", this);
            SibTr.exception(tc, e);
            SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.impl.PtoPOutputHandler", "1:1963:1.241", e });
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "reconstitutePtoPSourceStreams", e);
            throw new SIResourceException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.impl.PtoPOutputHandler", "1:1974:1.241", e }, null), e);
        } finally {
            // Always release the cursor, success or failure.
            if (cursor != null)
                cursor.finished();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "reconstitutePtoPSourceStreams");
    }
}
public class HttpRequest { /** * 发送数据流 * @ throws IOException */ private void send ( ) throws HttpException { } }
try { if ( Method . POST . equals ( this . method ) || Method . PUT . equals ( this . method ) || Method . DELETE . equals ( this . method ) || this . isRest ) { if ( CollectionUtil . isEmpty ( this . fileForm ) ) { sendFormUrlEncoded ( ) ; // 普通表单 } else { sendMultipart ( ) ; // 文件上传表单 } } else { this . httpConnection . connect ( ) ; } } catch ( IOException e ) { throw new HttpException ( e . getMessage ( ) , e ) ; }
public class ManagementLocksInner { /** * Get the management lock of a resource or any level below resource . * @ param resourceGroupName The name of the resource group . * @ param resourceProviderNamespace The namespace of the resource provider . * @ param parentResourcePath An extra path parameter needed in some services , like SQL Databases . * @ param resourceType The type of the resource . * @ param resourceName The name of the resource . * @ param lockName The name of lock . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ManagementLockObjectInner > getAtResourceLevelAsync ( String resourceGroupName , String resourceProviderNamespace , String parentResourcePath , String resourceType , String resourceName , String lockName , final ServiceCallback < ManagementLockObjectInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getAtResourceLevelWithServiceResponseAsync ( resourceGroupName , resourceProviderNamespace , parentResourcePath , resourceType , resourceName , lockName ) , serviceCallback ) ;
public class Select {

    /**
     * This method builds the list of selected items so that they can be marked as selected.
     *
     * Populates {@code _match} from the data source value: a single String becomes a
     * one-element array, a String[] is taken as-is (or compacted to drop nulls), and
     * any other object is expanded through {@link IteratorFactory} with each element's
     * {@code toString()}. A null value may select the null option instead.
     *
     * @param val The <code>dataSource</code>
     */
    private void buildMatch(Object val) {
        // create the match data
        if (val != null) {
            if (val instanceof String) {
                _match = new String[] { (String) val };
            } else if (val instanceof String[]) {
                String[] s = (String[]) val;
                // Count non-null entries to decide whether the array can be reused directly.
                int cnt = 0;
                for (int i = 0; i < s.length; i++) {
                    if (s[i] != null)
                        cnt++;
                }
                if (cnt == s.length)
                    _match = s;
                else {
                    // Some entries are null: copy only the non-null ones.
                    // NOTE(review): when ALL entries are null (cnt == 0) _match is left
                    // unchanged — confirm that is intended.
                    if (cnt > 0) {
                        _match = new String[cnt];
                        cnt = 0;
                        for (int i = 0; i < s.length; i++) {
                            if (s[i] != null) {
                                _match[cnt++] = s[i];
                            }
                        }
                    }
                }
            } else {
                // Any other type: expand via the iterator factory and stringify elements.
                Iterator matchIterator = null;
                // val is never null so this would be an error
                matchIterator = IteratorFactory.createIterator(val);
                if (matchIterator == null) {
                    matchIterator = IteratorFactory.EMPTY_ITERATOR;
                }
                ArrayList matchList = new ArrayList();
                while (matchIterator.hasNext()) {
                    Object o = matchIterator.next();
                    if (o == null)
                        continue;
                    matchList.add(o);
                }
                int size = matchList.size();
                _match = new String[size];
                for (int i = 0; i < size; i++) {
                    assert (matchList.get(i) != null);
                    assert (matchList.get(i).toString() != null);
                    _match[i] = matchList.get(i).toString();
                }
            }
            if (logger.isDebugEnabled()) {
                logger.debug("****** Select Matches ******");
                if (_match != null) {
                    for (int i = 0; i < _match.length; i++) {
                        logger.debug(i + ": " + _match[i]);
                    }
                }
            }
        } else {
            // Null data source: a nullable single-select with no defaults selects the null option.
            if (_nullable && !isMultiple() && (_defaultSelections == null || _defaultSelections.size() == 0)) {
                _match = new String[] { NULL_VALUE };
            }
        }
    }
}
public class TableRow { /** * Find the default cell style for a column * @ param columnIndex the column index * @ return the style , null if none */ public TableCellStyle findDefaultCellStyle ( final int columnIndex ) { } }
TableCellStyle s = this . defaultCellStyle ; if ( s == null ) { s = this . parent . findDefaultCellStyle ( columnIndex ) ; } return s ;
public class BaseTraceFormatter {

    /**
     * Get the "formatted" message text.
     * If useResourceBundle is true, it will look in the resource bundle for
     * the message text (as a key), and will then use MessageFormat.format on
     * the returned string if there are any substitutions in the string, e.g. {0}.
     * If useResourceBundle is false, we're calling this for a trace-level (untranslated)
     * message: just construct the trace text by appending the result of
     * formatObj(...) to the log record message.
     *
     * @param logRecord the record whose message is formatted
     * @param logParams the parameters for the message
     * @param useResourceBundle whether to translate the message via the record's bundle
     * @return the formatted message text
     */
    private String formatMessage(LogRecord logRecord, Object[] logParams, boolean useResourceBundle) {
        final String txt;
        boolean hasParams = logParams != null && logParams.length > 0;
        String msg = logRecord.getMessage();
        if (useResourceBundle) {
            ResourceBundle rb = logRecord.getResourceBundle();
            if (rb != null) {
                try {
                    msg = rb.getString(msg);
                } catch (Exception e) {
                    // Best effort: missing key leaves the raw message in place.
                }
            }
        }
        // Only run MessageFormat when the text actually contains a {0 placeholder.
        if (msg != null && hasParams && msg.contains("{0")) {
            Object[] formattedParams = new Object[logParams.length];
            for (int i = 0; i < logParams.length; i++) {
                // Do truncated stack traces where appropriate.
                // Order matters: TruncatableThrowable must be checked before Throwable,
                // and Untraceable before Traceable.
                if (logParams[i] instanceof TruncatableThrowable) {
                    formattedParams[i] = DataFormatHelper.throwableToString((TruncatableThrowable) logParams[i]);
                } else if (logParams[i] instanceof Throwable) {
                    formattedParams[i] = DataFormatHelper.throwableToString(new TruncatableThrowable((Throwable) logParams[i]));
                } else if (logParams[i] instanceof Untraceable) {
                    formattedParams[i] = logParams[i].getClass().getName(); // Use only the class name of the object
                } else if (logParams[i] instanceof Traceable) {
                    formattedParams[i] = formatTraceable((Traceable) logParams[i]);
                } else {
                    formattedParams[i] = logParams[i];
                }
                // Would any of the other parameters benefit from our whizzy formatting?
            }
            // If this is a parameter list, use MessageFormat to sort it out
            txt = MessageFormat.format(msg, formattedParams);
        } else {
            // No substitutions: append the raw parameters (if any) after the message.
            txt = msg + (hasParams ? " " + formatObj(logParams) : "");
        }
        return txt;
    }
}
public class ExecutorServices { /** * Creates a single - threaded executor that is registered by this class in order to shut it down later ( when it * becomes necessary ) . * @ return a new background executor */ public static ExecutorService createBackgroundExecutor ( ) { } }
final ExecutorService executor = Executors . newSingleThreadExecutor ( new DaemonThreadFactory ( DEFAULT_BACKGROUND_EXECUTOR_NAME ) ) ; BACKGROUND_EXECUTORS . add ( executor ) ; return executor ;
public class AbstractProcessMojo { /** * / * package private */ static Predicate < CtClass > createExcludePredicate ( @ Nullable final String [ ] classes ) { } }
final Set < PathMatcher > excludeSet ; if ( classes != null && classes . length != 0 ) { excludeSet = createPathMatcherSet ( classes ) ; } else { return ctClass -> false ; } return createMatchingPredicate ( excludeSet ) ;
public class HadoopUtils { /** * Reads maps of integer - > integer from glob paths like " folder / part - r * " */ public static HashMap < Integer , Integer > readIntIntMapFromGlob ( Path glob , FileSystem fs ) throws IOException { } }
FileStatus status [ ] = fs . globStatus ( glob ) ; HashMap < Integer , Integer > ret = new HashMap < Integer , Integer > ( ) ; for ( FileStatus fileS : status ) { ret . putAll ( readIntIntMap ( fileS . getPath ( ) , fs ) ) ; } return ret ;
public class FilePluginCache { /** * Remove the association with ident , and remove any filecache association as well . */ private void remove ( final ProviderIdent ident ) { } }
final cacheItem cacheItem = cache . get ( ident ) ; if ( null != cacheItem ) { filecache . remove ( cacheItem . getFirst ( ) ) ; } cache . remove ( ident ) ;
public class LoadBalancingRxClient {

    /**
     * Add a listener that is responsible for removing an HttpClient and shutting down
     * its connection pool if it is no longer available from load balancer.
     */
    private void addLoadBalancerListener() {
        // Server-list change notifications are only available on BaseLoadBalancer.
        if (!(lbContext.getLoadBalancer() instanceof BaseLoadBalancer)) {
            return;
        }
        ((BaseLoadBalancer) lbContext.getLoadBalancer()).addServerListChangeListener(new ServerListChangeListener() {
            @Override
            public void serverListChanged(List<Server> oldList, List<Server> newList) {
                // Servers present before but absent now have been removed.
                Set<Server> removedServers = new HashSet<Server>(oldList);
                removedServers.removeAll(newList);
                // NOTE(review): removeClient presumably mutates rxClientCache while its
                // keySet is being iterated — safe only if the cache is a concurrent map;
                // confirm the cache type.
                for (Server server : rxClientCache.keySet()) {
                    if (removedServers.contains(server)) {
                        // this server is no longer in UP status
                        removeClient(server);
                    }
                }
            }
        });
    }
}
public class PairCounter { /** * Counts the pair of objects , increasing its total count by 1. */ public int count ( Pair < T > obj ) { } }
long index = getIndex ( obj ) ; int count = counts . get ( index ) ; count ++ ; counts . put ( index , count ) ; sum ++ ; return count ;
public class CoNLLDependencyExtractor { /** * Returns a string representation of the word for a given node in the * dependency parse tree . First , the original word is filtered and if the * word is not accepted , the empty string is returned . Accepted words will * then be stemmed if either a lemma is provided by the parser or a { @ link * Stemmer } is provided with a preference given to the parser provided * lemma . If neither case holds , the original term is returned . */ private String getWord ( String [ ] nodeFeatures ) { } }
String word = nodeFeatures [ formIndex ] ; // Filter if neccessary . if ( filter != null && ! filter . accept ( word ) ) return IteratorFactory . EMPTY_TOKEN ; return word ;
public class Handler { /** * Add a featurable to the list . Will be added at the beginning of { @ link # update ( double ) } call . * If this function is called during { @ link # update ( double ) } , it will be delayed to next { @ link # update ( double ) } * call . * Automatically add { @ link IdentifiableModel } if feature does not have { @ link Identifiable } feature . * @ param featurable The featurable to add . */ public final void add ( Featurable featurable ) { } }
featurable . getFeature ( Identifiable . class ) . addListener ( this ) ; toAdd . add ( featurable ) ; willAdd = true ;
public class Base32Utils {

    /**
     * Emits the base32 character(s) completed by the byte at {@code byteIndex}.
     *
     * Base32 packs 5-bit chunks across byte boundaries; which bits of
     * {@code previous}/{@code current} complete a chunk depends on the byte's
     * position within each 5-byte (= 8-character) group. The per-position masks
     * are: F8 07|C0 3E 01|F0 0F 80|7C 03 E0|1F.
     *
     * @param previous  the byte before {@code current} (only its low bits are used)
     * @param current   the byte being encoded
     * @param byteIndex absolute index of {@code current}; byteIndex % 5 selects the bit layout
     * @param output    receives the emitted base32 characters
     */
    static private void encodeBytes(byte previous, byte current, int byteIndex, StringBuilder output) {
        int chunk;
        int offset = byteIndex % 5;
        switch (offset) {
        case 0:
            // One chunk: top 5 bits of current (F8); its low 3 bits wait for the next byte.
            chunk = ((current & 0xF8) >>> 3);
            output.append(lookupTable.charAt(chunk));
            break;
        case 1:
            // Two chunks: previous' low 3 bits (07) + current's top 2 (C0), then current's 3E bits.
            chunk = (((previous & 0x07) << 2) | ((current & 0xC0) >>> 6));
            output.append(lookupTable.charAt(chunk));
            chunk = ((current & 0x3E) >>> 1);
            output.append(lookupTable.charAt(chunk));
            break;
        case 2:
            // One chunk: previous' last bit (01) + current's top 4 (F0).
            chunk = (((previous & 0x01) << 4) | ((current & 0xF0) >>> 4));
            output.append(lookupTable.charAt(chunk));
            break;
        case 3:
            // Two chunks: previous' low 4 bits (0F) + current's top bit (80), then current's 7C bits.
            chunk = (((previous & 0x0F) << 1) | ((current & 0x80) >>> 7));
            output.append(lookupTable.charAt(chunk));
            chunk = ((current & 0x7C) >>> 2);
            output.append(lookupTable.charAt(chunk));
            break;
        case 4:
            // Two chunks: previous' low 2 bits (03) + current's top 3 (E0), then current's low 5 (1F).
            chunk = (((previous & 0x03) << 3) | ((current & 0xE0) >>> 5));
            output.append(lookupTable.charAt(chunk));
            chunk = (current & 0x1F);
            output.append(lookupTable.charAt(chunk));
            break;
        }
    }
}
public class BeanPersistenceDelegate {

    /**
     * PersistenceDelegate.instantiate()
     *
     * Builds the XMLEncoder expression that reconstructs a ControlBean:
     * <code>new &lt;BeanClass&gt;(ControlBeanContext cbc, String id, PropertyMap map)</code>,
     * reusing the owner bean's context for nested controls and any
     * AnnotatedElementMap found in the bean's property-map delegation chain.
     */
    protected Expression instantiate(Object oldInstance, Encoder out) {
        XMLEncoder xmlOut = (XMLEncoder) out;
        ControlBean control = (ControlBean) oldInstance;
        // If processing a nested control, then use the parent bean's context as the
        // constructor context
        ControlBeanContext cbc = null;
        if (xmlOut.getOwner() != null)
            cbc = ((ControlBean) xmlOut.getOwner()).getControlBeanContext();
        // See if the ControlBean has any associated PropertyMap in its delegation chain
        // that was derived from an AnnotatedElement so this relationship (and any associated
        // external config delegates) will be restored as part of the decoding process.
        // BUGBUG: What about a user-created PropertyMap that was passed into the constructor?
        AnnotatedElementMap aem = null;
        PropertyMap pMap = control.getPropertyMap();
        while (pMap != null) {
            if (pMap instanceof AnnotatedElementMap) {
                aem = (AnnotatedElementMap) pMap;
                // Ignore a class-valued AnnotationElementMap.. this just refers to the
                // Control type, and will be automatically reassociated at construction
                // time
                if (aem.getAnnotatedElement() instanceof Class)
                    aem = null;
                // Ensure the map itself can be encoded by the XMLEncoder.
                xmlOut.setPersistenceDelegate(AnnotatedElementMap.class, new AnnotatedElementMapPersistenceDelegate());
                break;
            }
            pMap = pMap.getDelegateMap();
        }
        // Create a constructor that that uses the following form:
        //   new <BeanClass>(ControlBeanContext cbc, String id, PropertyMap map)
        // The context is set to null, so the current active container context will be
        // used, the id will be the ID of the original control and the map will be
        // any AnnotatedElementMap that was passed into the original constructor.
        return new Expression(control, control.getClass(), "new", new Object[] { cbc, control.getLocalID(), aem });
    }
}
public class WordBuilder { /** * Creates a word from the given range of the contents of the internal storage . Note that the storage management * mechanisms of this class guarantee that the returned word will not change regardless of what further operations * are invoked on this { @ link WordBuilder } . * @ param fromIndex * the starting index , inclusive * @ param toIndex * the end index , exclusive * @ return the word for the specified subrange */ public Word < I > toWord ( int fromIndex , int toIndex ) { } }
if ( fromIndex < 0 || toIndex > length ) { throw new IndexOutOfBoundsException ( ) ; } int len = toIndex - fromIndex ; lock = true ; return new SharedWord < > ( array , fromIndex , len ) ;
public class BaseChart { /** * This is called during layout when the size of this view has changed . If * you were just added to the view hierarchy , you ' re called with the old * values of 0. * @ param w Current width of this view . * @ param h Current height of this view . * @ param oldw Old width of this view . * @ param oldh Old height of this view . */ @ Override protected void onSizeChanged ( int w , int h , int oldw , int oldh ) { } }
super . onSizeChanged ( w , h , oldw , oldh ) ; mWidth = w ; mHeight = h ; mLeftPadding = getPaddingLeft ( ) ; mTopPadding = getPaddingTop ( ) ; mRightPadding = getPaddingRight ( ) ; mBottomPadding = getPaddingBottom ( ) ; mGraph . layout ( mLeftPadding , mTopPadding , w - mRightPadding , ( int ) ( h - mLegendHeight - mBottomPadding ) ) ; mGraphOverlay . layout ( mLeftPadding , mTopPadding , w - mRightPadding , ( int ) ( h - mLegendHeight - mBottomPadding ) ) ; mLegend . layout ( mLeftPadding , ( int ) ( h - mLegendHeight - mBottomPadding ) , w - mRightPadding , h - mBottomPadding ) ;
public class ImageClient { /** * Gets the access control policy for a resource . May be empty if no such policy or resource * exists . * < p > Sample code : * < pre > < code > * try ( ImageClient imageClient = ImageClient . create ( ) ) { * ProjectGlobalImageResourceName resource = ProjectGlobalImageResourceName . of ( " [ PROJECT ] " , " [ RESOURCE ] " ) ; * Policy response = imageClient . getIamPolicyImage ( resource ) ; * < / code > < / pre > * @ param resource Name or id of the resource for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Policy getIamPolicyImage ( ProjectGlobalImageResourceName resource ) { } }
GetIamPolicyImageHttpRequest request = GetIamPolicyImageHttpRequest . newBuilder ( ) . setResource ( resource == null ? null : resource . toString ( ) ) . build ( ) ; return getIamPolicyImage ( request ) ;
public class TopicDefinition { /** * Create a queue definition from this template */ public QueueDefinition createQueueDefinition ( String topicName , String consumerId , boolean temporary ) { } }
QueueDefinition def = new QueueDefinition ( ) ; def . setName ( DestinationTools . getQueueNameForTopicConsumer ( topicName , consumerId ) ) ; def . setTemporary ( temporary ) ; copyAttributesTo ( def ) ; return def ;
public class JTB { /** * Sets the absolute path to the output directory for the syntax tree files . * @ param value * The absolute path to the output directory for the generated syntax * tree files , may be < code > null < / code > to use a sub directory in the * output directory of the grammar file . If this directory does not * exist yet , it is created . Note that this path should already include * the desired package hierarchy because JTB will not append the * required sub directories automatically . */ public void setNodeDirectory ( final File value ) { } }
if ( value != null && ! value . isAbsolute ( ) ) { throw new IllegalArgumentException ( "path is not absolute: " + value ) ; } this . nodeDirectory = value ;
public class MaybeLens { /** * Given a lens , lift < code > A < / code > into { @ link Maybe } . * @ param lens the lens * @ param < S > the type of the " larger " value for reading * @ param < T > the type of the " larger " value for putting * @ param < A > the type of the " smaller " value that is read * @ param < B > the type of the " smaller " update value * @ return the lens with A lifted */ public static < S , T , A , B > Lens < S , T , Maybe < A > , B > liftA ( Lens < S , T , A , B > lens ) { } }
return lens . mapA ( Maybe :: just ) ;
public class NokiaStoreHelper { /** * Called if purchase has been successful * @ param purchaseData Response code for IabResult */ private void processPurchaseSuccess ( final String purchaseData ) { } }
Logger . i ( "NokiaStoreHelper.processPurchaseSuccess" ) ; Logger . d ( "purchaseData = " , purchaseData ) ; Purchase purchase ; try { final JSONObject obj = new JSONObject ( purchaseData ) ; final String sku = SkuManager . getInstance ( ) . getSku ( OpenIabHelper . NAME_NOKIA , obj . getString ( "productId" ) ) ; Logger . d ( "sku = " , sku ) ; purchase = new Purchase ( OpenIabHelper . NAME_NOKIA ) ; purchase . setItemType ( IabHelper . ITEM_TYPE_INAPP ) ; purchase . setOrderId ( obj . getString ( "orderId" ) ) ; purchase . setPackageName ( obj . getString ( "packageName" ) ) ; purchase . setSku ( sku ) ; purchase . setToken ( obj . getString ( "purchaseToken" ) ) ; purchase . setDeveloperPayload ( obj . getString ( "developerPayload" ) ) ; } catch ( JSONException e ) { Logger . e ( e , "JSONException: " , e ) ; final IabResult result = new NokiaResult ( IabHelper . IABHELPER_BAD_RESPONSE , "Failed to parse purchase data." ) ; if ( mPurchaseListener != null ) { mPurchaseListener . onIabPurchaseFinished ( result , null ) ; } return ; } if ( mPurchaseListener != null ) { mPurchaseListener . onIabPurchaseFinished ( new NokiaResult ( RESULT_OK , "Success" ) , purchase ) ; }
public class FileUtils { /** * Devuelve el path de un path absoluto de un fichero , por ejmeplo : / dir / toto . txt > / dir / o en windows \ toto \ toto . txt > \ toto \ * El separador se escoje en funcion de si la el fileNameAndPath ya contien \ o / . * El separador del final del path tambien esta incluido . * @ param fileNameAndPath contiene un path y un nombre del fichero , ej : / dir / toto . txt * @ return el path or " " si no tiene */ public static String getFilePath ( String fileNameAndPath ) { } }
if ( fileNameAndPath == null ) { return null ; } String fileSeparator ; if ( fileNameAndPath . contains ( "/" ) ) { fileSeparator = "/" ; } else { fileSeparator = "\\" ; } int lastIndexOf = fileNameAndPath . lastIndexOf ( fileSeparator ) ; if ( lastIndexOf < 0 ) { return "" ; } else { return fileNameAndPath . substring ( 0 , lastIndexOf ) ; }
public class StructureImpl { /** * Creates a SubstructureIdentifier based on the residues in this Structure . * Only the first and last residues of each chain are considered , so chains * with gaps * @ return A { @ link SubstructureIdentifier } with residue ranges constructed from each chain */ private SubstructureIdentifier toCanonical ( ) { } }
StructureIdentifier real = getStructureIdentifier ( ) ; if ( real != null ) { try { return real . toCanonical ( ) ; } catch ( StructureException e ) { // generate fake one if needed } } // No identifier set , so generate based on residues present in the structure List < ResidueRange > range = new ArrayList < > ( ) ; for ( Chain chain : getChains ( ) ) { List < Group > groups = chain . getAtomGroups ( ) ; ListIterator < Group > groupsIt = groups . listIterator ( ) ; if ( ! groupsIt . hasNext ( ) ) { continue ; // no groups in chain } Group g = groupsIt . next ( ) ; ResidueNumber first = g . getResidueNumber ( ) ; // TODO Detect missing intermediate residues - sbliven , 2015-01-28 // Already better than previous whole - chain representation // get last residue while ( groupsIt . hasNext ( ) ) { g = groupsIt . next ( ) ; } ResidueNumber last = g . getResidueNumber ( ) ; range . add ( new ResidueRange ( chain . getName ( ) , first , last ) ) ; } return new SubstructureIdentifier ( getPDBCode ( ) , range ) ;
public class GeneratorServiceImpl { @ Override public void startGeneration ( ) { } }
metrics . startGeneration ( ) ; getFilteredGenerators ( parallelGeneration ) . forEach ( this :: useGenerator ) ; metrics . stopGeneration ( ) ;
public class CmsExplorer {
    /**
     * Returns the html for the explorer file list.<p>
     *
     * Builds the list header, one entry per visible resource (paged unless in
     * gallery view), and the footer with the pager controls.
     *
     * @return the html for the explorer file list
     */
    public String getFileList() {
        // if mode is "listonly", only the list will be shown
        boolean galleryView = VIEW_GALLERY.equals(getSettings().getExplorerMode());
        // if mode is "listview", all files in the set collector will be shown
        boolean listView = VIEW_LIST.equals(getSettings().getExplorerMode());
        String currentFolder = getSettings().getExplorerResource();
        try {
            getCms().readResource(currentFolder, CmsResourceFilter.ALL);
        } catch (CmsException e) {
            // file was not readable - fall back to the site root
            currentFolder = "/";
        }
        // start creating content
        StringBuffer content = new StringBuffer(2048);
        content.append(getInitializationHeader());
        // now get the entries for the file list
        List<CmsResource> resources = getResources(getSettings().getExplorerResource());
        // if a folder contains too many entries we split them into pages of C_ENTRYS_PER_PAGE length
        int startat = 0;
        int stopat = resources.size();
        int selectedPage = 1;
        int numberOfPages = 0;
        int maxEntrys = getSettings().getUserSettings().getExplorerFileEntries();
        if (!galleryView) {
            selectedPage = getSettings().getExplorerPage();
            if (stopat > maxEntrys) {
                // we have to split
                numberOfPages = ((stopat - 1) / maxEntrys) + 1;
                if (selectedPage > numberOfPages) {
                    // the user has changed the folder and then selected a page for the old folder
                    selectedPage = 1;
                }
                startat = (selectedPage - 1) * maxEntrys;
                if ((startat + maxEntrys) < stopat) {
                    stopat = startat + maxEntrys;
                }
            }
        }
        // now check which file list columns we want to show (bit flags in the user preferences)
        int preferences = getUserPreferences();
        boolean showTitle = (preferences & CmsUserSettings.FILELIST_TITLE) > 0;
        boolean showNavText = (preferences & CmsUserSettings.FILELIST_NAVTEXT) > 0;
        boolean showPermissions = (preferences & CmsUserSettings.FILELIST_PERMISSIONS) > 0;
        boolean showDateLastModified = (preferences & CmsUserSettings.FILELIST_DATE_LASTMODIFIED) > 0;
        boolean showUserWhoLastModified = (preferences & CmsUserSettings.FILELIST_USER_LASTMODIFIED) > 0;
        boolean showDateCreated = (preferences & CmsUserSettings.FILELIST_DATE_CREATED) > 0;
        boolean showUserWhoCreated = (preferences & CmsUserSettings.FILELIST_USER_CREATED) > 0;
        boolean showDateReleased = (preferences & CmsUserSettings.FILELIST_DATE_RELEASED) > 0;
        boolean showDateExpired = (preferences & CmsUserSettings.FILELIST_DATE_EXPIRED) > 0;
        boolean fullPath = galleryView || listView;
        // set the right reference project
        CmsProject referenceProject;
        try {
            if (!listView) {
                referenceProject = getCms().readProject(getSettings().getProject());
            } else {
                referenceProject = getCms().readProject(getSettings().getExplorerProjectId());
            }
        } catch (CmsException ex) {
            // project could not be read - use the current request project instead
            referenceProject = getCms().getRequestContext().getCurrentProject();
        }
        CmsResourceUtil resUtil = new CmsResourceUtil(getCms());
        resUtil.setReferenceProject(referenceProject);
        // render one entry per resource on the selected page
        for (int i = startat; i < stopat; i++) {
            CmsResource res = resources.get(i);
            resUtil.setResource(res);
            content.append(getInitializationEntry(resUtil, fullPath, showTitle, showNavText, showPermissions, showDateLastModified, showUserWhoLastModified, showDateCreated, showUserWhoCreated, showDateReleased, showDateExpired));
        }
        content.append(getInitializationFooter(numberOfPages, selectedPage));
        return content.toString();
    }
}
public class V1InstanceGetter { /** * Get Build Projects filtered by the criteria specified in the passed in * filter . * @ param filter Limit the items returned . If null , then all items returned . * @ return Collection of items as specified in the filter . */ public Collection < BuildProject > buildProjects ( BuildProjectFilter filter ) { } }
return get ( BuildProject . class , ( filter != null ) ? filter : new BuildProjectFilter ( ) ) ;
public class ScroogeReadSupport { /** * Overriding to fall back to get descriptor from the { @ link # thriftClass } if thrift metadata is * not present * @ return */ @ Override public RecordMaterializer < T > prepareForRead ( Configuration configuration , Map < String , String > keyValueMetaData , MessageType fileSchema , ReadSupport . ReadContext readContext ) { } }
ThriftMetaData thriftMetaData = ThriftMetaData . fromExtraMetaData ( keyValueMetaData ) ; try { if ( thriftClass == null ) { thriftClass = getThriftClass ( keyValueMetaData , configuration ) ; } ThriftType . StructType descriptor = null ; if ( thriftMetaData != null ) { descriptor = thriftMetaData . getDescriptor ( ) ; } else { ScroogeStructConverter schemaConverter = new ScroogeStructConverter ( ) ; descriptor = schemaConverter . convert ( thriftClass ) ; } ThriftRecordConverter < T > converter = new ScroogeRecordConverter < T > ( thriftClass , readContext . getRequestedSchema ( ) , descriptor ) ; return converter ; } catch ( Exception t ) { throw new RuntimeException ( "Unable to create Thrift Converter for Thrift metadata " + thriftMetaData , t ) ; }
public class QueryFactory { /** * Creates a new query that is the basis for the { @ link JpaRepository # find ( org . cdlflex . fruit . Query ) } call . Depending * on which parameters are set in the { @ link org . cdlflex . fruit . Query } object , the query { @ code TypedQuery } is built * dynamically . * @ param query the fruit query * @ return a jpa query */ public TypedQuery < T > select ( Query query ) { } }
TypedQuery < T > q = select ( query . getFilter ( ) , query . getOrderBy ( ) ) ; Integer limit = query . getLimit ( ) ; Integer offset = query . getOffset ( ) ; if ( limit != null ) { q . setMaxResults ( limit ) ; } if ( offset != null ) { q . setFirstResult ( offset ) ; } return q ;
public class ModelExt { /** * check current instance is equal obj or not . [ wrapper equal ] * @ param obj * @ return true : equal , false : not equal . */ public boolean eq ( Object obj ) { } }
if ( this == obj ) return true ; if ( obj == null ) return false ; if ( this . _getUsefulClass ( ) != obj . getClass ( ) ) return false ; Model < ? > other = ( Model < ? > ) obj ; Table tableinfo = this . table ( ) ; Set < Entry < String , Object > > attrsEntrySet = this . _getAttrsEntrySet ( ) ; for ( Entry < String , Object > entry : attrsEntrySet ) { String key = entry . getKey ( ) ; Object value = entry . getValue ( ) ; Class < ? > clazz = tableinfo . getColumnType ( key ) ; if ( clazz == Float . class ) { } else if ( clazz == Double . class ) { } else if ( clazz == Model . class ) { } else { if ( value == null ) { if ( other . get ( key ) != null ) return false ; } else if ( ! value . equals ( other . get ( key ) ) ) return false ; } } return true ;
public class Bot {
    /**
     * Starts the ping task. Note: It only gets called on HELLO event type.
     * Any previously running ping scheduler is shut down first, so at most
     * one ping loop is active per session.
     * @param session the websocket session the pings are sent over
     */
    private void pingAtRegularIntervals(WebSocketSession session) {
        pingTask = new PingTask(session);
        // stop a scheduler left over from an earlier HELLO before starting a new one
        if (pingScheduledExecutorService != null) {
            pingScheduledExecutorService.shutdownNow();
        }
        pingScheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
        // first ping after 1 second, then one ping every 30 seconds
        pingScheduledExecutorService.scheduleAtFixedRate(pingTask, 1L, 30L, TimeUnit.SECONDS);
    }
}
public class ChannelzService {
    /**
     * Returns top level channels aka {@link io.grpc.ManagedChannel}.
     * Pages through root channels starting at the id given in the request.
     */
    @Override
    public void getTopChannels(GetTopChannelsRequest request, StreamObserver<GetTopChannelsResponse> responseObserver) {
        InternalChannelz.RootChannelList rootChannels = channelz.getRootChannels(request.getStartChannelId(), maxPageSize);
        GetTopChannelsResponse resp;
        try {
            resp = ChannelzProtoUtil.toGetTopChannelResponse(rootChannels);
        } catch (StatusRuntimeException e) {
            // proto conversion failed - propagate the status to the caller instead of a response
            responseObserver.onError(e);
            return;
        }
        responseObserver.onNext(resp);
        responseObserver.onCompleted();
    }
}
public class JSettingsPanel {
    /**
     * GEN-LAST:event_jCheckBoxDrawCurrentXActionPerformed
     *
     * Applies the "max points per row" limit when the checkbox is toggled:
     * when selected, the combo box value is used; otherwise -1 disables the limit.
     * NOTE: GUI-builder generated handler - the GEN markers must be preserved.
     */
    private void jCheckBoxMaxPointsActionPerformed(java.awt.event.ActionEvent evt) // GEN-FIRST:event_jCheckBoxMaxPointsActionPerformed
    {
        // GEN-HEADEREND:event_jCheckBoxMaxPointsActionPerformed
        if (jCheckBoxMaxPoints.isSelected()) {
            parent.getGraphPanelChart().getChartSettings().setMaxPointPerRow(getValueFromString((String) jComboBoxMaxPoints.getSelectedItem()));
        } else {
            // -1 disables the per-row point limit
            parent.getGraphPanelChart().getChartSettings().setMaxPointPerRow(-1);
        }
        refreshGraphPreview();
    }
}
public class Scope { /** * return scope depth * @ return Scope depth */ @ Override public int getDepth ( ) { } }
if ( depth == UNSET ) { if ( hasParent ( ) ) { depth = parent . getDepth ( ) + 1 ; } else { depth = 0 ; } } return depth ;
public class FormLoginExtensionProcessor { /** * Get custom error page that specified in the custom login page * @ param req * @ return */ private String getCustomReloginErrorPage ( HttpServletRequest req ) { } }
String reLogin = CookieHelper . getCookieValue ( req . getCookies ( ) , ReferrerURLCookieHandler . CUSTOM_RELOGIN_URL_COOKIENAME ) ; if ( reLogin != null && reLogin . length ( ) > 0 ) { if ( reLogin . indexOf ( "?" ) < 0 ) reLogin += "?error=error" ; } return reLogin ;
public class DZcs_sqr {
    /**
     * Symbolic QR or LU ordering and analysis.
     *
     * Computes a fill-reducing ordering, and for QR additionally the
     * elimination tree, column counts and row/column nonzero estimates.
     * @param order
     *            ordering method to use (0 to 3)
     * @param A
     *            column-compressed matrix
     * @param qr
     *            analyze for QR if true or LU if false
     * @return symbolic analysis for QR or LU, null on error
     */
    public static DZcss cs_sqr(int order, DZcs A, boolean qr) {
        int n, k, post[];
        DZcss S;
        boolean ok = true;
        if (!CS_CSC(A)) return (null); /* check inputs */
        n = A.n;
        S = new DZcss(); /* allocate result S */
        S.q = cs_amd(order, A); /* fill-reducing ordering */
        if (order > 0 && S.q == null) return (null);
        if (qr) /* QR symbolic analysis */
        {
            DZcs C = order > 0 ? cs_permute(A, null, S.q, false) : A;
            S.parent = cs_etree(C, true); /* etree of C'*C, where C = A(:,q) */
            post = cs_post(S.parent, n);
            S.cp = cs_counts(C, S.parent, post, true); /* col counts chol(C'*C) */
            ok = C != null && S.parent != null && S.cp != null && cs_vcount(C, S);
            /* total upper-triangular nonzeros = sum of column counts */
            if (ok) for (S.unz = 0, k = 0; k < n; k++) S.unz += S.cp[k];
            ok = ok && S.lnz >= 0 && S.unz >= 0; /* int overflow guard */
            if (order > 0) C = null;
        } else {
            S.unz = 4 * (A.p[n]) + n; /* for LU factorization only, */
            S.lnz = S.unz; /* guess nnz(L) and nnz(U) */
        }
        return (ok ? S : null); /* return result S */
    }
}
public class Factor { /** * Create a FactorUpdater to execute update . * @ param pathServiceSid Service Sid . * @ param pathIdentity Unique identity of the Entity * @ param pathSid A string that uniquely identifies this Factor . * @ return FactorUpdater capable of executing the update */ public static FactorUpdater updater ( final String pathServiceSid , final String pathIdentity , final String pathSid ) { } }
return new FactorUpdater ( pathServiceSid , pathIdentity , pathSid ) ;
public class WorkflowTriggerHistoriesInner {
    /**
     * Gets a workflow trigger history.
     * Synchronous wrapper: delegates to the async variant and blocks for the
     * single response body.
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param triggerName The workflow trigger name.
     * @param historyName The workflow trigger history name. Corresponds to the run name for triggers that resulted in a run.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WorkflowTriggerHistoryInner object if successful.
     */
    public WorkflowTriggerHistoryInner get(String resourceGroupName, String workflowName, String triggerName, String historyName) {
        return getWithServiceResponseAsync(resourceGroupName, workflowName, triggerName, historyName).toBlocking().single().body();
    }
}
public class CmsDataViewQuery {
    /**
     * Sets the selected filter values.<p>
     *
     * Stores a defensive copy of the given map, preserving its insertion order.
     * @param filterValues the filter values (must not be null)
     */
    public void setFilterValues(LinkedHashMap<String, String> filterValues) {
        // copy so later mutation by the caller cannot affect this query
        m_filterValues = new LinkedHashMap<String, String>(filterValues);
    }
}
public class SystemPropertyContext { /** * Adds a system property only if the property does not currently exist . If the property does not exist on * { @ link # restore ( ) } the property will be cleared . * @ param name the name of the property * @ param value the value to set if the property is absent */ void addPropertyIfAbsent ( final String name , final Object value ) { } }
final String currentValue = SecurityActions . getPropertyPrivileged ( name ) ; if ( currentValue == null ) { SecurityActions . setPropertyPrivileged ( name , value . toString ( ) ) ; propertiesToClear . add ( name ) ; }
public class DistributedClusterState { /** * Note that get _ version doesn ' t use zkCache avoid to conflict with get _ data */ @ Override public Integer get_version ( String path , boolean watch ) throws Exception { } }
return zkObj . getVersion ( zk , path , watch ) ;
public class WaitBuilder { /** * The file condition to wait for during execution . * @ deprecated in favor of { @ link # file ( ) } */ @ Deprecated public WaitFileConditionBuilder file ( String filePath ) { } }
FileCondition condition = new FileCondition ( ) ; condition . setFilePath ( filePath ) ; container . setCondition ( condition ) ; this . buildAndRun ( ) ; return new WaitFileConditionBuilder ( condition , this ) ;
public class WaitContainerResultCallback { /** * Awaits the status code from the container . * @ throws DockerClientException * if the wait operation fails . */ public Integer awaitStatusCode ( long timeout , TimeUnit timeUnit ) { } }
try { if ( ! awaitCompletion ( timeout , timeUnit ) ) { throw new DockerClientException ( "Awaiting status code timeout." ) ; } } catch ( InterruptedException e ) { throw new DockerClientException ( "Awaiting status code interrupted: " , e ) ; } return getStatusCode ( ) ;
public class TypeConverter { /** * Convert the passed source value to char * @ param aSrcValue * The source value . May be < code > null < / code > . * @ param cDefault * The default value to be returned if an error occurs during type * conversion . * @ return The converted value . * @ throws RuntimeException * If the converter itself throws an exception * @ see TypeConverterProviderBestMatch */ public static char convertToChar ( @ Nullable final Object aSrcValue , final char cDefault ) { } }
final Character aValue = convert ( aSrcValue , Character . class , null ) ; return aValue == null ? cDefault : aValue . charValue ( ) ;
public class OutputsInner {
    /**
     * Creates an output or replaces an already existing output under an existing streaming job.
     * Synchronous wrapper: delegates to the async variant and blocks for the single response body.
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param jobName The name of the streaming job.
     * @param outputName The name of the output.
     * @param output The definition of the output that will be used to create a new output or replace the existing one under the streaming job.
     * @param ifMatch The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes.
     * @param ifNoneMatch Set to '*' to allow a new output to be created, but to prevent updating an existing output. Other values will result in a 412 Pre-condition Failed response.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the OutputInner object if successful.
     */
    public OutputInner createOrReplace(String resourceGroupName, String jobName, String outputName, OutputInner output, String ifMatch, String ifNoneMatch) {
        return createOrReplaceWithServiceResponseAsync(resourceGroupName, jobName, outputName, output, ifMatch, ifNoneMatch).toBlocking().single().body();
    }
}
public class TypeaheadPrefetch { /** * The time ( in milliseconds ) the prefetched data should be cached in * localStorage . Defaults to < code > 86400000 < / code > ( 1 day ) . * @ param nTTL * Time to live in milliseconds . Must be & ge ; 1. * @ return this */ @ Nonnull public TypeaheadPrefetch setTTL ( @ Nonnegative final int nTTL ) { } }
if ( nTTL < 1 ) throw new IllegalArgumentException ( "TTL is too small: " + nTTL ) ; m_nTTL = nTTL ; return this ;
public class ByteArray { /** * Encodes the given byte array as a base - 64 String . * @ param byteArray The byte array to be encoded . * @ return The byte array encoded using base - 64. */ public static String toBase64 ( byte [ ] byteArray ) { } }
String result = null ; if ( byteArray != null ) { result = Base64 . encodeBase64String ( byteArray ) ; } return result ;
public class CmsContainerpageService { /** * Returns a list of container elements from a list with client id ' s . < p > * @ param clientIds list of client id ' s * @ param pageRootPath the container page root path * @ return a list of element beans * @ throws CmsException in case reading the element resource fails */ private List < CmsContainerElementBean > getCachedElements ( List < String > clientIds , String pageRootPath ) throws CmsException { } }
List < CmsContainerElementBean > result = new ArrayList < CmsContainerElementBean > ( ) ; for ( String id : clientIds ) { try { result . add ( getCachedElement ( id , pageRootPath ) ) ; } catch ( CmsIllegalArgumentException e ) { log ( e . getLocalizedMessage ( ) , e ) ; } } return result ;
public class Neo4jAliasResolver { /** * Given the alias of the entity and the path to the relationship it will return the alias * of the component . * @ param entityAlias the alias of the entity * @ param propertyPathWithoutAlias the path to the property without the alias * @ return the alias the relationship or null */ public String findAlias ( String entityAlias , List < String > propertyPathWithoutAlias ) { } }
RelationshipAliasTree aliasTree = relationshipAliases . get ( entityAlias ) ; if ( aliasTree == null ) { return null ; } RelationshipAliasTree associationAlias = aliasTree ; for ( int i = 0 ; i < propertyPathWithoutAlias . size ( ) ; i ++ ) { associationAlias = associationAlias . findChild ( propertyPathWithoutAlias . get ( i ) ) ; if ( associationAlias == null ) { return null ; } } return associationAlias . getAlias ( ) ;
public class DownloadAction { /** * Get the cached ETag for the given host and file * @ param host the host * @ param file the file * @ return the cached ETag or null if there is no ETag in the cache */ private String getCachedETag ( HttpHost host , String file ) { } }
Map < String , Object > cachedETags = readCachedETags ( ) ; @ SuppressWarnings ( "unchecked" ) Map < String , Object > hostMap = ( Map < String , Object > ) cachedETags . get ( host . toURI ( ) ) ; if ( hostMap == null ) { return null ; } @ SuppressWarnings ( "unchecked" ) Map < String , String > etagMap = ( Map < String , String > ) hostMap . get ( file ) ; if ( etagMap == null ) { return null ; } return etagMap . get ( "ETag" ) ;
public class FileLoginManager { /** * Adds a new user . * @ param username The username . * @ param password The password . * @ param roles A comma - separated list of roles that the new user will have . */ @ CommandArgument public void adduser ( @ OptionArgument ( "username" ) String username , @ OptionArgument ( "password" ) String password , @ OptionArgument ( "roles" ) String roles ) { } }
if ( users . containsKey ( username ) ) { System . err . println ( String . format ( "User '%s' already exists" , username ) ) ; } else { User user = new User ( username ) ; user . salt = passwd . generateSalt ( ) ; user . hash = passwd . getEncryptedPassword ( password , user . salt ) ; user . roles . addAll ( Arrays . asList ( roles . split ( "," ) ) ) ; users . put ( username , user ) ; }
public class TransformerException {
    /**
     * Print the trace of methods from where the error
     * originated. This will trace all nested exception
     * objects, as well as this object.
     * @param s The writer where the dump will be sent to. May be null,
     *          in which case standard error is used.
     */
    public void printStackTrace(java.io.PrintWriter s) {
        if (s == null) {
            // default to stderr with auto-flush when no writer is supplied
            s = new java.io.PrintWriter(System.err, true);
        }
        try {
            // prepend location information (systemId/line/column) when available
            String locInfo = getLocationAsString();
            if (null != locInfo) {
                s.println(locInfo);
            }
            super.printStackTrace(s);
        } catch (Throwable e) {
            // deliberately swallowed: printing diagnostics must never itself throw
        }
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves the enum from the registered Ifc4 package (EMF-generated accessor).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcGlobalOrLocalEnum() {
        if (ifcGlobalOrLocalEnumEEnum == null) {
            // classifier index 999 in the registered Ifc4 package
            ifcGlobalOrLocalEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(999);
        }
        return ifcGlobalOrLocalEnumEEnum;
    }
}
public class Fn { /** * Returns a stateful < code > Consumer < / code > which should not be used in parallel stream . * @ param action * @ return * @ deprecated replaced by { @ code Consumers # indexed ( IndexedConsumer ) } . */ @ Deprecated static < T > Consumer < T > indexeed ( final IndexedConsumer < T > action ) { } }
return Consumers . indexed ( action ) ;
public class SupplierUtils {
    /**
     * Returns a composed function that first applies the Supplier and then applies
     * {@linkplain BiFunction} {@code after} to the result. When the supplier (or the
     * handler applied to its result) throws, the handler is invoked with the exception
     * and a null result instead.
     * @param <T> result type of the supplier
     * @param <R> return type of the handler
     * @param supplier the supplier producing the value
     * @param handler the function applied after the supplier
     * @return a supplier composed of supplier and handler
     */
    public static <T, R> Supplier<R> andThen(Supplier<T> supplier, BiFunction<T, Exception, R> handler) {
        return () -> {
            try {
                return handler.apply(supplier.get(), null);
            } catch (Exception ex) {
                return handler.apply(null, ex);
            }
        };
    }
}
public class CmsADEManager { /** * Internal configuration lookup method . < p > * @ param cms the cms context * @ param rootPath the root path for which to look up the configuration * @ return the configuration for the given path */ protected CmsADEConfigData internalLookupConfiguration ( CmsObject cms , String rootPath ) { } }
boolean online = ( null == cms ) || isOnline ( cms ) ; CmsADEConfigCacheState state = getCacheState ( online ) ; return state . lookupConfiguration ( rootPath ) ;
public class DescribeReplicationGroupsResult { /** * A list of replication groups . Each item in the list contains detailed information about one replication group . * @ param replicationGroups * A list of replication groups . Each item in the list contains detailed information about one replication * group . */ public void setReplicationGroups ( java . util . Collection < ReplicationGroup > replicationGroups ) { } }
if ( replicationGroups == null ) { this . replicationGroups = null ; return ; } this . replicationGroups = new com . amazonaws . internal . SdkInternalList < ReplicationGroup > ( replicationGroups ) ;
public class WorkspacePersistentDataManager {
    /**
     * Check if given node path contains index higher 1 and if yes if same-name sibling exists in
     * persistence or in current changes log.
     * @param node the node whose previous same-name sibling must exist
     * @param con the current storage connection, reused when no dedicated SNS-check connection is configured
     * @param addedNodes paths of nodes added in the current changes log
     * @throws RepositoryException if the sibling check fails
     */
    private void checkSameNameSibling(NodeData node, WorkspaceStorageConnection con, final Set<QPath> addedNodes) throws RepositoryException {
        if (node.getQPath().getIndex() > 1) {
            // check if an older same-name sibling exists
            // the check is actual for all operations including delete
            final QPathEntry[] path = node.getQPath().getEntries();
            final QPathEntry[] siblingPath = new QPathEntry[path.length];
            final int li = path.length - 1;
            System.arraycopy(path, 0, siblingPath, 0, li);
            // same name as this node, index one lower
            siblingPath[li] = new QPathEntry(path[li], path[li].getIndex() - 1);
            if (addedNodes.contains(new QPath(siblingPath))) {
                // current changes log has the older same-name sibling
                return;
            } else {
                // check in persistence
                if (dataContainer.isCheckSNSNewConnection()) {
                    // dedicated connection; close it again whatever happens
                    final WorkspaceStorageConnection acon = dataContainer.openConnection();
                    try {
                        checkPersistedSNS(node, acon);
                    } finally {
                        acon.close();
                    }
                } else {
                    checkPersistedSNS(node, con);
                }
            }
        }
    }
}
public class FileAppender { /** * Android 1.6-2.1 used { @ link android . os . Environment # getExternalStorageDirectory ( ) } to * return the ( root ) external storage directory . Folders in this subdir were * shared by all applications and were not removed when the application was * deleted . Starting with andriod 2.2 , Context . getExternalFilesDir ( ) is * available . This is an external directory available to the application * which is removed when the application is removed . * This implementation uses Context . getExternalFilesDir ( ) if available , if * not available uses { @ link android . os . Environment # getExternalStorageDirectory ( ) } . * @ return a File object representing the external storage directory used by * this device or null if the subdir could not be created or proven * to exist */ protected synchronized File getExternalStorageDirectory ( ) { } }
File externalStorageDirectory ; if ( Build . VERSION . SDK_INT >= 8 && mContext != null ) { externalStorageDirectory = mContext . getExternalCacheDir ( ) ; } else { externalStorageDirectory = Environment . getExternalStorageDirectory ( ) ; } if ( externalStorageDirectory != null ) { if ( ! externalStorageDirectory . exists ( ) ) { if ( ! externalStorageDirectory . mkdirs ( ) ) { externalStorageDirectory = null ; Log . e ( TAG , "mkdirs failed on externalStorageDirectory " + externalStorageDirectory ) ; } } } return externalStorageDirectory ;