signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FSDataset { /** * Register the FSDataset MBean using the name * " hadoop : service = DataNode , name = FSDatasetState - < storageid > " */ void registerMBean ( final String storageId ) { } }
// We wrap to bypass standard mbean naming convetion . // This wraping can be removed in java 6 as it is more flexible in // package naming for mbeans and their impl . StandardMBean bean ; String storageName ; if ( storageId == null || storageId . equals ( "" ) ) { // Temp fix for the uninitialized storage storageName = "UndefinedStorageId" + rand . nextInt ( ) ; } else { storageName = storageId ; } try { bean = new StandardMBean ( this , FSDatasetMBean . class ) ; mbeanName = MBeanUtil . registerMBean ( "DataNode" , "FSDatasetState-" + storageName , bean ) ; versionBeanName = VersionInfo . registerJMX ( "DataNode" ) ; } catch ( NotCompliantMBeanException e ) { e . printStackTrace ( ) ; } DataNode . LOG . info ( "Registered FSDatasetStatusMBean" ) ;
public class AppsImpl { /** * Gets the available application domains . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; String & gt ; object */ public Observable < List < String > > listDomainsAsync ( ) { } }
return listDomainsWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < List < String > > , List < String > > ( ) { @ Override public List < String > call ( ServiceResponse < List < String > > response ) { return response . body ( ) ; } } ) ;
public class GSEAConverter { /** * Converts model to GSEA ( GMT ) and writes to out . * See class declaration for more information . * @ param model Model * @ param out output stream to write the result to * @ throws IOException when there ' s an output stream error */ public void writeToGSEA ( final Model model , OutputStream out ) throws IOException { } }
Collection < GMTEntry > entries = convert ( model ) ; if ( entries . size ( ) > 0 ) { Writer writer = new OutputStreamWriter ( out ) ; for ( GMTEntry entry : entries ) { if ( ( minNumOfGenesPerEntry <= 1 && ! entry . identifiers ( ) . isEmpty ( ) ) || entry . identifiers ( ) . size ( ) >= minNumOfGenesPerEntry ) { writer . write ( entry . toString ( ) + "\n" ) ; } } writer . flush ( ) ; }
public class JBBPDslBuilder { /** * Add named unsigned byte array which size calculated through expression . * @ param name name of the field , it can be null for anonymous one * @ param sizeExpression expression to calculate array size , must ot be null or empty . * @ return the builder instance , must not be null */ public JBBPDslBuilder UByteArray ( final String name , final String sizeExpression ) { } }
final Item item = new Item ( BinType . UBYTE_ARRAY , name , this . byteOrder ) ; item . sizeExpression = assertExpressionChars ( sizeExpression ) ; this . addItem ( item ) ; return this ;
public class JCudaDriver { /** * Sets device memory . * < pre > * CUresult cuMemsetD16Async ( * CUdeviceptr dstDevice , * unsigned short us , * size _ t N , * CUstream hStream ) * < / pre > * < div > * < p > Sets device memory . Sets the memory * range of < tt > N < / tt > 16 - bit values to the specified value < tt > us < / tt > . * The < tt > dstDevice < / tt > pointer must be two byte aligned . * < p > cuMemsetD16Async ( ) is asynchronous and * can optionally be associated to a stream by passing a non - zero < tt > stream < / tt > argument . * < div > * < span > Note : < / span > * < p > Note that this * function may also return error codes from previous , asynchronous * launches . * < / div > * < / div > * @ param dstDevice Destination device pointer * @ param us Value to set * @ param N Number of elements * @ param hStream Stream identifier * @ return CUDA _ SUCCESS , CUDA _ ERROR _ DEINITIALIZED , CUDA _ ERROR _ NOT _ INITIALIZED , * CUDA _ ERROR _ INVALID _ CONTEXT , CUDA _ ERROR _ INVALID _ VALUE * @ see JCudaDriver # cuArray3DCreate * @ see JCudaDriver # cuArray3DGetDescriptor * @ see JCudaDriver # cuArrayCreate * @ see JCudaDriver # cuArrayDestroy * @ see JCudaDriver # cuArrayGetDescriptor * @ see JCudaDriver # cuMemAlloc * @ see JCudaDriver # cuMemAllocHost * @ see JCudaDriver # cuMemAllocPitch * @ see JCudaDriver # cuMemcpy2D * @ see JCudaDriver # cuMemcpy2DAsync * @ see JCudaDriver # cuMemcpy2DUnaligned * @ see JCudaDriver # cuMemcpy3D * @ see JCudaDriver # cuMemcpy3DAsync * @ see JCudaDriver # cuMemcpyAtoA * @ see JCudaDriver # cuMemcpyAtoD * @ see JCudaDriver # cuMemcpyAtoH * @ see JCudaDriver # cuMemcpyAtoHAsync * @ see JCudaDriver # cuMemcpyDtoA * @ see JCudaDriver # cuMemcpyDtoD * @ see JCudaDriver # cuMemcpyDtoDAsync * @ see JCudaDriver # cuMemcpyDtoH * @ see JCudaDriver # cuMemcpyDtoHAsync * @ see JCudaDriver # cuMemcpyHtoA * @ see JCudaDriver # cuMemcpyHtoAAsync * @ see JCudaDriver # cuMemcpyHtoD * @ see JCudaDriver # cuMemcpyHtoDAsync * @ see 
JCudaDriver # cuMemFree * @ see JCudaDriver # cuMemFreeHost * @ see JCudaDriver # cuMemGetAddressRange * @ see JCudaDriver # cuMemGetInfo * @ see JCudaDriver # cuMemHostAlloc * @ see JCudaDriver # cuMemHostGetDevicePointer * @ see JCudaDriver # cuMemsetD2D8 * @ see JCudaDriver # cuMemsetD2D8Async * @ see JCudaDriver # cuMemsetD2D16 * @ see JCudaDriver # cuMemsetD2D16Async * @ see JCudaDriver # cuMemsetD2D32 * @ see JCudaDriver # cuMemsetD2D32Async * @ see JCudaDriver # cuMemsetD8 * @ see JCudaDriver # cuMemsetD8Async * @ see JCudaDriver # cuMemsetD16 * @ see JCudaDriver # cuMemsetD32 * @ see JCudaDriver # cuMemsetD32Async */ public static int cuMemsetD16Async ( CUdeviceptr dstDevice , short us , long N , CUstream hStream ) { } }
return checkResult ( cuMemsetD16AsyncNative ( dstDevice , us , N , hStream ) ) ;
public class ThrottledApiHandler { /** * Get a specific mastery * This method does not count towards the rate limit and is not affected by the throttle * @ param id The id of the mastery * @ param data Additional information to retrieve * @ return The mastery * @ see < a href = https : / / developer . riotgames . com / api / methods # ! / 649/2175 > Official API documentation < / a > */ public Future < Mastery > getMastery ( int id , MasteryData data ) { } }
return new DummyFuture < > ( handler . getMastery ( id , data ) ) ;
public class Event { /** * Returns the events to be thrown when this event has completed * ( see { @ link # isDone ( ) } ) . * @ return the completed events */ public Set < Event < ? > > completionEvents ( ) { } }
return completionEvents == null ? Collections . emptySet ( ) : Collections . unmodifiableSet ( completionEvents ) ;
public class ProtoParser { /** * Parse a named { @ code . proto } schema . The { @ code InputStream } is not closed . */ public static ProtoFile parseUtf8 ( String name , InputStream is ) throws IOException { } }
return parse ( name , new InputStreamReader ( is , UTF_8 ) ) ;
public class JsApiMessageImpl {
  /**
   * Provide the contribution of this part to the estimated encoded length.
   * This contributes the API properties.
   *
   * @return the superclass estimate plus ~40 bytes per known property
   */
  @Override
  int guessApproxLength() {
    int total = super.guessApproxLength();
    int size = 0;
    List props;
    // Assume 40 bytes per property (string name + value)
    // Each property map may be cached in a transient, or in the base JMF (or both!)
    // If there are no properties, the names may be represented by the EMPTY
    // JMF List, in which case we do NOT want to actually get them.
    // The property will only be flattened 'once' so only count it once.
    // JMS user properties: prefer the cached map, else count the JMF name list.
    if (jmsUserPropertyMap != null) {
      size += jmsUserPropertyMap.size();
    } else {
      if (getApi().isNotEMPTYlist(JsApiAccess.JMSPROPERTY_NAME)) {
        props = (List) getApi().getField(JsApiAccess.JMSPROPERTY_NAME);
        if (props != null) {
          size += props.size();
        }
      }
    }
    // Non-JMS ("other") user properties, same cached-map-first strategy.
    if (otherUserPropertyMap != null) {
      size += otherUserPropertyMap.size();
    } else {
      if (getApi().isNotEMPTYlist(JsApiAccess.OTHERPROPERTY_NAME)) {
        props = (List) getApi().getField(JsApiAccess.OTHERPROPERTY_NAME);
        if (props != null) {
          size += props.size();
        }
      }
    }
    // JMS system properties.
    if (jmsSystemPropertyMap != null) {
      size += jmsSystemPropertyMap.size();
    } else {
      if (getApi().isNotEMPTYlist(JsApiAccess.SYSTEMPROPERTY_NAME)) {
        props = (List) getApi().getField(JsApiAccess.SYSTEMPROPERTY_NAME);
        if (props != null) {
          size += props.size();
        }
      }
    }
    // System context entries.
    if (systemContextMap != null) {
      size += systemContextMap.size();
    } else {
      if (getApi().isNotEMPTYlist(JsApiAccess.SYSTEMCONTEXT_NAME)) {
        props = (List) getApi().getField(JsApiAccess.SYSTEMCONTEXT_NAME);
        if (props != null) {
          size += props.size();
        }
      }
    }
    // MQMD-set properties live in the header part (Hdr2), not the API part.
    if (mqMdSetPropertiesMap != null) {
      size += mqMdSetPropertiesMap.size();
    } else {
      // This field usually doesn't exist, so no need for the isNotEMPTY check
      props = (List) getHdr2().getField(JsHdr2Access.MQMDPROPERTIES_MAP_NAME);
      if (props != null) {
        size += props.size();
      }
    }
    // Apply the 40-bytes-per-property heuristic.
    total += size * 40;
    return total;
  }
}
public class ParaClient { /** * Invoke a GET request to the Para API . * @ param resourcePath the subpath after ' / v1 / ' , should not start with ' / ' * @ param params query parameters * @ return a { @ link Response } object */ public Response invokeGet ( String resourcePath , MultivaluedMap < String , String > params ) { } }
logger . debug ( "GET {}, params: {}" , getFullPath ( resourcePath ) , params ) ; return invokeSignedRequest ( getApiClient ( ) , accessKey , key ( ! JWT_PATH . equals ( resourcePath ) ) , GET , getEndpoint ( ) , getFullPath ( resourcePath ) , null , params , new byte [ 0 ] ) ;
public class CommandHelper {
  /**
   * Convert the value according the type of DeviceData.
   *
   * @param deviceDataArgout the DeviceData attribute to read
   * @return Long, the result in Long format (floating-point values are truncated)
   * @throws DevFailed if the value is a non-numerical String or an unsupported type
   */
  public static Long extractToLong(final DeviceData deviceDataArgout) throws DevFailed {
    // Extract the raw boxed value, then widen it to Long based on its runtime type.
    final Object value = CommandHelper.extract(deviceDataArgout);
    Long argout = null;
    if (value instanceof Short) {
      argout = Long.valueOf(((Short) value).longValue());
    } else if (value instanceof String) {
      try {
        argout = Long.valueOf((String) value);
      } catch (final Exception e) {
        // The string did not parse as a long: raise a Tango DevFailed.
        Except.throw_exception("TANGO_WRONG_DATA_ERROR",
            "output type " + value + " is not a numerical",
            "CommandHelper.extractToLong(deviceDataArgin)");
      }
    } else if (value instanceof Integer) {
      argout = Long.valueOf(((Integer) value).longValue());
    } else if (value instanceof Long) {
      argout = Long.valueOf(((Long) value).longValue());
    } else if (value instanceof Float) {
      // Fractional part is truncated.
      argout = Long.valueOf(((Float) value).longValue());
    } else if (value instanceof Boolean) {
      // Booleans map to 1 (true) / 0 (false).
      if (((Boolean) value).booleanValue()) {
        argout = Long.valueOf(1);
      } else {
        argout = Long.valueOf(0);
      }
    } else if (value instanceof Double) {
      // Fractional part is truncated.
      argout = Long.valueOf(((Double) value).longValue());
    } else if (value instanceof DevState) {
      // Tango device states are converted via their numeric value.
      argout = Long.valueOf(Integer.valueOf(((DevState) value).value()).longValue());
    } else {
      // Unsupported type: raise a Tango DevFailed.
      Except.throw_exception("TANGO_WRONG_DATA_ERROR",
          "output type " + value.getClass() + " not supported",
          "CommandHelper.extractToLong(Object value,deviceDataArgin)");
    }
    return argout;
  }
}
public class LocalPathUtils { /** * { @ inheritDoc } */ @ Override public Map < List < String > , String [ ] > getSimpleColumnsMaster ( String [ ] masterLabels , int [ ] joinColumnNo , String path , String separator ) throws IOException , URISyntaxException { } }
Map < List < String > , String [ ] > m = new HashMap < List < String > , String [ ] > ( ) ; File file = new File ( path ) ; if ( ! file . exists ( ) ) { return null ; } BufferedReader br = new BufferedReader ( new InputStreamReader ( new FileInputStream ( file ) ) ) ; String line ; while ( ( line = br . readLine ( ) ) != null ) { String [ ] strings = StringUtil . split ( line , separator , false ) ; if ( masterLabels . length != strings . length ) { continue ; } List < String > joinData = new ArrayList < String > ( ) ; for ( int i : joinColumnNo ) { joinData . add ( strings [ i ] ) ; } String [ ] data = new String [ strings . length ] ; for ( int i = 0 ; i < strings . length ; i ++ ) { data [ i ] = strings [ i ] ; } m . put ( joinData , data ) ; } br . close ( ) ; return m ;
public class XpathRenderer {
  /**
   * {@inheritDoc}
   *
   * Parses {@code src} as HTML, evaluates the pre-compiled XPath expression
   * {@code expr} against the resulting DOM, and serializes every matching node
   * as XHTML to {@code out}.
   */
  @Override
  public void render(DriverRequest httpRequest, String src, Writer out) throws IOException {
    try {
      HtmlDocumentBuilder htmlDocumentBuilder = new HtmlDocumentBuilder();
      // Be lenient: do not fail on fragments lacking a doctype.
      htmlDocumentBuilder.setDoctypeExpectation(DoctypeExpectation.NO_DOCTYPE_ERRORS);
      Document document = htmlDocumentBuilder.parse(new InputSource(new StringReader(src)));
      NodeList matchingNodes = (NodeList) expr.evaluate(document, XPathConstants.NODESET);
      XhtmlSerializer serializer = new XhtmlSerializer(out);
      // Bridge DOM nodes back through SAX events into the XHTML serializer
      // (the same serializer receives both content and lexical events).
      Dom2Sax dom2Sax = new Dom2Sax(serializer, serializer);
      for (int i = 0; i < matchingNodes.getLength(); i++) {
        dom2Sax.parse(matchingNodes.item(i));
      }
    } catch (XPathExpressionException e) {
      throw new ProcessingFailedException("Failed to evaluate XPath expression", e);
    } catch (SAXException e) {
      throw new ProcessingFailedException("Unable to parse source", e);
    }
  }
}
public class ProcessStarter { /** * Includes the given Environment Variable as part of the start . * @ param name The Environment Variable Name . * @ param value The Environment Variable Value . This will have toString ( ) invoked on it . * @ return This object instance . */ public ProcessStarter env ( String name , Object value ) { } }
this . builder . environment ( ) . put ( name , value . toString ( ) ) ; return this ;
public class RadialPercentageTileSkin {
  // ******************** Methods *******************************************

  /**
   * Reacts to tile events forwarded by the superclass: "RECALC" re-derives the
   * gauge geometry and data sum and redraws; "VISIBILITY" toggles the optional
   * text nodes according to the tile's current settings.
   */
  @Override
  protected void handleEvents(final String EVENT_TYPE) {
    super.handleEvents(EVENT_TYPE);
    if ("RECALC".equals(EVENT_TYPE)) {
      // The reference value is clamped so it is never below the scale maximum.
      referenceValue = tile.getReferenceValue() < maxValue ? maxValue : tile.getReferenceValue();
      angleStep = ANGLE_RANGE / range;
      sum = dataList.stream().mapToDouble(ChartData::getValue).sum();
      sections = tile.getSections();
      redraw();
      setBar(tile.getCurrentValue());
    } else if ("VISIBILITY".equals(EVENT_TYPE)) {
      // Show/hide each optional node based on the corresponding tile flag.
      enableNode(titleText, !tile.getTitle().isEmpty());
      enableNode(text, tile.isTextVisible());
      enableNode(unitText, !tile.getUnit().isEmpty());
      enableNode(descriptionText, tile.isValueVisible());
    }
  }
}
public class XmlStreamReaderUtils {
  /**
   * Returns the value of an attribute as an int. If the attribute is absent,
   * this method throws an exception.
   *
   * @param reader <code>XMLStreamReader</code> that contains attribute values.
   * @param namespace namespace
   * @param localName the local name of the attribute.
   * @return value of attribute as int
   * @throws XMLStreamException if attribute is empty.
   */
  public static int requiredIntAttribute(final XMLStreamReader reader, final String namespace,
      final String localName) throws XMLStreamException {
    final String attributeValue = reader.getAttributeValue(namespace, localName);
    // Missing attribute: fail with a descriptive message before parsing.
    if (attributeValue == null) {
      throw new XMLStreamException(
          MessageFormat.format("Attribute {0}:{1} is required", namespace, localName));
    }
    return Integer.parseInt(attributeValue);
  }
}
public class CommonUtils {
  /**
   * --- ANNOTATION TO JSON CONVERTER ---
   *
   * Converts the given annotations into child maps of the {@code config} tree:
   * one map per annotation, keyed by the annotation's lower-camel-cased simple
   * name, with one entry per annotation member. "name" and "override"
   * annotations are skipped; empty maps are removed; a single "value" member
   * collapses the map into a scalar.
   *
   * @throws IllegalAccessException if an annotation member cannot be read
   * @throws IllegalArgumentException if a member invocation is rejected
   * @throws InvocationTargetException if an annotation member throws
   */
  public static final void convertAnnotations(Tree config, Annotation[] annotations)
      throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    for (Annotation annotation : annotations) {
      // Create entry for annotation
      // Derive the key from the annotation's toString(): strip the package
      // prefix and any "(...)" argument list, then lower-camel-case it.
      String annotationName = annotation.toString();
      int i = annotationName.lastIndexOf('.');
      if (i > -1) {
        annotationName = annotationName.substring(i + 1);
      }
      i = annotationName.indexOf('(');
      if (i > -1) {
        annotationName = annotationName.substring(0, i);
      }
      if (annotationName.length() > 1) {
        annotationName = Character.toLowerCase(annotationName.charAt(0))
            + annotationName.substring(1);
      } else {
        annotationName = annotationName.toLowerCase();
      }
      // Skip annotations that carry no configuration payload.
      if ("name".equals(annotationName) || "override".equals(annotationName)) {
        continue;
      }
      Tree annotationMap = config.putMap(annotationName);
      // Add annotation values
      Class<? extends Annotation> type = annotation.annotationType();
      Method[] members = type.getDeclaredMethods();
      for (Method member : members) {
        member.setAccessible(true);
        String propName = member.getName();
        Object propValue = member.invoke(annotation);
        annotationMap.putObject(propName, propValue);
        // Drop children that came out empty (e.g. empty arrays).
        Tree newChild = annotationMap.get(propName);
        if (newChild.size() < 1) {
          newChild.remove();
        }
      }
      int size = annotationMap.size();
      if (size == 0) {
        // No surviving members: remove the empty map entirely.
        annotationMap.remove();
      } else if (size == 1) {
        // A lone "value" member collapses to a scalar for compact JSON.
        Tree value = annotationMap.getFirstChild();
        if (value != null && "value".equals(value.getName())) {
          annotationMap.setObject(value.asObject());
        }
      }
    }
  }
}
public class AWSSecurityHubClient { /** * Lists the results of the Security Hub insight specified by the insight ARN . * @ param getInsightResultsRequest * @ return Result of the GetInsightResults operation returned by the service . * @ throws InternalException * Internal server error . * @ throws InvalidInputException * The request was rejected because an invalid or out - of - range value was supplied for an input parameter . * @ throws InvalidAccessException * AWS Security Hub is not enabled for the account used to make this request . * @ throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits . * The error code describes the limit exceeded . * @ throws ResourceNotFoundException * The request was rejected because the specified resource cannot be found . * @ sample AWSSecurityHub . GetInsightResults * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / securityhub - 2018-10-26 / GetInsightResults " target = " _ top " > AWS * API Documentation < / a > */ @ Override public GetInsightResultsResult getInsightResults ( GetInsightResultsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetInsightResults ( request ) ;
public class DomainsInner { /** * Creates an ownership identifier for a domain or updates identifier details for an existing identifer . * Creates an ownership identifier for a domain or updates identifier details for an existing identifer . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param domainName Name of domain . * @ param name Name of identifier . * @ param domainOwnershipIdentifier A JSON representation of the domain ownership properties . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws DefaultErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the DomainOwnershipIdentifierInner object if successful . */ public DomainOwnershipIdentifierInner createOrUpdateOwnershipIdentifier ( String resourceGroupName , String domainName , String name , DomainOwnershipIdentifierInner domainOwnershipIdentifier ) { } }
return createOrUpdateOwnershipIdentifierWithServiceResponseAsync ( resourceGroupName , domainName , name , domainOwnershipIdentifier ) . toBlocking ( ) . single ( ) . body ( ) ;
public class ArrayUtils { /** * Returns whether or not the given array contains all the given elements to check for . * < pre > * ArrayUtils . containsAll ( new String [ ] { } , Collections . emptyList ( ) ) = true ; * ArrayUtils . containsAll ( new String [ ] { " a " } , Lists . newArrayList ( " a " ) ) = true ; * ArrayUtils . containsAll ( new String [ ] { " a " } , Lists . newArrayList ( " b " ) ) = false ; * ArrayUtils . containsAll ( new String [ ] { " a " , " b " } , Lists . newArrayList ( " a " , " b " , " a " , " b " ) ) = true ; * < / pre > * @ param arrayToCheck array to to check * @ param elementsToCheckFor elements to check for * @ param < T > the type of elements in the given array * @ return whether or not the given array contains all the given elements to check for . */ public static < T > boolean containsAll ( T [ ] arrayToCheck , Iterable < T > elementsToCheckFor ) { } }
return IterableUtils . containsAll ( Arrays . asList ( arrayToCheck ) , elementsToCheckFor ) ;
public class ObjectPoolPartition {
  /**
   * Shrinks the pool toward its configured minimum by retiring idle objects.
   * set the scavenge interval carefully
   *
   * @throws InterruptedException if re-queuing a live object is interrupted
   */
  public synchronized void scavenge() throws InterruptedException {
    // Only the surplus above the configured minimum is eligible for removal.
    int delta = this.totalCount - config.getMinSize();
    if (delta <= 0) return;
    int removed = 0;
    long now = System.currentTimeMillis();
    Poolable<T> obj;
    while (delta-- > 0 && (obj = objectQueue.poll()) != null) {
      // performance trade off: delta always decrease even if the queue is empty,
      // so it could take several intervals to shrink the pool to the configured min value.
      if (Log.isDebug())
        Log.debug("obj=", obj, ", now-last=", now - obj.getLastAccessTs(), ", max idle=",
            config.getMaxIdleMilliseconds());
      // Retire the object only if it has been idle past the threshold AND a
      // random draw passes the scavenge ratio (spreads removals over time).
      if (now - obj.getLastAccessTs() > config.getMaxIdleMilliseconds()
          && ThreadLocalRandom.current().nextDouble(1) < config.getScavengeRatio()) {
        decreaseObject(obj); // shrink the pool size if the object reaches max idle time
        removed++;
      } else {
        objectQueue.put(obj); // put it back
      }
    }
    if (removed > 0) Log.debug(removed, " objects were scavenged.");
  }
}
public class WaveformDetailComponent { /** * Determine the color to use to draw a cue list entry . Hot cues are green , ordinary memory points are red , * and loops are orange . * @ param entry the entry being drawn * @ return the color with which it should be represented . */ public static Color cueColor ( CueList . Entry entry ) { } }
if ( entry . hotCueNumber > 0 ) { return Color . GREEN ; } if ( entry . isLoop ) { return Color . ORANGE ; } return Color . RED ;
public class StreamletImpl { /** * Returns a new Streamlet that is the union of this and the ‘ other ’ streamlet . Essentially * the new streamlet will contain tuples belonging to both Streamlets */ @ Override public Streamlet < R > union ( Streamlet < ? extends R > otherStreamlet ) { } }
checkNotNull ( otherStreamlet , "otherStreamlet cannot be null" ) ; StreamletImpl < ? extends R > joinee = ( StreamletImpl < ? extends R > ) otherStreamlet ; UnionStreamlet < R > retval = new UnionStreamlet < > ( this , joinee ) ; addChild ( retval ) ; joinee . addChild ( retval ) ; return retval ;
public class CPDefinitionOptionRelPersistenceImpl { /** * Returns the last cp definition option rel in the ordered set where companyId = & # 63 ; . * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition option rel * @ throws NoSuchCPDefinitionOptionRelException if a matching cp definition option rel could not be found */ @ Override public CPDefinitionOptionRel findByCompanyId_Last ( long companyId , OrderByComparator < CPDefinitionOptionRel > orderByComparator ) throws NoSuchCPDefinitionOptionRelException { } }
CPDefinitionOptionRel cpDefinitionOptionRel = fetchByCompanyId_Last ( companyId , orderByComparator ) ; if ( cpDefinitionOptionRel != null ) { return cpDefinitionOptionRel ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionOptionRelException ( msg . toString ( ) ) ;
public class TypeImporter {
  /**
   * Determines whether the given non-wildcard import should be added.
   * By default, this returns false if the simple name is a built-in Java
   * language type name (java.lang or the default package).
   */
  private boolean shouldImport(TypeName typeName) {
    // don't import classes from the java.lang package
    String packageName = typeName.getPackageName();
    boolean inDefaultOrJavaLang = packageName.equals("java.lang") || packageName.equals("");
    if (inDefaultOrJavaLang && getJavaDefaultTypes().contains(typeName.getSimpleName())) {
      return false;
    }
    return true;
  }
}
public class GrammaticalStructure { /** * Read in a file containing a CoNLL - X dependency treebank and return a * corresponding list of GrammaticalStructures . * @ throws IOException */ public static List < GrammaticalStructure > readCoNLLXGrammaticStructureCollection ( String fileName , Map < String , GrammaticalRelation > shortNameToGRel , GrammaticalStructureFromDependenciesFactory factory ) throws IOException { } }
LineNumberReader reader = new LineNumberReader ( new FileReader ( fileName ) ) ; List < GrammaticalStructure > gsList = new LinkedList < GrammaticalStructure > ( ) ; List < List < String > > tokenFields = new ArrayList < List < String > > ( ) ; for ( String inline = reader . readLine ( ) ; inline != null ; inline = reader . readLine ( ) ) { if ( ! "" . equals ( inline ) ) { // read in a single sentence token by token List < String > fields = Arrays . asList ( inline . split ( "\t" ) ) ; if ( fields . size ( ) != CoNLLX_FieldCount ) { throw new RuntimeException ( String . format ( "Error (line %d): 10 fields expected but %d are present" , reader . getLineNumber ( ) , fields . size ( ) ) ) ; } tokenFields . add ( fields ) ; } else { if ( tokenFields . isEmpty ( ) ) continue ; // skip excess empty lines gsList . add ( buildCoNNLXGrammaticStructure ( tokenFields , shortNameToGRel , factory ) ) ; tokenFields = new ArrayList < List < String > > ( ) ; } } return gsList ;
public class CheckArg { /** * Check that the argument is negative ( < 0 ) . * @ param argument The argument * @ param name The name of the argument * @ throws IllegalArgumentException If argument is non - negative ( > = 0) */ public static void isNegative ( int argument , String name ) { } }
if ( argument >= 0 ) { throw new IllegalArgumentException ( CommonI18n . argumentMustBeNegative . text ( name , argument ) ) ; }
public class TaxinvoiceServiceImp { /** * ( non - Javadoc ) * @ see com . popbill . api . TaxinvoiceService # attachStatement ( java . lang . String , com . popbill . api . taxinvoice . MgtKeyType , java . lang . String , int , java . lang . String ) */ @ Override public Response attachStatement ( String CorpNum , MgtKeyType KeyType , String MgtKey , int SubItemCode , String SubMgtKey ) throws PopbillException { } }
DocRequest request = new DocRequest ( ) ; request . ItemCode = Integer . toString ( SubItemCode ) ; request . MgtKey = SubMgtKey ; String PostData = toJsonString ( request ) ; return httppost ( "/Taxinvoice/" + KeyType . name ( ) + "/" + MgtKey + "/AttachStmt" , CorpNum , PostData , null , "" , Response . class ) ;
public class ConcurrentLinkedHashMap { /** * Drains the buffers up to the amortized threshold and applies the pending * operations . */ void drainBuffers ( ) { } }
// A mostly strict ordering is achieved by observing that each buffer // contains tasks in a weakly sorted order starting from the last drain . // The buffers can be merged into a sorted array in O ( n ) time by using // counting sort and chaining on a collision . // Moves the tasks into the output array , applies them , and updates the // marker for the starting order of the next drain . int maxTaskIndex = moveTasksFromBuffers ( tasks ) ; updateDrainedOrder ( tasks , maxTaskIndex ) ; runTasks ( tasks , maxTaskIndex ) ;
public class MetamodelUtil { /** * Retrieves cascade from metamodel attribute on a xToMany relation . * @ param attribute given singular attribute * @ return an empty collection if no jpa relation annotation can be found . */ public Collection < CascadeType > getCascades ( SingularAttribute < ? , ? > attribute ) { } }
if ( attribute . getJavaMember ( ) instanceof AccessibleObject ) { AccessibleObject accessibleObject = ( AccessibleObject ) attribute . getJavaMember ( ) ; OneToOne oneToOne = accessibleObject . getAnnotation ( OneToOne . class ) ; if ( oneToOne != null ) { return newArrayList ( oneToOne . cascade ( ) ) ; } ManyToOne manyToOne = accessibleObject . getAnnotation ( ManyToOne . class ) ; if ( manyToOne != null ) { return newArrayList ( manyToOne . cascade ( ) ) ; } } return newArrayList ( ) ;
public class AbstractWebAppMojo { /** * region Telemetry Configuration Interface */ @ Override public Map < String , String > getTelemetryProperties ( ) { } }
final Map < String , String > map = super . getTelemetryProperties ( ) ; final WebAppConfiguration webAppConfiguration ; try { webAppConfiguration = getWebAppConfiguration ( ) ; } catch ( Exception e ) { map . put ( INVALID_CONFIG_KEY , e . getMessage ( ) ) ; return map ; } if ( webAppConfiguration . getImage ( ) != null ) { final String imageType = WebAppUtils . getDockerImageType ( webAppConfiguration . getImage ( ) , webAppConfiguration . getServerId ( ) , webAppConfiguration . getRegistryUrl ( ) ) . toString ( ) ; map . put ( DOCKER_IMAGE_TYPE_KEY , imageType ) ; } else { map . put ( DOCKER_IMAGE_TYPE_KEY , DockerImageType . NONE . toString ( ) ) ; } map . put ( SCHEMA_VERSION_KEY , schemaVersion ) ; map . put ( OS_KEY , webAppConfiguration . getOs ( ) == null ? "" : webAppConfiguration . getOs ( ) . toString ( ) ) ; map . put ( JAVA_VERSION_KEY , webAppConfiguration . getJavaVersion ( ) == null ? "" : webAppConfiguration . getJavaVersion ( ) . toString ( ) ) ; map . put ( JAVA_WEB_CONTAINER_KEY , webAppConfiguration . getWebContainer ( ) == null ? "" : webAppConfiguration . getJavaVersion ( ) . toString ( ) ) ; map . put ( LINUX_RUNTIME_KEY , webAppConfiguration . getRuntimeStack ( ) == null ? "" : webAppConfiguration . getRuntimeStack ( ) . stack ( ) + " " + webAppConfiguration . getRuntimeStack ( ) . version ( ) ) ; try { map . put ( DEPLOYMENT_TYPE_KEY , getDeploymentType ( ) . toString ( ) ) ; } catch ( MojoExecutionException e ) { map . put ( DEPLOYMENT_TYPE_KEY , "Unknown deployment type." ) ; } return map ;
public class Connection { /** * { @ inheritDoc } */ public String getClientInfo ( String name ) throws SQLException { } }
if ( this . closed ) { throw new SQLClientInfoException ( ) ; } // end of if return this . clientInfo . getProperty ( name ) ;
public class Cover { /** * non - parallelized utility method for use by other procedures */ public static Stream < Relationship > coverNodes ( Collection < Node > nodes ) { } }
return nodes . stream ( ) . flatMap ( n -> StreamSupport . stream ( n . getRelationships ( Direction . OUTGOING ) . spliterator ( ) , false ) . filter ( r -> nodes . contains ( r . getEndNode ( ) ) ) ) ;
public class AbstractGreenPepperMacro { /** * { @ inheritDoc } */ public String execute ( Map < String , String > parameters , String body , ConversionContext context ) throws MacroExecutionException { } }
try { return execute ( parameters , body , context . getPageContext ( ) ) ; } catch ( MacroException e ) { throw new MacroExecutionException ( e ) ; }
public class ApiOvhHostingreseller { /** * Set new reverse to ip * REST : POST / hosting / reseller / { serviceName } / reverse * @ param serviceName [ required ] The internal name of your reseller service * @ param reverse [ required ] Domain to set the ip reverse */ public String serviceName_reverse_POST ( String serviceName , String reverse ) throws IOException { } }
String qPath = "/hosting/reseller/{serviceName}/reverse" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "reverse" , reverse ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , String . class ) ;
public class Tap13Parser {
    /**
     * Called after the rest of the stream has been processed.
     *
     * Validates that a TAP plan was seen (when one is mandatory), flushes any
     * pending diagnostics, and unwinds nested (indented) parser states back to
     * the base indentation level.
     */
    private void onFinish() {
        // A mandatory plan that never appeared makes the whole stream invalid.
        if (planRequired && state.getTestSet().getPlan() == null) {
            throw new ParserException("Missing TAP Plan.");
        }
        parseDiagnostics();
        // Pop saved states until we are back at (or below) the base indentation.
        while (!states.isEmpty() && state.getIndentationLevel() > baseIndentation) {
            state = states.pop();
        }
    }
}
public class PersistentCookieStore { /** * Serializes Cookie object into String * @ param cookie cookie to be encoded , can be null * @ return cookie encoded as String */ protected String encodeCookie ( SerializableHttpCookie cookie ) { } }
if ( cookie == null ) return null ; ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; try { ObjectOutputStream outputStream = new ObjectOutputStream ( os ) ; outputStream . writeObject ( cookie ) ; } catch ( IOException e ) { Util . log ( "IOException in encodeCookie" , e ) ; return null ; } return byteArrayToHexString ( os . toByteArray ( ) ) ;
public class XMLGISElementUtil {
    /**
     * Write the XML description for the given container of map elements.
     *
     * If the container's geometry comes from an external shape file, only the
     * resource URLs (geometry and, when present, dBase attributes) are recorded;
     * otherwise every contained element is serialized inline under a wrapper node.
     *
     * @param xmlNode is the XML node to fill with the container data.
     * @param primitive is the container of map elements to output.
     * @param elementNodeName is the name of the XML node that should contain the map element data.
     *     It may be {@code null} to use the default name.
     * @param builder is the tool to create XML nodes.
     * @param pathBuilder is the tool to make paths relative.
     * @param resources is the tool that permits to gather the resources.
     * @throws IOException in case of error.
     */
    public static void writeGISElementContainer(Element xmlNode, GISElementContainer<?> primitive, String elementNodeName, XMLBuilder builder, PathBuilder pathBuilder, XMLResources resources) throws IOException {
        URL url;
        // Inline serialization is skipped when geometry lives in an external shape file.
        boolean saveElements = true;
        url = primitive.getElementGeometrySourceURL();
        if (url != null) {
            xmlNode.setAttribute(MapElementLayer.ATTR_ELEMENT_GEOMETRY_URL, resources.add(url, MimeName.MIME_SHAPE_FILE.getMimeConstant()));
            saveElements = false;
        }
        // Always record the map projection, falling back to the default projection.
        MapMetricProjection mapProjection = primitive.getElementGeometrySourceProjection();
        if (mapProjection == null) {
            mapProjection = MapMetricProjection.getDefault();
        }
        xmlNode.setAttribute(MapElementLayer.ATTR_ELEMENT_GEOMETRY_PROJECTION, mapProjection.name());
        // The dBase attribute source is only recorded when the geometry is external too.
        url = primitive.getElementAttributeSourceURL();
        if (!saveElements && url != null) {
            xmlNode.setAttribute(MapElementLayer.ATTR_ELEMENT_ATTRIBUTES_URL, resources.add(url, MimeName.MIME_DBASE_FILE.getMimeConstant()));
        }
        if (saveElements) {
            // No external geometry source: serialize each contained element inline.
            final Element elementList = builder.createElement(NODE_ELEMENTS);
            for (final MapElement element : primitive) {
                final Element e = XMLGISElementUtil.writeMapElement(element, elementNodeName, builder, resources);
                if (e != null) {
                    elementList.appendChild(e);
                }
            }
            // Only attach the wrapper node when it actually holds children.
            if (elementList.getChildNodes().getLength() > 0) {
                xmlNode.appendChild(elementList);
            }
        }
    }
}
public class ApiServicesRetryStrategy { /** * Decides the actual wait time in milliseconds , by applying a random multiplier to * retryAfterSeconds . */ private long getWaitUntilMillis ( int retryAfterSeconds ) { } }
double multiplier = waitTimeNoiseFactor . get ( ) . nextDouble ( ) ; multiplier = multiplier * ( MAX_WAIT_TIME_MULTIPLIER - MIN_WAIT_TIME_MULTIPLIER ) + MIN_WAIT_TIME_MULTIPLIER ; double result = SECONDS . toMillis ( retryAfterSeconds ) * multiplier ; return ( long ) result ;
public class MembershipTypeHandlerImpl { /** * Removes related membership entity . */ private void removeMemberships ( Node membershipTypeNode ) throws Exception { } }
PropertyIterator refTypes = membershipTypeNode . getReferences ( ) ; while ( refTypes . hasNext ( ) ) { Property refTypeProp = refTypes . nextProperty ( ) ; Node refTypeNode = refTypeProp . getParent ( ) ; Node refUserNode = refTypeNode . getParent ( ) ; membershipHandler . removeMembership ( refUserNode , refTypeNode ) ; }
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcPermit ( ) { } }
if ( ifcPermitEClass == null ) { ifcPermitEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 348 ) ; } return ifcPermitEClass ;
public class CaptureActivity {
    /**
     * Briefly show the contents of the barcode, then handle the result outside Barcode Scanner.
     *
     * Depending on how the scan was initiated ({@code source}), the result is returned
     * via an intent reply, a product-search URL, or a web-page reply URL.
     */
    private void handleDecodeExternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
        if (barcode != null) {
            viewfinderView.drawResultBitmap(barcode);
        }
        // How long the result stays on screen: caller-supplied extra or the default.
        long resultDurationMS;
        if (getIntent() == null) {
            resultDurationMS = DEFAULT_INTENT_RESULT_DURATION_MS;
        } else {
            resultDurationMS = getIntent().getLongExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS, DEFAULT_INTENT_RESULT_DURATION_MS);
        }
        if (resultDurationMS > 0) {
            // Truncate long payloads so the status line stays readable.
            String rawResultString = String.valueOf(rawResult);
            if (rawResultString.length() > 32) {
                rawResultString = rawResultString.substring(0, 32) + " ...";
            }
            statusView.setText(getString(resultHandler.getDisplayTitle()) + " : " + rawResultString);
        }
        maybeSetClipboard(resultHandler);
        switch (source) {
            case NATIVE_APP_INTENT:
                // Hand back whatever action they requested - this can be changed to Intents.Scan.ACTION when
                // the deprecated intent is retired.
                Intent intent = new Intent(getIntent().getAction());
                intent.addFlags(Intents.FLAG_NEW_DOC);
                intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
                intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
                byte[] rawBytes = rawResult.getRawBytes();
                if (rawBytes != null && rawBytes.length > 0) {
                    intent.putExtra(Intents.Scan.RESULT_BYTES, rawBytes);
                }
                // Copy optional decode metadata into intent extras, when present.
                Map<ResultMetadataType, ?> metadata = rawResult.getResultMetadata();
                if (metadata != null) {
                    if (metadata.containsKey(ResultMetadataType.UPC_EAN_EXTENSION)) {
                        intent.putExtra(Intents.Scan.RESULT_UPC_EAN_EXTENSION, metadata.get(ResultMetadataType.UPC_EAN_EXTENSION).toString());
                    }
                    Number orientation = (Number) metadata.get(ResultMetadataType.ORIENTATION);
                    if (orientation != null) {
                        intent.putExtra(Intents.Scan.RESULT_ORIENTATION, orientation.intValue());
                    }
                    String ecLevel = (String) metadata.get(ResultMetadataType.ERROR_CORRECTION_LEVEL);
                    if (ecLevel != null) {
                        intent.putExtra(Intents.Scan.RESULT_ERROR_CORRECTION_LEVEL, ecLevel);
                    }
                    @SuppressWarnings("unchecked")
                    Iterable<byte[]> byteSegments = (Iterable<byte[]>) metadata.get(ResultMetadataType.BYTE_SEGMENTS);
                    if (byteSegments != null) {
                        // Each raw byte segment gets its own numbered extra.
                        int i = 0;
                        for (byte[] byteSegment : byteSegments) {
                            intent.putExtra(Intents.Scan.RESULT_BYTE_SEGMENTS_PREFIX + i, byteSegment);
                            i++;
                        }
                    }
                }
                sendReplyMessage(R.id.return_scan_result, intent, resultDurationMS);
                break;
            case PRODUCT_SEARCH_LINK:
                // Reformulate the URL which triggered us into a query, so that the request goes to the same
                // TLD as the scan URL.
                int end = sourceUrl.lastIndexOf("/scan");
                String productReplyURL = sourceUrl.substring(0, end) + "?q=" + resultHandler.getDisplayContents() + "&source=zxing";
                sendReplyMessage(R.id.launch_product_query, productReplyURL, resultDurationMS);
                break;
            case ZXING_LINK:
                if (scanFromWebPageManager != null && scanFromWebPageManager.isScanFromWebPage()) {
                    String linkReplyURL = scanFromWebPageManager.buildReplyURL(rawResult, resultHandler);
                    // One-shot: clear the manager so a second decode is not replied to.
                    scanFromWebPageManager = null;
                    sendReplyMessage(R.id.launch_product_query, linkReplyURL, resultDurationMS);
                }
                break;
        }
    }
}
public class FactoryReplicatorXmlHttp {
    /**
     * <p>Create a bean.</p>
     *
     * Wires a fully configured {@code ReplicatorXmlHttp}: an XML entity reader with
     * its field fillers, an identical-XML database reader, and a database-clearing
     * preparer, all sharing services obtained lazily from the application bean factory.
     *
     * @param pAddParam additional param
     * @return M request(or) scoped bean
     * @throws Exception - an exception
     */
    @Override
    public final ReplicatorXmlHttp<RS> create(final Map<String, Object> pAddParam) throws Exception {
        ReplicatorXmlHttp<RS> srvGetDbCopyXml = new ReplicatorXmlHttp<RS>();
        // --- Entity reader and its per-kind field fillers ---
        SrvEntityReaderXml srvEntityReaderXml = new SrvEntityReaderXml();
        srvEntityReaderXml.setUtilXml(this.factoryAppBeans.lazyGetUtilXml());
        SrvEntityFieldFillerStd srvEntityFieldFillerStd = new SrvEntityFieldFillerStd();
        srvEntityFieldFillerStd.setUtilXml(this.factoryAppBeans.lazyGetUtilXml());
        srvEntityFieldFillerStd.setUtlReflection(this.factoryAppBeans.lazyGetUtlReflection());
        SrvEntityFieldPersistableBaseRepl srvEntityFieldPersistableBaseRepl = new SrvEntityFieldPersistableBaseRepl();
        srvEntityFieldPersistableBaseRepl.setUtlReflection(this.factoryAppBeans.lazyGetUtlReflection());
        SrvEntityFieldHasIdStringRepl srvEntityFieldHasIdStringRepl = new SrvEntityFieldHasIdStringRepl();
        srvEntityFieldHasIdStringRepl.setUtlReflection(this.factoryAppBeans.lazyGetUtlReflection());
        SrvEntityFieldHasIdLongRepl srvEntityFieldHasIdLongRepl = new SrvEntityFieldHasIdLongRepl();
        srvEntityFieldHasIdLongRepl.setUtlReflection(this.factoryAppBeans.lazyGetUtlReflection());
        SrvEntityFieldUserTomcatRepl srvEntityFieldUserTomcatRepl = new SrvEntityFieldUserTomcatRepl();
        // Fillers are registered under their simple class names (looked up by name at read time).
        Map<String, ISrvEntityFieldFiller> fieldsFillersMap = new HashMap<String, ISrvEntityFieldFiller>();
        fieldsFillersMap.put("SrvEntityFieldFillerStd", srvEntityFieldFillerStd);
        fieldsFillersMap.put("SrvEntityFieldPersistableBaseRepl", srvEntityFieldPersistableBaseRepl);
        fieldsFillersMap.put("SrvEntityFieldHasIdStringRepl", srvEntityFieldHasIdStringRepl);
        fieldsFillersMap.put("SrvEntityFieldHasIdLongRepl", srvEntityFieldHasIdLongRepl);
        fieldsFillersMap.put("SrvEntityFieldUserTomcatRepl", srvEntityFieldUserTomcatRepl);
        srvEntityReaderXml.setFieldsFillersMap(fieldsFillersMap);
        srvEntityReaderXml.setMngSettings(this.factoryAppBeans.lazyGetMngSettingsGetDbCopy());
        // --- Database reader built on top of the entity reader ---
        DatabaseReaderIdenticalXml<RS> databaseReaderIdenticalXml = new DatabaseReaderIdenticalXml<RS>();
        databaseReaderIdenticalXml.setUtilXml(this.factoryAppBeans.lazyGetUtilXml());
        databaseReaderIdenticalXml.setSrvEntityReader(srvEntityReaderXml);
        databaseReaderIdenticalXml.setSrvDatabase(this.factoryAppBeans.lazyGetSrvDatabase());
        databaseReaderIdenticalXml.setSrvOrm(this.factoryAppBeans.lazyGetSrvOrm());
        databaseReaderIdenticalXml.setLogger(this.factoryAppBeans.lazyGetLogger());
        // --- Before-import preparer: clears the target database ---
        SrvClearDatabase<RS> srvClearDatabase = new SrvClearDatabase<RS>();
        srvClearDatabase.setMngSettings(this.factoryAppBeans.lazyGetMngSettingsGetDbCopy());
        srvClearDatabase.setLogger(this.factoryAppBeans.lazyGetLogger());
        srvClearDatabase.setSrvDatabase(this.factoryAppBeans.lazyGetSrvDatabase());
        // --- Assemble the replicator ---
        srvGetDbCopyXml.setDatabasePrepearerBefore(srvClearDatabase);
        srvGetDbCopyXml.setDatabasePrepearerAfter(this.factoryAppBeans.lazyGetPrepareDbAfterFullImport());
        srvGetDbCopyXml.setUtilXml(this.factoryAppBeans.lazyGetUtilXml());
        srvGetDbCopyXml.setSrvEntityReaderXml(srvEntityReaderXml);
        srvGetDbCopyXml.setMngSettings(this.factoryAppBeans.lazyGetMngSettingsGetDbCopy());
        srvGetDbCopyXml.setSrvDatabase(this.factoryAppBeans.lazyGetSrvDatabase());
        srvGetDbCopyXml.setDatabaseReader(databaseReaderIdenticalXml);
        srvGetDbCopyXml.setLogger(this.factoryAppBeans.lazyGetLogger());
        return srvGetDbCopyXml;
    }
}
public class Script { /** * Gets the count of regular SigOps in the script program ( counting multisig ops as 20) */ public static int getSigOpCount ( byte [ ] program ) throws ScriptException { } }
Script script = new Script ( ) ; try { script . parse ( program ) ; } catch ( ScriptException e ) { // Ignore errors and count up to the parse - able length } return getSigOpCount ( script . chunks , false ) ;
public class FileLogHeader {
    /**
     * Initialise the FileLogHeader from a RandomAccessFile.
     *
     * Reads up to two header copies from the start of the log file, validates the
     * sector bits, restores the header fields and verifies the signature.
     *
     * @param logFile the stream that connects to the log.
     * @throws LogFileExhaustedException if there are no more log records left to read.
     * @throws PermanentIOException if there is an underlying java.io.IOException.
     */
    private void readHeader(java.io.RandomAccessFile logFile) throws ObjectManagerException {
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.entry(this, cclass, "readHeader", new Object[] { logFile });
        boolean validHeader = false; // True if the header is valid
        try {
            logFile.seek(0); // Start reading at the head of the file.
            byte[] headerBytes = new byte[headerLength];
            // The header is written twice; accept the first copy whose sector bits restore cleanly.
            for (int i = 1; i <= 2 && !validHeader; i++) {
                logFile.readFully(headerBytes);
                validHeader = FileLogInput.restoreSectorBits(headerBytes, headerBytes[5]);
                if (validHeader) {
                    java.io.ByteArrayInputStream byteArrayInputStream = new java.io.ByteArrayInputStream(headerBytes);
                    java.io.DataInputStream inputStream = new java.io.DataInputStream(byteArrayInputStream);
                    inputStream.skipBytes(1);
                    instanceVersion = inputStream.readInt();
                    sectorByte = inputStream.readByte();
                    // Read a known number of signature characters.
                    char[] signatureRead = new char[signature.length()];
                    for (int ichar = 0; ichar < signature.length(); ichar++)
                        signatureRead[ichar] = inputStream.readChar();
                    startByteAddress = inputStream.readLong();
                    fileSize = inputStream.readLong();
                    headerSequence = inputStream.readLong();
                    headerWriteTime = inputStream.readLong();
                    // Check the signature.
                    // NOTE(review): the trace point below uses "readheader" (lower case h),
                    // unlike the other trace points - kept as-is since it is a runtime string.
                    if (!(new String(signatureRead).equals(signature))) {
                        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                            trace.exit(this, cclass, "readheader", new Object[] { signatureRead });
                        throw new InvalidLogFileSignatureException(this, new String(signatureRead), signature);
                    } // if (!signatureRead).equals(signature))).
                    if (Tracing.isAnyTracingEnabled() && trace.isDebugEnabled())
                        trace.debug(this, cclass, "readheader", new Object[] { new Integer(instanceVersion), new Byte(sectorByte), new Long(startByteAddress), new Long(fileSize), new Long(headerSequence), new java.util.Date(headerWriteTime) });
                } // if (validHeader).
            } // for 2 log headers.
        } catch (java.io.EOFException exception) {
            // No FFDC Code Needed.
            // End of file while reading the header means the log holds no more records.
            ObjectManager.ffdc.processException(this, cclass, "readHeader", exception, "1:188:1.8");
            if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                trace.exit(this, cclass, "readHeader", exception);
            throw new LogFileExhaustedException(this, exception);
        } catch (java.io.IOException exception) {
            // No FFDC Code Needed.
            ObjectManager.ffdc.processException(this, cclass, "readHeader", exception, "1:199:1.8");
            if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                trace.exit(this, cclass, "readHeader", exception);
            throw new PermanentIOException(this, exception);
        } // catch java.io.IOException.
        // Neither header copy restored cleanly: the header is corrupt.
        if (!validHeader) {
            LogFileHeaderCorruptException logFileHeaderCorruptException = new LogFileHeaderCorruptException(this);
            ObjectManager.ffdc.processException(this, cclass, "readHeader", logFileHeaderCorruptException, "1:212:1.8");
            if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                trace.exit(this, cclass, "readHeader");
            throw logFileHeaderCorruptException;
        } // if (validHeader).
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.exit(this, cclass, "readHeader");
    }
}
public class Code { /** * Returns the value in { @ code result } to the calling method . After a return * it is an error to define further instructions after a return without * first { @ link # mark marking } an existing unmarked label . */ public void returnValue ( Local < ? > result ) { } }
if ( ! result . type . equals ( method . returnType ) ) { // TODO : this is probably too strict . throw new IllegalArgumentException ( "declared " + method . returnType + " but returned " + result . type ) ; } addInstruction ( new PlainInsn ( Rops . opReturn ( result . type . ropType ) , sourcePosition , null , RegisterSpecList . make ( result . spec ( ) ) ) ) ;
public class JDBCRepository { /** * Attempts to close a DataSource by searching for a " close " method . For * some reason , there ' s no standard way to close a DataSource . * @ return false if DataSource doesn ' t have a close method . */ public static boolean closeDataSource ( DataSource ds ) throws SQLException { } }
try { Method closeMethod = ds . getClass ( ) . getMethod ( "close" ) ; try { closeMethod . invoke ( ds ) ; } catch ( Throwable e ) { ThrowUnchecked . fireFirstDeclaredCause ( e , SQLException . class ) ; } return true ; } catch ( NoSuchMethodException e ) { return false ; }
public class SingletonRuntimeManager { /** * Retrieves session id from serialized file named jbpmSessionId . ser from given location . * @ param location directory where jbpmSessionId . ser file should be * @ param identifier of the manager owning this ksessionId * @ return sessionId if file was found otherwise 0 */ protected Long getPersistedSessionId ( String location , String identifier ) { } }
File sessionIdStore = new File ( location + File . separator + identifier + "-jbpmSessionId.ser" ) ; if ( sessionIdStore . exists ( ) ) { Long knownSessionId = null ; FileInputStream fis = null ; ObjectInputStream in = null ; try { fis = new FileInputStream ( sessionIdStore ) ; in = new ObjectInputStream ( fis ) ; Object tmp = in . readObject ( ) ; if ( tmp instanceof Integer ) { tmp = new Long ( ( Integer ) tmp ) ; } knownSessionId = ( Long ) tmp ; return knownSessionId . longValue ( ) ; } catch ( Exception e ) { return 0L ; } finally { if ( in != null ) { try { in . close ( ) ; } catch ( IOException e ) { } } if ( fis != null ) { try { fis . close ( ) ; } catch ( IOException e ) { } } } } else { return 0L ; }
public class ScmRepositoriesParser { /** * / * - - - Static methods - - - */ public Collection < ScmConfiguration > parseRepositoriesFile ( String fileName , String scmType , String scmPpk , String scmUser , String scmPassword ) { } }
try ( InputStream is = new FileInputStream ( fileName ) ) { String jsonText = IOUtils . toString ( is ) ; JSONObject json = new JSONObject ( jsonText ) ; JSONArray arr = json . getJSONArray ( SCM_REPOSITORIES ) ; List < ScmConfiguration > configurationList = new LinkedList < > ( ) ; arr . forEach ( scm -> { JSONObject obj = ( JSONObject ) scm ; String url = obj . getString ( URL ) ; String branch = obj . getString ( BRANCH ) ; String tag = obj . getString ( Constants . TAG ) ; configurationList . add ( new ScmConfiguration ( scmType , scmUser , scmPassword , scmPpk , url , branch , tag , null , false , 1 ) ) ; } ) ; return configurationList ; } catch ( FileNotFoundException e ) { logger . error ( "file Not Found: {}" , fileName ) ; } catch ( IOException e ) { logger . error ( "error getting file : {}" , e . getMessage ( ) ) ; } return null ;
public class SyncAgentsInner { /** * Creates or updates a sync agent . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server on which the sync agent is hosted . * @ param syncAgentName The name of the sync agent . * @ param syncDatabaseId ARM resource id of the sync database in the sync agent . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the SyncAgentInner object if successful . */ public SyncAgentInner createOrUpdate ( String resourceGroupName , String serverName , String syncAgentName , String syncDatabaseId ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , syncAgentName , syncDatabaseId ) . toBlocking ( ) . last ( ) . body ( ) ;
public class StaticClassTransformer {
    /**
     * Trigger the process to add entry hooks to a class (and all its parents).
     *
     * Rejects final classes that would require subclassing, then, under the lock,
     * walks the superclass chain collecting not-yet-mocked types and asks the agent
     * to retransform them. On failure, the newly added types are rolled back out of
     * the mocked-types set.
     *
     * @param features specification what to mock
     */
    <T> void mockClass(MockFeatures<T> features) {
        // Subclassing is needed when extra interfaces are requested or the type is abstract;
        // a final type cannot be subclassed, so that combination is unsupported.
        boolean subclassingRequired = !features.interfaces.isEmpty() || Modifier.isAbstract(features.mockedType.getModifiers());
        if (subclassingRequired && !features.mockedType.isArray() && !features.mockedType.isPrimitive() && Modifier.isFinal(features.mockedType.getModifiers())) {
            throw new MockitoException("Unsupported settings with this type '" + features.mockedType.getName() + "'");
        }
        synchronized (lock) {
            // Collect the type and its superclasses that have not been transformed yet.
            Set<Class<?>> types = new HashSet<>();
            Class<?> type = features.mockedType;
            do {
                boolean wasAdded = mockedTypes.add(type);
                if (wasAdded) {
                    if (!EXCLUDES.contains(type)) {
                        types.add(type);
                    }
                    type = type.getSuperclass();
                } else {
                    // Already mocked: the rest of the chain was handled earlier.
                    break;
                }
            } while (type != null && !type.isInterface());
            if (!types.isEmpty()) {
                try {
                    agent.requestTransformClasses(types.toArray(new Class<?>[types.size()]));
                } catch (UnmodifiableClassException exception) {
                    // Roll back bookkeeping for the classes we could not transform.
                    for (Class<?> failed : types) {
                        mockedTypes.remove(failed);
                    }
                    throw new MockitoException("Could not modify all classes " + types, exception);
                }
            }
        }
    }
}
public class SimpleXmlWriter { /** * Writes ' / > \ n ' . */ public void openCloseElement ( String elementName ) throws IOException { } }
assert ( elementNames . size ( ) > 0 ) ; assert ( elementNames . get ( elementNames . size ( ) - 1 ) . equals ( elementName ) ) ; writer . write ( "/>\n" ) ; removeElementName ( elementName ) ;
public class AsciiString { /** * Returns { @ code true } if both { @ link CharSequence } ' s are equals when ignore the case . This only supports 8 - bit * ASCII . */ public static boolean contentEqualsIgnoreCase ( CharSequence a , CharSequence b ) { } }
if ( a == null || b == null ) { return a == b ; } if ( a . getClass ( ) == AsciiString . class ) { return ( ( AsciiString ) a ) . contentEqualsIgnoreCase ( b ) ; } if ( b . getClass ( ) == AsciiString . class ) { return ( ( AsciiString ) b ) . contentEqualsIgnoreCase ( a ) ; } if ( a . length ( ) != b . length ( ) ) { return false ; } for ( int i = 0 ; i < a . length ( ) ; ++ i ) { if ( ! equalsIgnoreCase ( a . charAt ( i ) , b . charAt ( i ) ) ) { return false ; } } return true ;
public class HttpRequest {
    /**
     * Returns the long value of the given header parsed in the given radix,
     * or the default value when the header is absent or unparseable.
     *
     * @param radix numeric radix used to parse the header value
     * @param name header name
     * @param defaultValue default long value
     * @return the header's long value, or {@code defaultValue}
     */
    public long getLongHeader(int radix, String name, long defaultValue) {
        return header.getLongValue(radix, name, defaultValue);
    }
}
public class MultiLayerNetwork {
    /**
     * Provide the output of the specified layer, detached from any workspace. This is most commonly used at inference/test
     * time, and is more memory efficient than {@link #ffToLayerActivationsDetached(boolean, FwdPassType, boolean, int, INDArray, INDArray, INDArray, boolean)}
     * and {@link #ffToLayerActivationsInWs(int, FwdPassType, boolean, INDArray, INDArray, INDArray)}.<br>
     * This method clears all layer inputs.
     * NOTE: in general, no workspaces should be activated externally for this method!
     * This method handles the workspace activation as required
     *
     * @param train Training mode (true) or test/inference mode (false)
     * @param fwdPassType Type of forward pass to perform (STANDARD, RNN_TIMESTEP or RNN_ACTIVATE_WITH_STORED_STATE)
     * @param layerIndex Index (inclusive) to stop forward pass at. For all layers, use numLayers-1
     * @param input Input to the network
     * @param featureMask Input/feature mask array. May be null.
     * @param labelsMask Labels mask array. May be null
     * @param outputWorkspace Optional - if provided, outputs should be placed in this workspace. NOTE: this workspace
     *                        must be open
     * @return Output of the specified layer, detached from any workspace
     */
    protected INDArray outputOfLayerDetached(boolean train, @NonNull FwdPassType fwdPassType, int layerIndex, @NonNull INDArray input,
                                             INDArray featureMask, INDArray labelsMask, MemoryWorkspace outputWorkspace) {
        setInput(input);
        setLayerMaskArrays(featureMask, labelsMask);
        /*
        Idea here: we want to minimize memory, and return only the final array.
        Approach to do this: keep activations in memory only as long as we need them.
        In MultiLayerNetwork, the output activations of layer X are used as input to layer X+1.
        Which means: the workspace for layer X has to be open for both layers X and X+1 forward pass.

        Here, we'll use two workspaces for activations:
        1. For even index layers, activations WS that opens on start of even layer fwd pass, closes at end of odd layer fwd pass
        2. For odd index layers, activations WS that opens on start of odd layer fwd pass, closes at end of even layer fwd pass

        Additionally, we'll reconfigure the workspace manager for the *final* layer, so that we don't have to detach
         */
        if (outputWorkspace == null || outputWorkspace instanceof DummyWorkspace) {
            WorkspaceUtils.assertNoWorkspacesOpen("Expected no workspace active in outputOfLayerDetached", true);
        } else {
            Preconditions.checkState(outputWorkspace.isScopeActive(), "Workspace \"" + outputWorkspace.getId() +
                    "\" was provided for the network/layer outputs. When provided, this workspace must be opened before " +
                    "calling the output method; furthermore, closing the workspace is the responsibility of the user");
        }
        LayerWorkspaceMgr mgrEven;
        LayerWorkspaceMgr mgrOdd;
        WorkspaceMode wsm = train ? layerWiseConfigurations.getTrainingWorkspaceMode() : layerWiseConfigurations.getInferenceWorkspaceMode();
        if (wsm == WorkspaceMode.NONE) {
            mgrEven = LayerWorkspaceMgr.noWorkspaces();
            mgrOdd = mgrEven;
            // Check for external workspace - doesn't make sense to have one with workspace mode NONE
            if (outputWorkspace != null && !(outputWorkspace instanceof DummyWorkspace)) {
                throw new IllegalStateException("Workspace \"" + outputWorkspace.getId() +
                        "\" was provided for the network/layer outputs, however " + (train ? "training" : "inference") +
                        " workspace mode is set to NONE. Cannot put output activations into the specified workspace if" +
                        "workspaces are disabled for the network. use getConfiguration().setTraining/InferenceWorkspaceMode(WorkspaceMode.ENABLED)");
            }
        } else {
            // Two alternating activation workspaces (see the comment block above).
            mgrEven = LayerWorkspaceMgr.builder()
                    .with(ArrayType.FF_WORKING_MEM, WS_LAYER_WORKING_MEM, WS_LAYER_WORKING_MEM_CONFIG)
                    .with(ArrayType.ACTIVATIONS, WS_LAYER_ACT_1, WS_LAYER_ACT_X_CONFIG)
                    .with(ArrayType.INPUT, WS_LAYER_ACT_2, WS_LAYER_ACT_X_CONFIG) // Inputs should always be in the previous WS
                    .with(ArrayType.RNN_FF_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM_CONFIG)
                    .build();
            mgrOdd = LayerWorkspaceMgr.builder()
                    .with(ArrayType.FF_WORKING_MEM, WS_LAYER_WORKING_MEM, WS_LAYER_WORKING_MEM_CONFIG)
                    .with(ArrayType.ACTIVATIONS, WS_LAYER_ACT_2, WS_LAYER_ACT_X_CONFIG)
                    .with(ArrayType.INPUT, WS_LAYER_ACT_1, WS_LAYER_ACT_X_CONFIG) // Inputs should always be in the previous WS
                    .with(ArrayType.RNN_FF_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM_CONFIG)
                    .build();
        }
        mgrEven.setHelperWorkspacePointers(helperWorkspaces);
        mgrOdd.setHelperWorkspacePointers(helperWorkspaces);
        MemoryWorkspace wsActCloseNext = null;
        MemoryWorkspace temp = null;
        MemoryWorkspace initialWorkspace = Nd4j.getMemoryManager().getCurrentWorkspace();
        try {
            for (int i = 0; i <= layerIndex; i++) {
                LayerWorkspaceMgr mgr = (i % 2 == 0 ? mgrEven : mgrOdd);
                // Edge case: for first layer with dropout, inputs can't be in previous workspace (as it hasn't been opened yet)
                // Hence: put inputs in working memory
                if (i == 0 && wsm != WorkspaceMode.NONE) {
                    mgr.setWorkspace(ArrayType.INPUT, WS_LAYER_WORKING_MEM, WS_LAYER_WORKING_MEM_CONFIG);
                }
                try (MemoryWorkspace wsFFWorking = mgr.notifyScopeEntered(ArrayType.FF_WORKING_MEM)) { // Working memory: opened/closed once per layer
                    // Activations workspaces: opened/closed every second layer.
                    // So mgrEven (WS_LAYER_ACT_1) open at start of 0, 2, 4, 8; closed at end of 1, 3, 5, 7 etc
                    // and mgrOdd (WS_LAYER_ACT_2) opened at start of 1, 3, 5, 7; closed at end of 2, 4, 6, 8 etc
                    temp = mgr.notifyScopeEntered(ArrayType.ACTIVATIONS);
                    // Note that because we're opening activation workspaces not in a simple nested order, we'll manually
                    // override the previous workspace setting. Otherwise, when we close these workspaces, the "current"
                    // workspace may be set to the incorrect one
                    temp.setPreviousWorkspace(initialWorkspace);
                    if (i == 0 && input.isAttached()) {
                        // Don't leverage out of async DataSetIterator workspaces
                        mgr.setNoLeverageOverride(input.data().getParentWorkspace().getId());
                    }
                    if (getLayerWiseConfigurations().getInputPreProcess(i) != null) {
                        input = getLayerWiseConfigurations().getInputPreProcess(i).preProcess(input, getInputMiniBatchSize(), mgr);
                        // Validation: Exception if invalid (bad preprocessor implementation)
                        validateArrayWorkspaces(mgr, input, ArrayType.ACTIVATIONS, i, true, "Output of layer (inference)");
                    }
                    if (i == layerIndex) {
                        if (outputWorkspace != null && !(outputWorkspace instanceof DummyWorkspace)) {
                            // Place activations in user-specified workspace
                            mgr.setWorkspace(ArrayType.ACTIVATIONS, outputWorkspace.getId(), outputWorkspace.getWorkspaceConfiguration());
                        } else {
                            // Final activations: should be detached
                            mgr.setScopedOutFor(ArrayType.ACTIVATIONS);
                        }
                    }
                    if (fwdPassType == FwdPassType.STANDARD) {
                        // Standard feed-forward case
                        input = layers[i].activate(input, train, mgr);
                    } else if (fwdPassType == FwdPassType.RNN_TIMESTEP) {
                        // rnnTimeStep case
                        if (layers[i] instanceof RecurrentLayer) {
                            input = ((RecurrentLayer) layers[i]).rnnTimeStep(reshapeTimeStepInput(input), mgr);
                        } else if (layers[i] instanceof BaseWrapperLayer && ((BaseWrapperLayer) layers[i]).getUnderlying() instanceof RecurrentLayer) {
                            RecurrentLayer rl = ((RecurrentLayer) ((BaseWrapperLayer) layers[i]).getUnderlying());
                            input = rl.rnnTimeStep(reshapeTimeStepInput(input), mgr);
                        } else if (layers[i] instanceof MultiLayerNetwork) {
                            input = ((MultiLayerNetwork) layers[i]).rnnTimeStep(reshapeTimeStepInput(input));
                        } else {
                            input = layers[i].activate(input, false, mgr);
                        }
                    } else {
                        throw new IllegalArgumentException("Unsupported forward pass type for this method: " + fwdPassType);
                    }
                    layers[i].clear();
                    // Validation: Exception if invalid (bad layer implementation)
                    validateArrayWorkspaces(mgr, input, ArrayType.ACTIVATIONS, i, false, "Output of layer (inference)");
                    if (wsActCloseNext != null) {
                        wsActCloseNext.close();
                    }
                    wsActCloseNext = temp;
                    temp = null;
                }
                // Edge case: for first layer with dropout, inputs can't be in previous workspace (as it hasn't been opened yet)
                // Hence: put inputs in working memory -> set back to default for next use of workspace mgr
                if (i == 0 && wsm != WorkspaceMode.NONE) {
                    mgr.setWorkspace(ArrayType.INPUT, WS_LAYER_ACT_2, WS_LAYER_ACT_X_CONFIG); // Inputs should always be in the previous WS
                }
            }
        } finally {
            if (wsActCloseNext != null) {
                wsActCloseNext.close();
            }
            if (temp != null) {
                // Should only be non-null on exception
                while (temp.isScopeActive()) {
                    // For safety, should never occur in theory: a single close() call may not be sufficient, if
                    // workspace scope was borrowed and not properly closed when exception occurred
                    temp.close();
                }
            }
            Nd4j.getMemoryManager().setCurrentWorkspace(initialWorkspace);
            if (outputWorkspace == null || outputWorkspace instanceof DummyWorkspace) {
                WorkspaceUtils.assertNoWorkspacesOpen("Expected no workspace active at the end of outputOfLayerDetached", true);
            } else {
                Preconditions.checkState(outputWorkspace.isScopeActive(), "Expected output workspace to still be open" +
                        "at end of outputOfLayerDetached, but it is closed. This suggests an implementation or layer workspace problem");
            }
        }
        return input;
    }
}
public class ArrayUtil {
    /**
     * 将原始类型数组包装为包装类型 (boxes a primitive boolean array).
     *
     * @param values 原始类型数组; may be {@code null}
     * @return 包装类型数组, or {@code null} when the input is {@code null}
     */
    public static Boolean[] wrap(boolean... values) {
        if (null == values) {
            return null;
        }
        final Boolean[] boxed = new Boolean[values.length];
        for (int i = 0; i < boxed.length; i++) {
            boxed[i] = Boolean.valueOf(values[i]);
        }
        return boxed;
    }
}
public class BucketManager { /** * 查询异步抓取任务 * @ param region 抓取任务所在bucket区域 华东 z0 华北 z1 华南 z2 北美 na0 东南亚 as0 * @ param fetchWorkId 抓取任务id * @ return Response * @ throws QiniuException */ public Response checkAsynFetchid ( String region , String fetchWorkId ) throws QiniuException { } }
String path = String . format ( "http://api-%s.qiniu.com/sisyphus/fetch?id=%s" , region , fetchWorkId ) ; return client . get ( path , auth . authorization ( path ) ) ;
public class ByteConverter {
    /**
     * Parses a big-endian (network order) 64-bit value from the byte array.
     *
     * @param bytes the byte array to parse
     * @param idx the starting index of the parse in the byte array
     * @return the parsed long value
     */
    public static long int8(byte[] bytes, int idx) {
        long value = 0L;
        // Fold in 8 bytes, most significant byte first.
        for (int i = 0; i < 8; i++) {
            value = (value << 8) | (bytes[idx + i] & 255);
        }
        return value;
    }
}
public class GVRScriptFile {
    /**
     * Invokes a function defined in the script.
     *
     * @param funcName
     *         The function name.
     * @param params
     *         The parameter array.
     * @return
     *         A boolean value representing whether the function is
     *         executed correctly. If the function cannot be found, or
     *         parameters don't match, {@code false} is returned.
     */
    @Override
    public boolean invokeFunction(String funcName, Object[] params) {
        // Run script if it is dirty. This makes sure the script is run
        // on the same thread as the caller (suppose the caller is always
        // calling from the same thread).
        checkDirty();
        // Skip functions previously recorded as missing or throwing.
        if (isBadFunction(funcName)) {
            return false;
        }
        String statement = getInvokeStatementCached(funcName, params);
        // NOTE(review): localBindings appears to be a shared field. It is assigned
        // under mEngineLock, but fillBindings/eval/removeBindings below run outside
        // the lock — this is only safe if callers are single-threaded as the
        // comment above assumes; confirm.
        synchronized (mEngineLock) {
            localBindings = mLocalEngine.getBindings(ScriptContext.ENGINE_SCOPE);
            if (localBindings == null) {
                localBindings = mLocalEngine.createBindings();
                mLocalEngine.setBindings(localBindings, ScriptContext.ENGINE_SCOPE);
            }
        }
        fillBindings(localBindings, params);
        try {
            mLocalEngine.eval(statement);
        } catch (ScriptException e) {
            // The function is either undefined or throws; avoid invoking it later.
            addBadFunction(funcName);
            mLastError = e.getMessage();
            return false;
        } finally {
            // Always detach the parameter bindings, even on failure.
            removeBindings(localBindings, params);
        }
        return true;
    }
}
public class csvserver_lbvserver_binding { /** * Use this API to fetch csvserver _ lbvserver _ binding resources of given name . */ public static csvserver_lbvserver_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
csvserver_lbvserver_binding obj = new csvserver_lbvserver_binding ( ) ; obj . set_name ( name ) ; csvserver_lbvserver_binding response [ ] = ( csvserver_lbvserver_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class DynamicPipelineServiceImpl {
    /**
     * Filters out builds from the dashboard's job that used a different repository.
     * Builds picked up by a jenkins job might refer to different repositories if users
     * changed the job around at one point. We are only interested in the repository
     * that all of our commits come from. This will filter out builds that do not
     * correspond to our repository.
     * Note that this method may not work 100% due to limitations gathering data from
     * the jenkins api. See note in code for more information.
     *
     * @param builds a list of builds
     * @param url the url of the repository we are interested in
     * @param branch the branch of the repository we are interested in
     * @return the filtered list
     */
    protected List<Build> filterBuilds(List<Build> builds, String url, String branch) {
        List<Build> rt = new ArrayList<Build>();
        // Normalize null to "" so the URL comparison below never NPEs.
        String urlNoNull = url != null ? url : "";
        // String branchNoNull = branch != null ? branch : "";
        for (Build build : builds) {
            boolean added = false;
            // TODO this is not reliable
            for (RepoBranch repo : build.getCodeRepos()) {
                String rurl = repo.getUrl() != null ? repo.getUrl() : "";
                // String rbranch = repo.getBranch() != null ? repo.getBranch() : "";
                /*
                 * Note:
                 * Based on https://github.com/capitalone/Hygieia/pull/857 and experimentation it seems
                 * that branch information on the repo's is not 100% reliable when there are multiple
                 * repositories that participate in the build (at least for jenkins). It appears that jenkins
                 * will spawn off multiple builds but each build will have all of the repositories listed
                 * that participated in the first build. This means that we cannot distinguish which particular
                 * branch the build used in this case.
                 * As a result the timestamping of commits may be a little off in the build portion of the pipeline.
                 * We shouldn't however pick up commits that exist in other branches but not the branch we are tracking
                 * because when processBuilds runs those extraneous commits will be dropped since they will not link
                 * to a commit that we are tracking.
                 */
                // do not check type since it might not be known
                if (HygieiaUtils.smartUrlEquals(urlNoNull, rurl) /* && ObjectUtils.equals(branchNoNull, rbranch) */) {
                    rt.add(build);
                    added = true;
                    break;
                }
            }
            // Log skipped builds only when debug logging is on, to avoid the
            // cost of building the message otherwise.
            if (logger.isDebugEnabled() && !added) {
                StringBuilder sb = new StringBuilder();
                sb.append("Ignoring build " + build.getBuildUrl() + " since it does not use the component's repository\n");
                sb.append("Component repo: (url: " + url + " branch: " + branch + ")\n");
                sb.append("Build repos: ");
                boolean hasPrinted = false;
                for (RepoBranch repo : build.getCodeRepos()) {
                    if (hasPrinted) {
                        sb.append(" ");
                    }
                    sb.append("(url: " + repo.getUrl() + " branch: " + repo.getBranch() + ")\n");
                    hasPrinted = true;
                }
                if (!hasPrinted) {
                    sb.append("(None)\n");
                }
                logger.debug(sb.toString());
            }
        }
        return rt;
    }
}
public class Grid { /** * Cleanup models and grid */ @ Override protected Futures remove_impl ( final Futures fs ) { } }
for ( Key < Model > k : _models . values ( ) ) k . remove ( fs ) ; _models . clear ( ) ; return fs ;
public class KTypeVTypeHashMap { /** * Creates a hash map from two index - aligned arrays of key - value pairs . */ public static < KType , VType > KTypeVTypeHashMap < KType , VType > from ( KType [ ] keys , VType [ ] values ) { } }
if ( keys . length != values . length ) { throw new IllegalArgumentException ( "Arrays of keys and values must have an identical length." ) ; } KTypeVTypeHashMap < KType , VType > map = new KTypeVTypeHashMap < > ( keys . length ) ; for ( int i = 0 ; i < keys . length ; i ++ ) { map . put ( keys [ i ] , values [ i ] ) ; } return map ;
public class UploadWebOperation {
    /**
     * Download file from storage with content-disposition.
     *
     * @param params {group: "...default is GROUP...", id: "...", name: "...default will be taken from FileMetaBean.name..."}
     * @param request the servlet request (unused here)
     * @param response the servlet response the file bytes are streamed into
     * @return always {@code null}; the payload is written directly to the response
     * @throws Exception on storage or encoding failure
     */
    @WebOperationMethod
    public Map<String, Object> downloadAsFile(Map<String, Object> params, HttpServletRequest request, HttpServletResponse response) throws Exception {
        String database = Objects.get(params, "database");
        String collection = Objects.get(params, "collection", COLLECTION);
        String id = Objects.get(params, "id");
        String name = Objects.get(params, "name");
        FileStorage.FileReadBean b = null;
        try {
            b = FileStorage.read(new Id(database, collection, id));
            response.setContentType(b.getMeta().getContentType());
            // When no explicit name was requested, derive one from the stored
            // metadata: cap at 100 chars, then re-append the extension.
            // NOTE(review): the 100-char cap applies only to the metadata-derived
            // name, not to a caller-supplied one — confirm that is intentional.
            if (Objects.isNullOrEmpty(name)) {
                name = b.getMeta().getName();
                if (name.length() > 100)
                    name = name.substring(0, 100);
                if (!Objects.isNullOrEmpty(b.getMeta().getExt()))
                    name += "." + b.getMeta().getExt();
            }
            // NOTE(review): URLEncoder encodes spaces as '+', and the filename is
            // unquoted; some browsers may display the name oddly — consider
            // RFC 6266 (filename*=UTF-8''...) if this matters.
            response.addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(name, "UTF-8"));
            try {
                IOUtils.copy(b.getInputStream(), response.getOutputStream());
            } catch (IOException e) {
                throw S1SystemError.wrap(e);
            }
        } catch (NotFoundException e) {
            // Unknown id: report 404 instead of propagating.
            response.setStatus(404);
        } finally {
            // Safe to call with null; releases the storage read handle.
            FileStorage.closeAfterRead(b);
        }
        return null;
    }
}
public class BaseX { /** * Adds the values for the given characters to the value lookup table . * @ param chars * the list of characters to process . */ private void addChars ( String chars ) { } }
for ( int i = 0 ; i < chars . length ( ) ; i ++ ) { int c = chars . codePointAt ( i ) - min ; if ( values [ c ] != - 1 && values [ c ] != i ) { throw new IllegalArgumentException ( "Duplicate characters in the encoding alphabet" ) ; } values [ c ] = i ; }
public class AWTEventProviderIT {
    /**
     * Parameterizes the test instances.
     *
     * @return Collection of parameters for the constructor of
     *         {@link EventProviderTestBase}.
     */
    @Parameters
    public static final Collection<Object[]> getParameters() {
        // Each entry pairs a provider factory with a listener-store supplier.
        // NOTE(review): the third and fourth entries are byte-identical
        // (StatisticsEventProvider wrapping AWTEventProvider(store, true) with a
        // synchronized PriorityListenerStore). Confirm whether the fourth entry
        // was meant to differ (e.g. invokeNow = false) or is an intentional repeat.
        return Arrays.asList(
                new Object[] {
                        (Function<ListenerStore, ? extends EventProvider>) store -> new AWTEventProvider(store, true),
                        (Supplier<ListenerStore>) () -> PerformanceListenerStore.create().synchronizedView() },
                new Object[] {
                        (Function<ListenerStore, ? extends EventProvider>) store -> new AWTEventProvider(store, false),
                        (Supplier<ListenerStore>) () -> PriorityListenerStore.create().synchronizedView() },
                new Object[] {
                        (Function<ListenerStore, ? extends EventProvider>) store -> new StatisticsEventProvider<>(new AWTEventProvider(store, true)),
                        (Supplier<ListenerStore>) () -> PriorityListenerStore.create().synchronizedView() },
                new Object[] {
                        (Function<ListenerStore, ? extends EventProvider>) store -> new StatisticsEventProvider<>(new AWTEventProvider(store, true)),
                        (Supplier<ListenerStore>) () -> PriorityListenerStore.create().synchronizedView() });
    }
}
public class JsonModelCoder { /** * Encodes the given value into the JSON format , and appends it into the given stream using { @ link JsonPullParser # DEFAULT _ CHARSET } . < br > * This method is an alias of { @ link # encodeListNullToBlank ( Writer , List ) } . * @ param out { @ link OutputStream } to be written * @ param obj Value to encoded * @ throws IOException */ public void encode ( OutputStream out , T obj ) throws IOException { } }
OutputStreamWriter writer = new OutputStreamWriter ( out , JsonPullParser . DEFAULT_CHARSET ) ; encodeNullToBlank ( writer , obj ) ;
public class StatisticsJDBCStorageConnection { /** * { @ inheritDoc } */ public void rename ( NodeData data ) throws RepositoryException , UnsupportedOperationException , InvalidItemStateException , IllegalStateException { } }
Statistics s = ALL_STATISTICS . get ( RENAME_NODE_DATA_DESCR ) ; try { s . begin ( ) ; wcs . rename ( data ) ; } finally { s . end ( ) ; }
public class CPMeasurementUnitPersistenceImpl { /** * Returns the last cp measurement unit in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp measurement unit * @ throws NoSuchCPMeasurementUnitException if a matching cp measurement unit could not be found */ @ Override public CPMeasurementUnit findByUuid_C_Last ( String uuid , long companyId , OrderByComparator < CPMeasurementUnit > orderByComparator ) throws NoSuchCPMeasurementUnitException { } }
CPMeasurementUnit cpMeasurementUnit = fetchByUuid_C_Last ( uuid , companyId , orderByComparator ) ; if ( cpMeasurementUnit != null ) { return cpMeasurementUnit ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchCPMeasurementUnitException ( msg . toString ( ) ) ;
public class Reflection { /** * Utility method that allows to extract actual annotation from method , bypassing LibGDX annotation wrapper . * Returns null if annotation is not present . * @ param method method that might be annotated . * @ param annotationType class of the annotation . * @ return an instance of the annotation if the method is annotated or null if not . * @ param < Type > type of annotation . */ public static < Type extends Annotation > Type getAnnotation ( final Method method , final Class < Type > annotationType ) { } }
if ( isAnnotationPresent ( method , annotationType ) ) { return method . getDeclaredAnnotation ( annotationType ) . getAnnotation ( annotationType ) ; } return null ;
public class CommerceTierPriceEntryUtil {
    /**
     * Returns the last commerce tier price entry in the ordered set where
     * commercePriceEntryId = &#63; and minQuantity &le; &#63;.
     *
     * @param commercePriceEntryId the commerce price entry ID
     * @param minQuantity the min quantity
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce tier price entry, or <code>null</code> if
     *         a matching commerce tier price entry could not be found
     */
    public static CommerceTierPriceEntry fetchByC_LtM_Last(long commercePriceEntryId, int minQuantity, OrderByComparator<CommerceTierPriceEntry> orderByComparator) {
        // Pure delegation to the persistence bean; null result means "not found".
        return getPersistence().fetchByC_LtM_Last(commercePriceEntryId, minQuantity, orderByComparator);
    }
}
public class FormPanel { /** * Adds a form entry to the panel * @ param mainLabelText * @ param minorLabelText * @ param component */ public void addFormEntry ( String mainLabelText , final String minorLabelText , final JComponent component ) { } }
if ( ! mainLabelText . endsWith ( ":" ) ) { mainLabelText += ":" ; } final DCLabel mainLabel = DCLabel . dark ( mainLabelText ) ; mainLabel . setFont ( WidgetUtils . FONT_NORMAL ) ; mainLabel . setBorder ( new EmptyBorder ( 6 , 0 , 0 , 0 ) ) ; final JXLabel minorLabel ; if ( StringUtils . isNullOrEmpty ( minorLabelText ) ) { minorLabel = null ; } else { mainLabel . setToolTipText ( minorLabelText ) ; minorLabel = new JXLabel ( minorLabelText ) ; minorLabel . setLineWrap ( true ) ; minorLabel . setFont ( WidgetUtils . FONT_SMALL ) ; minorLabel . setBorder ( new EmptyBorder ( 0 , 4 , 0 , 0 ) ) ; minorLabel . setVerticalAlignment ( JXLabel . TOP ) ; minorLabel . setPreferredSize ( new Dimension ( FIELD_LABEL_WIDTH - 4 , 0 ) ) ; } addFormEntry ( mainLabel , minorLabel , component ) ;
public class GlueHiveMetastore { /** * < pre > * Ex : Partition keys = [ ' a ' , ' b ' , ' c ' ] * Valid partition values : * [ ' 1 ' , ' 2 ' , ' 3 ' ] or * < / pre > * @ param parts Full or partial list of partition values to filter on . Keys without filter will be empty strings . * @ return a list of partition names . */ @ Override public Optional < List < String > > getPartitionNamesByParts ( String databaseName , String tableName , List < String > parts ) { } }
Table table = getTableOrElseThrow ( databaseName , tableName ) ; String expression = buildGlueExpression ( table . getPartitionColumns ( ) , parts ) ; List < Partition > partitions = getPartitions ( databaseName , tableName , expression ) ; return Optional . of ( buildPartitionNames ( table . getPartitionColumns ( ) , partitions ) ) ;
public class ConditionalOptional {
    /**
     * If the passed FieldCase equals {@link FieldCase#NULL}, {@code null} is
     * returned. If {@link #isMandatory()} is {@code true} the field value is
     * generated depending on the FieldCase. If {@link #isMandatory()} is
     * {@code false} the constraint value is generated for only half the cases.
     */
    @Override
    protected String initValuesImpl(final FieldCase c) throws Exception {
        if (c == FieldCase.NULL) {
            return null;
        }
        // NOTE(review): `c != null` makes this branch taken for every non-null
        // case regardless of isMandatory()/choice.isNext(); per the Javadoc the
        // coin flip (choice.isNext()) was presumably meant to apply only when the
        // field is optional — confirm whether the condition should read
        // `c != null && (isMandatory() || choice.isNext())`.
        if (c != null || isMandatory() || choice.isNext()) {
            return constraint.initValues(c);
        }
        // Optional field "lost" the coin flip: generate the NULL case instead.
        return constraint.initValues(FieldCase.NULL);
    }
}
public class Expressions {
    /**
     * Create a new Template expression.
     *
     * @param cl type of expression
     * @param template template
     * @param args template parameters
     * @return template expression
     */
    public static <T> SimpleTemplate<T> simpleTemplate(Class<? extends T> cl, String template, Object... args) {
        // Parse the template string, snapshot the args, and delegate to the
        // Template-based overload.
        return simpleTemplate(cl, createTemplate(template), ImmutableList.copyOf(args));
    }
}
public class ExpressRouteLinksInner {
    /**
     * Retrieves the specified ExpressRouteLink resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @param linkName The name of the ExpressRouteLink resource.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ExpressRouteLinkInner> getAsync(String resourceGroupName, String expressRoutePortName, String linkName, final ServiceCallback<ExpressRouteLinkInner> serviceCallback) {
        // Bridge the observable-based overload to the callback-based API.
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, expressRoutePortName, linkName), serviceCallback);
    }
}
public class FilterConsensus {
    /**
     * Main.
     *
     * @param args command line args
     */
    public static void main(final String[] args) {
        // --- declare command line options ---
        Switch about = new Switch("a", "about", "display about message");
        Switch help = new Switch("h", "help", "display help message");
        FileArgument bamFile = new FileArgument("i", "bam-file", "input BAM file of consensus sequences", true);
        FileArgument bedFile = new FileArgument("x", "bed-file", "input BED file of genomic regions", true);
        FileArgument outputFile = new FileArgument("o", "output-file", "output FASTA file, default stdout", false);
        StringArgument gene = new StringArgument("g", "gene", "gene name, written to the FASTA headers", true);
        Switch cdna = new Switch("c", "cdna", "output cDNA from the same contig (phased consensus sequence) in FASTA format");
        Switch removeGaps = new Switch("r", "remove-gaps", "remove alignment gaps in the filtered consensus sequence");
        DoubleArgument minimumBreadth = new DoubleArgument("b", "minimum-breadth-of-coverage", "filter contigs less than minimum, default " + DEFAULT_MINIMUM_BREADTH, false);
        IntegerArgument expectedPloidy = new IntegerArgument("p", "expected-ploidy", "filter contigs more than expected ploidy, default " + DEFAULT_EXPECTED_PLOIDY, false);
        ArgumentList arguments = new ArgumentList(about, help, bamFile, bedFile, outputFile, gene, cdna, removeGaps, minimumBreadth, expectedPloidy);
        CommandLine commandLine = new CommandLine(args);
        FilterConsensus filterConsensus = null;
        // --- parse and validate; exit 0 for about/help, -1 on usage error ---
        try {
            CommandLineParser.parse(commandLine, arguments);
            if (about.wasFound()) {
                About.about(System.out);
                System.exit(0);
            }
            if (help.wasFound()) {
                Usage.usage(USAGE, null, commandLine, arguments, System.out);
                System.exit(0);
            }
            if (!bamFile.getValue().exists()) {
                throw new IllegalArgumentException("-i, --bam-file must be a file that exists");
            }
            if (!bedFile.getValue().exists()) {
                throw new IllegalArgumentException("-x, --bed-file must be a file that exists");
            }
            filterConsensus = new FilterConsensus(bamFile.getValue(), bedFile.getValue(), outputFile.getValue(), gene.getValue(), cdna.wasFound(), removeGaps.wasFound(), minimumBreadth.getValue(DEFAULT_MINIMUM_BREADTH), expectedPloidy.getValue(DEFAULT_EXPECTED_PLOIDY));
        } catch (CommandLineParseException | IllegalArgumentException e) {
            // about/help still win over a parse error (e.g. "--help" alongside
            // otherwise-invalid arguments).
            if (about.wasFound()) {
                About.about(System.out);
                System.exit(0);
            }
            if (help.wasFound()) {
                Usage.usage(USAGE, null, commandLine, arguments, System.out);
                System.exit(0);
            }
            Usage.usage(USAGE, e, commandLine, arguments, System.err);
            System.exit(-1);
        }
        // --- run; the tool's own return value becomes the process exit code ---
        try {
            System.exit(filterConsensus.call());
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
}
public class RemoteTaskRunner {
    /**
     * When an ephemeral worker node disappears from ZK, incomplete running tasks
     * will be retried by the logic in the status listener. We still have to make
     * sure there are no tasks assigned to the worker but not yet running.
     *
     * @param worker - the removed worker
     */
    private void removeWorker(final Worker worker) {
        log.info("Kaboom! Worker[%s] removed!", worker.getHost());
        final ZkWorker zkWorker = zkWorkers.get(worker.getHost());
        if (zkWorker != null) {
            try {
                // Re-queue whatever was assigned to this worker but not yet running.
                scheduleTasksCleanupForWorker(worker.getHost(), getAssignedTasks(worker));
            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                // Even if cleanup scheduling failed, release the ZK handle and
                // drop the worker from the active set.
                try {
                    zkWorker.close();
                } catch (Exception e) {
                    log.error(e, "Exception closing worker[%s]!", worker.getHost());
                }
                zkWorkers.remove(worker.getHost());
                checkBlackListedNodes();
            }
        }
        // The worker may also have been tracked as lazy; forget it there too.
        lazyWorkers.remove(worker.getHost());
    }
}
public class InstancesController { /** * List all registered instances with name * @ param name the name to search for * @ return application list */ @ GetMapping ( path = "/instances" , produces = MediaType . APPLICATION_JSON_VALUE , params = "name" ) public Flux < Instance > instances ( @ RequestParam ( "name" ) String name ) { } }
return registry . getInstances ( name ) . filter ( Instance :: isRegistered ) ;
public class QueryFilterSet { /** * Applied across all filters in the set . * { @ inheritDoc } */ @ Override public boolean include ( T result ) { } }
boolean include = true ; for ( IQueryFilter < T > dataFilter : filters ) { include &= dataFilter . include ( result ) ; if ( ! include ) { break ; } } return include ;
public class MapLazyScrollUpdateOutputHandler { /** * { @ inheritDoc } */ public MapLazyScrollUpdateOutputHandler handle ( List < QueryParameters > outputList ) throws MjdbcException { } }
if ( outputList instanceof QueryParametersLazyList ) { return new MapLazyScrollUpdateOutputHandler ( this . processor , ( QueryParametersLazyList ) outputList ) ; } else { throw new MjdbcRuntimeException ( "LazyOutputHandler can be used only together with LazyStatementHandler. \n" + "Please assign LazyStatementHandler to this QueryRunner or create new QueryRunnerService via MjdbcFactory" ) ; }
public class ExcelUtils { /** * 单sheet导出 */ private SheetTemplate exportExcelByModuleHandler ( String templatePath , int sheetIndex , List < ? > data , Map < String , String > extendMap , Class clazz , boolean isWriteHeader ) throws Excel4JException { } }
SheetTemplate template = SheetTemplateHandler . sheetTemplateBuilder ( templatePath ) ; generateSheet ( sheetIndex , data , extendMap , clazz , isWriteHeader , template ) ; return template ;
public class NodeTypes {
    /**
     * Determine if the node and property definitions of the supplied primary type
     * and mixin types allow the item with the supplied name to be removed.
     * The first matching (non-protected, when skipProtected) definition decides:
     * primary-type properties, then primary-type child nodes, then mixin
     * properties, then mixin child nodes; finally the residual name is tried.
     *
     * @param primaryTypeNameOfParent the name of the primary type for the parent node; may not be null
     * @param mixinTypeNamesOfParent the names of the mixin types for the parent node; may be null or empty if there are no
     *        mixins to include in the search
     * @param itemName the name of the item to be removed; may not be null
     * @param skipProtected true if this operation is being done from within the public JCR node and property API, or false
     *        if this operation is being done from within internal implementations
     * @return true if at least one item definition does not require items with the supplied name to exist, or false otherwise
     */
    boolean canRemoveItem(Name primaryTypeNameOfParent, List<Name> mixinTypeNamesOfParent, Name itemName, boolean skipProtected) {
        // First look in the primary type for a matching property definition...
        JcrNodeType primaryType = getNodeType(primaryTypeNameOfParent);
        if (primaryType != null) {
            for (JcrPropertyDefinition definition : primaryType.allPropertyDefinitions(itemName)) {
                // Skip protected definitions...
                if (skipProtected && definition.isProtected()) continue;
                // If this definition is not mandatory, then we have found that we CAN remove the property...
                return !definition.isMandatory();
            }
        }
        // Then, look in the primary type for a matching child node definition...
        if (primaryType != null) {
            for (JcrNodeDefinition definition : primaryType.allChildNodeDefinitions(itemName)) {
                // Skip protected definitions...
                if (skipProtected && definition.isProtected()) continue;
                // If this definition is not mandatory, then we have found that we CAN remove all children...
                return !definition.isMandatory();
            }
        }
        // Then, look in the mixin types for a matching property definition...
        if (mixinTypeNamesOfParent != null && !mixinTypeNamesOfParent.isEmpty()) {
            for (Name mixinTypeName : mixinTypeNamesOfParent) {
                JcrNodeType mixinType = getNodeType(mixinTypeName);
                if (mixinType == null) continue;
                for (JcrPropertyDefinition definition : mixinType.allPropertyDefinitions(itemName)) {
                    // Skip protected definitions...
                    if (skipProtected && definition.isProtected()) continue;
                    // If this definition is not mandatory, then we have found that we CAN remove the property...
                    return !definition.isMandatory();
                }
            }
        }
        // Then, look in the mixin types for a matching child node definition...
        if (mixinTypeNamesOfParent != null && !mixinTypeNamesOfParent.isEmpty()) {
            for (Name mixinTypeName : mixinTypeNamesOfParent) {
                JcrNodeType mixinType = getNodeType(mixinTypeName);
                if (mixinType == null) continue;
                for (JcrNodeDefinition definition : mixinType.allChildNodeDefinitions(itemName)) {
                    // Skip protected definitions...
                    if (skipProtected && definition.isProtected()) continue;
                    // If this definition is not mandatory, then we have found that we CAN remove all children...
                    return !definition.isMandatory();
                }
            }
        }
        // Nothing was found, so look for residual item definitions (recurse once
        // with the residual name; the equals() check prevents infinite recursion)...
        if (!itemName.equals(JcrNodeType.RESIDUAL_NAME)) return canRemoveItem(primaryTypeNameOfParent, mixinTypeNamesOfParent, JcrNodeType.RESIDUAL_NAME, skipProtected);
        return false;
    }
}
public class Objects { /** * Checks whether { @ code obj } is one of the elements of { @ code array } . */ public static boolean in ( Object obj , Object ... array ) { } }
for ( Object expected : array ) { if ( obj == expected ) { return true ; } } return false ;
public class Optionals { /** * Gets an optional of { @ code object } if { @ code object } is an instance of { @ code type } , or an empty optional . * @ param object the object * @ param type the type * @ param < T > the type * @ return the optional */ public static < T > @ NonNull Optional < T > cast ( final @ Nullable Object object , final @ NonNull Class < T > type ) { } }
return type . isInstance ( object ) ? Optional . of ( ( T ) object ) : Optional . empty ( ) ;
public class PlaceManager {
    /**
     * Called by the place manager after the place object has been successfully created.
     *
     * @param plobj the newly created place object this manager will govern
     */
    public void startup(PlaceObject plobj) {
        // keep track of this
        _plobj = plobj;
        // we usually want to create and register a speaker service instance that
        // clients can use to speak in this place
        if (shouldCreateSpeakService()) {
            plobj.setSpeakService(addProvider(createSpeakHandler(plobj), SpeakMarshaller.class));
        }
        // we'll need to hear about place object events
        plobj.addListener(this);
        plobj.addListener(_bodyUpdater);
        plobj.addListener(_occListener);
        plobj.addListener(_deathListener);
        // configure this place's access controller
        plobj.setAccessController(getAccessController());
        // let our derived classes do their thing; a subclass failure must not
        // leave the manager half-started, so the error is logged and swallowed
        try {
            didStartup();
        } catch (Throwable t) {
            log.warning("Manager choked in didStartup()", "where", where(), t);
        }
        // since we start empty, we need to immediately assume shutdown
        checkShutdownInterval();
    }
}
public class JvmGenericTypeImpl {
    /**
     * EMF-generated reflective feature setter: dispatches on the feature ID and
     * delegates unknown features to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case TypesPackage.JVM_GENERIC_TYPE__TYPE_PARAMETERS:
                // Replace-all semantics: clear, then add the new collection.
                getTypeParameters().clear();
                getTypeParameters().addAll((Collection<? extends JvmTypeParameter>) newValue);
                return;
            case TypesPackage.JVM_GENERIC_TYPE__INTERFACE:
                setInterface((Boolean) newValue);
                return;
            case TypesPackage.JVM_GENERIC_TYPE__STRICT_FLOATING_POINT:
                setStrictFloatingPoint((Boolean) newValue);
                return;
            case TypesPackage.JVM_GENERIC_TYPE__ANONYMOUS:
                setAnonymous((Boolean) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class ExpressionParser {
    /**
     * Adds and deletes characters to aid in the creation of the binary expression
     * tree: strips whitespace, lowercases, rewrites unary minus as '$',
     * auto-closes unbalanced '(' and inserts implicit '*' before '(' or the
     * variable.
     *
     * @param exp the raw expression text; must be non-empty
     * @return the normalized expression
     */
    private String formatString(String exp) {
        // Strip all whitespace so positional substring checks below are stable.
        exp = exp.replaceAll("\\s", "");
        exp = exp.toLowerCase();
        int count = 0;
        // NOTE(review): an empty input makes substring(0, 1) throw
        // StringIndexOutOfBoundsException — confirm callers never pass "".
        if (exp.substring(0, 1).equals("-")) {
            // if expression starts with a minus sign, it is a unary one
            exp = "$" + exp.substring(1); // replace
        }
        // Count unbalanced parentheses...
        for (int i = 0; i < exp.length(); i++) {
            if (exp.substring(i, i + 1).equals("(")) count++;
            else if (exp.substring(i, i + 1).equals(")")) count--;
        }
        // ...and auto-close any still-open '('. NOTE(review): a surplus of ')'
        // (count < 0) is silently ignored here — confirm that is intentional.
        while (count > 0) {
            exp += ")";
            count--;
        }
        // At the operators, when the operator is "-" and it is preceded by another operator,
        // or preceded by a left parenthesis, or when it is the first character of the input
        // it is a unary minus rather than binary. In this case, I change it to another
        // character, '$', and make its precedence the same as that of '^'.
        // The second branch inserts an implicit '*' between a digit/variable and
        // a following '(' or variable (e.g. "2x" -> "2*x").
        for (int i = 0; i < exp.length() - 1; i++) {
            String tmp1 = exp.substring(i, i + 1);
            String tmp2 = exp.substring(i + 1, i + 2);
            if (tmp2.equals("-") && (ExpressionParser.isOperator(tmp1) || tmp1.equals("(")))
                exp = exp.substring(0, i + 1) + "$" + exp.substring(i + 2);
            else if ((tmp1.matches("[0-9]+") || tmp1.equals(var)) && (tmp2.equals("(") || tmp2.equals(var)))
                exp = exp.substring(0, i + 1) + "*" + exp.substring(i + 1);
        }
        return exp;
    }
}
public class AlignmentUtils {
    /**
     * Calculates score of alignment.
     *
     * @param seq1 target sequence
     * @param seq1Range aligned range
     * @param mutations mutations (alignment)
     * @param scoring scoring
     * @param <S> sequence type
     * @return score
     * @throws IllegalArgumentException if the first mutation lies before the aligned range
     */
    public static <S extends Sequence<S>> int calculateScore(S seq1, Range seq1Range, Mutations<S> mutations, LinearGapAlignmentScoring<S> scoring) {
        // Reject mutations that start before the aligned range.
        if (!mutations.isEmpty() && mutations.getPositionByIndex(0) < seq1Range.getFrom() - 1)
            throw new IllegalArgumentException();
        final AlignmentIteratorForward<S> iterator = new AlignmentIteratorForward<>(mutations, seq1Range);
        int score = 0;
        // Walk the alignment position by position, accumulating per-event scores.
        while (iterator.advance()) {
            final int mut = iterator.getCurrentMutation();
            switch (Mutation.getRawTypeCode(mut)) {
                case RAW_MUTATION_TYPE_SUBSTITUTION:
                    // Substitution: score the (from, to) letter pair.
                    score += scoring.getScore(Mutation.getFrom(mut), Mutation.getTo(mut));
                    break;
                case RAW_MUTATION_TYPE_DELETION:
                case RAW_MUTATION_TYPE_INSERTION:
                    // Indels carry a flat gap penalty (linear gap scoring).
                    score += scoring.getGapPenalty();
                    break;
                default:
                    // Match: score the letter against itself.
                    byte c = seq1.codeAt(iterator.getSeq1Position());
                    score += scoring.getScore(c, c);
                    break;
            }
        }
        return score;
    }
}
public class LaxURI {
    /**
     * Parse a URI reference as a {@code String} with the character encoding of
     * the local system or the document.
     * <p>
     * IA: OVERRIDDEN IN LaxURI TO INCLUDE FIX FOR
     * http://issues.apache.org/jira/browse/HTTPCLIENT-588
     * AND
     * http://webteam.archive.org/jira/browse/HER-1268
     * <p>
     * Splits the reference into scheme, authority, path, query and fragment
     * components (the groups of the RFC 2396 reference-parsing regex) and
     * stores each into the corresponding field of this URI instance. For
     * example, {@code http://jakarta.apache.org/ietf/uri/#Related} yields
     * scheme {@code http}, authority {@code jakarta.apache.org}, path
     * {@code /ietf/uri/} and fragment {@code Related}.
     *
     * @param original the original character sequence
     * @param escaped  {@code true} if {@code original} is already escaped
     * @throws URIException if the reference is null or a component fails validation
     */
    protected void parseUriReference(String original, boolean escaped) throws URIException {
        // validate and contruct the URI character sequence
        if (original == null) {
            throw new URIException("URI-Reference required");
        }
        String tmp = original.trim();
        /*
         * The length of the string sequence of characters.
         * It may not be equal to the length of the byte array.
         */
        int length = tmp.length();
        /*
         * Remove the delimiters like angle brackets around an URI.
         */
        if (length > 0) {
            char[] firstDelimiter = { tmp.charAt(0) };
            if (validate(firstDelimiter, delims)) {
                if (length >= 2) {
                    char[] lastDelimiter = { tmp.charAt(length - 1) };
                    if (validate(lastDelimiter, delims)) {
                        tmp = tmp.substring(1, length - 1);
                        length = length - 2;
                    }
                }
            }
        }
        /*
         * The starting index
         */
        int from = 0;
        /*
         * The test flag whether the URI is started from the path component.
         */
        boolean isStartedFromPath = false;
        int atColon = tmp.indexOf(':');
        int atSlash = tmp.indexOf('/');
        // A reference starts with a path when there is no scheme colon, or when
        // a slash precedes the first colon (so the colon is inside the path).
        if (!tmp.startsWith("//") && (atColon <= 0 || (atSlash >= 0 && atSlash < atColon))) {
            isStartedFromPath = true;
        }
        // Find the first component delimiter; ':' only counts when a scheme is
        // still possible.
        int at = indexFirstOf(tmp, isStartedFromPath ? "/?#" : ":/?#", from);
        if (at == -1) {
            at = 0;
        }
        /*
         * Parse the scheme (e.g. "http").
         */
        if (at > 0 && at < length && tmp.charAt(at) == ':') {
            char[] target = tmp.substring(0, at).toLowerCase().toCharArray();
            if (validate(target, scheme)) {
                _scheme = target;
                from = ++at;
            } else {
                // IA CHANGE:
                // do nothing; allow interpretation as URI with
                // later colon in other syntactical component
            }
        }
        /*
         * Parse the authority component (e.g. "jakarta.apache.org").
         */
        // Reset flags
        _is_net_path = _is_abs_path = _is_rel_path = _is_hier_part = false;
        if (0 <= at && at < length && tmp.charAt(at) == '/') {
            // Set flag
            _is_hier_part = true;
            if (at + 2 < length && tmp.charAt(at + 1) == '/' && !isStartedFromPath) {
                // the temporary index to start the search from
                int next = indexFirstOf(tmp, "/?#", at + 2);
                if (next == -1) {
                    next = (tmp.substring(at + 2).length() == 0) ? at + 2 : tmp.length();
                }
                parseAuthority(tmp.substring(at + 2, next), escaped);
                from = at = next;
                // Set flag
                _is_net_path = true;
            }
            if (from == at) {
                // Set flag
                _is_abs_path = true;
            }
        }
        /*
         * Parse the path component (e.g. "/ietf/uri/").
         */
        if (from < length) {
            // rel_path = rel_segment [ abs_path ]
            int next = indexFirstOf(tmp, "?#", from);
            if (next == -1) {
                next = tmp.length();
            }
            if (!_is_abs_path) {
                // Classify the remainder as a relative path or an opaque part;
                // unescaped input is checked against disallowed characters,
                // escaped input against the allowed character classes.
                if (!escaped && prevalidate(tmp.substring(from, next), disallowed_rel_path)
                        || escaped && validate(tmp.substring(from, next).toCharArray(), rel_path)) {
                    // Set flag
                    _is_rel_path = true;
                } else if (!escaped && prevalidate(tmp.substring(from, next), disallowed_opaque_part)
                        || escaped && validate(tmp.substring(from, next).toCharArray(), opaque_part)) {
                    // Set flag
                    _is_opaque_part = true;
                } else {
                    // the path component may be empty
                    _path = null;
                }
            }
            String s = tmp.substring(from, next);
            if (escaped) {
                setRawPath(s.toCharArray());
            } else {
                setPath(s);
            }
            at = next;
        }
        // set the charset to do escape encoding
        String charset = getProtocolCharset();
        /*
         * Parse the query component (text after '?', up to '#' or end).
         */
        if (0 <= at && at + 1 < length && tmp.charAt(at) == '?') {
            int next = tmp.indexOf('#', at + 1);
            if (next == -1) {
                next = tmp.length();
            }
            if (escaped) {
                _query = tmp.substring(at + 1, next).toCharArray();
                if (!validate(_query, query)) {
                    throw new URIException("Invalid query");
                }
            } else {
                _query = encode(tmp.substring(at + 1, next), allowed_query, charset);
            }
            at = next;
        }
        /*
         * Parse the fragment component (text after '#').
         */
        if (0 <= at && at + 1 <= length && tmp.charAt(at) == '#') {
            if (at + 1 == length) { // empty fragment
                _fragment = "".toCharArray();
            } else {
                _fragment = (escaped) ? tmp.substring(at + 1).toCharArray()
                        : encode(tmp.substring(at + 1), allowed_fragment, charset);
            }
        }
        // set this URI.
        setURI();
    }
}
public class LoggingPropertyOracle { /** * Try to find a counterexample to the given hypothesis , and log whenever such a spurious counterexample is found . * @ see PropertyOracle # findCounterExample ( Object , Collection ) */ @ Nullable @ Override public DefaultQuery < I , D > doFindCounterExample ( A hypothesis , Collection < ? extends I > inputs ) throws ModelCheckingException { } }
final DefaultQuery < I , D > result = propertyOracle . findCounterExample ( hypothesis , inputs ) ; if ( result != null ) { LOGGER . logEvent ( "Spurious counterexample found for property: '" + toString ( ) + "'" ) ; LOGGER . logCounterexample ( "Spurious counterexample: " + result ) ; } return result ;
public class Query { /** * < pre > * { field : < field > , regex : < ^ string $ > , caseInsensitive : true , . . . } * < / pre > */ public static Query withStringIgnoreCase ( String field , String value ) { } }
return Query . withString ( field , value , true ) ;
public class SeekableInMemoryByteChannel { /** * { @ inheritDoc } * @ see java . nio . channels . SeekableByteChannel # write ( java . nio . ByteBuffer ) */ @ Override public int write ( final ByteBuffer source ) throws IOException { } }
// Precondition checks this . checkClosed ( ) ; if ( source == null ) { throw new IllegalArgumentException ( "Source buffer must be supplied" ) ; } // Put the bytes to be written into a byte [ ] final int totalBytes = source . remaining ( ) ; final byte [ ] readContents = new byte [ totalBytes ] ; source . get ( readContents ) ; // Sync up , we ' re gonna access shared mutable state synchronized ( this ) { // Append the read contents to our internal contents this . contents = this . concat ( this . contents , readContents , this . position ) ; // Increment the position of this channel this . position += totalBytes ; } // Return the number of bytes read return totalBytes ;
public class CmsHtmlList { /** * Generates the need html code for ending a list . < p > * @ return html code */ protected String htmlEnd ( ) { } }
StringBuffer html = new StringBuffer ( 512 ) ; html . append ( "\t\t\t</td></tr>\n" ) ; html . append ( "\t\t</table>\n" ) ; if ( isBoxed ( ) ) { html . append ( getWp ( ) . dialogBlock ( CmsWorkplace . HTML_END , m_name . key ( getWp ( ) . getLocale ( ) ) , false ) ) ; } html . append ( "</div>\n" ) ; return html . toString ( ) ;
public class ApiOvhXdsl { /** * Alter this object properties * REST : PUT / xdsl / { serviceName } / modem / wifi / { wifiName } * @ param body [ required ] New object properties * @ param serviceName [ required ] The internal name of your XDSL offer * @ param wifiName [ required ] Name of the Wifi */ public void serviceName_modem_wifi_wifiName_PUT ( String serviceName , String wifiName , OvhWLAN body ) throws IOException { } }
String qPath = "/xdsl/{serviceName}/modem/wifi/{wifiName}" ; StringBuilder sb = path ( qPath , serviceName , wifiName ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class MemcachedConnection { /** * Log a exception to different levels depending on the state . * Exceptions get logged at debug level when happening during shutdown , but * at warning level when operating normally . * @ param e the exception to log . */ private void logRunException ( final Exception e ) { } }
if ( shutDown ) { getLogger ( ) . debug ( "Exception occurred during shutdown" , e ) ; } else { getLogger ( ) . warn ( "Problem handling memcached IO" , e ) ; }
public class TimestampUtils {
    /**
     * <p>Given a UTC timestamp {@code millis} finds another point in time that is
     * rendered in given time zone {@code tz} exactly as "millis in UTC".</p>
     *
     * <p>For instance, given 7 Jan 16:00 UTC and tz=GMT+02:00 it returns
     * 7 Jan 14:00 UTC == 7 Jan 16:00 GMT+02:00. Note that is not trivial for
     * timestamps near a DST change. For such cases, we rely on {@link Calendar}
     * to figure out the proper timestamp.</p>
     *
     * @param millis source timestamp
     * @param tz     desired time zone (may be null; defaults to the system zone)
     * @return timestamp that would be rendered in {@code tz} like {@code millis} in UTC
     */
    private long guessTimestamp(long millis, TimeZone tz) {
        if (tz == null) {
            // If client did not provide us with time zone, we use system default time zone
            tz = getDefaultTz();
        }
        // The story here:
        // Backend provided us with something like '2015-10-04 13:40' and it did NOT
        // provide us with a time zone.
        // On top of that, user asked us to treat the timestamp as if it were in GMT+02:00.
        // The code below creates such a timestamp that is rendered as
        // '2015-10-04 13:40 GMT+02:00'. In other words, its UTC value should be
        // 11:40 UTC == 13:40 GMT+02:00.
        // It is not sufficient to just subtract offset as you might cross a DST change
        // as you subtract.
        // For instance, on 2000-03-26 02:00:00 Moscow went to DST, thus local time
        // became 03:00:00. Suppose we deal with 2000-03-26 02:00:01.
        // If you subtract offset from the timestamp, the time will be "an hour behind"
        // since "just a couple of hours ago the OFFSET was different".
        // To make a long story short: we have a UTC timestamp that looks like
        // "2000-03-26 02:00:01" when rendered in UTC tz. We want to know another
        // timestamp that will look like "2000-03-26 02:00:01" in the Europe/Moscow
        // time zone.
        if (isSimpleTimeZone(tz.getID())) {
            // For well-known non-DST time zones, just subtract offset
            return millis - tz.getRawOffset();
        }
        // For all the other time zones, enjoy debugging Calendar API.
        // Here we do a straight-forward implementation that splits the original
        // timestamp into pieces and composes it back.
        // Note: cal.setTimeZone alone is not sufficient as it would alter the hour
        // (it will try to keep the same time instant value).
        // NOTE(review): calendarWithUserTz appears to be a shared instance field;
        // mutation order below is significant — confirm callers serialize access.
        Calendar cal = calendarWithUserTz;
        // Decompose millis into calendar fields as seen in UTC...
        cal.setTimeZone(utcTz);
        cal.setTimeInMillis(millis);
        int era = cal.get(Calendar.ERA);
        int year = cal.get(Calendar.YEAR);
        int month = cal.get(Calendar.MONTH);
        int day = cal.get(Calendar.DAY_OF_MONTH);
        int hour = cal.get(Calendar.HOUR_OF_DAY);
        int min = cal.get(Calendar.MINUTE);
        int sec = cal.get(Calendar.SECOND);
        int ms = cal.get(Calendar.MILLISECOND);
        // ...then reassemble the exact same wall-clock fields in the target zone,
        // letting Calendar resolve any DST ambiguity.
        cal.setTimeZone(tz);
        cal.set(Calendar.ERA, era);
        cal.set(Calendar.YEAR, year);
        cal.set(Calendar.MONTH, month);
        cal.set(Calendar.DAY_OF_MONTH, day);
        cal.set(Calendar.HOUR_OF_DAY, hour);
        cal.set(Calendar.MINUTE, min);
        cal.set(Calendar.SECOND, sec);
        cal.set(Calendar.MILLISECOND, ms);
        return cal.getTimeInMillis();
    }
}
public class SqlApplicationConfigurationUpdate { /** * The array of < a > ReferenceDataSourceUpdate < / a > objects describing the new reference data sources used by the * application . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setReferenceDataSourceUpdates ( java . util . Collection ) } or * { @ link # withReferenceDataSourceUpdates ( java . util . Collection ) } if you want to override the existing values . * @ param referenceDataSourceUpdates * The array of < a > ReferenceDataSourceUpdate < / a > objects describing the new reference data sources used by * the application . * @ return Returns a reference to this object so that method calls can be chained together . */ public SqlApplicationConfigurationUpdate withReferenceDataSourceUpdates ( ReferenceDataSourceUpdate ... referenceDataSourceUpdates ) { } }
if ( this . referenceDataSourceUpdates == null ) { setReferenceDataSourceUpdates ( new java . util . ArrayList < ReferenceDataSourceUpdate > ( referenceDataSourceUpdates . length ) ) ; } for ( ReferenceDataSourceUpdate ele : referenceDataSourceUpdates ) { this . referenceDataSourceUpdates . add ( ele ) ; } return this ;
public class SpiderService {
    /**
     * Delete all CFs used by the given application.
     *
     * @param appDef definition of the application whose storage is removed
     */
    @Override
    public void deleteApplication(ApplicationDefinition appDef) {
        // Presumably verifies the service is in a usable state before any
        // destructive work — TODO confirm checkServiceState semantics.
        checkServiceState();
        // Delete the application's column families.
        deleteApplicationCFs(appDef);
        // Drop any cached shard state for this application so stale entries
        // don't outlive the deleted CFs.
        m_shardCache.clear(appDef);
    }
}
public class InternalPureXbaseParser {
    /**
     * Entry rule for XConstructorCall: parses a complete XConstructorCall
     * followed by EOF and returns the resulting EObject.
     * <p>
     * NOTE: ANTLR-generated parser code (from InternalPureXbase.g:5033) —
     * do not edit by hand; regenerate from the grammar instead.
     * <p>
     * InternalPureXbase.g:5033:1: entryRuleXConstructorCall returns
     * [EObject current=null]: iv_ruleXConstructorCall=ruleXConstructorCall EOF;
     */
    public final EObject entryRuleXConstructorCall() throws RecognitionException {
        EObject current = null;
        EObject iv_ruleXConstructorCall = null;
        try {
            // InternalPureXbase.g:5033:57: (iv_ruleXConstructorCall= ruleXConstructorCall EOF)
            // InternalPureXbase.g:5034:2: iv_ruleXConstructorCall= ruleXConstructorCall EOF
            {
                // Only build the node model when not speculatively backtracking.
                if (state.backtracking == 0) {
                    newCompositeNode(grammarAccess.getXConstructorCallRule());
                }
                pushFollow(FOLLOW_1);
                iv_ruleXConstructorCall = ruleXConstructorCall();
                state._fsp--;
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current = iv_ruleXConstructorCall;
                }
                // The entry rule must consume the entire input.
                match(input, EOF, FOLLOW_2);
                if (state.failed) return current;
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: resynchronize and keep going.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}