idx
int64
0
41.2k
question
stringlengths
83
4.15k
target
stringlengths
5
715
22,900
/**
 * Formats a date as "yyyy-MM-dd HH:mm:ss.SSSZ" (RFC822 zone offset).
 *
 * @param d the date to format; may be null
 * @return the formatted string, or "" when d is null
 */
public static String getDateTimeStr(Date d) {
    if (d == null) {
        return "";
    }
    // SimpleDateFormat is not thread-safe, so a fresh instance per call is deliberate.
    return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSZ").format(d);
}
Gets the date time str .
22,901
/**
 * Formats a date in the "standard" dotted form "yyyy.MM.dd.HH.mm.ss.SSSZ".
 *
 * @param d the date to format; may be null
 * @return "" for null, "Never" for the epoch sentinel (time == 0), otherwise the formatted string
 */
public static String getDateTimeStrStandard(Date d) {
    if (d == null) {
        return "";
    }
    // A zero timestamp is used as a "never happened" sentinel by callers.
    if (d.getTime() == 0L) {
        return "Never";
    }
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy.MM.dd.HH.mm.ss.SSSZ");
    return formatter.format(d);
}
Gets the date time str standard .
22,902
/**
 * Formats a date in the concise, separator-free form "yyyyMMddHHmmssSSSZ".
 *
 * @param d the date to format; may be null
 * @return the formatted string, or "" when d is null
 */
public static String getDateTimeStrConcise(Date d) {
    if (d == null) {
        return "";
    }
    SimpleDateFormat concise = new SimpleDateFormat("yyyyMMddHHmmssSSSZ");
    return concise.format(d);
}
Gets the date time str concise .
22,903
/**
 * Parses a string produced by {@code getDateTimeStrConcise} ("yyyyMMddHHmmssSSSZ")
 * back into a Date.
 *
 * @param str the concise date string; may be null or empty
 * @return the parsed Date, or null when the input is null/empty or unparseable
 */
public static Date getDateFromConciseStr(String str) {
    if (str == null || str.isEmpty()) {
        return null;
    }
    Date parsed = null;
    try {
        parsed = new SimpleDateFormat("yyyyMMddHHmmssSSSZ").parse(str);
    } catch (Exception ex) {
        // Parse failures are logged and reported to the caller as null.
        logger.error(ex + "Exception while converting string to date : " + str);
    }
    return parsed;
}
20130512 Converts the sdsm string generated above to Date format .
22,904
/**
 * Handles a single HTTP response from the worker.
 *
 * <p>For non-pollable requests it simply replies to the sender. For pollable
 * requests it (1) extracts the job id from the first response, (2) updates
 * poller error/complete/progress state, and (3) either schedules the next
 * poll message to itself or falls through to the final reply once polling is
 * complete or errored out.
 *
 * @param respOnSingleReq the worker's response for one request
 * @throws Exception propagated from downstream processing
 */
private final void handleHttpWorkerResponse(ResponseOnSingeRequest respOnSingleReq) throws Exception {
    String responseContent = respOnSingleReq.getResponseBody();
    response.setResponseContent(respOnSingleReq.getResponseBody());
    if (request.isPollable()) {
        boolean scheduleNextPoll = false;
        boolean errorFindingUuid = false;
        // First poll response: try to extract the job id (uuid) to track progress with.
        if (!pollerData.isUuidHasBeenSet()) {
            String jobId = httpPollerProcessor.getUuidFromResponse(respOnSingleReq);
            if (jobId.equalsIgnoreCase(PcConstants.NA)) {
                // Fail fast: without a job id there is nothing to poll on.
                errorFindingUuid = true;
                pollingErrorCount++;
                logger.error("!!POLLING_JOB_FAIL_FIND_JOBID_IN_RESPONSE!! FAIL FAST NOW. PLEASE CHECK getJobIdRegex or retry. "
                        + "DEBUG: REGEX_JOBID: " + httpPollerProcessor.getJobIdRegex()
                        + "RESPONSE: " + respOnSingleReq.getResponseBody()
                        + " polling Error count" + pollingErrorCount
                        + " at " + PcDateUtils.getNowDateTimeStrStandard());
                pollerData.setError(true);
                pollerData.setComplete(true);
            } else {
                pollerData.setJobIdAndMarkHasBeenSet(jobId);
                pollerData.setError(httpPollerProcessor.ifThereIsErrorInResponse(respOnSingleReq));
            }
        }
        if (!pollerData.isError()) {
            pollerData.setComplete(httpPollerProcessor.ifTaskCompletedSuccessOrFailureFromResponse(respOnSingleReq));
            pollerData.setCurrentProgress(httpPollerProcessor.getProgressFromResponse(respOnSingleReq));
        }
        // Keep polling while incomplete and under the error budget.
        scheduleNextPoll = !pollerData.isComplete()
                && (pollingErrorCount <= httpPollerProcessor.getMaxPollError());
        if (scheduleNextPoll && (pollingErrorCount <= httpPollerProcessor.getMaxPollError())) {
            pollMessageCancellable = getContext().system().scheduler().scheduleOnce(
                    Duration.create(httpPollerProcessor.getPollIntervalMillis(), TimeUnit.MILLISECONDS),
                    getSelf(), OperationWorkerMsgType.POLL_PROGRESS,
                    getContext().system().dispatcher(), getSelf());
            // NOTE(review): format string has 2 placeholders but 3 args; the trailing
            // timestamp arg is silently ignored by String.format — confirm intent.
            logger.info("\nPOLLER_NOW_ANOTHER_POLL: POLL_RECV_SEND"
                    + String.format("PROGRESS:%.3f, BODY:%s ", pollerData.getCurrentProgress(),
                            responseContent, PcDateUtils.getNowDateTimeStrStandard()));
            String responseContentNew = errorFindingUuid
                    ? responseContent + "_PollingErrorCount:" + pollingErrorCount
                    : responseContent;
            logger.info(responseContentNew);
            pollerData.getPollingHistoryMap().put(
                    "RECV_" + PcDateUtils.getNowDateTimeStrConciseNoZone(),
                    String.format("PROGRESS:%.3f, BODY:%s", pollerData.getCurrentProgress(), responseContent));
            // Early return: the final reply happens only after polling finishes.
            return;
        } else {
            pollerData.getPollingHistoryMap().put(
                    "RECV_" + PcDateUtils.getNowDateTimeStrConciseNoZone(),
                    String.format("POLL_COMPLETED_OR_ERROR: PROGRESS:%.3f, BODY:%s ",
                            pollerData.getCurrentProgress(), responseContent));
        }
    }
    // Non-pollable request, or polling has completed/errored: send the final reply.
    reply(respOnSingleReq.isFailObtainResponse(), respOnSingleReq.getErrorMessage(),
            respOnSingleReq.getStackTrace(), respOnSingleReq.getStatusCode(),
            respOnSingleReq.getStatusCodeInt(), respOnSingleReq.getReceiveTime(),
            respOnSingleReq.getResponseHeaders());
}
Handle http worker response .
22,905
/**
 * Handles the initial request from the manager: records the sender and timing,
 * creates the protocol-specific worker actor (HTTP/HTTPS, SSH, TCP, UDP, PING),
 * tells it to start, and schedules the operation timeout.
 *
 * <p>For HTTP(S), the full URL is assembled and validated first; an invalid URL
 * short-circuits with an error reply and no worker is created.
 */
private final void processMainRequest() {
    sender = getSender();
    startTimeMillis = System.currentTimeMillis();
    timeoutDuration = Duration.create(request.getActorMaxOperationTimeoutSec(), TimeUnit.SECONDS);
    actorMaxOperationTimeoutSec = request.getActorMaxOperationTimeoutSec();
    if (request.getProtocol() == RequestProtocol.HTTP
            || request.getProtocol() == RequestProtocol.HTTPS) {
        // e.g. "https://host:8443/path"
        String urlComplete = String.format("%s://%s:%d%s", request.getProtocol().toString(),
                trueTargetNode, request.getPort(), request.getResourcePath());
        if (!PcHttpUtils.isUrlValid(urlComplete.trim())) {
            String errMsg = "INVALID_URL";
            logger.error("INVALID_URL: " + urlComplete + " return..");
            replyErrors(errMsg, errMsg, PcConstants.NA, PcConstants.NA_INT);
            return;
        } else {
            logger.debug("url pass validation: " + urlComplete);
        }
        asyncWorker = getContext().actorOf(Props.create(HttpWorker.class,
                actorMaxOperationTimeoutSec, client, urlComplete, request.getHttpMethod(),
                request.getPostData(), request.getHttpHeaderMap(), request.getResponseHeaderMeta()));
    } else if (request.getProtocol() == RequestProtocol.SSH) {
        asyncWorker = getContext().actorOf(Props.create(SshWorker.class,
                actorMaxOperationTimeoutSec, request.getSshMeta(), trueTargetNode));
    } else if (request.getProtocol() == RequestProtocol.TCP) {
        asyncWorker = getContext().actorOf(Props.create(TcpWorker.class,
                actorMaxOperationTimeoutSec, request.getTcpMeta(), trueTargetNode));
    } else if (request.getProtocol() == RequestProtocol.UDP) {
        asyncWorker = getContext().actorOf(Props.create(UdpWorker.class,
                actorMaxOperationTimeoutSec, request.getUdpMeta(), trueTargetNode));
    } else if (request.getProtocol() == RequestProtocol.PING) {
        asyncWorker = getContext().actorOf(Props.create(PingWorker.class,
                actorMaxOperationTimeoutSec, request.getPingMeta(), trueTargetNode));
    }
    // NOTE(review): an unrecognized protocol leaves asyncWorker unset here —
    // the tell below would then hit whatever asyncWorker previously held; confirm
    // upstream validation guarantees one of the branches always matches.
    asyncWorker.tell(RequestWorkerMsgType.PROCESS_REQUEST, getSelf());
    cancelExistingIfAnyAndScheduleTimeoutCall();
}
the 1st request from the manager .
22,906
/**
 * Fired when the operation timeout elapses. Cancels any pending timeout
 * callable, then either asks the live worker to process the timeout itself,
 * or — when the worker is null/terminated — replies with a timeout error
 * directly.
 */
@SuppressWarnings("deprecation")
private final void operationTimeout() {
    cancelCancellable();
    boolean workerAlive = asyncWorker != null && !asyncWorker.isTerminated();
    if (workerAlive) {
        asyncWorker.tell(RequestWorkerMsgType.PROCESS_ON_TIMEOUT, getSelf());
    } else {
        logger.info("asyncWorker has been killed or uninitialized (null). "
                + "Not send PROCESS ON TIMEOUT.\nREQ: " + request.toString());
        replyErrors(PcConstants.OPERATION_TIMEOUT, PcConstants.OPERATION_TIMEOUT,
                PcConstants.NA, PcConstants.NA_INT);
    }
}
will trigger workers to cancel then wait for it to report back .
22,907
/**
 * Convenience reply for error cases: marks the response as failed, uses NA
 * for the receive time, and sends null response headers.
 *
 * @param errorMessage  human-readable error
 * @param stackTrace    captured stack trace text
 * @param statusCode    status code string
 * @param statusCodeInt numeric status code
 */
private final void replyErrors(final String errorMessage, final String stackTrace,
        final String statusCode, final int statusCodeInt) {
    reply(true, errorMessage, stackTrace, statusCode, statusCodeInt, PcConstants.NA, null);
}
Reply used in error cases . set the response header as null .
22,908
/**
 * Adds every entry of headerMap as an HTTP header on the request builder.
 *
 * @param builder   the request builder to receive the headers
 * @param headerMap header name/value pairs; a null map is treated as "no headers"
 */
public static void addHeaders(BoundRequestBuilder builder, Map<String, String> headerMap) {
    // FIX: guard against a null map — the original threw NPE when no headers were set.
    if (headerMap == null) {
        return;
    }
    for (Entry<String, String> entry : headerMap.entrySet()) {
        builder.addHeader(entry.getKey(), entry.getValue());
    }
}
Adds the headers .
22,909
/**
 * Cancels the task on the given subset of target hosts.
 *
 * <p>Only acts when the task is IN_PROGRESS and the execution manager actor is
 * still alive; completed/waiting states are a logged no-op that still returns
 * success. Any exception is logged and reported as failure.
 *
 * @param targetHosts hosts on which to cancel the in-flight work
 * @return true when the request was dispatched or deliberately skipped; false on exception
 */
@SuppressWarnings("deprecation")
public boolean cancelOnTargetHosts(List<String> targetHosts) {
    boolean success = false;
    try {
        switch (state) {
        case IN_PROGRESS:
            if (executionManager != null && !executionManager.isTerminated()) {
                // Sender is the manager itself; the manager fans the cancel out to workers.
                executionManager.tell(new CancelTaskOnHostRequest(targetHosts), executionManager);
                logger.info("asked task to stop from running on target hosts with count {}...",
                        targetHosts.size());
            } else {
                logger.info("manager already killed or not exist.. NO OP");
            }
            success = true;
            break;
        case COMPLETED_WITHOUT_ERROR:
        case COMPLETED_WITH_ERROR:
        case WAITING:
            // Nothing to cancel in terminal/not-yet-started states.
            logger.info("will NO OP for cancelOnTargetHost as it is not in IN_PROGRESS state");
            success = true;
            break;
        default:
            break;
        }
    } catch (Exception e) {
        logger.error("cancel task {} on hosts with count {} error with exception details ",
                this.getTaskId(), targetHosts.size(), e);
    }
    return success;
}
Cancel on target hosts .
22,910
/**
 * Generates a task id of the form "PT_&lt;hostCount&gt;_&lt;timestamp&gt;_&lt;shortUuid&gt;".
 *
 * @return the generated id; host count is 0 when targetHostMeta is null
 */
public String generateTaskId() {
    int hostCount = (this.targetHostMeta == null) ? 0 : this.targetHostMeta.getHosts().size();
    // 12-char UUID prefix keeps ids short while remaining effectively unique.
    final String shortUuid = UUID.randomUUID().toString().substring(0, 12);
    return "PT_" + hostCount + "_" + PcDateUtils.getNowDateTimeStrConciseNoZone() + "_" + shortUuid;
}
Gen job id .
22,911
/**
 * Returns the task progress as a percentage in [0, 100].
 *
 * <p>WAITING → 0.0; either COMPLETED state → 100.0; IN_PROGRESS →
 * responded/actual ratio (0.0 when no actual requests exist).
 *
 * @return progress percentage
 */
public Double getProgress() {
    if (state.equals(ParallelTaskState.IN_PROGRESS)) {
        // FIX: the original guarded on requestNum but divided by requestNumActual,
        // which could produce Infinity/NaN when requestNumActual was 0.
        if (requestNumActual != 0) {
            return 100.0 * ((double) responsedNum / (double) requestNumActual);
        } else {
            return 0.0;
        }
    }
    if (state.equals(ParallelTaskState.WAITING)) {
        return 0.0;
    }
    if (state.equals(ParallelTaskState.COMPLETED_WITH_ERROR)
            || state.equals(ParallelTaskState.COMPLETED_WITHOUT_ERROR)) {
        return 100.0;
    }
    return 0.0;
}
Gets the progress .
22,912
/**
 * Builds the full aggregation summary: for each aggregate key, the set of
 * matching hosts wrapped with its count.
 *
 * @return a new concurrent map keyed by aggregate value
 */
public Map<String, SetAndCount> getAggregateResultFullSummary() {
    final Map<String, SetAndCount> summary = new ConcurrentHashMap<String, SetAndCount>();
    for (Entry<String, LinkedHashSet<String>> aggregate : aggregateResultMap.entrySet()) {
        summary.put(aggregate.getKey(), new SetAndCount(aggregate.getValue()));
    }
    return summary;
}
Aggregate results to see the status code distribution with target hosts .
22,913
/**
 * Builds a brief aggregation summary: for each aggregate key, only the number
 * of matching hosts (insertion order preserved).
 *
 * @return a new LinkedHashMap of key → host count
 */
public Map<String, Integer> getAggregateResultCountSummary() {
    Map<String, Integer> counts = new LinkedHashMap<String, Integer>();
    for (Entry<String, LinkedHashSet<String>> aggregate : aggregateResultMap.entrySet()) {
        counts.put(aggregate.getKey(), aggregate.getValue().size());
    }
    return counts;
}
Gets the aggregate result count summary . only list the counts for brief understanding
22,914
/**
 * Samples current JVM heap usage in megabytes and caches it in
 * {@code currentJvmPerformUsage}.
 *
 * @return a freshly populated PerformUsage snapshot
 */
public PerformUsage getJVMMemoryUsage() {
    int mb = 1024 * 1024;
    Runtime rt = Runtime.getRuntime();
    PerformUsage usage = new PerformUsage();
    usage.totalMemory = (double) rt.totalMemory() / mb;
    usage.freeMemory = (double) rt.freeMemory() / mb;
    // FIX: original computed "rt.freeMemory() / mb" with integer (long) division
    // before subtracting, truncating the fractional MB of the free side.
    usage.usedMemory = (double) (rt.totalMemory() - rt.freeMemory()) / mb;
    usage.maxMemory = (double) rt.maxMemory() / mb;
    usage.memoryUsagePercent = usage.usedMemory / usage.maxMemory * 100.0;
    currentJvmPerformUsage = usage;
    return usage;
}
Gets the JVM memory usage .
22,915
/**
 * Dumps all live threads, including locked monitors and locked synchronizers.
 *
 * @return thread info for every live thread
 */
public ThreadInfo[] getThreadDump() {
    return ManagementFactory.getThreadMXBean().dumpAllThreads(true, true);
}
Gets the thread dump .
22,916
/**
 * Collects per-thread name, state and CPU time for all live threads.
 *
 * @return a populated ThreadUsage; threads that terminate during collection are skipped
 */
public ThreadUsage getThreadUsage() {
    ThreadMXBean threadMxBean = ManagementFactory.getThreadMXBean();
    ThreadUsage threadUsage = new ThreadUsage();
    long[] threadIds = threadMxBean.getAllThreadIds();
    threadUsage.liveThreadCount = threadIds.length;
    for (long tId : threadIds) {
        ThreadInfo threadInfo = threadMxBean.getThreadInfo(tId);
        // FIX: getThreadInfo returns null for a thread that died since
        // getAllThreadIds(); the original NPE'd in that race window.
        if (threadInfo == null) {
            continue;
        }
        threadUsage.threadData.put(Long.toString(tId),
                new ThreadData(threadInfo.getThreadName(),
                        threadInfo.getThreadState().name(),
                        threadMxBean.getThreadCpuTime(tId)));
    }
    return threadUsage;
}
Gets the thread usage .
22,917
/**
 * Builds a human-readable JVM health report. Always includes the memory
 * snapshot; when usage is at or above THRESHOLD_PERCENT it additionally
 * appends the live-thread list and a full thread dump.
 *
 * @return the report text
 */
public String getHealthMemory() {
    StringBuilder report = new StringBuilder();
    report.append("Logging JVM Stats\n");
    MonitorProvider provider = MonitorProvider.getInstance();
    PerformUsage memUsage = provider.getJVMMemoryUsage();
    report.append(memUsage.toString());
    boolean overThreshold = memUsage.memoryUsagePercent >= THRESHOLD_PERCENT;
    if (overThreshold) {
        report.append("========= WARNING: MEM USAGE > " + THRESHOLD_PERCENT + "!!");
        report.append(" !! Live Threads List=============\n");
        report.append(provider.getThreadUsage().toString());
        report.append("========================================\n");
        report.append("========================JVM Thread Dump====================\n");
        for (ThreadInfo info : provider.getThreadDump()) {
            report.append(info.toString() + "\n");
        }
        report.append("===========================================================\n");
    }
    report.append("Logged JVM Stats\n");
    return report.toString();
}
Gets the health memory .
22,918
/**
 * Shuts down every async HTTP client held in the map, skipping null entries.
 */
public void shutdown() {
    for (Entry<HttpClientType, AsyncHttpClient> clientEntry : map.entrySet()) {
        AsyncHttpClient ahc = clientEntry.getValue();
        if (ahc != null) {
            ahc.close();
        }
    }
}
Shutdown each AHC client in the map .
22,919
/**
 * Loads content either from a local file or from a URL, depending on the
 * source type. The whole content is read into memory, so this is not suited
 * to very large files — fine for host lists.
 *
 * @param sourcePath file path or URL
 * @param sourceType LOCAL_FILE or URL
 * @return the content, or "" for an unrecognized source type
 * @throws IOException on read failure
 */
private String getContentFromPath(String sourcePath, HostsSourceType sourceType) throws IOException {
    if (sourceType == HostsSourceType.LOCAL_FILE) {
        return PcFileNetworkIoUtils.readFileContentToString(sourcePath);
    }
    if (sourceType == HostsSourceType.URL) {
        return PcFileNetworkIoUtils.readStringFromUrlGeneric(sourcePath);
    }
    return "";
}
note that for read from file this will just load all to memory . not fit if need to read a very large file . However for getting the host name . normally it is fine .
22,920
/**
 * Loads target hosts from a line-by-line text source (local file or URL).
 *
 * @param sourcePath file path or URL of the host list
 * @param sourceType where to read from
 * @return the parsed target host list
 * @throws TargetHostsLoadException when reading the source fails
 */
public List<String> setTargetHostsFromLineByLineText(String sourcePath,
        HostsSourceType sourceType) throws TargetHostsLoadException {
    List<String> targetHosts = new ArrayList<String>();
    try {
        String content = getContentFromPath(sourcePath, sourceType);
        targetHosts = setTargetHostsFromString(content);
    } catch (IOException e) {
        // FIX: exception message typo corrected ("IEException" -> "IOException").
        throw new TargetHostsLoadException("IOException when reading " + sourcePath, e);
    }
    return targetHosts;
}
get target hosts from line by line .
22,921
/**
 * Creates the UDP client bootstrap wired with the UDP pipeline (shared timer
 * + this worker as handler).
 *
 * @return the configured bootstrap
 * @throws HttpRequestCreateException (via TcpUdpRequestCreateException) when setup fails
 */
public ConnectionlessBootstrap bootStrapUdpClient() throws HttpRequestCreateException {
    try {
        ConnectionlessBootstrap bootstrap = new ConnectionlessBootstrap(udpMeta.getChannelFactory());
        UdpPipelineFactory pipelineFactory = new UdpPipelineFactory(
                TcpUdpSshPingResourceStore.getInstance().getTimer(), this);
        bootstrap.setPipeline(pipelineFactory.getPipeline());
        return bootstrap;
    } catch (Exception t) {
        throw new TcpUdpRequestCreateException("Error in creating request in udp worker. "
                + " If udpClient is null. Then fail to create.", t);
    }
}
Creates the udpClient with proper handler .
22,922
/**
 * Initializes the shared client resources (actor system, HttpClientStore,
 * TCP/SSH/Ping resource store) when they are currently closed; otherwise a
 * debug-logged no-op.
 */
public void initialize() {
    if (isClosed.get()) {
        // FIX: log message typo corrected ("Initialing" -> "Initializing").
        logger.info("Initializing Parallel Client Resources: actor system, HttpClientStore, Task Manager ....");
        ActorConfig.createAndGetActorSystem();
        httpClientStore.init();
        tcpSshPingResourceStore.init();
        isClosed.set(false);
        logger.info("Parallel Client Resources has been initialized.");
    } else {
        logger.debug("NO OP. Parallel Client Resources has already been initialized.");
    }
}
Initialize . create the httpClientStore tcpClientStore
22,923
/**
 * Re-initializes external resources (actor system, client stores) when they
 * were previously released; a no-op otherwise. Sleeps briefly to let the
 * stores settle before marking the resources open.
 */
public void reinitIfClosed() {
    if (isClosed.get()) {
        logger.info("External Resource was released. Now Re-initializing resources ...");
        ActorConfig.createAndGetActorSystem();
        httpClientStore.reinit();
        tcpSshPingResourceStore.reinit();
        try {
            Thread.sleep(1000L);
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag instead of swallowing it,
            // so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error("error reinit httpClientStore", e);
        }
        isClosed.set(false);
        logger.info("Parallel Client Resources has been reinitialized.");
    } else {
        logger.debug("NO OP. Resource was not released.");
    }
}
Auto re - initialize external resourced if resources have been already released .
22,924
/**
 * Prepares a parallel SSH task builder (re-initializing resources if needed).
 *
 * @return a builder preset to the SSH protocol
 */
public ParallelTaskBuilder prepareSsh() {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.setProtocol(RequestProtocol.SSH);
    return builder;
}
Prepare a parallel SSH Task .
22,925
/**
 * Prepares a parallel PING task builder (re-initializing resources if needed).
 *
 * @return a builder preset to the PING protocol
 */
public ParallelTaskBuilder preparePing() {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.setProtocol(RequestProtocol.PING);
    return builder;
}
Prepare a parallel PING Task .
22,926
/**
 * Prepares a parallel TCP task builder with the given command
 * (re-initializing resources if needed).
 *
 * @param command the command to send over TCP
 * @return a builder preset to the TCP protocol
 */
public ParallelTaskBuilder prepareTcp(String command) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.setProtocol(RequestProtocol.TCP);
    builder.getTcpMeta().setCommand(command);
    return builder;
}
Prepare a parallel TCP Task .
22,927
/**
 * Prepares a parallel UDP task builder with the given command
 * (re-initializing resources if needed).
 *
 * @param command the command to send over UDP
 * @return a builder preset to the UDP protocol
 */
public ParallelTaskBuilder prepareUdp(String command) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.setProtocol(RequestProtocol.UDP);
    builder.getUdpMeta().setCommand(command);
    return builder;
}
Prepare a parallel UDP Task .
22,928
/**
 * Prepares a parallel HTTP GET task builder (re-initializing resources if
 * needed). Note: unlike the SSH/TCP/UDP/PING variants, no explicit protocol
 * is set here — presumably HTTP is the builder default; confirm if reused.
 *
 * @param url the URL postfix (resource path) for the GET
 * @return a builder preset to HTTP GET
 */
public ParallelTaskBuilder prepareHttpGet(String url) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.getHttpMeta().setHttpMethod(HttpMethod.GET);
    builder.getHttpMeta().setRequestUrlPostfix(url);
    return builder;
}
Prepare a parallel HTTP GET Task .
22,929
/**
 * Prepares a parallel HTTP POST task builder (re-initializing resources if needed).
 *
 * @param url the URL postfix (resource path) for the POST
 * @return a builder preset to HTTP POST
 */
public ParallelTaskBuilder prepareHttpPost(String url) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.getHttpMeta().setHttpMethod(HttpMethod.POST);
    builder.getHttpMeta().setRequestUrlPostfix(url);
    return builder;
}
Prepare a parallel HTTP POST Task .
22,930
/**
 * Prepares a parallel HTTP DELETE task builder (re-initializing resources if needed).
 *
 * @param url the URL postfix (resource path) for the DELETE
 * @return a builder preset to HTTP DELETE
 */
public ParallelTaskBuilder prepareHttpDelete(String url) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.getHttpMeta().setHttpMethod(HttpMethod.DELETE);
    builder.getHttpMeta().setRequestUrlPostfix(url);
    return builder;
}
Prepare a parallel HTTP DELETE Task .
22,931
/**
 * Prepares a parallel HTTP PUT task builder (re-initializing resources if needed).
 *
 * @param url the URL postfix (resource path) for the PUT
 * @return a builder preset to HTTP PUT
 */
public ParallelTaskBuilder prepareHttpPut(String url) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.getHttpMeta().setHttpMethod(HttpMethod.PUT);
    builder.getHttpMeta().setRequestUrlPostfix(url);
    return builder;
}
Prepare a parallel HTTP PUT Task .
22,932
/**
 * Prepares a parallel HTTP HEAD task builder (re-initializing resources if needed).
 *
 * @param url the URL postfix (resource path) for the HEAD
 * @return a builder preset to HTTP HEAD
 */
public ParallelTaskBuilder prepareHttpHead(String url) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.getHttpMeta().setHttpMethod(HttpMethod.HEAD);
    builder.getHttpMeta().setRequestUrlPostfix(url);
    return builder;
}
Prepare a parallel HTTP HEAD Task .
22,933
/**
 * Prepares a parallel HTTP OPTIONS task builder (re-initializing resources if needed).
 *
 * @param url the URL postfix (resource path) for the OPTIONS
 * @return a builder preset to HTTP OPTIONS
 */
public ParallelTaskBuilder prepareHttpOptions(String url) {
    reinitIfClosed();
    final ParallelTaskBuilder builder = new ParallelTaskBuilder();
    builder.getHttpMeta().setHttpMethod(HttpMethod.OPTIONS);
    builder.getHttpMeta().setRequestUrlPostfix(url);
    return builder;
}
Prepare a parallel HTTP OPTION Task .
22,934
/**
 * Sends a CANCEL message to every live worker actor, then logs the
 * cancellation timestamp.
 */
@SuppressWarnings("deprecation")
private void cancelRequestAndWorkers() {
    for (ActorRef workerRef : workers.values()) {
        boolean alive = workerRef != null && !workerRef.isTerminated();
        if (alive) {
            workerRef.tell(OperationWorkerMsgType.CANCEL, getSelf());
        }
    }
    logger.info("ExecutionManager sending cancelPendingRequest at time: "
            + PcDateUtils.getNowDateTimeStr());
}
Cancel request and workers .
22,935
/**
 * Sends CANCEL to the worker of each requested host that this manager
 * actually tracks; hosts without a live worker are logged and skipped.
 *
 * @param targetHosts hosts on which to cancel in-flight work
 */
@SuppressWarnings("deprecation")
private void cancelRequestAndWorkerOnHost(List<String> targetHosts) {
    // Intersect the requested hosts with those this manager has workers for.
    List<String> validTargetHosts = new ArrayList<String>(workers.keySet());
    validTargetHosts.retainAll(targetHosts);
    logger.info("targetHosts for cancel: Total: {}"
            + " Valid in current manager with worker threads: {}",
            targetHosts.size(), validTargetHosts.size());
    for (String targetHost : validTargetHosts) {
        ActorRef workerRef = workers.get(targetHost);
        boolean alive = workerRef != null && !workerRef.isTerminated();
        if (alive) {
            workerRef.tell(OperationWorkerMsgType.CANCEL, getSelf());
            logger.info("Submitted CANCEL request on Host {}", targetHost);
        } else {
            logger.info("Did NOT Submitted "
                    + "CANCEL request on Host {} as worker on this host is null or already killed",
                    targetHost);
        }
    }
}
Cancel request and worker on host .
22,936
/**
 * Creates (but does not connect) a JSch SSH session for the target host.
 *
 * <p>For KEY login the private key is resolved relative to the working
 * directory and must exist; an optional passphrase is applied. For PASSWORD
 * login the password is set on the session. Strict host key checking is
 * disabled. Any failure is rethrown as an unchecked RuntimeException.
 *
 * @return the configured, unconnected session
 */
public Session startSshSessionAndObtainSession() {
    Session session = null;
    try {
        JSch jsch = new JSch();
        if (sshMeta.getSshLoginType() == SshLoginType.KEY) {
            // Key path is relative to the JVM working directory.
            String workingDir = System.getProperty("user.dir");
            String privKeyAbsPath = workingDir + "/" + sshMeta.getPrivKeyRelativePath();
            logger.debug("use privkey: path: " + privKeyAbsPath);
            if (!PcFileNetworkIoUtils.isFileExist(privKeyAbsPath)) {
                throw new RuntimeException("file not found at " + privKeyAbsPath);
            }
            if (sshMeta.isPrivKeyUsePassphrase() && sshMeta.getPassphrase() != null) {
                jsch.addIdentity(privKeyAbsPath, sshMeta.getPassphrase());
            } else {
                jsch.addIdentity(privKeyAbsPath);
            }
        }
        session = jsch.getSession(sshMeta.getUserName(), targetHost, sshMeta.getSshPort());
        if (sshMeta.getSshLoginType() == SshLoginType.PASSWORD) {
            session.setPassword(sshMeta.getPassword());
        }
        // NOTE(review): disabling StrictHostKeyChecking accepts any host key —
        // acceptable for automation here, but a MITM exposure; confirm policy.
        session.setConfig("StrictHostKeyChecking", "no");
    } catch (Exception t) {
        throw new RuntimeException(t);
    }
    return session;
}
Start ssh session and obtain session .
22,937
/**
 * Connects the session and opens an "exec" channel for the configured
 * command line.
 *
 * <p>When running as super user, a PTY is allocated, the channel is
 * connected first, and the password is then written to the channel's input
 * to answer the sudo prompt; stream-wiring order here is significant.
 * Otherwise the channel is simply connected with no input stream.
 *
 * @param session the (unconnected) session from startSshSessionAndObtainSession
 * @return the connected exec channel
 * @throws JSchException on connect/openChannel failure
 */
public Channel sessionConnectGenerateChannel(Session session) throws JSchException {
    session.connect(sshMeta.getSshConnectionTimeoutMillis());
    ChannelExec channel = (ChannelExec) session.openChannel("exec");
    channel.setCommand(sshMeta.getCommandLine());
    if (sshMeta.isRunAsSuperUser()) {
        try {
            channel.setInputStream(null, true);
            OutputStream out = channel.getOutputStream();
            channel.setOutputStream(System.out, true);
            channel.setExtOutputStream(System.err, true);
            // PTY needed so sudo will prompt on the channel instead of failing.
            channel.setPty(true);
            channel.connect();
            // Feed the password to the sudo prompt after connecting.
            out.write((sshMeta.getPassword() + "\n").getBytes());
            out.flush();
        } catch (IOException e) {
            logger.error("error in sessionConnectGenerateChannel for super user", e);
        }
    } else {
        channel.setInputStream(null);
        channel.connect();
    }
    return channel;
}
Session connect generate channel .
22,938
/**
 * Builds a failure response from an exception: a sanitized display message,
 * the full stack trace text, and the fail flag set.
 *
 * @param t the exception that aborted the SSH execution
 * @return the populated error response
 */
public ResponseOnSingeRequest genErrorResponse(Exception t) {
    ResponseOnSingeRequest sshResponse = new ResponseOnSingeRequest();
    String displayError = PcErrorMsgUtils.replaceErrorMsg(t.toString());
    sshResponse.setStackTrace(PcStringUtils.printStackTrace(t));
    sshResponse.setErrorMessage(displayError);
    sshResponse.setFailObtainResponse(true);
    // FIX: pass the exception as the throwable argument so the logger records
    // the stack trace (the original concatenated it into the message string);
    // also corrects the "exection" typo.
    logger.error("error in exec SSH. \nIf exception is JSchException: "
            + "Auth cancel and using public key. "
            + "\nMake sure 1. private key full path is right (try sshMeta.getPrivKeyAbsPath()). "
            + "\n2. the user name and key matches ", t);
    return sshResponse;
}
Gen error response .
22,939
/**
 * Populates the task's per-host result map: one NodeReqResponse per target
 * host, each seeded with the default request content derived from the HTTP
 * entity body.
 *
 * @param task the task whose node data map is generated
 */
public void genNodeDataMap(ParallelTask task) {
    TargetHostMeta targetHostMeta = task.getTargetHostMeta();
    HttpMeta httpMeta = task.getHttpMeta();
    String requestContent =
            HttpMeta.replaceDefaultFullRequestContent(httpMeta.getEntityBody());
    Map<String, NodeReqResponse> parallelTaskResult = task.getParallelTaskResult();
    for (String fqdn : targetHostMeta.getHosts()) {
        NodeReqResponse nodeEntry = new NodeReqResponse(fqdn);
        nodeEntry.setDefaultReqestContent(requestContent);
        parallelTaskResult.put(fqdn, nodeEntry);
    }
}
Generate node data map .
22,940
/**
 * Copies entries from the source node map into the safe map, dropping any
 * host whose request parameters explicitly mark NODE_REQUEST_WILL_EXECUTE
 * as false (those are logged and skipped).
 *
 * @param nodeDataMapValidSource source map of host → request/response
 * @param nodeDataMapValidSafe   destination map receiving only executable entries
 */
public void filterUnsafeOrUnnecessaryRequest(
        Map<String, NodeReqResponse> nodeDataMapValidSource,
        Map<String, NodeReqResponse> nodeDataMapValidSafe) {
    for (Entry<String, NodeReqResponse> entry : nodeDataMapValidSource.entrySet()) {
        String hostName = entry.getKey();
        NodeReqResponse nodeEntry = entry.getValue();
        Map<String, String> params = nodeEntry.getRequestParameters();
        // Absent flag means "execute"; only an explicit false filters the host out.
        boolean willExecute = true;
        if (params.containsKey(PcConstants.NODE_REQUEST_WILL_EXECUTE)) {
            willExecute = Boolean.parseBoolean(params.get(PcConstants.NODE_REQUEST_WILL_EXECUTE));
        }
        if (!willExecute) {
            logger.info("NOT_EXECUTE_COMMAND " + " on target: " + hostName
                    + " at " + PcDateUtils.getNowDateTimeStrStandard());
            continue;
        }
        nodeDataMapValidSafe.put(hostName, nodeEntry);
    }
}
Filter unsafe or unnecessary request .
22,941
/**
 * Creates the TCP client bootstrap with pipeline factory, connect timeout,
 * and TCP_NODELAY enabled.
 *
 * @return the configured bootstrap
 * @throws HttpRequestCreateException (via TcpUdpRequestCreateException) when setup fails
 */
public ClientBootstrap bootStrapTcpClient() throws HttpRequestCreateException {
    try {
        ClientBootstrap bootstrap = new ClientBootstrap(tcpMeta.getChannelFactory());
        bootstrap.setPipelineFactory(new MyPipelineFactory(
                TcpUdpSshPingResourceStore.getInstance().getTimer(), this,
                tcpMeta.getTcpIdleTimeoutSec()));
        bootstrap.setOption("connectTimeoutMillis", tcpMeta.getTcpConnectTimeoutMillis());
        bootstrap.setOption("tcpNoDelay", true);
        return bootstrap;
    } catch (Exception t) {
        throw new TcpUdpRequestCreateException("Error in creating request in Tcpworker. "
                + " If tcpClient is null. Then fail to create.", t);
    }
}
Creates the tcpClient with proper handler .
22,942
/**
 * Sends the final response exactly once: closes the channel first, replies to
 * the original sender (unless it is the dead-letter actor), then stops this
 * actor. Guarded by {@code sentReply} so duplicate completions are ignored.
 *
 * @param response      response body text
 * @param error         whether the operation failed
 * @param errorMessage  human-readable error (when error)
 * @param stackTrace    captured stack trace text (when error)
 * @param statusCode    status code string
 * @param statusCodeInt numeric status code
 */
private void reply(final String response, final boolean error, final String errorMessage,
        final String stackTrace, final String statusCode, final int statusCodeInt) {
    if (!sentReply) {
        sentReply = true;
        // Close the network channel before replying; awaits close completion.
        if (channel != null && channel.isOpen())
            channel.close().awaitUninterruptibly();
        final ResponseOnSingeRequest res = new ResponseOnSingeRequest(response, error,
                errorMessage, stackTrace, statusCode, statusCodeInt,
                PcDateUtils.getNowDateTimeStrStandard(), null);
        // Don't reply into dead letters (sender already gone).
        if (!getContext().system().deadLetters().equals(sender)) {
            sender.tell(res, getSelf());
        }
        if (getContext() != null) {
            getContext().stop(getSelf());
        }
    }
}
First close the connection . Then reply .
22,943
/**
 * Matches the input against the given regex (after stripping all line
 * breaks) and returns capture group 1.
 *
 * @param input      text to match; line breaks are removed before matching
 * @param patternStr regex with at least one capture group; null when no
 *                   aggregation rule is defined
 * @return group 1 on a match, otherwise PcConstants.SYSTEM_FAIL_MATCH_REGEX
 */
public static String stringMatcherByPattern(String input, String patternStr) {
    String output = PcConstants.SYSTEM_FAIL_MATCH_REGEX;
    // Null pattern is expected when no aggregation rule is configured.
    if (patternStr == null) {
        logger.error("patternStr is NULL! (Expected when the aggregation rule is not defined at "
                + PcDateUtils.getNowDateTimeStrStandard());
        return output;
    }
    if (input == null) {
        logger.error("input (Expected when the response is null and now try to match on response) is NULL in stringMatcherByPattern() at "
                + PcDateUtils.getNowDateTimeStrStandard());
        return output;
    } else {
        // Collapse to a single line so patterns need not handle newlines.
        input = input.replace("\n", "").replace("\r", "");
    }
    logger.debug("input: " + input);
    logger.debug("patternStr: " + patternStr);
    // NOTE(review): MULTILINE has no effect after line breaks are stripped, and
    // matches() requires the pattern to cover the ENTIRE input (not find());
    // patterns are presumably written as ".*(...).*" — confirm before changing.
    Pattern patternMetric = Pattern.compile(patternStr, Pattern.MULTILINE);
    final Matcher matcher = patternMetric.matcher(input);
    if (matcher.matches()) {
        output = matcher.group(1);
    }
    return output;
}
this remove the linebreak .
22,944
/**
 * Lazily starts the single-threaded daemon scheduler that periodically
 * evaluates waiting tasks for available capacity. Safe to call repeatedly;
 * only the first call creates the scheduler.
 */
public synchronized void initTaskSchedulerIfNot() {
    if (scheduler != null) {
        return;
    }
    scheduler = Executors.newSingleThreadScheduledExecutor(DaemonThreadFactory.getInstance());
    scheduler.scheduleAtFixedRate(new CapacityAwareTaskScheduler(),
            ParallecGlobalConfig.schedulerInitDelay,
            ParallecGlobalConfig.schedulerCheckInterval,
            TimeUnit.MILLISECONDS);
    logger.info("initialized daemon task scheduler to evaluate waitQ tasks.");
}
as it is daemon thread
22,945
/**
 * Shuts down the task scheduler if it is running and clears the reference;
 * no-op when already stopped or never started.
 */
public synchronized void shutdownTaskScheduler() {
    boolean running = scheduler != null && !scheduler.isShutdown();
    if (running) {
        scheduler.shutdown();
        logger.info("shutdowned the task scheduler. No longer accepting new tasks");
        scheduler = null;
    }
}
Shutdown task scheduler .
22,946
/**
 * Looks up an in-progress task by job id.
 *
 * @param jobId the task/job id
 * @return the task, or null when not in progress
 */
public ParallelTask getTaskFromInProgressMap(String jobId) {
    // FIX: single get() replaces the containsKey()+get() pair — equivalent result
    // (the map never stores null values) and avoids the check-then-act race
    // when the map is mutated concurrently.
    return inprogressTaskMap.get(jobId);
}
Gets the task from in progress map .
22,947
/**
 * Sums the capacity used by all in-progress tasks, ignoring null entries.
 *
 * @return the total used capacity
 */
public int getTotalUsedCapacity() {
    int used = 0;
    for (ParallelTask inflight : inprogressTaskMap.values()) {
        if (inflight != null) {
            used += inflight.capacityUsed();
        }
    }
    return used;
}
get current total used capacity .
22,948
/**
 * Marks every queued task as user-canceled (COMPLETED_WITH_ERROR plus a
 * USER_CANCELED error meta) and then empties the wait queue.
 */
public synchronized void cleanWaitTaskQueue() {
    for (ParallelTask queued : waitQ) {
        queued.setState(ParallelTaskState.COMPLETED_WITH_ERROR);
        queued.getTaskErrorMetas().add(
                new TaskErrorMeta(TaskErrorType.USER_CANCELED, "NA"));
        logger.info("task {} removed from wait q. This task has been marked as USER CANCELED.",
                queued.getTaskId());
    }
    waitQ.clear();
}
Clean wait task queue .
22,949
/**
 * Marks the matching queued task(s) as user-canceled (COMPLETED_WITH_ERROR
 * plus a USER_CANCELED error meta). Note: the task object is left in the
 * queue; the scheduler is expected to drop completed tasks.
 *
 * @param taskTobeRemoved the task whose id identifies the entry to cancel
 * @return true when at least one matching task was marked
 */
public synchronized boolean removeTaskFromWaitQ(ParallelTask taskTobeRemoved) {
    boolean removed = false;
    for (ParallelTask task : waitQ) {
        // FIX: task ids are Strings — the original compared them with ==
        // (reference identity), which fails for equal ids from different
        // String instances; use equals() for value comparison.
        if (task.getTaskId().equals(taskTobeRemoved.getTaskId())) {
            task.setState(ParallelTaskState.COMPLETED_WITH_ERROR);
            task.getTaskErrorMetas().add(
                    new TaskErrorMeta(TaskErrorType.USER_CANCELED, "NA"));
            logger.info("task {} removed from wait q. This task has been marked as USER CANCELED.",
                    task.getTaskId());
            removed = true;
        }
    }
    return removed;
}
Removes the task from wait q .
22,950
/**
 * Key function to execute a parallel task end to end: registers it in the
 * in-progress map, resolves the variable-replacement mode, generates the
 * per-node data map, applies replacements, delegates execution to the
 * execution manager (blocking), and finally deregisters the task.
 *
 * @param task the task to execute
 * @return the aggregated response from the execution manager (may be null on failure)
 */
public ResponseFromManager generateUpdateExecuteTask(ParallelTask task) {
    ParallelTaskManager.getInstance().addTaskToInProgressMap(task.getTaskId(), task);
    logger.info("Added task {} to the running inprogress map...", task.getTaskId());
    boolean useReplacementVarMap = false;
    boolean useReplacementVarMapNodeSpecific = false;
    Map<String, StrStrMap> replacementVarMapNodeSpecific = null;
    Map<String, String> replacementVarMap = null;
    ResponseFromManager batchResponseFromManager = null;
    // Select which replacement inputs apply for this task.
    switch (task.getRequestReplacementType()) {
    case UNIFORM_VAR_REPLACEMENT:
        useReplacementVarMap = true;
        useReplacementVarMapNodeSpecific = false;
        replacementVarMap = task.getReplacementVarMap();
        break;
    case TARGET_HOST_SPECIFIC_VAR_REPLACEMENT:
        useReplacementVarMap = false;
        useReplacementVarMapNodeSpecific = true;
        replacementVarMapNodeSpecific = task.getReplacementVarMapNodeSpecific();
        break;
    case NO_REPLACEMENT:
        useReplacementVarMap = false;
        useReplacementVarMapNodeSpecific = false;
        break;
    default:
        logger.error("error request replacement type. default as no replacement");
    }
    InternalDataProvider dp = InternalDataProvider.getInstance();
    dp.genNodeDataMap(task);
    VarReplacementProvider.getInstance().updateRequestWithReplacement(task,
            useReplacementVarMap, replacementVarMap,
            useReplacementVarMapNodeSpecific, replacementVarMapNodeSpecific);
    // Blocks until the execution manager finishes (or times out).
    batchResponseFromManager = sendTaskToExecutionManager(task);
    removeTaskFromInProgressMap(task.getTaskId());
    logger.info("Removed task {} from the running inprogress map... "
            + ". This task should be garbage collected if there are no other pointers.",
            task.getTaskId());
    return batchResponseFromManager;
}
key function to execute a parallel task .
22,951
/**
 * Sends the parallel task to a freshly created ExecutionManager actor and
 * blocks until the aggregated batch response arrives or the manager-ask
 * timeout elapses. Always stops the manager actor afterwards and optionally
 * persists the task log locally.
 *
 * @param task the task to run
 * @return the batch response, or null when an exception occurred
 */
@SuppressWarnings("deprecation")
public ResponseFromManager sendTaskToExecutionManager(ParallelTask task) {
    ResponseFromManager commandResponseFromManager = null;
    ActorRef executionManager = null;
    try {
        logger.info("!!STARTED sendAgentCommandToManager : " + task.getTaskId() + " at "
                + PcDateUtils.getNowDateTimeStr());
        // One manager actor per task; the name embeds the task id for traceability.
        executionManager = ActorConfig.createAndGetActorSystem().actorOf(
                Props.create(ExecutionManager.class, task), "ExecutionManager-" + task.getTaskId());
        final FiniteDuration duration = Duration.create(task.getConfig().getTimeoutAskManagerSec(),
                TimeUnit.SECONDS);
        Future<Object> future = Patterns.ask(executionManager, new InitialRequestToManager(task),
                new Timeout(duration));
        // Expose the manager on the task so callers can cancel it externally.
        task.executionManager = executionManager;
        // Blocking wait for the aggregated response (deprecated Await API, hence
        // the @SuppressWarnings above).
        commandResponseFromManager = (ResponseFromManager) Await.result(future, duration);
        logger.info("!!COMPLETED sendTaskToExecutionManager : " + task.getTaskId() + " at "
                + PcDateUtils.getNowDateTimeStr() + " \t\t GenericResponseMap in future size: "
                + commandResponseFromManager.getResponseCount());
    } catch (Exception ex) {
        logger.error("Exception in sendTaskToExecutionManager {} details {}: ", ex, ex);
    } finally {
        // Stop the per-task actor on both success and failure to free resources.
        if (executionManager != null && !executionManager.isTerminated()) {
            ActorConfig.createAndGetActorSystem().stop(executionManager);
        }
        if (task.getConfig().isAutoSaveLogToLocal()) {
            task.saveLogToLocal();
        }
    }
    return commandResponseFromManager;
}
Send parallel task to execution manager .
22,952
/**
 * Returns true when the given path points at an existing regular
 * (non-directory) file.
 *
 * @param filePath the path to check
 * @return true if an ordinary file exists at the path
 */
public static boolean isFileExist(String filePath) {
    final File candidate = new File(filePath);
    if (!candidate.exists()) {
        return false;
    }
    return !candidate.isDirectory();
}
Checks if is file exist .
22,953
/**
 * Reads the entire file at the given path into a String, decoded as UTF-8.
 *
 * <p>Fix: replaced the deprecated Guava {@code Files.toString(File, Charset)}
 * with the JDK NIO equivalent (fully qualified to avoid clashing with the
 * Guava {@code Files} import already present in this file).
 *
 * @param filePath path of the file to read
 * @return the file content as a UTF-8 string
 * @throws IOException if the file cannot be read
 */
public static String readFileContentToString(String filePath) throws IOException {
    byte[] rawBytes = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(filePath));
    return new String(rawBytes, java.nio.charset.StandardCharsets.UTF_8);
}
Read file content to string .
22,954
/**
 * Reads the response body of the given URL into a string (UTF-8), using the
 * globally configured connect/read timeouts.
 *
 * <p>Fix: the InputStream and BufferedReader are now managed by
 * try-with-resources, so the reader is closed even when {@code readAll}
 * throws (the original closed only the raw stream, and only via finally).
 *
 * @param url the URL to fetch
 * @return the response body
 * @throws IOException on connect or read failure
 */
public static String readStringFromUrlGeneric(String url) throws IOException {
    String responseString = PcConstants.NA;
    URL urlObj = new URL(url);
    URLConnection con = urlObj.openConnection();
    con.setConnectTimeout(ParallecGlobalConfig.urlConnectionConnectTimeoutMillis);
    con.setReadTimeout(ParallecGlobalConfig.urlConnectionReadTimeoutMillis);
    try (InputStream is = con.getInputStream();
            BufferedReader rd = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")))) {
        responseString = PcFileNetworkIoUtils.readAll(rd);
    }
    return responseString;
}
Read string from url generic .
22,955
/**
 * Generic replacement helper: adds a single replacement variable (key/value)
 * to every node request of the task and flags each node as will-execute.
 *
 * @param task            task whose per-node requests are updated
 * @param replaceVarKey   replacement variable name (prefixed internally)
 * @param replaceVarValue replacement value
 */
public void updateRequestByAddingReplaceVarPair(ParallelTask task, String replaceVarKey,
        String replaceVarValue) {
    final String prefixedKey = PcConstants.NODE_REQUEST_PREFIX_REPLACE_VAR + replaceVarKey;
    Map<String, NodeReqResponse> taskResult = task.getParallelTaskResult();
    for (Entry<String, NodeReqResponse> nodeEntry : taskResult.entrySet()) {
        NodeReqResponse nodeResponse = nodeEntry.getValue();
        nodeResponse.getRequestParameters().put(prefixedKey, replaceVarValue);
        nodeResponse.getRequestParameters().put(PcConstants.NODE_REQUEST_WILL_EXECUTE,
                Boolean.toString(true));
    }
}
Generic helper function for variable replacement.
22,956
/**
 * Lazily creates (or re-creates after termination) the shared actor system.
 *
 * <p>Fix: method is now {@code synchronized} to prevent a check-then-act race
 * in which two threads could each observe a null/terminated system and create
 * two ActorSystems, leaking one.
 *
 * @return the live singleton actor system
 */
public static synchronized ActorSystem createAndGetActorSystem() {
    if (actorSystem == null || actorSystem.isTerminated()) {
        actorSystem = ActorSystem.create(PcConstants.ACTOR_SYSTEM, conf);
    }
    return actorSystem;
}
Create and get actor system .
22,957
/**
 * Forcefully shuts down the actor system and waits (bounded) for termination.
 *
 * <p>Fix: guards against a never-initialized system; the original would throw
 * a NullPointerException when {@code actorSystem} was still null.
 */
public static void shutDownActorSystemForce() {
    if (actorSystem != null && !actorSystem.isTerminated()) {
        logger.info("shutting down actor system...");
        actorSystem.shutdown();
        // Bounded wait so the caller is not blocked forever on a stuck system.
        actorSystem.awaitTermination(timeOutDuration);
        logger.info("Actor system has been shut down.");
    } else {
        logger.info("Actor system has been terminated already. NO OP.");
    }
}
Shut down actor system force .
22,958
/**
 * Initializes the Netty TCP/UDP channel factories and the shared wheel timer.
 * Cached thread pools are safe here because they release idle threads
 * automatically.
 */
public synchronized void init() {
    channelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(),
            Executors.newCachedThreadPool());
    datagramChannelFactory = new NioDatagramChannelFactory(Executors.newCachedThreadPool());
    timer = new HashedWheelTimer();
}
Initialize ; cached threadpool is safe as it is releasing resources automatically if idle
22,959
/**
 * Maps a raw error message to its canonical replacement: returns the
 * replacement for the first known error pattern contained in the message, the
 * original message when no pattern matches, or {@code PcConstants.NA} for
 * null input.
 *
 * <p>Fixes: the null check is hoisted out of the loop (the original
 * re-checked it on every iteration), and a missing pattern for an enum
 * constant no longer triggers a NullPointerException in {@code contains}.
 *
 * @param origMsg the original message; may be null
 * @return the replacement message, the original, or NA
 */
public static String replaceErrorMsg(String origMsg) {
    if (origMsg == null) {
        return PcConstants.NA;
    }
    String replaceMsg = origMsg;
    for (ERROR_TYPE errorType : ERROR_TYPE.values()) {
        String pattern = errorMapOrig.get(errorType);
        if (pattern != null && origMsg.contains(pattern)) {
            replaceMsg = errorMapReplace.get(errorType);
            break;
        }
    }
    return replaceMsg;
}
Replace error msg .
22,960
/**
 * Globally disables TLS certificate and hostname verification for all
 * HttpsURLConnection traffic in this JVM.
 *
 * <p>SECURITY NOTE(review): this trusts every certificate chain and every
 * hostname, making connections vulnerable to man-in-the-middle attacks.
 * Intended for environments with self-signed certificates only; do not use
 * in production. The "SSL" protocol literal predates TLS naming — consider
 * "TLS" (confirm against supported JVMs before changing).
 *
 * @throws KeyManagementException   if the SSL context cannot be initialized
 * @throws NoSuchAlgorithmException if the "SSL" protocol is unavailable
 */
private void disableCertificateVerification() throws KeyManagementException, NoSuchAlgorithmException {
    // Trust manager that accepts all certificate chains.
    final TrustManager[] trustAllCerts = new TrustManager[] { new CustomTrustManager() };
    final SSLContext sslContext = SSLContext.getInstance("SSL");
    sslContext.init(null, trustAllCerts, new SecureRandom());
    final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
    HttpsURLConnection.setDefaultSSLSocketFactory(sslSocketFactory);
    // Hostname verifier that accepts any host.
    final HostnameVerifier verifier = new HostnameVerifier() {
        public boolean verify(final String hostname, final SSLSession session) {
            return true;
        }
    };
    HttpsURLConnection.setDefaultHostnameVerifier(verifier);
}
Disable certificate verification .
22,961
/**
 * Substitutes the whole-request-content placeholder in the template with the
 * given replacement string.
 *
 * @param requestContentTemplate template containing the placeholder
 * @param replacementString      content to substitute in
 * @return the template with the placeholder replaced
 */
public static String replaceFullRequestContent(String requestContentTemplate, String replacementString) {
    final String placeholder = PcConstants.COMMAND_VAR_DEFAULT_REQUEST_CONTENT;
    return requestContentTemplate.replace(placeholder, replacementString);
}
Replace full request content .
22,962
/**
 * Records an occurrence of the given file under the given description,
 * creating a new per-file summary on first sighting.
 *
 * @param description problem description grouping the files
 * @param fileModel   the file in which the problem occurred
 */
public void addFile(String description, FileModel fileModel) {
    Map<FileModel, ProblemFileSummary> summariesForDescription = addDescription(description);
    ProblemFileSummary existing = summariesForDescription.get(fileModel);
    if (existing == null) {
        summariesForDescription.put(fileModel, new ProblemFileSummary(fileModel, 1));
    } else {
        existing.addOccurrence();
    }
}
Adds a file with the provided description .
22,963
/**
 * Decides whether the given type reference should be skipped; called for
 * every reference found in a .class file. Both separator styles are
 * normalized to dots before matching against the ignore list.
 */
private boolean shouldIgnore(String typeReference) {
    final String dotted = typeReference.replace('/', '.').replace('\\', '.');
    return JavaClassIgnoreResolver.singletonInstance().matches(dotted);
}
This method is called on every reference that is in the . class file .
22,964
/**
 * Resolves the FileModel behind the given payload: unwraps a
 * FileReferenceModel, passes a FileModel through, and returns null for
 * anything else. Lets rules reference a model indirectly pointing at a file
 * without iterating the FileModel itself.
 */
public FileModel resolvePayload(GraphRewrite event, EvaluationContext context, WindupVertexFrame payload) {
    checkVariableName(event, context);
    if (payload instanceof FileReferenceModel) {
        FileReferenceModel reference = (FileReferenceModel) payload;
        return reference.getFile();
    }
    return payload instanceof FileModel ? (FileModel) payload : null;
}
Sets the payload to the FileModel of the given instance even though the variable does not directly reference it. This mainly simplifies rule creation when the FileModel itself is not being iterated, only a model referencing it.
22,965
/**
 * Creates a FreeMarkerOperation with the provided Furnace instance, template
 * path, output filename, and variable names to expose to the template.
 */
public static FreeMarkerOperation create(Furnace furnace, String templatePath, String outputFilename,
        String... varNames) {
    return new FreeMarkerOperation(furnace, templatePath, outputFilename, varNames);
}
Create a FreeMarkerOperation with the provided furnace instance template path and varNames .
22,966
/**
 * Recurses the given folder and adds references to its files to the graph as
 * FileModels. Nested archives are unzipped and recursed; archives whose SHA1
 * matches an already-seen archive are recorded as duplicates pointing at the
 * canonical one.
 *
 * @param subArchivesOnly when true, only nested zip-like entries are added
 */
private void recurseAndAddFiles(GraphRewrite event, EvaluationContext context, Path tempFolder,
        FileService fileService, ArchiveModel archiveModel, FileModel parentFileModel, boolean subArchivesOnly) {
    checkCancelled(event);
    int numberAdded = 0;
    FileFilter filter = TrueFileFilter.TRUE;
    if (archiveModel instanceof IdentifiedArchiveModel) {
        // Identified archives only need a subset of their entries.
        filter = new IdentifiedArchiveFileFilter(archiveModel);
    }
    // Archives are walked via their unzipped directory; plain files directly.
    File fileReference;
    if (parentFileModel instanceof ArchiveModel)
        fileReference = new File(((ArchiveModel) parentFileModel).getUnzippedDirectory());
    else
        fileReference = parentFileModel.asFile();
    WindupJavaConfigurationService windupJavaConfigurationService = new WindupJavaConfigurationService(
            event.getGraphContext());
    File[] subFiles = fileReference.listFiles();
    if (subFiles == null)
        return;
    for (File subFile : subFiles) {
        if (!filter.accept(subFile))
            continue;
        if (subArchivesOnly && !ZipUtil.endsWithZipExtension(subFile.getAbsolutePath()))
            continue;
        FileModel subFileModel = fileService.createByFilePath(parentFileModel, subFile.getAbsolutePath());
        if (windupJavaConfigurationService.checkIfIgnored(event, subFileModel))
            continue;
        numberAdded++;
        // Periodic commit keeps the graph transaction bounded.
        if (numberAdded % 250 == 0)
            event.getGraphContext().commit();
        if (subFile.isFile() && ZipUtil.endsWithZipExtension(subFileModel.getFilePath())) {
            // Nested archive: register it and recurse into its contents.
            File newZipFile = subFileModel.asFile();
            ArchiveModel newArchiveModel = GraphService.addTypeToModel(event.getGraphContext(), subFileModel,
                    ArchiveModel.class);
            newArchiveModel.setParentArchive(archiveModel);
            newArchiveModel.setArchiveName(newZipFile.getName());
            newArchiveModel = GraphService.refresh(event.getGraphContext(), newArchiveModel);
            // Look for a canonical archive with the same SHA1 (skipping
            // already-marked duplicates and the archive itself).
            ArchiveModel canonicalArchiveModel = null;
            for (FileModel otherMatches : fileService.findAllByProperty(FileModel.SHA1_HASH,
                    newArchiveModel.getSHA1Hash())) {
                if (otherMatches instanceof ArchiveModel && !otherMatches.equals(newArchiveModel)
                        && !(otherMatches instanceof DuplicateArchiveModel)) {
                    canonicalArchiveModel = (ArchiveModel) otherMatches;
                    break;
                }
            }
            if (canonicalArchiveModel != null) {
                // Duplicate content: link to the canonical archive instead of
                // treating it as an independent archive.
                DuplicateArchiveModel duplicateArchive = GraphService.addTypeToModel(event.getGraphContext(),
                        newArchiveModel, DuplicateArchiveModel.class);
                duplicateArchive.setCanonicalArchive(canonicalArchiveModel);
                unzipToTempDirectory(event, context, tempFolder, newZipFile, duplicateArchive, true);
            } else {
                unzipToTempDirectory(event, context, tempFolder, newZipFile, newArchiveModel, false);
            }
        } else if (subFile.isDirectory()) {
            recurseAndAddFiles(event, context, tempFolder, fileService, archiveModel, subFileModel, false);
        }
    }
}
Recurses the given folder and adds references to these files to the graph as FileModels .
22,967
/**
 * Renders each link as an &lt;li&gt; element, optionally wrapping the whole
 * list in &lt;ul&gt; tags. Emits nothing at all when there are no links.
 *
 * @throws IOException on writer failure
 */
private void renderAsLI(Writer writer, ProjectModel project, Iterator<Link> links, boolean wrap)
        throws IOException {
    if (!links.hasNext())
        return;
    if (wrap)
        writer.append("<ul>");
    while (links.hasNext()) {
        writer.append("<li>");
        renderLink(writer, project, links.next());
        writer.append("</li>");
    }
    if (wrap)
        writer.append("</ul>");
}
Renders the links as LI tags, optionally wrapped in UL tags.
22,968
/**
 * Creates the report index and associates it with every project model in the
 * application (the given root project and all descendants).
 */
private ApplicationReportIndexModel createApplicationReportIndex(GraphContext context,
        ProjectModel applicationProjectModel) {
    ApplicationReportIndexModel reportIndex = new ApplicationReportIndexService(context).create();
    addAllProjectModels(reportIndex, applicationProjectModel);
    return reportIndex;
}
Create the index and associate it with all project models in the Application
22,969
/**
 * Attaches the project model and, recursively, all of its children to the
 * index, skipping children that are already present. Enables navigation from
 * any project model back to the application index.
 */
private void addAllProjectModels(ApplicationReportIndexModel navIdx, ProjectModel projectModel) {
    navIdx.addProjectModel(projectModel);
    for (ProjectModel childProject : projectModel.getChildProjects()) {
        boolean alreadyIndexed = Iterators.asSet(navIdx.getProjectModels()).contains(childProject);
        if (!alreadyIndexed)
            addAllProjectModels(navIdx, childProject);
    }
}
Attach all project models within the application to the index . This will make it easy to navigate from the projectModel to the application index .
22,970
/**
 * Estimates the remaining time in milliseconds from the average time per work
 * unit so far, based on the start time, the total unit count, and the units
 * completed.
 *
 * NOTE(review): when {@code worked.get() == 0} the division yields
 * Infinity/NaN and the long cast produces Long.MAX_VALUE or 0 — callers
 * should invoke this only after some work has been reported; confirm whether
 * an explicit guard is wanted.
 */
public long getTimeRemainingInMillis() {
    long batchTime = System.currentTimeMillis() - startTime;
    double timePerIteration = (double) batchTime / (double) worked.get();
    return (long) (timePerIteration * (total - worked.get()));
}
Gets the estimated time remaining in milliseconds based upon the total number of work units the start time and how many units have been done so far .
22,971
/**
 * Finds the file at the provided path within the archive by walking the
 * unix-normalized path segment by segment from the archive root; returns
 * null as soon as any segment is missing.
 */
public FileModel getChildFile(ArchiveModel archiveModel, String filePath) {
    final String unixPath = FilenameUtils.separatorsToUnix(filePath);
    final StringTokenizer segments = new StringTokenizer(unixPath, "/");
    FileModel current = archiveModel;
    while (current != null && segments.hasMoreTokens()) {
        current = findFileModel(current, segments.nextToken());
    }
    return current;
}
Finds the file at the provided path within the archive .
22,972
/**
 * Performs the entire sort: builds a directed graph of provider dependency
 * relationships, rejects cycles, topologically orders the providers, freezes
 * the result, and stamps each AbstractRuleProvider with its execution index.
 */
private void sort() {
    DefaultDirectedWeightedGraph<RuleProvider, DefaultEdge> graph = new DefaultDirectedWeightedGraph<>(
            DefaultEdge.class);
    for (RuleProvider provider : providers) {
        graph.addVertex(provider);
    }
    // Edges encode ordering constraints between providers.
    addProviderRelationships(graph);
    checkForCycles(graph);
    // Topological order respects every dependency edge.
    List<RuleProvider> result = new ArrayList<>(this.providers.size());
    TopologicalOrderIterator<RuleProvider, DefaultEdge> iterator = new TopologicalOrderIterator<>(graph);
    while (iterator.hasNext()) {
        RuleProvider provider = iterator.next();
        result.add(provider);
    }
    this.providers = Collections.unmodifiableList(result);
    // Record each provider's position for later scheduling/reporting.
    int index = 0;
    for (RuleProvider provider : this.providers) {
        if (provider instanceof AbstractRuleProvider)
            ((AbstractRuleProvider) provider).setExecutionIndex(index++);
    }
}
Perform the entire sort operation
22,973
/**
 * Uses the jgrapht cycle detector to find any cycles in the provided
 * dependency graph, throwing a RuntimeException describing all offending
 * providers and their sub-cycles.
 */
private void checkForCycles(DefaultDirectedWeightedGraph<RuleProvider, DefaultEdge> graph) {
    CycleDetector<RuleProvider, DefaultEdge> detector = new CycleDetector<>(graph);
    if (!detector.detectCycles())
        return;
    Set<RuleProvider> offenders = detector.findCycles();
    StringBuilder message = new StringBuilder();
    for (RuleProvider offender : offenders) {
        message.append("Found dependency cycle involving: " + offender.getMetadata().getID())
                .append(System.lineSeparator());
        for (RuleProvider member : detector.findCyclesContainingVertex(offender)) {
            message.append("\tSubcycle: " + member.getMetadata().getID()).append(System.lineSeparator());
        }
    }
    throw new RuntimeException("Dependency cycles detected: " + message.toString());
}
Use the jgrapht cycle checker to detect any cycles in the provided dependency graph .
22,974
/**
 * Recurses the given folder, creating FileModel vertices for all child files
 * and accumulating each directory's total size.
 *
 * <p>Fix: the size accumulator is now a primitive {@code long} instead of the
 * deprecated boxed {@code new Long(0)}, avoiding per-iteration
 * boxing/unboxing; autoboxing at the setter call preserves the interface.
 */
private void recurseAndAddFiles(GraphRewrite event, FileService fileService,
        WindupJavaConfigurationService javaConfigurationService, FileModel file) {
    if (javaConfigurationService.checkIfIgnored(event, file))
        return;
    String filePath = file.getFilePath();
    File fileReference = new File(filePath);
    long directorySize = 0L;
    if (fileReference.isDirectory()) {
        File[] subFiles = fileReference.listFiles();
        if (subFiles != null) {
            for (File reference : subFiles) {
                FileModel subFile = fileService.createByFilePath(file, reference.getAbsolutePath());
                recurseAndAddFiles(event, fileService, javaConfigurationService, subFile);
                if (subFile.isDirectory()) {
                    directorySize = directorySize + subFile.getDirectorySize();
                } else {
                    directorySize = directorySize + subFile.getSize();
                }
            }
        }
        file.setDirectorySize(directorySize);
    }
}
Recurses the given folder and creates the FileModels vertices for the child files to the graph .
22,975
/**
 * Validates that the given file is readable input: non-null, existing, a
 * file or a directory, and — if a directory — non-empty.
 *
 * <p>Fix: {@code File.list()} returns null on an I/O error; the original
 * would throw a NullPointerException at the length check. That case now
 * raises an explicit IllegalStateException.
 *
 * @param fileOrDir the file or directory to validate
 * @param fileDesc  human-readable description used in error messages
 * @throws IllegalArgumentException if any validation check fails
 * @throws IllegalStateException    if a directory listing cannot be obtained
 */
public static void checkFileOrDirectoryToBeRead(File fileOrDir, String fileDesc) {
    if (fileOrDir == null)
        throw new IllegalArgumentException(fileDesc + " must not be null.");
    if (!fileOrDir.exists())
        throw new IllegalArgumentException(fileDesc + " does not exist: " + fileOrDir.getAbsolutePath());
    if (!(fileOrDir.isDirectory() || fileOrDir.isFile()))
        throw new IllegalArgumentException(fileDesc + " must be a file or a directory: " + fileOrDir.getPath());
    if (fileOrDir.isDirectory()) {
        String[] entries = fileOrDir.list();
        if (entries == null)
            throw new IllegalStateException(fileDesc + " could not be listed: " + fileOrDir.getPath());
        if (entries.length == 0)
            throw new IllegalArgumentException(fileDesc + " is an empty directory: " + fileOrDir.getPath());
    }
}
Throws if the given file is null is not a file or directory or is an empty directory .
22,976
/**
 * Scans all started Forge addons for resources accepted by the given filter
 * and returns their URLs.
 */
public List<URL> scan(Predicate<String> filter) {
    final List<URL> found = new ArrayList<>(128);
    for (Addon addon : furnace.getAddonRegistry().getAddons(AddonFilters.allStarted())) {
        for (String resourcePath : filterAddonResources(addon, filter)) {
            URL resourceUrl = addon.getClassLoader().getResource(resourcePath);
            if (resourceUrl != null)
                found.add(resourceUrl);
        }
    }
    return found;
}
Scans all Forge addons for files accepted by given filter .
22,977
/**
 * Scans all started Forge addons for classes whose resource paths are
 * accepted by the given filter, loading each via its addon's class loader.
 * Classes that fail to load are logged and skipped.
 */
public List<Class<?>> scanClasses(Predicate<String> filter) {
    final List<Class<?>> loaded = new ArrayList<>(128);
    for (Addon addon : furnace.getAddonRegistry().getAddons(AddonFilters.allStarted())) {
        for (String resourcePath : filterAddonResources(addon, filter)) {
            String clsName = PathUtil.classFilePathToClassname(resourcePath);
            try {
                loaded.add(addon.getClassLoader().loadClass(clsName));
            } catch (ClassNotFoundException ex) {
                LOG.log(Level.WARNING, "Failed to load class for name '" + clsName + "':\n" + ex.getMessage(), ex);
            }
        }
    }
    return loaded;
}
Scans all Forge addons for classes accepted by given filter .
22,978
/**
 * Returns the resource paths in the given addon that pass the given filter,
 * scanning exploded directories and archives alike.
 */
public List<String> filterAddonResources(Addon addon, Predicate<String> filter) {
    final List<String> matches = new ArrayList<>();
    for (File resource : addon.getRepository().getAddonResources(addon.getId())) {
        if (resource.isDirectory())
            handleDirectory(filter, resource, matches);
        else
            handleArchiveByFile(filter, resource, matches);
    }
    return matches;
}
Returns a list of files in given addon passing given filter .
22,979
/**
 * Scans the entries of the given zip archive, adding the names accepted by
 * the filter into the given list.
 *
 * <p>Fix: collapsed the redundant nested try — the try-with-resources block
 * carries the catch clause directly, with identical behavior.
 *
 * @throws RuntimeException wrapping any IOException from reading the archive
 */
private void handleArchiveByFile(Predicate<String> filter, File archive, List<String> discoveredFiles) {
    try (ZipFile zip = new ZipFile(archive)) {
        Enumeration<? extends ZipEntry> entries = zip.entries();
        while (entries.hasMoreElements()) {
            String name = entries.nextElement().getName();
            if (filter.accept(name))
                discoveredFiles.add(name);
        }
    } catch (IOException e) {
        throw new RuntimeException("Error handling file " + archive, e);
    }
}
Scans given archive for files passing given filter adds the results into given list .
22,980
/**
 * Scans the given directory tree, adding paths accepted by the filter
 * (relativized against rootDir) into discoveredFiles. I/O errors are logged,
 * not propagated.
 */
private void handleDirectory(final Predicate<String> filter, final File rootDir,
        final List<String> discoveredFiles) {
    try {
        // Anonymous DirectoryWalker: relativizes each file against rootDir
        // before applying the filter, so results are addon-relative paths.
        new DirectoryWalker<String>() {
            private Path startDir;

            public void walk() throws IOException {
                this.startDir = rootDir.toPath();
                this.walk(rootDir, discoveredFiles);
            }

            protected void handleFile(File file, int depth, Collection<String> discoveredFiles)
                    throws IOException {
                String newPath = startDir.relativize(file.toPath()).toString();
                if (filter.accept(newPath))
                    discoveredFiles.add(newPath);
            }
        }.walk();
    } catch (IOException ex) {
        LOG.log(Level.SEVERE, "Error reading Furnace addon directory", ex);
    }
}
Scans given directory for files passing given filter adds the results into given list .
22,981
/**
 * Specifies the artifact for which the condition should search.
 *
 * @param artifact the artifact to search for
 * @return a new Project condition bound to that artifact
 */
public static Project dependsOnArtifact(Artifact artifact) {
    final Project condition = new Project();
    condition.artifact = artifact;
    return condition;
}
Specify the Artifact for which the condition should search for .
22,982
/**
 * Returns the single item of the Iterable, or null if it is empty.
 *
 * <p>Fix: the original called {@code it.iterator()} twice — once for the
 * emptiness check and once for consumption — which breaks one-shot Iterables
 * (e.g. those backed by a stream or cursor). A single iterator is now used
 * for both.
 *
 * @param it the iterable to inspect
 * @return the only element, or null when empty
 * @throws IllegalStateException if the Iterable yields more than one item
 */
public static final <T> T getSingle(Iterable<T> it) {
    final Iterator<T> iterator = it.iterator();
    if (!iterator.hasNext())
        return null;
    T o = iterator.next();
    if (iterator.hasNext())
        throw new IllegalStateException("Found multiple items in iterator over " + o.getClass().getName());
    return o;
}
Returns the single item from the Iterable. If there's none, returns null. If there is more than one, throws an IllegalStateException.
22,983
/**
 * Sets the payload to the FileModel of the given instance even though the
 * variable is not directly of its type; simplifies rules where the FileModel
 * itself is not iterated but a model referencing it is.
 *
 * NOTE(review): the cast to XmlFileModel assumes every FileReferenceModel
 * reaching this condition points at an XML file — confirm against the rules
 * that use this class, otherwise a ClassCastException is possible.
 */
public void perform(GraphRewrite event, EvaluationContext context) {
    checkVariableName(event, context);
    WindupVertexFrame payload = resolveVariable(event, getVariableName());
    if (payload instanceof FileReferenceModel) {
        FileModel file = ((FileReferenceModel) payload).getFile();
        perform(event, context, (XmlFileModel) file);
    } else {
        super.perform(event, context);
    }
}
Sets the payload to the FileModel of the given instance even though the variable is not directly of its type. This mainly simplifies rule creation when the FileModel itself is not being iterated, only a model referencing it.
22,984
/**
 * Specifies the artifact configuration to be searched for, carrying over this
 * builder's input variable name.
 */
public Project dependsOnArtifact(Artifact artifact) {
    final Project condition = new Project();
    condition.setArtifact(artifact);
    condition.setInputVariablesName(inputVarName);
    return condition;
}
Specify the artifact configuration to be searched for
22,985
/**
 * Maps a JMS class name (destination or connection-factory) to its
 * destination type; returns null when the name is not a recognized JMS type
 * (including null input).
 */
public static JmsDestinationType getTypeFromClass(String aClass) {
    if (StringUtils.equals(aClass, "javax.jms.Queue")
            || StringUtils.equals(aClass, "javax.jms.QueueConnectionFactory"))
        return JmsDestinationType.QUEUE;
    if (StringUtils.equals(aClass, "javax.jms.Topic")
            || StringUtils.equals(aClass, "javax.jms.TopicConnectionFactory"))
        return JmsDestinationType.TOPIC;
    return null;
}
Gets JmsDestinationType from java class name
22,986
/**
 * Checks the variable name and, when unset, defaults it to the singleton
 * payload variable name at the top of the iteration stack.
 */
protected void checkVariableName(GraphRewrite event, EvaluationContext context) {
    if (variableName != null)
        return;
    setVariableName(Iteration.getPayloadVariableName(event, context));
}
Check the variable name and if not set set it with the singleton variable name being on the top of the stack .
22,987
/**
 * Parses the provided source file using the given libraryPaths and
 * sourcePaths as resolution context; the libraries may be jar files or
 * directories containing class files.
 *
 * <p>Fix: the compiler options map is now parameterized
 * ({@code JavaCore.getOptions()} returns a {@code Hashtable<String, String>}),
 * removing the raw-type usage.
 *
 * @return all resolved class references found in the file
 * @throws ASTException if the source file cannot be read
 */
public static List<ClassReference> analyze(WildcardImportResolver importResolver, Set<String> libraryPaths,
        Set<String> sourcePaths, Path sourceFile) {
    ASTParser parser = ASTParser.newParser(AST.JLS11);
    parser.setEnvironment(libraryPaths.toArray(new String[libraryPaths.size()]),
            sourcePaths.toArray(new String[sourcePaths.size()]), null, true);
    parser.setBindingsRecovery(false);
    parser.setResolveBindings(true);
    Map<String, String> options = JavaCore.getOptions();
    JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options);
    parser.setCompilerOptions(options);
    String fileName = sourceFile.getFileName().toString();
    parser.setUnitName(fileName);
    try {
        // NOTE(review): readFileToString without a charset uses the platform
        // default; consider an explicit charset if sources are always UTF-8.
        parser.setSource(FileUtils.readFileToString(sourceFile.toFile()).toCharArray());
    } catch (IOException e) {
        throw new ASTException("Failed to get source for file: " + sourceFile.toString() + " due to: "
                + e.getMessage(), e);
    }
    parser.setKind(ASTParser.K_COMPILATION_UNIT);
    CompilationUnit cu = (CompilationUnit) parser.createAST(null);
    ReferenceResolvingVisitor visitor = new ReferenceResolvingVisitor(importResolver, cu, sourceFile.toString());
    cu.accept(visitor);
    return visitor.getJavaClassReferences();
}
Parses the provided file using the given libraryPaths and sourcePaths as context . The libraries may be either jar files or references to directories containing class files .
22,988
/**
 * Formats a vertex using its properties; intended for debugging. Delegates to
 * the recursive overload with a fresh builder and visited set.
 */
public static final String vertexAsString(Vertex vertex, int depth, String withEdgesOfLabel) {
    StringBuilder sb = new StringBuilder();
    vertexAsString(vertex, depth, withEdgesOfLabel, sb, 0, new HashSet<>());
    return sb.toString();
}
Formats a vertex using its properties. For debugging purposes.
22,989
/**
 * Normalizes the name so it can be used as a Maven artifactId or groupId:
 * lower-cases it and replaces every non-alphanumeric character with '-'.
 *
 * <p>Fix: lower-casing now uses {@code Locale.ROOT} so the result is stable
 * regardless of the default locale (e.g. Turkish dotless-i mapping).
 *
 * @param name the raw directory name; may be null
 * @return the normalized name, or null when the input is null
 */
private static String normalizeDirName(String name) {
    if (name == null)
        return null;
    return name.toLowerCase(java.util.Locale.ROOT).replaceAll("[^a-zA-Z0-9]", "-");
}
Normalizes the name so it can be used as Maven artifactId or groupId .
22,990
/**
 * Tries to guess the packaging of the archive (EAR/WAR/JAR/...) when the
 * project type is not already set, falling back to the root file's suffix.
 *
 * NOTE(review): the trailing-space trick matches whole suffixes only, but an
 * empty suffix ("" + " " = " ") also matches and would store "" as the
 * project type — confirm whether that edge case is intended.
 */
private static String guessPackaging(ProjectModel projectModel) {
    String projectType = projectModel.getProjectType();
    if (projectType != null)
        return projectType;
    LOG.warning("WINDUP-983 getProjectType() returned null for: "
            + projectModel.getRootFileModel().getPrettyPath());
    String suffix = StringUtils.substringAfterLast(projectModel.getRootFileModel().getFileName(), ".");
    if ("jar war ear sar har ".contains(suffix + " ")) {
        // Cache the guess on the model so later callers see a non-null type.
        projectModel.setProjectType(suffix);
        return suffix;
    }
    return "unknown";
}
Tries to guess the packaging of the archive — whether it's an EAR, WAR, or JAR. Maybe not needed, as we can rely on the suffix?
22,991
/**
 * Runs the given xpath against the document and returns its boolean result
 * (false when evaluation yields null).
 */
public static boolean xpathExists(Node document, String xpathExpression,
        Map<String, String> namespaceMapping) throws XPathException, MarshallingException {
    final Boolean matched = (Boolean) executeXPath(document, xpathExpression, namespaceMapping,
            XPathConstants.BOOLEAN);
    return Boolean.TRUE.equals(matched);
}
Runs the given xpath and returns a boolean result .
22,992
/**
 * Compiles and executes the given xpath against the document, returning the
 * result coerced to the requested type.
 *
 * @throws XPathException       when the expression does not compile
 * @throws MarshallingException for any other evaluation failure
 */
public static Object executeXPath(Node document, String xpathExpression,
        Map<String, String> namespaceMapping, QName result) throws XPathException, MarshallingException {
    final NamespaceMapContext namespaceContext = new NamespaceMapContext(namespaceMapping);
    try {
        XPath xpath = XPathFactory.newInstance().newXPath();
        xpath.setNamespaceContext(namespaceContext);
        XPathExpression compiled = xpath.compile(xpathExpression);
        return executeXPath(document, compiled, result);
    } catch (XPathExpressionException e) {
        throw new XPathException("Xpath(" + xpathExpression + ") cannot be compiled", e);
    } catch (Exception e) {
        throw new MarshallingException("Exception unmarshalling XML.", e);
    }
}
Executes the given xpath and returns the result with the type specified .
22,993
/**
 * Counts the package prefixes (up to the third dot) of the project's Java
 * classes; if one prefix accounts for more than half of the distinct
 * prefixes, it is returned as the derived groupId, otherwise null.
 */
String deriveGroupIdFromPackages(ProjectModel projectModel) {
    Map<Object, Long> pkgsMap = new HashMap<>();
    Set<String> pkgs = new HashSet<>(1000);
    GraphTraversal<Vertex, Vertex> pipeline = new GraphTraversalSource(graphContext.getGraph()).V(projectModel);
    // Traverse project -> files, keep JavaClassFileModel vertices with a
    // package name, and group-count them by their three-segment prefix.
    pkgsMap = pipeline.out(ProjectModel.PROJECT_MODEL_TO_FILE)
            .has(WindupVertexFrame.TYPE_PROP, new P(new BiPredicate<String, String>() {
                public boolean test(String o, String o2) {
                    return o.contains(o2);
                }
            }, GraphTypeManager.getTypeValue(JavaClassFileModel.class)))
            .hasKey(JavaClassFileModel.PROPERTY_PACKAGE_NAME)
            .groupCount()
            .by(v -> upToThirdDot(graphContext, (Vertex) v))
            .toList().get(0);
    // Find the most frequent prefix.
    Map.Entry<Object, Long> biggest = null;
    for (Map.Entry<Object, Long> entry : pkgsMap.entrySet()) {
        if (biggest == null || biggest.getValue() < entry.getValue())
            biggest = entry;
    }
    // Majority rule: only accept the prefix when it dominates the counts.
    if (biggest != null && biggest.getValue() > pkgsMap.size() / 2)
        return biggest.getKey().toString();
    return null;
}
Counts the packages prefixes appearing in this project and if some of them make more than half of the total of existing packages this prefix is returned . Otherwise returns null .
22,994
/**
 * Only match if the TypeReference is at one of the specified locations within
 * the file. Passing null leaves the previously configured locations intact.
 */
public JavaClassBuilderAt at(TypeReferenceLocation... locations) {
    if (locations != null)
        this.locations = Arrays.asList(locations);
    return this;
}
Only match if the TypeReference is at the specified location within the file .
22,995
/**
 * Optionally specify the variable name to use for the output of this
 * condition. Null is rejected via Assert.notNull.
 */
public ConditionBuilder as(String variable) {
    Assert.notNull(variable, "Variable name must not be null.");
    this.setOutputVariablesName(variable);
    return this;
}
Optionally specify the variable name to use for the output of this condition
22,996
/**
 * Binds the given results to the named variable; if the top-most stack frame
 * already holds a value for that name, the two iterables are concatenated.
 */
protected void setResults(GraphRewrite event, String variable, Iterable<? extends WindupVertexFrame> results) {
    Variables frameStack = Variables.instance(event);
    Iterable<? extends WindupVertexFrame> previouslySet = frameStack.findVariable(variable, 1);
    if (previouslySet == null) {
        frameStack.setVariable(variable, results);
    } else {
        // Merge with the value already bound in the top-most frame.
        frameStack.setVariable(variable, Iterables.concat(previouslySet, results));
    }
}
This sets the variable with the given name to the given value . If there is already a variable with the same name in the top - most stack frame we will combine them here .
22,997
/**
 * Loads the currently registered RulePhase services from Furnace into an
 * unmodifiable name-to-class map, unwrapping service proxies first.
 */
private Map<String, Class<? extends RulePhase>> loadPhases() {
    final Map<String, Class<? extends RulePhase>> phases = new HashMap<>();
    Furnace furnace = FurnaceHolder.getFurnace();
    for (RulePhase phase : furnace.getAddonRegistry().getServices(RulePhase.class)) {
        // Unwrap the Furnace proxy to reach the concrete phase class.
        @SuppressWarnings("unchecked")
        Class<? extends RulePhase> unwrappedClass = (Class<? extends RulePhase>) Proxies.unwrap(phase).getClass();
        phases.put(classNameToMapKey(unwrappedClass.getSimpleName()), unwrappedClass);
    }
    return Collections.unmodifiableMap(phases);
}
Loads the currently known phases from Furnace to the map .
22,998
/**
 * Input-generation fallback: when neither an input variable name nor a
 * filename pattern narrows the selection, every FileModel in the graph is
 * added to the vertices list. Other attribute-specific handlers run first.
 */
private void allInput(List<FileModel> vertices, GraphRewrite event, ParameterStore store) {
    boolean noOtherSelection = StringUtils.isBlank(getInputVariablesName()) && this.filenamePattern == null;
    if (!noOtherSelection)
        return;
    FileService fileModelService = new FileService(event.getGraphContext());
    for (FileModel fileModel : fileModelService.findAll()) {
        vertices.add(fileModel);
    }
}
Generating the input vertices is quite complex . Therefore there are multiple methods that handles the input vertices based on the attribute specified in specific order . This method generates all the vertices if there is no other way how to handle the input .
22,999
/**
 * Returns the vertex with the given ID framed into this service's type.
 */
public T getById(Object id) {
    return context.getFramed().getFramedVertex(this.type, id);
}
Returns the vertex with given ID framed into given interface .