idx
int64
0
41.2k
question
stringlengths
83
4.15k
target
stringlengths
5
715
9,200
/**
 * Creates one temporary job on the Mobile Center server and returns its id.
 *
 * Fix: the original called {@code loginToMC(...).toJSONString()} before checking
 * for null, so a failed login threw an NPE instead of being handled.
 *
 * @return the new job id, or null when login or job creation failed
 */
public String createTempJob(String mcUrl, String mcUserName, String mcPassword, String proxyAddress,
                            String proxyUserName, String proxyPassword) {
    String jobId = null;
    String hp4mSecret = null;
    String jsessionId = null;
    // Hold the raw login result and null-check it BEFORE serializing it.
    JSONObject loginResult = loginToMC(mcUrl, mcUserName, mcPassword, proxyAddress, proxyUserName, proxyPassword);
    try {
        if (loginResult != null) {
            JSONObject jsonObject = (JSONObject) JSONValue.parseStrict(loginResult.toJSONString());
            hp4mSecret = (String) jsonObject.get(Constants.LOGIN_SECRET);
            jsessionId = (String) jsonObject.get(Constants.JSESSIONID);
        }
    } catch (Exception e) {
        e.printStackTrace(); // NOTE(review): no logger is visible in this class; keep existing reporting style
    }
    if (argumentsCheck(hp4mSecret, jsessionId)) {
        try {
            Map<String, String> headers = new HashMap<String, String>();
            headers.put(Constants.LOGIN_SECRET, hp4mSecret);
            headers.put(Constants.COOKIE, Constants.JESEEIONEQ + jsessionId);
            HttpResponse response = HttpUtils.get(
                    HttpUtils.setProxyCfg(proxyAddress, proxyUserName, proxyPassword),
                    mcUrl + Constants.CREATE_JOB_URL, headers, null);
            if (response != null && response.getJsonObject() != null) {
                JSONObject job = response.getJsonObject();
                // "data" carries the created job; its "id" is what callers need.
                if (job.get("data") != null) {
                    JSONObject data = (JSONObject) job.get("data");
                    jobId = data.getAsString("id");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return jobId;
}
Creates one temporary job on the Mobile Center server and returns its id.
9,201
/**
 * Fetches one Mobile Center job by its UUID.
 *
 * Fix: the original called {@code loginToMC(...).toJSONString()} before checking
 * for null, so a failed login threw an NPE instead of being handled.
 *
 * @return the job's "data" JSON object with icons stripped, or the result of
 *         {@code removeIcon(null)} when login or the lookup failed
 */
public JSONObject getJobById(String mcUrl, String mcUserName, String mcPassword, String proxyAddress,
                             String proxyUsername, String proxyPassword, String jobUUID) {
    JSONObject jobJsonObject = null;
    String hp4mSecret = null;
    String jsessionId = null;
    // Null-check the login result BEFORE serializing it.
    JSONObject loginResult = loginToMC(mcUrl, mcUserName, mcPassword, proxyAddress, proxyUsername, proxyPassword);
    try {
        if (loginResult != null) {
            JSONObject jsonObject = (JSONObject) JSONValue.parseStrict(loginResult.toJSONString());
            hp4mSecret = (String) jsonObject.get(Constants.LOGIN_SECRET);
            jsessionId = (String) jsonObject.get(Constants.JSESSIONID);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (argumentsCheck(jobUUID, hp4mSecret, jsessionId)) {
        try {
            Map<String, String> headers = new HashMap<String, String>();
            headers.put(Constants.LOGIN_SECRET, hp4mSecret);
            headers.put(Constants.COOKIE, Constants.JESEEIONEQ + jsessionId);
            // NOTE(review): constant is spelled GET_JOB_UEL in Constants; rename there if possible.
            HttpResponse response = HttpUtils.get(
                    HttpUtils.setProxyCfg(proxyAddress, proxyUsername, proxyPassword),
                    mcUrl + Constants.GET_JOB_UEL + jobUUID, headers, null);
            if (response != null && response.getJsonObject() != null) {
                jobJsonObject = response.getJsonObject();
            }
            if (jobJsonObject != null) {
                jobJsonObject = (JSONObject) jobJsonObject.get(Constants.DATA);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return removeIcon(jobJsonObject);
}
get one job by id
9,202
/**
 * Returns the list backing the {@code suites} property, lazily creating an
 * empty list on first access so callers never see null.
 */
public List<Result.Suites> getSuites() {
    if (this.suites == null) {
        this.suites = new ArrayList<Result.Suites>();
    }
    return this.suites;
}
Gets the value of the suites property .
9,203
/**
 * Finds the configured MC server whose name matches the one selected in the
 * run-from-file model.
 *
 * @return the matching server settings, or null when nothing is selected or
 *         no configured server matches
 */
public MCServerSettingsModel getMCServerSettingsModel() {
    // Guard clause: no selection means no possible match.
    if (this.runFromFileModel == null || runFromFileModel.getMcServerName() == null) {
        return null;
    }
    String wantedName = runFromFileModel.getMcServerName();
    for (MCServerSettingsModel candidate : getDescriptor().getMcServers()) {
        if (candidate.getMcServerName() != null && wantedName.equals(candidate.getMcServerName())) {
            return candidate;
        }
    }
    return null;
}
Gets mc server settings model .
9,204
/**
 * Builds the percentile-transaction-response-time dataset for one scenario and
 * attaches it under the "percentileTransaction" key (skipped when empty).
 *
 * @param scenarioResults   scenario name mapped to its accumulated results
 * @param scenarioGraphData target JSON object the graph set is added to
 */
static void constructPercentileTransactionGraph(Map.Entry<String, LrProjectScenarioResults> scenarioResults,
                                                JSONObject scenarioGraphData) {
    LrProjectScenarioResults results = scenarioResults.getValue();
    Map<Integer, TreeMap<String, PercentileTransactionWholeRun>> perBuild =
            results.getPercentileTransactionResults();
    JSONObject graphSet = extractPercentileTransactionSet(perBuild, results.getTransactions());
    // No labels means there is nothing to chart for this scenario.
    if (graphSet.getJSONArray(LABELS).isEmpty()) {
        return;
    }
    graphSet.put(TITLE, PERCENTILE_TRANSACTION_RESPONSE_TIME);
    graphSet.put(X_AXIS_TITLE, BUILD_NUMBER);
    graphSet.put(Y_AXIS_TITLE, TRANSACTIONS_RESPONSE_TIME_SECONDS);
    graphSet.put(DESCRIPTION, PRECENTILE_GRAPH_DESCRIPTION);
    scenarioGraphData.put("percentileTransaction", graphSet);
}
creates dataset for Percentile transaction graph
9,205
/**
 * Builds the average-transaction-response-time dataset for one scenario and
 * attaches it under the "averageTransactionResponseTime" key (skipped when empty).
 *
 * Fix: corrected the user-visible title typo "Response TIme" -> "Response Time".
 *
 * @param scenarioResults   scenario name mapped to its accumulated results
 * @param scenarioGraphData target JSON object the graph set is added to
 */
static void constructAvgTransactionGraph(Map.Entry<String, LrProjectScenarioResults> scenarioResults,
                                         JSONObject scenarioGraphData) {
    Map<Integer, TreeMap<String, AvgTransactionResponseTime>> avgTransactionResponseTimeResults =
            scenarioResults.getValue().getAvgTransactionResponseTimeResults();
    JSONObject avgTransactionResponseTimeGraphSet =
            extractAvgTrtData(avgTransactionResponseTimeResults, scenarioResults.getValue().getTransactions());
    if (!avgTransactionResponseTimeGraphSet.getJSONArray(LABELS).isEmpty()) {
        avgTransactionResponseTimeGraphSet.put(TITLE, "Average Transaction Response Time");
        avgTransactionResponseTimeGraphSet.put(X_AXIS_TITLE, "Build number");
        avgTransactionResponseTimeGraphSet.put(Y_AXIS_TITLE, "Time (Sec.)");
        avgTransactionResponseTimeGraphSet.put(DESCRIPTION,
                "Displays the average time taken to perform transactions during each second of the load test."
                        + " This graph helps you determine whether the performance of the server is within "
                        + "acceptable minimum and maximum transaction performance time ranges defined for your "
                        + "system.");
        scenarioGraphData.put("averageTransactionResponseTime", avgTransactionResponseTimeGraphSet);
    }
}
Construct avg transaction graph .
9,206
/**
 * Builds the total-errors-per-second dataset for one scenario and attaches it
 * under the "errorPerSecResults" key (skipped when empty).
 *
 * @param scenarioResults   scenario name mapped to its accumulated results
 * @param scenarioGraphData target JSON object the graph set is added to
 */
static void constructErrorGraph(Map.Entry<String, LrProjectScenarioResults> scenarioResults,
                                JSONObject scenarioGraphData) {
    Map<Integer, TimeRangeResult> errPerSec = scenarioResults.getValue().getErrPerSecResults();
    JSONObject graphSet = extractTimeRangeResult(errPerSec);
    if (graphSet.getJSONArray(LABELS).isEmpty()) {
        return; // nothing to chart
    }
    graphSet.put(TITLE, "Total errors per second");
    graphSet.put(X_AXIS_TITLE, "Build number");
    graphSet.put(Y_AXIS_TITLE, "Errors");
    graphSet.put(DESCRIPTION, "");
    scenarioGraphData.put("errorPerSecResults", graphSet);
}
Construct error graph .
9,207
/**
 * Builds the average-throughput-per-second dataset for one scenario and
 * attaches it under the "averageThroughput" key (skipped when empty).
 *
 * @param scenarioResults   scenario name mapped to its accumulated results
 * @param scenarioGraphData target JSON object the graph set is added to
 */
static void constructAverageThroughput(Map.Entry<String, LrProjectScenarioResults> scenarioResults,
                                       JSONObject scenarioGraphData) {
    Map<Integer, WholeRunResult> avgThroughput = scenarioResults.getValue().getAverageThroughputResults();
    JSONObject graphSet = extractWholeRunSlaResult(avgThroughput, "Bytes/Sec");
    if (graphSet.getJSONArray(LABELS).isEmpty()) {
        return; // nothing to chart
    }
    graphSet.put(TITLE, "Average Throughput per second");
    graphSet.put(X_AXIS_TITLE, "Build number");
    graphSet.put(Y_AXIS_TITLE, "Bytes");
    graphSet.put(DESCRIPTION,
            " Displays the amount of throughput (in bytes) on the Web server during the load test. "
                    + "Throughput represents the amount of data that the Vusers received from the server at"
                    + " any given second. This graph helps you to evaluate the amount of load Vusers "
                    + "generate, in terms of server throughput.\n");
    scenarioGraphData.put("averageThroughput", graphSet);
}
Construct average throughput .
9,208
/**
 * Builds the total-throughput dataset for one scenario and attaches it under
 * the "totalThroughput" key (skipped when empty).
 *
 * @param scenarioResults   scenario name mapped to its accumulated results
 * @param scenarioGraphData target JSON object the graph set is added to
 */
static void constructTotalThroughputGraph(Map.Entry<String, LrProjectScenarioResults> scenarioResults,
                                          JSONObject scenarioGraphData) {
    // NOTE(review): getter is spelled getTotalThroughtputResults upstream.
    Map<Integer, WholeRunResult> totalThroughput = scenarioResults.getValue().getTotalThroughtputResults();
    JSONObject graphSet = extractWholeRunSlaResult(totalThroughput, "Bytes");
    if (graphSet.getJSONArray(LABELS).isEmpty()) {
        return; // nothing to chart
    }
    graphSet.put(TITLE, "Total Throughput");
    graphSet.put(X_AXIS_TITLE, "Build number");
    graphSet.put(Y_AXIS_TITLE, "Bytes");
    graphSet.put(DESCRIPTION,
            " Displays the amount of throughput (in bytes) on the Web server during the load test. "
                    + "Throughput represents the amount of data that the Vusers received from the server at"
                    + " any given second. This graph helps you to evaluate the amount of load Vusers "
                    + "generate, in terms of server throughput.\n");
    scenarioGraphData.put("totalThroughput", graphSet);
}
Construct total throughput graph .
9,209
/**
 * Builds the average-hits-per-second dataset for one scenario and attaches it
 * under the "avgHitsPerSec" key (skipped when empty).
 *
 * @param scenarioResults   scenario name mapped to its accumulated results
 * @param scenarioGraphData target JSON object the graph set is added to
 */
static void constructAvgHitsGraph(Map.Entry<String, LrProjectScenarioResults> scenarioResults,
                                  JSONObject scenarioGraphData) {
    Map<Integer, WholeRunResult> avgHits = scenarioResults.getValue().getAverageHitsPerSecondResults();
    JSONObject graphSet = extractWholeRunSlaResult(avgHits, "Hits/Sec");
    if (graphSet.getJSONArray(LABELS).isEmpty()) {
        return; // nothing to chart
    }
    graphSet.put(TITLE, "Average Hits per Second");
    graphSet.put(X_AXIS_TITLE, "Build number");
    graphSet.put(Y_AXIS_TITLE, "Hits");
    graphSet.put(DESCRIPTION,
            "Displays the number of hits made on the Web server by Vusers "
                    + "during each second of the load test. This graph helps you evaluate the amount of load "
                    + "Vusers" + " " + "generate, in terms of the number of hits.");
    scenarioGraphData.put("avgHitsPerSec", graphSet);
}
Construct avg hits graph .
9,210
/**
 * Builds the total-hits dataset for one scenario and attaches it under the
 * "totalHits" key (skipped when empty).
 *
 * NOTE(review): the description text is identical to the per-second hits
 * graph; possibly a copy-paste — confirm the intended wording.
 *
 * @param scenarioResults   scenario name mapped to its accumulated results
 * @param scenarioGraphData target JSON object the graph set is added to
 */
static void constructTotalHitsGraph(Map.Entry<String, LrProjectScenarioResults> scenarioResults,
                                    JSONObject scenarioGraphData) {
    Map<Integer, WholeRunResult> totalHits = scenarioResults.getValue().getTotalHitsResults();
    JSONObject graphSet = extractWholeRunSlaResult(totalHits, "Hits");
    if (graphSet.getJSONArray(LABELS).isEmpty()) {
        return; // nothing to chart
    }
    graphSet.put(TITLE, "Total Hits");
    graphSet.put(X_AXIS_TITLE, "Build number");
    graphSet.put(Y_AXIS_TITLE, "Hits");
    graphSet.put(DESCRIPTION,
            "Displays the number of hits made on the Web server by Vusers "
                    + "during each second of the load test. This graph helps you evaluate the amount of load "
                    + "Vusers" + " " + "generate, in terms of the number of hits.");
    scenarioGraphData.put("totalHits", graphSet);
}
Construct total hits graph .
9,211
/**
 * Converts a digest byte array to its lowercase two-digit-per-byte hex string,
 * independent of any charset.
 *
 * @param b raw digest bytes
 * @return lowercase hex representation, two characters per input byte
 */
private static String digestToString(byte[] b) {
    StringBuilder hex = new StringBuilder(128);
    for (byte value : b) {
        // %02x yields the same zero-padded lowercase pair as the
        // (value & 0xff) + 0x100 base-16 substring trick.
        hex.append(String.format("%02x", value & 0xff));
    }
    return hex.toString();
}
This method converts a byte array to a hex string, regardless of the charset.
9,212
/**
 * Returns a JSON array with one {"ScenarioName": name} object per scenario
 * in the project results.
 */
public JSONArray getScenarioList() {
    JSONArray scenarios = new JSONArray();
    for (String scenarioName : _projectResult.getScenarioResults().keySet()) {
        JSONObject entry = new JSONObject();
        entry.put("ScenarioName", scenarioName);
        scenarios.add(entry);
    }
    return scenarios;
}
Gets scenario list .
9,213
/**
 * Collates graph data per scenario for the whole project. Each scenario key
 * maps to {"scenarioStats": {...}, "scenarioData": {graphs...}}.
 *
 * @return the per-scenario graph dataset; empty object when no results exist
 */
public JSONObject getGraphData() {
    // Guard clause first: no project results means an empty dataset.
    if (_projectResult == null) {
        return new JSONObject();
    }
    JSONObject projectDataSet = new JSONObject();
    for (SortedMap.Entry<String, LrProjectScenarioResults> scenarioResults
            : _projectResult.getScenarioResults().entrySet()) {
        JSONObject scenarioData = new JSONObject();
        scenarioData.put("scenarioStats", new JSONObject());
        JSONObject graphs = new JSONObject();
        // Each helper appends its graph only when it has data.
        LrGraphUtils.constructTotalHitsGraph(scenarioResults, graphs);
        LrGraphUtils.constructAvgHitsGraph(scenarioResults, graphs);
        LrGraphUtils.constructTotalThroughputGraph(scenarioResults, graphs);
        LrGraphUtils.constructAverageThroughput(scenarioResults, graphs);
        LrGraphUtils.constructErrorGraph(scenarioResults, graphs);
        LrGraphUtils.constructAvgTransactionGraph(scenarioResults, graphs);
        LrGraphUtils.constructPercentileTransactionGraph(scenarioResults, graphs);
        scenarioData.put("scenarioData", graphs);
        projectDataSet.put(scenarioResults.getKey(), scenarioData);
    }
    return projectDataSet;
}
Collates graph data per scenario per build for the whole project . Adds the respected graphs with scenario as the key
9,214
/**
 * Returns true when at least one build of the current project carries a
 * PerformanceJobReportAction, i.e. there is something to display.
 */
boolean isVisible() {
    for (Run<?, ?> build : currentProject.getBuilds()) {
        if (build.getAction(PerformanceJobReportAction.class) != null) {
            return true;
        }
    }
    return false;
}
Is visible boolean .
9,215
/**
 * Rebuilds the cached per-project LoadRunner results from all finished builds.
 *
 * Resets {@code _projectResult} and {@code _workedBuilds}, then folds every
 * completed build that carries a PerformanceJobReportAction into per-scenario
 * accumulators. Synchronized because it mutates shared caches.
 */
public synchronized void getUpdatedData() {
    // Cheap exit when the cache is already current.
    if (!isUpdateDataNeeded()) {
        return;
    }
    // Full rebuild: discard previous accumulation state.
    this._projectResult = new ProjectLrResults();
    _workedBuilds = new ArrayList<Integer>();
    RunList<? extends Run> projectBuilds = currentProject.getBuilds();
    for (Run run : projectBuilds) {
        PerformanceJobReportAction performanceJobReportAction = run.getAction(PerformanceJobReportAction.class);
        // Skip builds without LR results, in-flight builds, and duplicates.
        if (performanceJobReportAction == null) {
            continue;
        }
        if (run.isBuilding()) {
            continue;
        }
        int runNumber = run.getNumber();
        if (_workedBuilds.contains(runNumber)) {
            continue;
        }
        _workedBuilds.add(runNumber);
        LrJobResults jobLrResult = performanceJobReportAction.getLrResultBuildDataset();
        // Merge each scenario of this build into the project-level accumulator.
        for (Map.Entry<String, JobLrScenarioResult> runResult : jobLrResult.getLrScenarioResults().entrySet()) {
            if (!_projectResult.getScenarioResults().containsKey(runResult.getKey())) {
                _projectResult.addScenario(new LrProjectScenarioResults(runResult.getKey()));
            }
            LrProjectScenarioResults lrProjectScenarioResults =
                    _projectResult.getScenarioResults().get(runResult.getKey());
            // Cap the number of builds contributing to the graphs.
            if (lrProjectScenarioResults.getBuildCount() > MAX_DISPLAY_BUILDS) {
                continue;
            }
            lrProjectScenarioResults.incBuildCount();
            JobLrScenarioResult scenarioRunResult = runResult.getValue();
            for (GoalResult goalResult : scenarioRunResult.scenarioSlaResults) {
                scenarioGoalResult(runNumber, lrProjectScenarioResults, goalResult);
            }
            // Fold per-build statistics into the scenario aggregates.
            // NOTE(review): helper is spelled joinSceanrioConnectionsStats upstream.
            joinSceanrioConnectionsStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
            joinVUserScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
            joinTransactionScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
            joinDurationStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
        }
    }
}
Gets updated data .
9,216
/**
 * Fetches the details of the configured MC job as JSON.
 *
 * @return the job JSON, or null when the stored credentials are blank
 */
public JSONObject getJobDetails(String mcUrl, String proxyAddress, String proxyUserName, String proxyPassword) {
    // Without credentials the lookup cannot succeed; bail out early.
    boolean missingCredentials =
            StringUtils.isBlank(fsUserName) || StringUtils.isBlank(fsPassword.getPlainText());
    if (missingCredentials) {
        return null;
    }
    return JobConfigurationProxy.getInstance()
            .getJobById(mcUrl, fsUserName, fsPassword.getPlainText(),
                    proxyAddress, proxyUserName, proxyPassword, fsJobId);
}
Get proxy details json object .
9,217
/**
 * Runs the native Smith-Waterman implementation and wraps its output.
 *
 * @return the trimmed cigar string produced by the native code plus the
 *         alignment offset it reports
 */
public SWNativeAlignerResult align(byte[] refArray, byte[] altArray, SWParameters parameters,
                                   SWOverhangStrategy overhangStrategy) {
    final int strategy = getStrategy(overhangStrategy);
    // Caller-allocated output buffer for the native call; sized at twice the
    // longer input so the cigar always fits.
    final byte[] cigarBuffer = new byte[2 * Integer.max(refArray.length, altArray.length)];
    final int offset = alignNative(refArray, altArray, cigarBuffer,
            parameters.getMatchValue(), parameters.getMismatchPenalty(),
            parameters.getGapOpenPenalty(), parameters.getGapExtendPenalty(), strategy);
    // trim() strips the unused tail of the buffer (bytes <= space).
    return new SWNativeAlignerResult(new String(cigarBuffer).trim(), offset);
}
Implements the native implementation of SmithWaterman and returns the Cigar String and alignment_offset
9,218
/**
 * Returns an IntelDeflater when the platform supports it (and the requested
 * configuration is usable), otherwise falls back to java.util.zip.Deflater.
 *
 * NOTE(review): when the platform supports IntelDeflater but the combination
 * (level 1, non-gzip) is requested, the fallback warning about lack of support
 * is misleading — confirm the intended message.
 */
public Deflater makeDeflater(final int compressionLevel, final boolean gzipCompatible) {
    // Original condition (level==1 && gzip) || level!=1 simplifies to this.
    boolean usableConfig = gzipCompatible || compressionLevel != 1;
    if (intelDeflaterSupported && usableConfig) {
        return new IntelDeflater(compressionLevel, gzipCompatible);
    }
    logger.warn("IntelDeflater is not supported, using Java.util.zip.Deflater");
    return new Deflater(compressionLevel, gzipCompatible);
}
Returns an IntelDeflater if supported on the platform otherwise returns a Java Deflater
9,219
/**
 * Initializes the native PairHMM with the supplied arguments.
 *
 * Falls back to single-threaded single-precision defaults when {@code args}
 * is null, logs capability information, hands the configuration to the native
 * layer, and reconciles the requested thread count with what OpenMP offers.
 *
 * @param args native PairHMM options; may be null for defaults
 */
public void initialize(PairHMMNativeArguments args) {
    if (args == null) {
        // Defaults: single precision, one thread.
        args = new PairHMMNativeArguments();
        args.useDoublePrecision = false;
        args.maxNumberOfThreads = 1;
    }
    if (!useFpga && gklUtils.isAvx512Supported()) {
        logger.info("Using CPU-supported AVX-512 instructions");
    }
    if (args.useDoublePrecision && useFpga) {
        logger.warn("FPGA PairHMM does not support double precision floating-point. Using AVX PairHMM");
    }
    // NOTE(review): this logs "FTZ is enabled" when getFlushToZero() is FALSE —
    // the condition or the message looks inverted; confirm the intent.
    if (!gklUtils.getFlushToZero()) {
        logger.info("Flush-to-zero (FTZ) is enabled when running PairHMM");
    }
    initNative(ReadDataHolder.class, HaplotypeDataHolder.class, args.useDoublePrecision,
            args.maxNumberOfThreads, useFpga);
    int reqThreads = args.maxNumberOfThreads;
    if (useOmp) {
        // Cap the requested thread count at what OpenMP actually provides.
        int availThreads = gklUtils.getAvailableOmpThreads();
        int maxThreads = Math.min(reqThreads, availThreads);
        logger.info("Available threads: " + availThreads);
        logger.info("Requested threads: " + reqThreads);
        if (reqThreads > availThreads) {
            logger.warn("Using " + maxThreads + " available threads, but " + reqThreads + " were requested");
        }
    } else {
        if (reqThreads != 1) {
            logger.warn("Ignoring request for " + reqThreads + " threads; not using OpenMP implementation");
        }
    }
}
Initialize native PairHMM with the supplied args .
9,220
/**
 * Adds the default SBT managed-sources location (target/src_managed) to the
 * Maven project's compile source roots. No-op for pom packaging.
 */
public void execute() {
    if ("pom".equals(project.getPackaging())) {
        return;
    }
    File managedPath = new File(project.getBuild().getDirectory(), "src_managed");
    String managedPathStr = managedPath.getAbsolutePath();
    // Avoid registering the same root twice on repeated executions.
    boolean alreadyRegistered = project.getCompileSourceRoots().contains(managedPathStr);
    if (!alreadyRegistered) {
        project.addCompileSourceRoot(managedPathStr);
        getLog().debug("Added source directory: " + managedPathStr);
    }
}
Adds default SBT managed sources location to Maven project .
9,221
/**
 * Adds the default Scala source locations (src/main/scala, src/test/scala) to
 * the Maven project's compile and test-compile source roots when the
 * directories exist. No-op for pom packaging.
 */
public void execute() {
    if ("pom".equals(project.getPackaging())) {
        return;
    }
    File baseDir = project.getBasedir();
    // Main sources.
    File mainScala = new File(baseDir, "src/main/scala");
    if (mainScala.isDirectory()) {
        String path = mainScala.getAbsolutePath();
        if (!project.getCompileSourceRoots().contains(path)) {
            project.addCompileSourceRoot(path);
            getLog().debug("Added source directory: " + path);
        }
    }
    // Test sources.
    File testScala = new File(baseDir, "src/test/scala");
    if (testScala.isDirectory()) {
        String path = testScala.getAbsolutePath();
        if (!project.getTestCompileSourceRoots().contains(path)) {
            project.addTestCompileSourceRoot(path);
            getLog().debug("Added test source directory: " + path);
        }
    }
}
Adds default Scala sources locations to Maven project .
9,222
/**
 * Derives the incremental-compilation cache directory from a classes
 * directory, by substituting the classes folder name with the cache name
 * (test-classes first, so plain "classes" substitution does not clobber it).
 */
public static File getCacheDirectory(File classesDirectory) {
    String cacheName = classesDirectory.getName()
            .replace(TEST_CLASSES, CACHE)
            .replace(CLASSES, CACHE);
    return new File(classesDirectory.getParentFile(), cacheName);
}
Returns directory for incremental compilation cache files .
9,223
/**
 * Converts scalac {@code Problem}s into the plugin's CompilationProblem
 * representation, filling absent position parts with -1 / null.
 */
private CompilationProblem[] getScalacProblems(Problem[] problems) {
    CompilationProblem[] converted = new CompilationProblem[problems.length];
    for (int i = 0; i < problems.length; i++) {
        Problem problem = problems[i];
        Position position = problem.position();
        Maybe<Integer> line = position.line();
        Maybe<Integer> offset = position.offset();
        Maybe<Integer> pointer = position.pointer();
        Maybe<File> sourceFile = position.sourceFile();
        // Absent optional parts map to -1 (numbers) or null (file).
        int lineNo = line.isDefined() ? line.get().intValue() : -1;
        int offsetNo = offset.isDefined() ? offset.get().intValue() : -1;
        int pointerNo = pointer.isDefined() ? pointer.get().intValue() : -1;
        File file = sourceFile.isDefined() ? sourceFile.get() : null;
        SourcePosition sp =
                new DefaultSourcePosition(lineNo, position.lineContent(), offsetNo, pointerNo, file);
        converted[i] = new DefaultCompilationProblem(problem.category(), problem.message(), sp,
                problem.severity().name());
    }
    return converted;
}
scalac problems conversion
9,224
/**
 * Example: submits a Spark command as a Scala program, waits for the result,
 * prints the results to stdout and the command logs to stderr.
 */
private static void submitScalaProgram(QdsClient client) throws Exception {
    final String sampleProgram = "println(\"hello world\")";
    SparkCommandBuilder builder = client.command().spark();
    builder.name("spark-scala-test");
    builder.program(sampleProgram);
    builder.language("scala");
    CommandResponse commandResponse = builder.invoke().get();
    // Block until the command finishes and its result is available.
    ResultValue resultValue = new ResultLatch(client, commandResponse.getId()).awaitResult();
    System.out.println(resultValue.getResults());
    String logs = client.command().logs("" + commandResponse.getId()).invoke().get();
    System.err.println(logs);
}
An example of submitting a Spark command as a Scala program. Similarly, we can submit a Spark command as a SQL query, an R program, or a Java program.
9,225
/**
 * Example: submits a Spark command as a SQL query, waits for the result,
 * prints the results to stdout and the command logs to stderr.
 */
private static void submitSQLQuery(QdsClient client) throws Exception {
    final String sampleSqlQuery = "select * from default_qubole_airline_origin_destination limit 100";
    SparkCommandBuilder builder = client.command().spark();
    builder.name("spark-sql-test");
    builder.sql(sampleSqlQuery);
    CommandResponse commandResponse = builder.invoke().get();
    // Block until the command finishes and its result is available.
    ResultValue resultValue = new ResultLatch(client, commandResponse.getId()).awaitResult();
    System.out.println(resultValue.getResults());
    String logs = client.command().logs("" + commandResponse.getId()).invoke().get();
    System.err.println(logs);
}
An example of submitting Spark Command as a SQL query .
9,226
/**
 * Second tree-layout walk: assigns the final position of node {@code v} and
 * recurses into its children. Unlike the original algorithm, level start and
 * index are passed down explicitly.
 *
 * @param v          node being positioned
 * @param m          accumulated modifier added to the node's preliminary x
 * @param level      depth of {@code v} in the tree
 * @param levelStart coordinate where this level begins on the level axis
 */
private void secondWalk(TreeNode v, double m, int level, double levelStart) {
    double levelChangeSign = getLevelChangeSign();
    boolean levelChangeOnYAxis = isLevelChangeInYAxis();
    double levelSize = getSizeOfLevel(level);
    // x along the sibling axis: preliminary position plus accumulated modifier.
    double x = getPrelim(v) + m;
    double y;
    AlignmentInLevel alignment = configuration.getAlignmentInLevel();
    // y within the level band depends on the configured alignment.
    if (alignment == AlignmentInLevel.Center) {
        y = levelStart + levelChangeSign * (levelSize / 2);
    } else if (alignment == AlignmentInLevel.TowardsRoot) {
        y = levelStart + levelChangeSign * (getNodeThickness(v) / 2);
    } else {
        y = levelStart + levelSize - levelChangeSign * (getNodeThickness(v) / 2);
    }
    // When levels advance along x instead of y, swap the axes.
    if (!levelChangeOnYAxis) {
        double t = x;
        x = y;
        y = t;
    }
    positions.put(v, new NormalizedPosition(x, y));
    updateBounds(v, x, y);
    if (!tree.isLeaf(v)) {
        // Next level starts one level size plus the configured gap further on.
        double nextLevelStart = levelStart
                + (levelSize + configuration.getGapBetweenLevels(level + 1)) * levelChangeSign;
        for (TreeNode w : tree.getChildren(v)) {
            secondWalk(w, m + getMod(v), level + 1, nextLevelStart);
        }
    }
}
In contrast to the original algorithm, we also pass in extra level information.
9,227
/**
 * Prints a dump of the whole tree to the given stream, starting from the
 * root at depth 0, using the given dump configuration.
 */
public void dumpTree(PrintStream printStream, DumpConfiguration dumpConfiguration) {
    dumpTree(printStream, tree.getRoot(), 0, dumpConfiguration);
}
Prints a dump of the tree to the given printStream using each node's toString method.
9,228
/**
 * Creates a sample SVG file (demo.svg) in the working directory and, when run
 * with the "-view" argument, attempts to open it in a platform viewer.
 *
 * Fix: replaced the manual try/finally writer close with try-with-resources,
 * which also propagates a failure from close() instead of losing it.
 *
 * @throws IOException when the file cannot be written
 */
public static void main(String[] args) throws IOException {
    String s = doc(svg(160, 200,
            rect(0, 0, 160, 200, "fill:red;")
                    + svg(10, 10, 100, 100, rect(0, 0, 100, 100, "fill:orange; stroke:rgb(0,0,0);"))
                    + line(20, 20, 100, 100, "stroke:black; stroke-width:2px;")
                    + line(20, 100, 100, 20, "stroke:black; stroke-width:2px;")
                    + text(10, 140, "font-family:verdana; font-size:20px; font-weight:bold;", "Hello world")));
    File file = new File("demo.svg");
    try (FileWriter w = new FileWriter(file)) {
        w.write(s);
    }
    System.out.println(String.format("File written: %s", file.getAbsolutePath()));
    if (args.length > 0 && args[0].equals("-view")) {
        if (!viewSVG(file)) {
            System.err.println("'-view' not supported on this platform");
        }
    }
}
Creates a sample SVG file demo . svg
9,229
/**
 * Returns a reader over the given results: inline results are wrapped
 * directly, otherwise the result location is streamed from S3.
 */
public Reader getResults(ResultValue resultValue) throws Exception {
    return resultValue.isInline()
            ? new StringReader(resultValue.getResults())
            : readFromS3(resultValue.getResult_location());
}
Return a stream over the given results . If the results are not inline the results will come from S3
9,230
/**
 * Returns true when the command's type is supported, i.e. it is neither
 * NONE nor a composite (workflow) command.
 */
private boolean checkCommandTypeSupported(BaseCommand command) {
    BaseCommand.COMMAND_TYPE type = command.getCommandType();
    return type != BaseCommand.COMMAND_TYPE.NONE
            && type != BaseCommand.COMMAND_TYPE.COMPOSITE;
}
Returns whether the command type is supported, i.e. it is not NONE and not a composite (workflow) command.
9,231
/**
 * Returns a quoted version of the given string, i.e. as a Java string
 * literal. Named control characters use their backslash escapes; other
 * control characters and characters at or above U+0080 are emitted as
 * zero-padded \ uXXXX escapes, so the result is printable ASCII.
 *
 * Fix: StringBuffer replaced by StringBuilder (no synchronization needed for
 * a method-local buffer) and the buffer is presized to the input length.
 *
 * @param s          the string to quote; may be null
 * @param nullResult value returned when {@code s} is null
 * @return the quoted literal, or {@code nullResult} when {@code s} was null
 */
public static String quote(String s, String nullResult) {
    if (s == null) {
        return nullResult;
    }
    StringBuilder result = new StringBuilder(s.length() + 2);
    result.append('"');
    int length = s.length();
    for (int i = 0; i < length; i++) {
        char c = s.charAt(i);
        switch (c) {
            case '\b':
                result.append("\\b");
                break;
            case '\f':
                result.append("\\f");
                break;
            case '\n':
                result.append("\\n");
                break;
            case '\r':
                result.append("\\r");
                break;
            case '\t':
                result.append("\\t");
                break;
            case '\\':
                result.append("\\\\");
                break;
            case '"':
                result.append("\\\"");
                break;
            default:
                if (c < ' ' || c >= '\u0080') {
                    // Unicode escape, zero-padded to four hex digits.
                    String n = Integer.toHexString(c);
                    result.append("\\u");
                    result.append("0000".substring(n.length()));
                    result.append(n);
                } else {
                    result.append(c);
                }
        }
    }
    result.append('"');
    return result.toString();
}
Returns a quoted version of a given string i . e . as a Java String Literal .
9,232
/**
 * Parses a JSON-encoded DOM into a Document. The first array element is the
 * node type and must be a document node or an element node.
 *
 * @throws IllegalStateException when the root node type is anything else
 */
public static Document parse(JsonReader reader) throws IOException, NotImplemented {
    reader.beginArray();
    final int nodeType = reader.nextInt();
    CoreDocumentImpl doc = new DocumentImpl();
    switch (nodeType) {
        case Node.ELEMENT_NODE:
            addInitialElement(reader, doc);
            break;
        case Node.DOCUMENT_NODE:
            addInitialDocNode(doc, reader);
            break;
        default:
            throw new IllegalStateException("Don't know how to handle root node with type " + nodeType);
    }
    reader.endArray();
    return doc;
}
First element must be a document node or element node
9,233
/**
 * Converts a JSON input stream (UTF-8) to an XML Document. Both the JSON
 * reader and the underlying input stream are closed on exit.
 */
public Document toXml(InputStream json) throws IOException, NotImplemented {
    JsonReader jsonReader = null;
    try {
        jsonReader = new JsonReader(new InputStreamReader(json, "utf-8"));
        return parse(jsonReader);
    } catch (UnsupportedEncodingException e) {
        // utf-8 is always available; treat its absence as a broken runtime.
        throw new IllegalStateException(e);
    } finally {
        if (jsonReader != null) {
            jsonReader.close();
        }
        if (json != null) {
            json.close();
        }
    }
}
First element must be a document
9,234
/**
 * Fetches the combined commit status for a ref. The ref can be a SHA, a
 * branch, or a tag.
 */
public CombinedStatus getCombinedStatus(String owner, String repository, String ref) {
    Map<String, Object> uriVariables = new HashMap<>();
    uriVariables.put("owner", owner);
    uriVariables.put("repository", repository);
    uriVariables.put("ref", ref);
    return getRestOperations()
            .exchange(buildUri("/repos/{owner}/{repository}/commits/{ref}/status", uriVariables),
                    HttpMethod.GET, null, CombinedStatus.class)
            .getBody();
}
ref can be SHA branch or tag .
9,235
/**
 * Blocks until the given string shows up in the container log, or throws
 * TimeoutException after {@code waitTime}.
 */
public void waitForLogMessage(final String logSearchString, int waitTime) throws TimeoutException {
    LogChecker check = new LogChecker(this, logSearchString);
    WaitForContainer.waitForCondition(check, waitTime, describe());
}
Blocks and waits until the given string shows up in the container output.
9,236
/**
 * Blocks until the container exits. Docker client failures are rethrown as
 * IllegalStateException; interruption propagates to the caller.
 */
public void waitForExit() throws InterruptedException {
    try {
        dockerClient.waitContainer(container.id());
    } catch (DockerException e) {
        throw new IllegalStateException(e);
    }
}
Block until container exit .
9,237
/**
 * Returns the container's full stdout+stderr log. When trace logging is on,
 * the log is also traced with newlines replaced by '|' for one-line output.
 */
public String getLog() {
    try (LogStream stream = dockerClient.logs(container.id(), LogsParam.stdout(), LogsParam.stderr());) {
        final String fullLog = stream.readFully();
        if (log.isTraceEnabled()) {
            log.trace("{} full log: {}", containerShortId, StringUtils.replace(fullLog, "\n", "|"));
        }
        return fullLog;
    } catch (DockerException | InterruptedException e) {
        throw new IllegalStateException(e);
    }
}
Container log .
9,238
/**
 * Configures whether the stopped container is kept after the test
 * (true = KEEP, false = REMOVE). Returns this builder for chaining.
 */
public DockerRuleBuilder keepContainer(boolean keepContainer) {
    this.stopOptions.setOptions(keepContainer ? StopOption.KEEP : StopOption.REMOVE);
    return this;
}
Keep stopped container after test .
9,239
/**
 * Adds a NAME=VALUE environment variable entry for the container.
 * Returns this builder for chaining.
 */
public DockerRuleBuilder env(String envName, String envValue) {
    String entry = String.format("%s=%s", envName, envValue);
    env.add(entry);
    return this;
}
Set environment variable in the container .
9,240
/**
 * Waits until the given start condition is met, polling every second, or
 * throws TimeoutException after {@code timeoutSeconds}.
 *
 * Fix: on InterruptedException the interrupt flag is now restored with
 * {@code Thread.currentThread().interrupt()} before rethrowing, so callers up
 * the stack can still observe the interruption.
 *
 * @param condition            condition to poll
 * @param timeoutSeconds       how long to wait before timing out
 * @param containerDescription used in the timeout message
 */
static void waitForCondition(final StartConditionCheck condition, int timeoutSeconds,
                             final String containerDescription) throws TimeoutException {
    try {
        log.info("wait for {} started", condition.describe());
        new WaitForUnit(TimeUnit.SECONDS, timeoutSeconds, TimeUnit.SECONDS, 1,
                new WaitForUnit.WaitForCondition() {
                    public boolean isConditionMet() {
                        return condition.check();
                    }

                    public String timeoutMessage() {
                        return String.format("timeout waiting for %s in container %s",
                                condition.describe(), containerDescription);
                    }
                }).startWaiting();
        log.info("wait for {} - condition met", condition.describe());
    } catch (InterruptedException e) {
        // Restore the interrupt status before converting the exception.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(
                String.format("Interrupted while waiting for %s", condition.describe()), e);
    }
}
Wait till all given conditions are met .
9,241
/**
 * Updates a client's stored data via the REST API. Only attributes present
 * on the given client are changed.
 */
public void update(Client client) {
    RestfulUtils.update(ClientService.PATH, client, Client.class, super.httpClient);
}
This function updates the data of a client. To change only a specific attribute, set that attribute in the update request; all other attributes that should not be edited are omitted. You can only edit the description, email, and credit card. The subscription cannot be changed by updating the client data — that has to be done via the subscription call.
9,242
/**
 * Deletes a client via the REST API; the client's transactions are not
 * deleted.
 */
public void delete(Client client) {
    RestfulUtils.delete(ClientService.PATH, client, Client.class, super.httpClient);
}
This function deletes a client but its transactions are not deleted .
9,243
/**
 * Deletes a preauthorization via the REST API.
 */
public void delete(final Preauthorization preauthorization) {
    RestfulUtils.delete(PreauthorizationService.PATH, preauthorization, Preauthorization.class,
            super.httpClient);
}
This function deletes a preauthorization .
9,244
/**
 * Removes an offer.
 *
 * @param offer                   offer to remove
 * @param removeWithSubscriptions whether attached subscriptions are removed
 *                                as well (sent as the
 *                                "remove_with_subscriptions" query parameter)
 */
public void delete(Offer offer, boolean removeWithSubscriptions) {
    ParameterMap<String, String> params = new ParameterMap<String, String>();
    params.add("remove_with_subscriptions", String.valueOf(removeWithSubscriptions));
    RestfulUtils.delete(OfferService.PATH, offer, params, Offer.class, super.httpClient);
}
Remove an offer .
9,245
/**
 * Sets the "id" property of this schema's backing JSON object.
 * The value should be a URI.
 *
 * @param schemaId schema identifier (should be a URI)
 * @return this schema, cast to the caller's expected subtype (unchecked)
 */
public <O extends Schema> O setId(String schemaId) {
    getJson().put("id", schemaId);
    return (O) this;
}
Should be URI
9,246
/**
 * Matcher verifying that a string element's length does not exceed the
 * given maximum. Non-string elements always match.
 *
 * @param value maximum allowed number of characters
 * @return the matcher
 */
public static Matcher<JsonElement> withCharsLessOrEqualTo(final int value) {
    return new TypeSafeDiagnosingMatcher<JsonElement>() {
        protected boolean matchesSafely(JsonElement item, Description mismatchDescription) {
            // only strings are length-checked; everything else passes
            if (item.isString() && item.asString().length() > value) {
                mismatchDescription.appendText("String length more than maximum value: " + value);
                return false;
            }
            return true;
        }

        public void describeTo(Description description) {
            description.appendText("String maximum length");
        }
    };
}
== > STRING == >
9,247
/**
 * Matcher verifying that an element's JSON type name equals the expected one.
 *
 * @param type expected JSON type name
 * @return the matcher
 */
public static Matcher<JsonElement> isOfType(final String type) {
    return new TypeSafeDiagnosingMatcher<JsonElement>() {
        protected boolean matchesSafely(JsonElement item, Description mismatchDescription) {
            // guard-style: report the mismatching type, then fail
            if (!type.equals(item.getJsonType())) {
                mismatchDescription.appendText(", mismatch type '" + item.getJsonType() + "'");
                return false;
            }
            return true;
        }

        public void describeTo(Description description) {
            description.appendText("\nMatch to type: " + type);
        }
    };
}
== > COMMON == >
9,248
/**
 * Matcher verifying that every item of a JSON array validates against the
 * given validator. Non-array elements always match.
 *
 * @param validator validator applied to each array item
 * @return the matcher
 */
public static Matcher<JsonElement> areItemsValid(final Validator validator) {
    return new TypeSafeDiagnosingMatcher<JsonElement>() {
        protected boolean matchesSafely(JsonElement item, Description mismatchDescription) {
            // only arrays are checked; everything else passes
            if (!item.isJsonArray()) return true;
            for (int i = 0; i < item.asJsonArray().length(); i++) {
                StringBuilder sb = new StringBuilder();
                // the validator reports its failure details into sb
                if (!validator.validate(item.asJsonArray().opt(i), sb)) {
                    mismatchDescription.appendText("item at pos: " + i + ", does not validate by validator " + validator.getTitle()).appendText("\nDetails: ").appendText(sb.toString());
                    return false;
                }
            }
            return true;
        }

        public void describeTo(Description description) {
            description.appendText("are array items valid");
        }
    };
}
== > ARRAY == >
9,249
/**
 * Matcher verifying that a JSON object has at most the given number of
 * properties. Non-object elements always match.
 *
 * @param maxProperties maximum allowed property count
 * @return the matcher
 */
public static Matcher<JsonElement> maxProperties(final int maxProperties) {
    return new TypeSafeDiagnosingMatcher<JsonElement>() {
        protected boolean matchesSafely(JsonElement item, Description mismatchDescription) {
            // only objects are checked; everything else passes
            if (item.isJsonObject() && item.asJsonObject().length() > maxProperties) {
                mismatchDescription.appendText("properties in Json object more than defined");
                return false;
            }
            return true;
        }

        public void describeTo(Description description) {
            description.appendText("object properties max count");
        }
    };
}
== > OBJECT == >
9,250
/**
 * Wraps an arbitrary Java object into the corresponding JsonElement type:
 * collections/arrays become JsonArray, maps become JsonObject, wrappers
 * become the matching JSON scalar, and anything else falls back to its
 * toString() representation as a JsonString.
 *
 * @param o object to wrap; null yields null
 * @return the wrapping JsonElement, or null for null input
 * @throws JsonException propagated from the JsonXXX constructors
 */
public static JsonElement wrap(Object o) throws JsonException {
    if (o == null) {
        return null;
    }
    // already a JsonElement (or wraps one): return the element itself
    if (o instanceof JsonElement) {
        return (JsonElement) o;
    }
    if (o instanceof ElementWrapper) {
        return ((ElementWrapper) o).getJson();
    }
    if (o instanceof Collection) {
        return new JsonArray((Collection) o);
    } else if (o.getClass().isArray()) {
        return new JsonArray(o);
    }
    if (o instanceof Map) {
        return new JsonObject((Map) o);
    }
    if (o instanceof Boolean) {
        return new JsonBoolean((Boolean) o);
    }
    if (o instanceof Number) {
        return new JsonNumber((Number) o);
    }
    if (o instanceof String) {
        return new JsonString((String) o);
    }
    if (o instanceof Character) {
        return new JsonString(Character.toString((Character) o));
    }
    if (o instanceof ByteBuffer) {
        // NOTE(review): asCharBuffer() reads the buffer as 16-bit chars — confirm intended encoding
        return new JsonString(((ByteBuffer) o).asCharBuffer().toString());
    }
    // fallback: string representation of any other type
    return new JsonString(o.toString());
}
Wraps the given object if to JsonXXX object .
9,251
/**
 * Fetches a schema from the resolved URI, optionally descending into a
 * fragment given as a "/"-separated pointer. Accepts only URIs that are
 * absolute after resolution/conversion.
 *
 * @param targetUri  URI to fetch, possibly relative
 * @param srcOrigUri original source URI used for resolution
 * @param srcId      source schema id used for resolution
 * @return the fetched Schema, or null on I/O or parse failure
 * @throws RuntimeException if the resolved URI is not absolute
 */
public Schema fetch(URI targetUri, URI srcOrigUri, URI srcId) {
    Schema res = null;
    URI schemaUri = convertUri(resolveUri(targetUri, srcOrigUri, srcId));
    if (!schemaUri.isAbsolute())
        throw new RuntimeException("Json Schema Fetcher works only with absolute URIs");
    try {
        String fragment = schemaUri.getFragment();
        JsonObject schemaJson = JsonElement.readFrom(new InputStreamReader(schemaUri.toURL().openStream())).asJsonObject();
        if (fragment != null && !fragment.trim().isEmpty()) {
            // walk down the fragment pointer, one object key per non-empty segment
            String[] pointers = fragment.split("/");
            for (String pointer : pointers) {
                if (pointer != null && !pointer.trim().isEmpty()) {
                    schemaJson = schemaJson.getJsonObject(pointer);
                }
            }
        }
        String version = schemaJson.optString("$schema", "");
        // NOTE(review): both branches build a SchemaV4 — only V4 is effectively supported here
        if (version.equals(Schema.VER_4)) {
            res = new SchemaV4().setSchemaFetcher(this).setOrigSrc(schemaUri).wrap(schemaJson);
        } else {
            res = new SchemaV4().setSchemaFetcher(this).setOrigSrc(schemaUri).wrap(schemaJson);
        }
    } catch (IOException e) {
        // NOTE(review): failures are swallowed and null is returned to the caller
        e.printStackTrace();
    } catch (JsonException e) {
        e.printStackTrace();
    }
    return res;
}
accepts only absolute URI or converted absolute URI
9,252
/**
 * Tries to fetch the meta schema at the given URI and, on success, registers
 * its default validator with this schema's validators.
 *
 * @param jsonSchemaUri URI of the meta schema; null yields null
 * @return the fetched MetaInfo, or null if fetching failed
 */
private MetaInfo tryFetchMetaInfo(URI jsonSchemaUri) {
    if (jsonSchemaUri == null) return null;
    try {
        metaInfo = doFetchMetaInfo(jsonSchemaUri);
        Validator validator = metaInfo.getDefaultValidator();
        if (validator != null) {
            getValidators().add(validator);
        }
    } catch (Exception ex) {
        // best-effort: any failure simply means no meta info is available
        return null;
    }
    return metaInfo;
}
Tries to fetch a schema and add the default Schema validator for it
9,253
/**
 * Encodes the number as a JSON string.
 *
 * @param number number to encode, must be non-null
 * @return the JSON representation
 * @throws JsonException if number is null
 */
public static String numberToString(Number number) throws JsonException {
    if (number == null) {
        throw new JsonException("Number must be non-null");
    }
    // -0.0 must keep its sign, which integer conversion below would lose
    if (number.equals(NEGATIVE_ZERO)) {
        return "-0";
    }
    final long asLong = number.longValue();
    final double asDouble = number.doubleValue();
    // integral values are printed without a trailing ".0"
    if (asDouble == (double) asLong) {
        return Long.toString(asLong);
    }
    return number.toString();
}
Encodes the number as a Json string .
9,254
/**
 * Merges another JsonObject into this one. Elements already present here are
 * kept, except that when both sides hold a JsonObject under the same key,
 * those nested objects are merged recursively. The argument is not modified.
 *
 * @param another object to merge from
 * @return this object, for chaining
 */
public JsonObject merge(JsonObject another) {
    for (Map.Entry<String, JsonElement> anotherEntry : another) {
        JsonElement curr = this.opt(anotherEntry.getKey());
        if (curr == null) {
            // key absent here: copy the entry over
            try {
                this.put(anotherEntry.getKey(), anotherEntry.getValue());
            } catch (JsonException e) {
                // NOTE(review): put failure is swallowed; the entry is silently skipped
                e.printStackTrace();
            }
        } else if (curr.isJsonObject() && anotherEntry.getValue().isJsonObject()) {
            // both sides are objects: merge recursively
            curr.asJsonObject().merge(anotherEntry.getValue().asJsonObject());
        }
    }
    return this;
}
Merge Json Object with another Json Object . It does not change element of another with the same name exists . However if the element is Json Object then it will go down and merge that object .
9,255
/**
 * Forces both the write cache and the delete cache (when enabled and
 * initialized) to flush any pending triples to the server.
 *
 * @throws MarkLogicSesameException if a flush fails
 */
public void sync() throws MarkLogicSesameException {
    if (WRITE_CACHE_ENABLED && timerWriteCache != null) timerWriteCache.forceRun();
    if (DELETE_CACHE_ENABLED && timerDeleteCache != null) timerDeleteCache.forceRun();
}
forces both the write cache and the delete cache to flush any pending triples
9,256
/**
 * Adds triples from a file in the given RDF format into the optional
 * contexts, within the current transaction.
 *
 * @throws RDFParseException if the server rejects the file or format
 */
public void sendAdd(File file, String baseURI, RDFFormat dataFormat, Resource... contexts) throws RDFParseException {
    getClient().performAdd(file, baseURI, dataFormat, this.tx, contexts);
}
add triples from file
9,257
/**
 * Adds triples read from an InputStream in the given RDF format into the
 * optional contexts, within the current transaction.
 */
public void sendAdd(InputStream in, String baseURI, RDFFormat dataFormat, Resource... contexts) throws RDFParseException, MarkLogicSesameException {
    getClient().performAdd(in, baseURI, dataFormat, this.tx, contexts);
}
add triples from InputStream
9,258
/**
 * Adds a single triple. With the write cache enabled the triple is buffered
 * in the cache model instead of being sent immediately; otherwise it is
 * skolemized and written through to the server.
 */
public void sendAdd(String baseURI, Resource subject, URI predicate, Value object, Resource... contexts) throws MarkLogicSesameException {
    if (WRITE_CACHE_ENABLED) {
        timerWriteCache.add(subject, predicate, object, contexts);
    } else {
        getClient().performAdd(baseURI, (Resource) skolemize(subject), (URI) skolemize(predicate), skolemize(object), this.tx, contexts);
    }
}
add single triple if cache is enabled will add triple to cache model
9,259
/**
 * Removes a single triple. With the delete cache enabled the removal is
 * buffered; otherwise pending writes are flushed first so the remove cannot
 * overtake a still-cached add, then the triple is removed directly.
 */
public void sendRemove(String baseURI, Resource subject, URI predicate, Value object, Resource... contexts) throws MarkLogicSesameException {
    if (DELETE_CACHE_ENABLED) {
        timerDeleteCache.add(subject, predicate, object, contexts);
    } else {
        // flush cached writes before removing so ordering is preserved
        if (WRITE_CACHE_ENABLED) sync();
        getClient().performRemove(baseURI, (Resource) skolemize(subject), (URI) skolemize(predicate), skolemize(object), this.tx, contexts);
    }
}
remove single triple
9,260
/**
 * Commits the active transaction, flushing caches first so buffered triples
 * are included in the commit.
 *
 * @throws MarkLogicTransactionException if no transaction is active or the
 *         pre-commit flush fails
 */
public void commitTransaction() throws MarkLogicTransactionException {
    if (isActiveTransaction()) {
        try {
            // push any cached triples into the transaction before committing
            sync();
            this.tx.commit();
            this.tx = null;
        } catch (MarkLogicSesameException e) {
            logger.error(e.getLocalizedMessage());
            throw new MarkLogicTransactionException(e);
        }
    } else {
        throw new MarkLogicTransactionException("No active transaction to commit.");
    }
}
commits a transaction
9,261
/**
 * Sets the graph permissions on the client; a null argument resets them to
 * an empty permission set obtained from a fresh GraphManager.
 *
 * @param graphPerms permissions to apply, or null to reset
 */
public void setGraphPerms(GraphPermissions graphPerms) {
    if (graphPerms == null) {
        // reset to an empty permission set
        getClient().setGraphPerms(getClient().getDatabaseClient().newGraphManager().newGraphPermissions());
    } else {
        getClient().setGraphPerms(graphPerms);
    }
}
setter for GraphPermissions
9,262
/**
 * Returns a MarkLogicClient that manages communication with the server via
 * the Java API client, built either from an injected DatabaseClient or from
 * the stored connection parameters.
 * NOTE(review): a new MarkLogicClient is constructed on every call even
 * though it is stored in a field — confirm whether caching was intended.
 */
public synchronized MarkLogicClient getMarkLogicClient() {
    if (null != databaseClient) {
        this.client = new MarkLogicClient(databaseClient);
    } else {
        this.client = new MarkLogicClient(host, port, user, password, auth);
    }
    return this.client;
}
returns MarkLogicClient object which manages communication to ML server via Java api client
9,263
/**
 * Stores the database client and instantiates the SPARQL and graph managers
 * bound to it.
 */
private void setDatabaseClient(DatabaseClient databaseClient) {
    this.databaseClient = databaseClient;
    this.sparqlManager = getDatabaseClient().newSPARQLQueryManager();
    this.graphManager = getDatabaseClient().newGraphManager();
}
set databaseclient and instantate related managers
9,264
/**
 * Merges triples from a file into the store. Quad formats (NQUADS, TRIG)
 * carry their own graph names and are merged as-is; other formats are merged
 * into each supplied context, or into the default graph when none is given.
 * The baseURI parameter is unused here — with mergeGraphs the effective base
 * is the file's own URI.
 *
 * @throws RDFParseException if the server request fails
 */
public void performAdd(File file, String baseURI, RDFFormat dataFormat, Transaction tx, Resource... contexts) throws RDFParseException {
    try {
        graphManager.setDefaultMimetype(dataFormat.getDefaultMIMEType());
        if (dataFormat.equals(RDFFormat.NQUADS) || dataFormat.equals(RDFFormat.TRIG)) {
            // quad formats already name their graphs
            graphManager.mergeGraphs(new FileHandle(file), tx);
        } else {
            if (notNull(contexts) && contexts.length > 0) {
                for (int i = 0; i < contexts.length; i++) {
                    if (notNull(contexts[i])) {
                        graphManager.mergeAs(contexts[i].toString(), new FileHandle(file), getGraphPerms(), tx);
                    } else {
                        // null context means the default graph
                        graphManager.mergeAs(DEFAULT_GRAPH_URI, new FileHandle(file), getGraphPerms(), tx);
                    }
                }
            } else {
                graphManager.mergeAs(DEFAULT_GRAPH_URI, new FileHandle(file), getGraphPerms(), tx);
            }
        }
    } catch (FailedRequestException e) {
        logger.error(e.getLocalizedMessage());
        throw new RDFParseException("Request to MarkLogic server failed, check file and format.");
    }
}
because mergeGraphs is used, the effective baseURI is always file.toURI()
9,265
/**
 * Merges triples read from an InputStream into the store. Quad formats
 * (NQUADS, TRIG) are merged as-is; other formats are merged into each
 * supplied context, or into the default graph when none is given.
 * The stream is always closed, even when the merge fails.
 *
 * @throws RDFParseException       if the server request fails
 * @throws MarkLogicSesameException on I/O failure closing the stream
 */
public void performAdd(InputStream in, String baseURI, RDFFormat dataFormat, Transaction tx, Resource... contexts) throws RDFParseException, MarkLogicSesameException {
    try {
        try {
            graphManager.setDefaultMimetype(dataFormat.getDefaultMIMEType());
            if (dataFormat.equals(RDFFormat.NQUADS) || dataFormat.equals(RDFFormat.TRIG)) {
                // quad formats already name their graphs
                graphManager.mergeGraphs(new InputStreamHandle(in), tx);
            } else {
                if (notNull(contexts) && contexts.length > 0) {
                    for (int i = 0; i < contexts.length; i++) {
                        if (notNull(contexts[i])) {
                            graphManager.mergeAs(contexts[i].toString(), new InputStreamHandle(in), getGraphPerms(), tx);
                        } else {
                            graphManager.mergeAs(DEFAULT_GRAPH_URI, new InputStreamHandle(in), getGraphPerms(), tx);
                        }
                    }
                } else {
                    graphManager.mergeAs(DEFAULT_GRAPH_URI, new InputStreamHandle(in), getGraphPerms(), tx);
                }
            }
        } finally {
            // close the stream on every path (it was leaked on merge failure before)
            in.close();
        }
    } catch (FailedRequestException e) {
        logger.error(e.getLocalizedMessage());
        throw new RDFParseException("Request to MarkLogic server failed, check input is valid.");
    } catch (IOException e) {
        logger.error(e.getLocalizedMessage());
        throw new MarkLogicSesameException("IO error");
    }
}
executes merge of triples from InputStream
9,266
/**
 * Executes a SPARQL INSERT DATA of a single triple into the supplied
 * contexts (null context = default graph). Subject, predicate and object
 * are passed as query bindings rather than spliced into the query text.
 */
public void performAdd(String baseURI, Resource subject, URI predicate, Value object, Transaction tx, Resource... contexts) throws MarkLogicSesameException {
    StringBuilder sb = new StringBuilder();
    if (notNull(contexts) && contexts.length > 0) {
        if (notNull(baseURI)) sb.append("BASE <" + baseURI + ">\n");
        sb.append("INSERT DATA { ");
        // one GRAPH clause per requested context
        for (int i = 0; i < contexts.length; i++) {
            if (notNull(contexts[i])) {
                sb.append("GRAPH <" + contexts[i].stringValue() + "> { ?s ?p ?o .} ");
            } else {
                sb.append("GRAPH <" + DEFAULT_GRAPH_URI + "> { ?s ?p ?o .} ");
            }
        }
        sb.append("}");
    } else {
        sb.append("INSERT DATA { GRAPH <" + DEFAULT_GRAPH_URI + "> {?s ?p ?o .}}");
    }
    SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(sb.toString());
    if (notNull(ruleset)) {
        qdef.setRulesets(ruleset);
    }
    if (notNull(graphPerms)) {
        qdef.setUpdatePermissions(graphPerms);
    }
    if (notNull(baseURI) && !baseURI.isEmpty()) {
        qdef.setBaseUri(baseURI);
    }
    // bind the triple's terms; object binding handles literals vs IRIs
    if (notNull(subject)) qdef.withBinding("s", subject.stringValue());
    if (notNull(predicate)) qdef.withBinding("p", predicate.stringValue());
    if (notNull(object)) bindObject(qdef, "o", object);
    sparqlManager.executeUpdate(qdef, tx);
}
executes INSERT of single triple
9,267
/**
 * Deletes each supplied named graph; a null context entry (or no contexts
 * at all) deletes the default graph.
 */
public void performClear(Transaction tx, Resource... contexts) {
    if (notNull(contexts)) {
        for (int i = 0; i < contexts.length; i++) {
            if (notNull(contexts[i])) {
                graphManager.delete(contexts[i].stringValue(), tx);
            } else {
                graphManager.delete(DEFAULT_GRAPH_URI, tx);
            }
        }
    } else {
        graphManager.delete(DEFAULT_GRAPH_URI, tx);
    }
}
clears triples from named graph
9,268
/**
 * Sets the rulesets used for inferencing, filtering out null entries.
 * Passing null clears all rulesets.
 *
 * @param rulesets rulesets to apply; may contain nulls, may be null
 */
public void setRulesets(SPARQLRuleset... rulesets) {
    if (notNull(rulesets)) {
        List<SPARQLRuleset> list = new ArrayList<>();
        // typed loop instead of Object + cast; the old per-iteration
        // rulesets.length > 0 check was redundant (the loop body never
        // runs for an empty array)
        for (SPARQLRuleset r : rulesets) {
            if (r != null) {
                list.add(r);
            }
        }
        this.ruleset = list.toArray(new SPARQLRuleset[list.size()]);
    } else {
        this.ruleset = null;
    }
}
setter for rulesets filters out nulls
9,269
/**
 * Converts a Sesame binding set into the Java API client's SPARQLBindings.
 * Each value is bound by its string form.
 */
protected SPARQLBindings getSPARQLBindings(SPARQLQueryBindingSet bindings) {
    SPARQLBindings sps = new SPARQLBindingsImpl();
    for (Binding binding : bindings) {
        sps.bind(binding.getName(), binding.getValue().stringValue());
    }
    return sps;
}
converts Sesame BindingSet to java api client SPARQLBindings
9,270
/**
 * Timer task: flushes the cache when it has reached its configured size or
 * has not been accessed within the configured interval.
 *
 * @throws RuntimeException wrapping any flush failure
 */
public synchronized void run() {
    Date now = new Date();
    if (!cache.isEmpty() && ((cache.size() > cacheSize - 1) || (now.getTime() - lastCacheAccess.getTime() > cacheMillis))) {
        try {
            flush();
        } catch (RepositoryException | MalformedQueryException | UpdateExecutionException | IOException e) {
            // handling is identical for every flush failure, so multi-catch
            log.error(e.getLocalizedMessage());
            throw new RuntimeException(e);
        }
    }
}
tests to see if we should flush cache
9,271
/**
 * Forces the cache to flush if it holds anything, translating each kind of
 * flush failure into a MarkLogicSesameException with a specific message.
 */
public synchronized void forceRun() throws MarkLogicSesameException {
    log.debug(String.valueOf(cache.size()));
    if (!cache.isEmpty()) {
        try {
            flush();
        } catch (RepositoryException e) {
            throw new MarkLogicSesameException("Could not flush write cache, encountered repository issue.", e);
        } catch (MalformedQueryException e) {
            throw new MarkLogicSesameException("Could not flush write cache, query was malformed.", e);
        } catch (UpdateExecutionException e) {
            throw new MarkLogicSesameException("Could not flush write cache, query update failed.", e);
        } catch (IOException e) {
            throw new MarkLogicSesameException("Could not flush write cache, encountered IO issue.", e);
        }
    }
}
forces the cache to flush if there is anything in it
9,272
/**
 * Adds a triple to the cache model, flushing immediately once the cache
 * reaches its configured size.
 */
public synchronized void add(Resource subject, URI predicate, Value object, Resource... contexts) throws MarkLogicSesameException {
    cache.add(subject, predicate, object, contexts);
    if (cache.size() > cacheSize - 1) {
        forceRun();
    }
}
add triple to cache Model
9,273
/**
 * Convenience overload of prepareQuery defaulting to SPARQL.
 */
public Query prepareQuery(String queryString, String baseURI) throws RepositoryException, MalformedQueryException {
    return prepareQuery(QueryLanguage.SPARQL, queryString, baseURI);
}
overload for prepareQuery
9,274
/**
 * Dispatches a SPARQL query string to the matching prepared query type by
 * inspecting the first keyword after the prolog: SELECT becomes a tuple
 * query, ASK a boolean query, anything else a graph query.
 *
 * @throws UnsupportedQueryLanguageException for non-SPARQL languages
 */
public MarkLogicQuery prepareQuery(QueryLanguage queryLanguage, String queryString, String baseURI) throws RepositoryException, MalformedQueryException {
    if (SPARQL.equals(queryLanguage)) {
        String queryStringWithoutProlog = QueryParserUtil.removeSPARQLQueryProlog(queryString).toUpperCase();
        if (queryStringWithoutProlog.startsWith("SELECT")) {
            return prepareTupleQuery(queryLanguage, queryString, baseURI);
        } else if (queryStringWithoutProlog.startsWith("ASK")) {
            return prepareBooleanQuery(queryLanguage, queryString, baseURI);
        } else {
            return prepareGraphQuery(queryLanguage, queryString, baseURI);
        }
    }
    throw new UnsupportedQueryLanguageException("Unsupported query language " + queryLanguage.getName());
}
base method for prepareQuery
9,275
/**
 * Convenience overload of prepareGraphQuery defaulting to SPARQL.
 */
public MarkLogicGraphQuery prepareGraphQuery(String queryString, String baseURI) throws RepositoryException, MalformedQueryException {
    return prepareGraphQuery(QueryLanguage.SPARQL, queryString, baseURI);
}
overload for prepareGraphQuery
9,276
/**
 * Convenience overload of prepareBooleanQuery defaulting to SPARQL with no
 * base URI.
 */
public MarkLogicBooleanQuery prepareBooleanQuery(String queryString) throws RepositoryException, MalformedQueryException {
    return prepareBooleanQuery(QueryLanguage.SPARQL, queryString, null);
}
overload for prepareBooleanQuery
9,277
/**
 * Convenience overload of prepareUpdate defaulting to SPARQL.
 */
public MarkLogicUpdateQuery prepareUpdate(String queryString, String baseURI) throws RepositoryException, MalformedQueryException {
    return prepareUpdate(QueryLanguage.SPARQL, queryString, baseURI);
}
overload for prepareUpdate
9,278
/**
 * Base method for preparing a SPARQL update query, carrying the connection's
 * default permissions, constraining query definition and rulesets.
 *
 * @throws UnsupportedQueryLanguageException for non-SPARQL languages
 */
public MarkLogicUpdateQuery prepareUpdate(QueryLanguage queryLanguage, String queryString, String baseURI) throws RepositoryException, MalformedQueryException {
    if (QueryLanguage.SPARQL.equals(queryLanguage)) {
        return new MarkLogicUpdateQuery(this.client, new SPARQLQueryBindingSet(), baseURI, queryString, defaultGraphPerms, defaultQueryDef, defaultRulesets);
    }
    throw new UnsupportedQueryLanguageException("Unsupported query language " + queryLanguage.getName());
}
base method for prepareUpdate
9,279
/**
 * Returns the names of all graphs in the store as Resources, by running the
 * ALL_GRAPH_URIS tuple query and converting each "g" binding, with query
 * exceptions converted to RepositoryException.
 */
public RepositoryResult<Resource> getContextIDs() throws RepositoryException {
    try {
        TupleQuery tupleQuery = prepareTupleQuery(QueryLanguage.SPARQL, ALL_GRAPH_URIS);
        TupleQueryResult result = tupleQuery.evaluate();
        return new RepositoryResult<Resource>(
                new ExceptionConvertingIteration<Resource, RepositoryException>(
                        new ConvertingIteration<BindingSet, Resource, QueryEvaluationException>(result) {
                            protected Resource convert(BindingSet bindings) throws QueryEvaluationException {
                                // each row carries the graph name in ?g
                                return (Resource) bindings.getValue("g");
                            }
                        }) {
                    protected RepositoryException convert(Exception e) {
                        return new RepositoryException(e);
                    }
                });
    } catch (MalformedQueryException e) {
        throw new RepositoryException(e);
    } catch (QueryEvaluationException e) {
        throw new RepositoryException(e);
    }
}
returns list of graph names as Resource
9,280
/**
 * Returns all statements matching the (possibly null) subject, predicate
 * and object. In quad mode a tuple query is converted to statements; a
 * fully bound pattern is answered via hasStatement as a singleton/empty
 * result; otherwise the EVERYTHING graph query is used.
 */
public RepositoryResult<Statement> getStatements(Resource subj, URI pred, Value obj, boolean includeInferred) throws RepositoryException {
    try {
        if (isQuadMode()) {
            TupleQuery tupleQuery = prepareTupleQuery(GET_STATEMENTS);
            setBindings(tupleQuery, subj, pred, obj);
            tupleQuery.setIncludeInferred(includeInferred);
            TupleQueryResult qRes = tupleQuery.evaluate();
            return new RepositoryResult<Statement>(
                    new ExceptionConvertingIteration<Statement, RepositoryException>(
                            toStatementIteration(qRes, subj, pred, obj)) {
                        protected RepositoryException convert(Exception e) {
                            return new RepositoryException(e);
                        }
                    });
        } else if (subj != null && pred != null && obj != null) {
            // fully bound pattern: answer with a singleton or an empty result
            if (hasStatement(subj, pred, obj, includeInferred)) {
                Statement st = new StatementImpl(subj, pred, obj);
                CloseableIteration<Statement, RepositoryException> cursor;
                cursor = new SingletonIteration<Statement, RepositoryException>(st);
                return new RepositoryResult<Statement>(cursor);
            } else {
                return new RepositoryResult<Statement>(new EmptyIteration<Statement, RepositoryException>());
            }
        }
        // partial pattern outside quad mode: stream everything with bindings applied
        GraphQuery query = prepareGraphQuery(EVERYTHING);
        setBindings(query, subj, pred, obj);
        GraphQueryResult result = query.evaluate();
        return new RepositoryResult<Statement>(
                new ExceptionConvertingIteration<Statement, RepositoryException>(result) {
                    protected RepositoryException convert(Exception e) {
                        return new RepositoryException(e);
                    }
                });
    } catch (MalformedQueryException e) {
        throw new RepositoryException(e);
    } catch (QueryEvaluationException e) {
        throw new RepositoryException(e);
    }
}
returns all statements
9,281
/**
 * Returns statements matching the pattern restricted to the supplied
 * contexts (null context = default graph). In quad mode the context filter
 * is built into a SPARQL query; a fully bound pattern is answered via
 * hasStatement; otherwise the EVERYTHING graph query is used.
 */
public RepositoryResult<Statement> getStatements(Resource subj, URI pred, Value obj, boolean includeInferred, Resource... contexts) throws RepositoryException {
    if (contexts == null) {
        // a null vararg array means "the default graph only"
        contexts = new Resource[]{null};
    }
    try {
        if (isQuadMode()) {
            // build a filter matching ?ctx against the requested graph IRIs
            StringBuilder sb = new StringBuilder();
            sb.append("SELECT * WHERE { GRAPH ?ctx { ?s ?p ?o } filter (?ctx = (");
            boolean first = true;
            for (Resource context : contexts) {
                if (first) {
                    first = !first;
                } else {
                    sb.append(",");
                }
                if (notNull(context)) {
                    sb.append("IRI(\"" + context.toString() + "\")");
                } else {
                    sb.append("IRI(\"" + DEFAULT_GRAPH_URI + "\")");
                }
            }
            sb.append(") ) }");
            TupleQuery tupleQuery = prepareTupleQuery(sb.toString());
            tupleQuery.setIncludeInferred(includeInferred);
            setBindings(tupleQuery, subj, pred, obj, (Resource) null);
            TupleQueryResult qRes = tupleQuery.evaluate();
            return new RepositoryResult<Statement>(
                    new ExceptionConvertingIteration<Statement, RepositoryException>(
                            toStatementIteration(qRes, subj, pred, obj)) {
                        protected RepositoryException convert(Exception e) {
                            return new RepositoryException(e);
                        }
                    });
        } else if (subj != null && pred != null && obj != null) {
            // fully bound pattern: answer with a singleton or an empty result
            if (hasStatement(subj, pred, obj, includeInferred, contexts)) {
                Statement st = new StatementImpl(subj, pred, obj);
                CloseableIteration<Statement, RepositoryException> cursor;
                cursor = new SingletonIteration<Statement, RepositoryException>(st);
                return new RepositoryResult<Statement>(cursor);
            } else {
                return new RepositoryResult<Statement>(new EmptyIteration<Statement, RepositoryException>());
            }
        } else {
            MarkLogicGraphQuery query = prepareGraphQuery(EVERYTHING);
            setBindings(query, subj, pred, obj, contexts);
            GraphQueryResult result = query.evaluate();
            return new RepositoryResult<Statement>(
                    new ExceptionConvertingIteration<Statement, RepositoryException>(result) {
                        protected RepositoryException convert(Exception e) {
                            return new RepositoryException(e);
                        }
                    });
        }
    } catch (MalformedQueryException e) {
        throw new RepositoryException(e);
    } catch (QueryEvaluationException e) {
        throw new RepositoryException(e);
    }
}
returns statements from supplied context
9,282
/**
 * Returns the number of triples in the entire store, with inference,
 * rulesets and constraining queries disabled so the raw count is reported.
 */
public long size() throws RepositoryException {
    try {
        MarkLogicTupleQuery tupleQuery = prepareTupleQuery(COUNT_EVERYTHING);
        tupleQuery.setIncludeInferred(false);
        tupleQuery.setRulesets((SPARQLRuleset) null);
        tupleQuery.setConstrainingQueryDefinition((QueryDefinition) null);
        TupleQueryResult qRes = tupleQuery.evaluate();
        // single row carrying the count in ?ct
        BindingSet result = qRes.next();
        qRes.close();
        return ((Literal) result.getBinding("ct").getValue()).longValue();
    } catch (QueryEvaluationException | MalformedQueryException e) {
        throw new RepositoryException(e);
    }
}
returns number of triples in the entire triple store
9,283
/**
 * Returns the number of triples in the supplied contexts (null context =
 * default graph), built as a counting SPARQL query with a graph filter.
 * Inference, rulesets and constraining queries are disabled.
 */
public long size(Resource... contexts) throws RepositoryException {
    if (contexts == null) {
        // a null vararg array means "the default graph only"
        contexts = new Resource[]{null};
    }
    try {
        StringBuilder sb = new StringBuilder();
        sb.append("SELECT (count(?s) as ?ct) where { GRAPH ?g { ?s ?p ?o }");
        boolean first = true;
        // contexts is never null here (normalized above); only length matters
        if (contexts.length > 0) {
            sb.append("filter (?g = (");
            for (Resource context : contexts) {
                if (first) {
                    first = !first;
                } else {
                    sb.append(",");
                }
                if (context == null) {
                    sb.append("IRI(\"" + DEFAULT_GRAPH_URI + "\")");
                } else {
                    sb.append("IRI(\"" + context.toString() + "\")");
                }
            }
            sb.append(") )");
        } else {
            sb.append("filter (?g = (IRI(\"" + DEFAULT_GRAPH_URI + "\")))");
        }
        sb.append("}");
        logger.debug(sb.toString());
        MarkLogicTupleQuery tupleQuery = prepareTupleQuery(sb.toString());
        tupleQuery.setIncludeInferred(false);
        tupleQuery.setRulesets((SPARQLRuleset) null);
        tupleQuery.setConstrainingQueryDefinition((QueryDefinition) null);
        TupleQueryResult qRes = tupleQuery.evaluate();
        // single row carrying the count in ?ct
        BindingSet result = qRes.next();
        qRes.close();
        return ((Literal) result.getBinding("ct").getValue()).longValue();
    } catch (QueryEvaluationException | MalformedQueryException e) {
        throw new RepositoryException(e);
    }
}
returns number of triples in supplied context
9,284
/**
 * Adds triples read from an InputStream in the given format into the
 * optional contexts.
 */
public void add(InputStream in, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException, RepositoryException {
    getClient().sendAdd(in, baseURI, dataFormat, contexts);
}
add triples via inputstream
9,285
/**
 * Adds triples from a file; when no baseURI is supplied the file's own URI
 * is used as the base.
 */
public void add(File file, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException, RepositoryException {
    if (notNull(baseURI)) {
        getClient().sendAdd(file, baseURI, dataFormat, contexts);
    } else {
        getClient().sendAdd(file, file.toURI().toString(), dataFormat, contexts);
    }
}
add triples via File
9,286
/**
 * Adds triples read from a Reader in the given format into the optional
 * contexts.
 */
public void add(Reader reader, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException, RepositoryException {
    getClient().sendAdd(reader, baseURI, dataFormat, contexts);
}
add triples via Reader
9,287
/**
 * Adds triples fetched from a URL; when no baseURI is supplied the URL
 * itself is used as the base.
 */
public void add(URL url, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException, RepositoryException {
    // open the stream directly; rebuilding the URL from its own string was redundant
    if (notNull(baseURI)) {
        getClient().sendAdd(url.openStream(), baseURI, dataFormat, contexts);
    } else {
        getClient().sendAdd(url.openStream(), url.toString(), dataFormat, contexts);
    }
}
add triples via URL
9,288
/**
 * Adds a single statement without committing (delegates to add).
 */
protected void addWithoutCommit(Resource subject, URI predicate, Value object, Resource... contexts) throws RepositoryException {
    add(subject, predicate, object, contexts);
}
add without commit
9,289
/**
 * Removes a single statement without committing (delegates to remove).
 */
protected void removeWithoutCommit(Resource subject, URI predicate, Value object, Resource... contexts) throws RepositoryException {
    remove(subject, predicate, object, contexts);
}
remove without commit
9,290
/**
 * Sets the default graph permissions used by all queries; a null argument
 * resets them to an empty permission set.
 *
 * @param graphPerms permissions to use, or null to reset
 */
public void setDefaultGraphPerms(GraphPermissions graphPerms) {
    this.defaultGraphPerms = notNull(graphPerms) ? graphPerms : client.emptyGraphPerms();
}
sets default graph permissions to be used by all queries
9,291
/**
 * Customizes the write cache's initial delay, flush interval and size by
 * re-initializing the client's timer.
 */
public void configureWriteCache(long initDelay, long delayCache, long cacheSize) {
    client.initTimer(initDelay, delayCache, cacheSize);
}
customise write cache interval and cache size .
9,292
/**
 * Prepends a single Resource to a varargs array; when the first argument is
 * null the array is returned unchanged.
 *
 * @param o   resource to prepend, may be null
 * @param arr existing resources
 * @return combined array
 */
private static Resource[] mergeResource(Resource o, Resource... arr) {
    if (o == null) {
        return arr;
    }
    Resource[] merged = new Resource[arr.length + 1];
    merged[0] = o;
    System.arraycopy(arr, 0, merged, 1, arr.length);
    return merged;
}
private utility for merging Resource varargs
9,293
/**
 * Flushes the cached triples by issuing a single SPARQL DELETE DATA update,
 * grouping statements by their context into GRAPH clauses, then clears the
 * cache and records the access time. This is the delete-cache flush: the
 * cached triples are removed from the store, not written.
 */
protected synchronized void flush() throws RepositoryException, MalformedQueryException, UpdateExecutionException, IOException {
    if (cache.isEmpty()) {
        return;
    }
    StringBuffer entireQuery = new StringBuffer();
    SPARQLQueryBindingSet bindingSet = new SPARQLQueryBindingSet();
    // NOTE(review): SPARQL PREFIX declarations take no trailing "." (that is Turtle) — confirm the server tolerates this
    for (Namespace ns : cache.getNamespaces()) {
        entireQuery.append("PREFIX " + ns.getPrefix() + ": <" + ns.getName() + ">. ");
    }
    entireQuery.append("DELETE DATA { ");
    // deduplicate contexts so each graph gets exactly one GRAPH clause
    Set<Resource> distinctCtx = new HashSet<Resource>();
    for (Resource context : cache.contexts()) {
        distinctCtx.add(context);
    }
    for (Resource ctx : distinctCtx) {
        if (ctx != null) {
            entireQuery.append(" GRAPH <" + ctx + "> { ");
        }
        for (Statement stmt : cache.filter(null, null, null, ctx)) {
            entireQuery.append("<" + stmt.getSubject().stringValue() + "> ");
            entireQuery.append("<" + stmt.getPredicate().stringValue() + "> ");
            Value object = stmt.getObject();
            if (object instanceof Literal) {
                // serialize the literal with its escaped label and either datatype or language tag
                Literal lit = (Literal) object;
                entireQuery.append("\"");
                entireQuery.append(SPARQLUtil.encodeString(lit.getLabel()));
                entireQuery.append("\"");
                if (null == lit.getLanguage()) {
                    // NOTE(review): assumes a datatype exists when the language is absent — NPE for plain literals? confirm
                    entireQuery.append("^^<" + lit.getDatatype().stringValue() + ">");
                } else {
                    entireQuery.append("@" + lit.getLanguage().toString());
                }
            } else {
                entireQuery.append("<" + object.stringValue() + "> ");
            }
            entireQuery.append(".");
        }
        if (ctx != null) {
            entireQuery.append(" }");
        }
    }
    entireQuery.append("} ");
    log.info(entireQuery.toString());
    client.sendUpdateQuery(entireQuery.toString(), bindingSet, false, null);
    lastCacheAccess = new Date();
    cache.clear();
}
flushes the cache, deleting the cached triples grouped by graph
9,294
/**
 * Evaluates a boolean (ASK) query after flushing caches.
 *
 * @throws QueryEvaluationException wrapping any lower-level failure
 */
public boolean evaluate() throws QueryEvaluationException {
    try {
        // flush caches so the query sees buffered triples
        sync();
        return getMarkLogicClient().sendBooleanQuery(getQueryString(), getBindings(), getIncludeInferred(), getBaseURI());
    } catch (RepositoryException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    } catch (MalformedQueryException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    } catch (IOException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    } catch (FailedRequestException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    }
}
evaluate boolean query
9,295
/**
 * Closes the underlying result, logging and wrapping any failure in a
 * QueryEvaluationException.
 */
protected void handleClose() throws QueryEvaluationException {
    try {
        super.handleClose();
    } catch (Exception e) {
        logger.error("MarkLogicBackgroundGraphResult handleClose() stream closed exception", e);
        throw new QueryEvaluationException(e);
    }
}
wrap exception debug log
9,296
/**
 * Evaluates a graph query after flushing caches.
 *
 * @throws QueryEvaluationException wrapping any lower-level failure
 */
public GraphQueryResult evaluate() throws QueryEvaluationException {
    try {
        // flush caches so the query sees buffered triples
        sync();
        return getMarkLogicClient().sendGraphQuery(getQueryString(), getBindings(), getIncludeInferred(), getBaseURI());
    } catch (IOException e) {
        throw new QueryEvaluationException(e);
    } catch (MarkLogicSesameException e) {
        throw new QueryEvaluationException(e);
    }
}
evaluate graph query
9,297
/**
 * Binds a variable to a value; note the string is converted into a URI
 * value, not bound as a plain literal.
 */
public void setBinding(String name, String stringValue) {
    bindingSet.addBinding(name, ValueFactoryImpl.getInstance().createURI(stringValue));
}
set individual binding
9,298
/**
 * Binds a variable to the given value.
 */
public void setBinding(String name, Value value) {
    bindingSet.addBinding(name, value);
}
set individual binding and value
9,299
/**
 * Evaluates the tuple query with pagination, returning up to pageLength
 * rows starting at the given offset, after flushing caches.
 *
 * @throws QueryEvaluationException wrapping any lower-level failure
 */
public TupleQueryResult evaluate(long start, long pageLength) throws QueryEvaluationException {
    try {
        // flush caches so the query sees buffered triples
        sync();
        return getMarkLogicClient().sendTupleQuery(getQueryString(), getBindings(), start, pageLength, getIncludeInferred(), getBaseURI());
    } catch (RepositoryException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    } catch (MalformedQueryException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    } catch (FailedRequestException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    }
}
evaluate tuple query with pagination