idx int64 0 41.2k | question stringlengths 74 4.04k | target stringlengths 7 750 |
|---|---|---|
28,900 | public final long set0 ( int idx , long l ) { setWrite ( ) ; if ( _chk2 . set_impl ( idx , l ) ) return l ; ( _chk2 = inflate_impl ( new NewChunk ( this ) ) ) . set_impl ( idx , l ) ; return l ; } | Set a long element in a chunk given a 0 - based chunk local index . |
28,901 | public final double set0 ( int idx , double d ) { setWrite ( ) ; if ( _chk2 . set_impl ( idx , d ) ) return d ; ( _chk2 = inflate_impl ( new NewChunk ( this ) ) ) . set_impl ( idx , d ) ; return d ; } | Set a double element in a chunk given a 0 - based chunk local index . |
28,902 | public final float set0 ( int idx , float f ) { setWrite ( ) ; if ( _chk2 . set_impl ( idx , f ) ) return f ; ( _chk2 = inflate_impl ( new NewChunk ( this ) ) ) . set_impl ( idx , f ) ; return f ; } | Set a floating element in a chunk given a 0 - based chunk local index . |
28,903 | public final boolean setNA0 ( int idx ) { setWrite ( ) ; if ( _chk2 . setNA_impl ( idx ) ) return true ; ( _chk2 = inflate_impl ( new NewChunk ( this ) ) ) . setNA_impl ( idx ) ; return true ; } | Set the element in a chunk as missing given a 0 - based chunk local index . |
28,904 | protected Frame scoreImpl ( Frame adaptFrm ) { if ( isSupervised ( ) ) { int ridx = adaptFrm . find ( responseName ( ) ) ; assert ridx == - 1 : "Adapted frame should not contain response in scoring method!" ; assert nfeatures ( ) == adaptFrm . numCols ( ) : "Number of model features " + nfeatures ( ) + " != number of test set columns: " + adaptFrm . numCols ( ) ; assert adaptFrm . vecs ( ) . length == nfeatures ( ) : "Scoring data set contains wrong number of columns: " + adaptFrm . vecs ( ) . length + " instead of " + nfeatures ( ) ; } int nc = nclasses ( ) ; Vec [ ] newVecs = new Vec [ ] { adaptFrm . anyVec ( ) . makeZero ( classNames ( ) ) } ; if ( nc > 1 ) newVecs = Utils . join ( newVecs , adaptFrm . anyVec ( ) . makeZeros ( nc ) ) ; String [ ] names = new String [ newVecs . length ] ; names [ 0 ] = "predict" ; for ( int i = 1 ; i < names . length ; ++ i ) names [ i ] = classNames ( ) [ i - 1 ] ; final int num_features = nfeatures ( ) ; new MRTask2 ( ) { public void map ( Chunk chks [ ] ) { double tmp [ ] = new double [ num_features ] ; float preds [ ] = new float [ nclasses ( ) == 1 ? 1 : nclasses ( ) + 1 ] ; int len = chks [ 0 ] . _len ; for ( int row = 0 ; row < len ; row ++ ) { float p [ ] = score0 ( chks , row , tmp , preds ) ; for ( int c = 0 ; c < preds . length ; c ++ ) chks [ num_features + c ] . set0 ( row , p [ c ] ) ; } } } . doAll ( Utils . join ( adaptFrm . vecs ( ) , newVecs ) ) ; return new Frame ( names , newVecs ) ; } | Score already adapted frame . |
28,905 | public final float [ ] score ( Frame fr , boolean exact , int row ) { double tmp [ ] = new double [ fr . numCols ( ) ] ; for ( int i = 0 ; i < tmp . length ; i ++ ) tmp [ i ] = fr . vecs ( ) [ i ] . at ( row ) ; return score ( fr . names ( ) , fr . domains ( ) , exact , tmp ) ; } | Single row scoring on a compatible Frame . |
28,906 | public final float [ ] score ( String names [ ] , String domains [ ] [ ] , boolean exact , double row [ ] ) { return score ( adapt ( names , domains , exact ) , row , new float [ nclasses ( ) ] ) ; } | Single row scoring on a compatible set of data . Fairly expensive to adapt . |
28,907 | protected SB toJavaSuper ( SB sb ) { sb . nl ( ) ; sb . ii ( 1 ) ; sb . i ( ) . p ( "public String[] getNames() { return NAMES; } " ) . nl ( ) ; sb . i ( ) . p ( "public String[][] getDomainValues() { return DOMAINS; }" ) . nl ( ) ; String uuid = this . uniqueId != null ? this . uniqueId . getId ( ) : this . _key . toString ( ) ; sb . i ( ) . p ( "public String getUUID() { return " ) . ps ( uuid ) . p ( "; }" ) . nl ( ) ; return sb ; } | Generate implementation for super class . |
28,908 | private SB toJavaPredict ( SB ccsb , SB fileCtxSb ) { ccsb . nl ( ) ; ccsb . p ( " // Pass in data in a double[], pre-aligned to the Model's requirements." ) . nl ( ) ; ccsb . p ( " // Jam predictions into the preds[] array; preds[0] is reserved for the" ) . nl ( ) ; ccsb . p ( " // main prediction (class for classifiers or value for regression)," ) . nl ( ) ; ccsb . p ( " // and remaining columns hold a probability distribution for classifiers." ) . nl ( ) ; ccsb . p ( " public final float[] predict( double[] data, float[] preds) { preds = predict( data, preds, " + toJavaDefaultMaxIters ( ) + "); return preds; }" ) . nl ( ) ; ccsb . p ( " public final float[] predict( double[] data, float[] preds, int maxIters ) {" ) . nl ( ) ; SB classCtxSb = new SB ( ) ; toJavaPredictBody ( ccsb . ii ( 1 ) , classCtxSb , fileCtxSb ) ; ccsb . di ( 1 ) ; ccsb . p ( " return preds;" ) . nl ( ) ; ccsb . p ( " }" ) . nl ( ) ; ccsb . p ( classCtxSb ) ; return ccsb ; } | Wrapper around the main predict call including the signature and return value |
28,909 | protected final void emptyLTrash ( ) { if ( _lVecTrash . isEmpty ( ) ) return ; Futures fs = new Futures ( ) ; cleanupTrash ( _lVecTrash , fs ) ; fs . blockForPending ( ) ; } | User call which empty local trash of vectors . |
28,910 | protected void registered ( RequestServer . API_VERSION ver ) { super . registered ( ver ) ; for ( Argument arg : _arguments ) { if ( arg . _name . equals ( "activation" ) || arg . _name . equals ( "initial_weight_distribution" ) || arg . _name . equals ( "expert_mode" ) || arg . _name . equals ( "adaptive_rate" ) || arg . _name . equals ( "replicate_training_data" ) || arg . _name . equals ( "balance_classes" ) || arg . _name . equals ( "n_folds" ) || arg . _name . equals ( "autoencoder" ) || arg . _name . equals ( "checkpoint" ) ) { arg . setRefreshOnChange ( ) ; } } } | Helper to specify which arguments trigger a refresh on change |
28,911 | private DataInfo prepareDataInfo ( ) { final boolean del_enum_resp = classification && ! response . isEnum ( ) ; final Frame train = FrameTask . DataInfo . prepareFrame ( source , autoencoder ? null : response , ignored_cols , classification , ignore_const_cols , true ) ; final DataInfo dinfo = new FrameTask . DataInfo ( train , autoencoder ? 0 : 1 , true , autoencoder || use_all_factor_levels , autoencoder ? DataInfo . TransformType . NORMALIZE : DataInfo . TransformType . STANDARDIZE , classification ? DataInfo . TransformType . NONE : DataInfo . TransformType . STANDARDIZE ) ; if ( ! autoencoder ) { final Vec resp = dinfo . _adaptedFrame . lastVec ( ) ; assert ( ! classification ^ resp . isEnum ( ) ) : "Must have enum response for classification!" ; if ( del_enum_resp ) ltrash ( resp ) ; } return dinfo ; } | Helper to create a DataInfo object from the source and response |
28,912 | Frame updateFrame ( Frame target , Frame src ) { if ( src != target ) ltrash ( src ) ; return src ; } | Helper to update a Frame and adding it to the local trash at the same time |
28,913 | private void lock_data ( ) { source . read_lock ( self ( ) ) ; if ( validation != null && source . _key != null && validation . _key != null && ! source . _key . equals ( validation . _key ) ) validation . read_lock ( self ( ) ) ; } | Lock the input datasets against deletes |
28,914 | private void unlock_data ( ) { source . unlock ( self ( ) ) ; if ( validation != null && source . _key != null && validation . _key != null && ! source . _key . equals ( validation . _key ) ) validation . unlock ( self ( ) ) ; } | Release the lock for the input datasets |
28,915 | private Frame reBalance ( final Frame fr , boolean local ) { int chunks = ( int ) Math . min ( 4 * H2O . NUMCPUS * ( local ? 1 : H2O . CLOUD . size ( ) ) , fr . numRows ( ) ) ; if ( fr . anyVec ( ) . nChunks ( ) > chunks && ! reproducible ) { Log . info ( "Dataset already contains " + fr . anyVec ( ) . nChunks ( ) + " chunks. No need to rebalance." ) ; return fr ; } else if ( reproducible ) { Log . warn ( "Reproducibility enforced - using only 1 thread - can be slow." ) ; chunks = 1 ; } if ( ! quiet_mode ) Log . info ( "ReBalancing dataset into (at least) " + chunks + " chunks." ) ; String snewKey = fr . _key != null ? ( fr . _key . toString ( ) + ".balanced" ) : Key . rand ( ) ; Key newKey = Key . makeSystem ( snewKey ) ; RebalanceDataSet rb = new RebalanceDataSet ( fr , newKey , chunks ) ; H2O . submitTask ( rb ) ; rb . join ( ) ; return UKV . get ( newKey ) ; } | Rebalance a frame for load balancing |
28,916 | private static float computeRowUsageFraction ( final long numRows , final long train_samples_per_iteration , final boolean replicate_training_data ) { float rowUsageFraction = ( float ) train_samples_per_iteration / numRows ; if ( replicate_training_data ) rowUsageFraction /= H2O . CLOUD . size ( ) ; assert ( rowUsageFraction > 0 ) ; return rowUsageFraction ; } | Compute the fraction of rows that need to be used for training during one iteration |
28,917 | public static void crossValidate ( Job . ValidatedJob job ) { if ( job . state != Job . JobState . RUNNING ) return ; if ( job . validation != null ) throw new IllegalArgumentException ( "Cannot provide validation dataset and n_folds > 0 at the same time." ) ; if ( job . n_folds <= 1 ) throw new IllegalArgumentException ( "n_folds must be >= 2 for cross-validation." ) ; final String basename = job . destination_key . toString ( ) ; long [ ] offsets = new long [ job . n_folds + 1 ] ; Frame [ ] cv_preds = new Frame [ job . n_folds ] ; try { for ( int i = 0 ; i < job . n_folds ; ++ i ) { if ( job . state != Job . JobState . RUNNING ) break ; Key [ ] destkeys = new Key [ ] { Key . make ( basename + "_xval" + i + "_train" ) , Key . make ( basename + "_xval" + i + "_holdout" ) } ; NFoldFrameExtractor nffe = new NFoldFrameExtractor ( job . source , job . n_folds , i , destkeys , Key . make ( ) ) ; H2O . submitTask ( nffe ) ; Frame [ ] splits = nffe . getResult ( ) ; try { job . crossValidate ( splits , cv_preds , offsets , i ) ; job . _cv_count ++ ; } finally { if ( ! job . keep_cross_validation_splits ) for ( Frame f : splits ) f . delete ( ) ; } } if ( job . state != Job . JobState . RUNNING ) return ; final int resp_idx = job . source . find ( job . _responseName ) ; Vec response = job . source . vecs ( ) [ resp_idx ] ; boolean put_back = UKV . get ( job . response . _key ) == null ; if ( put_back ) { job . response = response ; if ( job . classification ) job . response = job . response . toEnum ( ) ; DKV . put ( job . response . _key , job . response ) ; } ( ( Model ) UKV . get ( job . destination_key ) ) . scoreCrossValidation ( job , job . source , response , cv_preds , offsets ) ; if ( put_back ) UKV . remove ( job . response . _key ) ; } finally { for ( Frame f : cv_preds ) if ( f != null ) f . delete ( ) ; } } | Cross - Validate a ValidatedJob |
28,918 | protected static List < water . ModelMetrics > fetchAll ( ) { return new ArrayList < water . ModelMetrics > ( H2O . KeySnapshot . globalSnapshot ( ) . fetchAll ( water . ModelMetrics . class ) . values ( ) ) ; } | Fetch all ModelMetrics from the KV store . |
28,919 | private Response serveOneOrAll ( List < water . ModelMetrics > list ) { JsonArray metricsArray = new JsonArray ( ) ; for ( water . ModelMetrics metrics : list ) { JsonObject metricsJson = metrics . toJSON ( ) ; metricsArray . add ( metricsJson ) ; } JsonObject result = new JsonObject ( ) ; result . add ( "metrics" , metricsArray ) ; return Response . done ( result ) ; } | For one or more water . ModelMetrics from the KV store return Response containing a map of them . |
28,920 | public static void scoreTree ( double data [ ] , float preds [ ] , CompressedTree [ ] ts ) { for ( int c = 0 ; c < ts . length ; c ++ ) if ( ts [ c ] != null ) preds [ ts . length == 1 ? 0 : c + 1 ] += ts [ c ] . score ( data ) ; } | Score given tree on the row of data . |
28,921 | public static Request registerRequest ( Request req ) { assert req . supportedVersions ( ) . length > 0 ; for ( API_VERSION ver : req . supportedVersions ( ) ) { String href = req . href ( ver ) ; assert ( ! _requests . containsKey ( href ) ) : "Request with href " + href + " already registered" ; _requests . put ( href , req ) ; req . registered ( ver ) ; } return req ; } | Registers the request with the request server . |
28,922 | public static void start ( ) { new Thread ( new Runnable ( ) { public void run ( ) { while ( true ) { try { SERVER = new RequestServer ( H2O . _apiSocket ) ; break ; } catch ( Exception ioe ) { Log . err ( Sys . HTTPD , "Launching NanoHTTP server got " , ioe ) ; try { Thread . sleep ( 1000 ) ; } catch ( InterruptedException e ) { } } } } } , "Request Server launcher" ) . start ( ) ; } | Keep spinning until we get to launch the NanoHTTPD |
28,923 | public final T invoke ( Key key ) { RPC < Atomic < T > > rpc = fork ( key ) ; return ( T ) ( rpc == null ? this : rpc . get ( ) ) ; } | Block until it completes even if run remotely |
28,924 | public static String [ ] concat ( String [ ] ... aa ) { int l = 0 ; for ( String [ ] a : aa ) l += a . length ; String [ ] r = new String [ l ] ; l = 0 ; for ( String [ ] a : aa ) { System . arraycopy ( a , 0 , r , l , a . length ) ; l += a . length ; } return r ; } | Concatenate given list of arrays into one long array . |
28,925 | public static Frame parse ( File file ) { Key fkey = NFSFileVec . make ( file ) ; Key dest = Key . make ( file . getName ( ) ) ; Frame frame = ParseDataset2 . parse ( dest , new Key [ ] { fkey } ) ; return frame ; } | Parse a dataset into a Frame . |
28,926 | public static Frame create ( String [ ] headers , double [ ] [ ] rows ) { Futures fs = new Futures ( ) ; Vec [ ] vecs = new Vec [ rows [ 0 ] . length ] ; Key keys [ ] = new Vec . VectorGroup ( ) . addVecs ( vecs . length ) ; for ( int c = 0 ; c < vecs . length ; c ++ ) { AppendableVec vec = new AppendableVec ( keys [ c ] ) ; NewChunk chunk = new NewChunk ( vec , 0 ) ; for ( int r = 0 ; r < rows . length ; r ++ ) chunk . addNum ( rows [ r ] [ c ] ) ; chunk . close ( 0 , fs ) ; vecs [ c ] = vec . close ( fs ) ; } fs . blockForPending ( ) ; return new Frame ( headers , vecs ) ; } | Creates a frame programmatically . |
28,927 | public Value chunkIdx ( int cidx ) { final long nchk = nChunks ( ) ; assert 0 <= cidx && cidx < nchk ; Key dkey = chunkKey ( cidx ) ; Value val1 = DKV . get ( dkey ) ; if ( val1 != null ) return val1 ; int len = ( int ) ( cidx < nchk - 1 ? CHUNK_SZ : ( _len - chunk2StartElem ( cidx ) ) ) ; Value val2 = new Value ( dkey , len , null , TypeMap . C1NCHUNK , _be ) ; val2 . setdsk ( ) ; Futures fs = dkey . home ( ) ? null : new Futures ( ) ; Value val3 = DKV . DputIfMatch ( dkey , val2 , null , fs ) ; if ( ! dkey . home ( ) && fs != null ) fs . blockForPending ( ) ; return val3 == null ? val2 : val3 ; } | Touching the DVec will force the file load . |
28,928 | private static void summarizeAndEnhanceFrame ( FrameSummary summary , Frame frame , boolean find_compatible_models , Map < String , Model > all_models , Map < String , Set < String > > all_models_cols ) { UniqueId unique_id = frame . getUniqueId ( ) ; summary . id = unique_id . getId ( ) ; summary . key = unique_id . getKey ( ) ; summary . creation_epoch_time_millis = unique_id . getCreationEpochTimeMillis ( ) ; summary . column_names = frame . _names ; summary . is_raw_frame = frame . isRawData ( ) ; if ( find_compatible_models ) { Map < String , Model > compatible_models = findCompatibleModels ( frame , all_models , all_models_cols ) ; summary . compatible_models = compatible_models . keySet ( ) ; } } | Summarize fields in water . fvec . Frame . |
28,929 | private Response serveOneOrAll ( Map < String , Frame > framesMap ) { Pair < Map < String , Model > , Map < String , Set < String > > > models_info = fetchModels ( ) ; Map < String , Model > all_models = models_info . getFirst ( ) ; Map < String , Set < String > > all_models_cols = models_info . getSecond ( ) ; Map < String , FrameSummary > frameSummaries = Frames . generateFrameSummaries ( null , framesMap , find_compatible_models , all_models , all_models_cols ) ; Map resultsMap = new LinkedHashMap ( ) ; resultsMap . put ( "frames" , frameSummaries ) ; if ( this . find_compatible_models ) { Set < String > all_referenced_models = new TreeSet < String > ( ) ; for ( Map . Entry < String , FrameSummary > entry : frameSummaries . entrySet ( ) ) { FrameSummary summary = entry . getValue ( ) ; all_referenced_models . addAll ( summary . compatible_models ) ; } Map < String , ModelSummary > modelSummaries = Models . generateModelSummaries ( all_referenced_models , all_models , false , null , null ) ; resultsMap . put ( "models" , modelSummaries ) ; } String json = gson . toJson ( resultsMap ) ; JsonObject result = gson . fromJson ( json , JsonElement . class ) . getAsJsonObject ( ) ; return Response . done ( result ) ; } | For one or more Frame from the KV store sumamrize and enhance them and Response containing a map of them . |
28,930 | public static Vec compose ( TransfVec origVec , int [ ] [ ] transfMap , String [ ] domain , boolean keepOrig ) { int [ ] [ ] domMap = Utils . compose ( new int [ ] [ ] { origVec . _values , origVec . _indexes } , transfMap ) ; Vec result = origVec . masterVec ( ) . makeTransf ( domMap [ 0 ] , domMap [ 1 ] , domain ) ; ; if ( ! keepOrig ) DKV . remove ( origVec . _key ) ; return result ; } | Compose given origVector with given transformation . Always returns a new vector . Original vector is kept if keepOrig is true . |
28,931 | public static Response redirect ( Request req , Key src_key ) { return Response . redirect ( req , "/2/Inspector" , "src_key" , src_key . toString ( ) ) ; } | Called from some other page to redirect that other page to this page . |
28,932 | public void addr ( NewChunk nc ) { long [ ] tmpl = _ls ; _ls = nc . _ls ; nc . _ls = tmpl ; int [ ] tmpi = _xs ; _xs = nc . _xs ; nc . _xs = tmpi ; tmpi = _id ; _id = nc . _id ; nc . _id = tmpi ; double [ ] tmpd = _ds ; _ds = nc . _ds ; nc . _ds = tmpd ; int tmp = _sparseLen ; _sparseLen = nc . _sparseLen ; nc . _sparseLen = tmp ; tmp = _len ; _len = nc . _len ; nc . _len = tmp ; add ( nc ) ; } | PREpend all of nc onto the current NewChunk . Kill nc . |
28,933 | void append2 ( long l , int x ) { if ( _id == null || l != 0 ) { if ( _ls == null || _sparseLen == _ls . length ) { append2slow ( ) ; append2 ( l , x ) ; return ; } _ls [ _sparseLen ] = l ; _xs [ _sparseLen ] = x ; if ( _id != null ) _id [ _sparseLen ] = _len ; _sparseLen ++ ; } _len ++ ; assert _sparseLen <= _len ; } | Fast - path append long data |
28,934 | static public void put ( Key key , Value val , Futures fs ) { assert ! val . isLockable ( ) ; Value res = DKV . put ( key , val , fs ) ; assert res == null || ! res . isLockable ( ) ; } | have to use the Lockable interface for all updates . |
28,935 | static public void put ( Key key , Freezable fr ) { if ( fr == null ) UKV . remove ( key ) ; else UKV . put ( key , new Value ( key , fr ) ) ; } | Also allow auto - serialization |
28,936 | void remove_task_tracking ( int task ) { RPC . RPCCall rpc = _work . get ( task ) ; if ( rpc == null ) return ; DTask dt = rpc . _dt ; if ( dt != null && RPC . RPCCall . CAS_DT . compareAndSet ( rpc , dt , null ) ) { assert rpc . _computed : "Still not done #" + task + " " + dt . getClass ( ) + " from " + rpc . _client ; AckAckTimeOutThread . PENDING . remove ( rpc ) ; dt . onAckAck ( ) ; } while ( true ) { int t = _removed_task_ids . get ( ) ; RPC . RPCCall rpc2 = _work . get ( t + 1 ) ; if ( rpc2 == null || rpc2 . _dt != null || ! _removed_task_ids . compareAndSet ( t , t + 1 ) ) break ; _work . remove ( t + 1 ) ; } } | Stop tracking a remote task because we got an ACKACK . |
28,937 | public static Frame frame ( String [ ] names , double [ ] ... rows ) { assert names == null || names . length == rows [ 0 ] . length ; Futures fs = new Futures ( ) ; Vec [ ] vecs = new Vec [ rows [ 0 ] . length ] ; Key keys [ ] = Vec . VectorGroup . VG_LEN1 . addVecs ( vecs . length ) ; for ( int c = 0 ; c < vecs . length ; c ++ ) { AppendableVec vec = new AppendableVec ( keys [ c ] ) ; NewChunk chunk = new NewChunk ( vec , 0 ) ; for ( int r = 0 ; r < rows . length ; r ++ ) chunk . addNum ( rows [ r ] [ c ] ) ; chunk . close ( 0 , fs ) ; vecs [ c ] = vec . close ( fs ) ; } fs . blockForPending ( ) ; return new Frame ( names , vecs ) ; } | Create a new frame based on given row data . |
28,938 | public static Frame parseFrame ( Key okey , File ... files ) { assert files . length > 0 : "Ups. No files to parse!" ; for ( File f : files ) if ( ! f . exists ( ) ) throw new RuntimeException ( "File not found " + f ) ; if ( okey == null ) okey = Key . make ( files [ 0 ] . getName ( ) ) ; Key [ ] fkeys = new Key [ files . length ] ; int cnt = 0 ; for ( File f : files ) fkeys [ cnt ++ ] = NFSFileVec . make ( f ) ; return parseFrame ( okey , fkeys ) ; } | Parse given file into the form of frame represented by the given key . |
28,939 | private void processCustomDimensionParameters ( @ SuppressWarnings ( "rawtypes" ) GoogleAnalyticsRequest request , List < NameValuePair > postParms ) { Map < String , String > customDimParms = new HashMap < String , String > ( ) ; for ( String defaultCustomDimKey : defaultRequest . customDimentions ( ) . keySet ( ) ) { customDimParms . put ( defaultCustomDimKey , defaultRequest . customDimentions ( ) . get ( defaultCustomDimKey ) ) ; } @ SuppressWarnings ( "unchecked" ) Map < String , String > requestCustomDims = request . customDimentions ( ) ; for ( String requestCustomDimKey : requestCustomDims . keySet ( ) ) { customDimParms . put ( requestCustomDimKey , requestCustomDims . get ( requestCustomDimKey ) ) ; } for ( String key : customDimParms . keySet ( ) ) { postParms . add ( new BasicNameValuePair ( key , customDimParms . get ( key ) ) ) ; } } | Processes the custom dimensions and adds the values to list of parameters which would be posted to GA . |
28,940 | private void processCustomMetricParameters ( @ SuppressWarnings ( "rawtypes" ) GoogleAnalyticsRequest request , List < NameValuePair > postParms ) { Map < String , String > customMetricParms = new HashMap < String , String > ( ) ; for ( String defaultCustomMetricKey : defaultRequest . custommMetrics ( ) . keySet ( ) ) { customMetricParms . put ( defaultCustomMetricKey , defaultRequest . custommMetrics ( ) . get ( defaultCustomMetricKey ) ) ; } @ SuppressWarnings ( "unchecked" ) Map < String , String > requestCustomMetrics = request . custommMetrics ( ) ; for ( String requestCustomDimKey : requestCustomMetrics . keySet ( ) ) { customMetricParms . put ( requestCustomDimKey , requestCustomMetrics . get ( requestCustomDimKey ) ) ; } for ( String key : customMetricParms . keySet ( ) ) { postParms . add ( new BasicNameValuePair ( key , customMetricParms . get ( key ) ) ) ; } } | Processes the custom metrics and adds the values to list of parameters which would be posted to GA . |
28,941 | public static int clen ( int values , int bpv ) { int len = ( values * bpv ) >> 3 ; return values * bpv % 8 == 0 ? len : len + 1 ; } | Returns compressed len of the given array length if the value if represented by bpv - bits . |
28,942 | public static Value get ( H2ONode target , Key key , int priority ) { RPC < TaskGetKey > rpc , old ; while ( true ) { rpc = TGKS . get ( key ) ; if ( rpc != null && rpc . _dt . _priority >= priority ) break ; old = rpc ; rpc = new RPC ( target , new TaskGetKey ( key , priority ) , 1.0f ) ; if ( TGKS . putIfMatchUnlocked ( key , rpc , old ) == old ) { rpc . setTaskNum ( ) . call ( ) ; break ; } } Value val = rpc . get ( ) . _val ; TGKS . putIfMatchUnlocked ( key , null , rpc ) ; return val ; } | Get a value from a named remote node |
28,943 | protected String build ( Response response ) { StringBuilder sb = new StringBuilder ( ) ; sb . append ( "<div class='container'>" ) ; sb . append ( "<div class='row-fluid'>" ) ; sb . append ( "<div class='span12'>" ) ; sb . append ( buildJSONResponseBox ( response ) ) ; if ( response . _status == Response . Status . done ) response . toJava ( sb ) ; sb . append ( buildResponseHeader ( response ) ) ; Builder builder = response . getBuilderFor ( ROOT_OBJECT ) ; if ( builder == null ) { sb . append ( "<h3>" + name ( ) + "</h3>" ) ; builder = OBJECT_BUILDER ; } for ( String h : response . getHeaders ( ) ) sb . append ( h ) ; if ( response . _response == null ) { boolean done = response . _req . toHTML ( sb ) ; if ( ! done ) { JsonParser parser = new JsonParser ( ) ; String json = new String ( response . _req . writeJSON ( new AutoBuffer ( ) ) . buf ( ) ) ; JsonObject o = ( JsonObject ) parser . parse ( json ) ; sb . append ( builder . build ( response , o , "" ) ) ; } } else sb . append ( builder . build ( response , response . _response , "" ) ) ; sb . append ( "</div></div></div>" ) ; return sb . toString ( ) ; } | Builds the HTML for the given response . |
28,944 | public void addExternalJars ( File file ) throws IllegalAccessException , InvocationTargetException , MalformedURLException { assert file . exists ( ) : "Unable to find external file: " + file . getAbsolutePath ( ) ; if ( file . isDirectory ( ) ) { for ( File f : file . listFiles ( ) ) addExternalJars ( f ) ; } else if ( file . getName ( ) . endsWith ( ".jar" ) ) { Log . POST ( 22 , "before (in addExternalJars) invoke _addUrl " + file . toURI ( ) . toURL ( ) ) ; _addUrl . invoke ( _systemLoader , file . toURI ( ) . toURL ( ) ) ; Log . POST ( 22 , "after (in addExternalJars) invoke _addUrl " + file . toURI ( ) . toURL ( ) ) ; } } | Adds all jars in given directory to the classpath . |
28,945 | private void extractInternalFiles ( ) throws IOException { Enumeration entries = _h2oJar . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry e = ( ZipEntry ) entries . nextElement ( ) ; String name = e . getName ( ) ; if ( e . isDirectory ( ) ) continue ; if ( ! name . endsWith ( ".jar" ) ) continue ; File out = internalFile ( name ) ; out . getParentFile ( ) . mkdirs ( ) ; try { FileOutputStream fos = new FileOutputStream ( out ) ; BufferedInputStream is = new BufferedInputStream ( _h2oJar . getInputStream ( e ) ) ; BufferedOutputStream os = new BufferedOutputStream ( fos ) ; int read ; byte [ ] buffer = new byte [ 4096 ] ; while ( ( read = is . read ( buffer ) ) != - 1 ) os . write ( buffer , 0 , read ) ; os . flush ( ) ; fos . getFD ( ) . sync ( ) ; os . close ( ) ; is . close ( ) ; } catch ( FileNotFoundException ex ) { } catch ( IOException ex ) { Log . die ( "Unable to extract file " + name + " because of " + ex + ". Make sure that directory " + _parentDir + " contains at least 50MB of free space to unpack H2O libraries." ) ; throw ex ; } } } | Extracts the libraries from the jar file to given local path . |
28,946 | public synchronized Class loadClass ( String name , boolean resolve ) throws ClassNotFoundException { assert ! name . equals ( Weaver . class . getName ( ) ) ; Class z = loadClass2 ( name ) ; if ( resolve ) resolveClass ( z ) ; return z ; } | search THEN the System or parent loader . |
28,947 | private final Class loadClass2 ( String name ) throws ClassNotFoundException { Class z = findLoadedClass ( name ) ; if ( z != null ) return z ; if ( _weaver == null ) _weaver = new Weaver ( ) ; z = _weaver . weaveAndLoad ( name , this ) ; if ( z != null ) { return z ; } z = getParent ( ) . loadClass ( name ) ; if ( z != null ) return z ; return z ; } | Run the class lookups in my favorite non - default order . |
28,948 | private RequestArguments . Argument arg ( Request R ) { if ( _arg != null ) return _arg ; Class clzz = R . getClass ( ) ; while ( true ) { try { Field field = clzz . getDeclaredField ( _name ) ; field . setAccessible ( true ) ; Object o = field . get ( R ) ; return _arg = ( ( RequestArguments . Argument ) o ) ; } catch ( NoSuchFieldException ie ) { clzz = clzz . getSuperclass ( ) ; } catch ( IllegalAccessException ie ) { break ; } catch ( ClassCastException ie ) { break ; } } return null ; } | Specific accessors for input arguments . Not valid for JSON output fields . |
28,949 | public static void launchEC2 ( Class < ? extends Job > job , int boxes ) throws Exception { EC2 ec2 = new EC2 ( ) ; ec2 . boxes = boxes ; Cloud c = ec2 . resize ( ) ; launch ( c , job ) ; } | Starts EC2 machines and builds a cluster . |
28,950 | public void compute2 ( ) { if ( Job . isRunning ( _jobKey ) ) { Timer timer = new Timer ( ) ; _stats [ 0 ] = new ThreadLocal < hex . singlenoderf . Statistic > ( ) ; _stats [ 1 ] = new ThreadLocal < hex . singlenoderf . Statistic > ( ) ; Data d = _sampler . sample ( _data , _seed , _modelKey , _local_mode ) ; hex . singlenoderf . Statistic left = getStatistic ( 0 , d , _seed , _exclusiveSplitLimit ) ; for ( Row r : d ) left . addQ ( r , _regression ) ; if ( ! _regression ) left . applyClassWeights ( ) ; hex . singlenoderf . Statistic . Split spl = left . split ( d , false ) ; if ( spl . isLeafNode ( ) ) { if ( _regression ) { float av = d . computeAverage ( ) ; _tree = new LeafNode ( - 1 , d . rows ( ) , av ) ; } else { _tree = new LeafNode ( _data . unmapClass ( spl . _split ) , d . rows ( ) , - 1 ) ; } } else { _tree = new FJBuild ( spl , d , 0 , _seed ) . compute ( ) ; } _stats = null ; if ( _jobKey != null && ! Job . isRunning ( _jobKey ) ) throw new Job . JobCancelledException ( ) ; Key tkey = toKey ( ) ; Key dtreeKey = null ; if ( _score_pojo ) dtreeKey = toCompressedKey ( ) ; appendKey ( _modelKey , tkey , dtreeKey , _verbose > 10 ? _tree . toString ( new StringBuilder ( "" ) , Integer . MAX_VALUE ) . toString ( ) : "" , _data_id ) ; StringBuilder sb = new StringBuilder ( "[RF] Tree : " ) . append ( _data_id + 1 ) ; sb . append ( " d=" ) . append ( _tree . depth ( ) ) . append ( " leaves=" ) . append ( _tree . leaves ( ) ) . append ( " done in " ) . append ( timer ) . append ( '\n' ) ; Log . info ( sb . toString ( ) ) ; if ( _verbose > 10 ) { } } else throw new Job . JobCancelledException ( ) ; tryComplete ( ) ; } | Actually build the tree |
28,951 | static void appendKey ( Key model , final Key tKey , final Key dtKey , final String tString , final int tree_id ) { final int selfIdx = H2O . SELF . index ( ) ; new TAtomic < SpeeDRFModel > ( ) { public SpeeDRFModel atomic ( SpeeDRFModel old ) { if ( old == null ) return null ; return SpeeDRFModel . make ( old , tKey , dtKey , selfIdx , tString , tree_id ) ; } } . invoke ( model ) ; } | which serializes for free . |
28,952 | public Key toKey ( ) { AutoBuffer bs = new AutoBuffer ( ) ; bs . put4 ( _data_id ) ; bs . put8 ( _seed ) ; bs . put1 ( _producerId ) ; _tree . write ( bs ) ; Key key = Key . make ( ( byte ) 1 , Key . DFJ_INTERNAL_USER , H2O . SELF ) ; DKV . put ( key , new Value ( key , bs . buf ( ) ) ) ; return key ; } | Write the Tree to a random Key homed here . |
28,953 | public static double classify ( AutoBuffer ts , double [ ] ds , double badat , boolean regression ) { ts . get4 ( ) ; ts . get8 ( ) ; ts . get1 ( ) ; byte b ; while ( ( b = ( byte ) ts . get1 ( ) ) != '[' ) { assert b == '(' || b == 'S' || b == 'E' ; int col = ts . get2 ( ) ; float fcmp = ts . get4f ( ) ; float fdat = Double . isNaN ( ds [ col ] ) ? fcmp - 1 : ( float ) ds [ col ] ; int skip = ( ts . get1 ( ) & 0xFF ) ; if ( skip == 0 ) skip = ts . get3 ( ) ; if ( b == 'E' ) { if ( fdat != fcmp ) ts . position ( ts . position ( ) + skip ) ; } else { if ( fdat > fcmp ) ts . position ( ts . position ( ) + skip ) ; } } if ( regression ) return ts . get4f ( ) ; return ts . get1 ( ) & 0xFF ; } | Classify on the compressed tree bytes from the pre - packed double data |
28,954 | public TreeModel . CompressedTree compress ( ) { int size = _tree . dtreeSize ( ) ; if ( _tree instanceof LeafNode ) { size += 3 ; } AutoBuffer ab = new AutoBuffer ( size ) ; if ( _tree instanceof LeafNode ) ab . put1 ( 0 ) . put2 ( ( char ) 65535 ) ; _tree . compress ( ab ) ; assert ab . position ( ) == size : "Actual size doesn't agree calculated size." ; char _nclass = ( char ) _data . classes ( ) ; return new TreeModel . CompressedTree ( ab . buf ( ) , _nclass , _seed ) ; } | Build a compressed - tree struct |
28,955 | protected void execImpl ( ) { Frame frame = source ; if ( shuffle ) { frame = MRUtils . shuffleFramePerChunk ( Utils . generateShuffledKey ( frame . _key ) , frame , seed ) ; frame . delete_and_lock ( null ) . unlock ( null ) ; gtrash ( frame ) ; } FrameSplitter fs = new FrameSplitter ( frame , ratios ) ; H2O . submitTask ( fs ) ; Frame [ ] splits = fs . getResult ( ) ; split_keys = new Key [ splits . length ] ; split_rows = new long [ splits . length ] ; float rsum = Utils . sum ( ratios ) ; split_ratios = Arrays . copyOf ( ratios , splits . length ) ; split_ratios [ splits . length - 1 ] = 1f - rsum ; long sum = 0 ; for ( int i = 0 ; i < splits . length ; i ++ ) { sum += splits [ i ] . numRows ( ) ; split_keys [ i ] = splits [ i ] . _key ; split_rows [ i ] = splits [ i ] . numRows ( ) ; } assert sum == source . numRows ( ) : "Frame split produced wrong number of rows: nrows(source) != sum(nrows(splits))" ; } | Run the function |
28,956 | public static < T > String qlink ( Class < T > page , Key k , String content ) { return qlink ( page , "source" , k , content ) ; } | Return the query link to this page |
28,957 | public static Object malloc ( int elems , long bytes , int type , Object orig , int from ) { return malloc ( elems , bytes , type , orig , from , false ) ; } | Catches OutOfMemory clears cache & retries . |
28,958 | public static boolean tryReserveTaskMem ( long m ) { if ( ! CAN_ALLOC ) return false ; if ( m == 0 ) return true ; assert m >= 0 : "m < 0: " + m ; long current = _taskMem . addAndGet ( - m ) ; if ( current < 0 ) { _taskMem . addAndGet ( m ) ; return false ; } return true ; } | Try to reserve memory needed for task execution and return true if succeeded . Tasks have a shared pool of memory which they should ask for in advance before they even try to allocate it . |
28,959 | private static int [ ] determineSeparatorCounts ( String from , int single_quote ) { int [ ] result = new int [ separators . length ] ; byte [ ] bits = from . getBytes ( ) ; boolean in_quote = false ; for ( int j = 0 ; j < bits . length ; j ++ ) { byte c = bits [ j ] ; if ( ( c == single_quote ) || ( c == CHAR_DOUBLE_QUOTE ) ) in_quote ^= true ; if ( ! in_quote || c == HIVE_SEP ) for ( int i = 0 ; i < separators . length ; ++ i ) if ( c == separators [ i ] ) ++ result [ i ] ; } return result ; } | Determines the number of separators in the given line . Correctly handles quoted tokens .
28,960 | private static String [ ] determineTokens ( String from , byte separator , int single_quote ) { ArrayList < String > tokens = new ArrayList ( ) ; byte [ ] bits = from . getBytes ( ) ; int offset = 0 ; int quotes = 0 ; while ( offset < bits . length ) { while ( ( offset < bits . length ) && ( bits [ offset ] == CHAR_SPACE ) ) ++ offset ; if ( offset == bits . length ) break ; StringBuilder t = new StringBuilder ( ) ; byte c = bits [ offset ] ; if ( ( c == CHAR_DOUBLE_QUOTE ) || ( c == single_quote ) ) { quotes = c ; ++ offset ; } while ( offset < bits . length ) { c = bits [ offset ] ; if ( ( c == quotes ) ) { ++ offset ; if ( ( offset < bits . length ) && ( bits [ offset ] == c ) ) { t . append ( ( char ) c ) ; ++ offset ; continue ; } quotes = 0 ; } else if ( ( quotes == 0 ) && ( ( c == separator ) || ( c == CHAR_CR ) || ( c == CHAR_LF ) ) ) { break ; } else { t . append ( ( char ) c ) ; ++ offset ; } } c = ( offset == bits . length ) ? CHAR_LF : bits [ offset ] ; tokens . add ( t . toString ( ) ) ; if ( ( c == CHAR_CR ) || ( c == CHAR_LF ) || ( offset == bits . length ) ) break ; if ( c != separator ) return new String [ 0 ] ; ++ offset ; } if ( bits [ bits . length - 1 ] == separator && bits [ bits . length - 1 ] != CHAR_SPACE ) tokens . add ( "" ) ; return tokens . toArray ( new String [ tokens . size ( ) ] ) ; } | Determines the tokens that are inside a line and returns them as strings in an array . Assumes the given separator . |
28,961 | protected static void summarizeAndEnhanceModel ( ModelSummary summary , Model model , boolean find_compatible_frames , Map < String , Frame > all_frames , Map < String , Set < String > > all_frames_cols ) { if ( model instanceof GLMModel ) { summarizeGLMModel ( summary , ( GLMModel ) model ) ; } else if ( model instanceof DRF . DRFModel ) { summarizeDRFModel ( summary , ( DRF . DRFModel ) model ) ; } else if ( model instanceof hex . deeplearning . DeepLearningModel ) { summarizeDeepLearningModel ( summary , ( hex . deeplearning . DeepLearningModel ) model ) ; } else if ( model instanceof hex . gbm . GBM . GBMModel ) { summarizeGBMModel ( summary , ( hex . gbm . GBM . GBMModel ) model ) ; } else if ( model instanceof hex . singlenoderf . SpeeDRFModel ) { summarizeSpeeDRFModel ( summary , ( hex . singlenoderf . SpeeDRFModel ) model ) ; } else if ( model instanceof NBModel ) { summarizeNBModel ( summary , ( NBModel ) model ) ; } else { summarizeModelCommonFields ( summary , model ) ; } if ( find_compatible_frames ) { Map < String , Frame > compatible_frames = findCompatibleFrames ( model , all_frames , all_frames_cols ) ; summary . compatible_frames = compatible_frames . keySet ( ) ; } } | Summarize subclasses of water . Model . |
28,962 | private static void summarizeModelCommonFields ( ModelSummary summary , Model model ) { String [ ] names = model . _names ; summary . warnings = model . warnings ; summary . model_algorithm = model . getClass ( ) . toString ( ) ; Key job_key = ( ( Job ) model . job ( ) ) . self ( ) ; if ( null == job_key ) throw H2O . fail ( "Null job key for model: " + ( model == null ? "null model" : model . _key ) ) ; Job job = DKV . get ( job_key ) . get ( ) ; summary . state = job . getState ( ) ; summary . model_category = model . getModelCategory ( ) ; UniqueId unique_id = model . getUniqueId ( ) ; summary . id = unique_id . getId ( ) ; summary . key = unique_id . getKey ( ) ; summary . creation_epoch_time_millis = unique_id . getCreationEpochTimeMillis ( ) ; summary . training_duration_in_ms = model . training_duration_in_ms ; summary . response_column_name = names [ names . length - 1 ] ; for ( int i = 0 ; i < names . length - 1 ; i ++ ) summary . input_column_names . add ( names [ i ] ) ; VarImp vi = model . varimp ( ) ; if ( null != vi ) { summary . variable_importances = new LinkedHashMap ( ) ; summary . variable_importances . put ( "varimp" , vi . varimp ) ; summary . variable_importances . put ( "variables" , vi . getVariables ( ) ) ; summary . variable_importances . put ( "method" , vi . method ) ; summary . variable_importances . put ( "max_var" , vi . max_var ) ; summary . variable_importances . put ( "scaled" , vi . scaled ( ) ) ; } } | Summarize fields which are generic to water . Model . |
28,963 | private static void summarizeGLMModel ( ModelSummary summary , hex . glm . GLMModel model ) { summarizeModelCommonFields ( summary , model ) ; summary . model_algorithm = "GLM" ; JsonObject all_params = ( model . get_params ( ) ) . toJSON ( ) ; summary . critical_parameters = whitelistJsonObject ( all_params , GLM_critical_params ) ; summary . secondary_parameters = whitelistJsonObject ( all_params , GLM_secondary_params ) ; summary . expert_parameters = whitelistJsonObject ( all_params , GLM_expert_params ) ; } | Summarize fields which are specific to hex . glm . GLMModel . |
28,964 | private static void summarizeDRFModel ( ModelSummary summary , hex . drf . DRF . DRFModel model ) { summarizeModelCommonFields ( summary , model ) ; summary . model_algorithm = "BigData RF" ; JsonObject all_params = ( model . get_params ( ) ) . toJSON ( ) ; summary . critical_parameters = whitelistJsonObject ( all_params , DRF_critical_params ) ; summary . secondary_parameters = whitelistJsonObject ( all_params , DRF_secondary_params ) ; summary . expert_parameters = whitelistJsonObject ( all_params , DRF_expert_params ) ; } | Summarize fields which are specific to hex . drf . DRF . DRFModel . |
28,965 | private static void summarizeSpeeDRFModel ( ModelSummary summary , hex . singlenoderf . SpeeDRFModel model ) { summarizeModelCommonFields ( summary , model ) ; summary . model_algorithm = "Random Forest" ; JsonObject all_params = ( model . get_params ( ) ) . toJSON ( ) ; summary . critical_parameters = whitelistJsonObject ( all_params , SpeeDRF_critical_params ) ; summary . secondary_parameters = whitelistJsonObject ( all_params , SpeeDRF_secondary_params ) ; summary . expert_parameters = whitelistJsonObject ( all_params , SpeeDRF_expert_params ) ; } | Summarize fields which are specific to hex . singlenoderf . SpeeDRFModel .
28,966 | private static void summarizeDeepLearningModel ( ModelSummary summary , hex . deeplearning . DeepLearningModel model ) { summarizeModelCommonFields ( summary , model ) ; summary . model_algorithm = "DeepLearning" ; JsonObject all_params = ( model . get_params ( ) ) . toJSON ( ) ; summary . critical_parameters = whitelistJsonObject ( all_params , DL_critical_params ) ; summary . secondary_parameters = whitelistJsonObject ( all_params , DL_secondary_params ) ; summary . expert_parameters = whitelistJsonObject ( all_params , DL_expert_params ) ; } | Summarize fields which are specific to hex . deeplearning . DeepLearningModel . |
28,967 | private static void summarizeGBMModel ( ModelSummary summary , hex . gbm . GBM . GBMModel model ) { summarizeModelCommonFields ( summary , model ) ; summary . model_algorithm = "GBM" ; JsonObject all_params = ( model . get_params ( ) ) . toJSON ( ) ; summary . critical_parameters = whitelistJsonObject ( all_params , GBM_critical_params ) ; summary . secondary_parameters = whitelistJsonObject ( all_params , GBM_secondary_params ) ; summary . expert_parameters = whitelistJsonObject ( all_params , GBM_expert_params ) ; } | Summarize fields which are specific to hex . gbm . GBM . GBMModel . |
28,968 | private static void summarizeNBModel ( ModelSummary summary , hex . nb . NBModel model ) { summarizeModelCommonFields ( summary , model ) ; summary . model_algorithm = "Naive Bayes" ; JsonObject all_params = ( model . get_params ( ) ) . toJSON ( ) ; summary . critical_parameters = whitelistJsonObject ( all_params , NB_critical_params ) ; summary . secondary_parameters = whitelistJsonObject ( all_params , NB_secondary_params ) ; summary . expert_parameters = whitelistJsonObject ( all_params , NB_expert_params ) ; } | Summarize fields which are specific to hex . nb . NBModel . |
28,969 | protected Map < String , Model > fetchAll ( ) { return H2O . KeySnapshot . globalSnapshot ( ) . fetchAll ( water . Model . class ) ; } | Fetch all Models from the KV store . |
28,970 | private Response serveOneOrAll ( Map < String , Model > modelsMap ) { Pair < Map < String , Frame > , Map < String , Set < String > > > frames_info = fetchFrames ( ) ; Map < String , Frame > all_frames = frames_info . getFirst ( ) ; Map < String , Set < String > > all_frames_cols = frames_info . getSecond ( ) ; Map < String , ModelSummary > modelSummaries = Models . generateModelSummaries ( null , modelsMap , find_compatible_frames , all_frames , all_frames_cols ) ; Map resultsMap = new LinkedHashMap ( ) ; resultsMap . put ( "models" , modelSummaries ) ; if ( this . find_compatible_frames ) { Set < String > all_referenced_frames = new TreeSet < String > ( ) ; for ( Map . Entry < String , ModelSummary > entry : modelSummaries . entrySet ( ) ) { ModelSummary summary = entry . getValue ( ) ; all_referenced_frames . addAll ( summary . compatible_frames ) ; } Map < String , FrameSummary > frameSummaries = Frames . generateFrameSummaries ( all_referenced_frames , all_frames , false , null , null ) ; resultsMap . put ( "frames" , frameSummaries ) ; } String json = gson . toJson ( resultsMap ) ; JsonObject result = gson . fromJson ( json , JsonElement . class ) . getAsJsonObject ( ) ; return Response . done ( result ) ; } | Fetch all the Models from the KV store , summarize and enhance them , and return a map of them .
28,971 | static ParseProgress make ( Key [ ] fkeys ) { long total = 0 ; for ( Key fkey : fkeys ) total += getVec ( fkey ) . length ( ) ; return new ParseProgress ( 0 , total ) ; } | Total number of steps is equal to total bytecount across files |
28,972 | protected static String xml2jname ( String xml ) { StringBuilder nn = new StringBuilder ( ) ; char [ ] cs = xml . toCharArray ( ) ; if ( ! Character . isJavaIdentifierStart ( cs [ 0 ] ) ) nn . append ( 'X' ) ; for ( char c : cs ) { if ( ! Character . isJavaIdentifierPart ( c ) ) { nn . append ( '_' ) ; } else { nn . append ( c ) ; } } String jname = nn . toString ( ) ; return jname ; } | Convert an XML name to a java name |
28,973 | protected static String uniqueClassName ( String name ) { String cname = xml2jname ( name ) ; if ( CLASS_NAMES . contains ( cname ) ) { int i = 0 ; while ( CLASS_NAMES . contains ( cname + i ) ) i ++ ; cname = cname + i ; } CLASS_NAMES . add ( cname ) ; return cname ; } | Make a unique class name for jit d subclasses of ScoreModel |
28,974 | public int [ ] columnMapping ( String [ ] features ) { int [ ] map = new int [ _colNames . length ] ; for ( int i = 0 ; i < _colNames . length ; i ++ ) { map [ i ] = - 1 ; for ( int j = 0 ; j < features . length ; j ++ ) { if ( _colNames [ i ] . equals ( features [ j ] ) ) { if ( map [ i ] != - 1 ) throw new IllegalArgumentException ( "duplicate feature " + _colNames [ i ] ) ; map [ i ] = j ; } } if ( map [ i ] == - 1 ) Log . warn ( Sys . SCORM , "Model feature " + _colNames [ i ] + " not in the provided feature list from the data" ) ; } return map ; } | needs then this map will contain a - 1 for the missing feature index . |
28,975 | public void setHdfs ( ) { assert onICE ( ) ; byte [ ] mem = memOrLoad ( ) ; _persist = Value . HDFS | Value . NOTdsk ; Persist . I [ Value . HDFS ] . store ( this ) ; removeIce ( ) ; assert onHDFS ( ) ; _mem = mem ; } | Set persistence to HDFS from ICE |
28,976 | public InputStream openStream ( ProgressMonitor p ) throws IOException { if ( onNFS ( ) ) return PersistNFS . openStream ( _key ) ; if ( onHDFS ( ) ) return PersistHdfs . openStream ( _key , p ) ; if ( onS3 ( ) ) return PersistS3 . openStream ( _key , p ) ; if ( onTachyon ( ) ) return PersistTachyon . openStream ( _key , p ) ; if ( isFrame ( ) ) throw new IllegalArgumentException ( "Tried to pass a Frame to openStream (maybe tried to parse a (already-parsed) Frame?)" ) ; assert _type == TypeMap . PRIM_B : "Expected byte[] type but got " + TypeMap . className ( _type ) ; return new ByteArrayInputStream ( memOrLoad ( ) ) ; } | Creates a Stream for reading bytes |
28,977 | void lowerActiveGetCount ( H2ONode h2o ) { assert _key . home ( ) ; assert h2o != H2O . SELF ; while ( true ) { int old = _rwlock . get ( ) ; assert old > 0 ; assert old != - 1 ; assert _replicas . contains ( h2o . _unique_idx ) ; if ( RW_CAS ( old , old - 1 , "rlock-" ) ) { if ( old - 1 == 0 ) synchronized ( this ) { notifyAll ( ) ; } return ; } } } | Atomically lower active GET count |
28,978 | void startRemotePut ( ) { assert ! _key . home ( ) ; int x = 0 ; while ( ( x = _rwlock . get ( ) ) != - 1 ) if ( x == 1 || RW_CAS ( 0 , 1 , "remote_need_notify" ) ) try { ForkJoinPool . managedBlock ( this ) ; } catch ( InterruptedException e ) { } } | Block this thread until all prior remote PUTs complete - to force remote - PUT ordering on the home node . |
28,979 | public void clear ( ) { for ( Placeholder p : _placeholders . values ( ) ) { p . start . removeTill ( p . end ) ; } } | they can be used again . |
28,980 | public void replace ( String what , Object with ) { if ( what . charAt ( 0 ) == '$' ) throw new RuntimeException ( "$ is now control char that denotes URL encoding!" ) ; for ( Placeholder p : _placeholders . get ( what ) ) p . end . insertAndAdvance ( with . toString ( ) ) ; for ( Placeholder p : _placeholders . get ( "$" + what ) ) try { p . end . insertAndAdvance ( URLEncoder . encode ( with . toString ( ) , "UTF-8" ) ) ; } catch ( IOException e ) { p . end . insertAndAdvance ( e . toString ( ) ) ; } } | another in order . |
28,981 | public RString restartGroup ( String what ) { List < Placeholder > all = _placeholders . get ( what ) ; assert all . size ( ) == 1 ; Placeholder result = all . get ( 0 ) ; if ( result . group == null ) { throw new NoSuchElementException ( "Element " + what + " is not a group." ) ; } result . group . clear ( ) ; return result . group ; } | can be filled again . |
28,982 | protected boolean handleAuthHeader ( GMS . GmsHeader gms_hdr , AuthHeader auth_hdr , Message msg ) { if ( needsAuthentication ( gms_hdr ) ) { if ( this . auth_token . authenticate ( auth_hdr . getToken ( ) , msg ) ) return true ; else { log . warn ( "%s: failed to validate AuthHeader (token: %s) from %s; dropping message and sending " + "rejection message" , local_addr , auth_token . getClass ( ) . getSimpleName ( ) , msg . src ( ) ) ; sendRejectionMessage ( gms_hdr . getType ( ) , msg . getSrc ( ) , "authentication failed" ) ; return false ; } } return true ; } | Handles a GMS header |
28,983 | public synchronized void stop ( ) { Thread tmp = runner ; runner = null ; if ( tmp != null ) { tmp . interrupt ( ) ; try { tmp . join ( 500 ) ; } catch ( InterruptedException e ) { } } while ( ! queue . isEmpty ( ) ) for ( Task entry : queue ) { entry . cancel ( true ) ; queue . remove ( entry ) ; } queue . clear ( ) ; if ( pool instanceof ThreadPoolExecutor && shut_down_pool ) { ThreadPoolExecutor p = ( ThreadPoolExecutor ) pool ; List < Runnable > remaining_tasks = p . shutdownNow ( ) ; remaining_tasks . stream ( ) . filter ( task -> task instanceof Future ) . forEach ( task -> ( ( Future ) task ) . cancel ( true ) ) ; p . getQueue ( ) . clear ( ) ; try { p . awaitTermination ( Global . THREADPOOL_SHUTDOWN_WAIT_TIME , TimeUnit . MILLISECONDS ) ; } catch ( InterruptedException e ) { } } if ( timer_thread_factory instanceof LazyThreadFactory ) ( ( LazyThreadFactory ) timer_thread_factory ) . destroy ( ) ; } | Stops the timer cancelling all tasks |
28,984 | @ ManagedOperation ( description = "Prints the send and receive buffers" ) public String printBuffers ( ) { StringBuilder sb = new StringBuilder ( "\n" ) ; synchronized ( this ) { for ( Map . Entry < Address , Connection > entry : conns . entrySet ( ) ) { NioConnection val = ( NioConnection ) entry . getValue ( ) ; sb . append ( entry . getKey ( ) ) . append ( ":\n " ) . append ( "recv_buf: " ) . append ( val . recv_buf ) . append ( "\n send_buf: " ) . append ( val . send_buf ) . append ( "\n" ) ; } } return sb . toString ( ) ; } | Prints send and receive buffers for all connections |
28,985 | public void stable ( long seqno ) { lock . lock ( ) ; try { if ( seqno <= low ) return ; if ( seqno > hd ) throw new IllegalArgumentException ( "seqno " + seqno + " cannot be bigger than hd (" + hd + ")" ) ; int from = index ( low + 1 ) , length = ( int ) ( seqno - low ) , capacity = capacity ( ) ; for ( int i = from ; i < from + length ; i ++ ) { int index = i & ( capacity - 1 ) ; buf [ index ] = null ; } if ( seqno > low ) { low = seqno ; buffer_full . signalAll ( ) ; } } finally { lock . unlock ( ) ; } } | Nulls elements between low and seqno and forwards low |
28,986 | public void adjustNodes ( java . util . List < Address > v ) { Node n ; boolean removed = false ; synchronized ( nodes ) { for ( int i = 0 ; i < nodes . size ( ) ; i ++ ) { n = nodes . get ( i ) ; if ( ! v . contains ( n . addr ) ) { System . out . println ( "adjustNodes(): node " + n + " was removed" ) ; nodes . remove ( n ) ; removed = true ; } } if ( removed ) repaint ( ) ; } } | Removes nodes that are not in the view |
28,987 | public void start ( JChannel ch ) throws Exception { channel = ch ; channel . setReceiver ( this ) ; channel . connect ( "ChatCluster" ) ; eventLoop ( ) ; channel . close ( ) ; } | Method called from other app injecting channel |
28,988 | public boolean waitForAllResponses ( long timeout ) { if ( timeout <= 0 ) timeout = 2000L ; return cond . waitFor ( this :: hasAllResponses , timeout , TimeUnit . MILLISECONDS ) ; } | Waits until all responses have been received or until a timeout has elapsed . |
28,989 | public int deliveryTableSize ( ) { int retval = 0 ; for ( BoundedHashMap < Long , Long > val : delivery_table . values ( ) ) retval += val . size ( ) ; return retval ; } | Total size of all queues of the delivery table |
28,990 | protected boolean canDeliver ( Address sender , long seqno ) { BoundedHashMap < Long , Long > seqno_set = delivery_table . get ( sender ) ; if ( seqno_set == null ) { seqno_set = new BoundedHashMap < > ( delivery_table_max_size ) ; BoundedHashMap < Long , Long > existing = delivery_table . put ( sender , seqno_set ) ; if ( existing != null ) seqno_set = existing ; } return seqno_set . add ( seqno , seqno ) ; } | Checks if seqno has already been received from sender . This weeds out duplicates . Note that this method is never called concurrently for the same sender . |
28,991 | public static Subject generateSecuritySubject ( String jassLoginConfig , String username , String password ) throws LoginException { LoginContext loginCtx = null ; try { loginCtx = new LoginContext ( jassLoginConfig , new Krb5TokenUtils . LoginCallbackHandler ( username , password ) ) ; loginCtx . login ( ) ; log . debug ( " : Krb5Token Kerberos login succeeded against user: %s" , username ) ; return loginCtx . getSubject ( ) ; } catch ( LoginException e ) { log . debug ( " : Krb5Token Kerberos login failed against user: %s" , username ) ; throw e ; } } | Authenticate against the KDC using JAAS . |
28,992 | public static byte [ ] initiateSecurityContext ( Subject subject , String servicePrincipalName ) throws GSSException { GSSManager manager = GSSManager . getInstance ( ) ; GSSName serverName = manager . createName ( servicePrincipalName , GSSName . NT_HOSTBASED_SERVICE ) ; final GSSContext context = manager . createContext ( serverName , krb5Oid , null , GSSContext . DEFAULT_LIFETIME ) ; return Subject . doAs ( subject , ( PrivilegedAction < byte [ ] > ) ( ) -> { try { byte [ ] token = new byte [ 0 ] ; context . requestMutualAuth ( false ) ; context . requestCredDeleg ( false ) ; return context . initSecContext ( token , 0 , token . length ) ; } catch ( GSSException e ) { log . error ( Util . getMessage ( "Krb5TokenKerberosContextProcessingException" ) , e ) ; return null ; } } ) ; } | Generate the service ticket that will be passed to the cluster master for authentication |
28,993 | public static String validateSecurityContext ( Subject subject , final byte [ ] serviceTicket ) throws GSSException { return Subject . doAs ( subject , ( PrivilegedAction < String > ) ( ) -> { try { GSSManager manager = GSSManager . getInstance ( ) ; GSSContext context = manager . createContext ( ( GSSCredential ) null ) ; context . acceptSecContext ( serviceTicket , 0 , serviceTicket . length ) ; return context . getSrcName ( ) . toString ( ) ; } catch ( Exception e ) { log . error ( Util . getMessage ( "Krb5TokenKerberosContextProcessingException" ) , e ) ; return null ; } } ) ; } | Validate the service ticket by extracting the client principal name |
28,994 | public int compareTo ( ViewId other ) { return id > other . id ? 1 : id < other . id ? - 1 : creator . compareTo ( other . creator ) ; } | Establishes an order between 2 ViewIds . The comparison is done on the IDs if they are equal we use the creator . |
28,995 | public < T extends ViewHandler < R > > T processing ( boolean flag ) { lock . lock ( ) ; try { setProcessing ( flag ) ; return ( T ) this ; } finally { lock . unlock ( ) ; } } | To be used by testing only! |
28,996 | protected void process ( Collection < R > requests ) { for ( ; ; ) { while ( ! requests . isEmpty ( ) ) { removeAndProcess ( requests ) ; } lock . lock ( ) ; try { if ( requests . isEmpty ( ) ) { setProcessing ( false ) ; return ; } } finally { lock . unlock ( ) ; } } } | We re guaranteed that only one thread will be called with this method at any time |
28,997 | public void setResult ( T obj ) { lock . lock ( ) ; try { result = obj ; hasResult = true ; cond . signal ( true ) ; } finally { lock . unlock ( ) ; } } | Sets the result and notifies any threads waiting for it |
28,998 | protected T _getResultWithTimeout ( final long timeout ) throws TimeoutException { if ( timeout <= 0 ) cond . waitFor ( this :: hasResult ) ; else if ( ! cond . waitFor ( this :: hasResult , timeout , TimeUnit . MILLISECONDS ) ) throw new TimeoutException ( ) ; return result ; } | Blocks until a result is available or timeout milliseconds have elapsed . Needs to be called with lock held |
28,999 | public void handleViewChange ( View view , Digest digest ) { if ( gms . isLeaving ( ) && ! view . containsMember ( gms . local_addr ) ) return ; View prev_view = gms . view ( ) ; gms . installView ( view , digest ) ; Address prev_coord = prev_view != null ? prev_view . getCoord ( ) : null , curr_coord = view . getCoord ( ) ; if ( ! Objects . equals ( curr_coord , prev_coord ) ) coordChanged ( prev_coord , curr_coord ) ; } | Called by the GMS when a VIEW is received . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.