repo stringlengths 7 58 | path stringlengths 12 218 | func_name stringlengths 3 140 | original_string stringlengths 73 34.1k | language stringclasses 1
value | code stringlengths 73 34.1k | code_tokens list | docstring stringlengths 3 16k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 105 339 | partition stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|
h2oai/h2o-2 | src/main/java/water/fvec/Chunk.java | Chunk.set0 | public final long set0(int idx, long l) {
setWrite();
if( _chk2.set_impl(idx,l) ) return l;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,l);
return l;
} | java | public final long set0(int idx, long l) {
setWrite();
if( _chk2.set_impl(idx,l) ) return l;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,l);
return l;
} | [
"public",
"final",
"long",
"set0",
"(",
"int",
"idx",
",",
"long",
"l",
")",
"{",
"setWrite",
"(",
")",
";",
"if",
"(",
"_chk2",
".",
"set_impl",
"(",
"idx",
",",
"l",
")",
")",
"return",
"l",
";",
"(",
"_chk2",
"=",
"inflate_impl",
"(",
"new",
... | Set a long element in a chunk given a 0-based chunk local index.
Write into a chunk.
May rewrite/replace chunks if the chunk needs to be
"inflated" to hold larger values. Returns the input value.
Note that the idx is an int (instead of a long), which tells you
that index 0 is the first row in the chunk, not the whole Vec. | [
"Set",
"a",
"long",
"element",
"in",
"a",
"chunk",
"given",
"a",
"0",
"-",
"based",
"chunk",
"local",
"index",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/Chunk.java#L129-L134 | train |
h2oai/h2o-2 | src/main/java/water/fvec/Chunk.java | Chunk.set0 | public final double set0(int idx, double d) {
setWrite();
if( _chk2.set_impl(idx,d) ) return d;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,d);
return d;
} | java | public final double set0(int idx, double d) {
setWrite();
if( _chk2.set_impl(idx,d) ) return d;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,d);
return d;
} | [
"public",
"final",
"double",
"set0",
"(",
"int",
"idx",
",",
"double",
"d",
")",
"{",
"setWrite",
"(",
")",
";",
"if",
"(",
"_chk2",
".",
"set_impl",
"(",
"idx",
",",
"d",
")",
")",
"return",
"d",
";",
"(",
"_chk2",
"=",
"inflate_impl",
"(",
"new... | Set a double element in a chunk given a 0-based chunk local index. | [
"Set",
"a",
"double",
"element",
"in",
"a",
"chunk",
"given",
"a",
"0",
"-",
"based",
"chunk",
"local",
"index",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/Chunk.java#L137-L142 | train |
h2oai/h2o-2 | src/main/java/water/fvec/Chunk.java | Chunk.set0 | public final float set0(int idx, float f) {
setWrite();
if( _chk2.set_impl(idx,f) ) return f;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,f);
return f;
} | java | public final float set0(int idx, float f) {
setWrite();
if( _chk2.set_impl(idx,f) ) return f;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,f);
return f;
} | [
"public",
"final",
"float",
"set0",
"(",
"int",
"idx",
",",
"float",
"f",
")",
"{",
"setWrite",
"(",
")",
";",
"if",
"(",
"_chk2",
".",
"set_impl",
"(",
"idx",
",",
"f",
")",
")",
"return",
"f",
";",
"(",
"_chk2",
"=",
"inflate_impl",
"(",
"new",... | Set a floating element in a chunk given a 0-based chunk local index. | [
"Set",
"a",
"floating",
"element",
"in",
"a",
"chunk",
"given",
"a",
"0",
"-",
"based",
"chunk",
"local",
"index",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/Chunk.java#L145-L150 | train |
h2oai/h2o-2 | src/main/java/water/fvec/Chunk.java | Chunk.setNA0 | public final boolean setNA0(int idx) {
setWrite();
if( _chk2.setNA_impl(idx) ) return true;
(_chk2 = inflate_impl(new NewChunk(this))).setNA_impl(idx);
return true;
} | java | public final boolean setNA0(int idx) {
setWrite();
if( _chk2.setNA_impl(idx) ) return true;
(_chk2 = inflate_impl(new NewChunk(this))).setNA_impl(idx);
return true;
} | [
"public",
"final",
"boolean",
"setNA0",
"(",
"int",
"idx",
")",
"{",
"setWrite",
"(",
")",
";",
"if",
"(",
"_chk2",
".",
"setNA_impl",
"(",
"idx",
")",
")",
"return",
"true",
";",
"(",
"_chk2",
"=",
"inflate_impl",
"(",
"new",
"NewChunk",
"(",
"this"... | Set the element in a chunk as missing given a 0-based chunk local index. | [
"Set",
"the",
"element",
"in",
"a",
"chunk",
"as",
"missing",
"given",
"a",
"0",
"-",
"based",
"chunk",
"local",
"index",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/Chunk.java#L153-L158 | train |
h2oai/h2o-2 | src/main/java/water/Model.java | Model.scoreImpl | protected Frame scoreImpl(Frame adaptFrm) {
if (isSupervised()) {
int ridx = adaptFrm.find(responseName());
assert ridx == -1 : "Adapted frame should not contain response in scoring method!";
assert nfeatures() == adaptFrm.numCols() : "Number of model features " + nfeatures() + " != number of test set columns: " + adaptFrm.numCols();
assert adaptFrm.vecs().length == nfeatures() : "Scoring data set contains wrong number of columns: " + adaptFrm.vecs().length + " instead of " + nfeatures();
}
// Create a new vector for response
// If the model produces a classification/enum, copy the domain into the
// result vector.
int nc = nclasses();
Vec [] newVecs = new Vec[]{adaptFrm.anyVec().makeZero(classNames())};
if(nc > 1)
newVecs = Utils.join(newVecs,adaptFrm.anyVec().makeZeros(nc));
String [] names = new String[newVecs.length];
names[0] = "predict";
for(int i = 1; i < names.length; ++i)
names[i] = classNames()[i-1];
final int num_features = nfeatures();
new MRTask2() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[num_features]; // We do not need the last field representing response
float preds[] = new float [nclasses()==1?1:nclasses()+1];
int len = chks[0]._len;
for( int row=0; row<len; row++ ) {
float p[] = score0(chks,row,tmp,preds);
for( int c=0; c<preds.length; c++ )
chks[num_features+c].set0(row,p[c]);
}
}
}.doAll(Utils.join(adaptFrm.vecs(),newVecs));
// Return just the output columns
return new Frame(names,newVecs);
} | java | protected Frame scoreImpl(Frame adaptFrm) {
if (isSupervised()) {
int ridx = adaptFrm.find(responseName());
assert ridx == -1 : "Adapted frame should not contain response in scoring method!";
assert nfeatures() == adaptFrm.numCols() : "Number of model features " + nfeatures() + " != number of test set columns: " + adaptFrm.numCols();
assert adaptFrm.vecs().length == nfeatures() : "Scoring data set contains wrong number of columns: " + adaptFrm.vecs().length + " instead of " + nfeatures();
}
// Create a new vector for response
// If the model produces a classification/enum, copy the domain into the
// result vector.
int nc = nclasses();
Vec [] newVecs = new Vec[]{adaptFrm.anyVec().makeZero(classNames())};
if(nc > 1)
newVecs = Utils.join(newVecs,adaptFrm.anyVec().makeZeros(nc));
String [] names = new String[newVecs.length];
names[0] = "predict";
for(int i = 1; i < names.length; ++i)
names[i] = classNames()[i-1];
final int num_features = nfeatures();
new MRTask2() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[num_features]; // We do not need the last field representing response
float preds[] = new float [nclasses()==1?1:nclasses()+1];
int len = chks[0]._len;
for( int row=0; row<len; row++ ) {
float p[] = score0(chks,row,tmp,preds);
for( int c=0; c<preds.length; c++ )
chks[num_features+c].set0(row,p[c]);
}
}
}.doAll(Utils.join(adaptFrm.vecs(),newVecs));
// Return just the output columns
return new Frame(names,newVecs);
} | [
"protected",
"Frame",
"scoreImpl",
"(",
"Frame",
"adaptFrm",
")",
"{",
"if",
"(",
"isSupervised",
"(",
")",
")",
"{",
"int",
"ridx",
"=",
"adaptFrm",
".",
"find",
"(",
"responseName",
"(",
")",
")",
";",
"assert",
"ridx",
"==",
"-",
"1",
":",
"\"Adap... | Score already adapted frame.
@param adaptFrm
@return | [
"Score",
"already",
"adapted",
"frame",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Model.java#L252-L285 | train |
h2oai/h2o-2 | src/main/java/water/Model.java | Model.score | public final float[] score( Frame fr, boolean exact, int row ) {
double tmp[] = new double[fr.numCols()];
for( int i=0; i<tmp.length; i++ )
tmp[i] = fr.vecs()[i].at(row);
return score(fr.names(),fr.domains(),exact,tmp);
} | java | public final float[] score( Frame fr, boolean exact, int row ) {
double tmp[] = new double[fr.numCols()];
for( int i=0; i<tmp.length; i++ )
tmp[i] = fr.vecs()[i].at(row);
return score(fr.names(),fr.domains(),exact,tmp);
} | [
"public",
"final",
"float",
"[",
"]",
"score",
"(",
"Frame",
"fr",
",",
"boolean",
"exact",
",",
"int",
"row",
")",
"{",
"double",
"tmp",
"[",
"]",
"=",
"new",
"double",
"[",
"fr",
".",
"numCols",
"(",
")",
"]",
";",
"for",
"(",
"int",
"i",
"="... | Single row scoring, on a compatible Frame. | [
"Single",
"row",
"scoring",
"on",
"a",
"compatible",
"Frame",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Model.java#L288-L293 | train |
h2oai/h2o-2 | src/main/java/water/Model.java | Model.score | public final float[] score( String names[], String domains[][], boolean exact, double row[] ) {
return score(adapt(names,domains,exact),row,new float[nclasses()]);
} | java | public final float[] score( String names[], String domains[][], boolean exact, double row[] ) {
return score(adapt(names,domains,exact),row,new float[nclasses()]);
} | [
"public",
"final",
"float",
"[",
"]",
"score",
"(",
"String",
"names",
"[",
"]",
",",
"String",
"domains",
"[",
"]",
"[",
"]",
",",
"boolean",
"exact",
",",
"double",
"row",
"[",
"]",
")",
"{",
"return",
"score",
"(",
"adapt",
"(",
"names",
",",
... | Single row scoring, on a compatible set of data. Fairly expensive to adapt. | [
"Single",
"row",
"scoring",
"on",
"a",
"compatible",
"set",
"of",
"data",
".",
"Fairly",
"expensive",
"to",
"adapt",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Model.java#L296-L298 | train |
h2oai/h2o-2 | src/main/java/water/Model.java | Model.toJavaSuper | protected SB toJavaSuper( SB sb ) {
sb.nl();
sb.ii(1);
sb.i().p("public String[] getNames() { return NAMES; } ").nl();
sb.i().p("public String[][] getDomainValues() { return DOMAINS; }").nl();
String uuid = this.uniqueId != null ? this.uniqueId.getId() : this._key.toString();
sb.i().p("public String getUUID() { return ").ps(uuid).p("; }").nl();
return sb;
} | java | protected SB toJavaSuper( SB sb ) {
sb.nl();
sb.ii(1);
sb.i().p("public String[] getNames() { return NAMES; } ").nl();
sb.i().p("public String[][] getDomainValues() { return DOMAINS; }").nl();
String uuid = this.uniqueId != null ? this.uniqueId.getId() : this._key.toString();
sb.i().p("public String getUUID() { return ").ps(uuid).p("; }").nl();
return sb;
} | [
"protected",
"SB",
"toJavaSuper",
"(",
"SB",
"sb",
")",
"{",
"sb",
".",
"nl",
"(",
")",
";",
"sb",
".",
"ii",
"(",
"1",
")",
";",
"sb",
".",
"i",
"(",
")",
".",
"p",
"(",
"\"public String[] getNames() { return NAMES; } \"",
")",
".",
"nl",
"(",
"... | Generate implementation for super class. | [
"Generate",
"implementation",
"for",
"super",
"class",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Model.java#L641-L650 | train |
h2oai/h2o-2 | src/main/java/water/Model.java | Model.toJavaPredict | private SB toJavaPredict(SB ccsb, SB fileCtxSb) { // ccsb = classContext
ccsb.nl();
ccsb.p(" // Pass in data in a double[], pre-aligned to the Model's requirements.").nl();
ccsb.p(" // Jam predictions into the preds[] array; preds[0] is reserved for the").nl();
ccsb.p(" // main prediction (class for classifiers or value for regression),").nl();
ccsb.p(" // and remaining columns hold a probability distribution for classifiers.").nl();
ccsb.p(" public final float[] predict( double[] data, float[] preds) { preds = predict( data, preds, "+toJavaDefaultMaxIters()+"); return preds; }").nl();
// ccsb.p(" public final float[] predict( double[] data, float[] preds) { return predict( data, preds, "+toJavaDefaultMaxIters()+"); }").nl();
ccsb.p(" public final float[] predict( double[] data, float[] preds, int maxIters ) {").nl();
SB classCtxSb = new SB();
toJavaPredictBody(ccsb.ii(1), classCtxSb, fileCtxSb); ccsb.di(1);
ccsb.p(" return preds;").nl();
ccsb.p(" }").nl();
ccsb.p(classCtxSb);
return ccsb;
} | java | private SB toJavaPredict(SB ccsb, SB fileCtxSb) { // ccsb = classContext
ccsb.nl();
ccsb.p(" // Pass in data in a double[], pre-aligned to the Model's requirements.").nl();
ccsb.p(" // Jam predictions into the preds[] array; preds[0] is reserved for the").nl();
ccsb.p(" // main prediction (class for classifiers or value for regression),").nl();
ccsb.p(" // and remaining columns hold a probability distribution for classifiers.").nl();
ccsb.p(" public final float[] predict( double[] data, float[] preds) { preds = predict( data, preds, "+toJavaDefaultMaxIters()+"); return preds; }").nl();
// ccsb.p(" public final float[] predict( double[] data, float[] preds) { return predict( data, preds, "+toJavaDefaultMaxIters()+"); }").nl();
ccsb.p(" public final float[] predict( double[] data, float[] preds, int maxIters ) {").nl();
SB classCtxSb = new SB();
toJavaPredictBody(ccsb.ii(1), classCtxSb, fileCtxSb); ccsb.di(1);
ccsb.p(" return preds;").nl();
ccsb.p(" }").nl();
ccsb.p(classCtxSb);
return ccsb;
} | [
"private",
"SB",
"toJavaPredict",
"(",
"SB",
"ccsb",
",",
"SB",
"fileCtxSb",
")",
"{",
"// ccsb = classContext",
"ccsb",
".",
"nl",
"(",
")",
";",
"ccsb",
".",
"p",
"(",
"\" // Pass in data in a double[], pre-aligned to the Model's requirements.\"",
")",
".",
"nl",... | Wrapper around the main predict call, including the signature and return value | [
"Wrapper",
"around",
"the",
"main",
"predict",
"call",
"including",
"the",
"signature",
"and",
"return",
"value"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Model.java#L696-L711 | train |
h2oai/h2o-2 | src/main/java/water/Func.java | Func.emptyLTrash | protected final void emptyLTrash() {
if (_lVecTrash.isEmpty()) return;
Futures fs = new Futures();
cleanupTrash(_lVecTrash, fs);
fs.blockForPending();
} | java | protected final void emptyLTrash() {
if (_lVecTrash.isEmpty()) return;
Futures fs = new Futures();
cleanupTrash(_lVecTrash, fs);
fs.blockForPending();
} | [
"protected",
"final",
"void",
"emptyLTrash",
"(",
")",
"{",
"if",
"(",
"_lVecTrash",
".",
"isEmpty",
"(",
")",
")",
"return",
";",
"Futures",
"fs",
"=",
"new",
"Futures",
"(",
")",
";",
"cleanupTrash",
"(",
"_lVecTrash",
",",
"fs",
")",
";",
"fs",
".... | User call which empty local trash of vectors. | [
"User",
"call",
"which",
"empty",
"local",
"trash",
"of",
"vectors",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Func.java#L74-L79 | train |
h2oai/h2o-2 | src/main/java/hex/deeplearning/DeepLearning.java | DeepLearning.registered | @Override
protected void registered(RequestServer.API_VERSION ver) {
super.registered(ver);
for (Argument arg : _arguments) {
if ( arg._name.equals("activation") || arg._name.equals("initial_weight_distribution")
|| arg._name.equals("expert_mode") || arg._name.equals("adaptive_rate")
|| arg._name.equals("replicate_training_data")
|| arg._name.equals("balance_classes")
|| arg._name.equals("n_folds")
|| arg._name.equals("autoencoder")
|| arg._name.equals("checkpoint")) {
arg.setRefreshOnChange();
}
}
} | java | @Override
protected void registered(RequestServer.API_VERSION ver) {
super.registered(ver);
for (Argument arg : _arguments) {
if ( arg._name.equals("activation") || arg._name.equals("initial_weight_distribution")
|| arg._name.equals("expert_mode") || arg._name.equals("adaptive_rate")
|| arg._name.equals("replicate_training_data")
|| arg._name.equals("balance_classes")
|| arg._name.equals("n_folds")
|| arg._name.equals("autoencoder")
|| arg._name.equals("checkpoint")) {
arg.setRefreshOnChange();
}
}
} | [
"@",
"Override",
"protected",
"void",
"registered",
"(",
"RequestServer",
".",
"API_VERSION",
"ver",
")",
"{",
"super",
".",
"registered",
"(",
"ver",
")",
";",
"for",
"(",
"Argument",
"arg",
":",
"_arguments",
")",
"{",
"if",
"(",
"arg",
".",
"_name",
... | Helper to specify which arguments trigger a refresh on change
@param ver | [
"Helper",
"to",
"specify",
"which",
"arguments",
"trigger",
"a",
"refresh",
"on",
"change"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/deeplearning/DeepLearning.java#L581-L595 | train |
h2oai/h2o-2 | src/main/java/hex/deeplearning/DeepLearning.java | DeepLearning.prepareDataInfo | private DataInfo prepareDataInfo() {
final boolean del_enum_resp = classification && !response.isEnum();
final Frame train = FrameTask.DataInfo.prepareFrame(source, autoencoder ? null : response, ignored_cols, classification, ignore_const_cols, true /*drop >20% NA cols*/);
final DataInfo dinfo = new FrameTask.DataInfo(train, autoencoder ? 0 : 1, true, autoencoder || use_all_factor_levels, //use all FactorLevels for auto-encoder
autoencoder ? DataInfo.TransformType.NORMALIZE : DataInfo.TransformType.STANDARDIZE, //transform predictors
classification ? DataInfo.TransformType.NONE : DataInfo.TransformType.STANDARDIZE); //transform response
if (!autoencoder) {
final Vec resp = dinfo._adaptedFrame.lastVec(); //convention from DataInfo: response is the last Vec
assert (!classification ^ resp.isEnum()) : "Must have enum response for classification!"; //either regression or enum response
if (del_enum_resp) ltrash(resp);
}
return dinfo;
} | java | private DataInfo prepareDataInfo() {
final boolean del_enum_resp = classification && !response.isEnum();
final Frame train = FrameTask.DataInfo.prepareFrame(source, autoencoder ? null : response, ignored_cols, classification, ignore_const_cols, true /*drop >20% NA cols*/);
final DataInfo dinfo = new FrameTask.DataInfo(train, autoencoder ? 0 : 1, true, autoencoder || use_all_factor_levels, //use all FactorLevels for auto-encoder
autoencoder ? DataInfo.TransformType.NORMALIZE : DataInfo.TransformType.STANDARDIZE, //transform predictors
classification ? DataInfo.TransformType.NONE : DataInfo.TransformType.STANDARDIZE); //transform response
if (!autoencoder) {
final Vec resp = dinfo._adaptedFrame.lastVec(); //convention from DataInfo: response is the last Vec
assert (!classification ^ resp.isEnum()) : "Must have enum response for classification!"; //either regression or enum response
if (del_enum_resp) ltrash(resp);
}
return dinfo;
} | [
"private",
"DataInfo",
"prepareDataInfo",
"(",
")",
"{",
"final",
"boolean",
"del_enum_resp",
"=",
"classification",
"&&",
"!",
"response",
".",
"isEnum",
"(",
")",
";",
"final",
"Frame",
"train",
"=",
"FrameTask",
".",
"DataInfo",
".",
"prepareFrame",
"(",
... | Helper to create a DataInfo object from the source and response
@return DataInfo object | [
"Helper",
"to",
"create",
"a",
"DataInfo",
"object",
"from",
"the",
"source",
"and",
"response"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/deeplearning/DeepLearning.java#L979-L991 | train |
h2oai/h2o-2 | src/main/java/hex/deeplearning/DeepLearning.java | DeepLearning.updateFrame | Frame updateFrame(Frame target, Frame src) {
if (src != target) ltrash(src);
return src;
} | java | Frame updateFrame(Frame target, Frame src) {
if (src != target) ltrash(src);
return src;
} | [
"Frame",
"updateFrame",
"(",
"Frame",
"target",
",",
"Frame",
"src",
")",
"{",
"if",
"(",
"src",
"!=",
"target",
")",
"ltrash",
"(",
"src",
")",
";",
"return",
"src",
";",
"}"
] | Helper to update a Frame and adding it to the local trash at the same time
@param target Frame referece, to be overwritten
@param src Newly made frame, to be deleted via local trash
@return src | [
"Helper",
"to",
"update",
"a",
"Frame",
"and",
"adding",
"it",
"to",
"the",
"local",
"trash",
"at",
"the",
"same",
"time"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/deeplearning/DeepLearning.java#L1020-L1023 | train |
h2oai/h2o-2 | src/main/java/hex/deeplearning/DeepLearning.java | DeepLearning.lock_data | private void lock_data() {
source.read_lock(self());
if( validation != null && source._key != null && validation._key !=null && !source._key.equals(validation._key) )
validation.read_lock(self());
} | java | private void lock_data() {
source.read_lock(self());
if( validation != null && source._key != null && validation._key !=null && !source._key.equals(validation._key) )
validation.read_lock(self());
} | [
"private",
"void",
"lock_data",
"(",
")",
"{",
"source",
".",
"read_lock",
"(",
"self",
"(",
")",
")",
";",
"if",
"(",
"validation",
"!=",
"null",
"&&",
"source",
".",
"_key",
"!=",
"null",
"&&",
"validation",
".",
"_key",
"!=",
"null",
"&&",
"!",
... | Lock the input datasets against deletes | [
"Lock",
"the",
"input",
"datasets",
"against",
"deletes"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/deeplearning/DeepLearning.java#L1149-L1153 | train |
h2oai/h2o-2 | src/main/java/hex/deeplearning/DeepLearning.java | DeepLearning.unlock_data | private void unlock_data() {
source.unlock(self());
if( validation != null && source._key != null && validation._key != null && !source._key.equals(validation._key) )
validation.unlock(self());
} | java | private void unlock_data() {
source.unlock(self());
if( validation != null && source._key != null && validation._key != null && !source._key.equals(validation._key) )
validation.unlock(self());
} | [
"private",
"void",
"unlock_data",
"(",
")",
"{",
"source",
".",
"unlock",
"(",
"self",
"(",
")",
")",
";",
"if",
"(",
"validation",
"!=",
"null",
"&&",
"source",
".",
"_key",
"!=",
"null",
"&&",
"validation",
".",
"_key",
"!=",
"null",
"&&",
"!",
"... | Release the lock for the input datasets | [
"Release",
"the",
"lock",
"for",
"the",
"input",
"datasets"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/deeplearning/DeepLearning.java#L1158-L1162 | train |
h2oai/h2o-2 | src/main/java/hex/deeplearning/DeepLearning.java | DeepLearning.reBalance | private Frame reBalance(final Frame fr, boolean local) {
int chunks = (int)Math.min( 4 * H2O.NUMCPUS * (local ? 1 : H2O.CLOUD.size()), fr.numRows());
if (fr.anyVec().nChunks() > chunks && !reproducible) {
Log.info("Dataset already contains " + fr.anyVec().nChunks() + " chunks. No need to rebalance.");
return fr;
} else if (reproducible) {
Log.warn("Reproducibility enforced - using only 1 thread - can be slow.");
chunks = 1;
}
if (!quiet_mode) Log.info("ReBalancing dataset into (at least) " + chunks + " chunks.");
// return MRUtils.shuffleAndBalance(fr, chunks, seed, local, shuffle_training_data);
String snewKey = fr._key != null ? (fr._key.toString() + ".balanced") : Key.rand();
Key newKey = Key.makeSystem(snewKey);
RebalanceDataSet rb = new RebalanceDataSet(fr, newKey, chunks);
H2O.submitTask(rb);
rb.join();
return UKV.get(newKey);
} | java | private Frame reBalance(final Frame fr, boolean local) {
int chunks = (int)Math.min( 4 * H2O.NUMCPUS * (local ? 1 : H2O.CLOUD.size()), fr.numRows());
if (fr.anyVec().nChunks() > chunks && !reproducible) {
Log.info("Dataset already contains " + fr.anyVec().nChunks() + " chunks. No need to rebalance.");
return fr;
} else if (reproducible) {
Log.warn("Reproducibility enforced - using only 1 thread - can be slow.");
chunks = 1;
}
if (!quiet_mode) Log.info("ReBalancing dataset into (at least) " + chunks + " chunks.");
// return MRUtils.shuffleAndBalance(fr, chunks, seed, local, shuffle_training_data);
String snewKey = fr._key != null ? (fr._key.toString() + ".balanced") : Key.rand();
Key newKey = Key.makeSystem(snewKey);
RebalanceDataSet rb = new RebalanceDataSet(fr, newKey, chunks);
H2O.submitTask(rb);
rb.join();
return UKV.get(newKey);
} | [
"private",
"Frame",
"reBalance",
"(",
"final",
"Frame",
"fr",
",",
"boolean",
"local",
")",
"{",
"int",
"chunks",
"=",
"(",
"int",
")",
"Math",
".",
"min",
"(",
"4",
"*",
"H2O",
".",
"NUMCPUS",
"*",
"(",
"local",
"?",
"1",
":",
"H2O",
".",
"CLOUD... | Rebalance a frame for load balancing
@param fr Input frame
@param local whether to only create enough chunks to max out all cores on one node only
@return Frame that has potentially more chunks | [
"Rebalance",
"a",
"frame",
"for",
"load",
"balancing"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/deeplearning/DeepLearning.java#L1179-L1196 | train |
h2oai/h2o-2 | src/main/java/hex/deeplearning/DeepLearning.java | DeepLearning.computeRowUsageFraction | private static float computeRowUsageFraction(final long numRows, final long train_samples_per_iteration, final boolean replicate_training_data) {
float rowUsageFraction = (float)train_samples_per_iteration / numRows;
if (replicate_training_data) rowUsageFraction /= H2O.CLOUD.size();
assert(rowUsageFraction > 0);
return rowUsageFraction;
} | java | private static float computeRowUsageFraction(final long numRows, final long train_samples_per_iteration, final boolean replicate_training_data) {
float rowUsageFraction = (float)train_samples_per_iteration / numRows;
if (replicate_training_data) rowUsageFraction /= H2O.CLOUD.size();
assert(rowUsageFraction > 0);
return rowUsageFraction;
} | [
"private",
"static",
"float",
"computeRowUsageFraction",
"(",
"final",
"long",
"numRows",
",",
"final",
"long",
"train_samples_per_iteration",
",",
"final",
"boolean",
"replicate_training_data",
")",
"{",
"float",
"rowUsageFraction",
"=",
"(",
"float",
")",
"train_sam... | Compute the fraction of rows that need to be used for training during one iteration
@param numRows number of training rows
@param train_samples_per_iteration number of training rows to be processed per iteration
@param replicate_training_data whether of not the training data is replicated on each node
@return fraction of rows to be used for training during one iteration | [
"Compute",
"the",
"fraction",
"of",
"rows",
"that",
"need",
"to",
"be",
"used",
"for",
"training",
"during",
"one",
"iteration"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/deeplearning/DeepLearning.java#L1288-L1293 | train |
h2oai/h2o-2 | src/main/java/water/util/CrossValUtils.java | CrossValUtils.crossValidate | public static void crossValidate(Job.ValidatedJob job) {
if (job.state != Job.JobState.RUNNING) return; //don't do cross-validation if the full model builder failed
if (job.validation != null)
throw new IllegalArgumentException("Cannot provide validation dataset and n_folds > 0 at the same time.");
if (job.n_folds <= 1)
throw new IllegalArgumentException("n_folds must be >= 2 for cross-validation.");
final String basename = job.destination_key.toString();
long[] offsets = new long[job.n_folds +1];
Frame[] cv_preds = new Frame[job.n_folds];
try {
for (int i = 0; i < job.n_folds; ++i) {
if (job.state != Job.JobState.RUNNING) break;
Key[] destkeys = new Key[]{Key.make(basename + "_xval" + i + "_train"), Key.make(basename + "_xval" + i + "_holdout")};
NFoldFrameExtractor nffe = new NFoldFrameExtractor(job.source, job.n_folds, i, destkeys, Key.make() /*key used for locking only*/);
H2O.submitTask(nffe);
Frame[] splits = nffe.getResult();
// Cross-validate individual splits
try {
job.crossValidate(splits, cv_preds, offsets, i); //this removes the enum-ified response!
job._cv_count++;
} finally {
// clean-up the results
if (!job.keep_cross_validation_splits) for(Frame f : splits) f.delete();
}
}
if (job.state != Job.JobState.RUNNING)
return;
final int resp_idx = job.source.find(job._responseName);
Vec response = job.source.vecs()[resp_idx];
boolean put_back = UKV.get(job.response._key) == null; // In the case of rebalance, rebalance response will be deleted
if (put_back) {
job.response = response;
if (job.classification)
job.response = job.response.toEnum();
DKV.put(job.response._key, job.response); //put enum-ified response back to K-V store
}
((Model)UKV.get(job.destination_key)).scoreCrossValidation(job, job.source, response, cv_preds, offsets);
if (put_back) UKV.remove(job.response._key);
} finally {
// clean-up prediction frames for splits
for(Frame f: cv_preds) if (f!=null) f.delete();
}
} | java | public static void crossValidate(Job.ValidatedJob job) {
if (job.state != Job.JobState.RUNNING) return; //don't do cross-validation if the full model builder failed
if (job.validation != null)
throw new IllegalArgumentException("Cannot provide validation dataset and n_folds > 0 at the same time.");
if (job.n_folds <= 1)
throw new IllegalArgumentException("n_folds must be >= 2 for cross-validation.");
final String basename = job.destination_key.toString();
long[] offsets = new long[job.n_folds +1];
Frame[] cv_preds = new Frame[job.n_folds];
try {
for (int i = 0; i < job.n_folds; ++i) {
if (job.state != Job.JobState.RUNNING) break;
Key[] destkeys = new Key[]{Key.make(basename + "_xval" + i + "_train"), Key.make(basename + "_xval" + i + "_holdout")};
NFoldFrameExtractor nffe = new NFoldFrameExtractor(job.source, job.n_folds, i, destkeys, Key.make() /*key used for locking only*/);
H2O.submitTask(nffe);
Frame[] splits = nffe.getResult();
// Cross-validate individual splits
try {
job.crossValidate(splits, cv_preds, offsets, i); //this removes the enum-ified response!
job._cv_count++;
} finally {
// clean-up the results
if (!job.keep_cross_validation_splits) for(Frame f : splits) f.delete();
}
}
if (job.state != Job.JobState.RUNNING)
return;
final int resp_idx = job.source.find(job._responseName);
Vec response = job.source.vecs()[resp_idx];
boolean put_back = UKV.get(job.response._key) == null; // In the case of rebalance, rebalance response will be deleted
if (put_back) {
job.response = response;
if (job.classification)
job.response = job.response.toEnum();
DKV.put(job.response._key, job.response); //put enum-ified response back to K-V store
}
((Model)UKV.get(job.destination_key)).scoreCrossValidation(job, job.source, response, cv_preds, offsets);
if (put_back) UKV.remove(job.response._key);
} finally {
// clean-up prediction frames for splits
for(Frame f: cv_preds) if (f!=null) f.delete();
}
} | [
"public",
"static",
"void",
"crossValidate",
"(",
"Job",
".",
"ValidatedJob",
"job",
")",
"{",
"if",
"(",
"job",
".",
"state",
"!=",
"Job",
".",
"JobState",
".",
"RUNNING",
")",
"return",
";",
"//don't do cross-validation if the full model builder failed",
"if",
... | Cross-Validate a ValidatedJob
@param job (must contain valid entries for n_folds, validation, destination_key, source, response) | [
"Cross",
"-",
"Validate",
"a",
"ValidatedJob"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/util/CrossValUtils.java#L14-L56 | train |
h2oai/h2o-2 | src/main/java/water/api/ModelMetrics.java | ModelMetrics.fetchAll | protected static List<water.ModelMetrics>fetchAll() {
return new ArrayList<water.ModelMetrics>(H2O.KeySnapshot.globalSnapshot().fetchAll(water.ModelMetrics.class).values());
} | java | protected static List<water.ModelMetrics>fetchAll() {
return new ArrayList<water.ModelMetrics>(H2O.KeySnapshot.globalSnapshot().fetchAll(water.ModelMetrics.class).values());
} | [
"protected",
"static",
"List",
"<",
"water",
".",
"ModelMetrics",
">",
"fetchAll",
"(",
")",
"{",
"return",
"new",
"ArrayList",
"<",
"water",
".",
"ModelMetrics",
">",
"(",
"H2O",
".",
"KeySnapshot",
".",
"globalSnapshot",
"(",
")",
".",
"fetchAll",
"(",
... | Fetch all ModelMetrics from the KV store. | [
"Fetch",
"all",
"ModelMetrics",
"from",
"the",
"KV",
"store",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/ModelMetrics.java#L52-L54 | train |
h2oai/h2o-2 | src/main/java/water/api/ModelMetrics.java | ModelMetrics.serveOneOrAll | private Response serveOneOrAll(List<water.ModelMetrics> list) {
JsonArray metricsArray = new JsonArray();
for (water.ModelMetrics metrics : list) {
JsonObject metricsJson = metrics.toJSON();
metricsArray.add(metricsJson);
}
JsonObject result = new JsonObject();
result.add("metrics", metricsArray);
return Response.done(result);
} | java | private Response serveOneOrAll(List<water.ModelMetrics> list) {
JsonArray metricsArray = new JsonArray();
for (water.ModelMetrics metrics : list) {
JsonObject metricsJson = metrics.toJSON();
metricsArray.add(metricsJson);
}
JsonObject result = new JsonObject();
result.add("metrics", metricsArray);
return Response.done(result);
} | [
"private",
"Response",
"serveOneOrAll",
"(",
"List",
"<",
"water",
".",
"ModelMetrics",
">",
"list",
")",
"{",
"JsonArray",
"metricsArray",
"=",
"new",
"JsonArray",
"(",
")",
";",
"for",
"(",
"water",
".",
"ModelMetrics",
"metrics",
":",
"list",
")",
"{",
... | For one or more water.ModelMetrics from the KV store return Response containing a map of them. | [
"For",
"one",
"or",
"more",
"water",
".",
"ModelMetrics",
"from",
"the",
"KV",
"store",
"return",
"Response",
"containing",
"a",
"map",
"of",
"them",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/ModelMetrics.java#L60-L71 | train |
h2oai/h2o-2 | src/main/java/hex/gbm/DTreeUtils.java | DTreeUtils.scoreTree | public static void scoreTree(double data[], float preds[], CompressedTree[] ts) {
for( int c=0; c<ts.length; c++ )
if( ts[c] != null )
preds[ts.length==1?0:c+1] += ts[c].score(data);
} | java | public static void scoreTree(double data[], float preds[], CompressedTree[] ts) {
for( int c=0; c<ts.length; c++ )
if( ts[c] != null )
preds[ts.length==1?0:c+1] += ts[c].score(data);
} | [
"public",
"static",
"void",
"scoreTree",
"(",
"double",
"data",
"[",
"]",
",",
"float",
"preds",
"[",
"]",
",",
"CompressedTree",
"[",
"]",
"ts",
")",
"{",
"for",
"(",
"int",
"c",
"=",
"0",
";",
"c",
"<",
"ts",
".",
"length",
";",
"c",
"++",
")... | Score given tree on the row of data.
@param data row of data
@param preds array to hold resulting prediction
@param ts a tree representation (single regression tree, or multi tree) | [
"Score",
"given",
"tree",
"on",
"the",
"row",
"of",
"data",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/gbm/DTreeUtils.java#L15-L19 | train |
h2oai/h2o-2 | src/main/java/water/api/RequestServer.java | RequestServer.registerRequest | public static Request registerRequest(Request req) {
assert req.supportedVersions().length > 0;
for (API_VERSION ver : req.supportedVersions()) {
String href = req.href(ver);
assert (! _requests.containsKey(href)) : "Request with href "+href+" already registered";
_requests.put(href,req);
req.registered(ver);
}
return req;
} | java | public static Request registerRequest(Request req) {
assert req.supportedVersions().length > 0;
for (API_VERSION ver : req.supportedVersions()) {
String href = req.href(ver);
assert (! _requests.containsKey(href)) : "Request with href "+href+" already registered";
_requests.put(href,req);
req.registered(ver);
}
return req;
} | [
"public",
"static",
"Request",
"registerRequest",
"(",
"Request",
"req",
")",
"{",
"assert",
"req",
".",
"supportedVersions",
"(",
")",
".",
"length",
">",
"0",
";",
"for",
"(",
"API_VERSION",
"ver",
":",
"req",
".",
"supportedVersions",
"(",
")",
")",
"... | Registers the request with the request server. | [
"Registers",
"the",
"request",
"with",
"the",
"request",
"server",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/RequestServer.java#L277-L286 | train |
h2oai/h2o-2 | src/main/java/water/api/RequestServer.java | RequestServer.start | public static void start() {
new Thread( new Runnable() {
@Override public void run() {
while( true ) {
try {
// Try to get the NanoHTTP daemon started
SERVER = new RequestServer(H2O._apiSocket);
break;
} catch( Exception ioe ) {
Log.err(Sys.HTTPD,"Launching NanoHTTP server got ",ioe);
try { Thread.sleep(1000); } catch( InterruptedException e ) { } // prevent denial-of-service
}
}
}
}, "Request Server launcher").start();
} | java | public static void start() {
new Thread( new Runnable() {
@Override public void run() {
while( true ) {
try {
// Try to get the NanoHTTP daemon started
SERVER = new RequestServer(H2O._apiSocket);
break;
} catch( Exception ioe ) {
Log.err(Sys.HTTPD,"Launching NanoHTTP server got ",ioe);
try { Thread.sleep(1000); } catch( InterruptedException e ) { } // prevent denial-of-service
}
}
}
}, "Request Server launcher").start();
} | [
"public",
"static",
"void",
"start",
"(",
")",
"{",
"new",
"Thread",
"(",
"new",
"Runnable",
"(",
")",
"{",
"@",
"Override",
"public",
"void",
"run",
"(",
")",
"{",
"while",
"(",
"true",
")",
"{",
"try",
"{",
"// Try to get the NanoHTTP daemon started",
... | Keep spinning until we get to launch the NanoHTTPD | [
"Keep",
"spinning",
"until",
"we",
"get",
"to",
"launch",
"the",
"NanoHTTPD"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/RequestServer.java#L361-L376 | train |
h2oai/h2o-2 | src/main/java/water/Atomic.java | Atomic.invoke | public final T invoke( Key key ) {
RPC<Atomic<T>> rpc = fork(key);
return (T)(rpc == null ? this : rpc.get()); // Block for it
} | java | public final T invoke( Key key ) {
RPC<Atomic<T>> rpc = fork(key);
return (T)(rpc == null ? this : rpc.get()); // Block for it
} | [
"public",
"final",
"T",
"invoke",
"(",
"Key",
"key",
")",
"{",
"RPC",
"<",
"Atomic",
"<",
"T",
">>",
"rpc",
"=",
"fork",
"(",
"key",
")",
";",
"return",
"(",
"T",
")",
"(",
"rpc",
"==",
"null",
"?",
"this",
":",
"rpc",
".",
"get",
"(",
")",
... | Block until it completes, even if run remotely | [
"Block",
"until",
"it",
"completes",
"even",
"if",
"run",
"remotely"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Atomic.java#L33-L36 | train |
h2oai/h2o-2 | src/main/java/water/genmodel/GenUtils.java | GenUtils.concat | public static String[] concat(String[] ...aa) {
int l = 0;
for (String[] a : aa) l += a.length;
String[] r = new String[l];
l = 0;
for (String[] a : aa) {
System.arraycopy(a, 0, r, l, a.length);
l += a.length;
}
return r;
} | java | public static String[] concat(String[] ...aa) {
int l = 0;
for (String[] a : aa) l += a.length;
String[] r = new String[l];
l = 0;
for (String[] a : aa) {
System.arraycopy(a, 0, r, l, a.length);
l += a.length;
}
return r;
} | [
"public",
"static",
"String",
"[",
"]",
"concat",
"(",
"String",
"[",
"]",
"...",
"aa",
")",
"{",
"int",
"l",
"=",
"0",
";",
"for",
"(",
"String",
"[",
"]",
"a",
":",
"aa",
")",
"l",
"+=",
".",
"length",
";",
"String",
"[",
"]",
"r",
"=",
"... | Concatenate given list of arrays into one long array.
<p>Expect not null array.</p>
@param aa list of string arrays
@return a long array create by concatenation of given arrays. | [
"Concatenate",
"given",
"list",
"of",
"arrays",
"into",
"one",
"long",
"array",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/genmodel/GenUtils.java#L14-L24 | train |
h2oai/h2o-2 | h2o-samples/src/main/java/samples/expert/Frames.java | Frames.parse | public static Frame parse(File file) {
Key fkey = NFSFileVec.make(file);
Key dest = Key.make(file.getName());
Frame frame = ParseDataset2.parse(dest, new Key[] { fkey });
return frame;
} | java | public static Frame parse(File file) {
Key fkey = NFSFileVec.make(file);
Key dest = Key.make(file.getName());
Frame frame = ParseDataset2.parse(dest, new Key[] { fkey });
return frame;
} | [
"public",
"static",
"Frame",
"parse",
"(",
"File",
"file",
")",
"{",
"Key",
"fkey",
"=",
"NFSFileVec",
".",
"make",
"(",
"file",
")",
";",
"Key",
"dest",
"=",
"Key",
".",
"make",
"(",
"file",
".",
"getName",
"(",
")",
")",
";",
"Frame",
"frame",
... | Parse a dataset into a Frame. | [
"Parse",
"a",
"dataset",
"into",
"a",
"Frame",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/h2o-samples/src/main/java/samples/expert/Frames.java#L45-L50 | train |
h2oai/h2o-2 | h2o-samples/src/main/java/samples/expert/Frames.java | Frames.create | public static Frame create(String[] headers, double[][] rows) {
Futures fs = new Futures();
Vec[] vecs = new Vec[rows[0].length];
Key keys[] = new Vec.VectorGroup().addVecs(vecs.length);
for( int c = 0; c < vecs.length; c++ ) {
AppendableVec vec = new AppendableVec(keys[c]);
NewChunk chunk = new NewChunk(vec, 0);
for( int r = 0; r < rows.length; r++ )
chunk.addNum(rows[r][c]);
chunk.close(0, fs);
vecs[c] = vec.close(fs);
}
fs.blockForPending();
return new Frame(headers, vecs);
} | java | public static Frame create(String[] headers, double[][] rows) {
Futures fs = new Futures();
Vec[] vecs = new Vec[rows[0].length];
Key keys[] = new Vec.VectorGroup().addVecs(vecs.length);
for( int c = 0; c < vecs.length; c++ ) {
AppendableVec vec = new AppendableVec(keys[c]);
NewChunk chunk = new NewChunk(vec, 0);
for( int r = 0; r < rows.length; r++ )
chunk.addNum(rows[r][c]);
chunk.close(0, fs);
vecs[c] = vec.close(fs);
}
fs.blockForPending();
return new Frame(headers, vecs);
} | [
"public",
"static",
"Frame",
"create",
"(",
"String",
"[",
"]",
"headers",
",",
"double",
"[",
"]",
"[",
"]",
"rows",
")",
"{",
"Futures",
"fs",
"=",
"new",
"Futures",
"(",
")",
";",
"Vec",
"[",
"]",
"vecs",
"=",
"new",
"Vec",
"[",
"rows",
"[",
... | Creates a frame programmatically. | [
"Creates",
"a",
"frame",
"programmatically",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/h2o-samples/src/main/java/samples/expert/Frames.java#L55-L69 | train |
h2oai/h2o-2 | src/main/java/water/fvec/FileVec.java | FileVec.chunkIdx | @Override public Value chunkIdx( int cidx ) {
final long nchk = nChunks();
assert 0 <= cidx && cidx < nchk;
Key dkey = chunkKey(cidx);
Value val1 = DKV.get(dkey);// Check for an existing one... will fetch data as needed
if( val1 != null ) return val1; // Found an existing one?
// Lazily create a DVec for this chunk
int len = (int)(cidx < nchk-1 ? CHUNK_SZ : (_len-chunk2StartElem(cidx)));
// DVec is just the raw file data with a null-compression scheme
Value val2 = new Value(dkey,len,null,TypeMap.C1NCHUNK,_be);
val2.setdsk(); // It is already on disk.
// If not-home, then block till the Key is everywhere. Most calls here are
// from the parser loading a text file, and the parser splits the work such
// that most puts here are on home - so this is a simple speed optimization:
// do not make a Futures nor block on it on home.
Futures fs = dkey.home() ? null : new Futures();
// Atomically insert: fails on a race, but then return the old version
Value val3 = DKV.DputIfMatch(dkey,val2,null,fs);
if( !dkey.home() && fs != null ) fs.blockForPending();
return val3 == null ? val2 : val3;
} | java | @Override public Value chunkIdx( int cidx ) {
final long nchk = nChunks();
assert 0 <= cidx && cidx < nchk;
Key dkey = chunkKey(cidx);
Value val1 = DKV.get(dkey);// Check for an existing one... will fetch data as needed
if( val1 != null ) return val1; // Found an existing one?
// Lazily create a DVec for this chunk
int len = (int)(cidx < nchk-1 ? CHUNK_SZ : (_len-chunk2StartElem(cidx)));
// DVec is just the raw file data with a null-compression scheme
Value val2 = new Value(dkey,len,null,TypeMap.C1NCHUNK,_be);
val2.setdsk(); // It is already on disk.
// If not-home, then block till the Key is everywhere. Most calls here are
// from the parser loading a text file, and the parser splits the work such
// that most puts here are on home - so this is a simple speed optimization:
// do not make a Futures nor block on it on home.
Futures fs = dkey.home() ? null : new Futures();
// Atomically insert: fails on a race, but then return the old version
Value val3 = DKV.DputIfMatch(dkey,val2,null,fs);
if( !dkey.home() && fs != null ) fs.blockForPending();
return val3 == null ? val2 : val3;
} | [
"@",
"Override",
"public",
"Value",
"chunkIdx",
"(",
"int",
"cidx",
")",
"{",
"final",
"long",
"nchk",
"=",
"nChunks",
"(",
")",
";",
"assert",
"0",
"<=",
"cidx",
"&&",
"cidx",
"<",
"nchk",
";",
"Key",
"dkey",
"=",
"chunkKey",
"(",
"cidx",
")",
";"... | Touching the DVec will force the file load. | [
"Touching",
"the",
"DVec",
"will",
"force",
"the",
"file",
"load",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/FileVec.java#L68-L88 | train |
h2oai/h2o-2 | src/main/java/water/api/Frames.java | Frames.summarizeAndEnhanceFrame | private static void summarizeAndEnhanceFrame(FrameSummary summary, Frame frame, boolean find_compatible_models, Map<String, Model> all_models, Map<String, Set<String>> all_models_cols) {
UniqueId unique_id = frame.getUniqueId();
summary.id = unique_id.getId();
summary.key = unique_id.getKey();
summary.creation_epoch_time_millis = unique_id.getCreationEpochTimeMillis();
summary.column_names = frame._names;
summary.is_raw_frame = frame.isRawData();
if (find_compatible_models) {
Map<String, Model> compatible_models = findCompatibleModels(frame, all_models, all_models_cols);
summary.compatible_models = compatible_models.keySet();
}
} | java | private static void summarizeAndEnhanceFrame(FrameSummary summary, Frame frame, boolean find_compatible_models, Map<String, Model> all_models, Map<String, Set<String>> all_models_cols) {
UniqueId unique_id = frame.getUniqueId();
summary.id = unique_id.getId();
summary.key = unique_id.getKey();
summary.creation_epoch_time_millis = unique_id.getCreationEpochTimeMillis();
summary.column_names = frame._names;
summary.is_raw_frame = frame.isRawData();
if (find_compatible_models) {
Map<String, Model> compatible_models = findCompatibleModels(frame, all_models, all_models_cols);
summary.compatible_models = compatible_models.keySet();
}
} | [
"private",
"static",
"void",
"summarizeAndEnhanceFrame",
"(",
"FrameSummary",
"summary",
",",
"Frame",
"frame",
",",
"boolean",
"find_compatible_models",
",",
"Map",
"<",
"String",
",",
"Model",
">",
"all_models",
",",
"Map",
"<",
"String",
",",
"Set",
"<",
"S... | Summarize fields in water.fvec.Frame. | [
"Summarize",
"fields",
"in",
"water",
".",
"fvec",
".",
"Frame",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Frames.java#L145-L158 | train |
h2oai/h2o-2 | src/main/java/water/api/Frames.java | Frames.serveOneOrAll | private Response serveOneOrAll(Map<String, Frame> framesMap) {
// returns empty sets if !this.find_compatible_models
Pair<Map<String, Model>, Map<String, Set<String>>> models_info = fetchModels();
Map<String, Model> all_models = models_info.getFirst();
Map<String, Set<String>> all_models_cols = models_info.getSecond();
Map<String, FrameSummary> frameSummaries = Frames.generateFrameSummaries(null, framesMap, find_compatible_models, all_models, all_models_cols);
Map resultsMap = new LinkedHashMap();
resultsMap.put("frames", frameSummaries);
// If find_compatible_models then include a map of the Model summaries. Should we put this on a separate switch?
if (this.find_compatible_models) {
Set<String> all_referenced_models = new TreeSet<String>();
for (Map.Entry<String, FrameSummary> entry: frameSummaries.entrySet()) {
FrameSummary summary = entry.getValue();
all_referenced_models.addAll(summary.compatible_models);
}
Map<String, ModelSummary> modelSummaries = Models.generateModelSummaries(all_referenced_models, all_models, false, null, null);
resultsMap.put("models", modelSummaries);
}
// TODO: temporary hack to get things going
String json = gson.toJson(resultsMap);
JsonObject result = gson.fromJson(json, JsonElement.class).getAsJsonObject();
return Response.done(result);
} | java | private Response serveOneOrAll(Map<String, Frame> framesMap) {
// returns empty sets if !this.find_compatible_models
Pair<Map<String, Model>, Map<String, Set<String>>> models_info = fetchModels();
Map<String, Model> all_models = models_info.getFirst();
Map<String, Set<String>> all_models_cols = models_info.getSecond();
Map<String, FrameSummary> frameSummaries = Frames.generateFrameSummaries(null, framesMap, find_compatible_models, all_models, all_models_cols);
Map resultsMap = new LinkedHashMap();
resultsMap.put("frames", frameSummaries);
// If find_compatible_models then include a map of the Model summaries. Should we put this on a separate switch?
if (this.find_compatible_models) {
Set<String> all_referenced_models = new TreeSet<String>();
for (Map.Entry<String, FrameSummary> entry: frameSummaries.entrySet()) {
FrameSummary summary = entry.getValue();
all_referenced_models.addAll(summary.compatible_models);
}
Map<String, ModelSummary> modelSummaries = Models.generateModelSummaries(all_referenced_models, all_models, false, null, null);
resultsMap.put("models", modelSummaries);
}
// TODO: temporary hack to get things going
String json = gson.toJson(resultsMap);
JsonObject result = gson.fromJson(json, JsonElement.class).getAsJsonObject();
return Response.done(result);
} | [
"private",
"Response",
"serveOneOrAll",
"(",
"Map",
"<",
"String",
",",
"Frame",
">",
"framesMap",
")",
"{",
"// returns empty sets if !this.find_compatible_models",
"Pair",
"<",
"Map",
"<",
"String",
",",
"Model",
">",
",",
"Map",
"<",
"String",
",",
"Set",
"... | For one or more Frame from the KV store, sumamrize and enhance them and Response containing a map of them. | [
"For",
"one",
"or",
"more",
"Frame",
"from",
"the",
"KV",
"store",
"sumamrize",
"and",
"enhance",
"them",
"and",
"Response",
"containing",
"a",
"map",
"of",
"them",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Frames.java#L172-L201 | train |
h2oai/h2o-2 | src/main/java/water/fvec/TransfVec.java | TransfVec.compose | public static Vec compose(TransfVec origVec, int[][] transfMap, String[] domain, boolean keepOrig) {
// Do a mapping from INT -> ENUM -> this vector ENUM
int[][] domMap = Utils.compose(new int[][] {origVec._values, origVec._indexes }, transfMap);
Vec result = origVec.masterVec().makeTransf(domMap[0], domMap[1], domain);;
if (!keepOrig) DKV.remove(origVec._key);
return result;
} | java | public static Vec compose(TransfVec origVec, int[][] transfMap, String[] domain, boolean keepOrig) {
// Do a mapping from INT -> ENUM -> this vector ENUM
int[][] domMap = Utils.compose(new int[][] {origVec._values, origVec._indexes }, transfMap);
Vec result = origVec.masterVec().makeTransf(domMap[0], domMap[1], domain);;
if (!keepOrig) DKV.remove(origVec._key);
return result;
} | [
"public",
"static",
"Vec",
"compose",
"(",
"TransfVec",
"origVec",
",",
"int",
"[",
"]",
"[",
"]",
"transfMap",
",",
"String",
"[",
"]",
"domain",
",",
"boolean",
"keepOrig",
")",
"{",
"// Do a mapping from INT -> ENUM -> this vector ENUM",
"int",
"[",
"]",
"[... | Compose given origVector with given transformation. Always returns a new vector.
Original vector is kept if keepOrig is true.
@param origVec
@param transfMap
@param keepOrig
@return a new instance of {@link TransfVec} composing transformation of origVector and tranfsMap | [
"Compose",
"given",
"origVector",
"with",
"given",
"transformation",
".",
"Always",
"returns",
"a",
"new",
"vector",
".",
"Original",
"vector",
"is",
"kept",
"if",
"keepOrig",
"is",
"true",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/TransfVec.java#L127-L133 | train |
h2oai/h2o-2 | src/main/java/water/api/Inspector.java | Inspector.redirect | public static Response redirect(Request req, Key src_key) {
return Response.redirect(req, "/2/Inspector", "src_key", src_key.toString());
} | java | public static Response redirect(Request req, Key src_key) {
return Response.redirect(req, "/2/Inspector", "src_key", src_key.toString());
} | [
"public",
"static",
"Response",
"redirect",
"(",
"Request",
"req",
",",
"Key",
"src_key",
")",
"{",
"return",
"Response",
".",
"redirect",
"(",
"req",
",",
"\"/2/Inspector\"",
",",
"\"src_key\"",
",",
"src_key",
".",
"toString",
"(",
")",
")",
";",
"}"
] | Called from some other page, to redirect that other page to this page. | [
"Called",
"from",
"some",
"other",
"page",
"to",
"redirect",
"that",
"other",
"page",
"to",
"this",
"page",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Inspector.java#L83-L85 | train |
h2oai/h2o-2 | src/main/java/water/fvec/NewChunk.java | NewChunk.addr | public void addr( NewChunk nc ) {
long [] tmpl = _ls; _ls = nc._ls; nc._ls = tmpl;
int [] tmpi = _xs; _xs = nc._xs; nc._xs = tmpi;
tmpi = _id; _id = nc._id; nc._id = tmpi;
double[] tmpd = _ds; _ds = nc._ds; nc._ds = tmpd;
int tmp = _sparseLen; _sparseLen=nc._sparseLen; nc._sparseLen=tmp;
tmp = _len; _len = nc._len; nc._len = tmp;
add(nc);
} | java | public void addr( NewChunk nc ) {
long [] tmpl = _ls; _ls = nc._ls; nc._ls = tmpl;
int [] tmpi = _xs; _xs = nc._xs; nc._xs = tmpi;
tmpi = _id; _id = nc._id; nc._id = tmpi;
double[] tmpd = _ds; _ds = nc._ds; nc._ds = tmpd;
int tmp = _sparseLen; _sparseLen=nc._sparseLen; nc._sparseLen=tmp;
tmp = _len; _len = nc._len; nc._len = tmp;
add(nc);
} | [
"public",
"void",
"addr",
"(",
"NewChunk",
"nc",
")",
"{",
"long",
"[",
"]",
"tmpl",
"=",
"_ls",
";",
"_ls",
"=",
"nc",
".",
"_ls",
";",
"nc",
".",
"_ls",
"=",
"tmpl",
";",
"int",
"[",
"]",
"tmpi",
"=",
"_xs",
";",
"_xs",
"=",
"nc",
".",
"_... | PREpend all of 'nc' onto the current NewChunk. Kill nc. | [
"PREpend",
"all",
"of",
"nc",
"onto",
"the",
"current",
"NewChunk",
".",
"Kill",
"nc",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/NewChunk.java#L309-L317 | train |
h2oai/h2o-2 | src/main/java/water/fvec/NewChunk.java | NewChunk.append2 | void append2( long l, int x ) {
if(_id == null || l != 0){
if(_ls == null || _sparseLen == _ls.length) {
append2slow();
// again call append2 since calling append2slow might have changed things (eg might have switched to sparse and l could be 0)
append2(l,x);
return;
}
_ls[_sparseLen] = l;
_xs[_sparseLen] = x;
if(_id != null)_id[_sparseLen] = _len;
_sparseLen++;
}
_len++;
assert _sparseLen <= _len;
} | java | void append2( long l, int x ) {
if(_id == null || l != 0){
if(_ls == null || _sparseLen == _ls.length) {
append2slow();
// again call append2 since calling append2slow might have changed things (eg might have switched to sparse and l could be 0)
append2(l,x);
return;
}
_ls[_sparseLen] = l;
_xs[_sparseLen] = x;
if(_id != null)_id[_sparseLen] = _len;
_sparseLen++;
}
_len++;
assert _sparseLen <= _len;
} | [
"void",
"append2",
"(",
"long",
"l",
",",
"int",
"x",
")",
"{",
"if",
"(",
"_id",
"==",
"null",
"||",
"l",
"!=",
"0",
")",
"{",
"if",
"(",
"_ls",
"==",
"null",
"||",
"_sparseLen",
"==",
"_ls",
".",
"length",
")",
"{",
"append2slow",
"(",
")",
... | Fast-path append long data | [
"Fast",
"-",
"path",
"append",
"long",
"data"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/NewChunk.java#L320-L335 | train |
h2oai/h2o-2 | src/main/java/water/UKV.java | UKV.put | static public void put( Key key, Value val, Futures fs ) {
assert !val.isLockable();
Value res = DKV.put(key,val,fs);
assert res == null || !res.isLockable();
} | java | static public void put( Key key, Value val, Futures fs ) {
assert !val.isLockable();
Value res = DKV.put(key,val,fs);
assert res == null || !res.isLockable();
} | [
"static",
"public",
"void",
"put",
"(",
"Key",
"key",
",",
"Value",
"val",
",",
"Futures",
"fs",
")",
"{",
"assert",
"!",
"val",
".",
"isLockable",
"(",
")",
";",
"Value",
"res",
"=",
"DKV",
".",
"put",
"(",
"key",
",",
"val",
",",
"fs",
")",
"... | have to use the Lockable interface for all updates. | [
"have",
"to",
"use",
"the",
"Lockable",
"interface",
"for",
"all",
"updates",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/UKV.java#L26-L30 | train |
h2oai/h2o-2 | src/main/java/water/UKV.java | UKV.put | static public void put( Key key, Freezable fr ) {
if( fr == null ) UKV.remove(key);
else UKV.put(key,new Value(key, fr));
} | java | static public void put( Key key, Freezable fr ) {
if( fr == null ) UKV.remove(key);
else UKV.put(key,new Value(key, fr));
} | [
"static",
"public",
"void",
"put",
"(",
"Key",
"key",
",",
"Freezable",
"fr",
")",
"{",
"if",
"(",
"fr",
"==",
"null",
")",
"UKV",
".",
"remove",
"(",
"key",
")",
";",
"else",
"UKV",
".",
"put",
"(",
"key",
",",
"new",
"Value",
"(",
"key",
",",... | Also, allow auto-serialization | [
"Also",
"allow",
"auto",
"-",
"serialization"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/UKV.java#L54-L57 | train |
h2oai/h2o-2 | src/main/java/water/H2ONode.java | H2ONode.remove_task_tracking | void remove_task_tracking( int task ) {
RPC.RPCCall rpc = _work.get(task);
if( rpc == null ) return; // Already stopped tracking
// Atomically attempt to remove the 'dt'. If we win, we are the sole
// thread running the dt.onAckAck. Also helps GC: the 'dt' is done (sent
// to client and we received the ACKACK), but the rpc might need to stick
// around a long time - and the dt might be big.
DTask dt = rpc._dt; // The existing DTask, if any
if( dt != null && RPC.RPCCall.CAS_DT.compareAndSet(rpc,dt,null) ) {
assert rpc._computed : "Still not done #"+task+" "+dt.getClass()+" from "+rpc._client;
AckAckTimeOutThread.PENDING.remove(rpc);
dt.onAckAck(); // One-time call on stop-tracking
}
// Roll-up as many done RPCs as we can, into the _removed_task_ids list
while( true ) {
int t = _removed_task_ids.get(); // Last already-removed ID
RPC.RPCCall rpc2 = _work.get(t+1); // RPC of 1st not-removed ID
if( rpc2 == null || rpc2._dt != null || !_removed_task_ids.compareAndSet(t,t+1) )
break; // Stop when we hit in-progress tasks
_work.remove(t+1); // Else we can remove the tracking now
}
} | java | void remove_task_tracking( int task ) {
RPC.RPCCall rpc = _work.get(task);
if( rpc == null ) return; // Already stopped tracking
// Atomically attempt to remove the 'dt'. If we win, we are the sole
// thread running the dt.onAckAck. Also helps GC: the 'dt' is done (sent
// to client and we received the ACKACK), but the rpc might need to stick
// around a long time - and the dt might be big.
DTask dt = rpc._dt; // The existing DTask, if any
if( dt != null && RPC.RPCCall.CAS_DT.compareAndSet(rpc,dt,null) ) {
assert rpc._computed : "Still not done #"+task+" "+dt.getClass()+" from "+rpc._client;
AckAckTimeOutThread.PENDING.remove(rpc);
dt.onAckAck(); // One-time call on stop-tracking
}
// Roll-up as many done RPCs as we can, into the _removed_task_ids list
while( true ) {
int t = _removed_task_ids.get(); // Last already-removed ID
RPC.RPCCall rpc2 = _work.get(t+1); // RPC of 1st not-removed ID
if( rpc2 == null || rpc2._dt != null || !_removed_task_ids.compareAndSet(t,t+1) )
break; // Stop when we hit in-progress tasks
_work.remove(t+1); // Else we can remove the tracking now
}
} | [
"void",
"remove_task_tracking",
"(",
"int",
"task",
")",
"{",
"RPC",
".",
"RPCCall",
"rpc",
"=",
"_work",
".",
"get",
"(",
"task",
")",
";",
"if",
"(",
"rpc",
"==",
"null",
")",
"return",
";",
"// Already stopped tracking",
"// Atomically attempt to remove the... | Stop tracking a remote task, because we got an ACKACK. | [
"Stop",
"tracking",
"a",
"remote",
"task",
"because",
"we",
"got",
"an",
"ACKACK",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/H2ONode.java#L315-L338 | train |
h2oai/h2o-2 | src/main/java/water/util/FrameUtils.java | FrameUtils.frame | public static Frame frame(String[] names, double[]... rows) {
assert names == null || names.length == rows[0].length;
Futures fs = new Futures();
Vec[] vecs = new Vec[rows[0].length];
Key keys[] = Vec.VectorGroup.VG_LEN1.addVecs(vecs.length);
for( int c = 0; c < vecs.length; c++ ) {
AppendableVec vec = new AppendableVec(keys[c]);
NewChunk chunk = new NewChunk(vec, 0);
for( int r = 0; r < rows.length; r++ )
chunk.addNum(rows[r][c]);
chunk.close(0, fs);
vecs[c] = vec.close(fs);
}
fs.blockForPending();
return new Frame(names, vecs);
} | java | public static Frame frame(String[] names, double[]... rows) {
assert names == null || names.length == rows[0].length;
Futures fs = new Futures();
Vec[] vecs = new Vec[rows[0].length];
Key keys[] = Vec.VectorGroup.VG_LEN1.addVecs(vecs.length);
for( int c = 0; c < vecs.length; c++ ) {
AppendableVec vec = new AppendableVec(keys[c]);
NewChunk chunk = new NewChunk(vec, 0);
for( int r = 0; r < rows.length; r++ )
chunk.addNum(rows[r][c]);
chunk.close(0, fs);
vecs[c] = vec.close(fs);
}
fs.blockForPending();
return new Frame(names, vecs);
} | [
"public",
"static",
"Frame",
"frame",
"(",
"String",
"[",
"]",
"names",
",",
"double",
"[",
"]",
"...",
"rows",
")",
"{",
"assert",
"names",
"==",
"null",
"||",
"names",
".",
"length",
"==",
"rows",
"[",
"0",
"]",
".",
"length",
";",
"Futures",
"fs... | Create a new frame based on given row data.
@param names names of frame columns
@param rows data given in the form of rows
@return new frame which contains columns named according given names and including given data | [
"Create",
"a",
"new",
"frame",
"based",
"on",
"given",
"row",
"data",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/util/FrameUtils.java#L31-L46 | train |
h2oai/h2o-2 | src/main/java/water/util/FrameUtils.java | FrameUtils.parseFrame | public static Frame parseFrame(Key okey, File ...files) {
assert files.length > 0 : "Ups. No files to parse!";
for (File f : files)
if (!f.exists())
throw new RuntimeException("File not found " + f);
// Create output key if not specified
if(okey == null)
okey = Key.make(files[0].getName());
Key[] fkeys = new Key[files.length];
int cnt = 0;
for (File f : files) fkeys[cnt++] = NFSFileVec.make(f);
return parseFrame(okey, fkeys);
} | java | public static Frame parseFrame(Key okey, File ...files) {
assert files.length > 0 : "Ups. No files to parse!";
for (File f : files)
if (!f.exists())
throw new RuntimeException("File not found " + f);
// Create output key if not specified
if(okey == null)
okey = Key.make(files[0].getName());
Key[] fkeys = new Key[files.length];
int cnt = 0;
for (File f : files) fkeys[cnt++] = NFSFileVec.make(f);
return parseFrame(okey, fkeys);
} | [
"public",
"static",
"Frame",
"parseFrame",
"(",
"Key",
"okey",
",",
"File",
"...",
"files",
")",
"{",
"assert",
"files",
".",
"length",
">",
"0",
":",
"\"Ups. No files to parse!\"",
";",
"for",
"(",
"File",
"f",
":",
"files",
")",
"if",
"(",
"!",
"f",
... | Parse given file into the form of frame represented by the given key.
@param okey destination key for parsed frame
@param files files to parse
@return a new frame | [
"Parse",
"given",
"file",
"into",
"the",
"form",
"of",
"frame",
"represented",
"by",
"the",
"given",
"key",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/util/FrameUtils.java#L54-L67 | train |
h2oai/h2o-2 | src/main/java/water/ga/GoogleAnalytics.java | GoogleAnalytics.processCustomDimensionParameters | private void processCustomDimensionParameters(@SuppressWarnings("rawtypes") GoogleAnalyticsRequest request, List<NameValuePair> postParms) {
Map<String, String> customDimParms = new HashMap<String, String>();
for (String defaultCustomDimKey : defaultRequest.customDimentions().keySet()) {
customDimParms.put(defaultCustomDimKey, defaultRequest.customDimentions().get(defaultCustomDimKey));
}
@SuppressWarnings("unchecked")
Map<String, String> requestCustomDims = request.customDimentions();
for (String requestCustomDimKey : requestCustomDims.keySet()) {
customDimParms.put(requestCustomDimKey, requestCustomDims.get(requestCustomDimKey));
}
for (String key : customDimParms.keySet()) {
postParms.add(new BasicNameValuePair(key, customDimParms.get(key)));
}
} | java | private void processCustomDimensionParameters(@SuppressWarnings("rawtypes") GoogleAnalyticsRequest request, List<NameValuePair> postParms) {
Map<String, String> customDimParms = new HashMap<String, String>();
for (String defaultCustomDimKey : defaultRequest.customDimentions().keySet()) {
customDimParms.put(defaultCustomDimKey, defaultRequest.customDimentions().get(defaultCustomDimKey));
}
@SuppressWarnings("unchecked")
Map<String, String> requestCustomDims = request.customDimentions();
for (String requestCustomDimKey : requestCustomDims.keySet()) {
customDimParms.put(requestCustomDimKey, requestCustomDims.get(requestCustomDimKey));
}
for (String key : customDimParms.keySet()) {
postParms.add(new BasicNameValuePair(key, customDimParms.get(key)));
}
} | [
"private",
"void",
"processCustomDimensionParameters",
"(",
"@",
"SuppressWarnings",
"(",
"\"rawtypes\"",
")",
"GoogleAnalyticsRequest",
"request",
",",
"List",
"<",
"NameValuePair",
">",
"postParms",
")",
"{",
"Map",
"<",
"String",
",",
"String",
">",
"customDimPar... | Processes the custom dimensions and adds the values to list of parameters, which would be posted to GA.
@param request
@param postParms | [
"Processes",
"the",
"custom",
"dimensions",
"and",
"adds",
"the",
"values",
"to",
"list",
"of",
"parameters",
"which",
"would",
"be",
"posted",
"to",
"GA",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/ga/GoogleAnalytics.java#L217-L232 | train |
h2oai/h2o-2 | src/main/java/water/ga/GoogleAnalytics.java | GoogleAnalytics.processCustomMetricParameters | private void processCustomMetricParameters(@SuppressWarnings("rawtypes") GoogleAnalyticsRequest request, List<NameValuePair> postParms) {
Map<String, String> customMetricParms = new HashMap<String, String>();
for (String defaultCustomMetricKey : defaultRequest.custommMetrics().keySet()) {
customMetricParms.put(defaultCustomMetricKey, defaultRequest.custommMetrics().get(defaultCustomMetricKey));
}
@SuppressWarnings("unchecked")
Map<String, String> requestCustomMetrics = request.custommMetrics();
for (String requestCustomDimKey : requestCustomMetrics.keySet()) {
customMetricParms.put(requestCustomDimKey, requestCustomMetrics.get(requestCustomDimKey));
}
for (String key : customMetricParms.keySet()) {
postParms.add(new BasicNameValuePair(key, customMetricParms.get(key)));
}
} | java | private void processCustomMetricParameters(@SuppressWarnings("rawtypes") GoogleAnalyticsRequest request, List<NameValuePair> postParms) {
Map<String, String> customMetricParms = new HashMap<String, String>();
for (String defaultCustomMetricKey : defaultRequest.custommMetrics().keySet()) {
customMetricParms.put(defaultCustomMetricKey, defaultRequest.custommMetrics().get(defaultCustomMetricKey));
}
@SuppressWarnings("unchecked")
Map<String, String> requestCustomMetrics = request.custommMetrics();
for (String requestCustomDimKey : requestCustomMetrics.keySet()) {
customMetricParms.put(requestCustomDimKey, requestCustomMetrics.get(requestCustomDimKey));
}
for (String key : customMetricParms.keySet()) {
postParms.add(new BasicNameValuePair(key, customMetricParms.get(key)));
}
} | [
"private",
"void",
"processCustomMetricParameters",
"(",
"@",
"SuppressWarnings",
"(",
"\"rawtypes\"",
")",
"GoogleAnalyticsRequest",
"request",
",",
"List",
"<",
"NameValuePair",
">",
"postParms",
")",
"{",
"Map",
"<",
"String",
",",
"String",
">",
"customMetricPar... | Processes the custom metrics and adds the values to list of parameters, which would be posted to GA.
@param request
@param postParms | [
"Processes",
"the",
"custom",
"metrics",
"and",
"adds",
"the",
"values",
"to",
"list",
"of",
"parameters",
"which",
"would",
"be",
"posted",
"to",
"GA",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/ga/GoogleAnalytics.java#L240-L255 | train |
h2oai/h2o-2 | src/main/java/water/fvec/CBSChunk.java | CBSChunk.clen | public static int clen(int values, int bpv) {
int len = (values*bpv) >> 3;
return values*bpv % 8 == 0 ? len : len + 1;
} | java | public static int clen(int values, int bpv) {
int len = (values*bpv) >> 3;
return values*bpv % 8 == 0 ? len : len + 1;
} | [
"public",
"static",
"int",
"clen",
"(",
"int",
"values",
",",
"int",
"bpv",
")",
"{",
"int",
"len",
"=",
"(",
"values",
"*",
"bpv",
")",
">>",
"3",
";",
"return",
"values",
"*",
"bpv",
"%",
"8",
"==",
"0",
"?",
"len",
":",
"len",
"+",
"1",
";... | Returns compressed len of the given array length if the value if represented by bpv-bits. | [
"Returns",
"compressed",
"len",
"of",
"the",
"given",
"array",
"length",
"if",
"the",
"value",
"if",
"represented",
"by",
"bpv",
"-",
"bits",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/CBSChunk.java#L84-L87 | train |
h2oai/h2o-2 | src/main/java/water/TaskGetKey.java | TaskGetKey.get | public static Value get( H2ONode target, Key key, int priority ) {
RPC<TaskGetKey> rpc, old;
while( true ) { // Repeat until we get a unique TGK installed per key
// Do we have an old TaskGetKey in-progress?
rpc = TGKS.get(key);
if( rpc != null && rpc._dt._priority >= priority )
break;
old = rpc;
// Make a new TGK.
rpc = new RPC(target,new TaskGetKey(key,priority),1.0f);
if( TGKS.putIfMatchUnlocked(key,rpc,old) == old ) {
rpc.setTaskNum().call(); // Start the op
break; // Successful install of a fresh RPC
}
}
Value val = rpc.get()._val; // Block for, then fetch out the result
TGKS.putIfMatchUnlocked(key,null,rpc); // Clear from cache
return val;
} | java | public static Value get( H2ONode target, Key key, int priority ) {
RPC<TaskGetKey> rpc, old;
while( true ) { // Repeat until we get a unique TGK installed per key
// Do we have an old TaskGetKey in-progress?
rpc = TGKS.get(key);
if( rpc != null && rpc._dt._priority >= priority )
break;
old = rpc;
// Make a new TGK.
rpc = new RPC(target,new TaskGetKey(key,priority),1.0f);
if( TGKS.putIfMatchUnlocked(key,rpc,old) == old ) {
rpc.setTaskNum().call(); // Start the op
break; // Successful install of a fresh RPC
}
}
Value val = rpc.get()._val; // Block for, then fetch out the result
TGKS.putIfMatchUnlocked(key,null,rpc); // Clear from cache
return val;
} | [
"public",
"static",
"Value",
"get",
"(",
"H2ONode",
"target",
",",
"Key",
"key",
",",
"int",
"priority",
")",
"{",
"RPC",
"<",
"TaskGetKey",
">",
"rpc",
",",
"old",
";",
"while",
"(",
"true",
")",
"{",
"// Repeat until we get a unique TGK installed per key",
... | Get a value from a named remote node | [
"Get",
"a",
"value",
"from",
"a",
"named",
"remote",
"node"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/TaskGetKey.java#L26-L44 | train |
h2oai/h2o-2 | src/main/java/water/api/RequestBuilders.java | RequestBuilders.build | protected String build(Response response) {
StringBuilder sb = new StringBuilder();
sb.append("<div class='container'>");
sb.append("<div class='row-fluid'>");
sb.append("<div class='span12'>");
sb.append(buildJSONResponseBox(response));
if( response._status == Response.Status.done ) response.toJava(sb);
sb.append(buildResponseHeader(response));
Builder builder = response.getBuilderFor(ROOT_OBJECT);
if (builder == null) {
sb.append("<h3>"+name()+"</h3>");
builder = OBJECT_BUILDER;
}
for( String h : response.getHeaders() ) sb.append(h);
if( response._response==null ) {
boolean done = response._req.toHTML(sb);
if(!done) {
JsonParser parser = new JsonParser();
String json = new String(response._req.writeJSON(new AutoBuffer()).buf());
JsonObject o = (JsonObject) parser.parse(json);
sb.append(builder.build(response, o, ""));
}
} else sb.append(builder.build(response,response._response,""));
sb.append("</div></div></div>");
return sb.toString();
} | java | protected String build(Response response) {
StringBuilder sb = new StringBuilder();
sb.append("<div class='container'>");
sb.append("<div class='row-fluid'>");
sb.append("<div class='span12'>");
sb.append(buildJSONResponseBox(response));
if( response._status == Response.Status.done ) response.toJava(sb);
sb.append(buildResponseHeader(response));
Builder builder = response.getBuilderFor(ROOT_OBJECT);
if (builder == null) {
sb.append("<h3>"+name()+"</h3>");
builder = OBJECT_BUILDER;
}
for( String h : response.getHeaders() ) sb.append(h);
if( response._response==null ) {
boolean done = response._req.toHTML(sb);
if(!done) {
JsonParser parser = new JsonParser();
String json = new String(response._req.writeJSON(new AutoBuffer()).buf());
JsonObject o = (JsonObject) parser.parse(json);
sb.append(builder.build(response, o, ""));
}
} else sb.append(builder.build(response,response._response,""));
sb.append("</div></div></div>");
return sb.toString();
} | [
"protected",
"String",
"build",
"(",
"Response",
"response",
")",
"{",
"StringBuilder",
"sb",
"=",
"new",
"StringBuilder",
"(",
")",
";",
"sb",
".",
"append",
"(",
"\"<div class='container'>\"",
")",
";",
"sb",
".",
"append",
"(",
"\"<div class='row-fluid'>\"",
... | Builds the HTML for the given response.
This is the root of the HTML. Should display all what is needed, including
the status, timing, etc. Then call the recursive builders for the
response's JSON. | [
"Builds",
"the",
"HTML",
"for",
"the",
"given",
"response",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/RequestBuilders.java#L60-L85 | train |
h2oai/h2o-2 | src/main/java/water/Boot.java | Boot.addExternalJars | public void addExternalJars(File file) throws IllegalAccessException, InvocationTargetException, MalformedURLException {
assert file.exists() : "Unable to find external file: " + file.getAbsolutePath();
if( file.isDirectory() ) {
for( File f : file.listFiles() ) addExternalJars(f);
} else if( file.getName().endsWith(".jar") ) {
Log.POST(22, "before (in addExternalJars) invoke _addUrl " + file.toURI().toURL());
_addUrl.invoke(_systemLoader, file.toURI().toURL());
Log.POST(22, "after (in addExternalJars) invoke _addUrl " + file.toURI().toURL());
}
} | java | public void addExternalJars(File file) throws IllegalAccessException, InvocationTargetException, MalformedURLException {
assert file.exists() : "Unable to find external file: " + file.getAbsolutePath();
if( file.isDirectory() ) {
for( File f : file.listFiles() ) addExternalJars(f);
} else if( file.getName().endsWith(".jar") ) {
Log.POST(22, "before (in addExternalJars) invoke _addUrl " + file.toURI().toURL());
_addUrl.invoke(_systemLoader, file.toURI().toURL());
Log.POST(22, "after (in addExternalJars) invoke _addUrl " + file.toURI().toURL());
}
} | [
"public",
"void",
"addExternalJars",
"(",
"File",
"file",
")",
"throws",
"IllegalAccessException",
",",
"InvocationTargetException",
",",
"MalformedURLException",
"{",
"assert",
"file",
".",
"exists",
"(",
")",
":",
"\"Unable to find external file: \"",
"+",
"file",
"... | Adds all jars in given directory to the classpath. | [
"Adds",
"all",
"jars",
"in",
"given",
"directory",
"to",
"the",
"classpath",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Boot.java#L343-L352 | train |
h2oai/h2o-2 | src/main/java/water/Boot.java | Boot.extractInternalFiles | private void extractInternalFiles() throws IOException {
Enumeration entries = _h2oJar.entries();
while( entries.hasMoreElements() ) {
ZipEntry e = (ZipEntry) entries.nextElement();
String name = e.getName();
if( e.isDirectory() ) continue; // mkdirs() will handle these
if(! name.endsWith(".jar") ) continue;
// extract the entry
File out = internalFile(name);
out.getParentFile().mkdirs();
try {
FileOutputStream fos = new FileOutputStream(out);
BufferedInputStream is = new BufferedInputStream (_h2oJar.getInputStream(e));
BufferedOutputStream os = new BufferedOutputStream(fos);
int read;
byte[] buffer = new byte[4096];
while( (read = is.read(buffer)) != -1 ) os.write(buffer,0,read);
os.flush();
fos.getFD().sync(); // Force the output; throws SyncFailedException if full
os.close();
is.close();
} catch( FileNotFoundException ex ) {
// Expected FNF if 2 H2O instances are attempting to unpack in the same directory
} catch( IOException ex ) {
Log.die("Unable to extract file "+name+" because of "+ex+". Make sure that directory " + _parentDir + " contains at least 50MB of free space to unpack H2O libraries.");
throw ex; // dead code
}
}
} | java | private void extractInternalFiles() throws IOException {
Enumeration entries = _h2oJar.entries();
while( entries.hasMoreElements() ) {
ZipEntry e = (ZipEntry) entries.nextElement();
String name = e.getName();
if( e.isDirectory() ) continue; // mkdirs() will handle these
if(! name.endsWith(".jar") ) continue;
// extract the entry
File out = internalFile(name);
out.getParentFile().mkdirs();
try {
FileOutputStream fos = new FileOutputStream(out);
BufferedInputStream is = new BufferedInputStream (_h2oJar.getInputStream(e));
BufferedOutputStream os = new BufferedOutputStream(fos);
int read;
byte[] buffer = new byte[4096];
while( (read = is.read(buffer)) != -1 ) os.write(buffer,0,read);
os.flush();
fos.getFD().sync(); // Force the output; throws SyncFailedException if full
os.close();
is.close();
} catch( FileNotFoundException ex ) {
// Expected FNF if 2 H2O instances are attempting to unpack in the same directory
} catch( IOException ex ) {
Log.die("Unable to extract file "+name+" because of "+ex+". Make sure that directory " + _parentDir + " contains at least 50MB of free space to unpack H2O libraries.");
throw ex; // dead code
}
}
} | [
"private",
"void",
"extractInternalFiles",
"(",
")",
"throws",
"IOException",
"{",
"Enumeration",
"entries",
"=",
"_h2oJar",
".",
"entries",
"(",
")",
";",
"while",
"(",
"entries",
".",
"hasMoreElements",
"(",
")",
")",
"{",
"ZipEntry",
"e",
"=",
"(",
"Zip... | Extracts the libraries from the jar file to given local path. | [
"Extracts",
"the",
"libraries",
"from",
"the",
"jar",
"file",
"to",
"given",
"local",
"path",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Boot.java#L355-L384 | train |
h2oai/h2o-2 | src/main/java/water/Boot.java | Boot.loadClass | @Override public synchronized Class loadClass( String name, boolean resolve ) throws ClassNotFoundException {
assert !name.equals(Weaver.class.getName());
Class z = loadClass2(name); // Do all the work in here
if( resolve ) resolveClass(z); // Resolve here instead in the work method
return z;
} | java | @Override public synchronized Class loadClass( String name, boolean resolve ) throws ClassNotFoundException {
assert !name.equals(Weaver.class.getName());
Class z = loadClass2(name); // Do all the work in here
if( resolve ) resolveClass(z); // Resolve here instead in the work method
return z;
} | [
"@",
"Override",
"public",
"synchronized",
"Class",
"loadClass",
"(",
"String",
"name",
",",
"boolean",
"resolve",
")",
"throws",
"ClassNotFoundException",
"{",
"assert",
"!",
"name",
".",
"equals",
"(",
"Weaver",
".",
"class",
".",
"getName",
"(",
")",
")",... | search, THEN the System or parent loader. | [
"search",
"THEN",
"the",
"System",
"or",
"parent",
"loader",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Boot.java#L424-L429 | train |
h2oai/h2o-2 | src/main/java/water/Boot.java | Boot.loadClass2 | private final Class loadClass2( String name ) throws ClassNotFoundException {
Class z = findLoadedClass(name); // Look for pre-existing class
if( z != null ) return z;
if( _weaver == null ) _weaver = new Weaver();
z = _weaver.weaveAndLoad(name, this); // Try the Happy Class Loader
if( z != null ) {
// Occasionally it's useful to print out class names that are actually Weaved.
// Leave this commented out println here so I can easily find it for next time.
// System.out.println("WEAVED: " + name);
return z;
}
z = getParent().loadClass(name); // Try the parent loader. Probably the System loader.
if( z != null ) return z;
return z;
} | java | private final Class loadClass2( String name ) throws ClassNotFoundException {
Class z = findLoadedClass(name); // Look for pre-existing class
if( z != null ) return z;
if( _weaver == null ) _weaver = new Weaver();
z = _weaver.weaveAndLoad(name, this); // Try the Happy Class Loader
if( z != null ) {
// Occasionally it's useful to print out class names that are actually Weaved.
// Leave this commented out println here so I can easily find it for next time.
// System.out.println("WEAVED: " + name);
return z;
}
z = getParent().loadClass(name); // Try the parent loader. Probably the System loader.
if( z != null ) return z;
return z;
} | [
"private",
"final",
"Class",
"loadClass2",
"(",
"String",
"name",
")",
"throws",
"ClassNotFoundException",
"{",
"Class",
"z",
"=",
"findLoadedClass",
"(",
"name",
")",
";",
"// Look for pre-existing class",
"if",
"(",
"z",
"!=",
"null",
")",
"return",
"z",
";"... | Run the class lookups in my favorite non-default order. | [
"Run",
"the",
"class",
"lookups",
"in",
"my",
"favorite",
"non",
"-",
"default",
"order",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Boot.java#L432-L446 | train |
h2oai/h2o-2 | src/main/java/water/api/DocGen.java | FieldDoc.arg | private RequestArguments.Argument arg(Request R) {
if( _arg != null ) return _arg;
Class clzz = R.getClass();
// An amazing crazy API from the JDK again. Cannot search for protected
// fields without either (1) throwing NoSuchFieldException if you ask in
// a subclass, or (2) sorting through the list of ALL fields and EACH
// level of the hierarchy. Sadly, I catch NSFE & loop.
while( true ) {
try {
Field field = clzz.getDeclaredField(_name);
field.setAccessible(true);
Object o = field.get(R);
return _arg=((RequestArguments.Argument)o);
}
catch( NoSuchFieldException ie ) { clzz = clzz.getSuperclass(); }
catch( IllegalAccessException ie ) { break; }
catch( ClassCastException ie ) { break; }
}
return null;
} | java | private RequestArguments.Argument arg(Request R) {
if( _arg != null ) return _arg;
Class clzz = R.getClass();
// An amazing crazy API from the JDK again. Cannot search for protected
// fields without either (1) throwing NoSuchFieldException if you ask in
// a subclass, or (2) sorting through the list of ALL fields and EACH
// level of the hierarchy. Sadly, I catch NSFE & loop.
while( true ) {
try {
Field field = clzz.getDeclaredField(_name);
field.setAccessible(true);
Object o = field.get(R);
return _arg=((RequestArguments.Argument)o);
}
catch( NoSuchFieldException ie ) { clzz = clzz.getSuperclass(); }
catch( IllegalAccessException ie ) { break; }
catch( ClassCastException ie ) { break; }
}
return null;
} | [
"private",
"RequestArguments",
".",
"Argument",
"arg",
"(",
"Request",
"R",
")",
"{",
"if",
"(",
"_arg",
"!=",
"null",
")",
"return",
"_arg",
";",
"Class",
"clzz",
"=",
"R",
".",
"getClass",
"(",
")",
";",
"// An amazing crazy API from the JDK again. Cannot s... | Specific accessors for input arguments. Not valid for JSON output fields. | [
"Specific",
"accessors",
"for",
"input",
"arguments",
".",
"Not",
"valid",
"for",
"JSON",
"output",
"fields",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/DocGen.java#L104-L123 | train |
h2oai/h2o-2 | h2o-samples/src/main/java/samples/launchers/CloudRemote.java | CloudRemote.launchEC2 | public static void launchEC2(Class<? extends Job> job, int boxes) throws Exception {
EC2 ec2 = new EC2();
ec2.boxes = boxes;
Cloud c = ec2.resize();
launch(c, job);
} | java | public static void launchEC2(Class<? extends Job> job, int boxes) throws Exception {
EC2 ec2 = new EC2();
ec2.boxes = boxes;
Cloud c = ec2.resize();
launch(c, job);
} | [
"public",
"static",
"void",
"launchEC2",
"(",
"Class",
"<",
"?",
"extends",
"Job",
">",
"job",
",",
"int",
"boxes",
")",
"throws",
"Exception",
"{",
"EC2",
"ec2",
"=",
"new",
"EC2",
"(",
")",
";",
"ec2",
".",
"boxes",
"=",
"boxes",
";",
"Cloud",
"c... | Starts EC2 machines and builds a cluster. | [
"Starts",
"EC2",
"machines",
"and",
"builds",
"a",
"cluster",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/h2o-samples/src/main/java/samples/launchers/CloudRemote.java#L25-L30 | train |
h2oai/h2o-2 | src/main/java/hex/singlenoderf/Tree.java | Tree.compute2 | @Override public void compute2() {
if(Job.isRunning(_jobKey)) {
Timer timer = new Timer();
_stats[0] = new ThreadLocal<hex.singlenoderf.Statistic>();
_stats[1] = new ThreadLocal<hex.singlenoderf.Statistic>();
Data d = _sampler.sample(_data, _seed, _modelKey, _local_mode);
hex.singlenoderf.Statistic left = getStatistic(0, d, _seed, _exclusiveSplitLimit);
// calculate the split
for( Row r : d ) left.addQ(r, _regression);
if (!_regression)
left.applyClassWeights(); // Weight the distributions
hex.singlenoderf.Statistic.Split spl = left.split(d, false);
if(spl.isLeafNode()) {
if(_regression) {
float av = d.computeAverage();
_tree = new LeafNode(-1, d.rows(), av);
} else {
_tree = new LeafNode(_data.unmapClass(spl._split), d.rows(),-1);
}
} else {
_tree = new FJBuild (spl, d, 0, _seed).compute();
}
_stats = null; // GC
if(_jobKey != null && !Job.isRunning(_jobKey)) throw new Job.JobCancelledException();
// Atomically improve the Model as well
Key tkey = toKey();
Key dtreeKey = null;
if (_score_pojo) dtreeKey = toCompressedKey();
appendKey(_modelKey, tkey, dtreeKey, _verbose > 10 ? _tree.toString(new StringBuilder(""), Integer.MAX_VALUE).toString() : "", _data_id);
// appendKey(_modelKey, tkey, _verbose > 10 ? _tree.toString(new StringBuilder(""), Integer.MAX_VALUE).toString() : "", _data_id);
StringBuilder sb = new StringBuilder("[RF] Tree : ").append(_data_id+1);
sb.append(" d=").append(_tree.depth()).append(" leaves=").append(_tree.leaves()).append(" done in ").append(timer).append('\n');
Log.info(sb.toString());
if (_verbose > 10) {
// Log.info(Sys.RANDF, _tree.toString(sb, Integer.MAX_VALUE).toString());
// Log.info(Sys.RANDF, _tree.toJava(sb, Integer.MAX_VALUE).toString());
}
} else throw new Job.JobCancelledException();
// Wait for completion
tryComplete();
} | java | @Override public void compute2() {
if(Job.isRunning(_jobKey)) {
Timer timer = new Timer();
_stats[0] = new ThreadLocal<hex.singlenoderf.Statistic>();
_stats[1] = new ThreadLocal<hex.singlenoderf.Statistic>();
Data d = _sampler.sample(_data, _seed, _modelKey, _local_mode);
hex.singlenoderf.Statistic left = getStatistic(0, d, _seed, _exclusiveSplitLimit);
// calculate the split
for( Row r : d ) left.addQ(r, _regression);
if (!_regression)
left.applyClassWeights(); // Weight the distributions
hex.singlenoderf.Statistic.Split spl = left.split(d, false);
if(spl.isLeafNode()) {
if(_regression) {
float av = d.computeAverage();
_tree = new LeafNode(-1, d.rows(), av);
} else {
_tree = new LeafNode(_data.unmapClass(spl._split), d.rows(),-1);
}
} else {
_tree = new FJBuild (spl, d, 0, _seed).compute();
}
_stats = null; // GC
if(_jobKey != null && !Job.isRunning(_jobKey)) throw new Job.JobCancelledException();
// Atomically improve the Model as well
Key tkey = toKey();
Key dtreeKey = null;
if (_score_pojo) dtreeKey = toCompressedKey();
appendKey(_modelKey, tkey, dtreeKey, _verbose > 10 ? _tree.toString(new StringBuilder(""), Integer.MAX_VALUE).toString() : "", _data_id);
// appendKey(_modelKey, tkey, _verbose > 10 ? _tree.toString(new StringBuilder(""), Integer.MAX_VALUE).toString() : "", _data_id);
StringBuilder sb = new StringBuilder("[RF] Tree : ").append(_data_id+1);
sb.append(" d=").append(_tree.depth()).append(" leaves=").append(_tree.leaves()).append(" done in ").append(timer).append('\n');
Log.info(sb.toString());
if (_verbose > 10) {
// Log.info(Sys.RANDF, _tree.toString(sb, Integer.MAX_VALUE).toString());
// Log.info(Sys.RANDF, _tree.toJava(sb, Integer.MAX_VALUE).toString());
}
} else throw new Job.JobCancelledException();
// Wait for completion
tryComplete();
} | [
"@",
"Override",
"public",
"void",
"compute2",
"(",
")",
"{",
"if",
"(",
"Job",
".",
"isRunning",
"(",
"_jobKey",
")",
")",
"{",
"Timer",
"timer",
"=",
"new",
"Timer",
"(",
")",
";",
"_stats",
"[",
"0",
"]",
"=",
"new",
"ThreadLocal",
"<",
"hex",
... | Actually build the tree | [
"Actually",
"build",
"the",
"tree"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/singlenoderf/Tree.java#L139-L181 | train |
h2oai/h2o-2 | src/main/java/hex/singlenoderf/Tree.java | Tree.appendKey | static void appendKey(Key model, final Key tKey, final Key dtKey, final String tString, final int tree_id) {
final int selfIdx = H2O.SELF.index();
new TAtomic<SpeeDRFModel>() {
@Override public SpeeDRFModel atomic(SpeeDRFModel old) {
if(old == null) return null;
return SpeeDRFModel.make(old, tKey, dtKey, selfIdx, tString, tree_id);
}
}.invoke(model);
} | java | static void appendKey(Key model, final Key tKey, final Key dtKey, final String tString, final int tree_id) {
final int selfIdx = H2O.SELF.index();
new TAtomic<SpeeDRFModel>() {
@Override public SpeeDRFModel atomic(SpeeDRFModel old) {
if(old == null) return null;
return SpeeDRFModel.make(old, tKey, dtKey, selfIdx, tString, tree_id);
}
}.invoke(model);
} | [
"static",
"void",
"appendKey",
"(",
"Key",
"model",
",",
"final",
"Key",
"tKey",
",",
"final",
"Key",
"dtKey",
",",
"final",
"String",
"tString",
",",
"final",
"int",
"tree_id",
")",
"{",
"final",
"int",
"selfIdx",
"=",
"H2O",
".",
"SELF",
".",
"index"... | which serializes "for free". | [
"which",
"serializes",
"for",
"free",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/singlenoderf/Tree.java#L185-L193 | train |
h2oai/h2o-2 | src/main/java/hex/singlenoderf/Tree.java | Tree.toKey | public Key toKey() {
AutoBuffer bs = new AutoBuffer();
bs.put4(_data_id);
bs.put8(_seed);
bs.put1(_producerId);
_tree.write(bs);
Key key = Key.make((byte)1,Key.DFJ_INTERNAL_USER, H2O.SELF);
DKV.put(key,new Value(key, bs.buf()));
return key;
} | java | public Key toKey() {
AutoBuffer bs = new AutoBuffer();
bs.put4(_data_id);
bs.put8(_seed);
bs.put1(_producerId);
_tree.write(bs);
Key key = Key.make((byte)1,Key.DFJ_INTERNAL_USER, H2O.SELF);
DKV.put(key,new Value(key, bs.buf()));
return key;
} | [
"public",
"Key",
"toKey",
"(",
")",
"{",
"AutoBuffer",
"bs",
"=",
"new",
"AutoBuffer",
"(",
")",
";",
"bs",
".",
"put4",
"(",
"_data_id",
")",
";",
"bs",
".",
"put8",
"(",
"_seed",
")",
";",
"bs",
".",
"put1",
"(",
"_producerId",
")",
";",
"_tree... | Write the Tree to a random Key homed here. | [
"Write",
"the",
"Tree",
"to",
"a",
"random",
"Key",
"homed",
"here",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/singlenoderf/Tree.java#L609-L618 | train |
h2oai/h2o-2 | src/main/java/hex/singlenoderf/Tree.java | Tree.classify | public static double classify( AutoBuffer ts, double[] ds, double badat, boolean regression ) {
ts.get4(); // Skip tree-id
ts.get8(); // Skip seed
ts.get1(); // Skip producer id
byte b;
while( (b = (byte) ts.get1()) != '[' ) { // While not a leaf indicator
assert b == '(' || b == 'S' || b == 'E';
int col = ts.get2(); // Column number in model-space
float fcmp = ts.get4f(); // Float to compare against
float fdat = Double.isNaN(ds[col]) ? fcmp - 1 : (float)ds[col];
int skip = (ts.get1()&0xFF);
if( skip == 0 ) skip = ts.get3();
if (b == 'E') {
if (fdat != fcmp)
ts.position(ts.position() + skip);
} else {
// Picking right subtree? then skip left subtree
if( fdat > fcmp ) ts.position(ts.position() + skip);
}
}
if(regression) return ts.get4f();
return ts.get1()&0xFF; // Return the leaf's class
} | java | public static double classify( AutoBuffer ts, double[] ds, double badat, boolean regression ) {
ts.get4(); // Skip tree-id
ts.get8(); // Skip seed
ts.get1(); // Skip producer id
byte b;
while( (b = (byte) ts.get1()) != '[' ) { // While not a leaf indicator
assert b == '(' || b == 'S' || b == 'E';
int col = ts.get2(); // Column number in model-space
float fcmp = ts.get4f(); // Float to compare against
float fdat = Double.isNaN(ds[col]) ? fcmp - 1 : (float)ds[col];
int skip = (ts.get1()&0xFF);
if( skip == 0 ) skip = ts.get3();
if (b == 'E') {
if (fdat != fcmp)
ts.position(ts.position() + skip);
} else {
// Picking right subtree? then skip left subtree
if( fdat > fcmp ) ts.position(ts.position() + skip);
}
}
if(regression) return ts.get4f();
return ts.get1()&0xFF; // Return the leaf's class
} | [
"public",
"static",
"double",
"classify",
"(",
"AutoBuffer",
"ts",
",",
"double",
"[",
"]",
"ds",
",",
"double",
"badat",
",",
"boolean",
"regression",
")",
"{",
"ts",
".",
"get4",
"(",
")",
";",
"// Skip tree-id",
"ts",
".",
"get8",
"(",
")",
";",
"... | Classify on the compressed tree bytes, from the pre-packed double data | [
"Classify",
"on",
"the",
"compressed",
"tree",
"bytes",
"from",
"the",
"pre",
"-",
"packed",
"double",
"data"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/singlenoderf/Tree.java#L660-L683 | train |
h2oai/h2o-2 | src/main/java/hex/singlenoderf/Tree.java | Tree.compress | public TreeModel.CompressedTree compress() {
// Log.info(Sys.RANDF, _tree.toString(new StringBuilder(), Integer.MAX_VALUE).toString());
int size = _tree.dtreeSize();
if (_tree instanceof LeafNode) {
size += 3;
}
AutoBuffer ab = new AutoBuffer(size);
if( _tree instanceof LeafNode)
ab.put1(0).put2((char)65535);
_tree.compress(ab);
assert ab.position() == size: "Actual size doesn't agree calculated size.";
char _nclass = (char)_data.classes();
return new TreeModel.CompressedTree(ab.buf(),_nclass,_seed);
} | java | public TreeModel.CompressedTree compress() {
// Log.info(Sys.RANDF, _tree.toString(new StringBuilder(), Integer.MAX_VALUE).toString());
int size = _tree.dtreeSize();
if (_tree instanceof LeafNode) {
size += 3;
}
AutoBuffer ab = new AutoBuffer(size);
if( _tree instanceof LeafNode)
ab.put1(0).put2((char)65535);
_tree.compress(ab);
assert ab.position() == size: "Actual size doesn't agree calculated size.";
char _nclass = (char)_data.classes();
return new TreeModel.CompressedTree(ab.buf(),_nclass,_seed);
} | [
"public",
"TreeModel",
".",
"CompressedTree",
"compress",
"(",
")",
"{",
"// Log.info(Sys.RANDF, _tree.toString(new StringBuilder(), Integer.MAX_VALUE).toString());",
"int",
"size",
"=",
"_tree",
".",
"dtreeSize",
"(",
")",
";",
"if",
"(",
"_tree",
"instanceof",
"LeafN... | Build a compressed-tree struct | [
"Build",
"a",
"compressed",
"-",
"tree",
"struct"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/hex/singlenoderf/Tree.java#L738-L751 | train |
h2oai/h2o-2 | src/main/java/water/api/FrameSplitPage.java | FrameSplitPage.execImpl | @Override protected void execImpl() {
Frame frame = source;
if (shuffle) {
// FIXME: switch to global shuffle
frame = MRUtils.shuffleFramePerChunk(Utils.generateShuffledKey(frame._key), frame, seed);
frame.delete_and_lock(null).unlock(null); // save frame to DKV
// delete frame on the end
gtrash(frame);
}
FrameSplitter fs = new FrameSplitter(frame, ratios);
H2O.submitTask(fs);
Frame[] splits = fs.getResult();
split_keys = new Key [splits.length];
split_rows = new long[splits.length];
float rsum = Utils.sum(ratios);
split_ratios = Arrays.copyOf(ratios, splits.length);
split_ratios[splits.length-1] = 1f-rsum;
long sum = 0;
for(int i=0; i<splits.length; i++) {
sum += splits[i].numRows();
split_keys[i] = splits[i]._key;
split_rows[i] = splits[i].numRows();
}
assert sum == source.numRows() : "Frame split produced wrong number of rows: nrows(source) != sum(nrows(splits))";
} | java | @Override protected void execImpl() {
Frame frame = source;
if (shuffle) {
// FIXME: switch to global shuffle
frame = MRUtils.shuffleFramePerChunk(Utils.generateShuffledKey(frame._key), frame, seed);
frame.delete_and_lock(null).unlock(null); // save frame to DKV
// delete frame on the end
gtrash(frame);
}
FrameSplitter fs = new FrameSplitter(frame, ratios);
H2O.submitTask(fs);
Frame[] splits = fs.getResult();
split_keys = new Key [splits.length];
split_rows = new long[splits.length];
float rsum = Utils.sum(ratios);
split_ratios = Arrays.copyOf(ratios, splits.length);
split_ratios[splits.length-1] = 1f-rsum;
long sum = 0;
for(int i=0; i<splits.length; i++) {
sum += splits[i].numRows();
split_keys[i] = splits[i]._key;
split_rows[i] = splits[i].numRows();
}
assert sum == source.numRows() : "Frame split produced wrong number of rows: nrows(source) != sum(nrows(splits))";
} | [
"@",
"Override",
"protected",
"void",
"execImpl",
"(",
")",
"{",
"Frame",
"frame",
"=",
"source",
";",
"if",
"(",
"shuffle",
")",
"{",
"// FIXME: switch to global shuffle",
"frame",
"=",
"MRUtils",
".",
"shuffleFramePerChunk",
"(",
"Utils",
".",
"generateShuffle... | Run the function | [
"Run",
"the",
"function"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/FrameSplitPage.java#L65-L91 | train |
h2oai/h2o-2 | src/main/java/water/util/UIUtils.java | UIUtils.qlink | public static <T> String qlink(Class<T> page, Key k, String content) {
return qlink(page, "source", k, content );
} | java | public static <T> String qlink(Class<T> page, Key k, String content) {
return qlink(page, "source", k, content );
} | [
"public",
"static",
"<",
"T",
">",
"String",
"qlink",
"(",
"Class",
"<",
"T",
">",
"page",
",",
"Key",
"k",
",",
"String",
"content",
")",
"{",
"return",
"qlink",
"(",
"page",
",",
"\"source\"",
",",
"k",
",",
"content",
")",
";",
"}"
] | Return the query link to this page | [
"Return",
"the",
"query",
"link",
"to",
"this",
"page"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/util/UIUtils.java#L10-L12 | train |
h2oai/h2o-2 | src/main/java/water/MemoryManager.java | MemoryManager.malloc | public static Object malloc(int elems, long bytes, int type, Object orig, int from ) {
return malloc(elems,bytes,type,orig,from,false);
} | java | public static Object malloc(int elems, long bytes, int type, Object orig, int from ) {
return malloc(elems,bytes,type,orig,from,false);
} | [
"public",
"static",
"Object",
"malloc",
"(",
"int",
"elems",
",",
"long",
"bytes",
",",
"int",
"type",
",",
"Object",
"orig",
",",
"int",
"from",
")",
"{",
"return",
"malloc",
"(",
"elems",
",",
"bytes",
",",
"type",
",",
"orig",
",",
"from",
",",
... | Catches OutOfMemory, clears cache & retries. | [
"Catches",
"OutOfMemory",
"clears",
"cache",
"&",
"retries",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/MemoryManager.java#L222-L224 | train |
h2oai/h2o-2 | src/main/java/water/MemoryManager.java | MemoryManager.tryReserveTaskMem | public static boolean tryReserveTaskMem(long m){
if(!CAN_ALLOC)return false;
if( m == 0 ) return true;
assert m >= 0:"m < 0: " + m;
long current = _taskMem.addAndGet(-m);
if(current < 0){
_taskMem.addAndGet(m);
return false;
}
return true;
} | java | public static boolean tryReserveTaskMem(long m){
if(!CAN_ALLOC)return false;
if( m == 0 ) return true;
assert m >= 0:"m < 0: " + m;
long current = _taskMem.addAndGet(-m);
if(current < 0){
_taskMem.addAndGet(m);
return false;
}
return true;
} | [
"public",
"static",
"boolean",
"tryReserveTaskMem",
"(",
"long",
"m",
")",
"{",
"if",
"(",
"!",
"CAN_ALLOC",
")",
"return",
"false",
";",
"if",
"(",
"m",
"==",
"0",
")",
"return",
"true",
";",
"assert",
"m",
">=",
"0",
":",
"\"m < 0: \"",
"+",
"m",
... | Try to reserve memory needed for task execution and return true if
succeeded. Tasks have a shared pool of memory which they should ask for
in advance before they even try to allocate it.
This method is another backpressure mechanism to make sure we do not
exhaust system's resources by running too many tasks at the same time.
Tasks are expected to reserve memory before proceeding with their
execution and making sure they release it when done.
@param m - requested number of bytes
@return true if there is enough free memory | [
"Try",
"to",
"reserve",
"memory",
"needed",
"for",
"task",
"execution",
"and",
"return",
"true",
"if",
"succeeded",
".",
"Tasks",
"have",
"a",
"shared",
"pool",
"of",
"memory",
"which",
"they",
"should",
"ask",
"for",
"in",
"advance",
"before",
"they",
"ev... | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/MemoryManager.java#L310-L320 | train |
h2oai/h2o-2 | src/main/java/water/parser/CsvParser.java | CsvParser.determineSeparatorCounts | private static int[] determineSeparatorCounts(String from, int single_quote) {
int[] result = new int[separators.length];
byte[] bits = from.getBytes();
boolean in_quote = false;
for( int j=0; j< bits.length; j++ ) {
byte c = bits[j];
if( (c == single_quote) || (c == CHAR_DOUBLE_QUOTE) )
in_quote ^= true;
if( !in_quote || c == HIVE_SEP )
for( int i = 0; i < separators.length; ++i)
if (c == separators[i])
++result[i];
}
return result;
} | java | private static int[] determineSeparatorCounts(String from, int single_quote) {
int[] result = new int[separators.length];
byte[] bits = from.getBytes();
boolean in_quote = false;
for( int j=0; j< bits.length; j++ ) {
byte c = bits[j];
if( (c == single_quote) || (c == CHAR_DOUBLE_QUOTE) )
in_quote ^= true;
if( !in_quote || c == HIVE_SEP )
for( int i = 0; i < separators.length; ++i)
if (c == separators[i])
++result[i];
}
return result;
} | [
"private",
"static",
"int",
"[",
"]",
"determineSeparatorCounts",
"(",
"String",
"from",
",",
"int",
"single_quote",
")",
"{",
"int",
"[",
"]",
"result",
"=",
"new",
"int",
"[",
"separators",
".",
"length",
"]",
";",
"byte",
"[",
"]",
"bits",
"=",
"fro... | Dermines the number of separators in given line. Correctly handles quoted
tokens. | [
"Dermines",
"the",
"number",
"of",
"separators",
"in",
"given",
"line",
".",
"Correctly",
"handles",
"quoted",
"tokens",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/parser/CsvParser.java#L512-L526 | train |
h2oai/h2o-2 | src/main/java/water/parser/CsvParser.java | CsvParser.determineTokens | private static String[] determineTokens(String from, byte separator, int single_quote) {
ArrayList<String> tokens = new ArrayList();
byte[] bits = from.getBytes();
int offset = 0;
int quotes = 0;
while (offset < bits.length) {
while ((offset < bits.length) && (bits[offset] == CHAR_SPACE)) ++offset; // skip first whitespace
if(offset == bits.length)break;
StringBuilder t = new StringBuilder();
byte c = bits[offset];
if ((c == CHAR_DOUBLE_QUOTE) || (c == single_quote)) {
quotes = c;
++offset;
}
while (offset < bits.length) {
c = bits[offset];
if ((c == quotes)) {
++offset;
if ((offset < bits.length) && (bits[offset] == c)) {
t.append((char)c);
++offset;
continue;
}
quotes = 0;
} else if ((quotes == 0) && ((c == separator) || (c == CHAR_CR) || (c == CHAR_LF))) {
break;
} else {
t.append((char)c);
++offset;
}
}
c = (offset == bits.length) ? CHAR_LF : bits[offset];
tokens.add(t.toString());
if ((c == CHAR_CR) || (c == CHAR_LF) || (offset == bits.length))
break;
if (c != separator)
return new String[0]; // an error
++offset; // Skip separator
}
// If we have trailing empty columns (split by seperators) such as ",,\n"
// then we did not add the final (empty) column, so the column count will
// be down by 1. Add an extra empty column here
if( bits[bits.length-1] == separator && bits[bits.length-1] != CHAR_SPACE)
tokens.add("");
return tokens.toArray(new String[tokens.size()]);
} | java | private static String[] determineTokens(String from, byte separator, int single_quote) {
ArrayList<String> tokens = new ArrayList();
byte[] bits = from.getBytes();
int offset = 0;
int quotes = 0;
while (offset < bits.length) {
while ((offset < bits.length) && (bits[offset] == CHAR_SPACE)) ++offset; // skip first whitespace
if(offset == bits.length)break;
StringBuilder t = new StringBuilder();
byte c = bits[offset];
if ((c == CHAR_DOUBLE_QUOTE) || (c == single_quote)) {
quotes = c;
++offset;
}
while (offset < bits.length) {
c = bits[offset];
if ((c == quotes)) {
++offset;
if ((offset < bits.length) && (bits[offset] == c)) {
t.append((char)c);
++offset;
continue;
}
quotes = 0;
} else if ((quotes == 0) && ((c == separator) || (c == CHAR_CR) || (c == CHAR_LF))) {
break;
} else {
t.append((char)c);
++offset;
}
}
c = (offset == bits.length) ? CHAR_LF : bits[offset];
tokens.add(t.toString());
if ((c == CHAR_CR) || (c == CHAR_LF) || (offset == bits.length))
break;
if (c != separator)
return new String[0]; // an error
++offset; // Skip separator
}
// If we have trailing empty columns (split by seperators) such as ",,\n"
// then we did not add the final (empty) column, so the column count will
// be down by 1. Add an extra empty column here
if( bits[bits.length-1] == separator && bits[bits.length-1] != CHAR_SPACE)
tokens.add("");
return tokens.toArray(new String[tokens.size()]);
} | [
"private",
"static",
"String",
"[",
"]",
"determineTokens",
"(",
"String",
"from",
",",
"byte",
"separator",
",",
"int",
"single_quote",
")",
"{",
"ArrayList",
"<",
"String",
">",
"tokens",
"=",
"new",
"ArrayList",
"(",
")",
";",
"byte",
"[",
"]",
"bits"... | Determines the tokens that are inside a line and returns them as strings
in an array. Assumes the given separator. | [
"Determines",
"the",
"tokens",
"that",
"are",
"inside",
"a",
"line",
"and",
"returns",
"them",
"as",
"strings",
"in",
"an",
"array",
".",
"Assumes",
"the",
"given",
"separator",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/parser/CsvParser.java#L531-L576 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeAndEnhanceModel | protected static void summarizeAndEnhanceModel(ModelSummary summary, Model model, boolean find_compatible_frames, Map<String, Frame> all_frames, Map<String, Set<String>> all_frames_cols) {
if (model instanceof GLMModel) {
summarizeGLMModel(summary, (GLMModel) model);
} else if (model instanceof DRF.DRFModel) {
summarizeDRFModel(summary, (DRF.DRFModel) model);
} else if (model instanceof hex.deeplearning.DeepLearningModel) {
summarizeDeepLearningModel(summary, (hex.deeplearning.DeepLearningModel) model);
} else if (model instanceof hex.gbm.GBM.GBMModel) {
summarizeGBMModel(summary, (hex.gbm.GBM.GBMModel) model);
} else if (model instanceof hex.singlenoderf.SpeeDRFModel) {
summarizeSpeeDRFModel(summary, (hex.singlenoderf.SpeeDRFModel) model);
} else if (model instanceof NBModel) {
summarizeNBModel(summary, (NBModel) model);
} else {
// catch-all
summarizeModelCommonFields(summary, model);
}
if (find_compatible_frames) {
Map<String, Frame> compatible_frames = findCompatibleFrames(model, all_frames, all_frames_cols);
summary.compatible_frames = compatible_frames.keySet();
}
} | java | protected static void summarizeAndEnhanceModel(ModelSummary summary, Model model, boolean find_compatible_frames, Map<String, Frame> all_frames, Map<String, Set<String>> all_frames_cols) {
if (model instanceof GLMModel) {
summarizeGLMModel(summary, (GLMModel) model);
} else if (model instanceof DRF.DRFModel) {
summarizeDRFModel(summary, (DRF.DRFModel) model);
} else if (model instanceof hex.deeplearning.DeepLearningModel) {
summarizeDeepLearningModel(summary, (hex.deeplearning.DeepLearningModel) model);
} else if (model instanceof hex.gbm.GBM.GBMModel) {
summarizeGBMModel(summary, (hex.gbm.GBM.GBMModel) model);
} else if (model instanceof hex.singlenoderf.SpeeDRFModel) {
summarizeSpeeDRFModel(summary, (hex.singlenoderf.SpeeDRFModel) model);
} else if (model instanceof NBModel) {
summarizeNBModel(summary, (NBModel) model);
} else {
// catch-all
summarizeModelCommonFields(summary, model);
}
if (find_compatible_frames) {
Map<String, Frame> compatible_frames = findCompatibleFrames(model, all_frames, all_frames_cols);
summary.compatible_frames = compatible_frames.keySet();
}
} | [
"protected",
"static",
"void",
"summarizeAndEnhanceModel",
"(",
"ModelSummary",
"summary",
",",
"Model",
"model",
",",
"boolean",
"find_compatible_frames",
",",
"Map",
"<",
"String",
",",
"Frame",
">",
"all_frames",
",",
"Map",
"<",
"String",
",",
"Set",
"<",
... | Summarize subclasses of water.Model. | [
"Summarize",
"subclasses",
"of",
"water",
".",
"Model",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L168-L190 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeModelCommonFields | private static void summarizeModelCommonFields(ModelSummary summary, Model model) {
String[] names = model._names;
summary.warnings = model.warnings;
summary.model_algorithm = model.getClass().toString(); // fallback only
// model.job() is a local copy; on multinode clusters we need to get from the DKV
Key job_key = ((Job)model.job()).self();
if (null == job_key) throw H2O.fail("Null job key for model: " + (model == null ? "null model" : model._key)); // later when we deserialize models from disk we'll relax this constraint
Job job = DKV.get(job_key).get();
summary.state = job.getState();
summary.model_category = model.getModelCategory();
UniqueId unique_id = model.getUniqueId();
summary.id = unique_id.getId();
summary.key = unique_id.getKey();
summary.creation_epoch_time_millis = unique_id.getCreationEpochTimeMillis();
summary.training_duration_in_ms = model.training_duration_in_ms;
summary.response_column_name = names[names.length - 1];
for (int i = 0; i < names.length - 1; i++)
summary.input_column_names.add(names[i]);
// Ugh.
VarImp vi = model.varimp();
if (null != vi) {
summary.variable_importances = new LinkedHashMap();
summary.variable_importances.put("varimp", vi.varimp);
summary.variable_importances.put("variables", vi.getVariables());
summary.variable_importances.put("method", vi.method);
summary.variable_importances.put("max_var", vi.max_var);
summary.variable_importances.put("scaled", vi.scaled());
}
} | java | private static void summarizeModelCommonFields(ModelSummary summary, Model model) {
String[] names = model._names;
summary.warnings = model.warnings;
summary.model_algorithm = model.getClass().toString(); // fallback only
// model.job() is a local copy; on multinode clusters we need to get from the DKV
Key job_key = ((Job)model.job()).self();
if (null == job_key) throw H2O.fail("Null job key for model: " + (model == null ? "null model" : model._key)); // later when we deserialize models from disk we'll relax this constraint
Job job = DKV.get(job_key).get();
summary.state = job.getState();
summary.model_category = model.getModelCategory();
UniqueId unique_id = model.getUniqueId();
summary.id = unique_id.getId();
summary.key = unique_id.getKey();
summary.creation_epoch_time_millis = unique_id.getCreationEpochTimeMillis();
summary.training_duration_in_ms = model.training_duration_in_ms;
summary.response_column_name = names[names.length - 1];
for (int i = 0; i < names.length - 1; i++)
summary.input_column_names.add(names[i]);
// Ugh.
VarImp vi = model.varimp();
if (null != vi) {
summary.variable_importances = new LinkedHashMap();
summary.variable_importances.put("varimp", vi.varimp);
summary.variable_importances.put("variables", vi.getVariables());
summary.variable_importances.put("method", vi.method);
summary.variable_importances.put("max_var", vi.max_var);
summary.variable_importances.put("scaled", vi.scaled());
}
} | [
"private",
"static",
"void",
"summarizeModelCommonFields",
"(",
"ModelSummary",
"summary",
",",
"Model",
"model",
")",
"{",
"String",
"[",
"]",
"names",
"=",
"model",
".",
"_names",
";",
"summary",
".",
"warnings",
"=",
"model",
".",
"warnings",
";",
"summar... | Summarize fields which are generic to water.Model. | [
"Summarize",
"fields",
"which",
"are",
"generic",
"to",
"water",
".",
"Model",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L196-L231 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeGLMModel | private static void summarizeGLMModel(ModelSummary summary, hex.glm.GLMModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "GLM";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, GLM_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, GLM_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, GLM_expert_params);
} | java | private static void summarizeGLMModel(ModelSummary summary, hex.glm.GLMModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "GLM";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, GLM_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, GLM_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, GLM_expert_params);
} | [
"private",
"static",
"void",
"summarizeGLMModel",
"(",
"ModelSummary",
"summary",
",",
"hex",
".",
"glm",
".",
"GLMModel",
"model",
")",
"{",
"// add generic fields such as column names",
"summarizeModelCommonFields",
"(",
"summary",
",",
"model",
")",
";",
"summary",... | Summarize fields which are specific to hex.glm.GLMModel. | [
"Summarize",
"fields",
"which",
"are",
"specific",
"to",
"hex",
".",
"glm",
".",
"GLMModel",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L244-L254 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeDRFModel | private static void summarizeDRFModel(ModelSummary summary, hex.drf.DRF.DRFModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "BigData RF";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, DRF_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, DRF_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, DRF_expert_params);
} | java | private static void summarizeDRFModel(ModelSummary summary, hex.drf.DRF.DRFModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "BigData RF";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, DRF_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, DRF_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, DRF_expert_params);
} | [
"private",
"static",
"void",
"summarizeDRFModel",
"(",
"ModelSummary",
"summary",
",",
"hex",
".",
"drf",
".",
"DRF",
".",
"DRFModel",
"model",
")",
"{",
"// add generic fields such as column names",
"summarizeModelCommonFields",
"(",
"summary",
",",
"model",
")",
"... | Summarize fields which are specific to hex.drf.DRF.DRFModel. | [
"Summarize",
"fields",
"which",
"are",
"specific",
"to",
"hex",
".",
"drf",
".",
"DRF",
".",
"DRFModel",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L267-L277 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeSpeeDRFModel | private static void summarizeSpeeDRFModel(ModelSummary summary, hex.singlenoderf.SpeeDRFModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "Random Forest";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, SpeeDRF_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, SpeeDRF_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, SpeeDRF_expert_params);
} | java | private static void summarizeSpeeDRFModel(ModelSummary summary, hex.singlenoderf.SpeeDRFModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "Random Forest";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, SpeeDRF_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, SpeeDRF_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, SpeeDRF_expert_params);
} | [
"private",
"static",
"void",
"summarizeSpeeDRFModel",
"(",
"ModelSummary",
"summary",
",",
"hex",
".",
"singlenoderf",
".",
"SpeeDRFModel",
"model",
")",
"{",
"// add generic fields such as column names",
"summarizeModelCommonFields",
"(",
"summary",
",",
"model",
")",
... | Summarize fields which are specific to hex.drf.DRF.SpeeDRFModel. | [
"Summarize",
"fields",
"which",
"are",
"specific",
"to",
"hex",
".",
"drf",
".",
"DRF",
".",
"SpeeDRFModel",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L289-L299 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeDeepLearningModel | private static void summarizeDeepLearningModel(ModelSummary summary, hex.deeplearning.DeepLearningModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "DeepLearning";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, DL_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, DL_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, DL_expert_params);
} | java | private static void summarizeDeepLearningModel(ModelSummary summary, hex.deeplearning.DeepLearningModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "DeepLearning";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, DL_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, DL_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, DL_expert_params);
} | [
"private",
"static",
"void",
"summarizeDeepLearningModel",
"(",
"ModelSummary",
"summary",
",",
"hex",
".",
"deeplearning",
".",
"DeepLearningModel",
"model",
")",
"{",
"// add generic fields such as column names",
"summarizeModelCommonFields",
"(",
"summary",
",",
"model",... | Summarize fields which are specific to hex.deeplearning.DeepLearningModel. | [
"Summarize",
"fields",
"which",
"are",
"specific",
"to",
"hex",
".",
"deeplearning",
".",
"DeepLearningModel",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L311-L321 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeGBMModel | private static void summarizeGBMModel(ModelSummary summary, hex.gbm.GBM.GBMModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "GBM";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, GBM_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, GBM_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, GBM_expert_params);
} | java | private static void summarizeGBMModel(ModelSummary summary, hex.gbm.GBM.GBMModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "GBM";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, GBM_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, GBM_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, GBM_expert_params);
} | [
"private",
"static",
"void",
"summarizeGBMModel",
"(",
"ModelSummary",
"summary",
",",
"hex",
".",
"gbm",
".",
"GBM",
".",
"GBMModel",
"model",
")",
"{",
"// add generic fields such as column names",
"summarizeModelCommonFields",
"(",
"summary",
",",
"model",
")",
"... | Summarize fields which are specific to hex.gbm.GBM.GBMModel. | [
"Summarize",
"fields",
"which",
"are",
"specific",
"to",
"hex",
".",
"gbm",
".",
"GBM",
".",
"GBMModel",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L333-L343 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.summarizeNBModel | private static void summarizeNBModel(ModelSummary summary, hex.nb.NBModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "Naive Bayes";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, NB_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, NB_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, NB_expert_params);
} | java | private static void summarizeNBModel(ModelSummary summary, hex.nb.NBModel model) {
// add generic fields such as column names
summarizeModelCommonFields(summary, model);
summary.model_algorithm = "Naive Bayes";
JsonObject all_params = (model.get_params()).toJSON();
summary.critical_parameters = whitelistJsonObject(all_params, NB_critical_params);
summary.secondary_parameters = whitelistJsonObject(all_params, NB_secondary_params);
summary.expert_parameters = whitelistJsonObject(all_params, NB_expert_params);
} | [
"private",
"static",
"void",
"summarizeNBModel",
"(",
"ModelSummary",
"summary",
",",
"hex",
".",
"nb",
".",
"NBModel",
"model",
")",
"{",
"// add generic fields such as column names",
"summarizeModelCommonFields",
"(",
"summary",
",",
"model",
")",
";",
"summary",
... | Summarize fields which are specific to hex.nb.NBModel. | [
"Summarize",
"fields",
"which",
"are",
"specific",
"to",
"hex",
".",
"nb",
".",
"NBModel",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L355-L365 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.fetchAll | protected Map<String, Model> fetchAll() {
return H2O.KeySnapshot.globalSnapshot().fetchAll(water.Model.class);
} | java | protected Map<String, Model> fetchAll() {
return H2O.KeySnapshot.globalSnapshot().fetchAll(water.Model.class);
} | [
"protected",
"Map",
"<",
"String",
",",
"Model",
">",
"fetchAll",
"(",
")",
"{",
"return",
"H2O",
".",
"KeySnapshot",
".",
"globalSnapshot",
"(",
")",
".",
"fetchAll",
"(",
"water",
".",
"Model",
".",
"class",
")",
";",
"}"
] | Fetch all Models from the KV store. | [
"Fetch",
"all",
"Models",
"from",
"the",
"KV",
"store",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L370-L372 | train |
h2oai/h2o-2 | src/main/java/water/api/Models.java | Models.serveOneOrAll | private Response serveOneOrAll(Map<String, Model> modelsMap) {
// returns empty sets if !this.find_compatible_frames
Pair<Map<String, Frame>, Map<String, Set<String>>> frames_info = fetchFrames();
Map<String, Frame> all_frames = frames_info.getFirst();
Map<String, Set<String>> all_frames_cols = frames_info.getSecond();
Map<String, ModelSummary> modelSummaries = Models.generateModelSummaries(null, modelsMap, find_compatible_frames, all_frames, all_frames_cols);
Map resultsMap = new LinkedHashMap();
resultsMap.put("models", modelSummaries);
// If find_compatible_frames then include a map of the Frame summaries. Should we put this on a separate switch?
if (this.find_compatible_frames) {
Set<String> all_referenced_frames = new TreeSet<String>();
for (Map.Entry<String, ModelSummary> entry: modelSummaries.entrySet()) {
ModelSummary summary = entry.getValue();
all_referenced_frames.addAll(summary.compatible_frames);
}
Map<String, FrameSummary> frameSummaries = Frames.generateFrameSummaries(all_referenced_frames, all_frames, false, null, null);
resultsMap.put("frames", frameSummaries);
}
// TODO: temporary hack to get things going
String json = gson.toJson(resultsMap);
JsonObject result = gson.fromJson(json, JsonElement.class).getAsJsonObject();
return Response.done(result);
} | java | private Response serveOneOrAll(Map<String, Model> modelsMap) {
// returns empty sets if !this.find_compatible_frames
Pair<Map<String, Frame>, Map<String, Set<String>>> frames_info = fetchFrames();
Map<String, Frame> all_frames = frames_info.getFirst();
Map<String, Set<String>> all_frames_cols = frames_info.getSecond();
Map<String, ModelSummary> modelSummaries = Models.generateModelSummaries(null, modelsMap, find_compatible_frames, all_frames, all_frames_cols);
Map resultsMap = new LinkedHashMap();
resultsMap.put("models", modelSummaries);
// If find_compatible_frames then include a map of the Frame summaries. Should we put this on a separate switch?
if (this.find_compatible_frames) {
Set<String> all_referenced_frames = new TreeSet<String>();
for (Map.Entry<String, ModelSummary> entry: modelSummaries.entrySet()) {
ModelSummary summary = entry.getValue();
all_referenced_frames.addAll(summary.compatible_frames);
}
Map<String, FrameSummary> frameSummaries = Frames.generateFrameSummaries(all_referenced_frames, all_frames, false, null, null);
resultsMap.put("frames", frameSummaries);
}
// TODO: temporary hack to get things going
String json = gson.toJson(resultsMap);
JsonObject result = gson.fromJson(json, JsonElement.class).getAsJsonObject();
return Response.done(result);
} | [
"private",
"Response",
"serveOneOrAll",
"(",
"Map",
"<",
"String",
",",
"Model",
">",
"modelsMap",
")",
"{",
"// returns empty sets if !this.find_compatible_frames",
"Pair",
"<",
"Map",
"<",
"String",
",",
"Frame",
">",
",",
"Map",
"<",
"String",
",",
"Set",
"... | Fetch all the Models from the KV store, sumamrize and enhance them, and return a map of them. | [
"Fetch",
"all",
"the",
"Models",
"from",
"the",
"KV",
"store",
"sumamrize",
"and",
"enhance",
"them",
"and",
"return",
"a",
"map",
"of",
"them",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/api/Models.java#L386-L415 | train |
h2oai/h2o-2 | src/main/java/water/fvec/ParseDataset2.java | ParseProgress.make | static ParseProgress make( Key[] fkeys ) {
long total = 0;
for( Key fkey : fkeys )
total += getVec(fkey).length();
return new ParseProgress(0,total);
} | java | static ParseProgress make( Key[] fkeys ) {
long total = 0;
for( Key fkey : fkeys )
total += getVec(fkey).length();
return new ParseProgress(0,total);
} | [
"static",
"ParseProgress",
"make",
"(",
"Key",
"[",
"]",
"fkeys",
")",
"{",
"long",
"total",
"=",
"0",
";",
"for",
"(",
"Key",
"fkey",
":",
"fkeys",
")",
"total",
"+=",
"getVec",
"(",
"fkey",
")",
".",
"length",
"(",
")",
";",
"return",
"new",
"P... | Total number of steps is equal to total bytecount across files | [
"Total",
"number",
"of",
"steps",
"is",
"equal",
"to",
"total",
"bytecount",
"across",
"files"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/fvec/ParseDataset2.java#L159-L164 | train |
h2oai/h2o-2 | src/main/java/water/score/ScoreModel.java | ScoreModel.xml2jname | protected static String xml2jname( String xml ) {
// Convert pname to a valid java name
StringBuilder nn = new StringBuilder();
char[] cs = xml.toCharArray();
if( !Character.isJavaIdentifierStart(cs[0]) )
nn.append('X');
for( char c : cs ) {
if( !Character.isJavaIdentifierPart(c) ) {
nn.append('_');
} else {
nn.append(c);
}
}
String jname = nn.toString();
return jname;
} | java | protected static String xml2jname( String xml ) {
// Convert pname to a valid java name
StringBuilder nn = new StringBuilder();
char[] cs = xml.toCharArray();
if( !Character.isJavaIdentifierStart(cs[0]) )
nn.append('X');
for( char c : cs ) {
if( !Character.isJavaIdentifierPart(c) ) {
nn.append('_');
} else {
nn.append(c);
}
}
String jname = nn.toString();
return jname;
} | [
"protected",
"static",
"String",
"xml2jname",
"(",
"String",
"xml",
")",
"{",
"// Convert pname to a valid java name",
"StringBuilder",
"nn",
"=",
"new",
"StringBuilder",
"(",
")",
";",
"char",
"[",
"]",
"cs",
"=",
"xml",
".",
"toCharArray",
"(",
")",
";",
"... | Convert an XML name to a java name | [
"Convert",
"an",
"XML",
"name",
"to",
"a",
"java",
"name"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/score/ScoreModel.java#L22-L37 | train |
h2oai/h2o-2 | src/main/java/water/score/ScoreModel.java | ScoreModel.uniqueClassName | protected static String uniqueClassName(String name) {
// Make a unique class name
String cname = xml2jname(name);
if( CLASS_NAMES.contains(cname) ) {
int i=0;
while( CLASS_NAMES.contains(cname+i) ) i++;
cname = cname+i;
}
CLASS_NAMES.add(cname);
return cname;
} | java | protected static String uniqueClassName(String name) {
// Make a unique class name
String cname = xml2jname(name);
if( CLASS_NAMES.contains(cname) ) {
int i=0;
while( CLASS_NAMES.contains(cname+i) ) i++;
cname = cname+i;
}
CLASS_NAMES.add(cname);
return cname;
} | [
"protected",
"static",
"String",
"uniqueClassName",
"(",
"String",
"name",
")",
"{",
"// Make a unique class name",
"String",
"cname",
"=",
"xml2jname",
"(",
"name",
")",
";",
"if",
"(",
"CLASS_NAMES",
".",
"contains",
"(",
"cname",
")",
")",
"{",
"int",
"i"... | Make a unique class name for jit'd subclasses of ScoreModel | [
"Make",
"a",
"unique",
"class",
"name",
"for",
"jit",
"d",
"subclasses",
"of",
"ScoreModel"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/score/ScoreModel.java#L44-L54 | train |
h2oai/h2o-2 | src/main/java/water/score/ScoreModel.java | ScoreModel.columnMapping | public int[] columnMapping( String[] features ) {
int[] map = new int[_colNames.length];
for( int i=0; i<_colNames.length; i++ ) {
map[i] = -1; // Assume it is missing
for( int j=0; j<features.length; j++ ) {
if( _colNames[i].equals(features[j]) ) {
if( map[i] != -1 ) throw new IllegalArgumentException("duplicate feature "+_colNames[i]);
map[i] = j;
}
}
if( map[i] == -1 ) Log.warn(Sys.SCORM,"Model feature "+_colNames[i]+" not in the provided feature list from the data");
}
return map;
} | java | public int[] columnMapping( String[] features ) {
int[] map = new int[_colNames.length];
for( int i=0; i<_colNames.length; i++ ) {
map[i] = -1; // Assume it is missing
for( int j=0; j<features.length; j++ ) {
if( _colNames[i].equals(features[j]) ) {
if( map[i] != -1 ) throw new IllegalArgumentException("duplicate feature "+_colNames[i]);
map[i] = j;
}
}
if( map[i] == -1 ) Log.warn(Sys.SCORM,"Model feature "+_colNames[i]+" not in the provided feature list from the data");
}
return map;
} | [
"public",
"int",
"[",
"]",
"columnMapping",
"(",
"String",
"[",
"]",
"features",
")",
"{",
"int",
"[",
"]",
"map",
"=",
"new",
"int",
"[",
"_colNames",
".",
"length",
"]",
";",
"for",
"(",
"int",
"i",
"=",
"0",
";",
"i",
"<",
"_colNames",
".",
... | needs, then this map will contain a -1 for the missing feature index. | [
"needs",
"then",
"this",
"map",
"will",
"contain",
"a",
"-",
"1",
"for",
"the",
"missing",
"feature",
"index",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/score/ScoreModel.java#L61-L74 | train |
h2oai/h2o-2 | src/main/java/water/Value.java | Value.setHdfs | public void setHdfs() {
assert onICE();
byte[] mem = memOrLoad(); // Get into stable memory
_persist = Value.HDFS|Value.NOTdsk;
Persist.I[Value.HDFS].store(this);
removeIce(); // Remove from ICE disk
assert onHDFS(); // Flip to HDFS
_mem = mem; // Close a race with the H2O cleaner zapping _mem while removing from ice
} | java | public void setHdfs() {
assert onICE();
byte[] mem = memOrLoad(); // Get into stable memory
_persist = Value.HDFS|Value.NOTdsk;
Persist.I[Value.HDFS].store(this);
removeIce(); // Remove from ICE disk
assert onHDFS(); // Flip to HDFS
_mem = mem; // Close a race with the H2O cleaner zapping _mem while removing from ice
} | [
"public",
"void",
"setHdfs",
"(",
")",
"{",
"assert",
"onICE",
"(",
")",
";",
"byte",
"[",
"]",
"mem",
"=",
"memOrLoad",
"(",
")",
";",
"// Get into stable memory",
"_persist",
"=",
"Value",
".",
"HDFS",
"|",
"Value",
".",
"NOTdsk",
";",
"Persist",
"."... | Set persistence to HDFS from ICE | [
"Set",
"persistence",
"to",
"HDFS",
"from",
"ICE"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Value.java#L211-L219 | train |
h2oai/h2o-2 | src/main/java/water/Value.java | Value.openStream | public InputStream openStream(ProgressMonitor p) throws IOException {
if(onNFS() ) return PersistNFS .openStream(_key );
if(onHDFS()) return PersistHdfs.openStream(_key,p);
if(onS3() ) return PersistS3 .openStream(_key,p);
if(onTachyon()) return PersistTachyon.openStream(_key,p);
if( isFrame() ) throw new IllegalArgumentException("Tried to pass a Frame to openStream (maybe tried to parse a (already-parsed) Frame?)");
assert _type==TypeMap.PRIM_B : "Expected byte[] type but got "+TypeMap.className(_type);
return new ByteArrayInputStream(memOrLoad());
} | java | public InputStream openStream(ProgressMonitor p) throws IOException {
if(onNFS() ) return PersistNFS .openStream(_key );
if(onHDFS()) return PersistHdfs.openStream(_key,p);
if(onS3() ) return PersistS3 .openStream(_key,p);
if(onTachyon()) return PersistTachyon.openStream(_key,p);
if( isFrame() ) throw new IllegalArgumentException("Tried to pass a Frame to openStream (maybe tried to parse a (already-parsed) Frame?)");
assert _type==TypeMap.PRIM_B : "Expected byte[] type but got "+TypeMap.className(_type);
return new ByteArrayInputStream(memOrLoad());
} | [
"public",
"InputStream",
"openStream",
"(",
"ProgressMonitor",
"p",
")",
"throws",
"IOException",
"{",
"if",
"(",
"onNFS",
"(",
")",
")",
"return",
"PersistNFS",
".",
"openStream",
"(",
"_key",
")",
";",
"if",
"(",
"onHDFS",
"(",
")",
")",
"return",
"Per... | Creates a Stream for reading bytes | [
"Creates",
"a",
"Stream",
"for",
"reading",
"bytes"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Value.java#L281-L289 | train |
h2oai/h2o-2 | src/main/java/water/Value.java | Value.lowerActiveGetCount | void lowerActiveGetCount( H2ONode h2o ) {
assert _key.home(); // Only the HOME node for a key tracks replicas
assert h2o != H2O.SELF;// Do not track self as a replica
while( true ) { // Repeat, in case racing GETs are bumping the counter
int old = _rwlock.get(); // Read the lock-word
assert old > 0; // Since lowering, must be at least 1
assert old != -1; // Not write-locked, because we are an active reader
assert _replicas.contains(h2o._unique_idx); // Self-bit is set
if( RW_CAS(old,old-1,"rlock-") ) {
if( old-1 == 0 ) // GET count fell to zero?
synchronized( this ) { notifyAll(); } // Notify any pending blocked PUTs
return; // Repeat until count is lowered
}
}
} | java | void lowerActiveGetCount( H2ONode h2o ) {
assert _key.home(); // Only the HOME node for a key tracks replicas
assert h2o != H2O.SELF;// Do not track self as a replica
while( true ) { // Repeat, in case racing GETs are bumping the counter
int old = _rwlock.get(); // Read the lock-word
assert old > 0; // Since lowering, must be at least 1
assert old != -1; // Not write-locked, because we are an active reader
assert _replicas.contains(h2o._unique_idx); // Self-bit is set
if( RW_CAS(old,old-1,"rlock-") ) {
if( old-1 == 0 ) // GET count fell to zero?
synchronized( this ) { notifyAll(); } // Notify any pending blocked PUTs
return; // Repeat until count is lowered
}
}
} | [
"void",
"lowerActiveGetCount",
"(",
"H2ONode",
"h2o",
")",
"{",
"assert",
"_key",
".",
"home",
"(",
")",
";",
"// Only the HOME node for a key tracks replicas",
"assert",
"h2o",
"!=",
"H2O",
".",
"SELF",
";",
"// Do not track self as a replica",
"while",
"(",
"true"... | Atomically lower active GET count | [
"Atomically",
"lower",
"active",
"GET",
"count"
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Value.java#L502-L516 | train |
h2oai/h2o-2 | src/main/java/water/Value.java | Value.startRemotePut | void startRemotePut() {
assert !_key.home();
int x = 0;
// assert I am waiting on threads with higher priority?
while( (x=_rwlock.get()) != -1 ) // Spin until rwlock==-1
if( x == 1 || RW_CAS(0,1,"remote_need_notify") )
try { ForkJoinPool.managedBlock(this); } catch( InterruptedException e ) { }
} | java | void startRemotePut() {
assert !_key.home();
int x = 0;
// assert I am waiting on threads with higher priority?
while( (x=_rwlock.get()) != -1 ) // Spin until rwlock==-1
if( x == 1 || RW_CAS(0,1,"remote_need_notify") )
try { ForkJoinPool.managedBlock(this); } catch( InterruptedException e ) { }
} | [
"void",
"startRemotePut",
"(",
")",
"{",
"assert",
"!",
"_key",
".",
"home",
"(",
")",
";",
"int",
"x",
"=",
"0",
";",
"// assert I am waiting on threads with higher priority?",
"while",
"(",
"(",
"x",
"=",
"_rwlock",
".",
"get",
"(",
")",
")",
"!=",
"-"... | Block this thread until all prior remote PUTs complete - to force
remote-PUT ordering on the home node. | [
"Block",
"this",
"thread",
"until",
"all",
"prior",
"remote",
"PUTs",
"complete",
"-",
"to",
"force",
"remote",
"-",
"PUT",
"ordering",
"on",
"the",
"home",
"node",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/Value.java#L564-L571 | train |
h2oai/h2o-2 | src/main/java/water/util/RString.java | RString.clear | public void clear() {
for( Placeholder p : _placeholders.values() ) {
p.start.removeTill(p.end);
}
} | java | public void clear() {
for( Placeholder p : _placeholders.values() ) {
p.start.removeTill(p.end);
}
} | [
"public",
"void",
"clear",
"(",
")",
"{",
"for",
"(",
"Placeholder",
"p",
":",
"_placeholders",
".",
"values",
"(",
")",
")",
"{",
"p",
".",
"start",
".",
"removeTill",
"(",
"p",
".",
"end",
")",
";",
"}",
"}"
] | they can be used again. | [
"they",
"can",
"be",
"used",
"again",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/util/RString.java#L273-L277 | train |
h2oai/h2o-2 | src/main/java/water/util/RString.java | RString.replace | public void replace(String what, Object with) {
if (what.charAt(0)=='$')
throw new RuntimeException("$ is now control char that denotes URL encoding!");
for (Placeholder p : _placeholders.get(what))
p.end.insertAndAdvance(with.toString());
for (Placeholder p : _placeholders.get("$"+what))
try {
p.end.insertAndAdvance(URLEncoder.encode(with.toString(),"UTF-8"));
} catch (IOException e) {
p.end.insertAndAdvance(e.toString());
}
} | java | public void replace(String what, Object with) {
if (what.charAt(0)=='$')
throw new RuntimeException("$ is now control char that denotes URL encoding!");
for (Placeholder p : _placeholders.get(what))
p.end.insertAndAdvance(with.toString());
for (Placeholder p : _placeholders.get("$"+what))
try {
p.end.insertAndAdvance(URLEncoder.encode(with.toString(),"UTF-8"));
} catch (IOException e) {
p.end.insertAndAdvance(e.toString());
}
} | [
"public",
"void",
"replace",
"(",
"String",
"what",
",",
"Object",
"with",
")",
"{",
"if",
"(",
"what",
".",
"charAt",
"(",
"0",
")",
"==",
"'",
"'",
")",
"throw",
"new",
"RuntimeException",
"(",
"\"$ is now control char that denotes URL encoding!\"",
")",
"... | another in order. | [
"another",
"in",
"order",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/util/RString.java#L304-L316 | train |
h2oai/h2o-2 | src/main/java/water/util/RString.java | RString.restartGroup | public RString restartGroup(String what) {
List<Placeholder> all = _placeholders.get(what);
assert all.size() == 1;
Placeholder result = all.get(0);
if( result.group == null ) {
throw new NoSuchElementException("Element " + what + " is not a group.");
}
result.group.clear();
return result.group;
} | java | public RString restartGroup(String what) {
List<Placeholder> all = _placeholders.get(what);
assert all.size() == 1;
Placeholder result = all.get(0);
if( result.group == null ) {
throw new NoSuchElementException("Element " + what + " is not a group.");
}
result.group.clear();
return result.group;
} | [
"public",
"RString",
"restartGroup",
"(",
"String",
"what",
")",
"{",
"List",
"<",
"Placeholder",
">",
"all",
"=",
"_placeholders",
".",
"get",
"(",
"what",
")",
";",
"assert",
"all",
".",
"size",
"(",
")",
"==",
"1",
";",
"Placeholder",
"result",
"=",... | can be filled again. | [
"can",
"be",
"filled",
"again",
"."
] | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | https://github.com/h2oai/h2o-2/blob/be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1/src/main/java/water/util/RString.java#L320-L330 | train |
belaban/JGroups | src/org/jgroups/protocols/AUTH.java | AUTH.handleAuthHeader | protected boolean handleAuthHeader(GMS.GmsHeader gms_hdr, AuthHeader auth_hdr, Message msg) {
if(needsAuthentication(gms_hdr)) {
if(this.auth_token.authenticate(auth_hdr.getToken(), msg))
return true; // authentication passed, send message up the stack
else {
log.warn("%s: failed to validate AuthHeader (token: %s) from %s; dropping message and sending " +
"rejection message",
local_addr, auth_token.getClass().getSimpleName(), msg.src());
sendRejectionMessage(gms_hdr.getType(), msg.getSrc(), "authentication failed");
return false;
}
}
return true;
} | java | protected boolean handleAuthHeader(GMS.GmsHeader gms_hdr, AuthHeader auth_hdr, Message msg) {
if(needsAuthentication(gms_hdr)) {
if(this.auth_token.authenticate(auth_hdr.getToken(), msg))
return true; // authentication passed, send message up the stack
else {
log.warn("%s: failed to validate AuthHeader (token: %s) from %s; dropping message and sending " +
"rejection message",
local_addr, auth_token.getClass().getSimpleName(), msg.src());
sendRejectionMessage(gms_hdr.getType(), msg.getSrc(), "authentication failed");
return false;
}
}
return true;
} | [
"protected",
"boolean",
"handleAuthHeader",
"(",
"GMS",
".",
"GmsHeader",
"gms_hdr",
",",
"AuthHeader",
"auth_hdr",
",",
"Message",
"msg",
")",
"{",
"if",
"(",
"needsAuthentication",
"(",
"gms_hdr",
")",
")",
"{",
"if",
"(",
"this",
".",
"auth_token",
".",
... | Handles a GMS header
@param gms_hdr
@param msg
@return true if the message should be passed up, or else false | [
"Handles",
"a",
"GMS",
"header"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/protocols/AUTH.java#L214-L227 | train |
belaban/JGroups | src/org/jgroups/util/TimeScheduler3.java | TimeScheduler3.stop | public synchronized void stop() {
Thread tmp=runner;
runner=null;
if(tmp != null) {
tmp.interrupt();
try {tmp.join(500);} catch(InterruptedException e) {}
}
// we may need to do multiple iterations as the iterator works on a copy and tasks might have been added just
// after the iterator() call returned
while(!queue.isEmpty())
for(Task entry: queue) {
entry.cancel(true);
queue.remove(entry);
}
queue.clear();
if(pool instanceof ThreadPoolExecutor && shut_down_pool) {
ThreadPoolExecutor p=(ThreadPoolExecutor)pool;
List<Runnable> remaining_tasks=p.shutdownNow();
remaining_tasks.stream().filter(task -> task instanceof Future).forEach(task -> ((Future)task).cancel(true));
p.getQueue().clear();
try {
p.awaitTermination(Global.THREADPOOL_SHUTDOWN_WAIT_TIME, TimeUnit.MILLISECONDS);
}
catch(InterruptedException e) {
}
}
// clears the threads list (https://issues.jboss.org/browse/JGRP-1971)
if(timer_thread_factory instanceof LazyThreadFactory)
((LazyThreadFactory)timer_thread_factory).destroy();
} | java | public synchronized void stop() {
Thread tmp=runner;
runner=null;
if(tmp != null) {
tmp.interrupt();
try {tmp.join(500);} catch(InterruptedException e) {}
}
// we may need to do multiple iterations as the iterator works on a copy and tasks might have been added just
// after the iterator() call returned
while(!queue.isEmpty())
for(Task entry: queue) {
entry.cancel(true);
queue.remove(entry);
}
queue.clear();
if(pool instanceof ThreadPoolExecutor && shut_down_pool) {
ThreadPoolExecutor p=(ThreadPoolExecutor)pool;
List<Runnable> remaining_tasks=p.shutdownNow();
remaining_tasks.stream().filter(task -> task instanceof Future).forEach(task -> ((Future)task).cancel(true));
p.getQueue().clear();
try {
p.awaitTermination(Global.THREADPOOL_SHUTDOWN_WAIT_TIME, TimeUnit.MILLISECONDS);
}
catch(InterruptedException e) {
}
}
// clears the threads list (https://issues.jboss.org/browse/JGRP-1971)
if(timer_thread_factory instanceof LazyThreadFactory)
((LazyThreadFactory)timer_thread_factory).destroy();
} | [
"public",
"synchronized",
"void",
"stop",
"(",
")",
"{",
"Thread",
"tmp",
"=",
"runner",
";",
"runner",
"=",
"null",
";",
"if",
"(",
"tmp",
"!=",
"null",
")",
"{",
"tmp",
".",
"interrupt",
"(",
")",
";",
"try",
"{",
"tmp",
".",
"join",
"(",
"500"... | Stops the timer, cancelling all tasks | [
"Stops",
"the",
"timer",
"cancelling",
"all",
"tasks"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/util/TimeScheduler3.java#L158-L190 | train |
belaban/JGroups | src/org/jgroups/blocks/cs/NioBaseServer.java | NioBaseServer.printBuffers | @ManagedOperation(description="Prints the send and receive buffers")
public String printBuffers() {
StringBuilder sb=new StringBuilder("\n");
synchronized(this) {
for(Map.Entry<Address,Connection> entry: conns.entrySet()) {
NioConnection val=(NioConnection)entry.getValue();
sb.append(entry.getKey()).append(":\n ").append("recv_buf: ").append(val.recv_buf)
.append("\n send_buf: ").append(val.send_buf).append("\n");
}
}
return sb.toString();
} | java | @ManagedOperation(description="Prints the send and receive buffers")
public String printBuffers() {
StringBuilder sb=new StringBuilder("\n");
synchronized(this) {
for(Map.Entry<Address,Connection> entry: conns.entrySet()) {
NioConnection val=(NioConnection)entry.getValue();
sb.append(entry.getKey()).append(":\n ").append("recv_buf: ").append(val.recv_buf)
.append("\n send_buf: ").append(val.send_buf).append("\n");
}
}
return sb.toString();
} | [
"@",
"ManagedOperation",
"(",
"description",
"=",
"\"Prints the send and receive buffers\"",
")",
"public",
"String",
"printBuffers",
"(",
")",
"{",
"StringBuilder",
"sb",
"=",
"new",
"StringBuilder",
"(",
"\"\\n\"",
")",
";",
"synchronized",
"(",
"this",
")",
"{"... | Prints send and receive buffers for all connections | [
"Prints",
"send",
"and",
"receive",
"buffers",
"for",
"all",
"connections"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/blocks/cs/NioBaseServer.java#L76-L87 | train |
belaban/JGroups | src/org/jgroups/util/RingBufferSeqno.java | RingBufferSeqno.stable | public void stable(long seqno) {
lock.lock();
try {
if(seqno <= low)
return;
if(seqno > hd)
throw new IllegalArgumentException("seqno " + seqno + " cannot be bigger than hd (" + hd + ")");
int from=index(low+1), length=(int)(seqno - low), capacity=capacity();
for(int i=from; i < from+length; i++) {
int index=i & (capacity - 1);
buf[index]=null;
}
// Releases some of the blocked adders
if(seqno > low) {
low=seqno;
buffer_full.signalAll();
}
}
finally {
lock.unlock();
}
} | java | public void stable(long seqno) {
lock.lock();
try {
if(seqno <= low)
return;
if(seqno > hd)
throw new IllegalArgumentException("seqno " + seqno + " cannot be bigger than hd (" + hd + ")");
int from=index(low+1), length=(int)(seqno - low), capacity=capacity();
for(int i=from; i < from+length; i++) {
int index=i & (capacity - 1);
buf[index]=null;
}
// Releases some of the blocked adders
if(seqno > low) {
low=seqno;
buffer_full.signalAll();
}
}
finally {
lock.unlock();
}
} | [
"public",
"void",
"stable",
"(",
"long",
"seqno",
")",
"{",
"lock",
".",
"lock",
"(",
")",
";",
"try",
"{",
"if",
"(",
"seqno",
"<=",
"low",
")",
"return",
";",
"if",
"(",
"seqno",
">",
"hd",
")",
"throw",
"new",
"IllegalArgumentException",
"(",
"\... | Nulls elements between low and seqno and forwards low | [
"Nulls",
"elements",
"between",
"low",
"and",
"seqno",
"and",
"forwards",
"low"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/util/RingBufferSeqno.java#L278-L301 | train |
belaban/JGroups | src/org/jgroups/demos/wb/GraphPanel.java | GraphPanel.adjustNodes | public void adjustNodes(java.util.List<Address> v) {
Node n;
boolean removed=false;
synchronized(nodes) {
for(int i=0; i < nodes.size(); i++) {
n=nodes.get(i);
if(!v.contains(n.addr)) {
System.out.println("adjustNodes(): node " + n + " was removed");
nodes.remove(n);
removed=true;
}
}
if(removed)
repaint();
}
} | java | public void adjustNodes(java.util.List<Address> v) {
Node n;
boolean removed=false;
synchronized(nodes) {
for(int i=0; i < nodes.size(); i++) {
n=nodes.get(i);
if(!v.contains(n.addr)) {
System.out.println("adjustNodes(): node " + n + " was removed");
nodes.remove(n);
removed=true;
}
}
if(removed)
repaint();
}
} | [
"public",
"void",
"adjustNodes",
"(",
"java",
".",
"util",
".",
"List",
"<",
"Address",
">",
"v",
")",
"{",
"Node",
"n",
";",
"boolean",
"removed",
"=",
"false",
";",
"synchronized",
"(",
"nodes",
")",
"{",
"for",
"(",
"int",
"i",
"=",
"0",
";",
... | Removes nodes that are not in the view | [
"Removes",
"nodes",
"that",
"are",
"not",
"in",
"the",
"view"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/demos/wb/GraphPanel.java#L113-L129 | train |
belaban/JGroups | src/org/jgroups/demos/Chat.java | Chat.start | public void start(JChannel ch) throws Exception {
channel=ch;
channel.setReceiver(this);
channel.connect("ChatCluster");
eventLoop();
channel.close();
} | java | public void start(JChannel ch) throws Exception {
channel=ch;
channel.setReceiver(this);
channel.connect("ChatCluster");
eventLoop();
channel.close();
} | [
"public",
"void",
"start",
"(",
"JChannel",
"ch",
")",
"throws",
"Exception",
"{",
"channel",
"=",
"ch",
";",
"channel",
".",
"setReceiver",
"(",
"this",
")",
";",
"channel",
".",
"connect",
"(",
"\"ChatCluster\"",
")",
";",
"eventLoop",
"(",
")",
";",
... | Method called from other app, injecting channel | [
"Method",
"called",
"from",
"other",
"app",
"injecting",
"channel"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/demos/Chat.java#L24-L30 | train |
belaban/JGroups | src/org/jgroups/util/ResponseCollector.java | ResponseCollector.waitForAllResponses | public boolean waitForAllResponses(long timeout) {
if(timeout <= 0)
timeout=2000L;
return cond.waitFor(this::hasAllResponses, timeout, TimeUnit.MILLISECONDS);
} | java | public boolean waitForAllResponses(long timeout) {
if(timeout <= 0)
timeout=2000L;
return cond.waitFor(this::hasAllResponses, timeout, TimeUnit.MILLISECONDS);
} | [
"public",
"boolean",
"waitForAllResponses",
"(",
"long",
"timeout",
")",
"{",
"if",
"(",
"timeout",
"<=",
"0",
")",
"timeout",
"=",
"2000L",
";",
"return",
"cond",
".",
"waitFor",
"(",
"this",
"::",
"hasAllResponses",
",",
"timeout",
",",
"TimeUnit",
".",
... | Waits until all responses have been received, or until a timeout has elapsed.
@param timeout Number of milliseconds to wait max. This value needs to be greater than 0, or else
it will be adjusted to 2000
@return boolean True if all responses have been received within timeout ms, else false (e.g. if interrupted) | [
"Waits",
"until",
"all",
"responses",
"have",
"been",
"received",
"or",
"until",
"a",
"timeout",
"has",
"elapsed",
"."
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/util/ResponseCollector.java#L160-L164 | train |
belaban/JGroups | src/org/jgroups/util/ForwardQueue.java | ForwardQueue.deliveryTableSize | public int deliveryTableSize() {
int retval=0;
for(BoundedHashMap<Long,Long> val: delivery_table.values())
retval+=val.size();
return retval;
} | java | public int deliveryTableSize() {
int retval=0;
for(BoundedHashMap<Long,Long> val: delivery_table.values())
retval+=val.size();
return retval;
} | [
"public",
"int",
"deliveryTableSize",
"(",
")",
"{",
"int",
"retval",
"=",
"0",
";",
"for",
"(",
"BoundedHashMap",
"<",
"Long",
",",
"Long",
">",
"val",
":",
"delivery_table",
".",
"values",
"(",
")",
")",
"retval",
"+=",
"val",
".",
"size",
"(",
")"... | Total size of all queues of the delivery table | [
"Total",
"size",
"of",
"all",
"queues",
"of",
"the",
"delivery",
"table"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/util/ForwardQueue.java#L81-L86 | train |
belaban/JGroups | src/org/jgroups/util/ForwardQueue.java | ForwardQueue.canDeliver | protected boolean canDeliver(Address sender, long seqno) {
BoundedHashMap<Long,Long> seqno_set=delivery_table.get(sender);
if(seqno_set == null) {
seqno_set=new BoundedHashMap<>(delivery_table_max_size);
BoundedHashMap<Long,Long> existing=delivery_table.put(sender,seqno_set);
if(existing != null)
seqno_set=existing;
}
return seqno_set.add(seqno, seqno);
} | java | protected boolean canDeliver(Address sender, long seqno) {
BoundedHashMap<Long,Long> seqno_set=delivery_table.get(sender);
if(seqno_set == null) {
seqno_set=new BoundedHashMap<>(delivery_table_max_size);
BoundedHashMap<Long,Long> existing=delivery_table.put(sender,seqno_set);
if(existing != null)
seqno_set=existing;
}
return seqno_set.add(seqno, seqno);
} | [
"protected",
"boolean",
"canDeliver",
"(",
"Address",
"sender",
",",
"long",
"seqno",
")",
"{",
"BoundedHashMap",
"<",
"Long",
",",
"Long",
">",
"seqno_set",
"=",
"delivery_table",
".",
"get",
"(",
"sender",
")",
";",
"if",
"(",
"seqno_set",
"==",
"null",
... | Checks if seqno has already been received from sender. This weeds out duplicates.
Note that this method is never called concurrently for the same sender. | [
"Checks",
"if",
"seqno",
"has",
"already",
"been",
"received",
"from",
"sender",
".",
"This",
"weeds",
"out",
"duplicates",
".",
"Note",
"that",
"this",
"method",
"is",
"never",
"called",
"concurrently",
"for",
"the",
"same",
"sender",
"."
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/util/ForwardQueue.java#L236-L245 | train |
belaban/JGroups | src/org/jgroups/auth/Krb5TokenUtils.java | Krb5TokenUtils.generateSecuritySubject | public static Subject generateSecuritySubject(String jassLoginConfig, String username, String password) throws LoginException {
LoginContext loginCtx = null;
try {
// "Client" references the JAAS configuration in the jaas.conf file.
loginCtx = new LoginContext(jassLoginConfig, new Krb5TokenUtils.LoginCallbackHandler(username, password));
loginCtx.login();
log.debug(" : Krb5Token Kerberos login succeeded against user: %s", username);
return loginCtx.getSubject();
}
catch(LoginException e) {
log.debug(" : Krb5Token Kerberos login failed against user: %s", username);
throw e;
}
} | java | public static Subject generateSecuritySubject(String jassLoginConfig, String username, String password) throws LoginException {
LoginContext loginCtx = null;
try {
// "Client" references the JAAS configuration in the jaas.conf file.
loginCtx = new LoginContext(jassLoginConfig, new Krb5TokenUtils.LoginCallbackHandler(username, password));
loginCtx.login();
log.debug(" : Krb5Token Kerberos login succeeded against user: %s", username);
return loginCtx.getSubject();
}
catch(LoginException e) {
log.debug(" : Krb5Token Kerberos login failed against user: %s", username);
throw e;
}
} | [
"public",
"static",
"Subject",
"generateSecuritySubject",
"(",
"String",
"jassLoginConfig",
",",
"String",
"username",
",",
"String",
"password",
")",
"throws",
"LoginException",
"{",
"LoginContext",
"loginCtx",
"=",
"null",
";",
"try",
"{",
"// \"Client\" references ... | Authenticate against the KDC using JAAS. | [
"Authenticate",
"against",
"the",
"KDC",
"using",
"JAAS",
"."
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/auth/Krb5TokenUtils.java#L42-L60 | train |
belaban/JGroups | src/org/jgroups/auth/Krb5TokenUtils.java | Krb5TokenUtils.initiateSecurityContext | public static byte[] initiateSecurityContext(Subject subject, String servicePrincipalName) throws GSSException {
GSSManager manager = GSSManager.getInstance();
GSSName serverName = manager.createName(servicePrincipalName, GSSName.NT_HOSTBASED_SERVICE);
final GSSContext context = manager.createContext(serverName, krb5Oid, null, GSSContext.DEFAULT_LIFETIME);
// The GSS context initiation has to be performed as a privileged action.
return Subject.doAs(subject,
(PrivilegedAction<byte[]>)() -> {
try {
byte[] token = new byte[0];
// This is a one pass context initialization.
context.requestMutualAuth(false);
context.requestCredDeleg(false);
return context.initSecContext(token, 0,
token.length);
} catch (GSSException e) {
log.error(Util.getMessage("Krb5TokenKerberosContextProcessingException"),e);
return null;
}
});
} | java | public static byte[] initiateSecurityContext(Subject subject, String servicePrincipalName) throws GSSException {
GSSManager manager = GSSManager.getInstance();
GSSName serverName = manager.createName(servicePrincipalName, GSSName.NT_HOSTBASED_SERVICE);
final GSSContext context = manager.createContext(serverName, krb5Oid, null, GSSContext.DEFAULT_LIFETIME);
// The GSS context initiation has to be performed as a privileged action.
return Subject.doAs(subject,
(PrivilegedAction<byte[]>)() -> {
try {
byte[] token = new byte[0];
// This is a one pass context initialization.
context.requestMutualAuth(false);
context.requestCredDeleg(false);
return context.initSecContext(token, 0,
token.length);
} catch (GSSException e) {
log.error(Util.getMessage("Krb5TokenKerberosContextProcessingException"),e);
return null;
}
});
} | [
"public",
"static",
"byte",
"[",
"]",
"initiateSecurityContext",
"(",
"Subject",
"subject",
",",
"String",
"servicePrincipalName",
")",
"throws",
"GSSException",
"{",
"GSSManager",
"manager",
"=",
"GSSManager",
".",
"getInstance",
"(",
")",
";",
"GSSName",
"server... | Generate the service ticket that will be passed to the cluster master for authentication | [
"Generate",
"the",
"service",
"ticket",
"that",
"will",
"be",
"passed",
"to",
"the",
"cluster",
"master",
"for",
"authentication"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/auth/Krb5TokenUtils.java#L63-L84 | train |
belaban/JGroups | src/org/jgroups/auth/Krb5TokenUtils.java | Krb5TokenUtils.validateSecurityContext | public static String validateSecurityContext(Subject subject, final byte[] serviceTicket) throws GSSException {
// Accept the context and return the client principal name.
return Subject.doAs(subject, (PrivilegedAction<String>)() -> {
try {
// Identify the server that communications are being made
// to.
GSSManager manager = GSSManager.getInstance();
GSSContext context = manager.createContext((GSSCredential) null);
context.acceptSecContext(serviceTicket, 0, serviceTicket.length);
return context.getSrcName().toString();
} catch (Exception e) {
log.error(Util.getMessage("Krb5TokenKerberosContextProcessingException"),e);
return null;
}
});
} | java | public static String validateSecurityContext(Subject subject, final byte[] serviceTicket) throws GSSException {
// Accept the context and return the client principal name.
return Subject.doAs(subject, (PrivilegedAction<String>)() -> {
try {
// Identify the server that communications are being made
// to.
GSSManager manager = GSSManager.getInstance();
GSSContext context = manager.createContext((GSSCredential) null);
context.acceptSecContext(serviceTicket, 0, serviceTicket.length);
return context.getSrcName().toString();
} catch (Exception e) {
log.error(Util.getMessage("Krb5TokenKerberosContextProcessingException"),e);
return null;
}
});
} | [
"public",
"static",
"String",
"validateSecurityContext",
"(",
"Subject",
"subject",
",",
"final",
"byte",
"[",
"]",
"serviceTicket",
")",
"throws",
"GSSException",
"{",
"// Accept the context and return the client principal name.",
"return",
"Subject",
".",
"doAs",
"(",
... | Validate the service ticket by extracting the client principal name | [
"Validate",
"the",
"service",
"ticket",
"by",
"extracting",
"the",
"client",
"principal",
"name"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/auth/Krb5TokenUtils.java#L87-L103 | train |
belaban/JGroups | src/org/jgroups/ViewId.java | ViewId.compareTo | public int compareTo(ViewId other) {
return id > other.id ? 1 : id < other.id ? -1 : creator.compareTo(other.creator);
} | java | public int compareTo(ViewId other) {
return id > other.id ? 1 : id < other.id ? -1 : creator.compareTo(other.creator);
} | [
"public",
"int",
"compareTo",
"(",
"ViewId",
"other",
")",
"{",
"return",
"id",
">",
"other",
".",
"id",
"?",
"1",
":",
"id",
"<",
"other",
".",
"id",
"?",
"-",
"1",
":",
"creator",
".",
"compareTo",
"(",
"other",
".",
"creator",
")",
";",
"}"
] | Establishes an order between 2 ViewIds. The comparison is done on the IDs, if they are equal, we use the creator.
@return 0 for equality, value less than 0 if smaller, greater than 0 if greater. | [
"Establishes",
"an",
"order",
"between",
"2",
"ViewIds",
".",
"The",
"comparison",
"is",
"done",
"on",
"the",
"IDs",
"if",
"they",
"are",
"equal",
"we",
"use",
"the",
"creator",
"."
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/ViewId.java#L86-L88 | train |
belaban/JGroups | src/org/jgroups/protocols/pbcast/ViewHandler.java | ViewHandler.processing | public <T extends ViewHandler<R>> T processing(boolean flag) {
lock.lock();
try {
setProcessing(flag);
return (T)this;
}
finally {
lock.unlock();
}
} | java | public <T extends ViewHandler<R>> T processing(boolean flag) {
lock.lock();
try {
setProcessing(flag);
return (T)this;
}
finally {
lock.unlock();
}
} | [
"public",
"<",
"T",
"extends",
"ViewHandler",
"<",
"R",
">",
">",
"T",
"processing",
"(",
"boolean",
"flag",
")",
"{",
"lock",
".",
"lock",
"(",
")",
";",
"try",
"{",
"setProcessing",
"(",
"flag",
")",
";",
"return",
"(",
"T",
")",
"this",
";",
"... | To be used by testing only! | [
"To",
"be",
"used",
"by",
"testing",
"only!"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/protocols/pbcast/ViewHandler.java#L139-L148 | train |
belaban/JGroups | src/org/jgroups/protocols/pbcast/ViewHandler.java | ViewHandler.process | protected void process(Collection<R> requests) {
        // Drains the request queue until it is provably empty. Per the docstring, we are
        // guaranteed that only one thread runs this method at a time.
        for(;;) {
            // Phase 1: drain without holding the lock, so request producers are not blocked.
            while(!requests.isEmpty()) {
                removeAndProcess(requests); // remove matching requests and process them
            }
            // Phase 2: re-check emptiness under the lock. A request added between the last
            // isEmpty() check and lock acquisition would otherwise go unprocessed, so the
            // processing flag is cleared (and the method exits) only if still empty.
            lock.lock();
            try {
                if(requests.isEmpty()) {
                    setProcessing(false);
                    return;
                }
            }
            finally {
                lock.unlock();
            }
            // Queue became non-empty again: loop around and drain once more.
        }
    } | java | protected void process(Collection<R> requests) {
for(;;) {
while(!requests.isEmpty()) {
removeAndProcess(requests); // remove matching requests and process them
}
lock.lock();
try {
if(requests.isEmpty()) {
setProcessing(false);
return;
}
}
finally {
lock.unlock();
}
}
} | [
"protected",
"void",
"process",
"(",
"Collection",
"<",
"R",
">",
"requests",
")",
"{",
"for",
"(",
";",
";",
")",
"{",
"while",
"(",
"!",
"requests",
".",
"isEmpty",
"(",
")",
")",
"{",
"removeAndProcess",
"(",
"requests",
")",
";",
"// remove matchin... | We're guaranteed that only one thread will be called with this method at any time | [
"We",
"re",
"guaranteed",
"that",
"only",
"one",
"thread",
"will",
"be",
"called",
"with",
"this",
"method",
"at",
"any",
"time"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/protocols/pbcast/ViewHandler.java#L241-L257 | train |
belaban/JGroups | src/org/jgroups/util/Promise.java | Promise.setResult | public void setResult(T obj) {
        // Publishes the result under the lock and notifies threads waiting for it
        // (waiters block on cond via hasResult; see _getResultWithTimeout).
        lock.lock();
        try {
            // Store the value before flipping the flag, then wake the waiters.
            result=obj;
            hasResult=true;
            cond.signal(true); // signal(true): presumably wakes all waiters — confirm against Condition impl
        }
        finally {
            lock.unlock();
        }
    } | java | public void setResult(T obj) {
lock.lock();
try {
result=obj;
hasResult=true;
cond.signal(true);
}
finally {
lock.unlock();
}
} | [
"public",
"void",
"setResult",
"(",
"T",
"obj",
")",
"{",
"lock",
".",
"lock",
"(",
")",
";",
"try",
"{",
"result",
"=",
"obj",
";",
"hasResult",
"=",
"true",
";",
"cond",
".",
"signal",
"(",
"true",
")",
";",
"}",
"finally",
"{",
"lock",
".",
... | Sets the result and notifies any threads waiting for it | [
"Sets",
"the",
"result",
"and",
"notifies",
"any",
"threads",
"waiting",
"for",
"it"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/util/Promise.java#L96-L106 | train |
belaban/JGroups | src/org/jgroups/util/Promise.java | Promise._getResultWithTimeout | protected T _getResultWithTimeout(final long timeout) throws TimeoutException {
if(timeout <= 0)
cond.waitFor(this::hasResult);
else if(!cond.waitFor(this::hasResult, timeout, TimeUnit.MILLISECONDS))
throw new TimeoutException();
return result;
} | java | protected T _getResultWithTimeout(final long timeout) throws TimeoutException {
if(timeout <= 0)
cond.waitFor(this::hasResult);
else if(!cond.waitFor(this::hasResult, timeout, TimeUnit.MILLISECONDS))
throw new TimeoutException();
return result;
} | [
"protected",
"T",
"_getResultWithTimeout",
"(",
"final",
"long",
"timeout",
")",
"throws",
"TimeoutException",
"{",
"if",
"(",
"timeout",
"<=",
"0",
")",
"cond",
".",
"waitFor",
"(",
"this",
"::",
"hasResult",
")",
";",
"else",
"if",
"(",
"!",
"cond",
".... | Blocks until a result is available, or timeout milliseconds have elapsed. Needs to be called with lock held
@param timeout in ms
@return An object
@throws TimeoutException If a timeout occurred (implies that timeout > 0) | [
"Blocks",
"until",
"a",
"result",
"is",
"available",
"or",
"timeout",
"milliseconds",
"have",
"elapsed",
".",
"Needs",
"to",
"be",
"called",
"with",
"lock",
"held"
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/util/Promise.java#L143-L149 | train |
belaban/JGroups | src/org/jgroups/protocols/pbcast/ServerGmsImpl.java | ServerGmsImpl.handleViewChange | public void handleViewChange(View view, Digest digest) {
if(gms.isLeaving() && !view.containsMember(gms.local_addr))
return;
View prev_view=gms.view();
gms.installView(view, digest);
Address prev_coord=prev_view != null? prev_view.getCoord() : null, curr_coord=view.getCoord();
if(!Objects.equals(curr_coord, prev_coord))
coordChanged(prev_coord, curr_coord);
} | java | public void handleViewChange(View view, Digest digest) {
if(gms.isLeaving() && !view.containsMember(gms.local_addr))
return;
View prev_view=gms.view();
gms.installView(view, digest);
Address prev_coord=prev_view != null? prev_view.getCoord() : null, curr_coord=view.getCoord();
if(!Objects.equals(curr_coord, prev_coord))
coordChanged(prev_coord, curr_coord);
} | [
"public",
"void",
"handleViewChange",
"(",
"View",
"view",
",",
"Digest",
"digest",
")",
"{",
"if",
"(",
"gms",
".",
"isLeaving",
"(",
")",
"&&",
"!",
"view",
".",
"containsMember",
"(",
"gms",
".",
"local_addr",
")",
")",
"return",
";",
"View",
"prev_... | Called by the GMS when a VIEW is received.
@param view The view to be installed
@param digest If view is a MergeView, the digest contains the seqnos of all members and has to be set by GMS | [
"Called",
"by",
"the",
"GMS",
"when",
"a",
"VIEW",
"is",
"received",
"."
] | bd3ca786aa57fed41dfbc10a94b1281e388be03b | https://github.com/belaban/JGroups/blob/bd3ca786aa57fed41dfbc10a94b1281e388be03b/src/org/jgroups/protocols/pbcast/ServerGmsImpl.java#L63-L71 | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.