method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Updates the headers of a persisted MMS PDU row from the fields of a SendReq.
 *
 * Waits for any in-flight cache update on the URI to finish, purges the cached
 * entry, then writes the SendReq's content type, date, delivery/read report
 * flags, expiry, message class, priority, transaction id, subject, size,
 * address fields and thread id into the backing content provider row.
 *
 * @param uri the PDU row which needs to be updated
 * @param sendReq the new header values
 */
public void updateHeaders(Uri uri, SendReq sendReq) {
    synchronized(PDU_CACHE_INSTANCE) {
        // If the cache item is getting updated, wait until it's done updating before
        // purging it. Loop rather than test once: Object.wait() may wake spuriously,
        // and another update may have started by the time we are notified.
        while (PDU_CACHE_INSTANCE.isUpdating(uri)) {
            if (LOCAL_LOGV) Log.v(TAG, "updateHeaders: " + uri + " blocked by isUpdating()");
            try {
                PDU_CACHE_INSTANCE.wait();
            } catch (InterruptedException e) {
                Log.e(TAG, "updateHeaders: ", e);
                // Preserve the interrupt for callers and stop waiting, so an
                // interrupted thread cannot spin on an immediately-throwing wait().
                Thread.currentThread().interrupt();
                break;
            }
        }
    }
    PDU_CACHE_INSTANCE.purge(uri);

    ContentValues values = new ContentValues(10);

    byte[] contentType = sendReq.getContentType();
    if (contentType != null) {
        values.put("ct_t", toIsoString(contentType));
    }
    long date = sendReq.getDate();
    if (date != -1) {
        values.put("date", date);
    }
    int deliveryReport = sendReq.getDeliveryReport();
    if (deliveryReport != 0) {
        values.put("d_rpt", deliveryReport);
    }
    long expiry = sendReq.getExpiry();
    if (expiry != -1) {
        values.put("exp", expiry);
    }
    byte[] msgClass = sendReq.getMessageClass();
    if (msgClass != null) {
        values.put("m_cls", toIsoString(msgClass));
    }
    int priority = sendReq.getPriority();
    if (priority != 0) {
        values.put("pri", priority);
    }
    int readReport = sendReq.getReadReport();
    if (readReport != 0) {
        values.put("rr", readReport);
    }
    byte[] transId = sendReq.getTransactionId();
    if (transId != null) {
        values.put("tr_id", toIsoString(transId));
    }
    EncodedStringValue subject = sendReq.getSubject();
    if (subject != null) {
        values.put("sub", toIsoString(subject.getTextString()));
        values.put("sub_cs", subject.getCharacterSet());
    } else {
        // An absent subject is stored as an empty string, not NULL.
        values.put("sub", "");
    }
    long messageSize = sendReq.getMessageSize();
    if (messageSize > 0) {
        values.put("m_size", messageSize);
    }

    PduHeaders headers = sendReq.getPduHeaders();
    HashSet<String> recipients = new HashSet<String>();
    for (int addrType : ADDRESS_FIELDS) {
        EncodedStringValue[] array = null;
        if (addrType == PduHeaders.FROM) {
            // FROM is a single value; wrap it so it can share the array path below.
            EncodedStringValue v = headers.getEncodedStringValue(addrType);
            if (v != null) {
                array = new EncodedStringValue[1];
                array[0] = v;
            }
        } else {
            array = headers.getEncodedStringValues(addrType);
        }
        if (array != null) {
            long msgId = ContentUris.parseId(uri);
            updateAddress(msgId, addrType, array);
            if (addrType == PduHeaders.TO) {
                for (EncodedStringValue v : array) {
                    if (v != null) {
                        recipients.add(v.getString());
                    }
                }
            }
        }
    }
    if (!recipients.isEmpty()) {
        long threadId = Utils.getOrCreateThreadId(mContext, recipients);
        values.put("thread_id", threadId);
    }

    SqliteWrapper.update(mContext, mContentResolver, uri, values, null, null);
}
|
void function(Uri uri, SendReq sendReq) { synchronized(PDU_CACHE_INSTANCE) { if (PDU_CACHE_INSTANCE.isUpdating(uri)) { if (LOCAL_LOGV) Log.v(TAG, STR + uri + STR); try { PDU_CACHE_INSTANCE.wait(); } catch (InterruptedException e) { Log.e(TAG, STR, e); } } } PDU_CACHE_INSTANCE.purge(uri); ContentValues values = new ContentValues(10); byte[] contentType = sendReq.getContentType(); if (contentType != null) { values.put("ct_t", toIsoString(contentType)); } long date = sendReq.getDate(); if (date != -1) { values.put("date", date); } int deliveryReport = sendReq.getDeliveryReport(); if (deliveryReport != 0) { values.put("d_rpt", deliveryReport); } long expiry = sendReq.getExpiry(); if (expiry != -1) { values.put("exp", expiry); } byte[] msgClass = sendReq.getMessageClass(); if (msgClass != null) { values.put("m_cls", toIsoString(msgClass)); } int priority = sendReq.getPriority(); if (priority != 0) { values.put("pri", priority); } int readReport = sendReq.getReadReport(); if (readReport != 0) { values.put("rr", readReport); } byte[] transId = sendReq.getTransactionId(); if (transId != null) { values.put("tr_id", toIsoString(transId)); } EncodedStringValue subject = sendReq.getSubject(); if (subject != null) { values.put("sub", toIsoString(subject.getTextString())); values.put(STR, subject.getCharacterSet()); } else { values.put("sub", STRm_sizeSTRthread_id", threadId); } SqliteWrapper.update(mContext, mContentResolver, uri, values, null, null); }
|
/**
* Update headers of a SendReq.
*
* @param uri The PDU which need to be updated.
* @param pdu New headers.
* @throws MmsException Bad URI or updating failed.
*/
|
Update headers of a SendReq
|
updateHeaders
|
{
"repo_name": "BennoGAP/notification-forwarder",
"path": "SMS/src/main/java/com/google/android/mms/pdu_alt/PduPersister.java",
"license": "gpl-3.0",
"size": 63944
}
|
[
"android.content.ContentValues",
"android.net.Uri",
"android.util.Log",
"com.google.android.mms.util_alt.SqliteWrapper"
] |
import android.content.ContentValues; import android.net.Uri; import android.util.Log; import com.google.android.mms.util_alt.SqliteWrapper;
|
import android.content.*; import android.net.*; import android.util.*; import com.google.android.mms.util_alt.*;
|
[
"android.content",
"android.net",
"android.util",
"com.google.android"
] |
android.content; android.net; android.util; com.google.android;
| 2,874,238
|
/**
 * Returns the bundle context of this bundle.
 *
 * @return the bundle context; may be {@code null} — presumably assigned when
 *         the bundle is started, TODO confirm where {@code context} is set
 */
public static BundleContext getContext() {
    return context;
}
|
static BundleContext function() { return context; }
|
/**
* Returns the bundle context of this bundle
*
* @return the bundle context
*/
|
Returns the bundle context of this bundle
|
getContext
|
{
"repo_name": "paolodenti/openhab",
"path": "bundles/binding/org.openhab.binding.xpl/src/main/java/org/openhab/binding/xpl/internal/XplActivator.java",
"license": "epl-1.0",
"size": 1430
}
|
[
"org.osgi.framework.BundleContext"
] |
import org.osgi.framework.BundleContext;
|
import org.osgi.framework.*;
|
[
"org.osgi.framework"
] |
org.osgi.framework;
| 770,254
|
/**
 * Strategy method to process an exchange in the batch; derived classes may
 * override it to perform custom processing before or after an individual
 * exchange is processed. Delegates to the wrapped processor and invokes
 * {@code postProcess(exchange)} from the asynchronous completion callback.
 *
 * @param exchange the exchange to process
 */
protected void processExchange(Exchange exchange) {
    processor.process(exchange, sync -> postProcess(exchange));
}
|
void function(Exchange exchange) { processor.process(exchange, sync -> postProcess(exchange)); }
|
/**
* Strategy Method to process an exchange in the batch. This method allows derived classes to perform
* custom processing before or after an individual exchange is processed
*/
|
Strategy Method to process an exchange in the batch. This method allows derived classes to perform custom processing before or after an individual exchange is processed
|
processExchange
|
{
"repo_name": "punkhorn/camel-upstream",
"path": "core/camel-core/src/main/java/org/apache/camel/processor/Resequencer.java",
"license": "apache-2.0",
"size": 19183
}
|
[
"org.apache.camel.Exchange"
] |
import org.apache.camel.Exchange;
|
import org.apache.camel.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 99,918
|
/**
 * Sets the callback called when an "enter" event is occurring, registering it
 * with the shared property handler under this annotation plugin's id.
 *
 * @param enterCallback the callback called when an "enter" event is occurring;
 *        presumably {@code null} unsets the callback — confirm with the
 *        property handler's contract
 */
public final void setEnterCallback(EnterCallback enterCallback) {
    ENTER_PROPERTY_HANDLER.setCallback(this, AnnotationPlugin.ID, enterCallback, enterCallbackProxy.getProxy());
}
|
final void function(EnterCallback enterCallback) { ENTER_PROPERTY_HANDLER.setCallback(this, AnnotationPlugin.ID, enterCallback, enterCallbackProxy.getProxy()); }
|
/**
* Sets the callback called when a "enter" event is occurring.
*
* @param enterCallback the callback called when a "enter" event is occurring
*/
|
Sets the callback called when a "enter" event is occurring
|
setEnterCallback
|
{
"repo_name": "pepstock-org/Charba",
"path": "src/org/pepstock/charba/client/annotation/AbstractAnnotation.java",
"license": "apache-2.0",
"size": 41191
}
|
[
"org.pepstock.charba.client.annotation.listeners.EnterCallback"
] |
import org.pepstock.charba.client.annotation.listeners.EnterCallback;
|
import org.pepstock.charba.client.annotation.listeners.*;
|
[
"org.pepstock.charba"
] |
org.pepstock.charba;
| 1,030,180
|
/**
 * Returns the list of all widgets used on this widget dialog, lazily creating
 * an empty list on first access. The list contains objects of type
 * {@link CmsWidgetDialogParameter}.<p>
 *
 * @return the list of all widgets used on this widget dialog
 */
protected List<CmsWidgetDialogParameter> getWidgets() {
    List<CmsWidgetDialogParameter> widgets = m_widgets;
    if (widgets == null) {
        // First access: create the backing list and remember it.
        widgets = new ArrayList<CmsWidgetDialogParameter>();
        m_widgets = widgets;
    }
    return widgets;
}
|
List<CmsWidgetDialogParameter> function() { if (m_widgets == null) { m_widgets = new ArrayList<CmsWidgetDialogParameter>(); } return m_widgets; }
|
/**
* Returns the list of all widgets used on this widget dialog, the
* List must contain Objects of type <code>{@link CmsWidgetDialogParameter}</code>.<p>
*
* @return the list of all widgets used on this widget dialog
*/
|
Returns the list of all widgets used on this widget dialog, the List must contain Objects of type <code><code>CmsWidgetDialogParameter</code></code>
|
getWidgets
|
{
"repo_name": "mediaworx/opencms-core",
"path": "src/org/opencms/workplace/CmsWidgetDialog.java",
"license": "lgpl-2.1",
"size": 59561
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,277,051
|
/**
 * Registers one or more {@link CallableProcessingInterceptor}s without explicit
 * keys; each key is derived from the interceptor's class name and hash code.
 *
 * @param interceptors one or more interceptors to register; must not be null
 */
public void registerCallableInterceptors(CallableProcessingInterceptor... interceptors) {
    Assert.notNull(interceptors, "A CallableProcessingInterceptor is required");
    for (int i = 0; i < interceptors.length; i++) {
        CallableProcessingInterceptor interceptor = interceptors[i];
        // Key format: "<fully.qualified.ClassName>:<hashCode>".
        String key = interceptor.getClass().getName() + ":" + interceptor.hashCode();
        this.callableInterceptors.put(key, interceptor);
    }
}
|
void function(CallableProcessingInterceptor... interceptors) { Assert.notNull(interceptors, STR); for (CallableProcessingInterceptor interceptor : interceptors) { String key = interceptor.getClass().getName() + ":" + interceptor.hashCode(); this.callableInterceptors.put(key, interceptor); } }
|
/**
* Register a {@link CallableProcessingInterceptor} without a key.
* The key is derived from the class name and hashcode.
* @param interceptors one or more interceptors to register
*/
|
Register a <code>CallableProcessingInterceptor</code> without a key. The key is derived from the class name and hashcode
|
registerCallableInterceptors
|
{
"repo_name": "bbossgroups/bbossgroups-3.5",
"path": "bboss-mvc/src/org/frameworkset/web/request/async/WebAsyncManager.java",
"license": "apache-2.0",
"size": 16510
}
|
[
"org.frameworkset.util.Assert"
] |
import org.frameworkset.util.Assert;
|
import org.frameworkset.util.*;
|
[
"org.frameworkset.util"
] |
org.frameworkset.util;
| 1,619,177
|
/**
 * Encodes the type parameters of a Tuple type in the same way as a parameter
 * list for runtime, emitting a JS array literal through the generator. Walks
 * the tuple's linked structure (element type at type argument 1, tail at type
 * argument 2) until the empty tail or a type parameter is reached.
 *
 * @param resolveTargs whether to resolve type arguments when emitting names
 * @param node the AST node providing unit/type context
 * @param _tuple the tuple type to encode; advanced in place while walking
 * @param nameAndMetatype if true, wrap each entry in an object carrying a
 *        synthetic name ("p2", "p3", ...) and metatype information
 * @param gen the JS generator receiving the output
 */
private static void encodeTupleAsParameterListForRuntime(final boolean resolveTargs, final Node node,
        Type _tuple, boolean nameAndMetatype, GenerateJsVisitor gen) {
    gen.out("[");
    int pos = 1;
    final Type empty = node.getUnit().getEmptyType();
    // Iterate until we reach the empty tail or an unresolvable type parameter.
    while (_tuple != null && !(_tuple.isSubtypeOf(empty) || _tuple.isTypeParameter())) {
        if (pos > 1) gen.out(",");
        pos++;
        if (nameAndMetatype) {
            // Emit a synthetic parameter name plus metatype wrapper for this entry.
            gen.out("{", MetamodelGenerator.KEY_NAME, ":'p", Integer.toString(pos), "',");
            gen.out(MetamodelGenerator.KEY_METATYPE, ":'", MetamodelGenerator.METATYPE_PARAMETER, "',");
            gen.out(MetamodelGenerator.KEY_TYPE, ":");
        }
        if (isTuple(_tuple)) {
            if (_tuple.isUnion()) {
                //Handle union types for defaulted parameters
                for (Type mt : _tuple.getCaseTypes()) {
                    if (mt.isTuple()) {
                        metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(),
                                mt.getTypeArgumentList().get(1), null, gen);
                        _tuple = mt.getTypeArgumentList().get(2);
                        break;
                    }
                }
                if (nameAndMetatype) {
                    gen.out(",", MetamodelGenerator.KEY_DEFAULT,":1");
                }
            } else {
                // Type argument 1 is the element type, argument 2 is the tuple's tail.
                metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(),
                        _tuple.getTypeArgumentList().get(1), null, gen);
                _tuple = _tuple.getTypeArgumentList().get(2);
            }
        } else if (isSequential(_tuple)) {
            // seq distinguishes nonempty Sequence (2) from possibly-empty Sequential (1).
            Type _t2 = _tuple.getSupertype(node.getUnit().getSequenceDeclaration());
            final int seq;
            if (_t2 == null) {
                _t2 = _tuple.getSupertype(node.getUnit().getSequentialDeclaration());
                seq = 1;
            } else {
                seq = 2;
            }
            //Handle Sequence, for nonempty variadic parameters
            if (nameAndMetatype) {
                metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(),
                        _t2.getTypeArgumentList().get(0), null, gen);
                gen.out(",seq:", Integer.toString(seq));
            } else {
                gen.out(gen.getClAlias(), "mkseq$(");
                metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(),
                        _t2.getTypeArgumentList().get(0), null, gen);
                gen.out(",", Integer.toString(seq), ")");
            }
            // A sequential element is always the last entry; stop the walk.
            _tuple = null;
        } else if (_tuple.isUnion()) {
            metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(), _tuple, null, gen);
            _tuple=null;
        } else {
            // NOTE(review): _tuple is not advanced in this branch, so it relies on the
            // pos > 100 guard below to terminate — presumably a defensive escape hatch
            // for unexpected type shapes; confirm whether this case is reachable.
            gen.out("\n");
            if (pos > 100) {
                break;
            }
        }
        if (nameAndMetatype) {
            gen.out("}");
        }
    }
    gen.out("]");
}
|
static void function(final boolean resolveTargs, final Node node, Type _tuple, boolean nameAndMetatype, GenerateJsVisitor gen) { gen.out("["); int pos = 1; final Type empty = node.getUnit().getEmptyType(); while (_tuple != null && !(_tuple.isSubtypeOf(empty) _tuple.isTypeParameter())) { if (pos > 1) gen.out(","); pos++; if (nameAndMetatype) { gen.out("{", MetamodelGenerator.KEY_NAME, ":'p", Integer.toString(pos), "',"); gen.out(MetamodelGenerator.KEY_METATYPE, ":'", MetamodelGenerator.METATYPE_PARAMETER, "',"); gen.out(MetamodelGenerator.KEY_TYPE, ":"); } if (isTuple(_tuple)) { if (_tuple.isUnion()) { for (Type mt : _tuple.getCaseTypes()) { if (mt.isTuple()) { metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(), mt.getTypeArgumentList().get(1), null, gen); _tuple = mt.getTypeArgumentList().get(2); break; } } if (nameAndMetatype) { gen.out(",", MetamodelGenerator.KEY_DEFAULT,":1"); } } else { metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(), _tuple.getTypeArgumentList().get(1), null, gen); _tuple = _tuple.getTypeArgumentList().get(2); } } else if (isSequential(_tuple)) { Type _t2 = _tuple.getSupertype(node.getUnit().getSequenceDeclaration()); final int seq; if (_t2 == null) { _t2 = _tuple.getSupertype(node.getUnit().getSequentialDeclaration()); seq = 1; } else { seq = 2; } if (nameAndMetatype) { metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(), _t2.getTypeArgumentList().get(0), null, gen); gen.out(",seq:", Integer.toString(seq)); } else { gen.out(gen.getClAlias(), STR); metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(), _t2.getTypeArgumentList().get(0), null, gen); gen.out(",", Integer.toString(seq), ")"); } _tuple = null; } else if (_tuple.isUnion()) { metamodelTypeNameOrList(resolveTargs, node, gen.getCurrentPackage(), _tuple, null, gen); _tuple=null; } else { gen.out("\n"); if (pos > 100) { break; } } if (nameAndMetatype) { gen.out("}"); } } gen.out("]"); }
|
/** This method encodes the type parameters of a Tuple in the same way
* as a parameter list for runtime. */
|
This method encodes the type parameters of a Tuple in the same way
|
encodeTupleAsParameterListForRuntime
|
{
"repo_name": "ceylon/ceylon-js",
"path": "src/main/java/com/redhat/ceylon/compiler/js/util/TypeUtils.java",
"license": "apache-2.0",
"size": 70510
}
|
[
"com.redhat.ceylon.compiler.js.GenerateJsVisitor",
"com.redhat.ceylon.compiler.js.loader.MetamodelGenerator",
"com.redhat.ceylon.compiler.typechecker.tree.Node",
"com.redhat.ceylon.model.typechecker.model.Type"
] |
import com.redhat.ceylon.compiler.js.GenerateJsVisitor; import com.redhat.ceylon.compiler.js.loader.MetamodelGenerator; import com.redhat.ceylon.compiler.typechecker.tree.Node; import com.redhat.ceylon.model.typechecker.model.Type;
|
import com.redhat.ceylon.compiler.js.*; import com.redhat.ceylon.compiler.js.loader.*; import com.redhat.ceylon.compiler.typechecker.tree.*; import com.redhat.ceylon.model.typechecker.model.*;
|
[
"com.redhat.ceylon"
] |
com.redhat.ceylon;
| 1,659,980
|
/**
 * Removes a value of property ServiceEndpoint given as an instance of Thing,
 * delegating to the RDFReactor runtime to remove it from the model.
 *
 * @param value the value to be removed
 */
public void removeServiceEndpoint(Thing value) {
    Base.remove(this.model, this.getResource(), SERVICEENDPOINT, value);
}
|
void function(Thing value) { Base.remove(this.model, this.getResource(), SERVICEENDPOINT, value); }
|
/**
* Removes a value of property ServiceEndpoint given as an instance of Thing
*
* @param value
* the value to be removed
*
* [Generated from RDFReactor template rule #remove4dynamic]
*/
|
Removes a value of property ServiceEndpoint given as an instance of Thing
|
removeServiceEndpoint
|
{
"repo_name": "m0ep/master-thesis",
"path": "source/apis/rdf2go/rdf2go-sioc-services/src/main/java/org/rdfs/sioc/services/Service.java",
"license": "mit",
"size": 80965
}
|
[
"org.ontoware.rdfreactor.runtime.Base"
] |
import org.ontoware.rdfreactor.runtime.Base;
|
import org.ontoware.rdfreactor.runtime.*;
|
[
"org.ontoware.rdfreactor"
] |
org.ontoware.rdfreactor;
| 2,349,279
|
/**
 * Creates a new {@link GeoDistanceSortBuilder} from the sort definition held
 * by the {@link XContentParser}. The parser state is advanced as a side effect.
 *
 * @param parser the input parser, positioned inside the sort object
 * @param elementName the field name preceding the object in some sort syntax
 *        variations (NOTE(review): not referenced in this body — confirm
 *        whether it is intentionally unused)
 * @return the configured sort builder
 * @throws IOException if reading from the parser fails
 * @throws ParsingException if the sort definition is malformed
 */
public static GeoDistanceSortBuilder fromXContent(XContentParser parser, String elementName) throws IOException {
    // Defaults used when the corresponding field is absent from the request.
    String fieldName = null;
    List<GeoPoint> geoPoints = new ArrayList<>();
    DistanceUnit unit = DistanceUnit.DEFAULT;
    GeoDistance geoDistance = GeoDistance.ARC;
    SortOrder order = SortOrder.ASC;
    SortMode sortMode = null;
    QueryBuilder nestedFilter = null;
    String nestedPath = null;
    GeoValidationMethod validation = null;
    XContentParser.Token token;
    String currentName = parser.currentName();
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentName = parser.currentName();
        } else if (token == XContentParser.Token.START_ARRAY) {
            // An array value holds the geo points for the sort field.
            parseGeoPoints(parser, geoPoints);
            fieldName = currentName;
        } else if (token == XContentParser.Token.START_OBJECT) {
            if (NESTED_FILTER_FIELD.match(currentName)) {
                nestedFilter = parseInnerQueryBuilder(parser);
            } else {
                // the json in the format of -> field : { lat : 30, lon : 12 }
                if (fieldName != null && fieldName.equals(currentName) == false) {
                    throw new ParsingException(
                        parser.getTokenLocation(),
                        "Trying to reset fieldName to [{}], already set to [{}].",
                        currentName,
                        fieldName);
                }
                fieldName = currentName;
                GeoPoint point = new GeoPoint();
                GeoUtils.parseGeoPoint(parser, point);
                geoPoints.add(point);
            }
        } else if (token.isValue()) {
            if (ORDER_FIELD.match(currentName)) {
                order = SortOrder.fromString(parser.text());
            } else if (UNIT_FIELD.match(currentName)) {
                unit = DistanceUnit.fromString(parser.text());
            } else if (DISTANCE_TYPE_FIELD.match(currentName)) {
                geoDistance = GeoDistance.fromString(parser.text());
            } else if (VALIDATION_METHOD_FIELD.match(currentName)) {
                validation = GeoValidationMethod.fromString(parser.text());
            } else if (SORTMODE_FIELD.match(currentName)) {
                sortMode = SortMode.fromString(parser.text());
            } else if (NESTED_PATH_FIELD.match(currentName)) {
                nestedPath = parser.text();
            } else if (token == Token.VALUE_STRING){
                // A bare string value is treated as a single geo point for the field.
                if (fieldName != null && fieldName.equals(currentName) == false) {
                    throw new ParsingException(
                        parser.getTokenLocation(),
                        "Trying to reset fieldName to [{}], already set to [{}].",
                        currentName,
                        fieldName);
                }
                GeoPoint point = new GeoPoint();
                point.resetFromString(parser.text());
                geoPoints.add(point);
                fieldName = currentName;
            } else if (fieldName.equals(currentName)){
                // NOTE(review): fieldName may still be null here, which would raise an
                // NPE instead of this ParsingException — confirm whether that state is reachable.
                throw new ParsingException(
                    parser.getTokenLocation(),
                    "Only geohashes of type string supported for field [{}]",
                    currentName);
            } else {
                throw new ParsingException(
                    parser.getTokenLocation(),
                    "[{}] does not support [{}]",
                    NAME, currentName
                );
            }
        }
    }
    // Assemble the builder; null checks preserve the builder's own defaults.
    GeoDistanceSortBuilder result = new GeoDistanceSortBuilder(fieldName, geoPoints.toArray(new GeoPoint[geoPoints.size()]));
    result.geoDistance(geoDistance);
    result.unit(unit);
    result.order(order);
    if (sortMode != null) {
        result.sortMode(sortMode);
    }
    if (nestedFilter != null) {
        result.setNestedFilter(nestedFilter);
    }
    result.setNestedPath(nestedPath);
    if (validation != null) {
        result.validation(validation);
    }
    return result;
}
|
static GeoDistanceSortBuilder function(XContentParser parser, String elementName) throws IOException { String fieldName = null; List<GeoPoint> geoPoints = new ArrayList<>(); DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.ARC; SortOrder order = SortOrder.ASC; SortMode sortMode = null; QueryBuilder nestedFilter = null; String nestedPath = null; GeoValidationMethod validation = null; XContentParser.Token token; String currentName = parser.currentName(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { parseGeoPoints(parser, geoPoints); fieldName = currentName; } else if (token == XContentParser.Token.START_OBJECT) { if (NESTED_FILTER_FIELD.match(currentName)) { nestedFilter = parseInnerQueryBuilder(parser); } else { if (fieldName != null && fieldName.equals(currentName) == false) { throw new ParsingException( parser.getTokenLocation(), STR, currentName, fieldName); } fieldName = currentName; GeoPoint point = new GeoPoint(); GeoUtils.parseGeoPoint(parser, point); geoPoints.add(point); } } else if (token.isValue()) { if (ORDER_FIELD.match(currentName)) { order = SortOrder.fromString(parser.text()); } else if (UNIT_FIELD.match(currentName)) { unit = DistanceUnit.fromString(parser.text()); } else if (DISTANCE_TYPE_FIELD.match(currentName)) { geoDistance = GeoDistance.fromString(parser.text()); } else if (VALIDATION_METHOD_FIELD.match(currentName)) { validation = GeoValidationMethod.fromString(parser.text()); } else if (SORTMODE_FIELD.match(currentName)) { sortMode = SortMode.fromString(parser.text()); } else if (NESTED_PATH_FIELD.match(currentName)) { nestedPath = parser.text(); } else if (token == Token.VALUE_STRING){ if (fieldName != null && fieldName.equals(currentName) == false) { throw new ParsingException( parser.getTokenLocation(), STR, currentName, fieldName); } 
GeoPoint point = new GeoPoint(); point.resetFromString(parser.text()); geoPoints.add(point); fieldName = currentName; } else if (fieldName.equals(currentName)){ throw new ParsingException( parser.getTokenLocation(), STR, currentName); } else { throw new ParsingException( parser.getTokenLocation(), STR, NAME, currentName ); } } } GeoDistanceSortBuilder result = new GeoDistanceSortBuilder(fieldName, geoPoints.toArray(new GeoPoint[geoPoints.size()])); result.geoDistance(geoDistance); result.unit(unit); result.order(order); if (sortMode != null) { result.sortMode(sortMode); } if (nestedFilter != null) { result.setNestedFilter(nestedFilter); } result.setNestedPath(nestedPath); if (validation != null) { result.validation(validation); } return result; }
|
/**
* Creates a new {@link GeoDistanceSortBuilder} from the query held by the {@link XContentParser} in
* {@link org.elasticsearch.common.xcontent.XContent} format.
*
* @param parser the input parser. The state on the parser contained in this context will be changed as a
* side effect of this method call
* @param elementName in some sort syntax variations the field name precedes the xContent object that specifies
* further parameters, e.g. in '{ "foo": { "order" : "asc"} }'. When parsing the inner object,
* the field name can be passed in via this argument
*/
|
Creates a new <code>GeoDistanceSortBuilder</code> from the query held by the <code>XContentParser</code> in <code>org.elasticsearch.common.xcontent.XContent</code> format
|
fromXContent
|
{
"repo_name": "naveenhooda2000/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java",
"license": "apache-2.0",
"size": 24791
}
|
[
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"org.elasticsearch.common.ParsingException",
"org.elasticsearch.common.geo.GeoDistance",
"org.elasticsearch.common.geo.GeoPoint",
"org.elasticsearch.common.geo.GeoUtils",
"org.elasticsearch.common.unit.DistanceUnit",
"org.elasticsearch.common.xcontent.XContentParser",
"org.elasticsearch.index.query.AbstractQueryBuilder",
"org.elasticsearch.index.query.GeoValidationMethod",
"org.elasticsearch.index.query.QueryBuilder"
] |
import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryBuilder;
|
import java.io.*; import java.util.*; import org.elasticsearch.common.*; import org.elasticsearch.common.geo.*; import org.elasticsearch.common.unit.*; import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.query.*;
|
[
"java.io",
"java.util",
"org.elasticsearch.common",
"org.elasticsearch.index"
] |
java.io; java.util; org.elasticsearch.common; org.elasticsearch.index;
| 2,309,290
|
/**
 * Sets the party tag.
 *
 * @param tag the tag of the party; may be {@code null}
 */
void setTag(@Nullable String tag);
|
void setTag(@Nullable String tag);
|
/**
* Set the party tag
*
* @param tag The tag of the party
*/
|
Set the party tag
|
setTag
|
{
"repo_name": "AlessioDP/Parties",
"path": "api/src/main/java/com/alessiodp/parties/api/interfaces/Party.java",
"license": "agpl-3.0",
"size": 8749
}
|
[
"org.jetbrains.annotations.Nullable"
] |
import org.jetbrains.annotations.Nullable;
|
import org.jetbrains.annotations.*;
|
[
"org.jetbrains.annotations"
] |
org.jetbrains.annotations;
| 2,916,501
|
/**
 * Called upon block activation (right click on the block): consumes one slice
 * of the cheese block for the interacting player.
 *
 * @return true — NOTE(review): presumably signals that the interaction was
 *         handled; confirm against the Block API contract
 */
@Override
public boolean onBlockActivated(World par1World, int par2, int par3, int par4, EntityPlayer par5EntityPlayer, int par6, float par7, float par8, float par9)
{
    this.eatCakeSlice(par1World, par2, par3, par4, par5EntityPlayer);
    return true;
}
|
boolean function(World par1World, int par2, int par3, int par4, EntityPlayer par5EntityPlayer, int par6, float par7, float par8, float par9) { this.eatCakeSlice(par1World, par2, par3, par4, par5EntityPlayer); return true; }
|
/**
* Called upon block activation (right click on the block.)
*/
|
Called upon block activation (right click on the block.)
|
onBlockActivated
|
{
"repo_name": "Albloutant/Galacticraft",
"path": "common/micdoodle8/mods/galacticraft/moon/blocks/GCMoonBlockCheese.java",
"license": "lgpl-3.0",
"size": 6580
}
|
[
"net.minecraft.entity.player.EntityPlayer",
"net.minecraft.world.World"
] |
import net.minecraft.entity.player.EntityPlayer; import net.minecraft.world.World;
|
import net.minecraft.entity.player.*; import net.minecraft.world.*;
|
[
"net.minecraft.entity",
"net.minecraft.world"
] |
net.minecraft.entity; net.minecraft.world;
| 441,554
|
/**
 * Safely calls multipleInstancesDetected on all listeners in the provided
 * iterable. Any RuntimeException thrown by a listener is logged rather than
 * propagated, so one faulty listener cannot prevent the rest from being notified.
 *
 * @param listeners All listeners to be called.
 * @param sessionID the session ID
 */
public static void multipleInstancesDetected(final Iterable<OtrEngineListener> listeners, final SessionID sessionID) {
    for (final OtrEngineListener listener : listeners) {
        try {
            // Calling the listeners in order to inform of events. As a service to the
            // user we log any problems that occur while calling listeners.
            listener.multipleInstancesDetected(sessionID);
        } catch (final RuntimeException e) {
            final String message = "Faulty listener! Runtime exception thrown while calling 'multipleInstancesDetected' on listener '" + listener.getClass().getCanonicalName() + "' for session " + sessionID;
            LOGGER.log(Level.WARNING, message, e);
        }
    }
}
|
static void function(final Iterable<OtrEngineListener> listeners, final SessionID sessionID) { for (final OtrEngineListener l : listeners) { try { l.multipleInstancesDetected(sessionID); } catch (final RuntimeException e) { LOGGER.log(Level.WARNING, STR + l.getClass().getCanonicalName() + STR + sessionID, e); } } }
|
/**
* Safely call multipleInstancesDetected on all listeners in provided
* iterable.
*
* @param listeners All listeners to be called.
* @param sessionID the session ID
*/
|
Safely call multipleInstancesDetected on all listeners in provided iterable
|
multipleInstancesDetected
|
{
"repo_name": "otr4j/otr4j",
"path": "src/main/java/net/java/otr4j/api/OtrEngineListeners.java",
"license": "lgpl-3.0",
"size": 4330
}
|
[
"java.util.logging.Level"
] |
import java.util.logging.Level;
|
import java.util.logging.*;
|
[
"java.util"
] |
java.util;
| 1,305,104
|
/**
 * A convenience method to send the NNTP BODY command to the server, receive
 * the initial reply, and return the reply code.
 *
 * @return the reply code received from the server
 * @throws IOException if an I/O error occurs while sending the command or
 *         receiving the server reply
 */
public int body() throws IOException
{
    return sendCommand(NNTPCommand.BODY);
}
|
int function() throws IOException { return sendCommand(NNTPCommand.BODY); }
|
/***
* A convenience method to send the NNTP BODY command to the server,
* receive the initial reply, and return the reply code.
* <p>
* @return The reply code received from the server.
* @throws NNTPConnectionClosedException
* If the NNTP server prematurely closes the connection as a result
* of the client being idle or some other reason causing the server
* to send NNTP reply code 400. This exception may be caught either
* as an IOException or independently as itself.
* @throws IOException If an I/O error occurs while either sending the
* command or receiving the server reply.
***/
|
A convenience method to send the NNTP BODY command to the server, receive the initial reply, and return the reply code.
|
body
|
{
"repo_name": "grtlinux/KIEA_JAVA7",
"path": "KIEA_JAVA7/src/tain/kr/com/commons/net/v01/nntp/NNTP.java",
"license": "gpl-3.0",
"size": 42708
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 502,423
|
/**
 * Returns the AdHocFragments available in the given case that are eligible
 * for triggering — meaning their container (case instance or stage) is active.
 *
 * @param caseId unique id of the case
 * @return collection of available ad hoc fragments
 */
Collection<AdHocFragment> getAdHocFragmentsForCase(String caseId);
|
Collection<AdHocFragment> getAdHocFragmentsForCase(String caseId);
|
/**
* Returns list of AdHocFragments available in given case. It includes all ad hoc fragments that are
* eligible for triggering - meaning it's container is active (case instance or stage)
* @param caseId unique id of the case
*/
|
Returns list of AdHocFragments available in given case. It includes all ad hoc fragments that are eligible for triggering - meaning it's container is active (case instance or stage)
|
getAdHocFragmentsForCase
|
{
"repo_name": "DuncanDoyle/jbpm",
"path": "jbpm-case-mgmt/jbpm-case-mgmt-api/src/main/java/org/jbpm/casemgmt/api/CaseRuntimeDataService.java",
"license": "apache-2.0",
"size": 14153
}
|
[
"java.util.Collection",
"org.jbpm.casemgmt.api.model.AdHocFragment"
] |
import java.util.Collection; import org.jbpm.casemgmt.api.model.AdHocFragment;
|
import java.util.*; import org.jbpm.casemgmt.api.model.*;
|
[
"java.util",
"org.jbpm.casemgmt"
] |
java.util; org.jbpm.casemgmt;
| 1,031,447
|
/**
 * Retrieves an iterator over all of the tables in this database: the schema
 * manager's tables followed by the cached system tables.
 *
 * @return an iterator over all of the tables in this database
 */
protected final Iterator allTables() {
    // Evaluate the schema tables first, then chain the system tables behind them.
    Iterator userTables = database.schemaManager.allTablesIterator();
    Iterator systemTables = new WrapperIterator(sysTables, true);
    return new WrapperIterator(userTables, systemTables);
}
|
final Iterator function() { return new WrapperIterator(database.schemaManager.allTablesIterator(), new WrapperIterator(sysTables, true)); }
|
/**
* Retrieves an enumeration over all of the tables in this database.
* This means all user tables, views, system tables, system views,
* including temporary and text tables. <p>
*
* @return an enumeration over all of the tables in this database
*/
|
Retrieves an enumeration over all of the tables in this database. This means all user tables, views, system tables, system views, including temporary and text tables.
|
allTables
|
{
"repo_name": "minghao7896321/canyin",
"path": "hsqldb/src/org/hsqldb/DatabaseInformationMain.java",
"license": "apache-2.0",
"size": 119838
}
|
[
"org.hsqldb.lib.Iterator",
"org.hsqldb.lib.WrapperIterator"
] |
import org.hsqldb.lib.Iterator; import org.hsqldb.lib.WrapperIterator;
|
import org.hsqldb.lib.*;
|
[
"org.hsqldb.lib"
] |
org.hsqldb.lib;
| 2,241,976
|
/**
 * Checks whether the specified Opaque Binary Blob (OBB) is mounted somewhere.
 *
 * @param filename the OBB file to check
 * @return true if the OBB is currently mounted
 * @throws RemoteException if the remote call fails
 */
public boolean isObbMounted(String filename) throws RemoteException;
|
boolean function(String filename) throws RemoteException;
|
/**
* Checks whether the specified Opaque Binary Blob (OBB) is mounted
* somewhere.
*/
|
Checks whether the specified Opaque Binary Blob (OBB) is mounted somewhere
|
isObbMounted
|
{
"repo_name": "doctang/TestPlatform",
"path": "AutoTest/src/android/os/storage/IMountService.java",
"license": "apache-2.0",
"size": 54707
}
|
[
"android.os.RemoteException"
] |
import android.os.RemoteException;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 898,688
|
/**
 * Assigns the {@link AuthenticationStrategy} instance to use for proxy
 * authentication.
 *
 * @param proxyAuthStrategy the strategy to assign; stored as-is
 * @return this builder, to allow method chaining
 */
public final HttpAsyncClientBuilder setProxyAuthenticationStrategy(
        final AuthenticationStrategy proxyAuthStrategy) {
    this.proxyAuthStrategy = proxyAuthStrategy;
    return this;
}
|
final HttpAsyncClientBuilder function( final AuthenticationStrategy proxyAuthStrategy) { this.proxyAuthStrategy = proxyAuthStrategy; return this; }
|
/**
* Assigns {@link AuthenticationStrategy} instance for proxy
* authentication.
*/
|
Assigns <code>AuthenticationStrategy</code> instance for proxy authentication
|
setProxyAuthenticationStrategy
|
{
"repo_name": "UlrichColby/httpcomponents-client",
"path": "httpclient5/src/main/java/org/apache/hc/client5/http/impl/async/HttpAsyncClientBuilder.java",
"license": "apache-2.0",
"size": 40087
}
|
[
"org.apache.hc.client5.http.AuthenticationStrategy"
] |
import org.apache.hc.client5.http.AuthenticationStrategy;
|
import org.apache.hc.client5.http.*;
|
[
"org.apache.hc"
] |
org.apache.hc;
| 574,206
|
Collection<Resource<D>> instances();
|
Collection<Resource<D>> instances();
|
/**
* Returns a collection of all Resource Instances of this ResourceType.
*
* @return The resource instances of this ResourceType
*/
|
Returns a collection of all Resource Instances of this ResourceType
|
instances
|
{
"repo_name": "mikonapoli/grakn",
"path": "grakn-core/src/main/java/ai/grakn/concept/ResourceType.java",
"license": "gpl-3.0",
"size": 7241
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 680,094
|
public BigDecimal getMianpeie() {
return mianpeie;
}
|
BigDecimal function() { return mianpeie; }
|
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column policy_riskclass.mianpeie
*
* @return the value of policy_riskclass.mianpeie
*
* @mbggenerated
*/
|
This method was generated by MyBatis Generator. This method returns the value of the database column policy_riskclass.mianpeie
|
getMianpeie
|
{
"repo_name": "ThomasYangLin/DbToJson",
"path": "src/main/java/com/yanglin/model/PolicyRiskclass.java",
"license": "apache-2.0",
"size": 5636
}
|
[
"java.math.BigDecimal"
] |
import java.math.BigDecimal;
|
import java.math.*;
|
[
"java.math"
] |
java.math;
| 1,205,081
|
private String getEmailContent(BatchContext context, BatchObject batchObject, ObjectMapDetails objectDetails, Boolean lastEmail){
String str = "";
if (lastEmail) {
str = "<br><br><font class=\"trHeader\">" +
"<B>Please Note :</B></font><font class=\"trRow1\">This is the last email for FAILED Objects as maximum number " +
"of email alerts has been reached. Please contact help desk for further asistance." +
"</font>";
}
StringBuilder sb = new StringBuilder();
sb.append(" <HTML> ");
sb.append(" <HEAD> ");
sb.append(" <META NAME='GENERATOR' Content='Microsoft Visual Studio 6.0'> ");
sb.append(" <TITLE></TITLE> ");
sb.append(getDefaultStyleSheet());
sb.append(" </HEAD> ");
sb.append(" <BODY> ");
sb.append(getDefaultHeader());
sb.append(" <HR width='100%' size='1' />");
sb.append(" <TABLE width='100%' border=0> ");
sb.append(" <TR> ");
sb.append(" <TD class='blockHeader'>Batch Object " + batchObject.getObjectName() + " <font color=\"red\">FAILED.</font>" + str +"</TD>");
sb.append(" </TR> ");
sb.append(" </TABLE>");
sb.append(" <BR/> ");
sb.append(" <TABLE width='50%' border=0> ");
sb.append(" <TR> ");
sb.append(" <TD class='blockHeader'>FAILED Batch Object Details</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TD> ");
sb.append(" <TABLE width='100%' border=0 class='dTable'> ");
sb.append(" <TR> ");
sb.append(" <TD width='48%' class='trHeader'>Name</TD> ");
sb.append(" <TD width='60%' class='trHeader'>Value</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TD width='40%' class='trRow1'>Sequence# :</TD> ");
sb.append(" <TD width='60%' class='trRow1'>" + batchObject.getSequence() + "</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TD width='40%' class='trRow2'>Object Name :</TD> ");
sb.append(" <TD width='60%' class='trRow2'>" + batchObject.getObjectName() + "</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TD width='40%' class='trRow1'>Job Description :</TD> ");
sb.append(" <TD width='60%' class='trRow1'>" + batchObject.getJobDesc() + "</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TD width='40%' class='trRow2'>Job Type :</TD> ");
sb.append(" <TD width='60%' class='trRow2'>" + batchObject.getJobType() + "</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TR> ");
sb.append(" <TD width='40%' class='trRow2'>Criticality :</TD> ");
sb.append(" <TD width='60%' class='trRow2'>" + ((objectDetails == null)?"unknown":objectDetails.getEscalationLevel()) + "</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TD width='40%' class='trRow1'>Error Type:</TD> ");
sb.append(" <TD width='60%' class='trRow1'>" + batchObject.getErrorType() + "</TD> ");
sb.append(" </TR> ");
sb.append(" <TR> ");
sb.append(" <TR> ");
sb.append(" <TD width='40%' class='trRow2'>Error Description:</TD> ");
sb.append(" <TD width='60%' class='trRow2'>" + batchObject.getErrorDescription() + "</TD> ");
sb.append(" </TR> ");
sb.append(" </TABLE> ");
sb.append(" </TD> ");
sb.append(" </TR> ");
sb.append(" </TABLE> ");
sb.append(" <BR /> ");
sb.append(getDefaultBatchInfo(context));
sb.append(getDefaultFooter());
sb.append(" </BODY> ");
sb.append(" </HTML> ");
return sb.toString();
}
}
/*
* Revision Log
* -------------------------------
* $Log:: /Product_Base/Projects/Batch/Code/Java/Core/src/com/stgmastek/core/util/email/BatchObjectFailureEmailContentGenerator.java $
*
* 3 4/23/10 2:21p Kedarr
* Installation name added to subject line.
*
* 2 4/13/10 2:32p Kedarr
* Changes made to add the escalation level and for passing object details as changed in the interface.
*
* 1 4/12/10 12:27p Kedarr
* Moved from project jbeam-impl as this is now the part of the core.
*
* 7 4/07/10 4:49p Kedarr
* Updates made as per the PRE upgrade.
*
* 6 4/06/10 2:43p Kedarr
* Changes made to make use of super header and footer
*
* 5 4/06/10 2:38p Kedarr
* Changes made to make use of super header and footer
*
* 4 4/06/10 2:19p Kedarr
* Changes made to update the context batch info with the execution end datetime.
*
* 3 3/29/10 1:45p Kedarr
* Changes made to add more beautification
*
* 2 3/25/10 4:53p Kedarr
* Changes made to implement the last email
*
* 1 3/24/10 6:05p Kedarr
* Initial Version
*
* 7 3/24/10 12:47p Kedarr
* Changed the API to add batch object as a parameter
*
* 6 2/25/10 9:47a Grahesh
* Changes made for fail over.
*
* 5 2/15/10 11:38a Mandar.vaidya
* Modified the toMap method
*
* 4 1/07/10 5:37p Grahesh
* Updated Java Doc comments
*
* 3 12/18/09 12:32p Grahesh
* Updated the comments
*
* 2 12/17/09 11:46a Grahesh
* Initial Version
*
*
|
String function(BatchContext context, BatchObject batchObject, ObjectMapDetails objectDetails, Boolean lastEmail){ String str = STR<br><br><font class=\STR>STR<B>Please Note :</B></font><font class=\STR>This is the last email for FAILED Objects as maximum number STRof email alerts has been reached. Please contact help desk for further asistance.STR</font>STR <HTML> STR <HEAD> STR <META NAME='GENERATOR' Content='Microsoft Visual Studio 6.0'> STR <TITLE></TITLE> STR </HEAD> STR <BODY> STR <HR width='100%' size='1' />STR <TABLE width='100%' border=0> STR <TR> STR <TD class='blockHeader'>Batch Object STR <font color=\"red\">FAILED.</font>STR</TD>STR </TR> STR </TABLE>STR <BR/> STR <TABLE width='50%' border=0> STR <TR> STR <TD class='blockHeader'>FAILED Batch Object Details</TD> STR </TR> STR <TR> STR <TD> STR <TABLE width='100%' border=0 class='dTable'> STR <TR> STR <TD width='48%' class='trHeader'>Name</TD> STR <TD width='60%' class='trHeader'>Value</TD> STR </TR> STR <TR> STR <TD width='40%' class='trRow1'>Sequence# :</TD> STR <TD width='60%' class='trRow1'>STR</TD> STR </TR> STR <TR> STR <TD width='40%' class='trRow2'>Object Name :</TD> STR <TD width='60%' class='trRow2'>STR</TD> STR </TR> STR <TR> STR <TD width='40%' class='trRow1'>Job Description :</TD> STR <TD width='60%' class='trRow1'>STR</TD> STR </TR> STR <TR> STR <TD width='40%' class='trRow2'>Job Type :</TD> STR <TD width='60%' class='trRow2'>STR</TD> STR </TR> STR <TR> STR <TR> STR <TD width='40%' class='trRow2'>Criticality :</TD> STR <TD width='60%' class='trRow2'>STRunknownSTR</TD> STR </TR> STR <TR> STR <TD width='40%' class='trRow1'>Error Type:</TD> STR <TD width='60%' class='trRow1'>STR</TD> STR </TR> STR <TR> STR <TR> STR <TD width='40%' class='trRow2'>Error Description:</TD> STR <TD width='60%' class='trRow2'>STR</TD> STR </TR> STR </TABLE> STR </TD> STR </TR> STR </TABLE> STR <BR /> STR </BODY> STR </HTML> "); return sb.toString(); } } /* * Revision Log * ------------------------------- * $Log:: 
/Product_Base/Projects/Batch/Code/Java/Core/src/com/stgmastek/core/util/email/BatchObjectFailureEmailContentGenerator.java $ * * 3 4/23/10 2:21p Kedarr * Installation name added to subject line. * * 2 4/13/10 2:32p Kedarr * Changes made to add the escalation level and for passing object details as changed in the interface. * * 1 4/12/10 12:27p Kedarr * Moved from project jbeam-impl as this is now the part of the core. * * 7 4/07/10 4:49p Kedarr * Updates made as per the PRE upgrade. * * 6 4/06/10 2:43p Kedarr * Changes made to make use of super header and footer * * 5 4/06/10 2:38p Kedarr * Changes made to make use of super header and footer * * 4 4/06/10 2:19p Kedarr * Changes made to update the context batch info with the execution end datetime. * * 3 3/29/10 1:45p Kedarr * Changes made to add more beautification * * 2 3/25/10 4:53p Kedarr * Changes made to implement the last email * * 1 3/24/10 6:05p Kedarr * Initial Version * * 7 3/24/10 12:47p Kedarr * Changed the API to add batch object as a parameter * * 6 2/25/10 9:47a Grahesh * Changes made for fail over. * * 5 2/15/10 11:38a Mandar.vaidya * Modified the toMap method * * 4 1/07/10 5:37p Grahesh * Updated Java Doc comments * * 3 12/18/09 12:32p Grahesh * Updated the comments * * 2 12/17/09 11:46a Grahesh * Initial Version * *
|
/**
* Helper method to create the content when the batch starts
*
* @param context
* The batch context
* @param batchObject
* The batch object under consideration.
* @param objectDetails
* @param lastEmail
* @return the content of the email as string
*/
|
Helper method to create the content when the batch starts
|
getEmailContent
|
{
"repo_name": "MastekLtd/JBEAM",
"path": "jbeam-core-components/jbeam-core/src/main/java/com/stgmastek/core/util/email/BatchObjectFailureEmailContentGenerator.java",
"license": "lgpl-3.0",
"size": 9254
}
|
[
"com.stgmastek.core.util.BatchContext",
"com.stgmastek.core.util.BatchObject",
"com.stgmastek.core.util.ObjectMapDetails"
] |
import com.stgmastek.core.util.BatchContext; import com.stgmastek.core.util.BatchObject; import com.stgmastek.core.util.ObjectMapDetails;
|
import com.stgmastek.core.util.*;
|
[
"com.stgmastek.core"
] |
com.stgmastek.core;
| 755,592
|
public List<DataObject> loadParentsOfAnnotations(SecurityContext ctx, long annotationId) throws DSOutOfServiceException,
DSAccessException {
ExperimenterData exp = (ExperimenterData) context
.lookup(LookupNames.CURRENT_USER_DETAILS);
long userId = exp.getId();
return loadParentsOfAnnotations(ctx, annotationId, userId);
}
|
List<DataObject> function(SecurityContext ctx, long annotationId) throws DSOutOfServiceException, DSAccessException { ExperimenterData exp = (ExperimenterData) context .lookup(LookupNames.CURRENT_USER_DETAILS); long userId = exp.getId(); return loadParentsOfAnnotations(ctx, annotationId, userId); }
|
/**
* Implemented as specified by {@link OmeroImageService}.
*
* @see OmeroMetadataService#loadParentsOfAnnotations(SecurityContext, long)
*/
|
Implemented as specified by <code>OmeroImageService</code>
|
loadParentsOfAnnotations
|
{
"repo_name": "dpwrussell/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/env/data/OmeroMetadataServiceImpl.java",
"license": "gpl-2.0",
"size": 88712
}
|
[
"java.util.List",
"org.openmicroscopy.shoola.env.LookupNames"
] |
import java.util.List; import org.openmicroscopy.shoola.env.LookupNames;
|
import java.util.*; import org.openmicroscopy.shoola.env.*;
|
[
"java.util",
"org.openmicroscopy.shoola"
] |
java.util; org.openmicroscopy.shoola;
| 635,640
|
private int getThumbnailDimension() {
// Converts dp to pixel
Resources r = MainApp.getAppContext().getResources();
Double d = Math.pow(2, Math.floor(Math.log(r.getDimension(R.dimen.file_icon_size_grid)) / Math.log(2)));
return d.intValue();
}
|
int function() { Resources r = MainApp.getAppContext().getResources(); Double d = Math.pow(2, Math.floor(Math.log(r.getDimension(R.dimen.file_icon_size_grid)) / Math.log(2))); return d.intValue(); }
|
/**
* Converts size of file icon from dp to pixel
*
* @return int
*/
|
Converts size of file icon from dp to pixel
|
getThumbnailDimension
|
{
"repo_name": "jsargent7089/android",
"path": "src/main/java/com/owncloud/android/datamodel/ThumbnailsCacheManager.java",
"license": "gpl-2.0",
"size": 51562
}
|
[
"android.content.res.Resources",
"com.owncloud.android.MainApp"
] |
import android.content.res.Resources; import com.owncloud.android.MainApp;
|
import android.content.res.*; import com.owncloud.android.*;
|
[
"android.content",
"com.owncloud.android"
] |
android.content; com.owncloud.android;
| 809,598
|
public Instant getStartTime() {
return this.startTime;
}
|
Instant function() { return this.startTime; }
|
/**
* Return the start time of this event.
* @return the start time
*/
|
Return the start time of this event
|
getStartTime
|
{
"repo_name": "shakuzen/spring-boot",
"path": "spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/metrics/buffering/StartupTimeline.java",
"license": "apache-2.0",
"size": 3325
}
|
[
"java.time.Instant"
] |
import java.time.Instant;
|
import java.time.*;
|
[
"java.time"
] |
java.time;
| 1,107,060
|
public byte[] toBytes() {
byte typeByte = (byte) (((isLeaf()) ? 0 : 1) + ((isRoot()) ? 5 : 0));
byte[] node = flatten();
return ByteBuffer.allocate(node.length + 1).put(typeByte).put(node)
.array();
}
|
byte[] function() { byte typeByte = (byte) (((isLeaf()) ? 0 : 1) + ((isRoot()) ? 5 : 0)); byte[] node = flatten(); return ByteBuffer.allocate(node.length + 1).put(typeByte).put(node) .array(); }
|
/**
* Helper function to flattent a node to bytes.
*
* @return
*/
|
Helper function to flattent a node to bytes
|
toBytes
|
{
"repo_name": "hgrimberg01/BPlusTreeSim",
"path": "src/edu/ku/eecs/db/disk/TreeNode.java",
"license": "mit",
"size": 3669
}
|
[
"java.nio.ByteBuffer"
] |
import java.nio.ByteBuffer;
|
import java.nio.*;
|
[
"java.nio"
] |
java.nio;
| 2,897,513
|
@Override
public void notifyChanged(Notification notification) {
updateChildren(notification);
super.notifyChanged(notification);
}
|
void function(Notification notification) { updateChildren(notification); super.notifyChanged(notification); }
|
/**
* This handles model notifications by calling {@link #updateChildren} to update any cached
* children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This handles model notifications by calling <code>#updateChildren</code> to update any cached children and by creating a viewer notification, which it passes to <code>#fireNotifyChanged</code>.
|
notifyChanged
|
{
"repo_name": "prabushi/devstudio-tooling-esb",
"path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/BAMMediatorInputConnectorItemProvider.java",
"license": "apache-2.0",
"size": 3674
}
|
[
"org.eclipse.emf.common.notify.Notification"
] |
import org.eclipse.emf.common.notify.Notification;
|
import org.eclipse.emf.common.notify.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 2,181,691
|
protected FractionalPermissions makeModifiable(String neededRoot, boolean inFrame, FractionConstraints constraints) {
ArrayList<FractionalPermission> newPs = new ArrayList<FractionalPermission>(inFrame ? framePermissions : permissions);
FractionalPermission p = PermissionSet.removePermission(newPs, neededRoot, false, constraints);
if(p == null)
constraints.addConstraint(FractionConstraint.impossible("No permission for available for root " + neededRoot));
else {
p = p.makeModifiable(constraints);
newPs.add(p);
}
if(inFrame)
return createPermissions(permissions, newPs, constraints);
else
return createPermissions(newPs, framePermissions, constraints);
}
|
FractionalPermissions function(String neededRoot, boolean inFrame, FractionConstraints constraints) { ArrayList<FractionalPermission> newPs = new ArrayList<FractionalPermission>(inFrame ? framePermissions : permissions); FractionalPermission p = PermissionSet.removePermission(newPs, neededRoot, false, constraints); if(p == null) constraints.addConstraint(FractionConstraint.impossible(STR + neededRoot)); else { p = p.makeModifiable(constraints); newPs.add(p); } if(inFrame) return createPermissions(permissions, newPs, constraints); else return createPermissions(newPs, framePermissions, constraints); }
|
/**
* Do <b>not</b> call this method on bottom!
* @param neededRoot
* @param inFrame
* @param constraints
* @return
*/
|
Do not call this method on bottom
|
makeModifiable
|
{
"repo_name": "plaidgroup/plural",
"path": "Plural/src/edu/cmu/cs/plural/fractions/FractionalPermissions.java",
"license": "gpl-2.0",
"size": 34733
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,132,672
|
public final void SendDatagram ( int dgramTyp, String fromName, char fromNameType, String toName, char toNameType,
byte[] userData, int userLen, int userOff, InetAddress addr, int port)
throws IOException, UnknownHostException {
// Set the datagram header values
setMessageType ( dgramTyp);
setSourceName ( fromName, fromNameType);
setDestinationName ( toName, toNameType);
setSourcePort ( RFCNetBIOSProtocol.DATAGRAM);
setSourceIPAddress( InetAddress.getLocalHost().getAddress());
setFlags ( FLG_FIRSTPKT);
if ( m_nextId == 0)
m_nextId = ( int) ( System.currentTimeMillis() & 0x7FFF);
setDatagramId( m_nextId++);
// Set the user data and length
setLength ( userLen + NB_USERDATA);
setUserData ( userData, userLen, userOff);
// Use the global NetBIOS datagram socket to sent the broadcast datagram
NetBIOSDatagramSocket nbSocket = NetBIOSDatagramSocket.getInstance();
nbSocket.sendDatagram(this,addr,port);
}
|
final void function ( int dgramTyp, String fromName, char fromNameType, String toName, char toNameType, byte[] userData, int userLen, int userOff, InetAddress addr, int port) throws IOException, UnknownHostException { setMessageType ( dgramTyp); setSourceName ( fromName, fromNameType); setDestinationName ( toName, toNameType); setSourcePort ( RFCNetBIOSProtocol.DATAGRAM); setSourceIPAddress( InetAddress.getLocalHost().getAddress()); setFlags ( FLG_FIRSTPKT); if ( m_nextId == 0) m_nextId = ( int) ( System.currentTimeMillis() & 0x7FFF); setDatagramId( m_nextId++); setLength ( userLen + NB_USERDATA); setUserData ( userData, userLen, userOff); NetBIOSDatagramSocket nbSocket = NetBIOSDatagramSocket.getInstance(); nbSocket.sendDatagram(this,addr,port); }
|
/**
* Send a datagram to the specified NetBIOS name using the global NetBIOS datagram socket
*
* @param dgramTyp Datagram type
* @param fromName From NetBIOS name
* @param fromNameType From NetBIOS name type.
* @param toName To NetBIOS name
* @param toNameType To NetBIOS name type.
* @param userData User data buffer
* @param userLen User data length.
* @param userOff Offset of data within user buffer.
* @param addr Address to send to
* @param port Port to send to
* @exception java.io.IOException Error occurred sending datagram
* @exception UnknownHostException Failed to generate the broadcast mask for the network
*/
|
Send a datagram to the specified NetBIOS name using the global NetBIOS datagram socket
|
SendDatagram
|
{
"repo_name": "loftuxab/community-edition-old",
"path": "projects/alfresco-jlan/source/java/org/alfresco/jlan/netbios/NetBIOSDatagram.java",
"license": "lgpl-3.0",
"size": 16554
}
|
[
"java.io.IOException",
"java.net.InetAddress",
"java.net.UnknownHostException"
] |
import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException;
|
import java.io.*; import java.net.*;
|
[
"java.io",
"java.net"
] |
java.io; java.net;
| 1,465,448
|
public java.sql.ResultSetMetaData getMetaData() throws SQLException {
checkClosed();
return new com.mysql.jdbc.ResultSetMetaData(this.fields, this.connection.getUseOldAliasMetadataBehavior(), this.connection.getYearIsDateType(),
getExceptionInterceptor());
}
|
java.sql.ResultSetMetaData function() throws SQLException { checkClosed(); return new com.mysql.jdbc.ResultSetMetaData(this.fields, this.connection.getUseOldAliasMetadataBehavior(), this.connection.getYearIsDateType(), getExceptionInterceptor()); }
|
/**
* The numbers, types and properties of a ResultSet's columns are provided
* by the getMetaData method
*
* @return a description of the ResultSet's columns
*
* @exception SQLException
* if a database access error occurs
*/
|
The numbers, types and properties of a ResultSet's columns are provided by the getMetaData method
|
getMetaData
|
{
"repo_name": "mwaylabs/mysql-connector-j",
"path": "src/com/mysql/jdbc/ResultSetImpl.java",
"license": "gpl-2.0",
"size": 288724
}
|
[
"java.sql.SQLException"
] |
import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 1,180,181
|
public final T read(Path file) throws IOException {
try (Directory dir = newDirectory(file.getParent())) {
try (final IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
// We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
CodecUtil.checksumEntireFile(indexInput);
CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, STATE_FILE_VERSION, STATE_FILE_VERSION);
final XContentType xContentType = XContentType.values()[indexInput.readInt()];
indexInput.readLong(); // version currently unused
long filePointer = indexInput.getFilePointer();
long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(new InputStreamIndexInput(slice, contentSize))) {
return fromXContent(parser);
}
}
} catch(CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
// we trick this into a dedicated exception with the original stacktrace
throw new CorruptStateException(ex);
}
}
}
|
final T function(Path file) throws IOException { try (Directory dir = newDirectory(file.getParent())) { try (final IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) { CodecUtil.checksumEntireFile(indexInput); CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, STATE_FILE_VERSION, STATE_FILE_VERSION); final XContentType xContentType = XContentType.values()[indexInput.readInt()]; indexInput.readLong(); long filePointer = indexInput.getFilePointer(); long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; try (IndexInput slice = indexInput.slice(STR, filePointer, contentSize)) { try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(new InputStreamIndexInput(slice, contentSize))) { return fromXContent(parser); } } } catch(CorruptIndexException IndexFormatTooOldException IndexFormatTooNewException ex) { throw new CorruptStateException(ex); } } }
|
/**
* Reads the state from a given file and compares the expected version against the actual version of
* the state.
*/
|
Reads the state from a given file and compares the expected version against the actual version of the state
|
read
|
{
"repo_name": "camilojd/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java",
"license": "apache-2.0",
"size": 18328
}
|
[
"java.io.IOException",
"java.nio.file.Path",
"org.apache.lucene.codecs.CodecUtil",
"org.apache.lucene.index.CorruptIndexException",
"org.apache.lucene.index.IndexFormatTooNewException",
"org.apache.lucene.index.IndexFormatTooOldException",
"org.apache.lucene.store.Directory",
"org.apache.lucene.store.IOContext",
"org.apache.lucene.store.IndexInput",
"org.elasticsearch.common.lucene.store.InputStreamIndexInput",
"org.elasticsearch.common.xcontent.XContentFactory",
"org.elasticsearch.common.xcontent.XContentParser",
"org.elasticsearch.common.xcontent.XContentType"
] |
import java.io.IOException; import java.nio.file.Path; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType;
|
import java.io.*; import java.nio.file.*; import org.apache.lucene.codecs.*; import org.apache.lucene.index.*; import org.apache.lucene.store.*; import org.elasticsearch.common.lucene.store.*; import org.elasticsearch.common.xcontent.*;
|
[
"java.io",
"java.nio",
"org.apache.lucene",
"org.elasticsearch.common"
] |
java.io; java.nio; org.apache.lucene; org.elasticsearch.common;
| 1,076,494
|
@Override
public void doFilter(ServletRequest request, ServletResponse response,
FilterChain chain)
throws IOException, ServletException {
// Conditionally select and set the character encoding to be used
if (ignore || (request.getCharacterEncoding() == null)) {
String selected = selectEncoding(request);
if (selected != null) {
request.setCharacterEncoding(selected);
}
}
// Pass control on to the next filter
chain.doFilter(request, response);
}
|
void function(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (ignore (request.getCharacterEncoding() == null)) { String selected = selectEncoding(request); if (selected != null) { request.setCharacterEncoding(selected); } } chain.doFilter(request, response); }
|
/**
* Select and set (if specified) the character encoding to be used to
* interpret request parameters for this request.
*
* @param request The servlet request we are processing
* @param response The servlet response we are creating
* @param chain The filter chain we are processing
*
* @exception IOException if an input/output error occurs
* @exception ServletException if a servlet error occurs
*/
|
Select and set (if specified) the character encoding to be used to interpret request parameters for this request
|
doFilter
|
{
"repo_name": "julie-sullivan/phytomine",
"path": "intermine/web/main/src/org/intermine/web/filters/SetCharacterEncodingFilter.java",
"license": "lgpl-2.1",
"size": 6231
}
|
[
"java.io.IOException",
"javax.servlet.FilterChain",
"javax.servlet.ServletException",
"javax.servlet.ServletRequest",
"javax.servlet.ServletResponse"
] |
import java.io.IOException; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse;
|
import java.io.*; import javax.servlet.*;
|
[
"java.io",
"javax.servlet"
] |
java.io; javax.servlet;
| 138,231
|
public static String collectionToDelimitedString(Collection coll, String delim) {
return collectionToDelimitedString(coll, delim, "", "");
}
|
static String function(Collection coll, String delim) { return collectionToDelimitedString(coll, delim, STR"); }
|
/**
* Convenience method to return a Collection as a delimited (e.g. CSV)
* String. E.g. useful for <code>toString()</code> implementations.
* @param coll the Collection to display
* @param delim the delimiter to use (probably a ",")
* @return the delimited String
*
* Borrowed from Spring, under the ASL2.0 license.
*/
|
Convenience method to return a Collection as a delimited (e.g. CSV) String. E.g. useful for <code>toString()</code> implementations
|
collectionToDelimitedString
|
{
"repo_name": "lanceleverich/drools",
"path": "drools-core/src/main/java/org/drools/core/util/StringUtils.java",
"license": "apache-2.0",
"size": 47859
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,913,925
|
if (label == null) {
label = new HashMap<>();
}
return label;
}
|
if (label == null) { label = new HashMap<>(); } return label; }
|
/**
* Gets the value of the label property.
* @return possible object is {@link Multilang }
*/
|
Gets the value of the label property
|
getLabel
|
{
"repo_name": "SilverDav/Silverpeas-Core",
"path": "core-library/src/main/java/org/silverpeas/core/admin/component/model/GroupOfParameters.java",
"license": "agpl-3.0",
"size": 5418
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 163,736
|
@Atomic
static public void upload(final PhotographUploadBean photo, final Person person) throws FileNotFoundException, IOException {
try (InputStream stream = photo.getFileInputStream()) {
person.setPersonalPhoto(new Photograph(PhotoType.USER, ContentType.getContentType(photo.getContentType()),
ByteStreams.toByteArray(stream)));
}
}
|
static void function(final PhotographUploadBean photo, final Person person) throws FileNotFoundException, IOException { try (InputStream stream = photo.getFileInputStream()) { person.setPersonalPhoto(new Photograph(PhotoType.USER, ContentType.getContentType(photo.getContentType()), ByteStreams.toByteArray(stream))); } }
|
/**
*
* Service used in public candidacies, so person may not have Person role
* yet
*
* @param photo
* @param person
* @throws FileNotFoundException
* @throws IOException
*/
|
Service used in public candidacies, so person may not have Person role yet
|
upload
|
{
"repo_name": "jcarvalho/fenixedu-academic",
"path": "src/main/java/org/fenixedu/academic/service/services/fileManager/UploadOwnPhoto.java",
"license": "lgpl-3.0",
"size": 2249
}
|
[
"com.google.common.io.ByteStreams",
"java.io.FileNotFoundException",
"java.io.IOException",
"java.io.InputStream",
"org.fenixedu.academic.domain.Person",
"org.fenixedu.academic.domain.PhotoType",
"org.fenixedu.academic.domain.Photograph",
"org.fenixedu.academic.dto.person.PhotographUploadBean",
"org.fenixedu.academic.util.ContentType"
] |
import com.google.common.io.ByteStreams; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import org.fenixedu.academic.domain.Person; import org.fenixedu.academic.domain.PhotoType; import org.fenixedu.academic.domain.Photograph; import org.fenixedu.academic.dto.person.PhotographUploadBean; import org.fenixedu.academic.util.ContentType;
|
import com.google.common.io.*; import java.io.*; import org.fenixedu.academic.domain.*; import org.fenixedu.academic.dto.person.*; import org.fenixedu.academic.util.*;
|
[
"com.google.common",
"java.io",
"org.fenixedu.academic"
] |
com.google.common; java.io; org.fenixedu.academic;
| 1,244,514
|
@Generated
@Selector("minimumWithNaNPropagationWithPrimaryTensor:secondaryTensor:name:")
public native MPSGraphTensor minimumWithNaNPropagationWithPrimaryTensorSecondaryTensorName(
MPSGraphTensor primaryTensor, MPSGraphTensor secondaryTensor, String name);
|
@Selector(STR) native MPSGraphTensor function( MPSGraphTensor primaryTensor, MPSGraphTensor secondaryTensor, String name);
|
/**
* Create Minimum op and return the result tensor, it supports broadcasting as well. If any of the operands is NaN, NaN is returned
* <p>
* [@code]
* resultTensor = isNaN(primaryTensor) || isNan(secondaryTensor) ? NaN : min(primaryTensor, secondaryTensor)
* [@endcode]
*
* @param primaryTensor LHS tensor of the binary Op
* @param secondaryTensor RHS tensor of the binary Op
* @param name name for the operation
* @return A valid MPSGraphTensor object.
*/
|
Create Minimum op and return the result tensor, it supports broadcasting as well. If any of the operands is NaN, NaN is returned [@code] resultTensor = isNaN(primaryTensor) || isNan(secondaryTensor) ? NaN : min(primaryTensor, secondaryTensor) [@endcode]
|
minimumWithNaNPropagationWithPrimaryTensorSecondaryTensorName
|
{
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/metalperformanceshadersgraph/MPSGraph.java",
"license": "apache-2.0",
"size": 97735
}
|
[
"org.moe.natj.objc.ann.Selector"
] |
import org.moe.natj.objc.ann.Selector;
|
import org.moe.natj.objc.ann.*;
|
[
"org.moe.natj"
] |
org.moe.natj;
| 2,057,616
|
/**
 * Builds the temporary subject seeded with the unique ID resolved from the
 * given access ID, then delegates principal and credential creation.
 *
 * @param accessId the access ID to resolve into a unique ID
 * @throws Exception if principal or credential setup fails
 */
private void addCredentials(String accessId) throws Exception {
    temporarySubject = new Subject();

    // Stash the unique ID in a hashtable so the downstream credential
    // machinery can read it from the subject's public credentials.
    Hashtable<String, Object> uniqueIdProps = new Hashtable<String, Object>();
    uniqueIdProps.put(AttributeNameConstants.WSCREDENTIAL_UNIQUEID, AccessIdUtil.getUniqueId(accessId));
    temporarySubject.getPublicCredentials().add(uniqueIdProps);

    setWSPrincipal(temporarySubject, username, accessId, WSPrincipal.AUTH_METHOD_CERTIFICATE);
    setCredentials(temporarySubject, username, username);

    // The hashtable is only needed while the credentials are being built.
    temporarySubject.getPublicCredentials().remove(uniqueIdProps);
}
|
void function(String accessId) throws Exception { temporarySubject = new Subject(); Hashtable<String, Object> hashtable = new Hashtable<String, Object>(); hashtable.put(AttributeNameConstants.WSCREDENTIAL_UNIQUEID, AccessIdUtil.getUniqueId(accessId)); temporarySubject.getPublicCredentials().add(hashtable); setWSPrincipal(temporarySubject, username, accessId, WSPrincipal.AUTH_METHOD_CERTIFICATE); setCredentials(temporarySubject, username, username); temporarySubject.getPublicCredentials().remove(hashtable); }
|
/**
* Add unique ID and call setPrincipalAndCredentials
*
* @param accessId
* @throws Exception
*/
|
Add unique ID and call setPrincipalAndCredentials
|
addCredentials
|
{
"repo_name": "kgibm/open-liberty",
"path": "dev/com.ibm.ws.security.authentication.builtin/src/com/ibm/ws/security/authentication/jaas/modules/CertificateLoginModule.java",
"license": "epl-1.0",
"size": 16216
}
|
[
"com.ibm.ws.security.AccessIdUtil",
"com.ibm.ws.security.authentication.principals.WSPrincipal",
"com.ibm.wsspi.security.token.AttributeNameConstants",
"java.util.Hashtable",
"javax.security.auth.Subject"
] |
import com.ibm.ws.security.AccessIdUtil; import com.ibm.ws.security.authentication.principals.WSPrincipal; import com.ibm.wsspi.security.token.AttributeNameConstants; import java.util.Hashtable; import javax.security.auth.Subject;
|
import com.ibm.ws.security.*; import com.ibm.ws.security.authentication.principals.*; import com.ibm.wsspi.security.token.*; import java.util.*; import javax.security.auth.*;
|
[
"com.ibm.ws",
"com.ibm.wsspi",
"java.util",
"javax.security"
] |
com.ibm.ws; com.ibm.wsspi; java.util; javax.security;
| 914,957
|
/**
 * Stores an LO (Long String) element for the given tag. A null value
 * produces an empty LO element; otherwise the value is encoded using this
 * dataset's specific character set.
 *
 * @param tag the DICOM tag
 * @param value the string value, or null for an empty element
 * @return the element stored for the tag
 */
public DcmElement putLO(int tag, String value) {
    if (value == null) {
        return put(StringElement.createLO(tag));
    }
    return put(StringElement.createLO(tag, value, getSpecificCharacterSet()));
}
|
DcmElement function(int tag, String value) { return put(value != null ? StringElement.createLO(tag, value, getSpecificCharacterSet()) : StringElement.createLO(tag)); }
|
/**
* Description of the Method
*
* @param tag
* Description of the Parameter
* @param value
* Description of the Parameter
* @return Description of the Return Value
*/
|
Description of the Method
|
putLO
|
{
"repo_name": "medicayun/medicayundicom",
"path": "dcm4che14/branches/DCM4CHE_2_14_22_BRANCHA/src/java/org/dcm4cheri/data/DcmObjectImpl.java",
"license": "apache-2.0",
"size": 86392
}
|
[
"org.dcm4che.data.DcmElement"
] |
import org.dcm4che.data.DcmElement;
|
import org.dcm4che.data.*;
|
[
"org.dcm4che.data"
] |
org.dcm4che.data;
| 2,695,308
|
/**
 * Writes the model to the given output stream by serializing the backing
 * tree. The stream is left open for the caller to close.
 *
 * @param os stream to write the model to
 * @throws IOException when any i/o error occurs
 */
public void writeTo(OutputStream os) throws IOException {
    tree.writeTo(os);
}
|
void function(OutputStream os) throws IOException { tree.writeTo(os); }
|
/**
* Writes model to output stream. Doesn't close the stream
*
* @param os stream to write model in
* @throws IOException when any i/o error occurs
*/
|
Writes model to output stream. Doesn't close the stream
|
writeTo
|
{
"repo_name": "TypeFox/che",
"path": "plugins/plugin-maven/che-plugin-maven-tools/src/main/java/org/eclipse/che/ide/maven/tools/Model.java",
"license": "epl-1.0",
"size": 38042
}
|
[
"java.io.IOException",
"java.io.OutputStream"
] |
import java.io.IOException; import java.io.OutputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 4,724
|
/**
 * Returns the component currently shown in the main tab. When the selected
 * tab is a JScrollPane, the scroll pane's viewport view is returned instead
 * of the scroll pane itself.
 *
 * @return the component in the currently selected main tab
 */
public Component getMainViewTabComponent()
{
    final Component selected = mangedTabPane.getSelectedComponent();
    return (selected instanceof JScrollPane)
            ? ((JScrollPane) selected).getViewport().getView()
            : selected;
}
|
Component function() { Component comp = mangedTabPane.getSelectedComponent(); if (comp instanceof JScrollPane) comp = ((JScrollPane) comp).getViewport().getView(); return comp; }
|
/**
* Returns component which is currently in the main tab
*
* @return
*/
|
Returns component which is currently in the main tab
|
getMainViewTabComponent
|
{
"repo_name": "skoulouzis/vlet-1.5.0",
"path": "source/core/nl.uva.vlet.gui.vbrowser/src/nl/uva/vlet/gui/vbrowser/VBrowser.java",
"license": "apache-2.0",
"size": 56629
}
|
[
"java.awt.Component",
"javax.swing.JScrollPane"
] |
import java.awt.Component; import javax.swing.JScrollPane;
|
import java.awt.*; import javax.swing.*;
|
[
"java.awt",
"javax.swing"
] |
java.awt; javax.swing;
| 1,988,903
|
/**
 * Compute the number of bytes that would be needed to encode a
 * {@code bytes} field holding this buffer's contents, without the tag.
 * Note: uses the buffer's capacity, not its remaining bytes, as the length.
 *
 * @param value the buffer whose encoded size is computed
 * @return the size of the length-delimited encoding in bytes
 */
public static int computeByteBufferSizeNoTag(final ByteBuffer value) {
    return computeLengthDelimitedFieldSize(value.capacity());
}
|
static int function(final ByteBuffer value) { return computeLengthDelimitedFieldSize(value.capacity()); }
|
/**
* Compute the number of bytes that would be needed to encode a
* {@code bytes} field.
*/
|
Compute the number of bytes that would be needed to encode a bytes field
|
computeByteBufferSizeNoTag
|
{
"repo_name": "gustavoanatoly/hbase",
"path": "hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java",
"license": "apache-2.0",
"size": 105107
}
|
[
"java.nio.ByteBuffer"
] |
import java.nio.ByteBuffer;
|
import java.nio.*;
|
[
"java.nio"
] |
java.nio;
| 2,567,165
|
/**
 * Output bucketed curve sensitivities ("Bucketed PV01") as an ascii table
 * on stdout. On failure, prints the result's failure message instead.
 *
 * @param label the label of the table
 * @param result the result object; holds a BucketedCurveSensitivities on success
 */
public static void outputBucketedCurveSensitivities(String label, Result result) {
    if (!result.isSuccess()) {
        System.out.println(label + ": Error - " + result.getFailureMessage());
        return;
    }
    System.out.println(label + ": Bucketed PV01");
    BucketedCurveSensitivities bcs = (BucketedCurveSensitivities) result.getValue();
    Map sensitivities = bcs.getSensitivities();
    for (Object element : sensitivities.entrySet()) {
        Map.Entry entry = (Map.Entry) element;
        Pair pair = (Pair) entry.getKey();
        // Use the entry's value directly instead of a second map lookup.
        DoubleLabelledMatrix1D matrix = (DoubleLabelledMatrix1D) entry.getValue();
        System.out.println("  " + pair.getFirst().toString() + ": " + pair.getSecond().toString());
        for (int i = 0; i < matrix.getLabels().length; i++) {
            System.out.println("    " + matrix.getLabels()[i].toString() + ": " + matrix.getValues()[i]);
        }
    }
}
|
static void function(String label, Result result) { if (result.isSuccess()) { System.out.println(label + STR); BucketedCurveSensitivities bcs = (BucketedCurveSensitivities) result.getValue(); Map sensitivities = bcs.getSensitivities(); Iterator entryIterator = sensitivities.entrySet().iterator(); while (entryIterator.hasNext()) { Map.Entry entry = (Map.Entry) entryIterator.next(); Pair pair = (Pair) entry.getKey(); DoubleLabelledMatrix1D matrix = (DoubleLabelledMatrix1D) sensitivities.get(pair); System.out.println(" " + pair.getFirst().toString() + STR + pair.getSecond().toString()); for (int i = 0; i < matrix.getLabels().length; i++) { System.out.println(" " + matrix.getLabels()[i].toString() + STR + matrix.getValues()[i]); } } } else { System.out.println(label + STR + result.getFailureMessage()); } }
|
/**
* Output bucketed sensitive as an ascii table
* @param label the label of the table
* @param result the result object
*/
|
Output bucketed sensitive as an ascii table
|
outputBucketedCurveSensitivities
|
{
"repo_name": "jeorme/OG-Platform",
"path": "sesame/sesame-examples/example-data/src/main/java/com/opengamma/solutions/util/ViewUtils.java",
"license": "apache-2.0",
"size": 9370
}
|
[
"com.opengamma.financial.analytics.DoubleLabelledMatrix1D",
"com.opengamma.financial.analytics.model.fixedincome.BucketedCurveSensitivities",
"com.opengamma.util.result.Result",
"com.opengamma.util.tuple.Pair",
"java.util.Iterator",
"java.util.Map"
] |
import com.opengamma.financial.analytics.DoubleLabelledMatrix1D; import com.opengamma.financial.analytics.model.fixedincome.BucketedCurveSensitivities; import com.opengamma.util.result.Result; import com.opengamma.util.tuple.Pair; import java.util.Iterator; import java.util.Map;
|
import com.opengamma.financial.analytics.*; import com.opengamma.financial.analytics.model.fixedincome.*; import com.opengamma.util.result.*; import com.opengamma.util.tuple.*; import java.util.*;
|
[
"com.opengamma.financial",
"com.opengamma.util",
"java.util"
] |
com.opengamma.financial; com.opengamma.util; java.util;
| 2,121,813
|
/**
 * Add a negative button with the specified label, such that the specified
 * task is executed when tapped <i>once</i>.
 *
 * @param resLabel the label for the button
 * @param task the task to be executed
 * @return (fluid)
 */
public MultitapAlertCompatDialog setNegativeButton( int resLabel, Runnable task )
{
    final Context ctx = this.getContext() ;
    final String sLabel = ctx.getString( resLabel ) ;
    m_taskNegative = task ;
    this.setButton( AlertDialog.BUTTON_NEGATIVE, sLabel, this ) ;
    return this ;
}
|
MultitapAlertCompatDialog function( int resLabel, Runnable task ) { final Context ctx = this.getContext() ; m_taskNegative = task ; final String sNegativeLabel = ctx.getString( resLabel ) ; this.setButton( AlertDialog.BUTTON_NEGATIVE, sNegativeLabel, this ) ; return this ; }
|
/**
* Add a negative button with the specified label, such that the specified
* task is executed when tapped <i>once</i>.
* @param resLabel the label for the button
* @param task the task to be executed
* @return (fluid)
*/
|
Add a negative button with the specified label, such that the specified task is executed when tapped once
|
setNegativeButton
|
{
"repo_name": "zerobandwidth-net/android",
"path": "libZeroAndroid/src/main/java/net/zer0bandwidth/android/lib/ui/MultitapAlertCompatDialog.java",
"license": "mit",
"size": 7929
}
|
[
"android.content.Context",
"android.support.v7.app.AlertDialog"
] |
import android.content.Context; import android.support.v7.app.AlertDialog;
|
import android.content.*; import android.support.v7.app.*;
|
[
"android.content",
"android.support"
] |
android.content; android.support;
| 856,231
|
Call<ResponseBody> deleteAsyncRelativeRetrySucceededAsync(final ServiceCallback<Void> serviceCallback);
|
Call<ResponseBody> deleteAsyncRelativeRetrySucceededAsync(final ServiceCallback<Void> serviceCallback);
|
/**
* Long running delete request, service returns a 500, then a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link Call} object
*/
|
Long running delete request, service returns a 500, then a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status
|
deleteAsyncRelativeRetrySucceededAsync
|
{
"repo_name": "BretJohnson/autorest",
"path": "AutoRest/Generators/Java/Azure.Java.Tests/src/main/java/fixtures/lro/LRORetrys.java",
"license": "mit",
"size": 17439
}
|
[
"com.microsoft.rest.ServiceCallback",
"com.squareup.okhttp.ResponseBody"
] |
import com.microsoft.rest.ServiceCallback; import com.squareup.okhttp.ResponseBody;
|
import com.microsoft.rest.*; import com.squareup.okhttp.*;
|
[
"com.microsoft.rest",
"com.squareup.okhttp"
] |
com.microsoft.rest; com.squareup.okhttp;
| 1,844,001
|
/**
 * Visits a class annotated with {@code XmlRegistry} and records all the
 * element mappings in it.
 *
 * @param registryClass the registry class to visit
 * @param upstream the location of the reference that led to this registry
 * @return the registry info built for the class
 */
public RegistryInfo<T,C> addRegistry(C registryClass, Locatable upstream ) {
    return new RegistryInfoImpl<T,C,F,M>(this,upstream,registryClass);
}
|
RegistryInfo<T,C> function(C registryClass, Locatable upstream ) { return new RegistryInfoImpl<T,C,F,M>(this,upstream,registryClass); }
|
/**
* Visits a class with {@link XmlRegistry} and records all the element mappings
* in it.
*/
|
Visits a class with <code>XmlRegistry</code> and records all the element mappings in it
|
addRegistry
|
{
"repo_name": "B3Partners/b3p-commons-csw",
"path": "src/main/jaxb/jaxb-ri-20090708/lib/jaxb-impl.src/com/sun/xml/bind/v2/model/impl/ModelBuilder.java",
"license": "lgpl-3.0",
"size": 15049
}
|
[
"com.sun.xml.bind.v2.model.annotation.Locatable",
"com.sun.xml.bind.v2.model.core.RegistryInfo"
] |
import com.sun.xml.bind.v2.model.annotation.Locatable; import com.sun.xml.bind.v2.model.core.RegistryInfo;
|
import com.sun.xml.bind.v2.model.annotation.*; import com.sun.xml.bind.v2.model.core.*;
|
[
"com.sun.xml"
] |
com.sun.xml;
| 1,887,802
|
void enterRuleXNullLiteral(@NotNull XtendParser.RuleXNullLiteralContext ctx);
void exitRuleXNullLiteral(@NotNull XtendParser.RuleXNullLiteralContext ctx);
|
void enterRuleXNullLiteral(@NotNull XtendParser.RuleXNullLiteralContext ctx); void exitRuleXNullLiteral(@NotNull XtendParser.RuleXNullLiteralContext ctx);
|
/**
* Exit a parse tree produced by {@link XtendParser#ruleXNullLiteral}.
* @param ctx the parse tree
*/
|
Exit a parse tree produced by <code>XtendParser#ruleXNullLiteral</code>
|
exitRuleXNullLiteral
|
{
"repo_name": "szarnekow/XtendParserGeneratorComparison",
"path": "antrl3_vs_antlr4/src/xtend/antlr4_2/XtendListener.java",
"license": "epl-1.0",
"size": 44107
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 1,197,740
|
/***
 * Tests the behavior of the export stack-trace command when the file option
 * is not provided: the output file should get auto-generated.
 *
 * @throws ClassNotFoundException
 * @throws IOException
 */
@Test
public void testExportStacktraceAutoGenerateFile()
    throws ClassNotFoundException, IOException, GfJsonException {
  setupSystem();

  // No file name supplied: the command should auto-generate one.
  CommandStringBuilder builder = new CommandStringBuilder(CliStrings.EXPORT_STACKTRACE);
  String command = builder.toString();
  getLogWriter().info("CommandString : " + command);

  CommandResult result = executeCommand(command);
  getLogWriter().info("Output : \n" + commandResultToString(result));

  assertTrue(result.getStatus().equals(Status.OK));
  String firstMessage = (String) result.getResultData().getGfJsonObject()
      .getJSONObject("content").getJSONArray("message").get(0);
  assertTrue(firstMessage.contains("stack-trace(s) exported to file:"));
}
|
void function() throws ClassNotFoundException, IOException, GfJsonException { setupSystem(); CommandStringBuilder commandStringBuilder = new CommandStringBuilder(CliStrings.EXPORT_STACKTRACE); String exportCommandString = commandStringBuilder.toString(); getLogWriter().info(STR + exportCommandString); CommandResult exportCommandResult = executeCommand(exportCommandString); getLogWriter().info(STR + commandResultToString(exportCommandResult)); assertTrue(exportCommandResult.getStatus().equals(Status.OK)); assertTrue( ((String) exportCommandResult.getResultData().getGfJsonObject().getJSONObject(STR) .getJSONArray(STR).get(0)).contains(STR)); }
|
/***
 * Tests the behavior of the show stack-trace command when the file option is not provided.
 * The file should get auto-generated
*
* @throws ClassNotFoundException
* @throws IOException
*/
|
Tests the behavior of the show stack-trace command when file option is not provided File should get auto-generated
|
testExportStacktraceAutoGenerateFile
|
{
"repo_name": "pivotal-amurmann/geode",
"path": "geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ShowStackTraceDUnitTest.java",
"license": "apache-2.0",
"size": 12310
}
|
[
"java.io.IOException",
"org.apache.geode.management.cli.Result",
"org.apache.geode.management.internal.cli.i18n.CliStrings",
"org.apache.geode.management.internal.cli.json.GfJsonException",
"org.apache.geode.management.internal.cli.result.CommandResult",
"org.apache.geode.management.internal.cli.util.CommandStringBuilder",
"org.apache.geode.test.dunit.Assert",
"org.apache.geode.test.dunit.LogWriterUtils"
] |
import java.io.IOException; import org.apache.geode.management.cli.Result; import org.apache.geode.management.internal.cli.i18n.CliStrings; import org.apache.geode.management.internal.cli.json.GfJsonException; import org.apache.geode.management.internal.cli.result.CommandResult; import org.apache.geode.management.internal.cli.util.CommandStringBuilder; import org.apache.geode.test.dunit.Assert; import org.apache.geode.test.dunit.LogWriterUtils;
|
import java.io.*; import org.apache.geode.management.cli.*; import org.apache.geode.management.internal.cli.i18n.*; import org.apache.geode.management.internal.cli.json.*; import org.apache.geode.management.internal.cli.result.*; import org.apache.geode.management.internal.cli.util.*; import org.apache.geode.test.dunit.*;
|
[
"java.io",
"org.apache.geode"
] |
java.io; org.apache.geode;
| 1,560,475
|
/**
 * Opens the reader streams over the cities dictionary for the given
 * language (e.g. "cz", "sk"); the file path is derived from the language
 * code. The file length is used as the mark limit so the reader can later
 * be reset to the beginning of the dictionary.
 *
 * @param language language code used to select the dictionary file
 */
public void openStreams(String language) {
    try {
        File dictCitiesFile = new File("_anal/dictionaries/" + language +
                "/cities/dict_cities.csv");
        int fileLengthInBytes = (int) dictCitiesFile.length();
        inputStream = new FileInputStream(dictCitiesFile);
        inputStreamReader = new InputStreamReader(inputStream);
        bufferedReader = new BufferedReader(inputStreamReader);
        if (!bufferedReader.markSupported()) {
            log4j.error("stream of CitiesDictLoader cannot be marked, " +
                    "dictCitiesFile=" + dictCitiesFile);
        }
        // Mark the start of the stream so a later reset() can rewind.
        bufferedReader.mark(fileLengthInBytes);
    } catch (IOException ioe) {
        // FileNotFoundException is an IOException, so one handler covers both.
        // Log the exception itself: getCause() is usually null here and
        // would silently drop the stack trace.
        log4j.error("openStreams language=" + language, ioe);
        ioe.printStackTrace();
    }
}
|
void function(String language) { try { File dictCitiesFile = new File(STR + language + STR); int fileLengthInBytes = (int) dictCitiesFile.length(); inputStream = new FileInputStream(dictCitiesFile); inputStreamReader = new InputStreamReader(inputStream); bufferedReader = new BufferedReader(inputStreamReader); if (!bufferedReader.markSupported()) { log4j.error(STR + STR + dictCitiesFile); } bufferedReader.mark(fileLengthInBytes); } catch (FileNotFoundException fnfe) { log4j.error(STR + language, fnfe.getCause()); fnfe.printStackTrace(); } catch (IOException ioe) { log4j.error(STR + language, ioe.getCause()); ioe.printStackTrace(); } }
|
/**
* Metoda, ktora otvori stramy na cintanie zo slovnika. Cestu k suboru si
 * doplni podla predaneho parametru language napr. "cz" , "sk" ...
*
* @param language jazyk, ktory sa pouzije na otvorenie toho spravneho
* slovnika.
*/
|
Metoda, ktora otvori stramy na cintanie zo slovnika. Cestu k suboru si doplni podl apredaneho parametru language napr. "cz" , "sk" ..
|
openStreams
|
{
"repo_name": "WebArchivCZ/webanalyzer",
"path": "src/org/archive/analyzer/dictionary/CitiesDictLoader.java",
"license": "gpl-3.0",
"size": 4267
}
|
[
"java.io.BufferedReader",
"java.io.File",
"java.io.FileInputStream",
"java.io.FileNotFoundException",
"java.io.IOException",
"java.io.InputStreamReader"
] |
import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 944,464
|
/**
 * Handles the HTTP <code>POST</code> method by delegating to the shared
 * request processor.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
    processRequest(request, response);
}
|
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
|
/**
* Handles the HTTP <code>POST</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
|
Handles the HTTP <code>POST</code> method
|
doPost
|
{
"repo_name": "gialnet/monitor-home-temp",
"path": "src/main/java/eu/redmoon/monitor/home/temp/AjaxServletTemp.java",
"license": "apache-2.0",
"size": 3908
}
|
[
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] |
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
|
import java.io.*; import javax.servlet.*; import javax.servlet.http.*;
|
[
"java.io",
"javax.servlet"
] |
java.io; javax.servlet;
| 2,266,423
|
void updateSubscriptions(String clientID, Set<Subscription> subscriptions);
|
void updateSubscriptions(String clientID, Set<Subscription> subscriptions);
|
/**
* Updates the subscriptions set for the clientID
* */
|
Updates the subscriptions set for the clientID
|
updateSubscriptions
|
{
"repo_name": "jiangyubao/moquette-mqtt",
"path": "broker/src/main/java/org/dna/mqtt/moquette/messaging/spi/ISessionsStore.java",
"license": "apache-2.0",
"size": 1480
}
|
[
"java.util.Set",
"org.dna.mqtt.moquette.messaging.spi.impl.subscriptions.Subscription"
] |
import java.util.Set; import org.dna.mqtt.moquette.messaging.spi.impl.subscriptions.Subscription;
|
import java.util.*; import org.dna.mqtt.moquette.messaging.spi.impl.subscriptions.*;
|
[
"java.util",
"org.dna.mqtt"
] |
java.util; org.dna.mqtt;
| 830,650
|
/**
 * Scan &amp; register internal job entry plugins. Intentionally a no-op —
 * see the comment in the body for why.
 *
 * @throws KettlePluginException declared for interface compatibility; never thrown here
 */
protected void registerAnnotations() throws KettlePluginException {
    // This is no longer done because it was deemed too slow. Only jar files in the plugins/ folders are scanned for
    // annotations.
}
|
void function() throws KettlePluginException { }
|
/**
* Scan & register internal job entry plugins
*/
|
Scan & register internal job entry plugins
|
registerAnnotations
|
{
"repo_name": "apratkin/pentaho-kettle",
"path": "ui/src/org/pentaho/di/ui/job/dialog/JobDialogPluginType.java",
"license": "apache-2.0",
"size": 4235
}
|
[
"org.pentaho.di.core.exception.KettlePluginException"
] |
import org.pentaho.di.core.exception.KettlePluginException;
|
import org.pentaho.di.core.exception.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 504,254
|
/**
 * Updates the specified attributes associated with the given SAS definition,
 * blocking until the async call completes. This operation requires the
 * storage/setsas permission.
 *
 * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
 * @param storageAccountName The name of the storage account.
 * @param sasDefinitionName The name of the SAS definition.
 * @return the SasDefinitionBundle object if successful.
 */
public SasDefinitionBundle updateSasDefinition(String vaultBaseUrl, String storageAccountName, String sasDefinitionName) {
    return updateSasDefinitionWithServiceResponseAsync(vaultBaseUrl, storageAccountName, sasDefinitionName).toBlocking().single().body();
}
|
SasDefinitionBundle function(String vaultBaseUrl, String storageAccountName, String sasDefinitionName) { return updateSasDefinitionWithServiceResponseAsync(vaultBaseUrl, storageAccountName, sasDefinitionName).toBlocking().single().body(); }
|
/**
* Updates the specified attributes associated with the given SAS definition. This operation requires the storage/setsas permission.
*
* @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
* @param storageAccountName The name of the storage account.
* @param sasDefinitionName The name of the SAS definition.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws KeyVaultErrorException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the SasDefinitionBundle object if successful.
*/
|
Updates the specified attributes associated with the given SAS definition. This operation requires the storage/setsas permission
|
updateSasDefinition
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/keyvault/microsoft-azure-keyvault/src/main/java/com/microsoft/azure/keyvault/implementation/KeyVaultClientBaseImpl.java",
"license": "mit",
"size": 884227
}
|
[
"com.microsoft.azure.keyvault.models.SasDefinitionBundle"
] |
import com.microsoft.azure.keyvault.models.SasDefinitionBundle;
|
import com.microsoft.azure.keyvault.models.*;
|
[
"com.microsoft.azure"
] |
com.microsoft.azure;
| 1,553,219
|
/**
 * Gets the catalog names available in this database. tinySQL has no
 * catalog support, so this method always fails.
 *
 * @return never returns normally
 * @exception SQLException always, since tinySQL does not support catalogues
 */
public ResultSet getCatalogs() throws SQLException {
    throw new SQLException("tinySQL does not support catalogues.");
}
|
ResultSet function() throws SQLException { throw new SQLException(STR); }
|
/**
* Gets the catalog names available in this database. The results
* are ordered by catalog name.
*
* <P>The catalog column is:
* <OL>
* <LI><B>TABLE_CAT</B> String => catalog name
* </OL>
*
* @return ResultSet - each row has a single String column that is a
* catalog name
* @exception SQLException if a database access error occurs
*/
|
Gets the catalog names available in this database. The results are ordered by catalog name. The catalog column is: TABLE_CAT String => catalog name
|
getCatalogs
|
{
"repo_name": "hermanwu/cs4420_tinySQL",
"path": "src/com/sqlmagic/tinysql/tinySQLDatabaseMetaData.java",
"license": "lgpl-2.1",
"size": 107261
}
|
[
"java.sql.ResultSet",
"java.sql.SQLException"
] |
import java.sql.ResultSet; import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 2,160,330
|
/**
 * Test primary-backup partitions consistency while restarting random backup
 * nodes under load: a background thread repeatedly stops and restarts a
 * random backup while random updates run against the primary, then
 * partitions are verified to be identical.
 *
 * @throws Exception on failure
 */
@Test
public void testPartitionConsistencyWithBackupsRestart() throws Exception {
    backups = 2;

    final int srvNodes = SERVER_NODES + 1; // Add one non-owner node to test to increase entropy.

    Ignite prim = startGrids(srvNodes);
    prim.cluster().active(true);

    IgniteCache<Object, Object> cache = prim.cache(DEFAULT_CACHE_NAME);
    List<Integer> primaryKeys = primaryKeys(cache, 10_000);

    // Renamed from "backups": the local previously shadowed the "backups"
    // field assigned above, which was confusing to read.
    List<Ignite> backupList = backupNodes(primaryKeys.get(0), DEFAULT_CACHE_NAME);
    assertFalse(backupList.contains(prim));

    long stop = U.currentTimeMillis() + 3 * 60_000;

    // Log the seed so a failing run can be reproduced.
    long seed = System.nanoTime();
    log.info("Seed: " + seed);
    Random r = new Random(seed);

    assertTrue(prim == grid(0));

    IgniteInternalFuture<?> fut = multithreadedAsync(() -> {
        while (U.currentTimeMillis() < stop) {
            doSleep(5_000);

            // Pick a random backup (indices 1..size) — never the primary.
            Ignite restartNode = grid(1 + r.nextInt(backupList.size()));
            assertFalse(prim == restartNode);

            String name = restartNode.name();
            stopGrid(true, name);

            try {
                waitForTopology(SERVER_NODES);
                doSleep(15_000);
                startGrid(name);
                awaitPartitionMapExchange();
            }
            catch (Exception e) {
                fail(X.getFullStackTrace(e));
            }
        }
    }, 1, "node-restarter");

    doRandomUpdates(r, prim, primaryKeys, cache, stop).get();
    fut.get();

    assertPartitionsSame(idleVerify(prim, DEFAULT_CACHE_NAME));
}
|
void function() throws Exception { backups = 2; final int srvNodes = SERVER_NODES + 1; Ignite prim = startGrids(srvNodes); prim.cluster().active(true); IgniteCache<Object, Object> cache = prim.cache(DEFAULT_CACHE_NAME); List<Integer> primaryKeys = primaryKeys(cache, 10_000); List<Ignite> backups = backupNodes(primaryKeys.get(0), DEFAULT_CACHE_NAME); assertFalse(backups.contains(prim)); long stop = U.currentTimeMillis() + 3 * 60_000; long seed = System.nanoTime(); log.info(STR + seed); Random r = new Random(seed); assertTrue(prim == grid(0)); IgniteInternalFuture<?> fut = multithreadedAsync(() -> { while (U.currentTimeMillis() < stop) { doSleep(5_000); Ignite restartNode = grid(1 + r.nextInt(backups.size())); assertFalse(prim == restartNode); String name = restartNode.name(); stopGrid(true, name); try { waitForTopology(SERVER_NODES); doSleep(15_000); startGrid(name); awaitPartitionMapExchange(); } catch (Exception e) { fail(X.getFullStackTrace(e)); } } }, 1, STR); doRandomUpdates(r, prim, primaryKeys, cache, stop).get(); fut.get(); assertPartitionsSame(idleVerify(prim, DEFAULT_CACHE_NAME)); }
|
/**
* Test primary-backup partitions consistency while restarting random backup nodes under load.
*/
|
Test primary-backup partitions consistency while restarting random backup nodes under load
|
testPartitionConsistencyWithBackupsRestart
|
{
"repo_name": "shroman/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/cache/transactions/TxPartitionCounterStateConsistencyTest.java",
"license": "apache-2.0",
"size": 16602
}
|
[
"java.util.List",
"java.util.Random",
"org.apache.ignite.Ignite",
"org.apache.ignite.IgniteCache",
"org.apache.ignite.internal.IgniteInternalFuture",
"org.apache.ignite.internal.util.typedef.X",
"org.apache.ignite.internal.util.typedef.internal.U"
] |
import java.util.List; import java.util.Random; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.U;
|
import java.util.*; import org.apache.ignite.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*;
|
[
"java.util",
"org.apache.ignite"
] |
java.util; org.apache.ignite;
| 2,483,848
|
/**
 * Gets all the creatures EXCLUDING Eve.
 *
 * @see #getEve()
 *
 * @return an iterator over every creature in the world except Eve
 */
public Iterator<Creature> getCreatures() {
    ArrayList<Creature> found = new ArrayList<Creature>();
    for (int row = 0; row < this.world.length; row++) {
        for (int col = 0; col < this.world[row].length; col++) {
            // Short-circuit && is equivalent to the original nested ifs.
            if (world[row][col].hasCreature() && !world[row][col].creature.isEve()) {
                found.add(world[row][col].creature);
            }
        }
    }
    return found.iterator();
}
|
Iterator<Creature> function() { ArrayList<Creature> creatures = new ArrayList<Creature>(); for (int y = 0; y < this.world.length; y++) { for (int x = 0; x < this.world[y].length; x++) { if (world[y][x].hasCreature()) { if (!world[y][x].creature.isEve()) { creatures.add(world[y][x].creature); } } } } return creatures.iterator(); }
|
/**
* Gets all the creatures EXCLUDING Eve
*
* @see #getEve()
*
* @return the creatures in the world EXCLUDING eve
*/
|
Gets all the creatures EXCLUDING Eve
|
getCreatures
|
{
"repo_name": "kapadiamush/Eve-s-Adventure",
"path": "src/models/campaign/World.java",
"license": "mit",
"size": 35669
}
|
[
"java.util.ArrayList",
"java.util.Iterator"
] |
import java.util.ArrayList; import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,470,171
|
/**
 * JSF action: adds a new attendee (entered by eid or email) to the
 * currently selected timeslot of the organizer's meeting, optionally
 * notifying participants by email, and navigates back to the organizer
 * meeting page. Validation failures report an error message and abort
 * without modifying the meeting.
 *
 * @return the navigation outcome, normally ORGANIZER_MEETING_PAGE_URL
 */
public String addAttendee() {
	TimeslotWrapper timeslotWrapper = (TimeslotWrapper) timeslotWrapperTable.getRowData();
	// Resolve the user's input from whichever input mode is active.
	String newAttendeeEidOrEmail = null;
	if (isEidInputMode()) {
		newAttendeeEidOrEmail = getEidOrEmailInputByUser();
	} else {
		if (addNewAttendeeUserEidOrEmail != null && ((String) addNewAttendeeUserEidOrEmail.getValue()).trim().length() > 0) {
			newAttendeeEidOrEmail = ((String) addNewAttendeeUserEidOrEmail.getValue()).trim();
		}
	}
	// Nothing entered: silently return to the organizer page.
	if (StringUtils.isBlank(newAttendeeEidOrEmail)) {
		return ORGANIZER_MEETING_PAGE_URL;
	}
	//check if there are multiple email addresses associated with input
	List<String> associatedEids = getEidsForEmail(newAttendeeEidOrEmail.trim());
	if(associatedEids.size() > 1) {
		Utilities.addErrorMessage(MessageFormat.format(Utilities.rb.getString("exception.multiple.eids"), new Object[] {newAttendeeEidOrEmail, StringUtils.join(associatedEids, ", ")}));
		return ORGANIZER_MEETING_PAGE_URL;
	}
	// Resolve the input to a user id; blank means no such user exists.
	String newUserId = getUserIdForEidOrEmail(newAttendeeEidOrEmail.trim());
	if(StringUtils.isBlank(newUserId)){
		Utilities.addErrorMessage(Utilities.rb.getString("exception.no.such.user") + newAttendeeEidOrEmail);
		return ORGANIZER_MEETING_PAGE_URL;
	}
	// A null SignupUser means the user may not attend this meeting.
	SignupUser newAttendeeSignUser = getSakaiFacade().getSignupUser(getMeetingWrapper().getMeeting(), newUserId);
	if(newAttendeeSignUser ==null){
		Utilities.addErrorMessage(MessageFormat.format(Utilities.rb.getString("user.has.no.permission.attend"), new Object[] {newAttendeeEidOrEmail}));
		return ORGANIZER_MEETING_PAGE_URL;
	}
	SignupAttendee newAttendee = new SignupAttendee(newUserId, newAttendeeSignUser.getMainSiteId());
	timeslotWrapper.setNewAttendee(newAttendee);
	// Perform the signup; "meeting" stays null if it fails, and
	// updateMeetingwrapper below handles that case.
	SignupMeeting meeting = null;
	try {
		AddAttendee addAttendee = new AddAttendee(signupMeetingService, currentUserId(), currentSiteId(), true);
		meeting = addAttendee.signup(getMeetingWrapper().getMeeting(), timeslotWrapper.getTimeSlot(),
				timeslotWrapper.getNewAttendee());
		boolean sendAttendeeEmail = false;//Utilities.getSignupConfigParamVal("signup.email.notification.attendee.signed.up", true);
		if (sendEmail || sendAttendeeEmail) {
			try {
				signupMeetingService.sendEmailToParticipantsByOrganizerAction(addAttendee
						.getSignupEventTrackingInfo());
			} catch (Exception e) {
				// Email failure is non-fatal: the signup itself succeeded.
				log.error(Utilities.rb.getString("email.exception") + " - " + e.getMessage(), e);
				Utilities.addErrorMessage(Utilities.rb.getString("email.exception"));
			}
		}
	} catch (SignupUserActionException ue) {
		Utilities.addErrorMessage(ue.getMessage());
	} catch (Exception e) {
		log.error(Utilities.rb.getString("error.occurred_try_again") + " - " + e.getMessage());
		Utilities.addErrorMessage(Utilities.rb.getString("error.occurred_try_again"));
	}
	String nextPage = updateMeetingwrapper(meeting, ORGANIZER_MEETING_PAGE_URL);
	// TODO calendar event id;
	// Reset the add-attendee UI state only when staying on the organizer page.
	if (ORGANIZER_MEETING_PAGE_URL.equals(nextPage)) {
		setAddNewAttendee(false);
		setSelectedTimeslotId(null);
	}
	return nextPage;
}
|
String function() { TimeslotWrapper timeslotWrapper = (TimeslotWrapper) timeslotWrapperTable.getRowData(); String newAttendeeEidOrEmail = null; if (isEidInputMode()) { newAttendeeEidOrEmail = getEidOrEmailInputByUser(); } else { if (addNewAttendeeUserEidOrEmail != null && ((String) addNewAttendeeUserEidOrEmail.getValue()).trim().length() > 0) { newAttendeeEidOrEmail = ((String) addNewAttendeeUserEidOrEmail.getValue()).trim(); } } if (StringUtils.isBlank(newAttendeeEidOrEmail)) { return ORGANIZER_MEETING_PAGE_URL; } List<String> associatedEids = getEidsForEmail(newAttendeeEidOrEmail.trim()); if(associatedEids.size() > 1) { Utilities.addErrorMessage(MessageFormat.format(Utilities.rb.getString(STR), new Object[] {newAttendeeEidOrEmail, StringUtils.join(associatedEids, STR)})); return ORGANIZER_MEETING_PAGE_URL; } String newUserId = getUserIdForEidOrEmail(newAttendeeEidOrEmail.trim()); if(StringUtils.isBlank(newUserId)){ Utilities.addErrorMessage(Utilities.rb.getString(STR) + newAttendeeEidOrEmail); return ORGANIZER_MEETING_PAGE_URL; } SignupUser newAttendeeSignUser = getSakaiFacade().getSignupUser(getMeetingWrapper().getMeeting(), newUserId); if(newAttendeeSignUser ==null){ Utilities.addErrorMessage(MessageFormat.format(Utilities.rb.getString(STR), new Object[] {newAttendeeEidOrEmail})); return ORGANIZER_MEETING_PAGE_URL; } SignupAttendee newAttendee = new SignupAttendee(newUserId, newAttendeeSignUser.getMainSiteId()); timeslotWrapper.setNewAttendee(newAttendee); SignupMeeting meeting = null; try { AddAttendee addAttendee = new AddAttendee(signupMeetingService, currentUserId(), currentSiteId(), true); meeting = addAttendee.signup(getMeetingWrapper().getMeeting(), timeslotWrapper.getTimeSlot(), timeslotWrapper.getNewAttendee()); boolean sendAttendeeEmail = false; if (sendEmail sendAttendeeEmail) { try { signupMeetingService.sendEmailToParticipantsByOrganizerAction(addAttendee .getSignupEventTrackingInfo()); } catch (Exception e) { log.error(Utilities.rb.getString(STR) 
+ STR + e.getMessage(), e); Utilities.addErrorMessage(Utilities.rb.getString(STR)); } } } catch (SignupUserActionException ue) { Utilities.addErrorMessage(ue.getMessage()); } catch (Exception e) { log.error(Utilities.rb.getString(STR) + STR + e.getMessage()); Utilities.addErrorMessage(Utilities.rb.getString(STR)); } String nextPage = updateMeetingwrapper(meeting, ORGANIZER_MEETING_PAGE_URL); if (ORGANIZER_MEETING_PAGE_URL.equals(nextPage)) { setAddNewAttendee(false); setSelectedTimeslotId(null); } return nextPage; }
|
/**
* This is a JSF action call method by UI to add a new attendee into the
* event/meeting.
*
* @return an action outcome string.
*/
|
This is a JSF action call method by UI to add a new attendee into the event/meeting
|
addAttendee
|
{
"repo_name": "OpenCollabZA/sakai",
"path": "signup/tool/src/java/org/sakaiproject/signup/tool/jsf/organizer/OrganizerSignupMBean.java",
"license": "apache-2.0",
"size": 45309
}
|
[
"java.text.MessageFormat",
"java.util.List",
"org.apache.commons.lang3.StringUtils",
"org.sakaiproject.signup.logic.SignupUser",
"org.sakaiproject.signup.logic.SignupUserActionException",
"org.sakaiproject.signup.model.SignupAttendee",
"org.sakaiproject.signup.model.SignupMeeting",
"org.sakaiproject.signup.tool.jsf.TimeslotWrapper",
"org.sakaiproject.signup.tool.jsf.organizer.action.AddAttendee",
"org.sakaiproject.signup.tool.util.Utilities"
] |
import java.text.MessageFormat; import java.util.List; import org.apache.commons.lang3.StringUtils; import org.sakaiproject.signup.logic.SignupUser; import org.sakaiproject.signup.logic.SignupUserActionException; import org.sakaiproject.signup.model.SignupAttendee; import org.sakaiproject.signup.model.SignupMeeting; import org.sakaiproject.signup.tool.jsf.TimeslotWrapper; import org.sakaiproject.signup.tool.jsf.organizer.action.AddAttendee; import org.sakaiproject.signup.tool.util.Utilities;
|
import java.text.*; import java.util.*; import org.apache.commons.lang3.*; import org.sakaiproject.signup.logic.*; import org.sakaiproject.signup.model.*; import org.sakaiproject.signup.tool.jsf.*; import org.sakaiproject.signup.tool.jsf.organizer.action.*; import org.sakaiproject.signup.tool.util.*;
|
[
"java.text",
"java.util",
"org.apache.commons",
"org.sakaiproject.signup"
] |
java.text; java.util; org.apache.commons; org.sakaiproject.signup;
| 1,615,288
|
public static Workflow cloneWorkflow(HelixManager manager, String origWorkflowName,
String newWorkflowName, Date newStartTime) {
// Read all resources, including the workflow and jobs of interest
HelixDataAccessor accessor = manager.getHelixDataAccessor();
PropertyKey.Builder keyBuilder = accessor.keyBuilder();
Map<String, HelixProperty> resourceConfigMap =
accessor.getChildValuesMap(keyBuilder.resourceConfigs(), true);
if (!resourceConfigMap.containsKey(origWorkflowName)) {
LOG.error("No such workflow named " + origWorkflowName);
return null;
}
if (resourceConfigMap.containsKey(newWorkflowName)) {
LOG.error("Workflow with name " + newWorkflowName + " already exists!");
return null;
}
// Create a new workflow with a new name
Map<String, String> workflowConfigsMap =
resourceConfigMap.get(origWorkflowName).getRecord().getSimpleFields();
WorkflowConfig.Builder workflowConfigBlder = WorkflowConfig.Builder.fromMap(workflowConfigsMap);
// Set the schedule, if applicable
if (newStartTime != null) {
ScheduleConfig scheduleConfig = ScheduleConfig.oneTimeDelayedStart(newStartTime);
workflowConfigBlder.setScheduleConfig(scheduleConfig);
}
workflowConfigBlder.setTerminable(true);
WorkflowConfig workflowConfig = workflowConfigBlder.build();
JobDag jobDag = workflowConfig.getJobDag();
Map<String, Set<String>> parentsToChildren = jobDag.getParentsToChildren();
Workflow.Builder workflowBuilder = new Workflow.Builder(newWorkflowName);
workflowBuilder.setWorkflowConfig(workflowConfig);
// Add each job back as long as the original exists
Set<String> namespacedJobs = jobDag.getAllNodes();
for (String namespacedJob : namespacedJobs) {
if (resourceConfigMap.containsKey(namespacedJob)) {
// Copy over job-level and task-level configs
String job = TaskUtil.getDenamespacedJobName(origWorkflowName, namespacedJob);
HelixProperty jobConfig = resourceConfigMap.get(namespacedJob);
Map<String, String> jobSimpleFields = jobConfig.getRecord().getSimpleFields();
JobConfig.Builder jobCfgBuilder = JobConfig.Builder.fromMap(jobSimpleFields);
jobCfgBuilder.setWorkflow(newWorkflowName); // overwrite workflow name
Map<String, Map<String, String>> rawTaskConfigMap = jobConfig.getRecord().getMapFields();
List<TaskConfig> taskConfigs = Lists.newLinkedList();
for (Map<String, String> rawTaskConfig : rawTaskConfigMap.values()) {
TaskConfig taskConfig = TaskConfig.Builder.from(rawTaskConfig);
taskConfigs.add(taskConfig);
}
jobCfgBuilder.addTaskConfigs(taskConfigs);
workflowBuilder.addJob(job, jobCfgBuilder);
// Add dag dependencies
Set<String> children = parentsToChildren.get(namespacedJob);
if (children != null) {
for (String namespacedChild : children) {
String child = TaskUtil.getDenamespacedJobName(origWorkflowName, namespacedChild);
workflowBuilder.addParentChildDependency(job, child);
}
}
}
}
return workflowBuilder.build();
}
|
static Workflow function(HelixManager manager, String origWorkflowName, String newWorkflowName, Date newStartTime) { HelixDataAccessor accessor = manager.getHelixDataAccessor(); PropertyKey.Builder keyBuilder = accessor.keyBuilder(); Map<String, HelixProperty> resourceConfigMap = accessor.getChildValuesMap(keyBuilder.resourceConfigs(), true); if (!resourceConfigMap.containsKey(origWorkflowName)) { LOG.error(STR + origWorkflowName); return null; } if (resourceConfigMap.containsKey(newWorkflowName)) { LOG.error(STR + newWorkflowName + STR); return null; } Map<String, String> workflowConfigsMap = resourceConfigMap.get(origWorkflowName).getRecord().getSimpleFields(); WorkflowConfig.Builder workflowConfigBlder = WorkflowConfig.Builder.fromMap(workflowConfigsMap); if (newStartTime != null) { ScheduleConfig scheduleConfig = ScheduleConfig.oneTimeDelayedStart(newStartTime); workflowConfigBlder.setScheduleConfig(scheduleConfig); } workflowConfigBlder.setTerminable(true); WorkflowConfig workflowConfig = workflowConfigBlder.build(); JobDag jobDag = workflowConfig.getJobDag(); Map<String, Set<String>> parentsToChildren = jobDag.getParentsToChildren(); Workflow.Builder workflowBuilder = new Workflow.Builder(newWorkflowName); workflowBuilder.setWorkflowConfig(workflowConfig); Set<String> namespacedJobs = jobDag.getAllNodes(); for (String namespacedJob : namespacedJobs) { if (resourceConfigMap.containsKey(namespacedJob)) { String job = TaskUtil.getDenamespacedJobName(origWorkflowName, namespacedJob); HelixProperty jobConfig = resourceConfigMap.get(namespacedJob); Map<String, String> jobSimpleFields = jobConfig.getRecord().getSimpleFields(); JobConfig.Builder jobCfgBuilder = JobConfig.Builder.fromMap(jobSimpleFields); jobCfgBuilder.setWorkflow(newWorkflowName); Map<String, Map<String, String>> rawTaskConfigMap = jobConfig.getRecord().getMapFields(); List<TaskConfig> taskConfigs = Lists.newLinkedList(); for (Map<String, String> rawTaskConfig : rawTaskConfigMap.values()) { 
TaskConfig taskConfig = TaskConfig.Builder.from(rawTaskConfig); taskConfigs.add(taskConfig); } jobCfgBuilder.addTaskConfigs(taskConfigs); workflowBuilder.addJob(job, jobCfgBuilder); Set<String> children = parentsToChildren.get(namespacedJob); if (children != null) { for (String namespacedChild : children) { String child = TaskUtil.getDenamespacedJobName(origWorkflowName, namespacedChild); workflowBuilder.addParentChildDependency(job, child); } } } } return workflowBuilder.build(); }
|
/**
* Create a new workflow based on an existing one
* @param manager connection to Helix
* @param origWorkflowName the name of the existing workflow
* @param newWorkflowName the name of the new workflow
* @param newStartTime a provided start time that deviates from the desired start time
* @return the cloned workflow, or null if there was a problem cloning the existing one
*/
|
Create a new workflow based on an existing one
|
cloneWorkflow
|
{
"repo_name": "lei-xia/helix",
"path": "helix-core/src/main/java/org/apache/helix/task/WorkflowDispatcher.java",
"license": "apache-2.0",
"size": 28077
}
|
[
"com.google.common.collect.Lists",
"java.util.Date",
"java.util.List",
"java.util.Map",
"java.util.Set",
"org.apache.helix.HelixDataAccessor",
"org.apache.helix.HelixManager",
"org.apache.helix.HelixProperty",
"org.apache.helix.PropertyKey"
] |
import com.google.common.collect.Lists; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.helix.HelixDataAccessor; import org.apache.helix.HelixManager; import org.apache.helix.HelixProperty; import org.apache.helix.PropertyKey;
|
import com.google.common.collect.*; import java.util.*; import org.apache.helix.*;
|
[
"com.google.common",
"java.util",
"org.apache.helix"
] |
com.google.common; java.util; org.apache.helix;
| 833,775
|
@Test
public void testShutdownOwnExecutor() {
final TimedSemaphore semaphore = new TimedSemaphore(PERIOD, UNIT, LIMIT);
semaphore.shutdown();
assertTrue("Not shutdown", semaphore.isShutdown());
assertTrue("Executor not shutdown", semaphore.getExecutorService()
.isShutdown());
}
|
void function() { final TimedSemaphore semaphore = new TimedSemaphore(PERIOD, UNIT, LIMIT); semaphore.shutdown(); assertTrue(STR, semaphore.isShutdown()); assertTrue(STR, semaphore.getExecutorService() .isShutdown()); }
|
/**
* Tests the shutdown() method if the executor belongs to the semaphore. In
* this case it has to be shut down.
*/
|
Tests the shutdown() method if the executor belongs to the semaphore. In this case it has to be shut down
|
testShutdownOwnExecutor
|
{
"repo_name": "cjug/jigsaw-commons-lang3",
"path": "common-lang-jigsaw/src/test/java/org/apache/commons/lang3/concurrent/TimedSemaphoreTest.java",
"license": "apache-2.0",
"size": 18794
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 2,433,080
|
public Timestamp getUpdated();
public static final String COLUMNNAME_UpdatedBy = "UpdatedBy";
|
Timestamp function(); public static final String COLUMNNAME_UpdatedBy = STR;
|
/** Get Updated.
* Date this record was updated
*/
|
Get Updated. Date this record was updated
|
getUpdated
|
{
"repo_name": "klst-com/metasfresh",
"path": "de.metas.adempiere.adempiere/base/src/main/java-gen/org/compiere/model/I_T_Transaction.java",
"license": "gpl-2.0",
"size": 11820
}
|
[
"java.sql.Timestamp"
] |
import java.sql.Timestamp;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 2,105,819
|
EAttribute getDrivingLicense_DeliveryLocation();
|
EAttribute getDrivingLicense_DeliveryLocation();
|
/**
* Returns the meta object for the attribute '{@link com.opcoach.training.carrental.DrivingLicense#getDeliveryLocation <em>Delivery Location</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Delivery Location</em>'.
* @see com.opcoach.training.carrental.DrivingLicense#getDeliveryLocation()
* @see #getDrivingLicense()
* @generated
*/
|
Returns the meta object for the attribute '<code>com.opcoach.training.carrental.DrivingLicense#getDeliveryLocation Delivery Location</code>'.
|
getDrivingLicense_DeliveryLocation
|
{
"repo_name": "opcoach/training",
"path": "EMF/com.opcoach.training.carrental/src/com/opcoach/training/carrental/CarrentalPackage.java",
"license": "epl-1.0",
"size": 23780
}
|
[
"org.eclipse.emf.ecore.EAttribute"
] |
import org.eclipse.emf.ecore.EAttribute;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,764,366
|
static void throwIfRequested(Binder binder) {
try {
binder.bind(PROPERTY_NAME, BOOLEAN, BIND_HANDLER);
}
catch (BindException ex) {
if (ex.getCause() instanceof UseLegacyConfigProcessingException) {
throw (UseLegacyConfigProcessingException) ex.getCause();
}
throw ex;
}
}
private static class UseLegacyProcessingBindHandler implements BindHandler {
|
static void throwIfRequested(Binder binder) { try { binder.bind(PROPERTY_NAME, BOOLEAN, BIND_HANDLER); } catch (BindException ex) { if (ex.getCause() instanceof UseLegacyConfigProcessingException) { throw (UseLegacyConfigProcessingException) ex.getCause(); } throw ex; } } private static class UseLegacyProcessingBindHandler implements BindHandler {
|
/**
* Throw a new {@link UseLegacyConfigProcessingException} instance if
* {@link #PROPERTY_NAME} binds to {@code true}.
* @param binder the binder to use
*/
|
Throw a new <code>UseLegacyConfigProcessingException</code> instance if <code>#PROPERTY_NAME</code> binds to true
|
throwIfRequested
|
{
"repo_name": "mdeinum/spring-boot",
"path": "spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/config/UseLegacyConfigProcessingException.java",
"license": "apache-2.0",
"size": 3213
}
|
[
"org.springframework.boot.context.properties.bind.BindException",
"org.springframework.boot.context.properties.bind.BindHandler",
"org.springframework.boot.context.properties.bind.Binder"
] |
import org.springframework.boot.context.properties.bind.BindException; import org.springframework.boot.context.properties.bind.BindHandler; import org.springframework.boot.context.properties.bind.Binder;
|
import org.springframework.boot.context.properties.bind.*;
|
[
"org.springframework.boot"
] |
org.springframework.boot;
| 548,924
|
public Map<String, Schema> getMetadataColumnMap() {
return this.metadataColumnMap;
}
|
Map<String, Schema> function() { return this.metadataColumnMap; }
|
/**
* Metadata column mapping to lookup columns specified in input query
*
* @return metadata(schema) column mapping
*/
|
Metadata column mapping to lookup columns specified in input query
|
getMetadataColumnMap
|
{
"repo_name": "jenniferzheng/gobblin",
"path": "gobblin-modules/gobblin-sql/src/main/java/org/apache/gobblin/source/jdbc/JdbcExtractor.java",
"license": "apache-2.0",
"size": 43184
}
|
[
"java.util.Map",
"org.apache.gobblin.source.extractor.schema.Schema"
] |
import java.util.Map; import org.apache.gobblin.source.extractor.schema.Schema;
|
import java.util.*; import org.apache.gobblin.source.extractor.schema.*;
|
[
"java.util",
"org.apache.gobblin"
] |
java.util; org.apache.gobblin;
| 1,280,769
|
public byte[] readBytes(int howmany, boolean allowless)
throws EOFException, FormatException;
|
byte[] function(int howmany, boolean allowless) throws EOFException, FormatException;
|
/**
* Read from the file.
*
* @param howmany the number of bytes to read
* @param allowless if we can return fewer bytes than requested
* @return the array of bytes read.
* @exception FormatException Any IO Exceptions, plus an
* end-of-file encountered after reading some, but now
* enough, bytes when allowless was <code>false</code>
* @exception EOFException Encountered an end-of-file while
* allowless was <code>false</code>, but NO bytes
* had been read.
*/
|
Read from the file
|
readBytes
|
{
"repo_name": "d2fn/passage",
"path": "src/main/java/com/bbn/openmap/io/InputReader.java",
"license": "mit",
"size": 4484
}
|
[
"java.io.EOFException"
] |
import java.io.EOFException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 465,365
|
private GridNioServer<HadoopMessage> resetNioServer() throws IgniteCheckedException {
if (boundTcpPort >= 0)
throw new IgniteCheckedException("Tcp NIO server was already created on port " + boundTcpPort);
IgniteCheckedException lastEx = null;
// If configured TCP port is busy, find first available in range.
for (int port = locPort; port < locPort + locPortRange; port++) {
try {
GridNioServer<HadoopMessage> srvr =
GridNioServer.<HadoopMessage>builder()
.address(locHost)
.port(port)
.listener(srvLsnr)
.logger(log.getLogger(GridNioServer.class))
.selectorCount(selectorsCnt)
.gridName(gridName)
.serverName("hadoop")
.tcpNoDelay(tcpNoDelay)
.directBuffer(directBuf)
.byteOrder(ByteOrder.nativeOrder())
.socketSendBufferSize(sockSndBuf)
.socketReceiveBufferSize(sockRcvBuf)
.sendQueueLimit(msgQueueLimit)
.directMode(false)
.filters(filters())
.build();
boundTcpPort = port;
// Ack Port the TCP server was bound to.
if (log.isInfoEnabled())
log.info("Successfully bound to TCP port [port=" + boundTcpPort +
", locHost=" + locHost + ']');
return srvr;
}
catch (IgniteCheckedException e) {
lastEx = e;
if (log.isDebugEnabled())
log.debug("Failed to bind to local port (will try next port within range) [port=" + port +
", locHost=" + locHost + ']');
}
}
// If free port wasn't found.
throw new IgniteCheckedException("Failed to bind to any port within range [startPort=" + locPort +
", portRange=" + locPortRange + ", locHost=" + locHost + ']', lastEx);
}
|
GridNioServer<HadoopMessage> function() throws IgniteCheckedException { if (boundTcpPort >= 0) throw new IgniteCheckedException(STR + boundTcpPort); IgniteCheckedException lastEx = null; for (int port = locPort; port < locPort + locPortRange; port++) { try { GridNioServer<HadoopMessage> srvr = GridNioServer.<HadoopMessage>builder() .address(locHost) .port(port) .listener(srvLsnr) .logger(log.getLogger(GridNioServer.class)) .selectorCount(selectorsCnt) .gridName(gridName) .serverName(STR) .tcpNoDelay(tcpNoDelay) .directBuffer(directBuf) .byteOrder(ByteOrder.nativeOrder()) .socketSendBufferSize(sockSndBuf) .socketReceiveBufferSize(sockRcvBuf) .sendQueueLimit(msgQueueLimit) .directMode(false) .filters(filters()) .build(); boundTcpPort = port; if (log.isInfoEnabled()) log.info(STR + boundTcpPort + STR + locHost + ']'); return srvr; } catch (IgniteCheckedException e) { lastEx = e; if (log.isDebugEnabled()) log.debug(STR + port + STR + locHost + ']'); } } throw new IgniteCheckedException(STR + locPort + STR + locPortRange + STR + locHost + ']', lastEx); }
|
/**
* Recreates tpcSrvr socket instance.
*
* @return Server instance.
* @throws IgniteCheckedException Thrown if it's not possible to create server.
*/
|
Recreates tpcSrvr socket instance
|
resetNioServer
|
{
"repo_name": "afinka77/ignite",
"path": "modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/communication/HadoopExternalCommunication.java",
"license": "apache-2.0",
"size": 49054
}
|
[
"java.nio.ByteOrder",
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.internal.processors.hadoop.message.HadoopMessage",
"org.apache.ignite.internal.util.nio.GridNioServer"
] |
import java.nio.ByteOrder; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.hadoop.message.HadoopMessage; import org.apache.ignite.internal.util.nio.GridNioServer;
|
import java.nio.*; import org.apache.ignite.*; import org.apache.ignite.internal.processors.hadoop.message.*; import org.apache.ignite.internal.util.nio.*;
|
[
"java.nio",
"org.apache.ignite"
] |
java.nio; org.apache.ignite;
| 132,111
|
@GET
@Path(ALLOW_NEW)
@Produces(PLAIN)
@Description("Whether to allow new workflow runs to be created.")
boolean getAllowNew();
|
@Path(ALLOW_NEW) @Produces(PLAIN) @Description(STR) boolean getAllowNew();
|
/**
* Get whether to allow new workflow runs to be created.
*
* @return The current setting.
*/
|
Get whether to allow new workflow runs to be created
|
getAllowNew
|
{
"repo_name": "taverna/taverna-server",
"path": "server-webapp/src/main/java/org/taverna/server/master/admin/Admin.java",
"license": "lgpl-2.1",
"size": 30447
}
|
[
"javax.ws.rs.Path",
"javax.ws.rs.Produces",
"org.apache.cxf.jaxrs.model.wadl.Description"
] |
import javax.ws.rs.Path; import javax.ws.rs.Produces; import org.apache.cxf.jaxrs.model.wadl.Description;
|
import javax.ws.rs.*; import org.apache.cxf.jaxrs.model.wadl.*;
|
[
"javax.ws",
"org.apache.cxf"
] |
javax.ws; org.apache.cxf;
| 1,906,320
|
default boolean renewAccessTokenPerRequest(OAuthAuthzReqMessageContext oauthAuthzMsgCtx) {
return renewAccessTokenPerRequest();
}
|
default boolean renewAccessTokenPerRequest(OAuthAuthzReqMessageContext oauthAuthzMsgCtx) { return renewAccessTokenPerRequest(); }
|
/**
* Renew access token per request.
* @param oauthAuthzMsgCtx Message context of the token request.
* @return true if new access token per each request.
*/
|
Renew access token per request
|
renewAccessTokenPerRequest
|
{
"repo_name": "darshanasbg/identity-inbound-auth-oauth",
"path": "components/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth2/token/OauthTokenIssuer.java",
"license": "apache-2.0",
"size": 3204
}
|
[
"org.wso2.carbon.identity.oauth2.authz.OAuthAuthzReqMessageContext"
] |
import org.wso2.carbon.identity.oauth2.authz.OAuthAuthzReqMessageContext;
|
import org.wso2.carbon.identity.oauth2.authz.*;
|
[
"org.wso2.carbon"
] |
org.wso2.carbon;
| 2,274,523
|
public void addGridTestTableRecords(TestTable testTable)
{
boolean auto = false;
if (testTable.getField(TestTable.kID) instanceof CounterField)
auto = true;
Category cat = Category.getInstance(BaseTest.class.getName());
int iCount = 0;
cat.debug("Begin Test\n");
cat.debug("Open table.\n");
try {
testTable.open(); // Open the table
} catch (DBException e) {
String strError = e.getMessage();
cat.debug(strError);
fail("Error open");
}
try {
testTable.setKeyArea(DBConstants.MAIN_KEY_AREA);
iCount = 0;
testTable.close();
while (testTable.hasNext()) {
testTable.move(+1);
cat.debug(testTable.toString());
iCount++;
}
} catch (DBException e) {
cat.debug("Error reading through file: Error" + e.getMessage() + "\n");
}
cat.debug("Delete all old records.\n");
iCount = 0;
try {
testTable.close();
while (testTable.hasNext()) {
testTable.move(+1);
testTable.remove();
iCount++;
}
} catch (DBException e) {
cat.debug("Could not delete record: Error" + e.getMessage() + "\n");
cat.debug(testTable.toString());
fail("Error on delete");
}
cat.debug("deleted records " + iCount + "\n");
try {
cat.debug("Add records.\n");
if (!auto)
testTable.setOpenMode(DBConstants.OPEN_REFRESH_AND_LOCK_ON_CHANGE_STRATEGY); // Make sure keys are updated before sync
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("1");
testTable.getField(TestTable.TEST_NAME).setString("A - Excellent Agent");
testTable.getField(TestTable.TEST_KEY).setString("A");
String str = testTable.getField(TestTable.kID).getString();
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("2");
testTable.getField(TestTable.TEST_NAME).setString("B - Good Agent");
testTable.getField(TestTable.TEST_KEY).setString("B");
testTable.getField(TestTable.TEST_CODE).setString("B");
str = testTable.getField(TestTable.kID).getString();
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("3");
testTable.getField(TestTable.TEST_NAME).setString("C - Average Agent");
testTable.getField(TestTable.TEST_KEY).setString("C");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("4");
testTable.getField(TestTable.TEST_NAME).setString("F - Fam Trip Agent");
testTable.getField(TestTable.TEST_KEY).setString("B");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("5");
testTable.getField(TestTable.TEST_NAME).setString("T - Tour Operator");
testTable.getField(TestTable.TEST_KEY).setString("B");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("6");
testTable.getField(TestTable.TEST_NAME).setString("6 - Q Agency");
testTable.getField(TestTable.TEST_KEY).setString("Q");
testTable.getField(TestTable.TEST_CODE).setString("Q");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("7");
testTable.getField(TestTable.TEST_NAME).setString("7 - Q Agency");
testTable.getField(TestTable.TEST_KEY).setString("G");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("8");
testTable.getField(TestTable.TEST_NAME).setString("8 - H Agency");
testTable.getField(TestTable.TEST_KEY).setString("H");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("9");
testTable.getField(TestTable.TEST_NAME).setString("9 - I Agency");
testTable.getField(TestTable.TEST_KEY).setString("I");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("10");
testTable.getField(TestTable.TEST_NAME).setString("10 - J Agency");
testTable.getField(TestTable.TEST_KEY).setString("J");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("11");
testTable.getField(TestTable.TEST_NAME).setString("K - 11 Agency");
testTable.getField(TestTable.TEST_KEY).setString("K");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("12");
testTable.getField(TestTable.TEST_NAME).setString("L - 12 Agency");
testTable.getField(TestTable.TEST_KEY).setString("L");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("13");
testTable.getField(TestTable.TEST_NAME).setString("M - 13 Agency");
testTable.getField(TestTable.TEST_KEY).setString("M");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("14");
testTable.getField(TestTable.TEST_NAME).setString("N - 14 Agency");
testTable.getField(TestTable.TEST_KEY).setString("N");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("15");
testTable.getField(TestTable.TEST_NAME).setString("O - 15 Agency");
testTable.getField(TestTable.TEST_KEY).setString("O");
testTable.add();
testTable.addNew();
if (!auto)
testTable.getField(TestTable.kID).setString("16");
testTable.getField(TestTable.TEST_NAME).setString("P - 16 Agency");
testTable.getField(TestTable.TEST_KEY).setString("P");
testTable.add();
cat.debug("16 records added.\n");
} catch (DBException e) {
cat.debug("Error adding record. Error: " + e.getMessage() + "\n");
cat.debug(testTable.toString());
fail("Error on add");
}
cat.debug("Count keys in both indexes.\n");
try {
testTable.setKeyArea(DBConstants.MAIN_KEY_AREA);
iCount = 0;
testTable.close();
while (testTable.hasNext()) {
testTable.move(+1);
iCount++;
}
cat.debug("Main index: Count: " + iCount + "\n");
testTable.setKeyArea(DBConstants.MAIN_KEY_AREA+1);
iCount = 0;
testTable.close();
while (testTable.hasPrevious()) {
testTable.move(-1);
iCount++;
}
cat.debug("Secondary index: Count: " + iCount + "\n");
testTable.setKeyArea(DBConstants.MAIN_KEY_AREA);
} catch (DBException e) {
cat.debug("Error reading through file: Error" + e.getMessage() + "\n");
cat.debug(testTable.toString());
fail("Error reading through file");
}
}
|
void function(TestTable testTable) { boolean auto = false; if (testTable.getField(TestTable.kID) instanceof CounterField) auto = true; Category cat = Category.getInstance(BaseTest.class.getName()); int iCount = 0; cat.debug(STR); cat.debug(STR); try { testTable.open(); } catch (DBException e) { String strError = e.getMessage(); cat.debug(strError); fail(STR); } try { testTable.setKeyArea(DBConstants.MAIN_KEY_AREA); iCount = 0; testTable.close(); while (testTable.hasNext()) { testTable.move(+1); cat.debug(testTable.toString()); iCount++; } } catch (DBException e) { cat.debug(STR + e.getMessage() + "\n"); } cat.debug(STR); iCount = 0; try { testTable.close(); while (testTable.hasNext()) { testTable.move(+1); testTable.remove(); iCount++; } } catch (DBException e) { cat.debug(STR + e.getMessage() + "\n"); cat.debug(testTable.toString()); fail(STR); } cat.debug(STR + iCount + "\n"); try { cat.debug(STR); if (!auto) testTable.setOpenMode(DBConstants.OPEN_REFRESH_AND_LOCK_ON_CHANGE_STRATEGY); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("1"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("A"); String str = testTable.getField(TestTable.kID).getString(); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("2"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("B"); testTable.getField(TestTable.TEST_CODE).setString("B"); str = testTable.getField(TestTable.kID).getString(); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("3"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("C"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("4"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("B"); 
testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("5"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("B"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("6"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("Q"); testTable.getField(TestTable.TEST_CODE).setString("Q"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("7"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("G"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("8"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("H"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("9"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("I"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("10"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("J"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("11"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("K"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("12"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("L"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("13"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("M"); testTable.add(); testTable.addNew(); if (!auto) 
testTable.getField(TestTable.kID).setString("14"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("N"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("15"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("O"); testTable.add(); testTable.addNew(); if (!auto) testTable.getField(TestTable.kID).setString("16"); testTable.getField(TestTable.TEST_NAME).setString(STR); testTable.getField(TestTable.TEST_KEY).setString("P"); testTable.add(); cat.debug(STR); } catch (DBException e) { cat.debug(STR + e.getMessage() + "\n"); cat.debug(testTable.toString()); fail(STR); } cat.debug(STR); try { testTable.setKeyArea(DBConstants.MAIN_KEY_AREA); iCount = 0; testTable.close(); while (testTable.hasNext()) { testTable.move(+1); iCount++; } cat.debug(STR + iCount + "\n"); testTable.setKeyArea(DBConstants.MAIN_KEY_AREA+1); iCount = 0; testTable.close(); while (testTable.hasPrevious()) { testTable.move(-1); iCount++; } cat.debug(STR + iCount + "\n"); testTable.setKeyArea(DBConstants.MAIN_KEY_AREA); } catch (DBException e) { cat.debug(STR + e.getMessage() + "\n"); cat.debug(testTable.toString()); fail(STR); } }
|
/**
* Add the test table records.
*/
|
Add the test table records
|
addGridTestTableRecords
|
{
"repo_name": "jbundle/jbundle",
"path": "app/test/suite/db/base/src/main/java/org/jbundle/test/manual/test/db/thick/BaseTest.java",
"license": "gpl-3.0",
"size": 18646
}
|
[
"org.apache.log4j.Category",
"org.jbundle.app.test.test.db.TestTable",
"org.jbundle.base.field.CounterField",
"org.jbundle.base.model.DBConstants",
"org.jbundle.model.DBException"
] |
import org.apache.log4j.Category; import org.jbundle.app.test.test.db.TestTable; import org.jbundle.base.field.CounterField; import org.jbundle.base.model.DBConstants; import org.jbundle.model.DBException;
|
import org.apache.log4j.*; import org.jbundle.app.test.test.db.*; import org.jbundle.base.field.*; import org.jbundle.base.model.*; import org.jbundle.model.*;
|
[
"org.apache.log4j",
"org.jbundle.app",
"org.jbundle.base",
"org.jbundle.model"
] |
org.apache.log4j; org.jbundle.app; org.jbundle.base; org.jbundle.model;
| 2,740,326
|
/**
 * Returns the greatest value present in {@code array}.
 *
 * @param array a <i>nonempty</i> array of {@code char} values
 * @return the value present in {@code array} that is greater than or equal to
 *     every other value in the array
 * @throws IllegalArgumentException if {@code array} is empty
 */
public static char max(char... array) {
  checkArgument(array.length > 0);
  char max = array[0];
  // Comparing array[0] against itself on the first iteration is harmless.
  for (char value : array) {
    if (value > max) {
      max = value;
    }
  }
  return max;
}
/**
* Returns the value nearest to {@code value} which is within the closed range {@code [min..max]}.
*
* <p>If {@code value} is within the range {@code [min..max]}, {@code value} is returned
* unchanged. If {@code value} is less than {@code min}, {@code min} is returned, and if
* {@code value} is greater than {@code max}, {@code max} is returned.
*
* @param value the {@code char} value to constrain
* @param min the lower bound (inclusive) of the range to constrain {@code value} to
* @param max the upper bound (inclusive) of the range to constrain {@code value} to
* @throws IllegalArgumentException if {@code min > max}
|
static char function(char... array) { checkArgument(array.length > 0); char max = array[0]; for (int i = 1; i < array.length; i++) { if (array[i] > max) { max = array[i]; } } return max; } /** * Returns the value nearest to {@code value} which is within the closed range {@code [min..max]}. * * <p>If {@code value} is within the range {@code [min..max]}, {@code value} is returned * unchanged. If {@code value} is less than {@code min}, {@code min} is returned, and if * {@code value} is greater than {@code max}, {@code max} is returned. * * @param value the {@code char} value to constrain * @param min the lower bound (inclusive) of the range to constrain {@code value} to * @param max the upper bound (inclusive) of the range to constrain {@code value} to * @throws IllegalArgumentException if {@code min > max}
|
/**
* Returns the greatest value present in {@code array}.
*
* @param array a <i>nonempty</i> array of {@code char} values
* @return the value present in {@code array} that is greater than or equal to every other value
* in the array
* @throws IllegalArgumentException if {@code array} is empty
*/
|
Returns the greatest value present in array
|
max
|
{
"repo_name": "DavesMan/guava",
"path": "guava/src/com/google/common/primitives/Chars.java",
"license": "apache-2.0",
"size": 20134
}
|
[
"com.google.common.base.Preconditions"
] |
import com.google.common.base.Preconditions;
|
import com.google.common.base.*;
|
[
"com.google.common"
] |
com.google.common;
| 2,065,179
|
/**
 * Shows the end of the document in the main viewer and in every other
 * registered viewer of the same document, dropping weak references whose
 * viewers have already been garbage-collected.
 */
private void revealEndOfDocument() {
    viewer.revealEndOfDocument();
    Iterator<WeakReference<IScriptConsoleViewer2ForDocumentListener>> it = otherViewers.iterator();
    while (it.hasNext()) {
        IScriptConsoleViewer2ForDocumentListener other = it.next().get();
        if (other == null) {
            // Referent was collected: remove the stale weak reference.
            it.remove();
        } else {
            other.revealEndOfDocument();
        }
    }
}
|
void function() { viewer.revealEndOfDocument(); for (Iterator<WeakReference<IScriptConsoleViewer2ForDocumentListener>> it = otherViewers.iterator(); it .hasNext();) { WeakReference<IScriptConsoleViewer2ForDocumentListener> ref = it.next(); IScriptConsoleViewer2ForDocumentListener v = ref.get(); if (v == null) { it.remove(); } else { v.revealEndOfDocument(); } } }
|
/**
* Shows the end of the document for the main viewer and all the related viewer for the same document.
*/
|
Shows the end of the document for the main viewer and all the related viewer for the same document
|
revealEndOfDocument
|
{
"repo_name": "akurtakov/Pydev",
"path": "plugins/org.python.pydev.shared_interactive_console/src/org/python/pydev/shared_interactive_console/console/ui/internal/ScriptConsoleDocumentListener.java",
"license": "epl-1.0",
"size": 41497
}
|
[
"java.lang.ref.WeakReference",
"java.util.Iterator"
] |
import java.lang.ref.WeakReference; import java.util.Iterator;
|
import java.lang.ref.*; import java.util.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 2,214,781
|
/**
 * Sets the node name and stamps the node with the current time.
 *
 * @param name the name to set
 */
public void setName(String name) {
    // Capture the modification time once, then apply both updates.
    java.util.Date now = Calendar.getInstance().getTime();
    this.name = name;
    this.lastUpdated = now;
}
|
void function(String name) { this.name = name; this.lastUpdated = Calendar.getInstance().getTime(); }
|
/**
* Sets the node name.
* @param name the name to set
*/
|
Sets the node name
|
setName
|
{
"repo_name": "pravinw/openhab",
"path": "bundles/binding/org.openhab.binding.zwave/src/main/java/org/openhab/binding/zwave/internal/protocol/ZWaveNode.java",
"license": "epl-1.0",
"size": 19865
}
|
[
"java.util.Calendar"
] |
import java.util.Calendar;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,450,879
|
/**
 * Returns all DataSets where acquisitionDate is set to a value after
 * {@code _minDate}.
 *
 * @param _minDate the lower bound for acquisitionDate
 * @return the matching DataSets; NOTE(review): whether an empty list or
 *         {@code null} is returned when nothing matches depends on the
 *         implementation — confirm before relying on it
 */
public List<DataSet> getDataSetsWithAcquisitionDateAfter(java.util.Date _minDate);
|
List<DataSet> function(java.util.Date _minDate);
|
/**
* Returns all DataSets where acquisitionDate is set to a value after '_minDate'.
*/
|
Returns all DataSets where acquisitionDate is set to a value after '_minDate'
|
getDataSetsWithAcquisitionDateAfter
|
{
"repo_name": "CBSti/csv2DB",
"path": "src/main/java/de/peterspan/csv2db/domain/dao/IDBOperationsBase.java",
"license": "gpl-3.0",
"size": 4316
}
|
[
"de.peterspan.csv2db.domain.entities.DataSet",
"java.util.List"
] |
import de.peterspan.csv2db.domain.entities.DataSet; import java.util.List;
|
import de.peterspan.csv2db.domain.entities.*; import java.util.*;
|
[
"de.peterspan.csv2db",
"java.util"
] |
de.peterspan.csv2db; java.util;
| 516,246
|
/** Given a diskStoreName and an array of disk directories:
 *  1) run the offline validator
 *  2) run the offline compactor (with this diskStore's configured max oplog size)
 *  3) run the offline validator again
 *  then check that the validator returns the same results before and
 *  after the compactor.
 *
 * @param diskStoreName The name of a diskStore.
 * @param diskDirArr An array of Files, each element being a disk directory.
 */
public static void runOfflineValAndCompaction(String diskStoreName,
    File[] diskDirArr) {
  // Find the maxOplogSize configured for this diskStore by its position in
  // the DiskStorePrms.names vector; fall back to the product default when
  // no size is configured for it.
  Vector<String> diskStoreNames = TestConfig.tab().vecAt(DiskStorePrms.names);
  int i = diskStoreNames.indexOf(diskStoreName);
  Vector<String> maxOpLogSizes = TestConfig.tab().vecAt(DiskStorePrms.maxOplogSize, new HydraVector());
  long opLogSize = 0;
  if ((maxOpLogSizes.size() == 0) || (i < 0) || (i >= maxOpLogSizes.size())) {
    opLogSize = DiskStoreFactory.DEFAULT_MAX_OPLOG_SIZE;
  } else {
    opLogSize = Long.valueOf(maxOpLogSizes.get(i));
  }
  try {
    // run and save validator results before compaction
    // (tmp[0] is the raw validator output string, tmp[1] the parsed
    //  per-region entryCount/bucketCount map)
    Object[] tmp = runOfflineValidate(diskStoreName, diskDirArr);
    String beforeCompactionStr = (String)tmp[0];
    Map<String, Map<String, Integer>> beforeCompactionMap = (Map<String, Map<String, Integer>>)tmp[1];
    Log.getLogWriter().info("Before compaction, results map is " + beforeCompactionMap);
    // run compaction
    runOfflineCompaction(diskStoreName, diskDirArr, opLogSize);
    // run and save validator results after compaction
    tmp = runOfflineValidate(diskStoreName, diskDirArr);
    String afterCompactionStr = (String)tmp[0];
    Map<String, Map<String, Integer>> afterCompactionMap = (Map<String, Map<String, Integer>>)tmp[1];
    Log.getLogWriter().info("After compaction, results map is " + afterCompactionMap);
    // check that before results is the same as after results; this compares the entryCount and
    // bucketCount for each region in the disk files
    if (!beforeCompactionMap.equals(afterCompactionMap)) {
      throw new TestException("Before compaction, validator results was " + beforeCompactionStr +
          " and after compaction, validator results is " + afterCompactionStr +
          ", but expect validator results for entryCount/bucketCount to be equal");
    }
  } catch (Exception e) {
    // Any failure (validator, compactor, or comparison setup) fails the test
    // with the full stack trace preserved in the message.
    throw new TestException(TestHelper.getStackTrace(e));
  }
}
|
static void function(String diskStoreName, File[] diskDirArr) { Vector<String> diskStoreNames = TestConfig.tab().vecAt(DiskStorePrms.names); int i = diskStoreNames.indexOf(diskStoreName); Vector<String> maxOpLogSizes = TestConfig.tab().vecAt(DiskStorePrms.maxOplogSize, new HydraVector()); long opLogSize = 0; if ((maxOpLogSizes.size() == 0) (i < 0) (i >= maxOpLogSizes.size())) { opLogSize = DiskStoreFactory.DEFAULT_MAX_OPLOG_SIZE; } else { opLogSize = Long.valueOf(maxOpLogSizes.get(i)); } try { Object[] tmp = runOfflineValidate(diskStoreName, diskDirArr); String beforeCompactionStr = (String)tmp[0]; Map<String, Map<String, Integer>> beforeCompactionMap = (Map<String, Map<String, Integer>>)tmp[1]; Log.getLogWriter().info(STR + beforeCompactionMap); runOfflineCompaction(diskStoreName, diskDirArr, opLogSize); tmp = runOfflineValidate(diskStoreName, diskDirArr); String afterCompactionStr = (String)tmp[0]; Map<String, Map<String, Integer>> afterCompactionMap = (Map<String, Map<String, Integer>>)tmp[1]; Log.getLogWriter().info(STR + afterCompactionMap); if (!beforeCompactionMap.equals(afterCompactionMap)) { throw new TestException(STR + beforeCompactionStr + STR + afterCompactionStr + STR); } } catch (Exception e) { throw new TestException(TestHelper.getStackTrace(e)); } }
|
/** Given a diskStoreName and an array of disk directories:
* 1) run the offline validator
* 2) run the offline compactor
* 3) run the offline validator again
* then check that the validator returns the same results before and
* after the compactor.
*
* @param diskStoreName The name of a diskStore.
* @param diskDirArr An array of Files, each element being a disk directory.
*/
|
Given a diskStoreName and an array of disk directories: 1) run the offline validator 2) run the offline compactor 3) run the offline validator again then check that the validator returns the same results before and after the compactor
|
runOfflineValAndCompaction
|
{
"repo_name": "SnappyDataInc/snappy-store",
"path": "tests/core/src/main/java/util/PersistenceUtil.java",
"license": "apache-2.0",
"size": 36051
}
|
[
"com.gemstone.gemfire.cache.DiskStoreFactory",
"java.io.File",
"java.util.Map",
"java.util.Vector"
] |
import com.gemstone.gemfire.cache.DiskStoreFactory; import java.io.File; import java.util.Map; import java.util.Vector;
|
import com.gemstone.gemfire.cache.*; import java.io.*; import java.util.*;
|
[
"com.gemstone.gemfire",
"java.io",
"java.util"
] |
com.gemstone.gemfire; java.io; java.util;
| 2,091,605
|
/**
 * Saves the current listed folder into the instance state.
 */
@Override
public void onSaveInstanceState(Bundle state) {
    super.onSaveInstanceState(state);
    // Persist the folder being listed so it can be restored on recreation.
    state.putParcelable(KEY_FILE, mFile);
}
|
void function (Bundle outState) { super.onSaveInstanceState(outState); outState.putParcelable(KEY_FILE, mFile); }
|
/**
* Saves the current listed folder.
*/
|
Saves the current listed folder
|
onSaveInstanceState
|
{
"repo_name": "varesa/owncloud_android",
"path": "src/com/owncloud/android/ui/fragment/OCFileListFragment.java",
"license": "gpl-2.0",
"size": 15101
}
|
[
"android.os.Bundle"
] |
import android.os.Bundle;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 1,430,560
|
/**
 * Displays the usage message on standard output.
 */
private static void printUsage() throws IOException {
    String usage = TOOLS_RESOURCES.getString("WinstoneControl.Usage");
    System.out.println(usage);
}
|
static void function() throws IOException { System.out.println(TOOLS_RESOURCES.getString(STR)); }
|
/**
* Displays the usage message
*/
|
Displays the usage message
|
printUsage
|
{
"repo_name": "KingBowser/hatter-source-code",
"path": "tools/winstone/src/java/winstone/tools/WinstoneControl.java",
"license": "bsd-3-clause",
"size": 3644
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,891,294
|
/**
 * Checks if the user can apply for membership in the VO; the decision is
 * based on the VO's extendMembershipRules (the doNotAllowLoa key) and the
 * supplied level of assurance.
 *
 * @param sess perun session
 * @param vo   VO the user wants to join
 * @param user user applying for membership
 * @param loa  level of assurance of the user's identity
 * @return true if the user can apply for membership in the VO
 */
boolean canBeMember(PerunSession sess, Vo vo, User user, String loa);
|
boolean canBeMember(PerunSession sess, Vo vo, User user, String loa);
|
/**
* Checks if the user can apply membership to the VO, it decides based on extendMembershipRules on the doNotAllowLoa key
* @param sess
* @param vo
* @param user
* @param loa
* @return true if user can be apply for membership to the VO
* @throws InternalErrorException
*/
|
Checks if the user can apply membership to the VO, it decides based on extendMembershipRules on the doNotAllowLoa key
|
canBeMember
|
{
"repo_name": "balcirakpeter/perun",
"path": "perun-core/src/main/java/cz/metacentrum/perun/core/bl/MembersManagerBl.java",
"license": "bsd-2-clause",
"size": 74930
}
|
[
"cz.metacentrum.perun.core.api.PerunSession",
"cz.metacentrum.perun.core.api.User",
"cz.metacentrum.perun.core.api.Vo"
] |
import cz.metacentrum.perun.core.api.PerunSession; import cz.metacentrum.perun.core.api.User; import cz.metacentrum.perun.core.api.Vo;
|
import cz.metacentrum.perun.core.api.*;
|
[
"cz.metacentrum.perun"
] |
cz.metacentrum.perun;
| 1,151,354
|
/**
 * Basic test of SQL without a candidate class but with parameters specified in a Map.
 * Persists one Person, then runs a count(*) SQL query with a positional
 * parameter supplied via executeWithMap and checks the single result row.
 */
public void testWithoutCandidatesWithParametersInMap()
throws Exception
{
    PersistenceManager pm = pmf.getPersistenceManager();
    Transaction tx = pm.currentTransaction();
    try
    {
        // insert a new element for table person
        tx.begin();
        Person p = new Person(1, "Nobody", "Nobody", "nobody@datanucleus.org");
        pm.makePersistent(p);
        tx.commit();

        tx.begin();
        String sqlText = "SELECT count(*) FROM PERSON WHERE EMAIL_ADDRESS = ?";
        Query query = pm.newQuery("javax.jdo.query.SQL", sqlText);
        // Positional SQL parameters are keyed by their 1-based index.
        Map<Integer, Object> params = new HashMap<Integer, Object>();
        params.put(Integer.valueOf(1), "nobody@datanucleus.org");
        List results = (List) query.executeWithMap(params);
        Iterator iter = results.iterator();
        while (iter.hasNext())
        {
            Object obj = iter.next();
            if (obj.getClass().isArray())
            {
                fail("SQL Query selecting count(*) has returned an Object[] yet should have been Object");
            }
            assertTrue("SQL Query selecting count(*) has returned an object of the wrong type : was " + obj.getClass().getName() + " but should have been Number or subclass", obj instanceof Number);
            Number value = (Number)obj;
            // JUnit's assertEquals takes (message, expected, actual); the expected
            // row count is 1. (Previously expected/actual were swapped, which
            // produced misleading failure messages.)
            assertEquals("SQL Query selecting count(*) returned the wrong value : was " + value.longValue() + " but should have been 1", 1, value.longValue());
        }
        tx.commit();
    }
    catch (Exception e)
    {
        e.printStackTrace();
        LOG.error(e);
        fail("Exception thrown while running SQL query with parameters : " + e.getMessage());
    }
    finally
    {
        // Always roll back any open txn, release the PM and clean test data.
        if (tx.isActive())
        {
            tx.rollback();
        }
        pm.close();
        clean(Person.class);
    }
}
|
void function() throws Exception { PersistenceManager pm = pmf.getPersistenceManager(); Transaction tx = pm.currentTransaction(); try { tx.begin(); Person p = new Person(1, STR, STR, STR); pm.makePersistent(p); tx.commit(); tx.begin(); String sqlText = STR; Query query = pm.newQuery(STR, sqlText); Map params = new HashMap(); params.put(Integer.valueOf("1"), STR); List results = (List) query.executeWithMap(params); Iterator iter = results.iterator(); while (iter.hasNext()) { Object obj = iter.next(); if (obj.getClass().isArray()) { fail(STR); } assertTrue(STR + obj.getClass().getName() + STR, obj instanceof Number); Number value = (Number)obj; assertEquals(STR + value.longValue() + STR, value.longValue(), 1); } tx.commit(); } catch (Exception e) { e.printStackTrace(); LOG.error(e); fail(STR + e.getMessage()); } finally { if (tx.isActive()) { tx.rollback(); } pm.close(); clean(Person.class); } }
|
/**
* Basic test of SQL without a candidate class but with parameters specified in a Map.
*/
|
Basic test of SQL without a candidate class but with parameters specified in a Map
|
testWithoutCandidatesWithParametersInMap
|
{
"repo_name": "datanucleus/tests",
"path": "jdo/rdbms/src/test/org/datanucleus/tests/datastore/SQLQueryTest.java",
"license": "apache-2.0",
"size": 61369
}
|
[
"java.util.HashMap",
"java.util.Iterator",
"java.util.List",
"java.util.Map",
"javax.jdo.PersistenceManager",
"javax.jdo.Query",
"javax.jdo.Transaction",
"org.datanucleus.samples.models.company.Person"
] |
import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.jdo.PersistenceManager; import javax.jdo.Query; import javax.jdo.Transaction; import org.datanucleus.samples.models.company.Person;
|
import java.util.*; import javax.jdo.*; import org.datanucleus.samples.models.company.*;
|
[
"java.util",
"javax.jdo",
"org.datanucleus.samples"
] |
java.util; javax.jdo; org.datanucleus.samples;
| 689,643
|
/**
 * Declared method names must not include methods inherited from the parent.
 * see http://code.alibabatech.com/jira/browse/DUBBO-571
 */
@Test
public void test_getDeclaredMethodNames_ContainExtendsParentMethods() throws Exception {
    // NOTE(review): the first assertion calls getMethodNames(), not
    // getDeclaredMethodNames() as the test name suggests — confirm intentional.
    assertArrayEquals(new String[]{"hello",}, Wrapper.getWrapper(Parent1.class).getMethodNames());
    // Son declares no methods of its own, so its declared-name list is empty.
    assertArrayEquals(new String[]{}, Wrapper.getWrapper(Son.class).getDeclaredMethodNames());
}
|
void function() throws Exception { assertArrayEquals(new String[]{"hello",}, Wrapper.getWrapper(Parent1.class).getMethodNames()); assertArrayEquals(new String[]{}, Wrapper.getWrapper(Son.class).getDeclaredMethodNames()); }
|
/**
* see http://code.alibabatech.com/jira/browse/DUBBO-571
*/
|
see HREF
|
test_getDeclaredMethodNames_ContainExtendsParentMethods
|
{
"repo_name": "dadarom/dubbo",
"path": "dubbo-common/src/test/java/com/alibaba/dubbo/common/bytecode/WrapperTest.java",
"license": "apache-2.0",
"size": 4260
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 1,128,729
|
/**
 * Gets the local host address, resolving and caching it on first use.
 * Synchronized so the one-time initialization cannot race.
 *
 * @return Address representing local host.
 * @throws IOException If attempt to get local host failed.
 */
public static synchronized InetAddress getLocalHost() throws IOException {
    if (locHost != null) {
        return locHost;
    }
    // First call: resolve and cache it.
    resetLocalHost();
    return locHost;
}
|
static synchronized InetAddress function() throws IOException { if (locHost == null) resetLocalHost(); return locHost; }
|
/**
* Gets local host. Implementation will first attempt to get a non-loopback
* address. If that fails, then loopback address will be returned.
* <p>
* Note that this method is synchronized to make sure that local host
* initialization happens only once.
*
* @return Address representing local host.
* @throws IOException If attempt to get local host failed.
*/
|
Gets local host. Implementation will first attempt to get a non-loopback address. If that fails, then loopback address will be returned. Note that this method is synchronized to make sure that local host initialization happens only once
|
getLocalHost
|
{
"repo_name": "ascherbakoff/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 385578
}
|
[
"java.io.IOException",
"java.net.InetAddress"
] |
import java.io.IOException; import java.net.InetAddress;
|
import java.io.*; import java.net.*;
|
[
"java.io",
"java.net"
] |
java.io; java.net;
| 2,273,512
|
/**
 * Tests that the configuration settings are used to create the
 * {@link ClusterSpecification}.
 */
@Test
public void testConfigurationClusterSpecification() throws Exception {
    final int masterMemoryMb = 1337;
    final int taskManagerMemoryMb = 7331;
    final int taskSlots = 42;

    // Seed a configuration with explicit memory/slot settings.
    final Configuration flinkConfig = new Configuration();
    flinkConfig.set(JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(masterMemoryMb));
    flinkConfig.set(TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(taskManagerMemoryMb));
    flinkConfig.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, taskSlots);

    final String[] args = {"-e", KubernetesSessionClusterExecutor.NAME};
    final KubernetesSessionCli cli =
            new KubernetesSessionCli(flinkConfig, tmp.getRoot().getAbsolutePath());

    // Derive the cluster specification through the CLI's effective configuration.
    final Configuration effectiveConfig = cli.getEffectiveConfiguration(args);
    final ClusterClientFactory<String> factory = getClusterClientFactory(effectiveConfig);
    final ClusterSpecification spec = factory.getClusterSpecification(effectiveConfig);

    assertThat(spec.getMasterMemoryMB(), is(masterMemoryMb));
    assertThat(spec.getTaskManagerMemoryMB(), is(taskManagerMemoryMb));
    assertThat(spec.getSlotsPerTaskManager(), is(taskSlots));
}
|
void function() throws Exception { final Configuration configuration = new Configuration(); final int jobManagerMemory = 1337; configuration.set(JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(jobManagerMemory)); final int taskManagerMemory = 7331; configuration.set(TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(taskManagerMemory)); final int slotsPerTaskManager = 42; configuration.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, slotsPerTaskManager); final String[] args = {"-e", KubernetesSessionClusterExecutor.NAME}; final KubernetesSessionCli cli = new KubernetesSessionCli( configuration, tmp.getRoot().getAbsolutePath()); Configuration executorConfig = cli.getEffectiveConfiguration(args); ClusterClientFactory<String> clientFactory = getClusterClientFactory(executorConfig); ClusterSpecification clusterSpecification = clientFactory.getClusterSpecification(executorConfig); assertThat(clusterSpecification.getMasterMemoryMB(), is(jobManagerMemory)); assertThat(clusterSpecification.getTaskManagerMemoryMB(), is(taskManagerMemory)); assertThat(clusterSpecification.getSlotsPerTaskManager(), is(slotsPerTaskManager)); }
|
/**
* Tests that the configuration settings are used to create the
* {@link ClusterSpecification}.
*/
|
Tests that the configuration settings are used to create the <code>ClusterSpecification</code>
|
testConfigurationClusterSpecification
|
{
"repo_name": "tzulitai/flink",
"path": "flink-kubernetes/src/test/java/org/apache/flink/kubernetes/cli/KubernetesSessionCliTest.java",
"license": "apache-2.0",
"size": 12366
}
|
[
"org.apache.flink.client.deployment.ClusterClientFactory",
"org.apache.flink.client.deployment.ClusterSpecification",
"org.apache.flink.configuration.Configuration",
"org.apache.flink.configuration.JobManagerOptions",
"org.apache.flink.configuration.MemorySize",
"org.apache.flink.configuration.TaskManagerOptions",
"org.apache.flink.kubernetes.executors.KubernetesSessionClusterExecutor",
"org.hamcrest.Matchers",
"org.junit.Assert"
] |
import org.apache.flink.client.deployment.ClusterClientFactory; import org.apache.flink.client.deployment.ClusterSpecification; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.configuration.MemorySize; import org.apache.flink.configuration.TaskManagerOptions; import org.apache.flink.kubernetes.executors.KubernetesSessionClusterExecutor; import org.hamcrest.Matchers; import org.junit.Assert;
|
import org.apache.flink.client.deployment.*; import org.apache.flink.configuration.*; import org.apache.flink.kubernetes.executors.*; import org.hamcrest.*; import org.junit.*;
|
[
"org.apache.flink",
"org.hamcrest",
"org.junit"
] |
org.apache.flink; org.hamcrest; org.junit;
| 13,326
|
/**
 * Check the give Namespace name availability.
 *
 * @param parameters Parameters to check availability of the given Namespace name.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the Result of the CheckNameAvailability operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<CheckNameAvailabilityResultInner> checkNameAvailabilityAsync(
    CheckNameAvailabilityParameter parameters) {
    return checkNameAvailabilityWithResponseAsync(parameters)
        .flatMap(
            (Response<CheckNameAvailabilityResultInner> response) -> {
                // Emit the payload when present, otherwise complete empty.
                CheckNameAvailabilityResultInner body = response.getValue();
                return body != null ? Mono.just(body) : Mono.<CheckNameAvailabilityResultInner>empty();
            });
}
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<CheckNameAvailabilityResultInner> function( CheckNameAvailabilityParameter parameters) { return checkNameAvailabilityWithResponseAsync(parameters) .flatMap( (Response<CheckNameAvailabilityResultInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); }
|
/**
* Check the give Namespace name availability.
*
* @param parameters Parameters to check availability of the given Namespace name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the Result of the CheckNameAvailability operation.
*/
|
Check the give Namespace name availability
|
checkNameAvailabilityAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanagerhybrid/azure-resourcemanager-eventhubs/src/main/java/com/azure/resourcemanager/eventhubs/implementation/NamespacesClientImpl.java",
"license": "mit",
"size": 172558
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.resourcemanager.eventhubs.fluent.models.CheckNameAvailabilityResultInner",
"com.azure.resourcemanager.eventhubs.models.CheckNameAvailabilityParameter"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.resourcemanager.eventhubs.fluent.models.CheckNameAvailabilityResultInner; import com.azure.resourcemanager.eventhubs.models.CheckNameAvailabilityParameter;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.eventhubs.fluent.models.*; import com.azure.resourcemanager.eventhubs.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 1,032,300
|
/**
 * Replace a view with another, inserting the new view at the old view's
 * position inside its parent.
 *
 * <p>Silently does nothing if either view is null or if {@code oldView}
 * is not currently attached to a {@link ViewGroup}.
 *
 * @param oldView the view to remove
 * @param newView the view to insert in its place
 */
public void replaceViews(View oldView, View newView) {
    if (oldView == null || newView == null) {
        return;
    }
    // Guard against a detached oldView: getParent() returns null (or a
    // non-ViewGroup parent) in that case, which previously crashed here.
    if (!(oldView.getParent() instanceof ViewGroup)) {
        return;
    }
    ViewGroup parent = (ViewGroup) oldView.getParent();
    int index = parent.indexOfChild(oldView);
    parent.removeView(oldView);
    parent.addView(newView, index);
}
|
void function(View oldView, View newView) { if (oldView == null newView == null) { return; } ViewGroup parent = (ViewGroup) oldView.getParent(); int index = parent.indexOfChild(oldView); parent.removeView(oldView); parent.addView(newView, index); }
|
/**
* Replace a view with another
*/
|
Replace a view with another
|
replaceViews
|
{
"repo_name": "0359xiaodong/CheckListView",
"path": "checklistview/src/main/java/it/feio/android/checklistview/ChecklistManager.java",
"license": "apache-2.0",
"size": 11584
}
|
[
"android.view.View",
"android.view.ViewGroup"
] |
import android.view.View; import android.view.ViewGroup;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 284,281
|
/**
 * Sets a short textual description for this products node.
 * Does nothing (and fires no change event) when the value is unchanged.
 *
 * @param description a description, can be <code>null</code>
 */
public void setDescription(String description) {
    if (ObjectUtils.equalObjects(this.description, description)) {
        // Unchanged: avoid firing a spurious change event.
        return;
    }
    this.description = description;
    fireProductNodeChanged(PROPERTY_NAME_DESCRIPTION);
    setModified(true);
}
|
void function(String description) { if (!ObjectUtils.equalObjects(this.description, description)) { this.description = description; fireProductNodeChanged(PROPERTY_NAME_DESCRIPTION); setModified(true); } }
|
/**
* Sets a short textual description for this products node.
*
* @param description a description, can be <code>null</code>
*/
|
Sets a short textual description for this products node
|
setDescription
|
{
"repo_name": "lveci/nest",
"path": "beam/beam-core/src/main/java/org/esa/beam/framework/datamodel/ProductNode.java",
"license": "gpl-3.0",
"size": 14491
}
|
[
"org.esa.beam.util.ObjectUtils"
] |
import org.esa.beam.util.ObjectUtils;
|
import org.esa.beam.util.*;
|
[
"org.esa.beam"
] |
org.esa.beam;
| 816,288
|
/**
 * Opens the list of the editor tabs by clicking the tab-list button.
 *
 * <p>Note! This is only possible if the opened tabs don't fit in the tab
 * bar (the button is shown only then — TODO confirm against the UI).
 */
public void openTabList() {
    seleniumWebDriverHelper.waitAndClick(By.id(TAB_LIST_BUTTON));
}
|
void function() { seleniumWebDriverHelper.waitAndClick(By.id(TAB_LIST_BUTTON)); }
|
/**
* Opens list of the tabs.
*
* <p>Note! This is possible if opened tabs don't fit in the tab bar.
*/
|
Opens list of the tabs. Note! This is possible if opened tabs don't fit in the tab bar
|
openTabList
|
{
"repo_name": "davidfestal/che",
"path": "selenium/che-selenium-test/src/main/java/org/eclipse/che/selenium/pageobject/CodenvyEditor.java",
"license": "epl-1.0",
"size": 90896
}
|
[
"org.openqa.selenium.By"
] |
import org.openqa.selenium.By;
|
import org.openqa.selenium.*;
|
[
"org.openqa.selenium"
] |
org.openqa.selenium;
| 162,938
|
/**
 * Parses the input string specified in the constructor.
 *
 * <p>Several {@code case} labels below deliberately fall through to share
 * handling with the next label; do not add {@code break}s without checking
 * the rrdfetch time-specification grammar.
 *
 * @return Object representing parsed date/time.
 * @throws RrdException Thrown if the date string cannot be parsed.
 */
public TimeSpec parse() throws RrdException {
    long now = Util.getTime();
    int hr = 0;
    // Seed the spec with the current local time; fields are overwritten below.
    spec.localtime(now);
    token = scanner.nextToken();
    switch (token.id) {
        case TimeToken.PLUS:
        case TimeToken.MINUS:
            break; // bare offset expression; handled by the +/- loop below
        case TimeToken.START:
            spec.type = TimeSpec.TYPE_START;
            // deliberate fall-through: shares the reference handling below
        case TimeToken.END:
            if (spec.type != TimeSpec.TYPE_START) {
                spec.type = TimeSpec.TYPE_END;
            }
            // deliberate fall-through
        case TimeToken.EPOCH:
        case TimeToken.NOW:
            int time_reference = token.id;
            // 'start'/'end'/'epoch' zero the date/time fields; 'now' keeps them.
            if (token.id != TimeToken.NOW) {
                spec.year = spec.month = spec.day = spec.hour = spec.min = spec.sec = 0;
            }
            token = scanner.nextToken();
            if (token.id == TimeToken.PLUS || token.id == TimeToken.MINUS) {
                break;
            }
            if (time_reference == TimeToken.START || time_reference == TimeToken.END) {
                throw new RrdException("Words 'start' or 'end' MUST be followed by +|- offset");
            }
            else if (token.id != TimeToken.EOF) {
                throw new RrdException("If 'now' or 'epoch' is followed by a token it must be +|- offset");
            }
            break;
        case TimeToken.NUMBER:
            timeOfDay();
            //Keep going; there might be a date after the time of day, which day() will pick up
        case TimeToken.JAN:
        case TimeToken.FEB:
        case TimeToken.MAR:
        case TimeToken.APR:
        case TimeToken.MAY:
        case TimeToken.JUN:
        case TimeToken.JUL:
        case TimeToken.AUG:
        case TimeToken.SEP:
        case TimeToken.OCT:
        case TimeToken.NOV:
        case TimeToken.DEC:
        case TimeToken.TODAY:
        case TimeToken.YESTERDAY:
        case TimeToken.TOMORROW:
            day();
            if (token.id != TimeToken.NUMBER) {
                break;
            }
            //Allows (but does not require) the time to be specified after the day. This extends the rrdfetch specifiation
            timeOfDay();
            break;
        case TimeToken.TEATIME:
            hr += 4;
            // deliberate fall-through: teatime = 4 + 12 = 16:00
        case TimeToken.NOON:
            hr += 12;
            // deliberate fall-through: noon = 12:00, midnight leaves hr = 0
        case TimeToken.MIDNIGHT:
            spec.hour = hr;
            spec.min = 0;
            spec.sec = 0;
            token = scanner.nextToken();
            day();
            break;
        default:
            throw new RrdException("Unparsable time: " + token.value);
    }
    // Apply any trailing +/- offset expressions (e.g. "now-1day+2h").
    if (token.id == TimeToken.PLUS || token.id == TimeToken.MINUS) {
        scanner.setContext(false);
        while (token.id == TimeToken.PLUS || token.id == TimeToken.MINUS ||
                token.id == TimeToken.NUMBER) {
            if (token.id == TimeToken.NUMBER) {
                // A bare number continues the previous +/- operation.
                plusMinus(PREVIOUS_OP);
            }
            else {
                plusMinus(token.id);
            }
            token = scanner.nextToken();
        }
    }
    if (token.id != TimeToken.EOF) {
        throw new RrdException("Unparsable trailing text: " + token.value);
    }
    return spec;
}
|
TimeSpec function() throws RrdException { long now = Util.getTime(); int hr = 0; spec.localtime(now); token = scanner.nextToken(); switch (token.id) { case TimeToken.PLUS: case TimeToken.MINUS: break; case TimeToken.START: spec.type = TimeSpec.TYPE_START; case TimeToken.END: if (spec.type != TimeSpec.TYPE_START) { spec.type = TimeSpec.TYPE_END; } case TimeToken.EPOCH: case TimeToken.NOW: int time_reference = token.id; if (token.id != TimeToken.NOW) { spec.year = spec.month = spec.day = spec.hour = spec.min = spec.sec = 0; } token = scanner.nextToken(); if (token.id == TimeToken.PLUS token.id == TimeToken.MINUS) { break; } if (time_reference == TimeToken.START time_reference == TimeToken.END) { throw new RrdException(STR); } else if (token.id != TimeToken.EOF) { throw new RrdException(STR); } break; case TimeToken.NUMBER: timeOfDay(); case TimeToken.JAN: case TimeToken.FEB: case TimeToken.MAR: case TimeToken.APR: case TimeToken.MAY: case TimeToken.JUN: case TimeToken.JUL: case TimeToken.AUG: case TimeToken.SEP: case TimeToken.OCT: case TimeToken.NOV: case TimeToken.DEC: case TimeToken.TODAY: case TimeToken.YESTERDAY: case TimeToken.TOMORROW: day(); if (token.id != TimeToken.NUMBER) { break; } timeOfDay(); break; case TimeToken.TEATIME: hr += 4; case TimeToken.NOON: hr += 12; case TimeToken.MIDNIGHT: spec.hour = hr; spec.min = 0; spec.sec = 0; token = scanner.nextToken(); day(); break; default: throw new RrdException(STR + token.value); } if (token.id == TimeToken.PLUS token.id == TimeToken.MINUS) { scanner.setContext(false); while (token.id == TimeToken.PLUS token.id == TimeToken.MINUS token.id == TimeToken.NUMBER) { if (token.id == TimeToken.NUMBER) { plusMinus(PREVIOUS_OP); } else { plusMinus(token.id); } token = scanner.nextToken(); } } if (token.id != TimeToken.EOF) { throw new RrdException(STR + token.value); } return spec; }
|
/**
* Parses the input string specified in the constructor.
*
* @return Object representing parsed date/time.
* @throws RrdException Thrown if the date string cannot be parsed.
*/
|
Parses the input string specified in the constructor
|
parse
|
{
"repo_name": "roskens/jrobin",
"path": "src/main/java/org/jrobin/core/timespec/TimeParser.java",
"license": "lgpl-2.1",
"size": 13499
}
|
[
"org.jrobin.core.RrdException",
"org.jrobin.core.Util"
] |
import org.jrobin.core.RrdException; import org.jrobin.core.Util;
|
import org.jrobin.core.*;
|
[
"org.jrobin.core"
] |
org.jrobin.core;
| 56,857
|
/**
 * Handles the webapp artifacts.
 *
 * @param context the packaging context
 * @throws MojoExecutionException if the artifacts could not be packaged
 */
protected void handleArtifacts( WarPackagingContext context )
    throws MojoExecutionException
{
    // Package all project artifacts, honoring the current project overlay.
    new ArtifactsPackagingTask( context.getProject().getArtifacts(), currentProjectOverlay )
        .performPackaging( context );
}
|
void function( WarPackagingContext context ) throws MojoExecutionException { ArtifactsPackagingTask task = new ArtifactsPackagingTask( context.getProject().getArtifacts(), currentProjectOverlay ); task.performPackaging( context ); }
|
/**
* Handles the webapp artifacts.
*
* @param context the packaging context
* @throws MojoExecutionException if the artifacts could not be packaged
*/
|
Handles the webapp artifacts
|
handleArtifacts
|
{
"repo_name": "lennartj/maven-plugins",
"path": "maven-war-plugin/src/main/java/org/apache/maven/plugins/war/packaging/WarProjectPackagingTask.java",
"license": "apache-2.0",
"size": 14628
}
|
[
"org.apache.maven.plugin.MojoExecutionException"
] |
import org.apache.maven.plugin.MojoExecutionException;
|
import org.apache.maven.plugin.*;
|
[
"org.apache.maven"
] |
org.apache.maven;
| 1,815,718
|
/**
 * Adds a Database change delegate that will be called whenever a Document
 * within the Database changes.
 *
 * @param listener the delegate to notify of changes
 */
@InterfaceAudience.Public
public void addChangeListener(ChangeListener listener) {
    changeListeners.add(listener);
}
|
@InterfaceAudience.Public void function(ChangeListener listener) { changeListeners.add(listener); }
|
/**
* Adds a Database change delegate that will be called whenever a Document
* within the Database changes.
*/
|
Adds a Database change delegate that will be called whenever a Document within the Database changes
|
addChangeListener
|
{
"repo_name": "couchbase/couchbase-lite-java-core",
"path": "src/main/java/com/couchbase/lite/Database.java",
"license": "apache-2.0",
"size": 114145
}
|
[
"com.couchbase.lite.internal.InterfaceAudience"
] |
import com.couchbase.lite.internal.InterfaceAudience;
|
import com.couchbase.lite.internal.*;
|
[
"com.couchbase.lite"
] |
com.couchbase.lite;
| 327,060
|
/**
 * Simple test which checks whether raising the minimum w/h of the Stage
 * above its current size resizes the peer window up to the new minimums.
 */
public @Test void testResizingTooSmallStage() {
    // Start smaller than the minimums we are about to impose.
    s.setWidth(60);
    s.setHeight(70);
    s.setMinWidth(150);
    s.setMinHeight(140);
    // Flush pending updates to the window peer before asserting.
    pulse();
    assertEquals(150.0, peer.width, 0.0001);
    assertEquals(140.0, peer.height, 0.0001);
}
|
@Test void function() { s.setWidth(60); s.setHeight(70); s.setMinWidth(150); s.setMinHeight(140); pulse(); assertEquals(150.0, peer.width, 0.0001); assertEquals(140.0, peer.height, 0.0001); }
|
/**
* Simple test which checks whether changing the minimum w/h of the Stage
* resize the window if necessary
*/
|
Simple test which checks whether changing the minimum w/h of the Stage resize the window if necessary
|
testResizingTooSmallStage
|
{
"repo_name": "teamfx/openjfx-10-dev-rt",
"path": "modules/javafx.graphics/src/test/java/test/javafx/stage/StageTest.java",
"license": "gpl-2.0",
"size": 15109
}
|
[
"junit.framework.Assert",
"org.junit.Test"
] |
import junit.framework.Assert; import org.junit.Test;
|
import junit.framework.*; import org.junit.*;
|
[
"junit.framework",
"org.junit"
] |
junit.framework; org.junit;
| 2,042,365
|
/**
 * Expand the module and add its operators to the parentDAG. After this method
 * finishes, the module is expanded fully with all its submodules also
 * expanded. The parentDAG then contains the operators added by all modules.
 *
 * @param parentDAG parent dag to populate with operators from this and inner modules.
 * @param conf configuration object.
 */
public void flattenModule(LogicalPlan parentDAG, Configuration conf)
{
    // Idempotence guard: a module is only ever expanded once.
    if (flattened) {
        return;
    }
    // Let the module populate its own sub-DAG first.
    module.populateDAG(dag, conf);
    // Recursively expand any nested modules into this module's DAG.
    for (ModuleMeta subModuleMeta : dag.getAllModules()) {
        subModuleMeta.setParent(this);
        subModuleMeta.flattenModule(dag, conf);
    }
    // Wire up streams before merging into the parent DAG.
    dag.applyStreamLinks();
    parentDAG.addDAGToCurrentDAG(this);
    flattened = true;
}
|
void function(LogicalPlan parentDAG, Configuration conf) { if (flattened) { return; } module.populateDAG(dag, conf); for (ModuleMeta subModuleMeta : dag.getAllModules()) { subModuleMeta.setParent(this); subModuleMeta.flattenModule(dag, conf); } dag.applyStreamLinks(); parentDAG.addDAGToCurrentDAG(this); flattened = true; }
|
/**
* Expand the module and add its operator to the parentDAG. After this method finishes the module is expanded fully
* with all its submodules also expanded. The parentDAG contains the operator added by all the modules.
*
* @param parentDAG parent dag to populate with operators from this and inner modules.
* @param conf configuration object.
*/
|
Expand the module and add its operator to the parentDAG. After this method finishes the module is expanded fully with all its submodules also expanded. The parentDAG contains the operator added by all the modules
|
flattenModule
|
{
"repo_name": "tweise/incubator-apex-core",
"path": "engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java",
"license": "apache-2.0",
"size": 87362
}
|
[
"org.apache.hadoop.conf.Configuration"
] |
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 924,786
|
/**
 * Polls for up to 20s until the number of corrupt files in the file system
 * equals the number specified.
 *
 * @param dfs          the file system to poll
 * @param corruptFiles the expected number of corrupt files
 * @throws IOException if the expected count is not reached within the timeout
 */
private void waitUntilCorruptFileCount(DistributedFileSystem dfs,
                                       int corruptFiles)
  throws IOException {
  final long timeoutMs = 20000L; // maximum time to wait for the count
  long deadline = System.currentTimeMillis() + timeoutMs;
  // Poll once a second until the count matches or the deadline passes.
  while (RaidDFSUtil.getCorruptFiles(dfs).length != corruptFiles &&
         System.currentTimeMillis() < deadline) {
    try {
      Thread.sleep(1000);
    } catch (InterruptedException ignore) {
      // Restore the interrupt flag so callers can observe the interruption.
      Thread.currentThread().interrupt();
    }
  }
  int corruptFilesFound = RaidDFSUtil.getCorruptFiles(dfs).length;
  if (corruptFilesFound != corruptFiles) {
    throw new IOException("expected " + corruptFiles +
                          " corrupt files but got " +
                          corruptFilesFound);
  }
}
|
void function(DistributedFileSystem dfs, int corruptFiles) throws IOException { int initialCorruptFiles = RaidDFSUtil.getCorruptFiles(dfs).length; long waitStart = System.currentTimeMillis(); while (RaidDFSUtil.getCorruptFiles(dfs).length != corruptFiles) { try { Thread.sleep(1000); } catch (InterruptedException ignore) { } if (System.currentTimeMillis() > waitStart + 20000L) { break; } } long waited = System.currentTimeMillis() - waitStart; int corruptFilesFound = RaidDFSUtil.getCorruptFiles(dfs).length; if (corruptFilesFound != corruptFiles) { throw new IOException(STR + corruptFiles + STR + corruptFilesFound); } }
|
/**
* sleeps for up to 20s until the number of corrupt files
* in the file system is equal to the number specified
*/
|
sleeps for up to 20s until the number of corrupt files in the file system is equal to the number specified
|
waitUntilCorruptFileCount
|
{
"repo_name": "steveloughran/hadoop-mapreduce",
"path": "src/contrib/raid/src/test/org/apache/hadoop/raid/TestRaidShellFsck.java",
"license": "apache-2.0",
"size": 25716
}
|
[
"java.io.IOException",
"org.apache.hadoop.hdfs.DistributedFileSystem",
"org.apache.hadoop.hdfs.RaidDFSUtil"
] |
import java.io.IOException; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.RaidDFSUtil;
|
import java.io.*; import org.apache.hadoop.hdfs.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,304,374
|
/**
 * Returns the principal name.
 *
 * @return The principal name.
 */
public PrincipalName getPrincipalName()
{
    return principalName;
}
|
PrincipalName function() { return principalName; }
|
/**
* Returns the principal name.
*
* @return The principal name.
*/
|
Returns the principal name
|
getPrincipalName
|
{
"repo_name": "drankye/directory-server",
"path": "protocol-changepw/src/main/java/org/apache/directory/server/changepw/value/ChangePasswordData.java",
"license": "apache-2.0",
"size": 2030
}
|
[
"org.apache.directory.shared.kerberos.components.PrincipalName"
] |
import org.apache.directory.shared.kerberos.components.PrincipalName;
|
import org.apache.directory.shared.kerberos.components.*;
|
[
"org.apache.directory"
] |
org.apache.directory;
| 380,920
|
/**
 * Returns the 'window opener' URI that is meant for windows to be open for
 * all users (even not logged on). Delegates to the two-argument overload
 * with -1 as the second argument (presumably "no specific instance id" —
 * TODO confirm against the overload's contract).
 */
public String getPublicObjectInstanciatorURI(Class<? extends UIComponent> windowToOpen) {
    return getPublicObjectInstanciatorURI(windowToOpen, -1);
}
|
String function(Class<? extends UIComponent> windowToOpen) { return getPublicObjectInstanciatorURI(windowToOpen, -1); }
|
/**
* Returns the prefix for the 'window opener' URI that is meant for windows to be open for all users (even not logged on)<br>
* For the new platform this is '/window/E0410143-CF32-42B1-A97B-E712AA702962'
* but for older versions this is '/servlet/WindowOpener?idegaweb_frame_class=1234'
*/
|
Returns the prefix for the 'window opener' URI that is meant for windows to be open for all users (even not logged on) For the new platform this is '/window/E0410143-CF32-42B1-A97B-E712AA702962' but for older versions this is '/servlet/WindowOpener?idegaweb_frame_class=1234'
|
getPublicObjectInstanciatorURI
|
{
"repo_name": "grimurjonsson/com.idega.core",
"path": "src/java/com/idega/idegaweb/IWMainApplication.java",
"license": "gpl-3.0",
"size": 86797
}
|
[
"javax.faces.component.UIComponent"
] |
import javax.faces.component.UIComponent;
|
import javax.faces.component.*;
|
[
"javax.faces"
] |
javax.faces;
| 2,733,668
|
/**
 * Test for updating an Artist: renames an existing artist via PUT and
 * verifies both the response status and the name in the returned entity.
 *
 * @throws IOException if logging in fails
 */
@Test
public void updateArtistTest() throws IOException {
    Cookie cookieSessionId = login(username, password);
    ArtistProfileDetailDTO artist = new ArtistProfileDetailDTO(oraculo.get(0));
    // Borrow a freshly generated name from a manufactured pojo.
    ArtistDTO artistChanged = factory.manufacturePojo(ArtistDTO.class);
    artist.setName(artistChanged.getName());
    Response response = target
            .path(artist.getId().toString())
            .request().cookie(cookieSessionId)
            .put(Entity.entity(artist, MediaType.APPLICATION_JSON));
    // readEntity(Class) is already typed — the explicit cast was redundant.
    ArtistDTO artistTest = response.readEntity(ArtistDTO.class);
    Assert.assertEquals(Ok, response.getStatus());
    Assert.assertEquals(artist.getName(), artistTest.getName());
}
|
void function() throws IOException { Cookie cookieSessionId = login(username, password); ArtistProfileDetailDTO artist = new ArtistProfileDetailDTO(oraculo.get(0)); ArtistDTO artistChanged = factory.manufacturePojo(ArtistDTO.class); artist.setName(artistChanged.getName()); Response response = target .path(artist.getId().toString()) .request().cookie(cookieSessionId) .put(Entity.entity(artist, MediaType.APPLICATION_JSON)); ArtistDTO artistTest = (ArtistDTO) response.readEntity(ArtistDTO.class); Assert.assertEquals(Ok, response.getStatus()); Assert.assertEquals(artist.getName(), artistTest.getName()); }
|
/**
* Prueba para actualizar un Artist
*
* @generated
*/
|
Prueba para actualizar un Artist
|
updateArtistTest
|
{
"repo_name": "Uniandes-MISO4203/artwork-201620-2",
"path": "artwork-api/src/test/java/co/edu/uniandes/csw/artwork/tests/rest/ArtistTest.java",
"license": "mit",
"size": 9737
}
|
[
"co.edu.uniandes.csw.artwork.dtos.detail.ArtistProfileDetailDTO",
"co.edu.uniandes.csw.artwork.dtos.minimum.ArtistDTO",
"java.io.IOException",
"javax.ws.rs.client.Entity",
"javax.ws.rs.core.Cookie",
"javax.ws.rs.core.MediaType",
"javax.ws.rs.core.Response",
"org.junit.Assert"
] |
import co.edu.uniandes.csw.artwork.dtos.detail.ArtistProfileDetailDTO; import co.edu.uniandes.csw.artwork.dtos.minimum.ArtistDTO; import java.io.IOException; import javax.ws.rs.client.Entity; import javax.ws.rs.core.Cookie; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.junit.Assert;
|
import co.edu.uniandes.csw.artwork.dtos.detail.*; import co.edu.uniandes.csw.artwork.dtos.minimum.*; import java.io.*; import javax.ws.rs.client.*; import javax.ws.rs.core.*; import org.junit.*;
|
[
"co.edu.uniandes",
"java.io",
"javax.ws",
"org.junit"
] |
co.edu.uniandes; java.io; javax.ws; org.junit;
| 1,608,415
|
/**
 * Returns the crosstool-derived link action inputs for a given rule,
 * adding the given set of artifacts as extra inputs.
 */
protected NestedSet<Artifact> fullInputsForLink(
    RuleContext ruleContext, NestedSet<Artifact> link) {
  NestedSetBuilder<Artifact> inputs = NestedSetBuilder.<Artifact>stableOrder();
  inputs.addTransitive(link);
  // Pull in the libc middleman for the target configuration.
  inputs.addTransitive(AnalysisUtils.getMiddlemanFor(ruleContext, ":libc_top", Mode.TARGET));
  inputs.add(ruleContext.getPrerequisiteArtifact("$interface_library_builder", Mode.HOST));
  inputs.add(ruleContext.getPrerequisiteArtifact("$link_dynamic_library_tool", Mode.HOST));
  return inputs.build();
}
|
NestedSet<Artifact> function( RuleContext ruleContext, NestedSet<Artifact> link) { return NestedSetBuilder.<Artifact>stableOrder() .addTransitive(link) .addTransitive(AnalysisUtils.getMiddlemanFor(ruleContext, STR, Mode.TARGET)) .add(ruleContext.getPrerequisiteArtifact(STR, Mode.HOST)) .add(ruleContext.getPrerequisiteArtifact(STR, Mode.HOST)) .build(); }
|
/**
* Returns the crosstool-derived link action inputs for a given rule. Adds the given set of
* artifacts as extra inputs.
*/
|
Returns the crosstool-derived link action inputs for a given rule. Adds the given set of artifacts as extra inputs
|
fullInputsForLink
|
{
"repo_name": "spxtr/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/cpp/CcToolchain.java",
"license": "apache-2.0",
"size": 29820
}
|
[
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.analysis.AnalysisUtils",
"com.google.devtools.build.lib.analysis.RuleContext",
"com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget",
"com.google.devtools.build.lib.collect.nestedset.NestedSet",
"com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder"
] |
import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.AnalysisUtils; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
|
import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.analysis.*; import com.google.devtools.build.lib.analysis.configuredtargets.*; import com.google.devtools.build.lib.collect.nestedset.*;
|
[
"com.google.devtools"
] |
com.google.devtools;
| 1,330,992
|
ArrayList<WikipediaPage> list;
DocumentBuilderFactory dbf;
DocumentBuilder db;
Document doc;
NodeList nl;
try {
// get the NEW document builder factory
dbf = DocumentBuilderFactory.newInstance();
// use factory to get instance of document builder
db = dbf.newDocumentBuilder();
// use builder to get instance of document
doc = db.parse(xmlFN);
// get a nodelist of elements (wiki pages)
nl = doc.getElementsByTagName("page");
// get the list for storing articles
list = new ArrayList<>();
// convert NodeList to ArrayList<WikiArticle>
for (int i = 0; i < nl.getLength(); i++) {
WikipediaPage newWikiPage = new WikipediaPage(nl.item(i));
list.add(newWikiPage);
}
System.out.println("Success Parsing XML!");
return list;
} catch (ParserConfigurationException
| SAXException
| IOException ex) {
System.err.println("ERROR: " + ex.getMessage());
System.err.println("!! Failed XML Parsing !!");
}
return null;
}
|
ArrayList<WikipediaPage> list; DocumentBuilderFactory dbf; DocumentBuilder db; Document doc; NodeList nl; try { dbf = DocumentBuilderFactory.newInstance(); db = dbf.newDocumentBuilder(); doc = db.parse(xmlFN); nl = doc.getElementsByTagName("page"); list = new ArrayList<>(); for (int i = 0; i < nl.getLength(); i++) { WikipediaPage newWikiPage = new WikipediaPage(nl.item(i)); list.add(newWikiPage); } System.out.println(STR); return list; } catch (ParserConfigurationException SAXException IOException ex) { System.err.println(STR + ex.getMessage()); System.err.println(STR); } return null; }
|
/**
* Takes an XML file name as the only argument, specifically one that was
* downloaded from https://en.wikipedia.org/wiki/Special:Export , which then
* uses the DOM to parse the file and create WikiArticle objects that are
* added to the list and returned to the caller.
*
* @param xmlFN: the XML path/file name
* @return list: a list of WikiArticles
*/
|
Takes an XML file name as the only argument, specifically one that was downloaded from HREF , which then uses the DOM to parse the file and create WikiArticle objects that are added to the list and returned to the caller
|
importWikiXMLFile
|
{
"repo_name": "whatfield-ii/WikipediaSpecialExportXMLParser",
"path": "src/wikiparser/WikiParser.java",
"license": "mit",
"size": 11553
}
|
[
"java.io.IOException",
"java.util.ArrayList",
"javax.xml.parsers.DocumentBuilder",
"javax.xml.parsers.DocumentBuilderFactory",
"javax.xml.parsers.ParserConfigurationException",
"org.w3c.dom.Document",
"org.w3c.dom.NodeList",
"org.xml.sax.SAXException"
] |
import java.io.IOException; import java.util.ArrayList; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.NodeList; import org.xml.sax.SAXException;
|
import java.io.*; import java.util.*; import javax.xml.parsers.*; import org.w3c.dom.*; import org.xml.sax.*;
|
[
"java.io",
"java.util",
"javax.xml",
"org.w3c.dom",
"org.xml.sax"
] |
java.io; java.util; javax.xml; org.w3c.dom; org.xml.sax;
| 691,198
|
/**
 * Get the int value associated with a key. If the number value is too large
 * for an int, it will be clipped by the cast.
 *
 * @param key A key string.
 *
 * @return The integer value.
 *
 * @throws JSONException if the key is not found or if the value cannot be
 *                       converted to an integer.
 */
public int getInt(String key) {
    verifyIsNull();
    Object value = get(key);
    if (value == null) {
        throw new JSONException("JSONObject[" + JSONUtils.quote(key) + "] is not a number.");
    }
    if (value instanceof Number) {
        return ((Number) value).intValue();
    }
    // Non-numeric values fall back to the double conversion, truncated.
    return (int) getDouble(key);
}
|
int function(String key) { verifyIsNull(); Object o = get(key); if (o != null) { return o instanceof Number ? ((Number) o).intValue() : (int) getDouble(key); } throw new JSONException(STR + JSONUtils.quote(key) + STR); }
|
/**
* Get the int value associated with a key. If the number value is too large
* for an int, it will be clipped.
*
* @param key A key string.
*
* @return The integer value.
*
* @throws JSONException if the key is not found or if the value cannot be
* converted to an integer.
*/
|
Get the int value associated with a key. If the number value is too large for an int, it will be clipped
|
getInt
|
{
"repo_name": "aalmiray/Json-lib",
"path": "subprojects/json-lib-core/src/main/java/org/kordamp/json/JSONObject.java",
"license": "apache-2.0",
"size": 107677
}
|
[
"org.kordamp.json.util.JSONUtils"
] |
import org.kordamp.json.util.JSONUtils;
|
import org.kordamp.json.util.*;
|
[
"org.kordamp.json"
] |
org.kordamp.json;
| 1,400,536
|
/**
 * doCancelImport called when "eventSubmit_doCancelImport" is in the request
 * parameters to cancel user imports.
 */
public void doCancelImport(RunData data, Context context)
{
    JetspeedRunData jData = (JetspeedRunData) data;
    SessionState state = jData.getPortletSessionState(jData.getJs_peid());
    // Only honor POST submissions.
    if (!"POST".equals(data.getRequest().getMethod())) {
        return;
    }
    // Drop the users parsed from the uploaded file out of the session.
    state.removeAttribute("importedUsers");
    // Also reset our state handler (I think this should be combined into SessionState).
    UsersActionState sstate = (UsersActionState) getState(context, data, UsersActionState.class);
    sstate.setAttachments(new ArrayList());
    sstate.setStatus(null);
    // Return to main mode.
    state.removeAttribute("mode");
    // Make sure auto-updates are enabled.
    enableObserver(state);
} // doCancelImport
|
void function(RunData data, Context context) { SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid()); if (!"POST".equals(data.getRequest().getMethod())) { return; } state.removeAttribute(STR); UsersActionState sstate = (UsersActionState)getState(context, data, UsersActionState.class); sstate.setAttachments(new ArrayList()); sstate.setStatus(null); state.removeAttribute("mode"); enableObserver(state); }
|
/**
* doCancelImport called when "eventSubmit_doCancelImport" is in the request parameters to cancel user imports
*/
|
doCancelImport called when "eventSubmit_doCancelImport" is in the request parameters to cancel user imports
|
doCancelImport
|
{
"repo_name": "eemirtekin/Sakai-10.6-TR",
"path": "user/user-tool/tool/src/java/org/sakaiproject/user/tool/UsersAction.java",
"license": "apache-2.0",
"size": 56071
}
|
[
"java.util.ArrayList",
"org.sakaiproject.cheftool.Context",
"org.sakaiproject.cheftool.JetspeedRunData",
"org.sakaiproject.cheftool.RunData",
"org.sakaiproject.event.api.SessionState"
] |
import java.util.ArrayList; import org.sakaiproject.cheftool.Context; import org.sakaiproject.cheftool.JetspeedRunData; import org.sakaiproject.cheftool.RunData; import org.sakaiproject.event.api.SessionState;
|
import java.util.*; import org.sakaiproject.cheftool.*; import org.sakaiproject.event.api.*;
|
[
"java.util",
"org.sakaiproject.cheftool",
"org.sakaiproject.event"
] |
java.util; org.sakaiproject.cheftool; org.sakaiproject.event;
| 1,042,916
|
/**
 * Set the activeKey property: The key vault key which is currently used by
 * this disk encryption set.
 *
 * @param activeKey the activeKey value to set.
 * @return the DiskEncryptionSetInner object itself.
 */
public DiskEncryptionSetInner withActiveKey(KeyForDiskEncryptionSet activeKey) {
    // Lazily create the inner properties bag on first write.
    if (this.innerProperties() == null) {
        this.innerProperties = new EncryptionSetProperties();
    }
    this.innerProperties().withActiveKey(activeKey);
    return this;
}
|
DiskEncryptionSetInner function(KeyForDiskEncryptionSet activeKey) { if (this.innerProperties() == null) { this.innerProperties = new EncryptionSetProperties(); } this.innerProperties().withActiveKey(activeKey); return this; }
|
/**
* Set the activeKey property: The key vault key which is currently used by this disk encryption set.
*
* @param activeKey the activeKey value to set.
* @return the DiskEncryptionSetInner object itself.
*/
|
Set the activeKey property: The key vault key which is currently used by this disk encryption set
|
withActiveKey
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-compute/src/main/java/com/azure/resourcemanager/compute/fluent/models/DiskEncryptionSetInner.java",
"license": "mit",
"size": 7290
}
|
[
"com.azure.resourcemanager.compute.models.KeyForDiskEncryptionSet"
] |
import com.azure.resourcemanager.compute.models.KeyForDiskEncryptionSet;
|
import com.azure.resourcemanager.compute.models.*;
|
[
"com.azure.resourcemanager"
] |
com.azure.resourcemanager;
| 2,522,873
|
/**
 * Loads settings from a configuration file into the shared registry map.
 *
 * <p>Each non-comment line of the form {@code key = value} is parsed;
 * lines starting with {@code #} or lacking an {@code =} are skipped.
 * Runs of spaces/tabs on either side of the {@code =} are collapsed to
 * single spaces and trimmed before storing.
 *
 * @param configFile path of the configuration file to read
 */
public static void loadFromFile(String configFile) {
    // The shared settings map lives in the global registry under HASHNAME.
    Hashtable<String, Object> hash = (Hashtable) _registry.get(HASHNAME);
    // try-with-resources: the original leaked the reader on exceptions.
    try (BufferedReader fileReader = new BufferedReader(new FileReader(configFile))) {
        String line;
        while ((line = fileReader.readLine()) != null) {
            // A line starting with "#" is a comment; others need an '='.
            if (line.startsWith("#") || line.indexOf("=") == -1) {
                continue;
            }
            StringTokenizer st = new StringTokenizer(line, "=");
            String key = normalizeWhitespace(st.nextToken());
            String value = normalizeWhitespace(st.nextToken());
            hash.put(key, value);
        }
    } catch (FileNotFoundException ex) {
        // NOTE(review): the logger is registered under ResultSet.class in the
        // original — looks like a copy-paste; consider the enclosing class.
        Logger.getLogger(ResultSet.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(ResultSet.class.getName()).log(Level.SEVERE, null, ex);
    }
}

/** Collapses runs of spaces/tabs in {@code s} to single spaces and trims both ends. */
private static String normalizeWhitespace(String s) {
    StringBuilder out = new StringBuilder();
    StringTokenizer tok = new StringTokenizer(s, " \t");
    while (tok.hasMoreTokens()) {
        if (out.length() > 0) {
            out.append(' ');
        }
        out.append(tok.nextToken());
    }
    return out.toString();
}
|
static void function(String configFile) { try { Hashtable <String,Object> hash = (Hashtable) _registry.get(HASHNAME); BufferedReader fileReader = new BufferedReader(new FileReader(configFile)); String line; StringTokenizer st,aux; String leftToken,rightToken; String key,value; while ((line = fileReader.readLine()) != null) { if (!line.startsWith("#") && (line.indexOf("=")!=-1)){ st = new StringTokenizer(line,"="); leftToken = st.nextToken(); rightToken = st.nextToken(); key=new String(); aux = new StringTokenizer(leftToken, STR); if (aux.hasMoreTokens()) key += aux.nextToken(); while (aux.hasMoreTokens()) { key += " "+aux.nextToken(); } value=new String(); aux = new StringTokenizer(rightToken, STR); if (aux.hasMoreTokens()) value += aux.nextToken(); while (aux.hasMoreTokens()) { value += " "+aux.nextToken(); } hash.put(key, value); } } fileReader.close(); } catch (FileNotFoundException ex) { Logger.getLogger(ResultSet.class.getName()).log(Level.SEVERE, null, ex); } catch (java.io.IOException ex) { Logger.getLogger(ResultSet.class.getName()).log(Level.SEVERE, null, ex); } }
|
/**
* Incarc setarile dintr-un fisier
*/
|
Incarc setarile dintr-un fisier
|
loadFromFile
|
{
"repo_name": "genomeartist/genomeartist",
"path": "sources_java/guiTransposon/src/ro/genomeartist/gui/utils/ReadWriteConfiguration.java",
"license": "gpl-3.0",
"size": 6463
}
|
[
"java.io.BufferedReader",
"java.io.FileNotFoundException",
"java.io.FileReader",
"java.io.IOException",
"java.sql.ResultSet",
"java.util.Hashtable",
"java.util.StringTokenizer",
"java.util.logging.Level",
"java.util.logging.Logger"
] |
import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.sql.ResultSet; import java.util.Hashtable; import java.util.StringTokenizer; import java.util.logging.Level; import java.util.logging.Logger;
|
import java.io.*; import java.sql.*; import java.util.*; import java.util.logging.*;
|
[
"java.io",
"java.sql",
"java.util"
] |
java.io; java.sql; java.util;
| 1,542,698
|
/**
 * Main body of db search.
 *
 * <p>Uses the session-scoped spanConstraintMap to detect a duplicated span
 * upload: an identical constraint reuses the UUID of the earlier search
 * instead of creating a new one. The actual database search then runs
 * asynchronously on a new thread.
 */
public void search() {
    // Session map of constraint -> result UUID, used for de-duplication.
    @SuppressWarnings("unchecked")
    Map<GenomicRegionSearchConstraint, String> spanConstraintMap =
        (HashMap<GenomicRegionSearchConstraint, String>) request
            .getSession().getAttribute("spanConstraintMap");
    if (spanConstraintMap == null) {
        spanConstraintMap = new HashMap<GenomicRegionSearchConstraint, String>();
    }
    // The original special-cased an empty map before checking containsKey;
    // the two branches collapse to a single containsKey test.
    if (spanConstraintMap.containsKey(grsc)) {
        // Duplicate search: reuse the previous UUID so results are shared.
        spanUUIDString = spanConstraintMap.get(grsc);
        request.setAttribute("spanUUIDString", spanUUIDString);
    } else {
        spanConstraintMap.put(grsc, spanUUIDString);
    }
    request.getSession().setAttribute("spanConstraintMap", spanConstraintMap);
    request.setAttribute("spanQueryTotalCount", grsc.getGenomicRegionList().size());
    // Kick off the asynchronous search (this class implements Runnable).
    (new Thread(this)).start();
}
|
void function() { @SuppressWarnings(STR) Map<GenomicRegionSearchConstraint, String> spanConstraintMap = (HashMap<GenomicRegionSearchConstraint, String>) request .getSession().getAttribute(STR); if (spanConstraintMap == null) { spanConstraintMap = new HashMap<GenomicRegionSearchConstraint, String>(); } if (spanConstraintMap.size() == 0) { spanConstraintMap.put(grsc, spanUUIDString); } else { if (spanConstraintMap.containsKey(grsc)) { spanUUIDString = spanConstraintMap.get(grsc); request.setAttribute(STR, spanUUIDString); } else { spanConstraintMap.put(grsc, spanUUIDString); } } request.getSession().setAttribute(STR, spanConstraintMap); request.setAttribute(STR, grsc.getGenomicRegionList().size()); (new Thread(this)).start(); }
|
/**
* Main body of db search
*/
|
Main body of db search
|
search
|
{
"repo_name": "elsiklab/intermine",
"path": "bio/webapp/src/org/intermine/bio/web/logic/GenomicRegionSearchQueryRunner.java",
"license": "lgpl-2.1",
"size": 18247
}
|
[
"java.util.HashMap",
"java.util.Map",
"org.intermine.bio.web.model.GenomicRegionSearchConstraint"
] |
import java.util.HashMap; import java.util.Map; import org.intermine.bio.web.model.GenomicRegionSearchConstraint;
|
import java.util.*; import org.intermine.bio.web.model.*;
|
[
"java.util",
"org.intermine.bio"
] |
java.util; org.intermine.bio;
| 312,668
|
/**
 * Skips any run of XML whitespace (space, tab, CR, LF) at the current
 * position, then, if the next character is a comma, consumes the comma
 * and any whitespace that follows it. Leaves {@code current} on the
 * first non-whitespace, non-separator character.
 */
protected void skipCommaSpaces() throws IOException {
    // Consume leading whitespace; 'current' already holds the lookahead char.
    while (current == 0x20 || current == 0x9 || current == 0xD || current == 0xA) {
        current = reader.read();
    }
    // Optional comma separator: consume it plus any trailing whitespace.
    if (current == ',') {
        do {
            current = reader.read();
        } while (current == 0x20 || current == 0x9 || current == 0xD || current == 0xA);
    }
}
|
void function() throws IOException { wsp1: for (;;) { switch (current) { default: break wsp1; case 0x20: case 0x9: case 0xD: case 0xA: } current = reader.read(); } if (current == ',') { wsp2: for (;;) { switch (current = reader.read()) { default: break wsp2; case 0x20: case 0x9: case 0xD: case 0xA: } } } }
|
/**
* Skips the whitespaces and an optional comma.
*/
|
Skips the whitespaces and an optional comma
|
skipCommaSpaces
|
{
"repo_name": "sflyphotobooks/crp-batik",
"path": "sources/org/apache/batik/parser/AbstractParser.java",
"license": "apache-2.0",
"size": 7854
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,590,642
|
/**
 * ROWMEAN over a sparse input (opcode: uarmean).
 *
 * For each row in [rl, ru) computes the mean of the full logical row of
 * width n, writing a 3-tuple per row into c: the Kahan-compensated sum,
 * the count n, and the Kahan correction term.
 *
 * @param a     sparse input rows (entries may be null)
 * @param c     output buffer, 3 slots per row: {sum, n, correction}
 * @param m     number of rows (unused here; kept for a uniform signature)
 * @param n     logical row length (column count)
 * @param kbuff reusable Kahan accumulator, reset per row
 * @param kmean mean function object applied to the non-zero values
 * @param rl    first row index (inclusive)
 * @param ru    last row index (exclusive)
 */
private static void s_uarmean( SparseRow[] a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru )
{
    int ci = rl * 3; // output cursor: 3 values per row
    for( int r = rl; r < ru; r++, ci += 3 )
    {
        SparseRow row = a[r];
        // Number of implicit zeros in this row (not sparse-safe); computed
        // up front so the aggregate can exploit a zero-sum noop and keep
        // better numerical stability.
        int zeros = (row == null) ? n : n - row.size();
        kbuff.set(0, 0); // reset accumulator for this row
        if( row != null && !row.isEmpty() )
        {
            mean(row.getValueContainer(), 0, row.size(), zeros, kbuff, kmean);
        }
        c[ci]     = kbuff._sum;
        c[ci + 1] = n;
        c[ci + 2] = kbuff._correction;
    }
}
|
static void function( SparseRow[] a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru ) { for( int i=rl, cix=rl*3; i<ru; i++, cix+=3 ) { int count = (a[i]==null)? n : n-a[i].size(); kbuff.set(0, 0); SparseRow arow = a[i]; if( arow!=null && !arow.isEmpty() ) { int alen = arow.size(); double[] avals = arow.getValueContainer(); mean(avals, 0, alen, count, kbuff, kmean); } c[cix+0] = kbuff._sum; c[cix+1] = n; c[cix+2] = kbuff._correction; } }
|
/**
* ROWMEAN, opcode: uarmean, sparse input.
*
* @param a
* @param c
* @param m
* @param n
* @param kbuff
 * @param kmean
*/
|
ROWMEAN, opcode: uarmean, sparse input
|
s_uarmean
|
{
"repo_name": "Myasuka/systemml",
"path": "system-ml/src/main/java/com/ibm/bi/dml/runtime/matrix/data/LibMatrixAgg.java",
"license": "apache-2.0",
"size": 77315
}
|
[
"com.ibm.bi.dml.runtime.functionobjects.Mean",
"com.ibm.bi.dml.runtime.instructions.cp.KahanObject"
] |
import com.ibm.bi.dml.runtime.functionobjects.Mean; import com.ibm.bi.dml.runtime.instructions.cp.KahanObject;
|
import com.ibm.bi.dml.runtime.functionobjects.*; import com.ibm.bi.dml.runtime.instructions.cp.*;
|
[
"com.ibm.bi"
] |
com.ibm.bi;
| 947,180
|
/**
 * Formats the notification message sent to a photo's author when a new
 * identification has been added to his/her photo.
 *
 * @param user the photo author
 * @param body the message body template to be formatted
 * @param photoIdentification the new photo identification
 * @return the formatted message, or an empty string if the template's
 *         placeholder count does not match the supplied values
 */
private String getMessage(User user, String body, PhotoIdentification photoIdentification) {
    String bodyFormatted = "";
    // Values substituted into the template, in positional order:
    // author name, identifier name, identification URL.
    // (Fixed: was a raw ArrayList; parameterize to avoid unchecked warnings.)
    ArrayList<String> list = new ArrayList<String>();
    String url =
        "http://www.aves.brasil.nom.br/servlet/initIdentification?photoId="
            + photoIdentification.getPhoto().getId();
    list.add(user.getName());
    list.add(photoIdentification.getUser().getName());
    list.add(url);
    try {
        bodyFormatted = MessageComposer.formatMessage(body, list);
    } catch (WrongNumberOfValuesException e) {
        // Template/value mismatch: log and fall through, returning "".
        logger.error("UploadPhoto.getMessage : Exception", e);
    }
    return bodyFormatted;
}
|
String function(User user, String body, PhotoIdentification photoIdentification) { String bodyFormatted = STRhttp: + photoIdentification.getPhoto().getId(); list.add(user.getName()); list.add(photoIdentification.getUser().getName()); list.add(url); try { bodyFormatted = MessageComposer.formatMessage(body, list); } catch (WrongNumberOfValuesException e) { logger.error(STR, e); } return bodyFormatted; }
|
/**
* Formats the message to be sent to the photo author about a new identification that has been
* done to his/her photo
*
* @param user The photo author
* @param body The body text template to be formatted
 * @param photoIdentification The photo identification
*
* @return A formatted message to be sent to the photo author about a new identification that
* has been done to his/her photo
*/
|
Formats the message to be sent to the photo author about a new identification that has been done to his/her photo
|
getMessage
|
{
"repo_name": "BackupTheBerlios/arara-svn",
"path": "core/tags/arara-1.0/src/main/java/net/indrix/arara/model/PhotoModel.java",
"license": "gpl-2.0",
"size": 27864
}
|
[
"net.indrix.arara.tools.email.MessageComposer",
"net.indrix.arara.tools.email.WrongNumberOfValuesException",
"net.indrix.arara.vo.PhotoIdentification",
"net.indrix.arara.vo.User"
] |
import net.indrix.arara.tools.email.MessageComposer; import net.indrix.arara.tools.email.WrongNumberOfValuesException; import net.indrix.arara.vo.PhotoIdentification; import net.indrix.arara.vo.User;
|
import net.indrix.arara.tools.email.*; import net.indrix.arara.vo.*;
|
[
"net.indrix.arara"
] |
net.indrix.arara;
| 3,267
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.