method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Compute the size of a shortcut list in bytes.
 *
 * The size is known in advance and, unlike address lists, does not
 * depend on the list's position within the file.
 *
 * @param shortcutList the shortcuts to measure; may be null (counts as 0)
 * @return the serialized size in bytes, including the list-size header
 */
private static int getShortcutListSize(final ArrayList<WeightedString> shortcutList) {
    if (shortcutList == null) {
        return 0;
    }
    // Start with the fixed header, then add each entry's serialized size.
    int total = FormatSpec.GROUP_SHORTCUT_LIST_SIZE_SIZE;
    for (int i = 0; i < shortcutList.size(); ++i) {
        total += getShortcutSize(shortcutList.get(i));
    }
    return total;
}
|
static int function(final ArrayList<WeightedString> shortcutList) { if (null == shortcutList) return 0; int size = FormatSpec.GROUP_SHORTCUT_LIST_SIZE_SIZE; for (final WeightedString shortcut : shortcutList) { size += getShortcutSize(shortcut); } return size; }
|
/**
* Compute the size of a shortcut list in bytes.
*
* This is known in advance and does not change according to position in the file
* like address lists do.
*/
|
Compute the size of a shortcut list in bytes. This is known in advance and does not change according to position in the file like address lists do
|
getShortcutListSize
|
{
"repo_name": "rex-xxx/mt6572_x201",
"path": "packages/inputmethods/LatinIME/java/src/com/android/inputmethod/latin/makedict/BinaryDictInputOutput.java",
"license": "gpl-2.0",
"size": 76116
}
|
[
"com.android.inputmethod.latin.makedict.FusionDictionary",
"java.util.ArrayList"
] |
import com.android.inputmethod.latin.makedict.FusionDictionary; import java.util.ArrayList;
|
import com.android.inputmethod.latin.makedict.*; import java.util.*;
|
[
"com.android.inputmethod",
"java.util"
] |
com.android.inputmethod; java.util;
| 2,368,731
|
/**
 * The meta-property for the {@code uniqueId} property.
 *
 * @return the meta-property, not null
 */
public final MetaProperty<UniqueId> uniqueId() {
    return _uniqueId;
}
|
final MetaProperty<UniqueId> function() { return _uniqueId; }
|
/**
* The meta-property for the {@code uniqueId} property.
* @return the meta-property, not null
*/
|
The meta-property for the uniqueId property
|
uniqueId
|
{
"repo_name": "McLeodMoores/starling",
"path": "projects/master/src/main/java/com/opengamma/master/config/ConfigDocument.java",
"license": "apache-2.0",
"size": 10664
}
|
[
"com.opengamma.id.UniqueId",
"org.joda.beans.MetaProperty"
] |
import com.opengamma.id.UniqueId; import org.joda.beans.MetaProperty;
|
import com.opengamma.id.*; import org.joda.beans.*;
|
[
"com.opengamma.id",
"org.joda.beans"
] |
com.opengamma.id; org.joda.beans;
| 351,586
|
/**
 * Log start time of this map task attempt.
 *
 * @param taskAttemptId task attempt id
 * @param startTime start time of task attempt as reported by task tracker
 * @param trackerName name of the tracker executing the task attempt
 * @param httpPort http port of the task tracker executing the task attempt
 * @param taskType whether the attempt is cleanup or setup or map
 */
public void logMapTaskStarted(TaskAttemptID taskAttemptId, long startTime,
                              String trackerName, int httpPort,
                              String taskType) {
    if (disableHistory) {
        return;
    }
    // Reject attempts that belong to a different job than this history.
    JobID id = taskAttemptId.getJobID();
    if (!this.jobId.equals(id)) {
        throw new RuntimeException("JobId from task: " + id +
            " does not match expected: " + jobId);
    }
    if (writers == null) {
        return;
    }
    final Keys[] keys = {
        Keys.TASK_TYPE, Keys.TASKID,
        Keys.TASK_ATTEMPT_ID, Keys.START_TIME,
        Keys.TRACKER_NAME, Keys.HTTP_PORT
    };
    // An unset http port (-1) is recorded as an empty string.
    final String[] values = {
        taskType,
        taskAttemptId.getTaskID().toString(),
        taskAttemptId.toString(),
        String.valueOf(startTime),
        trackerName,
        httpPort == -1 ? "" : String.valueOf(httpPort)
    };
    log(writers, RecordTypes.MapAttempt, keys, values);
}
|
void function(TaskAttemptID taskAttemptId, long startTime, String trackerName, int httpPort, String taskType) { if (disableHistory) { return; } JobID id = taskAttemptId.getJobID(); if (!this.jobId.equals(id)) { throw new RuntimeException(STR + id + STR + jobId); } if (null != writers) { log(writers, RecordTypes.MapAttempt, new Keys[]{ Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.START_TIME, Keys.TRACKER_NAME, Keys.HTTP_PORT}, new String[]{taskType, taskAttemptId.getTaskID().toString(), taskAttemptId.toString(), String.valueOf(startTime), trackerName, httpPort == -1 ? "" : String.valueOf(httpPort)}); } }
|
/**
* Log start time of this map task attempt.
*
* @param taskAttemptId task attempt id
* @param startTime start time of task attempt as reported by task tracker.
* @param trackerName name of the tracker executing the task attempt.
* @param httpPort http port of the task tracker executing the task attempt
* @param taskType Whether the attempt is cleanup or setup or map
*/
|
Log start time of this map task attempt
|
logMapTaskStarted
|
{
"repo_name": "nvoron23/hadoop-20",
"path": "src/contrib/corona/src/java/org/apache/hadoop/mapred/CoronaJobHistory.java",
"license": "apache-2.0",
"size": 32684
}
|
[
"org.apache.hadoop.mapred.JobHistory"
] |
import org.apache.hadoop.mapred.JobHistory;
|
import org.apache.hadoop.mapred.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 350,569
|
/**
 * Construct an HttpServletRequest credentials plugin specified by the DRPC
 * storm configuration.
 *
 * @param conf storm configuration
 * @return the plugin
 */
public static IHttpCredentialsPlugin GetDrpcHttpCredentialsPlugin(Map conf) {
    // The plugin class name is configured under DRPC_HTTP_CREDS_PLUGIN.
    return AuthUtils.GetHttpCredentialsPlugin(
        conf, (String) conf.get(Config.DRPC_HTTP_CREDS_PLUGIN));
}
|
static IHttpCredentialsPlugin function(Map conf) { String klassName = (String)conf.get(Config.DRPC_HTTP_CREDS_PLUGIN); return AuthUtils.GetHttpCredentialsPlugin(conf, klassName); }
|
/**
* Construct an HttpServletRequest credential plugin specified by the DRPC
* storm configuration
* @param conf storm configuration
* @return the plugin
*/
|
Construct an HttpServletRequest credential plugin specified by the DRPC storm configuration
|
GetDrpcHttpCredentialsPlugin
|
{
"repo_name": "anshuiisc/storm-Allbolts-wiring",
"path": "storm-core/src/jvm/org/apache/storm/security/auth/AuthUtils.java",
"license": "apache-2.0",
"size": 15031
}
|
[
"java.util.Map",
"org.apache.storm.Config"
] |
import java.util.Map; import org.apache.storm.Config;
|
import java.util.*; import org.apache.storm.*;
|
[
"java.util",
"org.apache.storm"
] |
java.util; org.apache.storm;
| 2,492,811
|
/**
 * Adds an expression factory to the list of expression factories.
 *
 * @param expressionFactory the expression factory to add
 */
public void addExpressionFactory(ExpressionFactory expressionFactory) {
    expressionFactories.add(expressionFactory);
}
// Intentionally a no-op: this test configuration holds no resources to release.
public void close() {}
|
void function(ExpressionFactory expressionFactory) { expressionFactories.add(expressionFactory); } public void close() {}
|
/**
* Adds an expression factory to the list of expression factories.
*
* @param expressionFactory the expression factory to add
*/
|
Adds an expression factory to the list of expression factories
|
addExpressionFactory
|
{
"repo_name": "levi-h/aluminumproject",
"path": "tests/src/test/java/com/googlecode/aluminumproject/configuration/TestConfiguration.java",
"license": "apache-2.0",
"size": 6606
}
|
[
"com.googlecode.aluminumproject.expressions.ExpressionFactory"
] |
import com.googlecode.aluminumproject.expressions.ExpressionFactory;
|
import com.googlecode.aluminumproject.expressions.*;
|
[
"com.googlecode.aluminumproject"
] |
com.googlecode.aluminumproject;
| 2,623,674
|
/** A JSNI method that returns the JsArray of component parameters (implementation supplied natively). */
public final native JsArray<ParameterJSO> getParameters() ;
|
final native JsArray<ParameterJSO> function() ;
|
/**
* A JSNI method that returns the JsArray of component parameters.
*/
|
A JSNI method that returns the JsArray of component parameters
|
getParameters
|
{
"repo_name": "mdpiper/wmt-client",
"path": "src/edu/colorado/csdms/wmt/client/data/ComponentJSO.java",
"license": "mit",
"size": 6646
}
|
[
"com.google.gwt.core.client.JsArray"
] |
import com.google.gwt.core.client.JsArray;
|
import com.google.gwt.core.client.*;
|
[
"com.google.gwt"
] |
com.google.gwt;
| 1,177,440
|
/**
 * Returns the value of the 'Tw SS Stif' attribute list; the list contents
 * are of type {@link java.lang.Float}. (EMF-generated accessor.)
 */
EList<Float> getTwSSStif();
|
EList<Float> getTwSSStif();
|
/**
* Returns the value of the '<em><b>Tw SS Stif</b></em>' attribute list.
* The list contents are of type {@link java.lang.Float}.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Tw SS Stif</em>' attribute list isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Tw SS Stif</em>' attribute list.
* @see sc.ndt.editor.fast.fasttwr.FasttwrPackage#getaTwrStat_TwSSStif()
* @model unique="false"
* @generated
*/
|
Returns the value of the 'Tw SS Stif' attribute list. The list contents are of type <code>java.lang.Float</code>. If the meaning of the 'Tw SS Stif' attribute list isn't clear, there really should be more of a description here...
|
getTwSSStif
|
{
"repo_name": "cooked/NDT",
"path": "sc.ndt.editor.fast.twr/src-gen/sc/ndt/editor/fast/fasttwr/aTwrStat.java",
"license": "gpl-3.0",
"size": 7106
}
|
[
"org.eclipse.emf.common.util.EList"
] |
import org.eclipse.emf.common.util.EList;
|
import org.eclipse.emf.common.util.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,759,976
|
/**
 * Partially update the specified Lease, returning the deserialized body plus
 * the HTTP response details.
 *
 * @param name name of the Lease (required)
 * @param namespace object name and auth scope, such as for teams and projects (required)
 * @param body the patch to apply (required)
 * @param pretty if 'true', then the output is pretty printed (optional)
 * @param dryRun when present, modifications are not persisted (optional)
 * @param fieldManager name associated with the actor making these changes (optional)
 * @param fieldValidation how the server handles unknown/duplicate fields (optional)
 * @param force force Apply requests; must be unset for non-apply patches (optional)
 * @return ApiResponse&lt;V1Lease&gt;
 * @throws ApiException if the API call fails or the response cannot be deserialized
 */
public ApiResponse<V1Lease> patchNamespacedLeaseWithHttpInfo(
    String name,
    String namespace,
    V1Patch body,
    String pretty,
    String dryRun,
    String fieldManager,
    String fieldValidation,
    Boolean force)
    throws ApiException {
  // The response body is deserialized into a V1Lease.
  final Type returnType = new TypeToken<V1Lease>() {}.getType();
  // Validate arguments and build the HTTP call (no progress callback).
  final okhttp3.Call call =
      patchNamespacedLeaseValidateBeforeCall(
          name, namespace, body, pretty, dryRun, fieldManager, fieldValidation, force, null);
  return localVarApiClient.execute(call, returnType);
}
|
ApiResponse<V1Lease> function( String name, String namespace, V1Patch body, String pretty, String dryRun, String fieldManager, String fieldValidation, Boolean force) throws ApiException { okhttp3.Call localVarCall = patchNamespacedLeaseValidateBeforeCall( name, namespace, body, pretty, dryRun, fieldManager, fieldValidation, force, null); Type localVarReturnType = new TypeToken<V1Lease>() {}.getType(); return localVarApiClient.execute(localVarCall, localVarReturnType); }
|
/**
* partially update the specified Lease
*
* @param name name of the Lease (required)
* @param namespace object name and auth scope, such as for teams and projects (required)
* @param body (required)
* @param pretty If 'true', then the output is pretty printed. (optional)
* @param dryRun When present, indicates that modifications should not be persisted. An invalid or
* unrecognized dryRun directive will result in an error response and no further processing of
* the request. Valid values are: - All: all dry run stages will be processed (optional)
* @param fieldManager fieldManager is a name associated with the actor or entity that is making
* these changes. The value must be less than or 128 characters long, and only contain
* printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is
* required for apply requests (application/apply-patch) but optional for non-apply patch
* types (JsonPatch, MergePatch, StrategicMergePatch). (optional)
* @param fieldValidation fieldValidation determines how the server should respond to
* unknown/duplicate fields in the object in the request. Introduced as alpha in 1.23, older
* servers or servers with the `ServerSideFieldValidation` feature disabled will
* discard valid values specified in this param and not perform any server side field
* validation. Valid values are: - Ignore: ignores unknown/duplicate fields. - Warn: responds
* with a warning for each unknown/duplicate field, but successfully serves the request. -
* Strict: fails the request on unknown/duplicate fields. (optional)
* @param force Force is going to \"force\" Apply requests. It means user will
* re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply
* patch requests. (optional)
* @return ApiResponse<V1Lease>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the
* response body
* @http.response.details
* <table summary="Response Details" border="1">
* <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
* <tr><td> 200 </td><td> OK </td><td> - </td></tr>
* <tr><td> 201 </td><td> Created </td><td> - </td></tr>
* <tr><td> 401 </td><td> Unauthorized </td><td> - </td></tr>
* </table>
*/
|
partially update the specified Lease
|
patchNamespacedLeaseWithHttpInfo
|
{
"repo_name": "kubernetes-client/java",
"path": "kubernetes/src/main/java/io/kubernetes/client/openapi/apis/CoordinationV1Api.java",
"license": "apache-2.0",
"size": 162125
}
|
[
"com.google.gson.reflect.TypeToken",
"io.kubernetes.client.custom.V1Patch",
"io.kubernetes.client.openapi.ApiException",
"io.kubernetes.client.openapi.ApiResponse",
"io.kubernetes.client.openapi.models.V1Lease",
"java.lang.reflect.Type"
] |
import com.google.gson.reflect.TypeToken; import io.kubernetes.client.custom.V1Patch; import io.kubernetes.client.openapi.ApiException; import io.kubernetes.client.openapi.ApiResponse; import io.kubernetes.client.openapi.models.V1Lease; import java.lang.reflect.Type;
|
import com.google.gson.reflect.*; import io.kubernetes.client.custom.*; import io.kubernetes.client.openapi.*; import io.kubernetes.client.openapi.models.*; import java.lang.reflect.*;
|
[
"com.google.gson",
"io.kubernetes.client",
"java.lang"
] |
com.google.gson; io.kubernetes.client; java.lang;
| 1,691,853
|
/**
 * Gets the default element attributes of the cache.
 * <p>
 * A defensive copy is returned, never a reference to the live attributes.
 *
 * @return a clone of the attributes, or {@code null} if none are set
 */
public IElementAttributes getElementAttributes()
{
    return attr == null ? null : attr.clone();
}
|
IElementAttributes function() { if (attr != null) { return attr.clone(); } return null; }
|
/**
* Gets the default element attribute of the Cache object This returns a copy. It does not
* return a reference to the attributes.
* <p>
* @return The attributes value
*/
|
Gets the default element attribute of the Cache object This returns a copy. It does not return a reference to the attributes.
|
getElementAttributes
|
{
"repo_name": "apache/commons-jcs",
"path": "commons-jcs-core/src/main/java/org/apache/commons/jcs3/engine/control/CompositeCache.java",
"license": "apache-2.0",
"size": 56663
}
|
[
"org.apache.commons.jcs3.engine.behavior.IElementAttributes"
] |
import org.apache.commons.jcs3.engine.behavior.IElementAttributes;
|
import org.apache.commons.jcs3.engine.behavior.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 614,250
|
/**
 * Utility method for parsing nodes that are simply lists of values.
 *
 * @param node parent node whose children are scanned
 * @param resultsList receives the text content of each matching child
 * @param childNodeName node name that identifies a list entry
 */
private void parseListNodes(Node node, List<String> resultsList, String childNodeName) {
    final NodeList childNodes = node.getChildNodes();
    final int count = childNodes.getLength();
    for (int i = 0; i < count; i++) {
        final Node child = childNodes.item(i);
        // Anything that is not a list entry is reported as unknown.
        if (!child.getNodeName().equals(childNodeName)) {
            logUnknownChild(node, child);
            continue;
        }
        resultsList.add(XMLUtil.getTextContent(child));
    }
}
|
void function(Node node, List<String> resultsList, String childNodeName) { NodeList children = node.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Node child = children.item(j); if (child.getNodeName().equals(childNodeName)) { resultsList.add(XMLUtil.getTextContent(child)); } else { logUnknownChild(node, child); } } }
|
/**
* Utility method for parsing nodes that are simply lists of values.
*/
|
Utility method for parsing nodes that are simply lists of values
|
parseListNodes
|
{
"repo_name": "opendk/openbiwiki",
"path": "openbiwiki-core/src/main/java/org/jamwiki/WikiConfiguration.java",
"license": "mit",
"size": 13412
}
|
[
"java.util.List",
"org.jamwiki.utils.XMLUtil",
"org.w3c.dom.Node",
"org.w3c.dom.NodeList"
] |
import java.util.List; import org.jamwiki.utils.XMLUtil; import org.w3c.dom.Node; import org.w3c.dom.NodeList;
|
import java.util.*; import org.jamwiki.utils.*; import org.w3c.dom.*;
|
[
"java.util",
"org.jamwiki.utils",
"org.w3c.dom"
] |
java.util; org.jamwiki.utils; org.w3c.dom;
| 992,889
|
/**
 * Opens a connection to the given target host.
 *
 * @param conn    the connection to open
 * @param target  the target host to connect to
 * @param local   the local address to route from, or {@code null} for the default
 * @param context the context for the connection
 * @param params  the parameters for the connection
 * @throws IOException in case of a problem
 */
void openConnection(OperatedClientConnection conn,
                    HttpHost target,
                    InetAddress local,
                    HttpContext context,
                    HttpParams params)
    throws IOException
    ;
|
void openConnection(OperatedClientConnection conn, HttpHost target, InetAddress local, HttpContext context, HttpParams params) throws IOException ;
|
/**
* Opens a connection to the given target host.
*
* @param conn the connection to open
* @param target the target host to connect to
* @param local the local address to route from, or
* <code>null</code> for the default
* @param context the context for the connection
* @param params the parameters for the connection
*
* @throws IOException in case of a problem
*/
|
Opens a connection to the given target host
|
openConnection
|
{
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "external/apache-http/src/org/apache/http/conn/ClientConnectionOperator.java",
"license": "gpl-3.0",
"size": 4662
}
|
[
"java.io.IOException",
"java.net.InetAddress",
"org.apache.http.HttpHost",
"org.apache.http.params.HttpParams",
"org.apache.http.protocol.HttpContext"
] |
import java.io.IOException; import java.net.InetAddress; import org.apache.http.HttpHost; import org.apache.http.params.HttpParams; import org.apache.http.protocol.HttpContext;
|
import java.io.*; import java.net.*; import org.apache.http.*; import org.apache.http.params.*; import org.apache.http.protocol.*;
|
[
"java.io",
"java.net",
"org.apache.http"
] |
java.io; java.net; org.apache.http;
| 815,403
|
/**
 * Set which paint the unit lines should be painted with.
 *
 * @param paint {@code Paint} to paint the unit lines with
 */
public void setUnitsPaint(Paint paint) {
    // Both axes share the same unit-line paint.
    unitYPaint = unitXPaint = paint;
}
|
void function(Paint paint) { unitXPaint = paint; unitYPaint = paint; }
|
/**
* Set which paint the unit lines should be painted with.
*
* @param paint
* {@code Paint} to paint the unit lines with.
*/
|
Set which paint the unit lines should be painted with
|
setUnitsPaint
|
{
"repo_name": "andern/jcoolib",
"path": "src/cartesian/coordinate/CCSystem.java",
"license": "gpl-3.0",
"size": 37342
}
|
[
"java.awt.Paint"
] |
import java.awt.Paint;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 148,140
|
Aspect aspect = EMFModelUtil.getRootContainerOfType(classifier, RamPackage.Literals.ASPECT);
EditingDomain editingDomain = EMFEditUtil.getEditingDomain(aspect);
Command moveClassCommand = createMoveCommand(editingDomain, aspect.getStructuralView(), classifier, x, y);
doExecute(editingDomain, moveClassCommand);
}
|
Aspect aspect = EMFModelUtil.getRootContainerOfType(classifier, RamPackage.Literals.ASPECT); EditingDomain editingDomain = EMFEditUtil.getEditingDomain(aspect); Command moveClassCommand = createMoveCommand(editingDomain, aspect.getStructuralView(), classifier, x, y); doExecute(editingDomain, moveClassCommand); }
|
/**
* Moves the given classifier to a new position.
*
* @param classifier the classifier to move
* @param x the new x position
* @param y the new y position
*/
|
Moves the given classifier to a new position
|
moveClassifier
|
{
"repo_name": "mschoettle/ecse429-fall15-project",
"path": "ca.mcgill.sel.ram.controller/src/ca/mcgill/sel/ram/controller/ClassController.java",
"license": "gpl-2.0",
"size": 35270
}
|
[
"ca.mcgill.sel.commons.emf.util.EMFEditUtil",
"ca.mcgill.sel.commons.emf.util.EMFModelUtil",
"ca.mcgill.sel.ram.Aspect",
"ca.mcgill.sel.ram.RamPackage",
"org.eclipse.emf.common.command.Command",
"org.eclipse.emf.edit.domain.EditingDomain"
] |
import ca.mcgill.sel.commons.emf.util.EMFEditUtil; import ca.mcgill.sel.commons.emf.util.EMFModelUtil; import ca.mcgill.sel.ram.Aspect; import ca.mcgill.sel.ram.RamPackage; import org.eclipse.emf.common.command.Command; import org.eclipse.emf.edit.domain.EditingDomain;
|
import ca.mcgill.sel.commons.emf.util.*; import ca.mcgill.sel.ram.*; import org.eclipse.emf.common.command.*; import org.eclipse.emf.edit.domain.*;
|
[
"ca.mcgill.sel",
"org.eclipse.emf"
] |
ca.mcgill.sel; org.eclipse.emf;
| 1,142,171
|
/**
 * scaleImage
 * <p>
 * Scale the original image related to the ImageProperties.xml dimensions.
 *
 * @param bufImg Buffered image to scale
 * @param imgHeight Original image height
 * @param imgWidth Original image width
 * @param dimensionMap Image dimensions by size type
 * @param sizeType Size type to scale
 * @param locale Locale used to localize error messages
 * @return Map holding "responseMessage", "bufferedImage" and "scaleFactor" on
 *         success, or a ModelService.ERROR_MESSAGE entry on failure
 */
public static Map<String, Object> scaleImage(BufferedImage bufImg, double imgHeight, double imgWidth, Map<String, Map<String, String>> dimensionMap, String sizeType, Locale locale) {
    Map<String, Object> result = new LinkedHashMap<String, Object>();
    Map<String, String> dimensions = dimensionMap.get(sizeType);
    // A missed dimension is authorized: -1 means "derive this side from the other".
    // (Values are already Strings, so no toString() call is needed before parsing.)
    double defaultHeight = dimensions.containsKey("height") ? Double.parseDouble(dimensions.get("height")) : -1;
    double defaultWidth = dimensions.containsKey("width") ? Double.parseDouble(dimensions.get("width")) : -1;
    if (defaultHeight == 0.0 || defaultWidth == 0.0) {
        return reportScaleError(result, UtilProperties.getMessage(resource, "ImageTransform.one_default_dimension_is_null", locale) + " : defaultHeight = " + defaultHeight + " ; defaultWidth = " + defaultWidth);
    }
    // find the right Scale Factor related to the Image Dimensions
    double scaleFactor;
    if (defaultHeight == -1) {
        scaleFactor = defaultWidth / imgWidth;
        if (scaleFactor == 0.0) {
            return reportScaleError(result, UtilProperties.getMessage(resource, "ImageTransform.width_scale_factor_is_null", locale) + " (defaultWidth = " + defaultWidth + "; imgWidth = " + imgWidth);
        }
    } else if (defaultWidth == -1) {
        scaleFactor = defaultHeight / imgHeight;
        if (scaleFactor == 0.0) {
            return reportScaleError(result, UtilProperties.getMessage(resource, "ImageTransform.height_scale_factor_is_null", locale) + " (defaultHeight = " + defaultHeight + "; imgHeight = " + imgHeight);
        }
    } else if (imgHeight > imgWidth) {
        scaleFactor = defaultHeight / imgHeight;
        if (scaleFactor == 0.0) {
            return reportScaleError(result, UtilProperties.getMessage(resource, "ImageTransform.height_scale_factor_is_null", locale) + " (defaultHeight = " + defaultHeight + "; imgHeight = " + imgHeight);
        }
        // get scaleFactor from the smallest width
        if (defaultWidth < (imgWidth * scaleFactor)) {
            scaleFactor = defaultWidth / imgWidth;
        }
    } else {
        scaleFactor = defaultWidth / imgWidth;
        if (scaleFactor == 0.0) {
            return reportScaleError(result, UtilProperties.getMessage(resource, "ImageTransform.width_scale_factor_is_null", locale) + " (defaultWidth = " + defaultWidth + "; imgWidth = " + imgWidth);
        }
        // get scaleFactor from the smallest height
        if (defaultHeight < (imgHeight * scaleFactor)) {
            scaleFactor = defaultHeight / imgHeight;
        }
    }
    if (scaleFactor == 0.0) {
        return reportScaleError(result, UtilProperties.getMessage(resource, "ImageTransform.final_scale_factor_is_null", locale) + " = " + scaleFactor);
    }
    int bufImgType;
    if (BufferedImage.TYPE_CUSTOM == bufImg.getType()) {
        String errMsg = UtilProperties.getMessage(resource, "ImageTransform.unknown_buffered_image_type", locale);
        Debug.logWarning(errMsg, module);
        // apply a type for image majority
        bufImgType = BufferedImage.TYPE_INT_ARGB_PRE;
    } else {
        bufImgType = bufImg.getType();
    }
    // scale original image with new size
    Image newImg = bufImg.getScaledInstance((int) (imgWidth * scaleFactor), (int) (imgHeight * scaleFactor), Image.SCALE_SMOOTH);
    result.put("responseMessage", "success");
    result.put("bufferedImage", ImageTransform.toBufferedImage(newImg, bufImgType));
    result.put("scaleFactor", scaleFactor);
    return result;
}

/** Logs the given error, records it under ModelService.ERROR_MESSAGE and returns the result map. */
private static Map<String, Object> reportScaleError(Map<String, Object> result, String errMsg) {
    Debug.logError(errMsg, module);
    result.put(ModelService.ERROR_MESSAGE, errMsg);
    return result;
}
|
static Map<String, Object> function(BufferedImage bufImg, double imgHeight, double imgWidth, Map<String, Map<String, String>> dimensionMap, String sizeType, Locale locale) { BufferedImage bufNewImg; double defaultHeight, defaultWidth, scaleFactor; Map<String, Object> result = new LinkedHashMap<String, Object>(); if (dimensionMap.get(sizeType).containsKey(STR)) { defaultHeight = Double.parseDouble(dimensionMap.get(sizeType).get(STR).toString()); } else { defaultHeight = -1; } if (dimensionMap.get(sizeType).containsKey("width")) { defaultWidth = Double.parseDouble(dimensionMap.get(sizeType).get("width").toString()); } else { defaultWidth = -1; } if (defaultHeight == 0.0 defaultWidth == 0.0) { String errMsg = UtilProperties.getMessage(resource, STR, locale) + STR + defaultHeight + STR + defaultWidth; Debug.logError(errMsg, module); result.put(ModelService.ERROR_MESSAGE, errMsg); return result; } if (defaultHeight == -1) { scaleFactor = defaultWidth / imgWidth; if (scaleFactor == 0.0) { String errMsg = UtilProperties.getMessage(resource, STR, locale) + STR + defaultWidth + STR + imgWidth; Debug.logError(errMsg, module); result.put(ModelService.ERROR_MESSAGE, errMsg); return result; } } else if (defaultWidth == -1) { scaleFactor = defaultHeight / imgHeight; if (scaleFactor == 0.0) { String errMsg = UtilProperties.getMessage(resource, STR, locale) + STR + defaultHeight + STR + imgHeight; Debug.logError(errMsg, module); result.put(ModelService.ERROR_MESSAGE, errMsg); return result; } } else if (imgHeight > imgWidth) { scaleFactor = defaultHeight / imgHeight; if (scaleFactor == 0.0) { String errMsg = UtilProperties.getMessage(resource, STR, locale) + STR + defaultHeight + STR + imgHeight; Debug.logError(errMsg, module); result.put(ModelService.ERROR_MESSAGE, errMsg); return result; } if (defaultWidth < (imgWidth * scaleFactor)) { scaleFactor = defaultWidth / imgWidth; } } else { scaleFactor = defaultWidth / imgWidth; if (scaleFactor == 0.0) { String errMsg = 
UtilProperties.getMessage(resource, STR, locale) + STR + defaultWidth + STR + imgWidth; Debug.logError(errMsg, module); result.put(ModelService.ERROR_MESSAGE, errMsg); return result; } if (defaultHeight < (imgHeight * scaleFactor)) { scaleFactor = defaultHeight / imgHeight; } } if (scaleFactor == 0.0) { String errMsg = UtilProperties.getMessage(resource, STR, locale) + STR + scaleFactor; Debug.logError(errMsg, module); result.put(ModelService.ERROR_MESSAGE, errMsg); return result; } int bufImgType; if (BufferedImage.TYPE_CUSTOM == bufImg.getType()) { String errMsg = UtilProperties.getMessage(resource, STR, locale); Debug.logWarning(errMsg, module); bufImgType = BufferedImage.TYPE_INT_ARGB_PRE; } else { bufImgType = bufImg.getType(); } Image newImg = bufImg.getScaledInstance((int) (imgWidth * scaleFactor), (int) (imgHeight * scaleFactor), Image.SCALE_SMOOTH); bufNewImg = ImageTransform.toBufferedImage(newImg, bufImgType); result.put(STR, STR); result.put(STR, bufNewImg); result.put(STR, scaleFactor); return result; }
|
/**
* scaleImage
* <p>
* scale original image related to the ImageProperties.xml dimensions
*
* @param bufImg Buffered image to scale
* @param imgHeight Original image height
* @param imgWidth Original image width
* @param dimensionMap Image dimensions by size type
* @param sizeType Size type to scale
* @return New scaled buffered image
*/
|
scaleImage scale original image related to the ImageProperties.xml dimensions
|
scaleImage
|
{
"repo_name": "rohankarthik/Ofbiz",
"path": "framework/common/src/main/java/org/apache/ofbiz/common/image/ImageTransform.java",
"license": "apache-2.0",
"size": 13355
}
|
[
"java.awt.Image",
"java.awt.image.BufferedImage",
"java.util.LinkedHashMap",
"java.util.Locale",
"java.util.Map",
"org.apache.ofbiz.base.util.Debug",
"org.apache.ofbiz.base.util.UtilProperties",
"org.apache.ofbiz.service.ModelService"
] |
import java.awt.Image; import java.awt.image.BufferedImage; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.UtilProperties; import org.apache.ofbiz.service.ModelService;
|
import java.awt.*; import java.awt.image.*; import java.util.*; import org.apache.ofbiz.base.util.*; import org.apache.ofbiz.service.*;
|
[
"java.awt",
"java.util",
"org.apache.ofbiz"
] |
java.awt; java.util; org.apache.ofbiz;
| 933,772
|
/**
 * Read a new stripe covering the current position, and store the data in the
 * {@link #curStripeBuf}.
 *
 * @param corruptedBlocks accumulator for blocks found corrupted during the read
 * @throws IOException if reading any portion of the stripe fails
 */
private void readOneStripe(CorruptedBlocks corruptedBlocks)
    throws IOException {
  resetCurStripeBuffer();
  // compute stripe range based on pos
  final long offsetInBlockGroup = getOffsetInBlockGroup();
  // a full stripe spans cellSize bytes on each of the data blocks
  final long stripeLen = cellSize * dataBlkNum;
  final int stripeIndex = (int) (offsetInBlockGroup / stripeLen);
  final int stripeBufOffset = (int) (offsetInBlockGroup % stripeLen);
  // the last stripe of the block group may be shorter than stripeLen
  final int stripeLimit = (int) Math.min(currentLocatedBlock.getBlockSize()
      - (stripeIndex * stripeLen), stripeLen);
  StripeRange stripeRange =
      new StripeRange(offsetInBlockGroup, stripeLimit - stripeBufOffset);
  LocatedStripedBlock blockGroup = (LocatedStripedBlock) currentLocatedBlock;
  // carve the requested range into aligned stripes backed by curStripeBuf
  AlignedStripe[] stripes = StripedBlockUtil.divideOneStripe(ecPolicy,
      cellSize, blockGroup, offsetInBlockGroup,
      offsetInBlockGroup + stripeRange.getLength() - 1, curStripeBuf);
  final LocatedBlock[] blks = StripedBlockUtil.parseStripedBlockGroup(
      blockGroup, cellSize, dataBlkNum, parityBlkNum);
  // read the whole stripe
  for (AlignedStripe stripe : stripes) {
    // Parse group to get chosen DN location
    StripeReader sreader = new StatefulStripeReader(stripe, ecPolicy, blks,
        blockReaders, corruptedBlocks, decoder, this);
    sreader.readStripe();
  }
  // expose only the freshly-read range through the buffer's position/limit
  curStripeBuf.position(stripeBufOffset);
  curStripeBuf.limit(stripeLimit);
  curStripeRange = stripeRange;
}
|
void function(CorruptedBlocks corruptedBlocks) throws IOException { resetCurStripeBuffer(); final long offsetInBlockGroup = getOffsetInBlockGroup(); final long stripeLen = cellSize * dataBlkNum; final int stripeIndex = (int) (offsetInBlockGroup / stripeLen); final int stripeBufOffset = (int) (offsetInBlockGroup % stripeLen); final int stripeLimit = (int) Math.min(currentLocatedBlock.getBlockSize() - (stripeIndex * stripeLen), stripeLen); StripeRange stripeRange = new StripeRange(offsetInBlockGroup, stripeLimit - stripeBufOffset); LocatedStripedBlock blockGroup = (LocatedStripedBlock) currentLocatedBlock; AlignedStripe[] stripes = StripedBlockUtil.divideOneStripe(ecPolicy, cellSize, blockGroup, offsetInBlockGroup, offsetInBlockGroup + stripeRange.getLength() - 1, curStripeBuf); final LocatedBlock[] blks = StripedBlockUtil.parseStripedBlockGroup( blockGroup, cellSize, dataBlkNum, parityBlkNum); for (AlignedStripe stripe : stripes) { StripeReader sreader = new StatefulStripeReader(stripe, ecPolicy, blks, blockReaders, corruptedBlocks, decoder, this); sreader.readStripe(); } curStripeBuf.position(stripeBufOffset); curStripeBuf.limit(stripeLimit); curStripeRange = stripeRange; }
|
/**
* Read a new stripe covering the current position, and store the data in the
* {@link #curStripeBuf}.
*/
|
Read a new stripe covering the current position, and store the data in the <code>#curStripeBuf</code>
|
readOneStripe
|
{
"repo_name": "ChetnaChaudhari/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java",
"license": "apache-2.0",
"size": 18396
}
|
[
"java.io.IOException",
"org.apache.hadoop.hdfs.DFSUtilClient",
"org.apache.hadoop.hdfs.protocol.LocatedBlock",
"org.apache.hadoop.hdfs.protocol.LocatedStripedBlock",
"org.apache.hadoop.hdfs.util.StripedBlockUtil"
] |
import java.io.IOException; import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock; import org.apache.hadoop.hdfs.util.StripedBlockUtil;
|
import java.io.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.util.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 2,589,412
|
/**
 * Reads the configuration and builds a map of polling clusters.
 *
 * @return map of polling clusters keyed by cluster id; empty in standalone mode
 */
private Map<String, PollingCluster> lesePollingCluster() {
    Map<String, PollingCluster> clusterMap = new HashMap<>();
    // Read the JMX connections and determine the polling mode.
    Map<String, JMXConnectionParameter> jmxVerbindungenMap = leseJmxVerbindungen();
    // No cluster configuration is needed in standalone mode.
    if (modusStandalone) {
        return clusterMap;
    }
    isyPollingProperties.getCluster().forEach((clusterId, cluster) -> {
        String clusterName = cluster.getName();
        // determine the assigned JMX connections
        List<JMXConnectionParameter> jmxParameterListe = new ArrayList<>();
        if (cluster.getJmxverbindungen().isEmpty()) {
            // If no specific JMX connections are assigned,
            // all defined JMX connections are assigned.
            jmxParameterListe.addAll(jmxVerbindungenMap.values());
        } else {
            for (String jmxVerbindung : cluster.getJmxverbindungen()) {
                JMXConnectionParameter jmxConnectionParameter = jmxVerbindungenMap.get(jmxVerbindung);
                // fail fast when a cluster references an undefined JMX connection
                if (jmxConnectionParameter == null) {
                    throw new PollingClusterKonfigurationException(
                        Fehlerschluessel.MSG_UNBEKANNTE_VERBINDUNGSZUORDNUNG, jmxVerbindung, clusterId);
                }
                jmxParameterListe.add(jmxConnectionParameter);
            }
        }
        // create the polling cluster
        PollingCluster pollingCluster = new PollingCluster(isyPollingProperties.getJmx().getDomain(), clusterId, clusterName,
            cluster.getWartezeit(),
            jmxParameterListe);
        clusterMap.put(clusterId, pollingCluster);
    });
    return clusterMap;
}
|
Map<String, PollingCluster> function() { Map<String, PollingCluster> clusterMap = new HashMap<>(); Map<String, JMXConnectionParameter> jmxVerbindungenMap = leseJmxVerbindungen(); if (modusStandalone) { return clusterMap; } isyPollingProperties.getCluster().forEach((clusterId, cluster) -> { String clusterName = cluster.getName(); List<JMXConnectionParameter> jmxParameterListe = new ArrayList<>(); if (cluster.getJmxverbindungen().isEmpty()) { jmxParameterListe.addAll(jmxVerbindungenMap.values()); } else { for (String jmxVerbindung : cluster.getJmxverbindungen()) { JMXConnectionParameter jmxConnectionParameter = jmxVerbindungenMap.get(jmxVerbindung); if (jmxConnectionParameter == null) { throw new PollingClusterKonfigurationException( Fehlerschluessel.MSG_UNBEKANNTE_VERBINDUNGSZUORDNUNG, jmxVerbindung, clusterId); } jmxParameterListe.add(jmxConnectionParameter); } } PollingCluster pollingCluster = new PollingCluster(isyPollingProperties.getJmx().getDomain(), clusterId, clusterName, cluster.getWartezeit(), jmxParameterListe); clusterMap.put(clusterId, pollingCluster); }); return clusterMap; }
|
/**
* Liest die Konfiguration aus und baut eine Map mit Polling-Clustern auf.
* @return Map mit den Polling-Clustern.
*/
|
Liest die Konfiguration aus und baut eine Map mit Polling-Clustern auf
|
lesePollingCluster
|
{
"repo_name": "IsyFact/IsyFact-Standards",
"path": "isy-polling/src/main/java/de/bund/bva/isyfact/polling/impl/PollingVerwalterImpl.java",
"license": "apache-2.0",
"size": 12488
}
|
[
"de.bund.bva.isyfact.polling.common.exception.PollingClusterKonfigurationException",
"de.bund.bva.isyfact.polling.common.konstanten.Fehlerschluessel",
"java.util.ArrayList",
"java.util.HashMap",
"java.util.List",
"java.util.Map"
] |
import de.bund.bva.isyfact.polling.common.exception.PollingClusterKonfigurationException; import de.bund.bva.isyfact.polling.common.konstanten.Fehlerschluessel; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map;
|
import de.bund.bva.isyfact.polling.common.exception.*; import de.bund.bva.isyfact.polling.common.konstanten.*; import java.util.*;
|
[
"de.bund.bva",
"java.util"
] |
de.bund.bva; java.util;
| 2,170,103
|
@Test
public void testNullPropertyInfo() {
final Driver driver = new Driver();
Assert.assertEquals("Invalid property size", 7,
driver.getPropertyInfo("jdbc:paradox:target/test-classes/", null).length);
}
|
void function() { final Driver driver = new Driver(); Assert.assertEquals(STR, 7, driver.getPropertyInfo(STR, null).length); }
|
/**
* Test for null property info.
*/
|
Test for null property info
|
testNullPropertyInfo
|
{
"repo_name": "leonhad/paradoxdriver",
"path": "src/test/java/com/googlecode/paradox/DriverTest.java",
"license": "lgpl-3.0",
"size": 4800
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 2,437,095
|
public static void addEmojis(Context context, Spannable text, int emojiSize, int emojiAlignment, int textSize, int index, int length) {
addEmojis(context, text, emojiSize, emojiAlignment, textSize, index, length, false);
}
|
static void function(Context context, Spannable text, int emojiSize, int emojiAlignment, int textSize, int index, int length) { addEmojis(context, text, emojiSize, emojiAlignment, textSize, index, length, false); }
|
/**
* Convert emoji characters of the given Spannable to the according emojicon.
*
* @param context
* @param text
* @param emojiSize
* @param emojiAlignment
* @param textSize
* @param index
* @param length
*/
|
Convert emoji characters of the given Spannable to the according emojicon
|
addEmojis
|
{
"repo_name": "chaitanya0bhagvan/emojicon",
"path": "library/src/main/java/com/rockerhieu/emojicon/EmojiconHandler.java",
"license": "apache-2.0",
"size": 84574
}
|
[
"android.content.Context",
"android.text.Spannable"
] |
import android.content.Context; import android.text.Spannable;
|
import android.content.*; import android.text.*;
|
[
"android.content",
"android.text"
] |
android.content; android.text;
| 1,337,652
|
@Override
public void handleData(ByteBuffer buf, Link link) {
if (buf == null)
return;
try {
if (verbose)
System.out.println("[" + this + "] INFO: Writing data to stream: " + buf + ".");
os.write(buf.data, buf.offset, buf.length);
os.flush();
} catch (IOException e) {
System.err.println("[" + this + "] ERROR: " + e.getMessage());
closeStream();
}
}
|
void function(ByteBuffer buf, Link link) { if (buf == null) return; try { if (verbose) System.out.println("[" + this + STR + buf + "."); os.write(buf.data, buf.offset, buf.length); os.flush(); } catch (IOException e) { System.err.println("[" + this + STR + e.getMessage()); closeStream(); } }
|
/**
* Send incoming data to stream.
*/
|
Send incoming data to stream
|
handleData
|
{
"repo_name": "GabrielBrascher/cloudstack",
"path": "services/console-proxy/rdpconsole/src/main/java/streamer/OutputStreamSink.java",
"license": "apache-2.0",
"size": 4590
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,255,980
|
public List<Integer> getColumnIds() {
return columns;
}
}
public static class HashBucketSchema {
private final List<Integer> columnIds;
private int numBuckets;
private int seed;
public HashBucketSchema(List<Integer> columnIds, int numBuckets, int seed) {
this.columnIds = columnIds;
this.numBuckets = numBuckets;
this.seed = seed;
}
|
List<Integer> function() { return columns; } } public static class HashBucketSchema { private final List<Integer> columnIds; private int numBuckets; private int seed; public HashBucketSchema(List<Integer> columnIds, int numBuckets, int seed) { this.columnIds = columnIds; this.numBuckets = numBuckets; this.seed = seed; }
|
/**
* Gets the column IDs of the columns in the range partition.
* @return the column IDs of the columns in the range partition
*/
|
Gets the column IDs of the columns in the range partition
|
getColumnIds
|
{
"repo_name": "helifu/kudu",
"path": "java/kudu-client/src/main/java/org/apache/kudu/client/PartitionSchema.java",
"license": "apache-2.0",
"size": 4839
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,296,261
|
@Override
public void onDeleted(Context context, int[] appWidgetIds) {
for (int appWidgetId : appWidgetIds) {
UnreadWidgetConfiguration.deleteWidgetConfiguration(context, appWidgetId);
}
}
|
void function(Context context, int[] appWidgetIds) { for (int appWidgetId : appWidgetIds) { UnreadWidgetConfiguration.deleteWidgetConfiguration(context, appWidgetId); } }
|
/**
* Called when a widget instance is deleted.
*/
|
Called when a widget instance is deleted
|
onDeleted
|
{
"repo_name": "AvatarBlueray/k9-mail-5.002-spam-filter-edition",
"path": "src/com/fsck/k9/provider/UnreadWidgetProvider.java",
"license": "bsd-3-clause",
"size": 6350
}
|
[
"android.content.Context",
"com.fsck.k9.activity.UnreadWidgetConfiguration"
] |
import android.content.Context; import com.fsck.k9.activity.UnreadWidgetConfiguration;
|
import android.content.*; import com.fsck.k9.activity.*;
|
[
"android.content",
"com.fsck.k9"
] |
android.content; com.fsck.k9;
| 763,312
|
public static String getDisplayPath(Path path, boolean showLeaf)
{
// This method was moved here from org.alfresco.web.bean.repository.Repository
StringBuilder buf = new StringBuilder(64);
int count = path.size() - (showLeaf ? 0 : 1);
for (int i = 0; i < count; i++)
{
String elementString = null;
Path.Element element = path.get(i);
if (element instanceof Path.ChildAssocElement)
{
ChildAssociationRef elementRef = ((Path.ChildAssocElement) element).getRef();
if (elementRef.getParentRef() != null)
{
elementString = elementRef.getQName().getLocalName();
}
} else
{
elementString = element.getElementString();
}
if (elementString != null)
{
buf.append("/");
buf.append(elementString);
}
}
return buf.toString();
}
|
static String function(Path path, boolean showLeaf) { StringBuilder buf = new StringBuilder(64); int count = path.size() - (showLeaf ? 0 : 1); for (int i = 0; i < count; i++) { String elementString = null; Path.Element element = path.get(i); if (element instanceof Path.ChildAssocElement) { ChildAssociationRef elementRef = ((Path.ChildAssocElement) element).getRef(); if (elementRef.getParentRef() != null) { elementString = elementRef.getQName().getLocalName(); } } else { elementString = element.getElementString(); } if (elementString != null) { buf.append("/"); buf.append(elementString); } } return buf.toString(); }
|
/**
* Return the human readable form of the specified node Path. Fast version
* of the method that simply converts QName localname components to Strings.
*
* @param path Path to extract readable form from
* @param showLeaf Whether to process the final leaf element of the path
*
* @return human readable form of the Path
*/
|
Return the human readable form of the specified node Path. Fast version of the method that simply converts QName localname components to Strings
|
getDisplayPath
|
{
"repo_name": "nguyentienlong/community-edition",
"path": "projects/repository/source/java/org/alfresco/util/PathUtil.java",
"license": "lgpl-3.0",
"size": 2401
}
|
[
"org.alfresco.service.cmr.repository.ChildAssociationRef",
"org.alfresco.service.cmr.repository.Path"
] |
import org.alfresco.service.cmr.repository.ChildAssociationRef; import org.alfresco.service.cmr.repository.Path;
|
import org.alfresco.service.cmr.repository.*;
|
[
"org.alfresco.service"
] |
org.alfresco.service;
| 2,429,133
|
protected void setPosition(Point pos) {
this.x = pos.x;
this.y = pos.y;
}
|
void function(Point pos) { this.x = pos.x; this.y = pos.y; }
|
/**
* Set position
* @param pos
*/
|
Set position
|
setPosition
|
{
"repo_name": "winspeednl/LibZ",
"path": "Java/src/me/sven/libz/input/Mouse.java",
"license": "apache-2.0",
"size": 3050
}
|
[
"java.awt.Point"
] |
import java.awt.Point;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 1,595,681
|
//EXCLUDE-START-lockdiag-
public Map<Lockable, Control> shallowClone() {
HashMap<Lockable, Control> clone = new HashMap<Lockable, Control>();
for (Entry entry : locks.values()) {
entry.lock();
try {
Control control = entry.control;
if (control != null) {
clone.put(control.getLockable(), control.shallowClone());
}
} finally {
entry.unlock();
}
}
return clone;
}
//EXCLUDE-END-lockdiag-
|
Map<Lockable, Control> function() { HashMap<Lockable, Control> clone = new HashMap<Lockable, Control>(); for (Entry entry : locks.values()) { entry.lock(); try { Control control = entry.control; if (control != null) { clone.put(control.getLockable(), control.shallowClone()); } } finally { entry.unlock(); } } return clone; }
|
/**
* make a shallow clone of myself and my lock controls
*/
|
make a shallow clone of myself and my lock controls
|
shallowClone
|
{
"repo_name": "scnakandala/derby",
"path": "java/engine/org/apache/derby/impl/services/locks/ConcurrentLockSet.java",
"license": "apache-2.0",
"size": 34813
}
|
[
"java.util.HashMap",
"java.util.Map",
"org.apache.derby.iapi.services.locks.Lockable"
] |
import java.util.HashMap; import java.util.Map; import org.apache.derby.iapi.services.locks.Lockable;
|
import java.util.*; import org.apache.derby.iapi.services.locks.*;
|
[
"java.util",
"org.apache.derby"
] |
java.util; org.apache.derby;
| 2,276,666
|
private boolean isEmpty(Object o) {
if (o == null) {
return true;
} else if (o instanceof String && "".equals(o)) {
return true;
} else if (o instanceof List<?> && ((List<?>) o).size() < 1) {
return true;
} else if (o instanceof Map<?, ?> && ((Map<?, ?>) o).size() < 1) {
return true;
} else if (o instanceof byte[] && ((byte[]) o).length < 1) {
return true;
}
return false;
}
|
boolean function(Object o) { if (o == null) { return true; } else if (o instanceof String && "".equals(o)) { return true; } else if (o instanceof List<?> && ((List<?>) o).size() < 1) { return true; } else if (o instanceof Map<?, ?> && ((Map<?, ?>) o).size() < 1) { return true; } else if (o instanceof byte[] && ((byte[]) o).length < 1) { return true; } return false; }
|
/**
* Returns true if the argument is null or is "empty", which is determined based on the type of
* the argument.
*
* @param o
* @return
*/
|
Returns true if the argument is null or is "empty", which is determined based on the type of the argument
|
isEmpty
|
{
"repo_name": "nmldiegues/stibt",
"path": "infinispan/cachestore/hbase/src/main/java/org/infinispan/loaders/hbase/HBaseFacade.java",
"license": "apache-2.0",
"size": 24005
}
|
[
"java.util.List",
"java.util.Map"
] |
import java.util.List; import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,062,251
|
public void dispatchToEvents(int nodeHandle, org.xml.sax.ContentHandler ch)
throws org.xml.sax.SAXException
{
TreeWalker treeWalker = m_walker;
ContentHandler prevCH = treeWalker.getContentHandler();
if(null != prevCH)
{
treeWalker = new TreeWalker(null);
}
treeWalker.setContentHandler(ch);
try
{
Node node = getNode(nodeHandle);
treeWalker.traverseFragment(node);
}
finally
{
treeWalker.setContentHandler(null);
}
}
|
void function(int nodeHandle, org.xml.sax.ContentHandler ch) throws org.xml.sax.SAXException { TreeWalker treeWalker = m_walker; ContentHandler prevCH = treeWalker.getContentHandler(); if(null != prevCH) { treeWalker = new TreeWalker(null); } treeWalker.setContentHandler(ch); try { Node node = getNode(nodeHandle); treeWalker.traverseFragment(node); } finally { treeWalker.setContentHandler(null); } }
|
/**
* Directly create SAX parser events from a subtree.
*
* @param nodeHandle The node ID.
* @param ch A non-null reference to a ContentHandler.
*
* @throws org.xml.sax.SAXException
*/
|
Directly create SAX parser events from a subtree
|
dispatchToEvents
|
{
"repo_name": "YouDiSN/OpenJDK-Research",
"path": "jdk9/jaxp/src/java.xml/share/classes/com/sun/org/apache/xml/internal/dtm/ref/dom2dtm/DOM2DTM.java",
"license": "gpl-2.0",
"size": 58410
}
|
[
"com.sun.org.apache.xml.internal.utils.TreeWalker",
"org.w3c.dom.Node",
"org.xml.sax.ContentHandler"
] |
import com.sun.org.apache.xml.internal.utils.TreeWalker; import org.w3c.dom.Node; import org.xml.sax.ContentHandler;
|
import com.sun.org.apache.xml.internal.utils.*; import org.w3c.dom.*; import org.xml.sax.*;
|
[
"com.sun.org",
"org.w3c.dom",
"org.xml.sax"
] |
com.sun.org; org.w3c.dom; org.xml.sax;
| 458,229
|
public static List<ZKAuthInfo> parseAuth(String authString) {
List<ZKAuthInfo> ret = Lists.newArrayList();
if (authString == null) {
return ret;
}
List<String> authComps = Lists.newArrayList(
Splitter.on(',').omitEmptyStrings().trimResults()
.split(authString));
for (String comp : authComps) {
String parts[] = comp.split(":", 2);
if (parts.length != 2) {
throw new BadAuthFormatException(
"Auth '" + comp + "' not of expected form scheme:auth");
}
ret.add(new ZKAuthInfo(parts[0],
parts[1].getBytes(Charsets.UTF_8)));
}
return ret;
}
|
static List<ZKAuthInfo> function(String authString) { List<ZKAuthInfo> ret = Lists.newArrayList(); if (authString == null) { return ret; } List<String> authComps = Lists.newArrayList( Splitter.on(',').omitEmptyStrings().trimResults() .split(authString)); for (String comp : authComps) { String parts[] = comp.split(":", 2); if (parts.length != 2) { throw new BadAuthFormatException( STR + comp + STR); } ret.add(new ZKAuthInfo(parts[0], parts[1].getBytes(Charsets.UTF_8))); } return ret; }
|
/**
* Parse a comma-separated list of authentication mechanisms. Each
* such mechanism should be of the form 'scheme:auth' -- the same
* syntax used for the 'addAuth' command in the ZK CLI.
*
* @param authString the comma-separated auth mechanisms
* @return a list of parsed authentications
*/
|
Parse a comma-separated list of authentication mechanisms. Each such mechanism should be of the form 'scheme:auth' -- the same syntax used for the 'addAuth' command in the ZK CLI
|
parseAuth
|
{
"repo_name": "ict-carch/hadoop-plus",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAZKUtil.java",
"license": "apache-2.0",
"size": 6064
}
|
[
"com.google.common.base.Charsets",
"com.google.common.base.Splitter",
"com.google.common.collect.Lists",
"java.util.List"
] |
import com.google.common.base.Charsets; import com.google.common.base.Splitter; import com.google.common.collect.Lists; import java.util.List;
|
import com.google.common.base.*; import com.google.common.collect.*; import java.util.*;
|
[
"com.google.common",
"java.util"
] |
com.google.common; java.util;
| 1,246,112
|
@Override
public int quantityDroppedWithBonus(int fortune, Random random)
{
int bonus = random.nextInt(fortune + 2) - 1;
if (bonus < 0) { bonus = 0; }
return this.quantityDropped(random) * (bonus + 1);
}
|
int function(int fortune, Random random) { int bonus = random.nextInt(fortune + 2) - 1; if (bonus < 0) { bonus = 0; } return this.quantityDropped(random) * (bonus + 1); }
|
/**
* Returns the quantity of items to drop with fortune.
*/
|
Returns the quantity of items to drop with fortune
|
quantityDroppedWithBonus
|
{
"repo_name": "sidben/Tutorial",
"path": "src/java/sidben/tutorialmod/block/BlockRandomDrops.java",
"license": "mit",
"size": 4061
}
|
[
"java.util.Random"
] |
import java.util.Random;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 470,774
|
@Test
public void testSizeLimit() {
try (VarCharVector vector = allocVector(1000)) {
TestIndex index = new TestIndex();
VarCharColumnWriter writer = makeWriter(vector, index);
writer.bindListener(new ColumnWriterListener() {
// Because assumed array size is 10, so 10 * 1000 = 10,000
// rounded to 16K
int totalAlloc = 16384;
|
void function() { try (VarCharVector vector = allocVector(1000)) { TestIndex index = new TestIndex(); VarCharColumnWriter writer = makeWriter(vector, index); writer.bindListener(new ColumnWriterListener() { int totalAlloc = 16384;
|
/**
* Test resize monitoring. Add a listener to an Varchar writer,
* capture each resize, and refuse a resize when the s
* of the vector exceeds 1 MB. This will trigger an overflow,
* which will throw an exception which we then check for.
*/
|
Test resize monitoring. Add a listener to an Varchar writer, capture each resize, and refuse a resize when the s of the vector exceeds 1 MB. This will trigger an overflow, which will throw an exception which we then check for
|
testSizeLimit
|
{
"repo_name": "parthchandra/incubator-drill",
"path": "exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariableWidthWriter.java",
"license": "apache-2.0",
"size": 12062
}
|
[
"org.apache.drill.exec.vector.VarCharVector",
"org.apache.drill.exec.vector.accessor.ColumnAccessors",
"org.apache.drill.exec.vector.accessor.ScalarWriter",
"org.apache.drill.test.rowSet.test.TestFixedWidthWriter"
] |
import org.apache.drill.exec.vector.VarCharVector; import org.apache.drill.exec.vector.accessor.ColumnAccessors; import org.apache.drill.exec.vector.accessor.ScalarWriter; import org.apache.drill.test.rowSet.test.TestFixedWidthWriter;
|
import org.apache.drill.exec.vector.*; import org.apache.drill.exec.vector.accessor.*; import org.apache.drill.test.*;
|
[
"org.apache.drill"
] |
org.apache.drill;
| 134,653
|
@Test
public void testCheckpointWithSNN() throws Exception {
MiniDFSCluster cluster = null;
DistributedFileSystem dfs = null;
SecondaryNameNode snn = null;
try {
Configuration conf = new HdfsConfiguration();
cluster = new MiniDFSCluster.Builder(conf).build();
cluster.waitActive();
conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
"0.0.0.0:0");
snn = new SecondaryNameNode(conf);
dfs = cluster.getFileSystem();
dfs.mkdirs(new Path("/test/foo"));
snn.doCheckpoint();
//start rolling upgrade
dfs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
dfs.rollingUpgrade(RollingUpgradeAction.PREPARE);
dfs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
dfs.mkdirs(new Path("/test/bar"));
// do checkpoint in SNN again
snn.doCheckpoint();
} finally {
IOUtils.cleanup(null, dfs);
if (snn != null) {
snn.shutdown();
}
if (cluster != null) {
cluster.shutdown();
}
}
}
|
void function() throws Exception { MiniDFSCluster cluster = null; DistributedFileSystem dfs = null; SecondaryNameNode snn = null; try { Configuration conf = new HdfsConfiguration(); cluster = new MiniDFSCluster.Builder(conf).build(); cluster.waitActive(); conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, STR); snn = new SecondaryNameNode(conf); dfs = cluster.getFileSystem(); dfs.mkdirs(new Path(STR)); snn.doCheckpoint(); dfs.setSafeMode(SafeModeAction.SAFEMODE_ENTER); dfs.rollingUpgrade(RollingUpgradeAction.PREPARE); dfs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE); dfs.mkdirs(new Path(STR)); snn.doCheckpoint(); } finally { IOUtils.cleanup(null, dfs); if (snn != null) { snn.shutdown(); } if (cluster != null) { cluster.shutdown(); } } }
|
/**
* In non-HA setup, after rolling upgrade prepare, the Secondary NN should
* still be able to do checkpoint
*/
|
In non-HA setup, after rolling upgrade prepare, the Secondary NN should still be able to do checkpoint
|
testCheckpointWithSNN
|
{
"repo_name": "tseen/Federated-HDFS",
"path": "tseenliu/FedHDFS-hadoop-src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRollingUpgrade.java",
"license": "apache-2.0",
"size": 18862
}
|
[
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hdfs.protocol.HdfsConstants",
"org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode",
"org.apache.hadoop.io.IOUtils"
] |
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode; import org.apache.hadoop.io.IOUtils;
|
import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.io.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 363,579
|
public List<String> getExcludeOperationTags() {
return this.excludeOperationTags;
}
|
List<String> function() { return this.excludeOperationTags; }
|
/**
* This gets the excludeOperationTags
* @return the excludeOperationTags
*/
|
This gets the excludeOperationTags
|
getExcludeOperationTags
|
{
"repo_name": "rodney757/swagger-doclet",
"path": "swagger-doclet/src/main/java/com/tenxerconsulting/swagger/doclet/DocletOptions.java",
"license": "apache-2.0",
"size": 59779
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,193,707
|
protected int getCurrentLockState() {
Toolkit t = Toolkit.getDefaultToolkit();
int lockState = 0;
try {
if (t.getLockingKeyState(KeyEvent.VK_KANA_LOCK)) {
lockState++;
}
} catch (UnsupportedOperationException ex) {
}
lockState <<= 1;
try {
if (t.getLockingKeyState(KeyEvent.VK_SCROLL_LOCK)) {
lockState++;
}
} catch (UnsupportedOperationException ex) {
}
lockState <<= 1;
try {
if (t.getLockingKeyState(KeyEvent.VK_NUM_LOCK)) {
lockState++;
}
} catch (UnsupportedOperationException ex) {
}
lockState <<= 1;
try {
if (t.getLockingKeyState(KeyEvent.VK_CAPS_LOCK)) {
lockState++;
}
} catch (UnsupportedOperationException ex) {
}
return lockState;
}
|
int function() { Toolkit t = Toolkit.getDefaultToolkit(); int lockState = 0; try { if (t.getLockingKeyState(KeyEvent.VK_KANA_LOCK)) { lockState++; } } catch (UnsupportedOperationException ex) { } lockState <<= 1; try { if (t.getLockingKeyState(KeyEvent.VK_SCROLL_LOCK)) { lockState++; } } catch (UnsupportedOperationException ex) { } lockState <<= 1; try { if (t.getLockingKeyState(KeyEvent.VK_NUM_LOCK)) { lockState++; } } catch (UnsupportedOperationException ex) { } lockState <<= 1; try { if (t.getLockingKeyState(KeyEvent.VK_CAPS_LOCK)) { lockState++; } } catch (UnsupportedOperationException ex) { } return lockState; }
|
/**
* Returns a bitmask representing the state of the key locks.
*/
|
Returns a bitmask representing the state of the key locks
|
getCurrentLockState
|
{
"repo_name": "apache/batik",
"path": "batik-gvt/src/main/java/org/apache/batik/gvt/event/AWTEventDispatcher.java",
"license": "apache-2.0",
"size": 28814
}
|
[
"java.awt.Toolkit",
"java.awt.event.KeyEvent"
] |
import java.awt.Toolkit; import java.awt.event.KeyEvent;
|
import java.awt.*; import java.awt.event.*;
|
[
"java.awt"
] |
java.awt;
| 2,884,231
|
public QueryFilterType getQueryFilterType(QueryType query) throws ServiceException {
if (query == null || query.getQueryFilter() == null) {
logger.error("getQueryFilterType: invalid QueryFilterType.");
throw ExceptionCodes.buildProviderException(ExceptionCodes.MISSING_PARAMETER, "QueryFilterType", "<null>");
}
return query.getQueryFilter();
}
|
QueryFilterType function(QueryType query) throws ServiceException { if (query == null query.getQueryFilter() == null) { logger.error(STR); throw ExceptionCodes.buildProviderException(ExceptionCodes.MISSING_PARAMETER, STR, STR); } return query.getQueryFilter(); }
|
/**
* Extracts QueryFilterType from the incoming QueryType.
*
* @param QueryType query
* @return QueryFilterType
* @throws ServiceException
*/
|
Extracts QueryFilterType from the incoming QueryType
|
getQueryFilterType
|
{
"repo_name": "jmacauley/OpenDRAC",
"path": "Nsi/NsiServer/src/main/java/org/opendrac/nsi/endpoints/ConnectionServiceProvider.java",
"license": "gpl-3.0",
"size": 29447
}
|
[
"org.ogf.schemas.nsi._2011._10.connection.provider.ServiceException",
"org.ogf.schemas.nsi._2011._10.connection.types.QueryFilterType",
"org.ogf.schemas.nsi._2011._10.connection.types.QueryType",
"org.opendrac.nsi.util.ExceptionCodes"
] |
import org.ogf.schemas.nsi._2011._10.connection.provider.ServiceException; import org.ogf.schemas.nsi._2011._10.connection.types.QueryFilterType; import org.ogf.schemas.nsi._2011._10.connection.types.QueryType; import org.opendrac.nsi.util.ExceptionCodes;
|
import org.ogf.schemas.nsi.*; import org.opendrac.nsi.util.*;
|
[
"org.ogf.schemas",
"org.opendrac.nsi"
] |
org.ogf.schemas; org.opendrac.nsi;
| 601,208
|
public ByteBuffer getByteBuffer() throws IOException {
InputStream in = cachedInputStream();
if (in instanceof ByteBuffered) {
return ((ByteBuffered)in).getByteBuffer();
}
return null;
}
|
ByteBuffer function() throws IOException { InputStream in = cachedInputStream(); if (in instanceof ByteBuffered) { return ((ByteBuffered)in).getByteBuffer(); } return null; }
|
/**
* Returns the Resource data as a ByteBuffer, but only if the input stream
* was implemented on top of a ByteBuffer. Return {@code null} otherwise.
* @return Resource data or null.
*/
|
Returns the Resource data as a ByteBuffer, but only if the input stream was implemented on top of a ByteBuffer. Return null otherwise
|
getByteBuffer
|
{
"repo_name": "FauxFaux/jdk9-jdk",
"path": "src/java.base/share/classes/jdk/internal/loader/Resource.java",
"license": "gpl-2.0",
"size": 6045
}
|
[
"java.io.IOException",
"java.io.InputStream",
"java.nio.ByteBuffer"
] |
import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer;
|
import java.io.*; import java.nio.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 277,090
|
public int getEncryptionState() throws RemoteException;
|
int function() throws RemoteException;
|
/**
* Determines the encryption state of the volume.
* @return a numerical value. See {@code ENCRYPTION_STATE_*} for possible values.
*/
|
Determines the encryption state of the volume
|
getEncryptionState
|
{
"repo_name": "doctang/TestPlatform",
"path": "AutoTest/src/android/os/storage/IMountService.java",
"license": "apache-2.0",
"size": 54707
}
|
[
"android.os.RemoteException"
] |
import android.os.RemoteException;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 898,700
|
@CustomValidator(type = "hibernate")
public InputOutputObject getInputOutputObject() {
return inputOutputObject;
}
|
@CustomValidator(type = STR) InputOutputObject function() { return inputOutputObject; }
|
/**
* Gets the inputOutputObject.
*
* @return the inputOutputObject.
*/
|
Gets the inputOutputObject
|
getInputOutputObject
|
{
"repo_name": "NCIP/prot-express",
"path": "software/src/main/java/gov/nih/nci/protexpress/ui/actions/experiment/viewedit/InputOutputDetailsAction.java",
"license": "bsd-3-clause",
"size": 5375
}
|
[
"com.opensymphony.xwork2.validator.annotations.CustomValidator",
"gov.nih.nci.protexpress.domain.protocol.InputOutputObject"
] |
import com.opensymphony.xwork2.validator.annotations.CustomValidator; import gov.nih.nci.protexpress.domain.protocol.InputOutputObject;
|
import com.opensymphony.xwork2.validator.annotations.*; import gov.nih.nci.protexpress.domain.protocol.*;
|
[
"com.opensymphony.xwork2",
"gov.nih.nci"
] |
com.opensymphony.xwork2; gov.nih.nci;
| 2,369,901
|
public Collection<String> getAuthorizedIps();
|
Collection<String> function();
|
/**
* Returns a set of strings representing IP address representing
* IPv4 or IPv6 ranges / CIDRs. e.g. 192.168.0.0/16 from which
* access should be allowed (if and only if the IP is not included
* in the list of denied IPs)
*/
|
Returns a set of strings representing IP address representing IPv4 or IPv6 ranges / CIDRs. e.g. 192.168.0.0/16 from which access should be allowed (if and only if the IP is not included in the list of denied IPs)
|
getAuthorizedIps
|
{
"repo_name": "apache/shiro",
"path": "web/src/main/java/org/apache/shiro/web/filter/authz/IpSource.java",
"license": "apache-2.0",
"size": 1549
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 125,301
|
@Nullable public static List<UUID> readUuids(DataInput in) throws IOException {
int size = in.readInt();
// Check null flag.
if (size == -1)
return null;
List<UUID> col = new ArrayList<>(size);
for (int i = 0; i < size; i++)
col.add(readUuid(in));
return col;
}
|
@Nullable static List<UUID> function(DataInput in) throws IOException { int size = in.readInt(); if (size == -1) return null; List<UUID> col = new ArrayList<>(size); for (int i = 0; i < size; i++) col.add(readUuid(in)); return col; }
|
/**
* Reads UUIDs from input stream. This method is meant to be used by
* implementations of {@link Externalizable} interface.
*
* @param in Input stream.
* @return Read UUIDs.
* @throws IOException If read failed.
*/
|
Reads UUIDs from input stream. This method is meant to be used by implementations of <code>Externalizable</code> interface
|
readUuids
|
{
"repo_name": "NSAmelchev/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 388551
}
|
[
"java.io.DataInput",
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"org.jetbrains.annotations.Nullable"
] |
import java.io.DataInput; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.jetbrains.annotations.Nullable;
|
import java.io.*; import java.util.*; import org.jetbrains.annotations.*;
|
[
"java.io",
"java.util",
"org.jetbrains.annotations"
] |
java.io; java.util; org.jetbrains.annotations;
| 1,549,672
|
public void addInput(String name, Map<String, Object> elements);
|
void function(String name, Map<String, Object> elements);
|
/**
* Adds an input that is a dictionary of strings to objects.
*/
|
Adds an input that is a dictionary of strings to objects
|
addInput
|
{
"repo_name": "prabeesh/DataflowJavaSDK",
"path": "sdk/src/main/java/com/google/cloud/dataflow/sdk/runners/DataflowPipelineTranslator.java",
"license": "apache-2.0",
"size": 40151
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 727,391
|
@Column(nullable = false, name="item_nr")
public String getNumber() {
return this.number;
}
|
@Column(nullable = false, name=STR) String function() { return this.number; }
|
/**
* The number is a unique product number, like for example an EAN.
*
* @return Returns the number.
*/
|
The number is a unique product number, like for example an EAN
|
getNumber
|
{
"repo_name": "prozakis/mywms-Server",
"path": "server.app/mywms.as/src/org/mywms/model/ItemData.java",
"license": "gpl-3.0",
"size": 11604
}
|
[
"javax.persistence.Column"
] |
import javax.persistence.Column;
|
import javax.persistence.*;
|
[
"javax.persistence"
] |
javax.persistence;
| 1,223,410
|
/**
 * Persists (flushes) the given preferences node. A failing backing store
 * is logged rather than propagated to the caller.
 *
 * @param prefs the preferences node to flush
 */
private static void flush(IEclipsePreferences prefs) {
    try {
        prefs.flush();
    } catch (BackingStoreException bse) {
        EASyLoggerFactory.INSTANCE
            .getLogger(EASyPreferenceStore.class, Activator.PLUGIN_ID)
            .exception(bse);
    }
}
|
static void function(IEclipsePreferences prefs) { try { prefs.flush(); } catch (BackingStoreException e) { EASyLoggerFactory.INSTANCE.getLogger(EASyPreferenceStore.class, Activator.PLUGIN_ID).exception(e); } }
|
/**
* Flushes (and saves) the given preferences object.
*
* @param prefs the preferences to be flushed
*/
|
Flushes (and saves) the given preferences object
|
flush
|
{
"repo_name": "SSEHUB/EASyProducer",
"path": "Plugins/EASy-Producer/EASy-Producer.UI/src/net/ssehub/easy/producer/ui/internal/EASyPreferenceStore.java",
"license": "apache-2.0",
"size": 11034
}
|
[
"net.ssehub.easy.basics.logger.EASyLoggerFactory",
"org.eclipse.core.runtime.preferences.IEclipsePreferences",
"org.osgi.service.prefs.BackingStoreException"
] |
import net.ssehub.easy.basics.logger.EASyLoggerFactory; import org.eclipse.core.runtime.preferences.IEclipsePreferences; import org.osgi.service.prefs.BackingStoreException;
|
import net.ssehub.easy.basics.logger.*; import org.eclipse.core.runtime.preferences.*; import org.osgi.service.prefs.*;
|
[
"net.ssehub.easy",
"org.eclipse.core",
"org.osgi.service"
] |
net.ssehub.easy; org.eclipse.core; org.osgi.service;
| 1,985,479
|
/**
 * Validates the given built ydt: a leading "revInteger" leaf followed by a
 * chain of MRV merge nodes, each carrying a single range-checked leaf.
 *
 * @param ydtNode root of the subtree to validate
 */
private void validate1Tree(YdtNode ydtNode) {
    ydtNode = ydtNode.getFirstChild();
    validateLeafContents(ydtNode, "revInteger", "100");

    // Each call steps to the next sibling MRV node, verifies it, and
    // checks its single child leaf (see validateNextMrvLeaf).
    ydtNode = validateNextMrvLeaf(ydtNode, "revInteger", "2147483647");
    ydtNode = validateNextMrvLeaf(ydtNode, RUI, MINVALUE);
    ydtNode = validateNextMrvLeaf(ydtNode, RUI, "1");
    ydtNode = validateNextMrvLeaf(ydtNode, RUI, "2");
    ydtNode = validateNextMrvLeaf(ydtNode, RUI, "10");
    ydtNode = validateNextMrvLeaf(ydtNode, RUI, "20");
    ydtNode = validateNextMrvLeaf(ydtNode, RUI, "100");
    validateNextMrvLeaf(ydtNode, RUI, MAXUINT32);
}

/**
 * Moves from the given (already validated) leaf to the next sibling MRV
 * merge node, verifies the node, then validates its first child leaf.
 *
 * @param leaf  the previously validated leaf node
 * @param name  expected name of the next leaf
 * @param value expected value of the next leaf
 * @return the validated child leaf, for chaining
 */
private YdtNode validateNextMrvLeaf(YdtNode leaf, String name, String value) {
    YdtNode node = leaf.getParent().getNextSibling();
    validateNodeContents(node, MRV, MERGE);
    node = node.getFirstChild();
    validateLeafContents(node, name, value);
    return node;
}
//TODO negative scenario will be handled later
// @Test
// public void negative1Test() throws YdtException {
// validateErrMsg("posInt", INT32NS, "integer", SINT32, null);
// validateErrMsg("posInt", INT32NS, "127.0", SINT32, null);
// validateErrMsg("negInt", INT32NS, "-2147483649", SINT32, null);
// validateErrMsg("posInt", INT32NS, "2147483648", SINT32, null);
// validateErrMsg(MINIWR, INT32NS, "9", CAPSINT32, null);
// validateErrMsg(MAXIWR, INT32NS, "101", CAPSINT32, null);
// }
//
//
// @Test
// public void negative2Test() throws YdtException {
// validateErrMsg("maxUInt", INT32NS, "integer", SUINT32, null);
// validateErrMsg("maxUInt", INT32NS, "127.0", SUINT32, null);
// validateErrMsg("minUInt", INT32NS, "-2147483649", MINVALUE, null);
// validateErrMsg("maxUInt", INT32NS, "4294967296", MAXUINT32, null);
// validateErrMsg(MINUIWR, INT32NS, "9", CAPSUINT32, null);
// validateErrMsg(MAXUIWR, INT32NS, "101", CAPSUINT32, null);
// }
//
//
// @Test
// public void negative3Test() throws YdtException {
// validateErrMsg("integer", INT32NS, "9", CAPSINT32, MRV);
// validateErrMsg("integer", INT32NS, "41", CAPSINT32, MRV);
// validateErrMsg("integer", INT32NS, "49", CAPSINT32, MRV);
// validateErrMsg("integer", INT32NS, "101", CAPSINT32, MRV);
// }
//
//
// @Test
// public void negative4Test() throws YdtException {
// validateErrMsg("UnInteger", INT32NS, "9", CAPSUINT32, MRV);
// validateErrMsg("UnInteger", INT32NS, "41", CAPSUINT32, MRV);
// validateErrMsg("UnInteger", INT32NS, "49", CAPSUINT32, MRV);
// validateErrMsg("UnInteger", INT32NS, "101", CAPSUINT32, MRV);
// }
//
//
// @Test
// public void negative5Test() throws YdtException {
// // Multi range validation
// validateErrMsg("revInteger", INT32NS, "-2147483649", SINT32, MRV);
// validateErrMsg("revInteger", INT32NS, "4", CAPSINT32, MRV);
// validateErrMsg("revInteger", INT32NS, "9", CAPSINT32, MRV);
// validateErrMsg("revInteger", INT32NS, "11", CAPSINT32, MRV);
// validateErrMsg("revInteger", INT32NS, "19", CAPSINT32, MRV);
// validateErrMsg("revInteger", INT32NS, "2147483648", SINT32, MRV);
// }
//
//
// @Test
// public void negative6Test() throws YdtException {
// // Multi range validation
// validateErrMsg(RUI, INT32NS, "-2147483649", MINVALUE, MRV);
// validateErrMsg(RUI, INT32NS, "4", CAPSUINT32, MRV);
// validateErrMsg(RUI, INT32NS, "9", CAPSUINT32, MRV);
// validateErrMsg(RUI, INT32NS, "11", CAPSUINT32, MRV);
// validateErrMsg(RUI, INT32NS, "19", CAPSUINT32, MRV);
// validateErrMsg(RUI, INT32NS, "4294967296", MAXUINT32, MRV);
// }
|
void function(YdtNode ydtNode) { ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, STR, "100"); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, STR, STR); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, RUI, MINVALUE); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, RUI, "1"); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, RUI, "2"); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, RUI, "10"); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, RUI, "20"); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, RUI, "100"); ydtNode = ydtNode.getParent(); ydtNode = ydtNode.getNextSibling(); validateNodeContents(ydtNode, MRV, MERGE); ydtNode = ydtNode.getFirstChild(); validateLeafContents(ydtNode, RUI, MAXUINT32); }
|
/**
* Validates the given built ydt.
*/
|
Validates the given built ydt
|
validate1Tree
|
{
"repo_name": "sdnwiselab/onos",
"path": "apps/yms/ut/src/test/java/org/onosproject/yms/app/ydt/YdtInteger32Test.java",
"license": "apache-2.0",
"size": 16480
}
|
[
"org.onosproject.yms.app.ydt.YdtTestUtils"
] |
import org.onosproject.yms.app.ydt.YdtTestUtils;
|
import org.onosproject.yms.app.ydt.*;
|
[
"org.onosproject.yms"
] |
org.onosproject.yms;
| 1,747,740
|
@Override
public void contributeToMenu(IMenuManager menuManager) {
super.contributeToMenu(menuManager);
IMenuManager submenuManager = new MenuManager(OverviewEditorPlugin.INSTANCE.getString("_UI_ParametertypeEditor_menu"), "org.scaledl.overview.parametertypeMenuID");
menuManager.insertAfter("additions", submenuManager);
submenuManager.add(new Separator("settings"));
submenuManager.add(new Separator("actions"));
submenuManager.add(new Separator("additions"));
submenuManager.add(new Separator("additions-end"));
// Prepare for CreateChild item addition or removal.
//
createChildMenuManager = new MenuManager(OverviewEditorPlugin.INSTANCE.getString("_UI_CreateChild_menu_item"));
submenuManager.insertBefore("additions", createChildMenuManager);
// Prepare for CreateSibling item addition or removal.
//
createSiblingMenuManager = new MenuManager(OverviewEditorPlugin.INSTANCE.getString("_UI_CreateSibling_menu_item"));
submenuManager.insertBefore("additions", createSiblingMenuManager);
|
void function(IMenuManager menuManager) { super.contributeToMenu(menuManager); IMenuManager submenuManager = new MenuManager(OverviewEditorPlugin.INSTANCE.getString(STR), STR); menuManager.insertAfter(STR, submenuManager); submenuManager.add(new Separator(STR)); submenuManager.add(new Separator(STR)); submenuManager.add(new Separator(STR)); submenuManager.add(new Separator(STR)); submenuManager.insertBefore(STR, createChildMenuManager); submenuManager.insertBefore(STR, createSiblingMenuManager);
|
/**
* This adds to the menu bar a menu and some separators for editor additions,
* as well as the sub-menus for object creation items.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This adds to the menu bar a menu and some separators for editor additions, as well as the sub-menus for object creation items.
|
contributeToMenu
|
{
"repo_name": "CloudScale-Project/Environment",
"path": "plugins/org.scaledl.overview.editor/src/org/scaledl/overview/parametertype/presentation/ParametertypeActionBarContributor.java",
"license": "epl-1.0",
"size": 14937
}
|
[
"org.eclipse.jface.action.IMenuManager",
"org.eclipse.jface.action.MenuManager",
"org.eclipse.jface.action.Separator",
"org.scaledl.overview.presentation.OverviewEditorPlugin"
] |
import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.scaledl.overview.presentation.OverviewEditorPlugin;
|
import org.eclipse.jface.action.*; import org.scaledl.overview.presentation.*;
|
[
"org.eclipse.jface",
"org.scaledl.overview"
] |
org.eclipse.jface; org.scaledl.overview;
| 524,799
|
/**
 * Tests snapshot deletion through WebHDFS: creates a named and an unnamed
 * snapshot, verifies a null snapshot name is rejected, then deletes both.
 */
@Test
public void testWebHdfsDeleteSnapshot() throws Exception {
  MiniDFSCluster cluster = null;
  final Configuration conf = WebHdfsTestUtil.createConf();
  try {
    // No datanodes needed: only namespace (snapshot) operations are used.
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
    cluster.waitActive();
    final DistributedFileSystem dfs = cluster.getFileSystem();
    final FileSystem webHdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
        WebHdfsConstants.WEBHDFS_SCHEME);

    final Path foo = new Path("/foo");
    dfs.mkdirs(foo);
    dfs.allowSnapshot(foo);

    // One named snapshot and one with a generated (null-name) path.
    webHdfs.createSnapshot(foo, "s1");
    final Path spath = webHdfs.createSnapshot(foo, null);
    Assert.assertTrue(webHdfs.exists(spath));
    final Path s1path = SnapshotTestHelper.getSnapshotRoot(foo, "s1");
    Assert.assertTrue(webHdfs.exists(s1path));

    // delete operation snapshot name as null
    try {
      webHdfs.deleteSnapshot(foo, null);
      fail("Expected IllegalArgumentException");
    } catch (RemoteException e) {
      // Server-side validation surfaces as a RemoteException with a
      // fixed message; assert the exact text.
      Assert.assertEquals("Required param snapshotname for "
          + "op: DELETESNAPSHOT is null or empty", e.getLocalizedMessage());
    }

    // delete the two snapshots
    webHdfs.deleteSnapshot(foo, "s1");
    assertFalse(webHdfs.exists(s1path));
    webHdfs.deleteSnapshot(foo, spath.getName());
    assertFalse(webHdfs.exists(spath));
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
|
void function() throws Exception { MiniDFSCluster cluster = null; final Configuration conf = WebHdfsTestUtil.createConf(); try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build(); cluster.waitActive(); final DistributedFileSystem dfs = cluster.getFileSystem(); final FileSystem webHdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME); final Path foo = new Path("/foo"); dfs.mkdirs(foo); dfs.allowSnapshot(foo); webHdfs.createSnapshot(foo, "s1"); final Path spath = webHdfs.createSnapshot(foo, null); Assert.assertTrue(webHdfs.exists(spath)); final Path s1path = SnapshotTestHelper.getSnapshotRoot(foo, "s1"); Assert.assertTrue(webHdfs.exists(s1path)); try { webHdfs.deleteSnapshot(foo, null); fail(STR); } catch (RemoteException e) { Assert.assertEquals(STR + STR, e.getLocalizedMessage()); } webHdfs.deleteSnapshot(foo, "s1"); assertFalse(webHdfs.exists(s1path)); webHdfs.deleteSnapshot(foo, spath.getName()); assertFalse(webHdfs.exists(spath)); } finally { if (cluster != null) { cluster.shutdown(); } } }
|
/**
* Test snapshot deletion through WebHdfs
*/
|
Test snapshot deletion through WebHdfs
|
testWebHdfsDeleteSnapshot
|
{
"repo_name": "ctrezzo/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java",
"license": "apache-2.0",
"size": 56873
}
|
[
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hdfs.DistributedFileSystem",
"org.apache.hadoop.hdfs.MiniDFSCluster",
"org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper",
"org.apache.hadoop.ipc.RemoteException",
"org.junit.Assert"
] |
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper; import org.apache.hadoop.ipc.RemoteException; import org.junit.Assert;
|
import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.server.namenode.snapshot.*; import org.apache.hadoop.ipc.*; import org.junit.*;
|
[
"org.apache.hadoop",
"org.junit"
] |
org.apache.hadoop; org.junit;
| 2,131,088
|
/**
 * Creates a ReactiveSocket from a client-side {@link DuplexConnection}.
 * A client-side connection initiated the link with a server and supplies
 * its behavior (mime-types, leasing, connection details) via the
 * {@link ConnectionSetupPayload}.
 *
 * @param connection  client-initiated DuplexConnection; must not be null
 * @param setup       connection setup payload; must not be null
 * @param handler     optional handler for server requests; a "not found"
 *                    handler is substituted when null
 * @param errorStream optional stream-error callback; errors go to the
 *                    default error stream when null
 * @return a ReactiveSocket ready for start, shutdown, and requests
 */
public static ReactiveSocket fromClientConnection(
    DuplexConnection connection,
    ConnectionSetupPayload setup,
    RequestHandler handler,
    Consumer<Throwable> errorStream
) {
    if (connection == null) {
        throw new IllegalArgumentException("DuplexConnection can not be null");
    }
    if (setup == null) {
        throw new IllegalArgumentException("ConnectionSetupPayload can not be null");
    }

    final RequestHandler effectiveHandler =
        (handler == null) ? EMPTY_HANDLER : handler;
    final Consumer<Throwable> effectiveErrors =
        (errorStream == null) ? DEFAULT_ERROR_STREAM : errorStream;

    return new ReactiveSocket(
        connection, false, setup, effectiveHandler, null,
        NULL_LEASE_GOVERNOR, effectiveErrors);
}
|
static ReactiveSocket function( DuplexConnection connection, ConnectionSetupPayload setup, RequestHandler handler, Consumer<Throwable> errorStream ) { if (connection == null) { throw new IllegalArgumentException(STR); } if (setup == null) { throw new IllegalArgumentException(STR); } final RequestHandler h = handler != null ? handler : EMPTY_HANDLER; Consumer<Throwable> es = errorStream != null ? errorStream : DEFAULT_ERROR_STREAM; return new ReactiveSocket(connection, false, setup, h, null, NULL_LEASE_GOVERNOR, es); }
|
/**
* Create a ReactiveSocket from a client-side {@link DuplexConnection}.
* <p>
* A client-side connection is one that initiated the connection with a
* server and will define the ReactiveSocket behaviors via the
 * {@link ConnectionSetupPayload} that defines mime-types, leasing
* behavior and other connection-level details.
*
* @param connection
* DuplexConnection of client-side initiated connection for
* the ReactiveSocket protocol to use.
* @param setup
* ConnectionSetupPayload that defines mime-types and other
* connection behavior details.
* @param handler
* (Optional) RequestHandler for responding to requests from
* the server. If 'null' requests will be responded to with
* "Not Found" errors.
* @param errorStream
* (Optional) Callback for errors while processing streams
* over connection. If 'null' then error messages will be
* output to System.err.
* @return ReactiveSocket for start, shutdown and sending requests.
*/
|
Create a ReactiveSocket from a client-side <code>DuplexConnection</code>. A client-side connection is one that initiated the connection with a server and will define the ReactiveSocket behaviors via the <code>ConnectionSetupPayload</code> that define mime-types, leasing behavior and other connection-level details
|
fromClientConnection
|
{
"repo_name": "stevegury/reactivesocket-java",
"path": "src/main/java/io/reactivesocket/ReactiveSocket.java",
"license": "apache-2.0",
"size": 18965
}
|
[
"java.util.function.Consumer"
] |
import java.util.function.Consumer;
|
import java.util.function.*;
|
[
"java.util"
] |
java.util;
| 2,458,219
|
/**
 * Updates matching rows with the values stored by this object.
 *
 * @param contentResolver resolver used to perform the update
 * @param where selection restricting the affected rows (may be null,
 *              in which case all rows are updated)
 * @return the number of rows updated
 */
public int update(ContentResolver contentResolver, KeySelection where) {
    final String sel = (where != null) ? where.sel() : null;
    final String[] args = (where != null) ? where.args() : null;
    return contentResolver.update(uri(), values(), sel, args);
}
|
int function(ContentResolver contentResolver, KeySelection where) { return contentResolver.update(uri(), values(), where == null ? null : where.sel(), where == null ? null : where.args()); }
|
/**
* Update row(s) using the values stored by this object and the given selection.
*
* @param contentResolver The content resolver to use.
* @param where The selection to use (can be {@code null}).
*/
|
Update row(s) using the values stored by this object and the given selection
|
update
|
{
"repo_name": "skubit/skubit-comics",
"path": "billing/src/main/java/com/skubit/iab/provider/key/KeyContentValues.java",
"license": "apache-2.0",
"size": 2349
}
|
[
"android.content.ContentResolver"
] |
import android.content.ContentResolver;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 2,357,894
|
/**
 * Returns the user configuration of the WFC (Wi-Fi calling) mode,
 * defaulting to Wi-Fi preferred when no value has been stored.
 *
 * @param context context used to resolve the global settings
 * @return the configured WFC mode constant
 */
public static int getWfcMode(Context context) {
    final int mode = android.provider.Settings.Global.getInt(
            context.getContentResolver(),
            android.provider.Settings.Global.WFC_IMS_MODE,
            ImsConfig.WfcModeFeatureValueConstants.WIFI_PREFERRED);
    if (DBG) {
        log("getWfcMode - setting=" + mode);
    }
    return mode;
}
|
static int function(Context context) { int setting = android.provider.Settings.Global.getInt(context.getContentResolver(), android.provider.Settings.Global.WFC_IMS_MODE, ImsConfig.WfcModeFeatureValueConstants.WIFI_PREFERRED); if (DBG) log(STR + setting); return setting; }
|
/**
* Returns the user configuration of WFC modem setting
*/
|
Returns the user configuration of WFC modem setting
|
getWfcMode
|
{
"repo_name": "syslover33/ctank",
"path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/com/android/ims/ImsManager.java",
"license": "gpl-3.0",
"size": 41066
}
|
[
"android.content.Context",
"android.provider.Settings"
] |
import android.content.Context; import android.provider.Settings;
|
import android.content.*; import android.provider.*;
|
[
"android.content",
"android.provider"
] |
android.content; android.provider;
| 1,148,085
|
/**
 * Copies information from the step meta-data input into the dialog fields.
 */
public void getData() {
  // How the executed job is referenced: by file name, by repository
  // path, or by repository object id.
  specificationMethod = jobExecutorMeta.getSpecificationMethod();
  switch ( specificationMethod ) {
    case FILENAME:
      wPath.setText( Const.NVL( jobExecutorMeta.getFileName(), "" ) );
      break;
    case REPOSITORY_BY_NAME:
      String fullPath = Const.NVL( jobExecutorMeta.getDirectoryPath(), "" ) + "/" + Const
        .NVL( jobExecutorMeta.getJobName(), "" );
      wPath.setText( fullPath );
      break;
    case REPOSITORY_BY_REFERENCE:
      referenceObjectId = jobExecutorMeta.getJobObjectId();
      getByReferenceData( referenceObjectId );
      break;
    default:
      break;
  }

  // TODO: throw in a separate thread.
  //
  try {
    // Populate the target-step combos with the sorted step names.
    String[] prevSteps = transMeta.getStepNames();
    Arrays.sort( prevSteps );
    wExecutionResultTarget.setItems( prevSteps );
    wResultFilesTarget.setItems( prevSteps );
    wResultRowsTarget.setItems( prevSteps );

    // Offer the incoming field names for the parameter grid and grouping.
    String[] inputFields = transMeta.getPrevStepFields( stepMeta ).getFieldNames();
    parameterColumns[ 1 ].setComboValues( inputFields );
    wGroupField.setItems( inputFields );
  } catch ( Exception e ) {
    log.logError( "couldn't get previous step list", e );
  }

  // Row-grouping options.
  wGroupSize.setText( Const.NVL( jobExecutorMeta.getGroupSize(), "" ) );
  wGroupTime.setText( Const.NVL( jobExecutorMeta.getGroupTime(), "" ) );
  wGroupField.setText( Const.NVL( jobExecutorMeta.getGroupField(), "" ) );

  // Execution-results tab: target step plus one field name per metric.
  wExecutionResultTarget.setText( jobExecutorMeta.getExecutionResultTargetStepMeta() == null
    ? "" : jobExecutorMeta.getExecutionResultTargetStepMeta().getName() );
  tiExecutionTimeField.setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionTimeField(), "" ) );
  tiExecutionResultField.setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionResultField(), "" ) );
  tiExecutionNrErrorsField.setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionNrErrorsField(), "" ) );
  tiExecutionLinesReadField.setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLinesReadField(), "" ) );
  tiExecutionLinesWrittenField
    .setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLinesWrittenField(), "" ) );
  tiExecutionLinesInputField.setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLinesInputField(), "" ) );
  tiExecutionLinesOutputField
    .setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLinesOutputField(), "" ) );
  tiExecutionLinesRejectedField
    .setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLinesRejectedField(), "" ) );
  tiExecutionLinesUpdatedField
    .setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLinesUpdatedField(), "" ) );
  tiExecutionLinesDeletedField
    .setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLinesDeletedField(), "" ) );
  tiExecutionFilesRetrievedField
    .setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionFilesRetrievedField(), "" ) );
  tiExecutionExitStatusField.setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionExitStatusField(), "" ) );
  tiExecutionLogTextField.setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLogTextField(), "" ) );
  tiExecutionLogChannelIdField
    .setText( FIELD_NAME, Const.NVL( jobExecutorMeta.getExecutionLogChannelIdField(), "" ) );

  // result files
  //
  wResultFilesTarget.setText( jobExecutorMeta.getResultFilesTargetStepMeta() == null ? "" : jobExecutorMeta
    .getResultFilesTargetStepMeta().getName() );
  wResultFileNameField.setText( Const.NVL( jobExecutorMeta.getResultFilesFileNameField(), "" ) );

  // Result rows
  //
  wResultRowsTarget.setText( jobExecutorMeta.getResultRowsTargetStepMeta() == null ? "" : jobExecutorMeta
    .getResultRowsTargetStepMeta().getName() );
  // One table row per configured result-rows field: name, type, and
  // optional length/precision (negative means "not set", shown empty).
  for ( int i = 0; i < jobExecutorMeta.getResultRowsField().length; i++ ) {
    TableItem item = new TableItem( wResultRowsFields.table, SWT.NONE );
    item.setText( 1, Const.NVL( jobExecutorMeta.getResultRowsField()[ i ], "" ) );
    item.setText( 2, ValueMetaFactory.getValueMetaName( jobExecutorMeta.getResultRowsType()[ i ] ) );
    int length = jobExecutorMeta.getResultRowsLength()[ i ];
    item.setText( 3, length < 0 ? "" : Integer.toString( length ) );
    int precision = jobExecutorMeta.getResultRowsPrecision()[ i ];
    item.setText( 4, precision < 0 ? "" : Integer.toString( precision ) );
  }
  wResultRowsFields.removeEmptyRows();
  wResultRowsFields.setRowNums();
  wResultRowsFields.optWidth( true );

  wTabFolder.setSelection( 0 );

  // Best-effort load of the referenced job; failures are non-fatal here.
  try {
    loadJob();
  } catch ( Throwable t ) {
    // Ignore errors
  }
  setFlags();

  wStepname.selectAll();
  wStepname.setFocus();
}
|
void function() { specificationMethod = jobExecutorMeta.getSpecificationMethod(); switch ( specificationMethod ) { case FILENAME: wPath.setText( Const.NVL( jobExecutorMeta.getFileName(), STRSTR/STRSTRcouldn't get previous step listSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTR" : Integer.toString( precision ) ); } wResultRowsFields.removeEmptyRows(); wResultRowsFields.setRowNums(); wResultRowsFields.optWidth( true ); wTabFolder.setSelection( 0 ); try { loadJob(); } catch ( Throwable t ) { } setFlags(); wStepname.selectAll(); wStepname.setFocus(); }
|
/**
* Copy information from the meta-data input to the dialog fields.
*/
|
Copy information from the meta-data input to the dialog fields
|
getData
|
{
"repo_name": "SergeyTravin/pentaho-kettle",
"path": "ui/src/main/java/org/pentaho/di/ui/trans/steps/jobexecutor/JobExecutorDialog.java",
"license": "apache-2.0",
"size": 51306
}
|
[
"org.pentaho.di.core.Const"
] |
import org.pentaho.di.core.Const;
|
import org.pentaho.di.core.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 750,276
|
/**
 * Sets up the test case: creates working and index directories under the
 * test temp area and two small text files used by the tests.
 */
@Override
public void setUp() throws Exception {
    super.setUp();

    workDir = _TestUtil.getTempDir("TestDoc");
    workDir.mkdirs();

    indexDir = _TestUtil.getTempDir("testIndex");
    indexDir.mkdirs();

    // Open and immediately close the FSDirectory so the index location
    // is materialized before any test runs.
    Directory directory = newFSDirectory(indexDir);
    directory.close();

    files = new LinkedList<File>();
    files.add(createOutput("test.txt", "This is the first test file"));
    files.add(createOutput("test2.txt", "This is the second test file"));
}
|
void function() throws Exception { super.setUp(); workDir = _TestUtil.getTempDir(STR); workDir.mkdirs(); indexDir = _TestUtil.getTempDir(STR); indexDir.mkdirs(); Directory directory = newFSDirectory(indexDir); directory.close(); files = new LinkedList<File>(); files.add(createOutput(STR, STR )); files.add(createOutput(STR, STR )); }
|
/** Set the test case. This test case needs
* a few text files created in the current working directory.
*/
|
Set the test case. This test case needs a few text files created in the current working directory
|
setUp
|
{
"repo_name": "fnp/pylucene",
"path": "lucene-java-3.5.0/lucene/src/test/org/apache/lucene/index/TestDoc.java",
"license": "apache-2.0",
"size": 8107
}
|
[
"java.io.File",
"java.util.LinkedList",
"org.apache.lucene.store.Directory"
] |
import java.io.File; import java.util.LinkedList; import org.apache.lucene.store.Directory;
|
import java.io.*; import java.util.*; import org.apache.lucene.store.*;
|
[
"java.io",
"java.util",
"org.apache.lucene"
] |
java.io; java.util; org.apache.lucene;
| 1,067,006
|
/**
 * Configures the {@link ResourceUsageMatcher}: loads every emulator plugin
 * named under {@code RESOURCE_USAGE_EMULATION_PLUGINS} and then initializes
 * all of them once loading is complete.
 *
 * @param conf configuration naming the plugin classes
 * @param monitor resource monitor handed to each plugin
 * @param metrics target resource usage the plugins should emulate
 * @param progress progress indicator shared with the plugins
 * @throws RuntimeException if a configured class does not implement
 *         {@link ResourceUsageEmulatorPlugin}
 */
@SuppressWarnings("unchecked")
public void configure(Configuration conf, ResourceCalculatorPlugin monitor,
                      ResourceUsageMetrics metrics, Progressive progress) {
  Class[] plugins = conf.getClasses(RESOURCE_USAGE_EMULATION_PLUGINS);
  if (plugins == null) {
    System.out.println("No resource usage emulator plugins configured.");
  } else {
    for (Class clazz : plugins) {
      if (clazz == null) {
        continue;
      }
      if (!ResourceUsageEmulatorPlugin.class.isAssignableFrom(clazz)) {
        // Bug fix: report the offending plugin class (clazz.getName());
        // the original used clazz.getClass().getName(), which always
        // prints "java.lang.Class" and hides the misconfigured class.
        throw new RuntimeException("Misconfigured resource usage plugins. "
            + "Class " + clazz.getName() + " is not a resource "
            + "usage plugin as it does not extend "
            + ResourceUsageEmulatorPlugin.class.getName());
      }
      ResourceUsageEmulatorPlugin plugin =
          (ResourceUsageEmulatorPlugin) ReflectionUtils.newInstance(clazz,
              conf);
      emulationPlugins.add(plugin);
    }
  }

  // initialize the emulators once all the configured emulator plugins are
  // loaded
  for (ResourceUsageEmulatorPlugin emulator : emulationPlugins) {
    emulator.initialize(conf, metrics, monitor, progress);
  }
}
|
@SuppressWarnings(STR) void function(Configuration conf, ResourceCalculatorPlugin monitor, ResourceUsageMetrics metrics, Progressive progress) { Class[] plugins = conf.getClasses(RESOURCE_USAGE_EMULATION_PLUGINS); if (plugins == null) { System.out.println(STR); } else { for (Class clazz : plugins) { if (clazz != null) { if (ResourceUsageEmulatorPlugin.class.isAssignableFrom(clazz)) { ResourceUsageEmulatorPlugin plugin = (ResourceUsageEmulatorPlugin) ReflectionUtils.newInstance(clazz, conf); emulationPlugins.add(plugin); } else { throw new RuntimeException(STR + STR + clazz.getClass().getName() + STR + STR + ResourceUsageEmulatorPlugin.class.getName()); } } } } for (ResourceUsageEmulatorPlugin emulator : emulationPlugins) { emulator.initialize(conf, metrics, monitor, progress); } }
|
/**
* Configure the {@link ResourceUsageMatcher} to load the configured plugins
* and initialize them.
*/
|
Configure the <code>ResourceUsageMatcher</code> to load the configured plugins and initialize them
|
configure
|
{
"repo_name": "moreus/hadoop",
"path": "hadoop-0.23.10/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java",
"license": "apache-2.0",
"size": 3609
}
|
[
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.mapred.gridmix.Progressive",
"org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin",
"org.apache.hadoop.tools.rumen.ResourceUsageMetrics",
"org.apache.hadoop.util.ReflectionUtils"
] |
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.gridmix.Progressive; import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin; import org.apache.hadoop.tools.rumen.ResourceUsageMetrics; import org.apache.hadoop.util.ReflectionUtils;
|
import org.apache.hadoop.conf.*; import org.apache.hadoop.mapred.gridmix.*; import org.apache.hadoop.mapreduce.util.*; import org.apache.hadoop.tools.rumen.*; import org.apache.hadoop.util.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 195,953
|
/**
 * Iterates from this number down to the given number, inclusive,
 * decrementing by one each time and invoking the closure with the
 * current value.
 *
 * @param self a double (the starting value)
 * @param to the end number (inclusive)
 * @param closure the code to execute for each number
 * @throws GroovyRuntimeException if {@code to} exceeds {@code self}
 * @since 1.0
 */
public static void downto(double self, Number to, Closure closure) {
    final double limit = to.doubleValue();
    // Guard clause: counting down past the start would never terminate.
    if (self < limit) {
        throw new GroovyRuntimeException(
            "Infinite loop in " + self + ".downto(" + to + ")");
    }
    for (double i = self; i >= limit; i--) {
        closure.call(i);
    }
}
|
static void function(double self, Number to, Closure closure) { double to1 = to.doubleValue(); if (self >= to1) { for (double i = self; i >= to1; i--) { closure.call(i); } } else throw new GroovyRuntimeException(STR + self + STR + to + ")"); }
|
/**
* Iterates from this number down to the given number, inclusive,
* decrementing by one each time.
*
* @param self a double
* @param to the end number
* @param closure the code to execute for each number
* @since 1.0
*/
|
Iterates from this number down to the given number, inclusive, decrementing by one each time
|
downto
|
{
"repo_name": "mv2a/yajsw",
"path": "src/groovy-patch/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyMethods.java",
"license": "apache-2.0",
"size": 704164
}
|
[
"groovy.lang.Closure",
"groovy.lang.GroovyRuntimeException"
] |
import groovy.lang.Closure; import groovy.lang.GroovyRuntimeException;
|
import groovy.lang.*;
|
[
"groovy.lang"
] |
groovy.lang;
| 1,565,738
|
/**
 * Computes minus the first derivative of the present value with respect to
 * time (the forward theta) for a cash-settled swaption under the Black
 * model.
 *
 * @param swaption the cash-settled swaption
 * @param curveBlack the curves bundled with Black swaption volatility data
 * @return the forward theta
 */
public double forwardThetaTheoretical(final SwaptionCashFixedIbor swaption, final YieldCurveWithBlackSwaptionBundle curveBlack) {
  ArgumentChecker.notNull(swaption, "Swaption");
  ArgumentChecker.notNull(curveBlack, "Curves with Black volatility");
  final AnnuityCouponFixed annuityFixed = swaption.getUnderlyingSwap().getFixedLeg();
  final double tenor = swaption.getMaturityTime();
  // Par forward rate of the underlying swap.
  final double forward = swaption.getUnderlyingSwap().accept(PRC, curveBlack);
  // Cash annuity (PVBP) evaluated at the forward rate.
  final double pvbp = METHOD_SWAP.getAnnuityCash(swaption.getUnderlyingSwap(), forward);
  // Implementation comment: cash-settled swaptions make sense only for constant strike, the computation of coupon equivalent is not required.
  final double volatility = curveBlack.getBlackParameters().getVolatility(swaption.getTimeToExpiry(), tenor);
  final double discountFactorSettle = curveBlack.getCurve(annuityFixed.getNthPayment(0).getFundingCurveName()).getDiscountFactor(swaption.getSettlementTime());
  final double strike = swaption.getStrike();
  final double expiry = swaption.getTimeToExpiry();
  final boolean isCall = swaption.isCall();
  // Common scaling: settlement discount factor times the cash annuity.
  final double df = discountFactorSettle * pvbp;
  // Forward theta = F * df * Black delta + df * driftless theta,
  // each term signed by the long/short position.
  return forward * df * BlackFormulaRepository.delta(forward, strike, expiry, volatility, isCall) * (swaption.isLong() ? 1.0 : -1.0) + df *
      BlackFormulaRepository.driftlessTheta(forward, strike, expiry, volatility) * (swaption.isLong() ? 1.0 : -1.0);
}
|
double function(final SwaptionCashFixedIbor swaption, final YieldCurveWithBlackSwaptionBundle curveBlack) { ArgumentChecker.notNull(swaption, STR); ArgumentChecker.notNull(curveBlack, STR); final AnnuityCouponFixed annuityFixed = swaption.getUnderlyingSwap().getFixedLeg(); final double tenor = swaption.getMaturityTime(); final double forward = swaption.getUnderlyingSwap().accept(PRC, curveBlack); final double pvbp = METHOD_SWAP.getAnnuityCash(swaption.getUnderlyingSwap(), forward); final double volatility = curveBlack.getBlackParameters().getVolatility(swaption.getTimeToExpiry(), tenor); final double discountFactorSettle = curveBlack.getCurve(annuityFixed.getNthPayment(0).getFundingCurveName()).getDiscountFactor(swaption.getSettlementTime()); final double strike = swaption.getStrike(); final double expiry = swaption.getTimeToExpiry(); final boolean isCall = swaption.isCall(); final double df = discountFactorSettle * pvbp; return forward * df * BlackFormulaRepository.delta(forward, strike, expiry, volatility, isCall) * (swaption.isLong() ? 1.0 : -1.0) + df * BlackFormulaRepository.driftlessTheta(forward, strike, expiry, volatility) * (swaption.isLong() ? 1.0 : -1.0); }
|
/**
* Compute minus of first derivative of present value with respect to time
* @param swaption The swaption.
* @param curveBlack The curves with Black volatility data.
* @return The forward theta
*/
|
Compute minus of first derivative of present value with respect to time
|
forwardThetaTheoretical
|
{
"repo_name": "jeorme/OG-Platform",
"path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/interestrate/swaption/method/SwaptionCashFixedIborBlackMethod.java",
"license": "apache-2.0",
"size": 20942
}
|
[
"com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityCouponFixed",
"com.opengamma.analytics.financial.interestrate.swaption.derivative.SwaptionCashFixedIbor",
"com.opengamma.analytics.financial.model.option.definition.YieldCurveWithBlackSwaptionBundle",
"com.opengamma.analytics.financial.model.volatility.BlackFormulaRepository",
"com.opengamma.util.ArgumentChecker"
] |
import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityCouponFixed; import com.opengamma.analytics.financial.interestrate.swaption.derivative.SwaptionCashFixedIbor; import com.opengamma.analytics.financial.model.option.definition.YieldCurveWithBlackSwaptionBundle; import com.opengamma.analytics.financial.model.volatility.BlackFormulaRepository; import com.opengamma.util.ArgumentChecker;
|
import com.opengamma.analytics.financial.interestrate.annuity.derivative.*; import com.opengamma.analytics.financial.interestrate.swaption.derivative.*; import com.opengamma.analytics.financial.model.option.definition.*; import com.opengamma.analytics.financial.model.volatility.*; import com.opengamma.util.*;
|
[
"com.opengamma.analytics",
"com.opengamma.util"
] |
com.opengamma.analytics; com.opengamma.util;
| 2,263,127
|
public void setTListTypeKey(ObjectKey key) throws TorqueException
{
setListType(new Integer(((NumberKey) key).intValue()));
}
private static List<String> fieldNames = null;
|
void function(ObjectKey key) throws TorqueException { setListType(new Integer(((NumberKey) key).intValue())); } private static List<String> fieldNames = null;
|
/**
* Provides convenient way to set a relationship based on a
* ObjectKey, for example
* <code>bar.setFooKey(foo.getPrimaryKey())</code>
*
*/
|
Provides convenient way to set a relationship based on a ObjectKey, for example <code>bar.setFooKey(foo.getPrimaryKey())</code>
|
setTListTypeKey
|
{
"repo_name": "trackplus/Genji",
"path": "src/main/java/com/aurel/track/persist/BaseTPpriority.java",
"license": "gpl-3.0",
"size": 31826
}
|
[
"java.util.List",
"org.apache.torque.TorqueException",
"org.apache.torque.om.NumberKey",
"org.apache.torque.om.ObjectKey"
] |
import java.util.List; import org.apache.torque.TorqueException; import org.apache.torque.om.NumberKey; import org.apache.torque.om.ObjectKey;
|
import java.util.*; import org.apache.torque.*; import org.apache.torque.om.*;
|
[
"java.util",
"org.apache.torque"
] |
java.util; org.apache.torque;
| 2,351,665
|
public void authenticate(String username, String host, CallbackHandler cbh) throws IOException, XMPPException {
String[] mechanisms = { getName() };
Map<String,String> props = new HashMap<String,String>();
sc = Sasl.createSaslClient(mechanisms, username, "xmpp", host, props, cbh);
authenticate();
}
|
void function(String username, String host, CallbackHandler cbh) throws IOException, XMPPException { String[] mechanisms = { getName() }; Map<String,String> props = new HashMap<String,String>(); sc = Sasl.createSaslClient(mechanisms, username, "xmpp", host, props, cbh); authenticate(); }
|
/**
* Builds and sends the <tt>auth</tt> stanza to the server. The callback handler will handle
* any additional information, such as the authentication ID or realm, if it is needed.
*
* @param username the username of the user being authenticated.
* @param host the hostname where the user account resides.
* @param cbh the CallbackHandler to obtain user information.
* @throws IOException If a network error occures while authenticating.
* @throws XMPPException If a protocol error occurs or the user is not authenticated.
*/
|
Builds and sends the auth stanza to the server. The callback handler will handle any additional information, such as the authentication ID or realm, if it is needed
|
authenticate
|
{
"repo_name": "ice-coffee/EIM",
"path": "src/org/jivesoftware/smack/sasl/SASLMechanism.java",
"license": "apache-2.0",
"size": 11625
}
|
[
"java.io.IOException",
"java.util.HashMap",
"java.util.Map",
"javax.security.auth.callback.CallbackHandler",
"javax.security.sasl.Sasl",
"org.jivesoftware.smack.XMPPException"
] |
import java.io.IOException; import java.util.HashMap; import java.util.Map; import javax.security.auth.callback.CallbackHandler; import javax.security.sasl.Sasl; import org.jivesoftware.smack.XMPPException;
|
import java.io.*; import java.util.*; import javax.security.auth.callback.*; import javax.security.sasl.*; import org.jivesoftware.smack.*;
|
[
"java.io",
"java.util",
"javax.security",
"org.jivesoftware.smack"
] |
java.io; java.util; javax.security; org.jivesoftware.smack;
| 1,883,646
|
public CharBuffer read(Charset charset) throws IllegalStateException, IOException {
return charset.decode(ByteBuffer.wrap(read()));
}
|
CharBuffer function(Charset charset) throws IllegalStateException, IOException { return charset.decode(ByteBuffer.wrap(read())); }
|
/**
* <p>Reads all available bytes from the port/serial device and returns a CharBuffer from the decoded bytes.</p>
*
* @param charset
* The character set to use for encoding/decoding bytes to/from text characters
*
* @return Returns a character set with the data read from the serial port.
*/
|
Reads all available bytes from the port/serial device and returns a CharBuffer from the decoded bytes
|
read
|
{
"repo_name": "phueper/pi4j",
"path": "pi4j-core/src/main/java/com/pi4j/io/serial/impl/AbstractSerialDataReaderWriter.java",
"license": "lgpl-3.0",
"size": 7839
}
|
[
"java.io.IOException",
"java.nio.ByteBuffer",
"java.nio.CharBuffer",
"java.nio.charset.Charset"
] |
import java.io.IOException; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.Charset;
|
import java.io.*; import java.nio.*; import java.nio.charset.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 2,031,654
|
private static boolean hasSomeUser() {
for (User u : User.getAll())
if (u.getProperty(Details.class) != null)
return true;
return false;
}
|
static boolean function() { for (User u : User.getAll()) if (u.getProperty(Details.class) != null) return true; return false; }
|
/**
* Computes if this Hudson has some user accounts configured.
*
* <p>
* This is used to check for the initial
*/
|
Computes if this Hudson has some user accounts configured. This is used to check for the initial
|
hasSomeUser
|
{
"repo_name": "v1v/jenkins",
"path": "core/src/main/java/hudson/security/HudsonPrivateSecurityRealm.java",
"license": "mit",
"size": 39090
}
|
[
"hudson.model.User"
] |
import hudson.model.User;
|
import hudson.model.*;
|
[
"hudson.model"
] |
hudson.model;
| 187,376
|
public static JSONObject parseObject(String rawJson, JSONObject defaultValue) {
Object json = JSONUtilities.parse(rawJson, defaultValue);
// If parsed instance is not null and instance of JSONObject, return
// parsed instance.
// Otherwise return default value
return (json instanceof JSONObject) ? ((JSONObject) json)
: (defaultValue);
}
|
static JSONObject function(String rawJson, JSONObject defaultValue) { Object json = JSONUtilities.parse(rawJson, defaultValue); return (json instanceof JSONObject) ? ((JSONObject) json) : (defaultValue); }
|
/**
* Parses JSON formatted string to corresponding object structure to
* JSONObject. Default value will be returned on empty/null string input or
* failure.
*
* @param rawJson
* JSON formatted string
* @param defaultValue
* default return value in case of failure
* @return parsed JSONObject
*/
|
Parses JSON formatted string to corresponding object structure to JSONObject. Default value will be returned on empty/null string input or failure
|
parseObject
|
{
"repo_name": "gokhanbarisaker/JSONUtilities",
"path": "lib/src/main/java/com/gokhanbarisaker/utilities/JSONUtilities.java",
"license": "apache-2.0",
"size": 13924
}
|
[
"org.json.JSONObject"
] |
import org.json.JSONObject;
|
import org.json.*;
|
[
"org.json"
] |
org.json;
| 1,369,800
|
public String toString(String pattern) {
if (pattern == null) {
return toString();
}
return DateTimeFormat.forPattern(pattern).print(this);
}
|
String function(String pattern) { if (pattern == null) { return toString(); } return DateTimeFormat.forPattern(pattern).print(this); }
|
/**
* Output the date using the specified format pattern.
*
* @param pattern the pattern specification, null means use <code>toString</code>
* @see org.joda.time.format.DateTimeFormat
*/
|
Output the date using the specified format pattern
|
toString
|
{
"repo_name": "0359xiaodong/joda-time-android",
"path": "library/src/org/joda/time/LocalDate.java",
"license": "apache-2.0",
"size": 81625
}
|
[
"org.joda.time.format.DateTimeFormat"
] |
import org.joda.time.format.DateTimeFormat;
|
import org.joda.time.format.*;
|
[
"org.joda.time"
] |
org.joda.time;
| 218,814
|
public static MapElement readMapElement(Element element, String elementNodeName,
PathBuilder pathBuilder, XMLResources resources) throws IOException {
return readMapElement(element, elementNodeName, null, pathBuilder, resources);
}
|
static MapElement function(Element element, String elementNodeName, PathBuilder pathBuilder, XMLResources resources) throws IOException { return readMapElement(element, elementNodeName, null, pathBuilder, resources); }
|
/** Read a map element from the XML description.
*
* @param element is the XML node to read.
* @param elementNodeName is the name of the XML node that should contains the map element data.
* It must be one of {@link #NODE_POINT}, {@link #NODE_CIRCLE}, {@link #NODE_POLYGON}, {@link #NODE_POLYLINE},
* {@link #NODE_MULTIPOINT}, or {@code null} for the XML node name itself.
* @param pathBuilder is the tool to make paths absolute.
* @param resources is the tool that permits to gather the resources.
* @return the map element.
* @throws IOException in case of error.
*/
|
Read a map element from the XML description
|
readMapElement
|
{
"repo_name": "gallandarakhneorg/afc",
"path": "advanced/gis/gisinputoutput/src/main/java/org/arakhne/afc/gis/io/xml/XMLGISElementUtil.java",
"license": "apache-2.0",
"size": 31539
}
|
[
"java.io.IOException",
"org.arakhne.afc.gis.mapelement.MapElement",
"org.arakhne.afc.inputoutput.path.PathBuilder",
"org.arakhne.afc.inputoutput.xml.XMLResources",
"org.w3c.dom.Element"
] |
import java.io.IOException; import org.arakhne.afc.gis.mapelement.MapElement; import org.arakhne.afc.inputoutput.path.PathBuilder; import org.arakhne.afc.inputoutput.xml.XMLResources; import org.w3c.dom.Element;
|
import java.io.*; import org.arakhne.afc.gis.mapelement.*; import org.arakhne.afc.inputoutput.path.*; import org.arakhne.afc.inputoutput.xml.*; import org.w3c.dom.*;
|
[
"java.io",
"org.arakhne.afc",
"org.w3c.dom"
] |
java.io; org.arakhne.afc; org.w3c.dom;
| 970,075
|
protected CodecMaxValues getCodecMaxValues(MediaCodecInfo codecInfo, Format format,
Format[] streamFormats) throws DecoderQueryException {
int maxWidth = format.width;
int maxHeight = format.height;
int maxInputSize = getMaxInputSize(format);
if (streamFormats.length == 1) {
// The single entry in streamFormats must correspond to the format for which the codec is
// being configured.
return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
}
boolean haveUnknownDimensions = false;
for (Format streamFormat : streamFormats) {
if (areAdaptationCompatible(codecInfo.adaptive, format, streamFormat)) {
haveUnknownDimensions |= (streamFormat.width == Format.NO_VALUE
|| streamFormat.height == Format.NO_VALUE);
maxWidth = Math.max(maxWidth, streamFormat.width);
maxHeight = Math.max(maxHeight, streamFormat.height);
maxInputSize = Math.max(maxInputSize, getMaxInputSize(streamFormat));
}
}
if (haveUnknownDimensions) {
Log.w(TAG, "Resolutions unknown. Codec max resolution: " + maxWidth + "x" + maxHeight);
Point codecMaxSize = getCodecMaxSize(codecInfo, format);
if (codecMaxSize != null) {
maxWidth = Math.max(maxWidth, codecMaxSize.x);
maxHeight = Math.max(maxHeight, codecMaxSize.y);
maxInputSize = Math.max(maxInputSize,
getMaxInputSize(format.sampleMimeType, maxWidth, maxHeight));
Log.w(TAG, "Codec max resolution adjusted to: " + maxWidth + "x" + maxHeight);
}
}
return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
}
|
CodecMaxValues function(MediaCodecInfo codecInfo, Format format, Format[] streamFormats) throws DecoderQueryException { int maxWidth = format.width; int maxHeight = format.height; int maxInputSize = getMaxInputSize(format); if (streamFormats.length == 1) { return new CodecMaxValues(maxWidth, maxHeight, maxInputSize); } boolean haveUnknownDimensions = false; for (Format streamFormat : streamFormats) { if (areAdaptationCompatible(codecInfo.adaptive, format, streamFormat)) { haveUnknownDimensions = (streamFormat.width == Format.NO_VALUE streamFormat.height == Format.NO_VALUE); maxWidth = Math.max(maxWidth, streamFormat.width); maxHeight = Math.max(maxHeight, streamFormat.height); maxInputSize = Math.max(maxInputSize, getMaxInputSize(streamFormat)); } } if (haveUnknownDimensions) { Log.w(TAG, STR + maxWidth + "x" + maxHeight); Point codecMaxSize = getCodecMaxSize(codecInfo, format); if (codecMaxSize != null) { maxWidth = Math.max(maxWidth, codecMaxSize.x); maxHeight = Math.max(maxHeight, codecMaxSize.y); maxInputSize = Math.max(maxInputSize, getMaxInputSize(format.sampleMimeType, maxWidth, maxHeight)); Log.w(TAG, STR + maxWidth + "x" + maxHeight); } } return new CodecMaxValues(maxWidth, maxHeight, maxInputSize); }
|
/**
* Returns {@link CodecMaxValues} suitable for configuring a codec for {@code format} in a way
* that will allow possible adaptation to other compatible formats in {@code streamFormats}.
*
* @param codecInfo Information about the {@link MediaCodec} being configured.
* @param format The format for which the codec is being configured.
* @param streamFormats The possible stream formats.
* @return Suitable {@link CodecMaxValues}.
* @throws DecoderQueryException If an error occurs querying {@code codecInfo}.
*/
|
Returns <code>CodecMaxValues</code> suitable for configuring a codec for format in a way that will allow possible adaptation to other compatible formats in streamFormats
|
getCodecMaxValues
|
{
"repo_name": "antoniodiraff/ExoPlayer_Library_0.1",
"path": "library/core/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java",
"license": "apache-2.0",
"size": 43041
}
|
[
"android.graphics.Point",
"android.util.Log",
"com.google.android.exoplayer2.Format",
"com.google.android.exoplayer2.mediacodec.MediaCodecInfo",
"com.google.android.exoplayer2.mediacodec.MediaCodecUtil"
] |
import android.graphics.Point; import android.util.Log; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.mediacodec.MediaCodecInfo; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
|
import android.graphics.*; import android.util.*; import com.google.android.exoplayer2.*; import com.google.android.exoplayer2.mediacodec.*;
|
[
"android.graphics",
"android.util",
"com.google.android"
] |
android.graphics; android.util; com.google.android;
| 1,858,090
|
public HashSet<CD> getCDsForSubset(String codeSystemOID, String codeSystemName, Subset subset, ServerConnectionSecureSocket conn) throws DTSException, IllegalArgumentException
{
HashSet<CD> setToReturn = new HashSet<CD>();
SubsetQuery subsetQuery = SubsetQuery.createInstance(conn);
DTSConceptQuery conceptQuery = DTSConceptQuery.createInstance(conn);
ConceptAttributeSetDescriptor casd = new ConceptAttributeSetDescriptor("default");
DTSPropertyType searchPropType = conceptQuery.findPropertyTypeByName("Code in Source", subsetQuery.fetchNamespaces(subset.getId())[0]);
casd.addPropertyType(searchPropType);
DTSConcept[] dtsConcepts = subsetQuery.fetchConcepts(subset.getId(), casd);
for (DTSConcept dtsConcept : dtsConcepts)
{
String code = getCodeInSource(dtsConcept);
String displayName = dtsConcept.getName();
if ((code != null) && (displayName != null))
{
// should always be the case
CD cd = new CD(codeSystemOID, codeSystemName, code, displayName);
setToReturn.add(cd);
}
else
{
String errMsg = "Error in ApelonDtsUtility.getCSsForSubset: code and/or displayName is null for dtsConcept: " + dtsConcept.toString();
log.error(errMsg);
System.err.println(errMsg);
}
}
return setToReturn;
}
|
HashSet<CD> function(String codeSystemOID, String codeSystemName, Subset subset, ServerConnectionSecureSocket conn) throws DTSException, IllegalArgumentException { HashSet<CD> setToReturn = new HashSet<CD>(); SubsetQuery subsetQuery = SubsetQuery.createInstance(conn); DTSConceptQuery conceptQuery = DTSConceptQuery.createInstance(conn); ConceptAttributeSetDescriptor casd = new ConceptAttributeSetDescriptor(STR); DTSPropertyType searchPropType = conceptQuery.findPropertyTypeByName(STR, subsetQuery.fetchNamespaces(subset.getId())[0]); casd.addPropertyType(searchPropType); DTSConcept[] dtsConcepts = subsetQuery.fetchConcepts(subset.getId(), casd); for (DTSConcept dtsConcept : dtsConcepts) { String code = getCodeInSource(dtsConcept); String displayName = dtsConcept.getName(); if ((code != null) && (displayName != null)) { CD cd = new CD(codeSystemOID, codeSystemName, code, displayName); setToReturn.add(cd); } else { String errMsg = STR + dtsConcept.toString(); log.error(errMsg); System.err.println(errMsg); } } return setToReturn; }
|
/**
* Returns CDs in specified subset. May be empty set but is never null.
* @param subset
* @param conn
* @return
* @throws DTSException
* @throws IllegalArgumentException If codeSystemOID is unrecognized.
*/
|
Returns CDs in specified subset. May be empty set but is never null
|
getCDsForSubset
|
{
"repo_name": "TonyWang-UMU/TFG-TWang",
"path": "opencds-parent/opencds-terminology-support/opencds-apelon/src/main/java/org/opencds/terminology/apelon/ApelonDtsUtility.java",
"license": "apache-2.0",
"size": 24985
}
|
[
"com.apelon.apelonserver.client.ServerConnectionSecureSocket",
"com.apelon.dts.client.DTSException",
"com.apelon.dts.client.attribute.DTSPropertyType",
"com.apelon.dts.client.concept.ConceptAttributeSetDescriptor",
"com.apelon.dts.client.concept.DTSConcept",
"com.apelon.dts.client.concept.DTSConceptQuery",
"com.apelon.dts.client.subset.SubsetQuery",
"com.apelon.dts.common.subset.Subset",
"java.util.HashSet"
] |
import com.apelon.apelonserver.client.ServerConnectionSecureSocket; import com.apelon.dts.client.DTSException; import com.apelon.dts.client.attribute.DTSPropertyType; import com.apelon.dts.client.concept.ConceptAttributeSetDescriptor; import com.apelon.dts.client.concept.DTSConcept; import com.apelon.dts.client.concept.DTSConceptQuery; import com.apelon.dts.client.subset.SubsetQuery; import com.apelon.dts.common.subset.Subset; import java.util.HashSet;
|
import com.apelon.apelonserver.client.*; import com.apelon.dts.client.*; import com.apelon.dts.client.attribute.*; import com.apelon.dts.client.concept.*; import com.apelon.dts.client.subset.*; import com.apelon.dts.common.subset.*; import java.util.*;
|
[
"com.apelon.apelonserver",
"com.apelon.dts",
"java.util"
] |
com.apelon.apelonserver; com.apelon.dts; java.util;
| 1,162,904
|
public Object getAttribute(final ResourceResolverContext context, final String name) {
for(final String key : FORBIDDEN_ATTRIBUTES) {
if (key.equals(name)) {
return null;
}
}
for (final AuthenticatedResourceProvider p : context.getProviderManager().getAllBestEffort(getResourceProviderStorage().getAttributableHandlers(), this)) {
final Object attribute = p.getAttribute(name);
if (attribute != null) {
return attribute;
}
}
return this.authenticationInfo != null ? this.authenticationInfo.get(name) :null;
}
|
Object function(final ResourceResolverContext context, final String name) { for(final String key : FORBIDDEN_ATTRIBUTES) { if (key.equals(name)) { return null; } } for (final AuthenticatedResourceProvider p : context.getProviderManager().getAllBestEffort(getResourceProviderStorage().getAttributableHandlers(), this)) { final Object attribute = p.getAttribute(name); if (attribute != null) { return attribute; } } return this.authenticationInfo != null ? this.authenticationInfo.get(name) :null; }
|
/**
* Returns the first non-null result of the
* {@link AuthenticatedResourceProvider#getAttribute(String)} invocation on
* the providers.
*/
|
Returns the first non-null result of the <code>AuthenticatedResourceProvider#getAttribute(String)</code> invocation on the providers
|
getAttribute
|
{
"repo_name": "roele/sling",
"path": "bundles/resourceresolver/src/main/java/org/apache/sling/resourceresolver/impl/helper/ResourceResolverControl.java",
"license": "apache-2.0",
"size": 34619
}
|
[
"org.apache.sling.resourceresolver.impl.providers.stateful.AuthenticatedResourceProvider"
] |
import org.apache.sling.resourceresolver.impl.providers.stateful.AuthenticatedResourceProvider;
|
import org.apache.sling.resourceresolver.impl.providers.stateful.*;
|
[
"org.apache.sling"
] |
org.apache.sling;
| 2,848,870
|
@NotNull
GenericAttributeValue<String> getUserProperty();
|
GenericAttributeValue<String> getUserProperty();
|
/**
* Returns the value of the user-property child.
* <pre>
* <h3>Attribute null:user-property documentation</h3>
* A property of the UserDetails object which will be
* used as salt by a password encoder. Typically something like
* "username" might be used.
* </pre>
*
* @return the value of the user-property child.
*/
|
Returns the value of the user-property child. <code> Attribute null:user-property documentation A property of the UserDetails object which will be used as salt by a password encoder. Typically something like "username" might be used. </code>
|
getUserProperty
|
{
"repo_name": "consulo-trash/consulo-spring",
"path": "spring-security/src/com/intellij/spring/security/model/xml/SaltSource.java",
"license": "apache-2.0",
"size": 1543
}
|
[
"com.intellij.util.xml.GenericAttributeValue"
] |
import com.intellij.util.xml.GenericAttributeValue;
|
import com.intellij.util.xml.*;
|
[
"com.intellij.util"
] |
com.intellij.util;
| 694,903
|
@Test
public void testBookieAuthPluginRequireClientTLSAuthenticationLocal() throws Exception {
if (useV2Protocol) {
return;
}
ServerConfiguration serverConf = new ServerConfiguration(baseConf);
serverConf.setBookieAuthProviderFactoryClass(AllowOnlyClientsWithX509Certificates.class.getName());
serverConf.setDisableServerSocketBind(true);
serverConf.setEnableLocalTransport(true);
restartBookies(serverConf);
secureBookieSideChannel = false;
secureBookieSideChannelPrincipals = null;
ClientConfiguration clientConf = new ClientConfiguration(baseClientConf);
testClient(clientConf, numBookies);
assertTrue(secureBookieSideChannel);
assertNotNull(secureBookieSideChannelPrincipals);
assertTrue(!secureBookieSideChannelPrincipals.isEmpty());
assertTrue(secureBookieSideChannelPrincipals.iterator().next() instanceof Certificate);
Certificate cert = (Certificate) secureBookieSideChannelPrincipals.iterator().next();
assertTrue(cert instanceof X509Certificate);
}
|
void function() throws Exception { if (useV2Protocol) { return; } ServerConfiguration serverConf = new ServerConfiguration(baseConf); serverConf.setBookieAuthProviderFactoryClass(AllowOnlyClientsWithX509Certificates.class.getName()); serverConf.setDisableServerSocketBind(true); serverConf.setEnableLocalTransport(true); restartBookies(serverConf); secureBookieSideChannel = false; secureBookieSideChannelPrincipals = null; ClientConfiguration clientConf = new ClientConfiguration(baseClientConf); testClient(clientConf, numBookies); assertTrue(secureBookieSideChannel); assertNotNull(secureBookieSideChannelPrincipals); assertTrue(!secureBookieSideChannelPrincipals.isEmpty()); assertTrue(secureBookieSideChannelPrincipals.iterator().next() instanceof Certificate); Certificate cert = (Certificate) secureBookieSideChannelPrincipals.iterator().next(); assertTrue(cert instanceof X509Certificate); }
|
/**
* Verify that a bookie-side Auth plugin can access server certificates over LocalTransport.
*/
|
Verify that a bookie-side Auth plugin can access server certificates over LocalTransport
|
testBookieAuthPluginRequireClientTLSAuthenticationLocal
|
{
"repo_name": "sijie/bookkeeper",
"path": "bookkeeper-server/src/test/java/org/apache/bookkeeper/tls/TestTLS.java",
"license": "apache-2.0",
"size": 40827
}
|
[
"java.security.cert.Certificate",
"java.security.cert.X509Certificate",
"org.apache.bookkeeper.conf.ClientConfiguration",
"org.apache.bookkeeper.conf.ServerConfiguration",
"org.junit.Assert"
] |
import java.security.cert.Certificate; import java.security.cert.X509Certificate; import org.apache.bookkeeper.conf.ClientConfiguration; import org.apache.bookkeeper.conf.ServerConfiguration; import org.junit.Assert;
|
import java.security.cert.*; import org.apache.bookkeeper.conf.*; import org.junit.*;
|
[
"java.security",
"org.apache.bookkeeper",
"org.junit"
] |
java.security; org.apache.bookkeeper; org.junit;
| 972,446
|
public void sendCloseConnection(GIOPVersion giopVersion)
throws IOException
{
Message msg = MessageBase.createCloseConnection(giopVersion);
sendHelper(giopVersion, msg);
}
|
void function(GIOPVersion giopVersion) throws IOException { Message msg = MessageBase.createCloseConnection(giopVersion); sendHelper(giopVersion, msg); }
|
/*************************************************************************
* The following methods are for dealing with Connection cleaning for
* better scalability of servers in high network load conditions.
**************************************************************************/
|
The following methods are for dealing with Connection cleaning for better scalability of servers in high network load conditions
|
sendCloseConnection
|
{
"repo_name": "shun634501730/java_source_cn",
"path": "src_en/com/sun/corba/se/impl/transport/SocketOrChannelConnectionImpl.java",
"license": "apache-2.0",
"size": 56758
}
|
[
"com.sun.corba.se.impl.protocol.giopmsgheaders.Message",
"com.sun.corba.se.impl.protocol.giopmsgheaders.MessageBase",
"com.sun.corba.se.spi.ior.iiop.GIOPVersion",
"java.io.IOException"
] |
import com.sun.corba.se.impl.protocol.giopmsgheaders.Message; import com.sun.corba.se.impl.protocol.giopmsgheaders.MessageBase; import com.sun.corba.se.spi.ior.iiop.GIOPVersion; import java.io.IOException;
|
import com.sun.corba.se.impl.protocol.giopmsgheaders.*; import com.sun.corba.se.spi.ior.iiop.*; import java.io.*;
|
[
"com.sun.corba",
"java.io"
] |
com.sun.corba; java.io;
| 2,232,214
|
public Requester form(final Map<?, ?> values, final String charset)
throws HttpRequestException {
if (!values.isEmpty()) {
for (Entry<?, ?> entry : values.entrySet()) {
form(entry, charset);
}
}
return this;
}
|
Requester function(final Map<?, ?> values, final String charset) throws HttpRequestException { if (!values.isEmpty()) { for (Entry<?, ?> entry : values.entrySet()) { form(entry, charset); } } return this; }
|
/**
* Write the values in the map as encoded form data to the request body
*
* @param values
* @param charset
* @return this request
* @throws HttpRequestException
*/
|
Write the values in the map as encoded form data to the request body
|
form
|
{
"repo_name": "vchoury/ses-daemon",
"path": "src/main/java/fr/vcy/coredaemon/httpd/utils/Requester.java",
"license": "mit",
"size": 96195
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,960,298
|
public LocalelessTest doSilentlyIgnoreMissingResourceException( boolean silentlyIgnoreMissingResourceException )
{
return new LocalelessTest( this.locale, silentlyIgnoreMissingResourceException );
}
/**
* Returns a new instance of {@link LocalelessTest} which uses the given {@link Locale}
|
LocalelessTest function( boolean silentlyIgnoreMissingResourceException ) { return new LocalelessTest( this.locale, silentlyIgnoreMissingResourceException ); } /** * Returns a new instance of {@link LocalelessTest} which uses the given {@link Locale}
|
/**
* Returns a new instance of {@link LocalelessTest} which uses the given setting for the exception handling
* @see LocalelessTest
* @param silentlyIgnoreMissingResourceException
*/
|
Returns a new instance of <code>LocalelessTest</code> which uses the given setting for the exception handling
|
doSilentlyIgnoreMissingResourceException
|
{
"repo_name": "alexchiri/i18n-binder",
"path": "i18nbinder-core/src/test/java/org/omnaest/i18nbinder/internal/facade/i18nfacade/i18n/LocalelessTest.java",
"license": "apache-2.0",
"size": 4813
}
|
[
"java.util.Locale"
] |
import java.util.Locale;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 930,941
|
public static int getEmptySlot(IInventory inventory, int start, int end) {
for (int i = start; i < end; i++) {
if (inventory.getStackInSlot(i) == null) {
return i;
}
}
return -1;
}
|
static int function(IInventory inventory, int start, int end) { for (int i = start; i < end; i++) { if (inventory.getStackInSlot(i) == null) { return i; } } return -1; }
|
/**
* Get the first empty slot in the inventory inside the provided slot index range.
*
* @param inventory - inventory to check
* @param start - first slot index (inclusive)
* @param end - last slot index (exclusive)
* @return slot index of the first empty slot, -1 if none found
*/
|
Get the first empty slot in the inventory inside the provided slot index range
|
getEmptySlot
|
{
"repo_name": "planetguy32/TurboCraftingTable",
"path": "src/main/java/me/planetguy/tct/util/InventoryUtils.java",
"license": "mit",
"size": 9586
}
|
[
"net.minecraft.inventory.IInventory"
] |
import net.minecraft.inventory.IInventory;
|
import net.minecraft.inventory.*;
|
[
"net.minecraft.inventory"
] |
net.minecraft.inventory;
| 1,036,892
|
public List<AccountingLineViewLineFillingElement> getElements() {
return elements;
}
|
List<AccountingLineViewLineFillingElement> function() { return elements; }
|
/**
* Gets the elements attribute.
* @return Returns the elements.
*/
|
Gets the elements attribute
|
getElements
|
{
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/sys/document/web/AccountingLineViewLines.java",
"license": "apache-2.0",
"size": 8868
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,918,195
|
List<Filter> filters = new ArrayList<Filter>();
filters.add(new PageFilter(MAX_PAGES));
filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
Filter filterMPONE =
new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
filterMPONE.reset();
assertFalse(filterMPONE.filterAllRemaining());
byte [] rowkey = Bytes.toBytes("yyyyyyyyy");
for (int i = 0; i < MAX_PAGES - 1; i++) {
assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i),
Bytes.toBytes(i));
assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
assertFalse(filterMPONE.filterRow());
}
rowkey = Bytes.toBytes("z");
assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0),
Bytes.toBytes(0));
assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
assertFalse(filterMPONE.filterRow());
rowkey = Bytes.toBytes("yyy");
assertTrue(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0),
Bytes.toBytes(0));
assertFalse(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
rowkey = Bytes.toBytes("z");
assertTrue(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
assertTrue(filterMPONE.filterAllRemaining());
}
|
List<Filter> filters = new ArrayList<Filter>(); filters.add(new PageFilter(MAX_PAGES)); filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy")))); Filter filterMPONE = new FilterList(FilterList.Operator.MUST_PASS_ONE, filters); filterMPONE.reset(); assertFalse(filterMPONE.filterAllRemaining()); byte [] rowkey = Bytes.toBytes(STR); for (int i = 0; i < MAX_PAGES - 1; i++) { assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length)); KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), Bytes.toBytes(i)); assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv)); assertFalse(filterMPONE.filterRow()); } rowkey = Bytes.toBytes("z"); assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length)); KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0), Bytes.toBytes(0)); assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv)); assertFalse(filterMPONE.filterRow()); rowkey = Bytes.toBytes("yyy"); assertTrue(filterMPONE.filterRowKey(rowkey, 0, rowkey.length)); kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0), Bytes.toBytes(0)); assertFalse(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv)); rowkey = Bytes.toBytes("z"); assertTrue(filterMPONE.filterRowKey(rowkey, 0, rowkey.length)); assertTrue(filterMPONE.filterAllRemaining()); }
|
/**
* Test "must pass one"
* @throws Exception
*/
|
Test "must pass one"
|
testMPONE
|
{
"repo_name": "axfcampos/hbase-0.94.19",
"path": "src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java",
"license": "apache-2.0",
"size": 20812
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.hbase.KeyValue",
"org.apache.hadoop.hbase.filter.Filter",
"org.apache.hadoop.hbase.filter.FilterList",
"org.apache.hadoop.hbase.util.Bytes"
] |
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.util.Bytes;
|
import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.filter.*; import org.apache.hadoop.hbase.util.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 2,305,524
|
private Set<String> getBulkComputeArgs() {
final Set<String> bulkComputeArgs = new LinkedHashSet<>();
for (final Set<String> set : this.bulkComputeSetCaptor.getAllValues()) {
bulkComputeArgs.addAll(set);
}
return bulkComputeArgs;
}
private static final boolean debugResults;
static {
debugResults = Boolean.parseBoolean(System.getProperty(EhcacheBasicRemoveAllTest.class.getName() + ".debug", "false"));
}
@Rule public TestName name = new TestName();
/**
* Writes a dump of test object details to {@code System.out} if, and only if, {@link #debugResults} is enabled.
*
* @param fakeStore the {@link org.ehcache.core.EhcacheBasicCrudBase.FakeStore FakeStore} instance used in the test
* @param originalStoreContent the original content provided to {@code fakeStore}
* @param fakeLoaderWriter the {@link org.ehcache.core.EhcacheBasicCrudBase.FakeCacheLoaderWriter FakeCacheLoaderWriter} instances used in the test
* @param originalWriterContent the original content provided to {@code fakeLoaderWriter}
* @param contentUpdates the {@code Set} provided to the {@link Ehcache#removeAll(java.util.Set)} call in the test
* @param expectedFailures the {@code Set} of failing keys expected for the test
* @param expectedSuccesses the {@code Set} of successful keys expected for the test
* @param bcweSuccesses the {@code Set} from {@link BulkCacheWritingException#getSuccesses()}
* @param bcweFailures the {@code Map} from {@link BulkCacheWritingException#getFailures()}
|
Set<String> function() { final Set<String> bulkComputeArgs = new LinkedHashSet<>(); for (final Set<String> set : this.bulkComputeSetCaptor.getAllValues()) { bulkComputeArgs.addAll(set); } return bulkComputeArgs; } private static final boolean debugResults; static { debugResults = Boolean.parseBoolean(System.getProperty(EhcacheBasicRemoveAllTest.class.getName() + STR, "false")); } @Rule public TestName name = new TestName(); /** * Writes a dump of test object details to {@code System.out} if, and only if, {@link #debugResults} is enabled. * * @param fakeStore the {@link org.ehcache.core.EhcacheBasicCrudBase.FakeStore FakeStore} instance used in the test * @param originalStoreContent the original content provided to {@code fakeStore} * @param fakeLoaderWriter the {@link org.ehcache.core.EhcacheBasicCrudBase.FakeCacheLoaderWriter FakeCacheLoaderWriter} instances used in the test * @param originalWriterContent the original content provided to {@code fakeLoaderWriter} * @param contentUpdates the {@code Set} provided to the {@link Ehcache#removeAll(java.util.Set)} call in the test * @param expectedFailures the {@code Set} of failing keys expected for the test * @param expectedSuccesses the {@code Set} of successful keys expected for the test * @param bcweSuccesses the {@code Set} from {@link BulkCacheWritingException#getSuccesses()} * @param bcweFailures the {@code Map} from {@link BulkCacheWritingException#getFailures()}
|
/**
* Collects all arguments captured by {@link #bulkComputeSetCaptor}.
*
* @return the argument values collected by {@link #bulkComputeSetCaptor}; the
* {@code Iterator} over the resulting {@code Set} returns the values
* in the order observed by the captor.
*/
|
Collects all arguments captured by <code>#bulkComputeSetCaptor</code>
|
getBulkComputeArgs
|
{
"repo_name": "jhouserizer/ehcache3",
"path": "ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java",
"license": "apache-2.0",
"size": 15085
}
|
[
"java.util.LinkedHashSet",
"java.util.Map",
"java.util.Set",
"org.ehcache.spi.loaderwriter.BulkCacheWritingException",
"org.junit.Rule",
"org.junit.rules.TestName"
] |
import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.junit.Rule; import org.junit.rules.TestName;
|
import java.util.*; import org.ehcache.spi.loaderwriter.*; import org.junit.*; import org.junit.rules.*;
|
[
"java.util",
"org.ehcache.spi",
"org.junit",
"org.junit.rules"
] |
java.util; org.ehcache.spi; org.junit; org.junit.rules;
| 2,873,527
|
private static void visitTargetsInAssign(EasyAstIteratorBase visitor, exprType[] targets) {
if (targets == null) {
return;
}
for (int i = 0; i < targets.length; i++) {
exprType t = targets[i];
if (t instanceof Tuple) {
Tuple tuple = (Tuple) t;
visitTargetsInAssign(visitor, tuple.elts);
}
visitTargetInAssign(visitor, t);
}
}
|
static void function(EasyAstIteratorBase visitor, exprType[] targets) { if (targets == null) { return; } for (int i = 0; i < targets.length; i++) { exprType t = targets[i]; if (t instanceof Tuple) { Tuple tuple = (Tuple) t; visitTargetsInAssign(visitor, tuple.elts); } visitTargetInAssign(visitor, t); } }
|
/**
* Given a visitor and the targets found in an assign, visit them to find class attributes / instance variables.
*
* @param visitor the visitor
* @param targets the expressions in the target
*/
|
Given a visitor and the targets found in an assign, visit them to find class attributes / instance variables
|
visitTargetsInAssign
|
{
"repo_name": "smkr/pyclipse",
"path": "plugins/org.python.pydev.parser/src/org/python/pydev/parser/visitors/scope/DefinitionsASTIteratorVisitor.java",
"license": "epl-1.0",
"size": 5165
}
|
[
"org.python.pydev.parser.jython.ast.Tuple"
] |
import org.python.pydev.parser.jython.ast.Tuple;
|
import org.python.pydev.parser.jython.ast.*;
|
[
"org.python.pydev"
] |
org.python.pydev;
| 1,533,689
|
public IPortalUrlBuilder getPortalUrlBuilderByLayoutNode(HttpServletRequest request, String layoutNodeId, UrlType urlType);
// Not implemented until all folders have fnames?
// public IPortalUrlBuilder getPortalUrlBuilderByLayoutFName(HttpServletRequest request, String folderFname, UrlType urlType);
|
IPortalUrlBuilder function(HttpServletRequest request, String layoutNodeId, UrlType urlType);
|
/**
* Get a portal URL builder that targets the specified layout node.
*
* @param request The current portal request
* @param layoutNodeId ID of the node in the user's layout that should be targeted by the URL.
* @param urlType The type of the portal url to create
* @return {@link IPortalUrlBuilder} targeting the specified node in the user's layout
* @throws IllegalArgumentException If the specified ID doesn't exist for a folder in the users layout.
*/
|
Get a portal URL builder that targets the specified layout node
|
getPortalUrlBuilderByLayoutNode
|
{
"repo_name": "MichaelVose2/uPortal",
"path": "uportal-war/src/main/java/org/apereo/portal/url/IPortalUrlProvider.java",
"license": "apache-2.0",
"size": 4173
}
|
[
"javax.servlet.http.HttpServletRequest"
] |
import javax.servlet.http.HttpServletRequest;
|
import javax.servlet.http.*;
|
[
"javax.servlet"
] |
javax.servlet;
| 1,772,852
|
protected IdentityAssoc toIdentityAssoc(Collection<DhcpLease> leases)
{
IdentityAssoc ia = null;
if ((leases != null) && !leases.isEmpty()) {
Iterator<DhcpLease> leaseIter = leases.iterator();
DhcpLease lease = leaseIter.next();
ia = new IdentityAssoc();
ia.setDuid(lease.getDuid());
ia.setIatype(lease.getIatype());
ia.setIaid(lease.getIaid());
ia.setState(lease.getState());
ia.setDhcpOptions(lease.getIaDhcpOptions());
List<IaAddress> iaAddrs = new ArrayList<IaAddress>();
iaAddrs.add(toIaAddress(lease));
while (leaseIter.hasNext()) {
//TODO: should confirm that the duid/iatype/iaid/state still match
lease = leaseIter.next();
iaAddrs.add(toIaAddress(lease));
}
ia.setIaAddresses(iaAddrs);
}
return ia;
}
|
IdentityAssoc function(Collection<DhcpLease> leases) { IdentityAssoc ia = null; if ((leases != null) && !leases.isEmpty()) { Iterator<DhcpLease> leaseIter = leases.iterator(); DhcpLease lease = leaseIter.next(); ia = new IdentityAssoc(); ia.setDuid(lease.getDuid()); ia.setIatype(lease.getIatype()); ia.setIaid(lease.getIaid()); ia.setState(lease.getState()); ia.setDhcpOptions(lease.getIaDhcpOptions()); List<IaAddress> iaAddrs = new ArrayList<IaAddress>(); iaAddrs.add(toIaAddress(lease)); while (leaseIter.hasNext()) { lease = leaseIter.next(); iaAddrs.add(toIaAddress(lease)); } ia.setIaAddresses(iaAddrs); } return ia; }
|
/**
* To identity assoc.
*
* @param leases the leases
* @return the identity assoc
*/
|
To identity assoc
|
toIdentityAssoc
|
{
"repo_name": "marosmars/dhcp",
"path": "Jagornet-DHCP/src/com/jagornet/dhcp/db/LeaseManager.java",
"license": "gpl-3.0",
"size": 13993
}
|
[
"java.util.ArrayList",
"java.util.Collection",
"java.util.Iterator",
"java.util.List"
] |
import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,472,911
|
private void setSlowThresholds(Configuration c) {
slowIndexPrepareThreshold = c.getLong(INDEXER_INDEX_WRITE_SLOW_THRESHOLD_KEY,
INDEXER_INDEX_WRITE_SLOW_THRESHOLD_DEFAULT);
slowIndexWriteThreshold = c.getLong(INDEXER_INDEX_PREPARE_SLOW_THRESHOLD_KEY,
INDEXER_INDEX_PREPARE_SLOW_THREHSOLD_DEFAULT);
slowPreWALRestoreThreshold = c.getLong(INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_KEY,
INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_DEFAULT);
slowPostOpenThreshold = c.getLong(INDEXER_POST_OPEN_SLOW_THRESHOLD_KEY,
INDEXER_POST_OPEN_SLOW_THRESHOLD_DEFAULT);
slowPreIncrementThreshold = c.getLong(INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_KEY,
INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_DEFAULT);
}
|
void function(Configuration c) { slowIndexPrepareThreshold = c.getLong(INDEXER_INDEX_WRITE_SLOW_THRESHOLD_KEY, INDEXER_INDEX_WRITE_SLOW_THRESHOLD_DEFAULT); slowIndexWriteThreshold = c.getLong(INDEXER_INDEX_PREPARE_SLOW_THRESHOLD_KEY, INDEXER_INDEX_PREPARE_SLOW_THREHSOLD_DEFAULT); slowPreWALRestoreThreshold = c.getLong(INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_KEY, INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_DEFAULT); slowPostOpenThreshold = c.getLong(INDEXER_POST_OPEN_SLOW_THRESHOLD_KEY, INDEXER_POST_OPEN_SLOW_THRESHOLD_DEFAULT); slowPreIncrementThreshold = c.getLong(INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_KEY, INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_DEFAULT); }
|
/**
* Extracts the slow call threshold values from the configuration.
*/
|
Extracts the slow call threshold values from the configuration
|
setSlowThresholds
|
{
"repo_name": "growingio/phoenix",
"path": "phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java",
"license": "apache-2.0",
"size": 35783
}
|
[
"org.apache.hadoop.conf.Configuration"
] |
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 2,738,679
|
private static void unZip(String targetPath, ZipInputStream zipInputStream) throws IOException {
if (StringUtils.isNotEmpty(targetPath)) {
targetPath = targetPath + File.separator;
}
ZipEntry entry;
String entryName;
while ((entry = zipInputStream.getNextEntry()) != null) {
entryName = entry.getName();
if (entry.isDirectory()) {
entryName = entryName.substring(0, entryName.length() - 1);
FileUtils.createDir(targetPath + entryName);
} else {
File file = new File(targetPath + entryName);
FileUtils.createFile(file);
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(file);
byte[] bytes = new byte[BUFF_SIZE];
int len;
while ((len = zipInputStream.read(bytes)) != -1) {
fileOutputStream.write(bytes, 0, len);
}
fileOutputStream.flush();
} finally {
IOUtils.closeQuietly(fileOutputStream);
}
}
}
}
|
static void function(String targetPath, ZipInputStream zipInputStream) throws IOException { if (StringUtils.isNotEmpty(targetPath)) { targetPath = targetPath + File.separator; } ZipEntry entry; String entryName; while ((entry = zipInputStream.getNextEntry()) != null) { entryName = entry.getName(); if (entry.isDirectory()) { entryName = entryName.substring(0, entryName.length() - 1); FileUtils.createDir(targetPath + entryName); } else { File file = new File(targetPath + entryName); FileUtils.createFile(file); FileOutputStream fileOutputStream = null; try { fileOutputStream = new FileOutputStream(file); byte[] bytes = new byte[BUFF_SIZE]; int len; while ((len = zipInputStream.read(bytes)) != -1) { fileOutputStream.write(bytes, 0, len); } fileOutputStream.flush(); } finally { IOUtils.closeQuietly(fileOutputStream); } } } }
|
/**
* Real decompression.
*/
|
Real decompression
|
unZip
|
{
"repo_name": "motcwang/MCommon",
"path": "mcommon/src/main/java/im/wangchao/mcommon/utils/ZipUtils.java",
"license": "gpl-2.0",
"size": 6560
}
|
[
"java.io.File",
"java.io.FileOutputStream",
"java.io.IOException",
"java.util.zip.ZipEntry",
"java.util.zip.ZipInputStream"
] |
import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream;
|
import java.io.*; import java.util.zip.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 1,147,798
|
@Nullable
private synchronized PresenterScope getScope(final Object host) {
if (host instanceof Activity) {
final ActivityInstanceObserver detector = mActivityInstanceObserver;
if (detector == null) {
return null;
}
final Activity activity = (Activity) host;
final String scopeId = detector.getActivityId(activity);
if (scopeId == null) {
return null;
}
return mScopes.get(scopeId);
} else {
// currently only Activity is supported as host
throw new IllegalHostException(host);
}
}
|
synchronized PresenterScope function(final Object host) { if (host instanceof Activity) { final ActivityInstanceObserver detector = mActivityInstanceObserver; if (detector == null) { return null; } final Activity activity = (Activity) host; final String scopeId = detector.getActivityId(activity); if (scopeId == null) { return null; } return mScopes.get(scopeId); } else { throw new IllegalHostException(host); } }
|
/**
* retrieves an existing scope for a {@link Activity} but doesn't create on when the scope
* doesn't exist
*/
|
retrieves an existing scope for a <code>Activity</code> but doesn't create on when the scope doesn't exist
|
getScope
|
{
"repo_name": "weiwenqiang/GitHub",
"path": "MVP/RxJava2ToMVP/ThirtyInch-master/thirtyinch/src/main/java/net/grandcentrix/thirtyinch/internal/PresenterSavior.java",
"license": "apache-2.0",
"size": 10142
}
|
[
"android.app.Activity"
] |
import android.app.Activity;
|
import android.app.*;
|
[
"android.app"
] |
android.app;
| 331,743
|
public void setLoadingImage(Bitmap bitmap) {
mLoadingBitmap = bitmap;
}
|
void function(Bitmap bitmap) { mLoadingBitmap = bitmap; }
|
/**
* Set placeholder bitmap that shows when the the background thread is
* running.
*/
|
Set placeholder bitmap that shows when the the background thread is running
|
setLoadingImage
|
{
"repo_name": "vimalrajpara2006/Google-IO-app-2012",
"path": "android/src/com/google/android/apps/iosched/util/ImageWorker.java",
"license": "apache-2.0",
"size": 15000
}
|
[
"android.graphics.Bitmap"
] |
import android.graphics.Bitmap;
|
import android.graphics.*;
|
[
"android.graphics"
] |
android.graphics;
| 884,983
|
public Value callMethodRef(Env env,
QuercusClass qClass,
Value qThis,
Value a1)
{
return callMethodRef(env, qClass, qThis,
new Value[] { a1 });
}
|
Value function(Env env, QuercusClass qClass, Value qThis, Value a1) { return callMethodRef(env, qClass, qThis, new Value[] { a1 }); }
|
/**
* Evaluates the function as a method call.
*/
|
Evaluates the function as a method call
|
callMethodRef
|
{
"repo_name": "christianchristensen/resin",
"path": "modules/quercus/src/com/caucho/quercus/function/AbstractFunction.java",
"license": "gpl-2.0",
"size": 16953
}
|
[
"com.caucho.quercus.env.Env",
"com.caucho.quercus.env.QuercusClass",
"com.caucho.quercus.env.Value"
] |
import com.caucho.quercus.env.Env; import com.caucho.quercus.env.QuercusClass; import com.caucho.quercus.env.Value;
|
import com.caucho.quercus.env.*;
|
[
"com.caucho.quercus"
] |
com.caucho.quercus;
| 2,129,460
|
public Vector getParameters()
{
return this.parameters;
}
|
Vector function() { return this.parameters; }
|
/**
* Getter for parameters
* @return
* Parameters of the multinomial distribution, must be at least
* 2-dimensional and each element must be nonnegative.
*/
|
Getter for parameters
|
getParameters
|
{
"repo_name": "codeaudit/Foundry",
"path": "Components/LearningCore/Source/gov/sandia/cognition/statistics/distribution/CategoricalDistribution.java",
"license": "bsd-3-clause",
"size": 9597
}
|
[
"gov.sandia.cognition.math.matrix.Vector"
] |
import gov.sandia.cognition.math.matrix.Vector;
|
import gov.sandia.cognition.math.matrix.*;
|
[
"gov.sandia.cognition"
] |
gov.sandia.cognition;
| 2,070,555
|
public CharSequence getTextBeforeCursor(int length, int flags) {
final Editable content = getEditable();
if (content == null) return null;
int a = Selection.getSelectionStart(content);
int b = Selection.getSelectionEnd(content);
if (a > b) {
int tmp = a;
a = b;
b = tmp;
}
if (a <= 0) {
return "";
}
if (length > a) {
length = a;
}
if ((flags&GET_TEXT_WITH_STYLES) != 0) {
return content.subSequence(a - length, a);
}
return TextUtils.substring(content, a - length, a);
}
|
CharSequence function(int length, int flags) { final Editable content = getEditable(); if (content == null) return null; int a = Selection.getSelectionStart(content); int b = Selection.getSelectionEnd(content); if (a > b) { int tmp = a; a = b; b = tmp; } if (a <= 0) { return ""; } if (length > a) { length = a; } if ((flags&GET_TEXT_WITH_STYLES) != 0) { return content.subSequence(a - length, a); } return TextUtils.substring(content, a - length, a); }
|
/**
* The default implementation returns the given amount of text from the
* current cursor position in the buffer.
*/
|
The default implementation returns the given amount of text from the current cursor position in the buffer
|
getTextBeforeCursor
|
{
"repo_name": "syslover33/ctank",
"path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/android/view/inputmethod/BaseInputConnection.java",
"license": "gpl-3.0",
"size": 22688
}
|
[
"android.text.Editable",
"android.text.Selection",
"android.text.TextUtils"
] |
import android.text.Editable; import android.text.Selection; import android.text.TextUtils;
|
import android.text.*;
|
[
"android.text"
] |
android.text;
| 2,133,254
|
public void setWidgetMinSize(Widget child,
int minSize) {
assertIsAChild( child );
Splitter splitter = getAssociatedSplitter( child );
// The splitter is null for the center element.
if ( splitter != null ) {
splitter.setMinSize( minSize );
} else {
minCenterSize = minSize;
}
}
|
void function(Widget child, int minSize) { assertIsAChild( child ); Splitter splitter = getAssociatedSplitter( child ); if ( splitter != null ) { splitter.setMinSize( minSize ); } else { minCenterSize = minSize; } }
|
/**
* Sets the minimum allowable size for the given widget.
* <p>
* Its associated splitter cannot be dragged to a position that would make
* it smaller than this size. This method has no effect for the
* {@link DockLayoutPanel.Direction#CENTER} widget.
* </p>
*
* @param child
* the child whose minimum size will be set
* @param minSize
* the minimum size for this widget
*/
|
Sets the minimum allowable size for the given widget. Its associated splitter cannot be dragged to a position that would make it smaller than this size. This method has no effect for the <code>DockLayoutPanel.Direction#CENTER</code> widget.
|
setWidgetMinSize
|
{
"repo_name": "Rikkola/uberfire",
"path": "uberfire-workbench/uberfire-workbench-client/src/main/java/org/uberfire/client/workbench/widgets/split/WorkbenchSplitLayoutPanel.java",
"license": "apache-2.0",
"size": 17236
}
|
[
"com.google.gwt.user.client.ui.Widget"
] |
import com.google.gwt.user.client.ui.Widget;
|
import com.google.gwt.user.client.ui.*;
|
[
"com.google.gwt"
] |
com.google.gwt;
| 2,116,204
|
private String getBreakpointDescription(final int row) {
final Pair<IDebugger, Integer> breakpoint =
CBreakpointTableHelpers.findBreakpoint(m_debuggerProvider, row);
final BreakpointManager manager = breakpoint.first().getBreakpointManager();
final int breakpointIndex = breakpoint.second();
return manager.getBreakpoint(BreakpointType.REGULAR, breakpointIndex).getDescription();
}
|
String function(final int row) { final Pair<IDebugger, Integer> breakpoint = CBreakpointTableHelpers.findBreakpoint(m_debuggerProvider, row); final BreakpointManager manager = breakpoint.first().getBreakpointManager(); final int breakpointIndex = breakpoint.second(); return manager.getBreakpoint(BreakpointType.REGULAR, breakpointIndex).getDescription(); }
|
/**
* Determines the description of the breakpoint shown in a given row.
*
* @param row The row where the breakpoint is shown.
*
* @return The description of the breakpoint shown in the given row.
*/
|
Determines the description of the breakpoint shown in a given row
|
getBreakpointDescription
|
{
"repo_name": "AmesianX/binnavi",
"path": "src/main/java/com/google/security/zynamics/binnavi/Gui/Debug/BreakpointTable/CBreakpointTableModel.java",
"license": "apache-2.0",
"size": 10847
}
|
[
"com.google.security.zynamics.binnavi.debug.debugger.interfaces.IDebugger",
"com.google.security.zynamics.binnavi.debug.models.breakpoints.BreakpointManager",
"com.google.security.zynamics.binnavi.debug.models.breakpoints.enums.BreakpointType",
"com.google.security.zynamics.zylib.general.Pair"
] |
import com.google.security.zynamics.binnavi.debug.debugger.interfaces.IDebugger; import com.google.security.zynamics.binnavi.debug.models.breakpoints.BreakpointManager; import com.google.security.zynamics.binnavi.debug.models.breakpoints.enums.BreakpointType; import com.google.security.zynamics.zylib.general.Pair;
|
import com.google.security.zynamics.binnavi.debug.debugger.interfaces.*; import com.google.security.zynamics.binnavi.debug.models.breakpoints.*; import com.google.security.zynamics.binnavi.debug.models.breakpoints.enums.*; import com.google.security.zynamics.zylib.general.*;
|
[
"com.google.security"
] |
com.google.security;
| 209,983
|
public void put(char charValue) {
preparePut();
ensureFit(3);
_bytes[_size++] = (byte) TYPE_CHAR;
Util.putChar(_bytes, _size, charValue);
_size += 2;
_serializedItemCount++;
}
|
void function(char charValue) { preparePut(); ensureFit(3); _bytes[_size++] = (byte) TYPE_CHAR; Util.putChar(_bytes, _size, charValue); _size += 2; _serializedItemCount++; }
|
/**
* Replaces the current state with the supplied <code>char</code> value (or
* in <i><a href="#_streamMode">stream mode</a></i>, appends a new field
* containing this value to the state).
*
* @param charValue
* The new value
*/
|
Replaces the current state with the supplied <code>char</code> value (or in stream mode, appends a new field containing this value to the state)
|
put
|
{
"repo_name": "jaytaylor/persistit",
"path": "src/main/java/com/persistit/Value.java",
"license": "epl-1.0",
"size": 180447
}
|
[
"com.persistit.util.Util"
] |
import com.persistit.util.Util;
|
import com.persistit.util.*;
|
[
"com.persistit.util"
] |
com.persistit.util;
| 644,240
|
public Builder setCredentialDataStoreFactory(AbstractDataStoreFactory credentialDataStoreFactory) {
this.credentialDataStoreFactory = credentialDataStoreFactory;
return this;
}
|
Builder function(AbstractDataStoreFactory credentialDataStoreFactory) { this.credentialDataStoreFactory = credentialDataStoreFactory; return this; }
|
/**
* Sets the Credential DataStore factory used for storing and loading Credentials per user.
* Optional and defaults to an {@link MemoryDataStoreFactory}.
*/
|
Sets the Credential DataStore factory used for storing and loading Credentials per user. Optional and defaults to an <code>MemoryDataStoreFactory</code>
|
setCredentialDataStoreFactory
|
{
"repo_name": "uber/rides-java-sdk",
"path": "uber-core-oauth-client-adapter/src/main/java/com/uber/sdk/core/auth/OAuth2Credentials.java",
"license": "mit",
"size": 11477
}
|
[
"com.google.api.client.util.store.AbstractDataStoreFactory"
] |
import com.google.api.client.util.store.AbstractDataStoreFactory;
|
import com.google.api.client.util.store.*;
|
[
"com.google.api"
] |
com.google.api;
| 921,349
|
public static void assertEquals(BigDecimal expected, BigDecimal actual) {
assertEquals(null, expected, actual);
}
|
static void function(BigDecimal expected, BigDecimal actual) { assertEquals(null, expected, actual); }
|
/**
* Asserts that 2 given BigDecimal numbers are equivalent to 2 decimal places.
*
* @param expected expected BigDecimal value
* @param actual actual BigDecimal value
*/
|
Asserts that 2 given BigDecimal numbers are equivalent to 2 decimal places
|
assertEquals
|
{
"repo_name": "WebDataConsulting/billing",
"path": "test/unit/com/sapienter/jbilling/server/BigDecimalTestCase.java",
"license": "agpl-3.0",
"size": 2459
}
|
[
"java.math.BigDecimal"
] |
import java.math.BigDecimal;
|
import java.math.*;
|
[
"java.math"
] |
java.math;
| 2,207,826
|
public TreePath getPathForRow(int row) {
return renderer.getPathForRow(row);
}
|
TreePath function(int row) { return renderer.getPathForRow(row); }
|
/**
* Returns the TreePath for a given row.
*
* @param row
*
* @return the <code>TreePath</code> for the given row.
*/
|
Returns the TreePath for a given row
|
getPathForRow
|
{
"repo_name": "syncer/swingx",
"path": "swingx-core/src/main/java/org/jdesktop/swingx/JXTreeTable.java",
"license": "lgpl-2.1",
"size": 132592
}
|
[
"javax.swing.tree.TreePath"
] |
import javax.swing.tree.TreePath;
|
import javax.swing.tree.*;
|
[
"javax.swing"
] |
javax.swing;
| 2,617,231
|
public void setEmptyView(View emptyView) {
mEmptyView = emptyView;
}
|
void function(View emptyView) { mEmptyView = emptyView; }
|
/**
* Designate a view as the empty view. When the backing adapter has no
* data this view will be made visible and the recycler view hidden.
*
*/
|
Designate a view as the empty view. When the backing adapter has no data this view will be made visible and the recycler view hidden
|
setEmptyView
|
{
"repo_name": "floring/SwissManager",
"path": "app/src/main/java/com/arles/swissmanager/utils/ExtendedRecyclerView.java",
"license": "apache-2.0",
"size": 2546
}
|
[
"android.view.View"
] |
import android.view.View;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,400,767
|
private void refreshWallpapersList() {
//converte para vetor
Vector v = new Vector();
for (int i = 0; i < wallpapers.size(); i++) {
String path = wallpapers.get(i);
v.add(path);
}
//atualiza lista
wallpapersList.setListData(v);
}//fim do método refreshWallpapersList
|
void function() { Vector v = new Vector(); for (int i = 0; i < wallpapers.size(); i++) { String path = wallpapers.get(i); v.add(path); } wallpapersList.setListData(v); }
|
/**
* Atualiza lista de wallpapers
*/
|
Atualiza lista de wallpapers
|
refreshWallpapersList
|
{
"repo_name": "jmayer13/LinuxMintCriadorSlidesWallpapers",
"path": "src/minwallpaper/MainFrame.java",
"license": "gpl-2.0",
"size": 24494
}
|
[
"java.util.Vector"
] |
import java.util.Vector;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,383,015
|
@Override
public Map<MetricName, ? extends Metric> metrics() {
return Collections.unmodifiableMap(this.metrics.metrics());
}
|
Map<MetricName, ? extends Metric> function() { return Collections.unmodifiableMap(this.metrics.metrics()); }
|
/**
* Get the metrics kept by the consumer
*/
|
Get the metrics kept by the consumer
|
metrics
|
{
"repo_name": "samaitra/kafka",
"path": "clients/src/main/java/org/apache/kafka/clients/consumer/KafkaConsumer.java",
"license": "apache-2.0",
"size": 69133
}
|
[
"java.util.Collections",
"java.util.Map",
"org.apache.kafka.common.Metric",
"org.apache.kafka.common.MetricName"
] |
import java.util.Collections; import java.util.Map; import org.apache.kafka.common.Metric; import org.apache.kafka.common.MetricName;
|
import java.util.*; import org.apache.kafka.common.*;
|
[
"java.util",
"org.apache.kafka"
] |
java.util; org.apache.kafka;
| 2,426,840
|
@Override
public final void sessionOpened(IoSession session) throws Exception {
ProxyIoSession proxyIoSession = (ProxyIoSession) session.getAttribute(ProxyIoSession.PROXY_SESSION);
if (proxyIoSession.getRequest() instanceof SocksProxyRequest || proxyIoSession.isAuthenticationFailed()
|| proxyIoSession.getHandler().isHandshakeComplete()) {
proxySessionOpened(session);
} else {
logger.debug("Filtered session opened event !");
}
}
|
final void function(IoSession session) throws Exception { ProxyIoSession proxyIoSession = (ProxyIoSession) session.getAttribute(ProxyIoSession.PROXY_SESSION); if (proxyIoSession.getRequest() instanceof SocksProxyRequest proxyIoSession.isAuthenticationFailed() proxyIoSession.getHandler().isHandshakeComplete()) { proxySessionOpened(session); } else { logger.debug(STR); } }
|
/**
* Hooked session opened event.
*
* @param session the io session
*/
|
Hooked session opened event
|
sessionOpened
|
{
"repo_name": "DL7AD/SSR-Receiver",
"path": "src/org/apache/mina/proxy/AbstractProxyIoHandler.java",
"license": "gpl-3.0",
"size": 2369
}
|
[
"org.apache.mina.core.session.IoSession",
"org.apache.mina.proxy.handlers.socks.SocksProxyRequest",
"org.apache.mina.proxy.session.ProxyIoSession"
] |
import org.apache.mina.core.session.IoSession; import org.apache.mina.proxy.handlers.socks.SocksProxyRequest; import org.apache.mina.proxy.session.ProxyIoSession;
|
import org.apache.mina.core.session.*; import org.apache.mina.proxy.handlers.socks.*; import org.apache.mina.proxy.session.*;
|
[
"org.apache.mina"
] |
org.apache.mina;
| 2,175,086
|
@Test
public void testMoreDataRows() throws Exception {
upsertRow(dataTableUpsertStmt, 1, "name-1", 95123);
conn.commit();
disableIndex();
// these rows won't have a corresponding index row
upsertRow(dataTableUpsertStmt, 2, "name-2", 95124);
upsertRow(dataTableUpsertStmt, 3, "name-3", 95125);
conn.commit();
List<Job> completedJobs = runScrutiny(schemaName, dataTableName, indexTableName);
Job job = completedJobs.get(0);
assertTrue(job.isSuccessful());
Counters counters = job.getCounters();
assertEquals(1, getCounterValue(counters, VALID_ROW_COUNT));
assertEquals(2, getCounterValue(counters, INVALID_ROW_COUNT));
}
|
void function() throws Exception { upsertRow(dataTableUpsertStmt, 1, STR, 95123); conn.commit(); disableIndex(); upsertRow(dataTableUpsertStmt, 2, STR, 95124); upsertRow(dataTableUpsertStmt, 3, STR, 95125); conn.commit(); List<Job> completedJobs = runScrutiny(schemaName, dataTableName, indexTableName); Job job = completedJobs.get(0); assertTrue(job.isSuccessful()); Counters counters = job.getCounters(); assertEquals(1, getCounterValue(counters, VALID_ROW_COUNT)); assertEquals(2, getCounterValue(counters, INVALID_ROW_COUNT)); }
|
/**
* Tests when there are more data table rows than index table rows Scrutiny should report the
* number of incorrect rows
*/
|
Tests when there are more data table rows than index table rows Scrutiny should report the number of incorrect rows
|
testMoreDataRows
|
{
"repo_name": "ohadshacham/phoenix",
"path": "phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java",
"license": "apache-2.0",
"size": 32489
}
|
[
"java.util.List",
"org.apache.hadoop.mapreduce.Counters",
"org.apache.hadoop.mapreduce.Job",
"org.junit.Assert"
] |
import java.util.List; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; import org.junit.Assert;
|
import java.util.*; import org.apache.hadoop.mapreduce.*; import org.junit.*;
|
[
"java.util",
"org.apache.hadoop",
"org.junit"
] |
java.util; org.apache.hadoop; org.junit;
| 2,618,425
|
public void setConfigureResult(int resultCode) {
final Intent data = new Intent();
data.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId);
setResult(resultCode, data);
}
|
void function(int resultCode) { final Intent data = new Intent(); data.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId); setResult(resultCode, data); }
|
/**
* Convenience method to always include {@link #mAppWidgetId} when setting
* the result {@link Intent}.
*/
|
Convenience method to always include <code>#mAppWidgetId</code> when setting the result <code>Intent</code>
|
setConfigureResult
|
{
"repo_name": "chrispbailey/ToDo-List-Widget",
"path": "src/org/chrisbailey/todo/activities/ToDoActivity.java",
"license": "gpl-3.0",
"size": 14186
}
|
[
"android.appwidget.AppWidgetManager",
"android.content.Intent"
] |
import android.appwidget.AppWidgetManager; import android.content.Intent;
|
import android.appwidget.*; import android.content.*;
|
[
"android.appwidget",
"android.content"
] |
android.appwidget; android.content;
| 525,230
|
public void addDragView(View child, View dragHandle, int index){
addView(child, index);
// update drag-able children mappings
final int numMappings = draggableChildren.size();
for(int i = numMappings - 1; i >= 0; i--){
final int key = draggableChildren.keyAt(i);
if(key >= index){
draggableChildren.put(key + 1, draggableChildren.get(key));
}
}
setViewDraggable(child, dragHandle);
}
|
void function(View child, View dragHandle, int index){ addView(child, index); final int numMappings = draggableChildren.size(); for(int i = numMappings - 1; i >= 0; i--){ final int key = draggableChildren.keyAt(i); if(key >= index){ draggableChildren.put(key + 1, draggableChildren.get(key)); } } setViewDraggable(child, dragHandle); }
|
/**
* Calls {@link #addView(android.view.View, int)} followed by
* {@link #setViewDraggable(android.view.View, android.view.View)} and correctly updates the
* drag-ability state of all existing views.
*/
|
Calls <code>#addView(android.view.View, int)</code> followed by <code>#setViewDraggable(android.view.View, android.view.View)</code> and correctly updates the drag-ability state of all existing views
|
addDragView
|
{
"repo_name": "jhouseke/one-message-away",
"path": "app/src/main/java/com/jmedeisis/draglinearlayout/DragLinearLayout.java",
"license": "apache-2.0",
"size": 27746
}
|
[
"android.view.View"
] |
import android.view.View;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,057,506
|
public List<Criteria> getOredCriteria() {
return oredCriteria;
}
|
List<Criteria> function() { return oredCriteria; }
|
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table pray
*
* @mbggenerated Fri Jul 08 18:41:36 KST 2016
*/
|
This method was generated by MyBatis Generator. This method corresponds to the database table pray
|
getOredCriteria
|
{
"repo_name": "gusfot/pray-together",
"path": "prayer/src/main/java/com/gusfot/pray/model/PrayExample.java",
"license": "mit",
"size": 26501
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 484,477
|
public void setAllow(Set<HttpMethod> allowedMethods) {
set(ALLOW, StringUtils.collectionToCommaDelimitedString(allowedMethods));
}
|
void function(Set<HttpMethod> allowedMethods) { set(ALLOW, StringUtils.collectionToCommaDelimitedString(allowedMethods)); }
|
/**
* Set the set of allowed {@link HttpMethod HTTP methods},
* as specified by the {@code Allow} header.
*/
|
Set the set of allowed <code>HttpMethod HTTP methods</code>, as specified by the Allow header
|
setAllow
|
{
"repo_name": "leogoing/spring_jeesite",
"path": "spring-web-4.0/org/springframework/http/HttpHeaders.java",
"license": "apache-2.0",
"size": 20561
}
|
[
"java.util.Set",
"org.springframework.util.StringUtils"
] |
import java.util.Set; import org.springframework.util.StringUtils;
|
import java.util.*; import org.springframework.util.*;
|
[
"java.util",
"org.springframework.util"
] |
java.util; org.springframework.util;
| 1,167,925
|
@Test
public void testGenerateVpnProfile_v3_obfs4IPv4AndIPv6_skipIPv6() throws Exception {
gateway = new JSONObject(TestSetupHelper.getInputAsString(getClass().getClassLoader().getResourceAsStream("ptdemo_misconfigured_ipv4ipv6.json"))).getJSONArray("gateways").getJSONObject(0);
generalConfig = new JSONObject(TestSetupHelper.getInputAsString(getClass().getClassLoader().getResourceAsStream("ptdemo_misconfigured_ipv4ipv6.json"))).getJSONObject(OPENVPN_CONFIGURATION);
vpnConfigGenerator = new VpnConfigGenerator(generalConfig, secrets, gateway, 3, false);
HashMap<Connection.TransportType, VpnProfile> vpnProfiles = vpnConfigGenerator.generateVpnProfiles();
assertTrue(vpnProfiles.containsKey(OBFS4));
assertTrue(vpnProfiles.containsKey(OPENVPN));
assertEquals(1, vpnProfiles.get(OBFS4).mConnections.length);
assertEquals("37.218.247.60/32", vpnProfiles.get(OBFS4).mExcludedRoutes.trim());
}
|
void function() throws Exception { gateway = new JSONObject(TestSetupHelper.getInputAsString(getClass().getClassLoader().getResourceAsStream(STR))).getJSONArray(STR).getJSONObject(0); generalConfig = new JSONObject(TestSetupHelper.getInputAsString(getClass().getClassLoader().getResourceAsStream(STR))).getJSONObject(OPENVPN_CONFIGURATION); vpnConfigGenerator = new VpnConfigGenerator(generalConfig, secrets, gateway, 3, false); HashMap<Connection.TransportType, VpnProfile> vpnProfiles = vpnConfigGenerator.generateVpnProfiles(); assertTrue(vpnProfiles.containsKey(OBFS4)); assertTrue(vpnProfiles.containsKey(OPENVPN)); assertEquals(1, vpnProfiles.get(OBFS4).mConnections.length); assertEquals(STR, vpnProfiles.get(OBFS4).mExcludedRoutes.trim()); }
|
/**
* obfs4 cannot be used with ipv6 addresses currently
*/
|
obfs4 cannot be used with ipv6 addresses currently
|
testGenerateVpnProfile_v3_obfs4IPv4AndIPv6_skipIPv6
|
{
"repo_name": "leapcode/bitmask_android",
"path": "app/src/test/java/se/leap/bitmaskclient/eip/VpnConfigGeneratorTest.java",
"license": "gpl-3.0",
"size": 92481
}
|
[
"de.blinkt.openvpn.VpnProfile",
"de.blinkt.openvpn.core.connection.Connection",
"java.util.HashMap",
"junit.framework.Assert",
"org.json.JSONObject",
"se.leap.bitmaskclient.testutils.TestSetupHelper"
] |
import de.blinkt.openvpn.VpnProfile; import de.blinkt.openvpn.core.connection.Connection; import java.util.HashMap; import junit.framework.Assert; import org.json.JSONObject; import se.leap.bitmaskclient.testutils.TestSetupHelper;
|
import de.blinkt.openvpn.*; import de.blinkt.openvpn.core.connection.*; import java.util.*; import junit.framework.*; import org.json.*; import se.leap.bitmaskclient.testutils.*;
|
[
"de.blinkt.openvpn",
"java.util",
"junit.framework",
"org.json",
"se.leap.bitmaskclient"
] |
de.blinkt.openvpn; java.util; junit.framework; org.json; se.leap.bitmaskclient;
| 101,982
|
protected byte[] compress(byte[] in) {
if(in == null) {
throw new NullPointerException("Can't compress null");
}
ByteArrayOutputStream bos=new ByteArrayOutputStream();
GZIPOutputStream gz=null;
try {
gz = new GZIPOutputStream(bos);
gz.write(in);
} catch (IOException e) {
throw new RuntimeException("IO exception compressing data", e);
} finally {
CloseUtil.close(gz);
CloseUtil.close(bos);
}
byte[] rv=bos.toByteArray();
getLogger().debug("Compressed %d bytes to %d", in.length, rv.length);
return rv;
}
|
byte[] function(byte[] in) { if(in == null) { throw new NullPointerException(STR); } ByteArrayOutputStream bos=new ByteArrayOutputStream(); GZIPOutputStream gz=null; try { gz = new GZIPOutputStream(bos); gz.write(in); } catch (IOException e) { throw new RuntimeException(STR, e); } finally { CloseUtil.close(gz); CloseUtil.close(bos); } byte[] rv=bos.toByteArray(); getLogger().debug(STR, in.length, rv.length); return rv; }
|
/**
* Compress the given array of bytes.
*/
|
Compress the given array of bytes
|
compress
|
{
"repo_name": "qeist/arcus-java-client",
"path": "src/main/java/net/spy/memcached/transcoders/BaseSerializingTranscoder.java",
"license": "apache-2.0",
"size": 5411
}
|
[
"java.io.ByteArrayOutputStream",
"java.io.IOException",
"java.util.zip.GZIPOutputStream",
"net.spy.memcached.compat.CloseUtil"
] |
import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.zip.GZIPOutputStream; import net.spy.memcached.compat.CloseUtil;
|
import java.io.*; import java.util.zip.*; import net.spy.memcached.compat.*;
|
[
"java.io",
"java.util",
"net.spy.memcached"
] |
java.io; java.util; net.spy.memcached;
| 2,119,470
|
@Override
public ProcessGroupStatus getControllerStatus() {
return getGroupStatus(getRootGroupId());
}
|
ProcessGroupStatus function() { return getGroupStatus(getRootGroupId()); }
|
/**
* Returns the status of all components in the controller. This request is
* not in the context of a user so the results will be unfiltered.
*
* @return the component status
*/
|
Returns the status of all components in the controller. This request is not in the context of a user so the results will be unfiltered
|
getControllerStatus
|
{
"repo_name": "PuspenduBanerjee/nifi",
"path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowController.java",
"license": "apache-2.0",
"size": 204852
}
|
[
"org.apache.nifi.controller.status.ProcessGroupStatus"
] |
import org.apache.nifi.controller.status.ProcessGroupStatus;
|
import org.apache.nifi.controller.status.*;
|
[
"org.apache.nifi"
] |
org.apache.nifi;
| 1,179,676
|
public DataNode setRotation_angle(IDataset rotation_angle);
|
DataNode function(IDataset rotation_angle);
|
/**
* Optional rotation angle for the case when the powder diagram has
* been obtained through an omega-2theta scan like from a traditional
* single detector powder diffractometer
* <p>
* <b>Type:</b> NX_FLOAT
* <b>Units:</b> NX_ANGLE
* </p>
*
* @param rotation_angle the rotation_angle
*/
|
Optional rotation angle for the case when the powder diagram has been obtained through an omega-2theta scan like from a traditional single detector powder diffractometer Type: NX_FLOAT Units: NX_ANGLE
|
setRotation_angle
|
{
"repo_name": "xen-0/dawnsci",
"path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXsample.java",
"license": "epl-1.0",
"size": 49075
}
|
[
"org.eclipse.dawnsci.analysis.api.tree.DataNode",
"org.eclipse.january.dataset.IDataset"
] |
import org.eclipse.dawnsci.analysis.api.tree.DataNode; import org.eclipse.january.dataset.IDataset;
|
import org.eclipse.dawnsci.analysis.api.tree.*; import org.eclipse.january.dataset.*;
|
[
"org.eclipse.dawnsci",
"org.eclipse.january"
] |
org.eclipse.dawnsci; org.eclipse.january;
| 1,097,888
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.