method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Verifies that a GET with only the context (named graph) bound returns
 * every statement in that graph. Skipped unless running in quads mode.
 */
public void test_GET_accessPath_delete_c() throws Exception {
    if (TestMode.quads != getTestMode()) {
        return;
    }
    doInsertbyURL("POST", packagePath + "test_delete_by_access_path.trig");
    // Only the context position is bound; s/p/o remain wildcards.
    final URIImpl context = new URIImpl("http://www.bigdata.com/");
    final long nstatements = countResults(doGetWithAccessPath(
            null, // s
            null, // p
            null, // o
            context));
    assertEquals(3, nstatements);
}
|
void function() throws Exception { if(TestMode.quads != getTestMode()) return; doInsertbyURL("POST", packagePath + STR); final long result = countResults(doGetWithAccessPath( null, null, null, new URIImpl("http: )); assertEquals(3, result); }
|
/**
* Get everything in a named graph (context).
*/
|
Get everything in a named graph (context)
|
test_GET_accessPath_delete_c
|
{
"repo_name": "smalyshev/blazegraph",
"path": "bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestBigdataSailRemoteRepository.java",
"license": "gpl-2.0",
"size": 24471
}
|
[
"org.openrdf.model.impl.URIImpl"
] |
import org.openrdf.model.impl.URIImpl;
|
import org.openrdf.model.impl.*;
|
[
"org.openrdf.model"
] |
org.openrdf.model;
| 856,658
|
/**
 * Sets the test rotation keyframes: sweeps from 0 to 360 degrees about the
 * (1,1,1) axis over {@code this.time}, sampled in {@code this.timeDivision}
 * steps, feeding each keyframe to {@code this.spt.setRotation}.
 */
private void setRotation()
{
    Quaternion rotRef = new Quaternion();
    float rotation=0;
    //iterate over the range, go over by one step to accommodate mathematical error
    for(float timeElp=0;timeElp<(this.time+(this.time/this.timeDivision));timeElp+=(this.time/this.timeDivision))
    {
        rotation = (timeElp/this.time)*360;
        if(rotation>360)
            rotation=360;//lock to 360
        // degrees -> radians for fromAngleAxis
        rotRef.fromAngleAxis((float)(Math.PI/180)*rotation,new Vector3f(1,1,1));
        this.spt.setRotation(0,timeElp,rotRef);
    }
}
|
void function() { Quaternion rotRef = new Quaternion(); float rotation=0; for(float timeElp=0;timeElp<(this.time+(this.time/this.timeDivision));timeElp+=(this.time/this.timeDivision)) { rotation = (timeElp/this.time)*360; if(rotation>360) rotation=360; rotRef.fromAngleAxis((float)(Math.PI/180)*rotation,new Vector3f(1,1,1)); this.spt.setRotation(0,timeElp,rotRef); } }
|
/**
* Sets the test rotation of 0 to 360 degrees, all axis.
*/
|
Sets the test rotation of 0 to 360 degrees, all axis
|
setRotation
|
{
"repo_name": "tectronics/xenogeddon",
"path": "src/jmetest/renderer/TestSpatialTransform.java",
"license": "gpl-2.0",
"size": 8831
}
|
[
"com.jme.math.Quaternion",
"com.jme.math.Vector3f"
] |
import com.jme.math.Quaternion; import com.jme.math.Vector3f;
|
import com.jme.math.*;
|
[
"com.jme.math"
] |
com.jme.math;
| 2,627,074
|
/**
 * Get the set of types relevant to a word-form suffix.
 *
 * @param suffix the suffix of the word form
 * @return the set of types relevant to word forms with this suffix
 */
Set<String> getTypeBySuffix(String suffix);
|
Set<String> getTypeBySuffix(String suffix);
|
/**
* Get the set of types relevant to a word-form suffix
*
* @param suffix
* the suffix of the wf
* @return the set of types relevant to the word-form with this suffix
*/
|
Get the set of types relevant to a word-form suffix
|
getTypeBySuffix
|
{
"repo_name": "SONIAGroup/S.O.N.I.A.",
"path": "GATE_Developer_8.0/plugins/Lang_Russian/src/com/ontotext/russie/morph/Lemma.java",
"license": "gpl-2.0",
"size": 3458
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 36,244
|
/**
 * Gets the PID for the SeLion-Grid (main) process.
 * <p>
 * Reaches into the {@code sun.management} implementation via reflection
 * (the private {@code jvm} field of the runtime MXBean and its private
 * {@code getProcessId} method) because using JNA failed on RHEL5.
 * NOTE(review): this relies on JDK-internal layout and will break on JVMs
 * where the field/method differs — confirm the supported JDK range.
 *
 * @return the PID as an int
 * @throws ProcessHandlerException if the reflective lookup or invocation fails
 */
protected int getCurrentProcessID() throws ProcessHandlerException {
    int pid;
    // Not ideal but using JNA failed on RHEL5.
    RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
    Field jvm = null;
    try {
        jvm = runtime.getClass().getDeclaredField("jvm");
        jvm.setAccessible(true);
        VMManagement mgmt = (VMManagement) jvm.get(runtime);
        Method pid_method = mgmt.getClass().getDeclaredMethod("getProcessId");
        pid_method.setAccessible(true);
        pid = (Integer) pid_method.invoke(mgmt);
    } catch (NoSuchFieldException | NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
        throw new ProcessHandlerException(e);
    }
    return pid;
}
|
int function() throws ProcessHandlerException { int pid; RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean(); Field jvm = null; try { jvm = runtime.getClass().getDeclaredField("jvm"); jvm.setAccessible(true); VMManagement mgmt = (VMManagement) jvm.get(runtime); Method pid_method = mgmt.getClass().getDeclaredMethod(STR); pid_method.setAccessible(true); pid = (Integer) pid_method.invoke(mgmt); } catch (NoSuchFieldException NoSuchMethodException IllegalAccessException InvocationTargetException e) { throw new ProcessHandlerException(e); } return pid; }
|
/**
* Gets the PID for the SeLion-Grid (main) process
*
* @return the PID as an int
* @throws ProcessHandlerException
*/
|
Gets the PID for the SeLion-Grid (main) process
|
getCurrentProcessID
|
{
"repo_name": "shelar/SeLion",
"path": "server/src/main/java/com/paypal/selion/utils/process/AbstractProcessHandler.java",
"license": "apache-2.0",
"size": 6965
}
|
[
"java.lang.management.ManagementFactory",
"java.lang.management.RuntimeMXBean",
"java.lang.reflect.Field",
"java.lang.reflect.InvocationTargetException",
"java.lang.reflect.Method"
] |
import java.lang.management.ManagementFactory; import java.lang.management.RuntimeMXBean; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method;
|
import java.lang.management.*; import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 497,713
|
/**
 * Total submitted for grade: delegates to the persistence layer and returns
 * a HashMap keyed by published assessment id with the count of submissions
 * for grading as values.
 *
 * @param agentId the agent whose submissions are counted
 * @return HashMap (publishedAssessmentId -> totalSubmittedForGrade)
 */
public HashMap getTotalSubmissionPerAssessment(String agentId) {
    return PersistenceService.getInstance()
            .getPublishedAssessmentFacadeQueries()
            .getTotalSubmissionPerAssessment(agentId);
}
|
HashMap function(String agentId) { return PersistenceService.getInstance(). getPublishedAssessmentFacadeQueries(). getTotalSubmissionPerAssessment(agentId); }
|
/** total submitted for grade
* returns HashMap (Long publishedAssessmentId, Integer totalSubmittedForGrade);
*/
|
total submitted for grade returns HashMap (Long publishedAssessmentId, Integer totalSubmittedForGrade)
|
getTotalSubmissionPerAssessment
|
{
"repo_name": "hackbuteer59/sakai",
"path": "samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/services/assessment/PublishedAssessmentService.java",
"license": "apache-2.0",
"size": 30413
}
|
[
"java.util.HashMap",
"org.sakaiproject.tool.assessment.services.PersistenceService"
] |
import java.util.HashMap; import org.sakaiproject.tool.assessment.services.PersistenceService;
|
import java.util.*; import org.sakaiproject.tool.assessment.services.*;
|
[
"java.util",
"org.sakaiproject.tool"
] |
java.util; org.sakaiproject.tool;
| 2,263,008
|
/**
 * Sets the <code>id</code> attribute on the underlying node.
 *
 * @param id the value for the attribute
 * @return this instance, for fluent chaining
 */
public UserDataConstraintType<T> id(String id) {
    childNode.attribute("id", id);
    return this;
}
|
UserDataConstraintType<T> function(String id) { childNode.attribute("id", id); return this; }
|
/**
* Sets the <code>id</code> attribute
* @param id the value for the attribute <code>id</code>
* @return the current instance of <code>UserDataConstraintType<T></code>
*/
|
Sets the <code>id</code> attribute
|
id
|
{
"repo_name": "forge/javaee-descriptors",
"path": "impl/src/main/java/org/jboss/shrinkwrap/descriptor/impl/webapp25/UserDataConstraintTypeImpl.java",
"license": "epl-1.0",
"size": 6509
}
|
[
"org.jboss.shrinkwrap.descriptor.api.webapp25.UserDataConstraintType"
] |
import org.jboss.shrinkwrap.descriptor.api.webapp25.UserDataConstraintType;
|
import org.jboss.shrinkwrap.descriptor.api.webapp25.*;
|
[
"org.jboss.shrinkwrap"
] |
org.jboss.shrinkwrap;
| 1,910,101
|
/**
 * Enables the count option (sets it to true) for this request.
 *
 * @return the updated request, for fluent chaining
 */
@Nonnull
public TermCollectionRequest count() {
    addCountOption(true);
    return this;
}
|
TermCollectionRequest function() { addCountOption(true); return this; }
|
/**
* Sets the count value to true for the request
*
* @return the updated request
*/
|
Sets the count value to true for the request
|
count
|
{
"repo_name": "microsoftgraph/msgraph-sdk-java",
"path": "src/main/java/com/microsoft/graph/termstore/requests/TermCollectionRequest.java",
"license": "mit",
"size": 5444
}
|
[
"com.microsoft.graph.termstore.requests.TermCollectionRequest"
] |
import com.microsoft.graph.termstore.requests.TermCollectionRequest;
|
import com.microsoft.graph.termstore.requests.*;
|
[
"com.microsoft.graph"
] |
com.microsoft.graph;
| 923,416
|
return SINGLETON;
}
/** Granularity of resource tracking: disabled, count-only, or full reference tracking. */
public enum TrackingLevel {
    NONE, COUNTER, REFERENCES
}
// Count of outstanding resources; null when tracking is NONE.
private final AtomicInteger mCounter;
// Per-resource reference tracker; non-null only at REFERENCES level.
private final ReferenceTracker mReferenceTracker;
/**
 * Initializes tracking state according to TRACKING_LEVEL: NONE allocates
 * nothing; COUNTER keeps only a counter; REFERENCES keeps a counter plus a
 * per-resource reference tracker. COUNTER and REFERENCES also register a
 * JVM shutdown hook that logs unclosed resources at shutdown.
 */
private DebugResourceTracker() {
    switch(TRACKING_LEVEL) {
        case NONE: {
            mReferenceTracker = null;
            mCounter = null;
            break;
        }
        case COUNTER: {
            mReferenceTracker = null;
            mCounter = new AtomicInteger(0);
            LOG.debug("Registering hook to log number of unclosed resources at shutdown.");
            Runtime.getRuntime().addShutdownHook(new ShutdownHook());
            break;
        }
        case REFERENCES: {
            mReferenceTracker = new ReferenceTracker();
            mCounter = new AtomicInteger(0);
            LOG.debug("Registering hook to log details of unclosed resources at shutdown.");
            Runtime.getRuntime().addShutdownHook(new ShutdownHook());
            break;
        }
        // Defensive: keeps the constructor correct if a new level is added
        // without updating this switch.
        default: throw new InternalKijiError(String.format(
            "Unknown DebugResourceTracker.TrackingType: %s", TRACKING_LEVEL));
    }
}
|
return SINGLETON; } public enum TrackingLevel { NONE, COUNTER, REFERENCES } private final AtomicInteger mCounter; private final ReferenceTracker mReferenceTracker; private DebugResourceTracker() { switch(TRACKING_LEVEL) { case NONE: { mReferenceTracker = null; mCounter = null; break; } case COUNTER: { mReferenceTracker = null; mCounter = new AtomicInteger(0); LOG.debug(STR); Runtime.getRuntime().addShutdownHook(new ShutdownHook()); break; } case REFERENCES: { mReferenceTracker = new ReferenceTracker(); mCounter = new AtomicInteger(0); LOG.debug(STR); Runtime.getRuntime().addShutdownHook(new ShutdownHook()); break; } default: throw new InternalKijiError(String.format( STR, TRACKING_LEVEL)); } }
|
/**
* Get the singleton DebugResourceTracker.
*
* @return the singleton DebugResourceTracker.
*/
|
Get the singleton DebugResourceTracker
|
get
|
{
"repo_name": "rpinzon/kiji-schema",
"path": "kiji-schema/src/main/java/org/kiji/schema/util/DebugResourceTracker.java",
"license": "apache-2.0",
"size": 17630
}
|
[
"java.util.concurrent.atomic.AtomicInteger",
"org.kiji.schema.InternalKijiError"
] |
import java.util.concurrent.atomic.AtomicInteger; import org.kiji.schema.InternalKijiError;
|
import java.util.concurrent.atomic.*; import org.kiji.schema.*;
|
[
"java.util",
"org.kiji.schema"
] |
java.util; org.kiji.schema;
| 1,948,685
|
/**
 * Returns the CDAO resource identified by CDAO_0000003
 * (http://purl.obolibrary.org/obo/CDAO_0000003).
 *
 * @return the Resource in the CDAO namespace with that local id
 */
public static Resource CoordinatePoint() {
    final String localId = "CDAO_0000003";
    return _namespace_CDAO(localId);
}
|
static Resource function() { return _namespace_CDAO(STR); }
|
/**
* -- No comment or description provided. --
* (http://purl.obolibrary.org/obo/CDAO_0000003)
*/
|
-- No comment or description provided. -- (HREF)
|
CoordinatePoint
|
{
"repo_name": "BioInterchange/BioInterchange",
"path": "supplemental/java/biointerchange/src/main/java/org/biointerchange/vocabulary/CDAO.java",
"license": "mit",
"size": 85675
}
|
[
"com.hp.hpl.jena.rdf.model.Resource"
] |
import com.hp.hpl.jena.rdf.model.Resource;
|
import com.hp.hpl.jena.rdf.model.*;
|
[
"com.hp.hpl"
] |
com.hp.hpl;
| 1,656,134
|
/**
 * Called before the XML is potentially saved.
 * <p>
 * For reorientate modes 0 and 1 the registrations this task added are kept,
 * so the SpimData is reported as changed. For mode 2 the transform that was
 * prepended (index 0 of each view's transform list) is removed again for
 * every view it was applied to. NOTE(review): the meaning of the reorientate
 * codes 0/1/2 is inferred from the comments here — confirm against where the
 * field is assigned.
 *
 * @return whether the SpimData object was changed
 */
@Override
public boolean cleanUp()
{
    if ( reorientate == 0 || reorientate == 1 )
    {
        // the spimdata registrations were changed
        return true;
    }
    else if ( reorientate == 2 )
    {
        // remove the registrations we added
        if ( viewIdsToApply == null )
        {
            IOFunctions.println( "Something went wrong, the viewIdsToApply list is null." );
        }
        else
        {
            for ( final ViewId viewId : viewIdsToApply )
            {
                final ViewDescription vd = spimData.getSequenceDescription().getViewDescription( viewId );
                if ( !vd.isPresent() )
                    continue;
                // get the registration
                final ViewRegistration r = spimData.getViewRegistrations().getViewRegistration( viewId );
                final List< ViewTransform > vtl = r.getTransformList();
                // our transform was prepended, so it sits at index 0
                vtl.remove( 0 );
                r.updateModel();
            }
        }
    }
    return this.changedSpimDataObject;
}
|
boolean function() { if ( reorientate == 0 reorientate == 1 ) { return true; } else if ( reorientate == 2 ) { if ( viewIdsToApply == null ) { IOFunctions.println( STR ); } else { for ( final ViewId viewId : viewIdsToApply ) { final ViewDescription vd = spimData.getSequenceDescription().getViewDescription( viewId ); if ( !vd.isPresent() ) continue; final ViewRegistration r = spimData.getViewRegistrations().getViewRegistration( viewId ); final List< ViewTransform > vtl = r.getTransformList(); vtl.remove( 0 ); r.updateModel(); } } } return this.changedSpimDataObject; }
|
/**
* Called before the XML is potentially saved
*/
|
Called before the XML is potentially saved
|
cleanUp
|
{
"repo_name": "hkmoon/SPIM_Registration",
"path": "src/main/java/spim/process/fusion/boundingbox/AutomaticReorientation.java",
"license": "gpl-2.0",
"size": 20209
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,846,979
|
/**
 * Move a property defined by assignment to `.prototype` or `.prototype.propName`.
 *
 * <pre><code>
 * Foo.prototype.propName = function() {};
 * </code></pre>
 *
 * With stubs disabled the whole assignment statement is detached and moved
 * to the front of the destination chunk. With stubs enabled, the original
 * site is rewritten to assign a stub call and a cloned statement assigning
 * the unstubbed function is placed in the destination chunk.
 *
 * @param destParent parent node in the destination chunk that receives the definition
 * @param functionNode the FUNCTION node on the right-hand side of the assignment
 */
private void movePrototypeDotMethodAssignment(Node destParent, Node functionNode) {
    checkState(functionNode.isFunction(), functionNode);
    Node assignNode = functionNode.getParent();
    checkState(assignNode.isAssign() && functionNode.isSecondChildOf(assignNode), assignNode);
    Node definitionStatement = assignNode.getParent();
    checkState(definitionStatement.isExprResult(), assignNode);
    if (noStubFunctions) {
        // Remove the definition statement from its current location
        Node assignStatementParent = definitionStatement.getParent();
        definitionStatement.detach();
        compiler.reportChangeToEnclosingScope(assignStatementParent);
        // Prepend definition to new chunk
        // Foo.prototype.propName = function() {};
        destParent.addChildToFront(definitionStatement);
        compiler.reportChangeToEnclosingScope(destParent);
    } else {
        int stubId = idGenerator.newId();
        // replace function definition with temporary placeholder so we can clone the whole
        // assignment statement without cloning the function definition itself.
        Node originalDefinitionPlaceholder = astFactory.createEmpty();
        functionNode.replaceWith(originalDefinitionPlaceholder);
        Node newDefinitionStatement = definitionStatement.cloneTree();
        Node newDefinitionPlaceholder =
            newDefinitionStatement // EXPR_RESULT
                .getOnlyChild() // ASSIGN
                .getLastChild(); // EMPTY RHS node
        // convert original assignment statement to
        // owner.prototype.propName = JSCompiler_stubMethod(0);
        Node stubCall = createStubCall(functionNode, stubId);
        originalDefinitionPlaceholder.replaceWith(stubCall);
        compiler.reportChangeToEnclosingScope(definitionStatement);
        // Prepend new definition to new chunk
        // Foo.prototype.propName = JSCompiler_unstubMethod(0, function() {});
        Node unstubCall = createUnstubCall(functionNode, stubId);
        newDefinitionPlaceholder.replaceWith(unstubCall);
        destParent.addChildToFront(newDefinitionStatement);
        compiler.reportChangeToEnclosingScope(destParent);
    }
}
|
void function(Node destParent, Node functionNode) { checkState(functionNode.isFunction(), functionNode); Node assignNode = functionNode.getParent(); checkState(assignNode.isAssign() && functionNode.isSecondChildOf(assignNode), assignNode); Node definitionStatement = assignNode.getParent(); checkState(definitionStatement.isExprResult(), assignNode); if (noStubFunctions) { Node assignStatementParent = definitionStatement.getParent(); definitionStatement.detach(); compiler.reportChangeToEnclosingScope(assignStatementParent); destParent.addChildToFront(definitionStatement); compiler.reportChangeToEnclosingScope(destParent); } else { int stubId = idGenerator.newId(); Node originalDefinitionPlaceholder = astFactory.createEmpty(); functionNode.replaceWith(originalDefinitionPlaceholder); Node newDefinitionStatement = definitionStatement.cloneTree(); Node newDefinitionPlaceholder = newDefinitionStatement .getOnlyChild() .getLastChild(); Node stubCall = createStubCall(functionNode, stubId); originalDefinitionPlaceholder.replaceWith(stubCall); compiler.reportChangeToEnclosingScope(definitionStatement); Node unstubCall = createUnstubCall(functionNode, stubId); newDefinitionPlaceholder.replaceWith(unstubCall); destParent.addChildToFront(newDefinitionStatement); compiler.reportChangeToEnclosingScope(destParent); } }
|
/**
* Move a property defined by assignment to `.prototype` or `.prototype.propName`.
*
* <pre><code>
* Foo.prototype.propName = function() {};
* </code></pre>
*/
|
Move a property defined by assignment to `.prototype` or `.prototype.propName`. <code><code> Foo.prototype.propName = function() {}; </code></code>
|
movePrototypeDotMethodAssignment
|
{
"repo_name": "vobruba-martin/closure-compiler",
"path": "src/com/google/javascript/jscomp/CrossChunkMethodMotion.java",
"license": "apache-2.0",
"size": 23684
}
|
[
"com.google.common.base.Preconditions",
"com.google.javascript.rhino.Node"
] |
import com.google.common.base.Preconditions; import com.google.javascript.rhino.Node;
|
import com.google.common.base.*; import com.google.javascript.rhino.*;
|
[
"com.google.common",
"com.google.javascript"
] |
com.google.common; com.google.javascript;
| 1,593,123
|
/**
 * Get the block's damage value (for use with pick block).
 * The wood type is halved — presumably two block variants share each wood
 * type; confirm against getWoodType() semantics.
 */
@Override
public int getDamageValue(World world, int x, int y, int z)
{
    final int woodType = getWoodType();
    return woodType / 2;
}
|
int function(World world, int x, int y, int z) { return getWoodType() / 2; }
|
/**
* Get the block's damage value (for use with pick block).
*/
|
Get the block's damage value (for use with pick block)
|
getDamageValue
|
{
"repo_name": "raymondbh/TFCraft",
"path": "src/Common/com/bioxx/tfc/Blocks/Vanilla/BlockCustomDoor.java",
"license": "gpl-3.0",
"size": 14464
}
|
[
"net.minecraft.world.World"
] |
import net.minecraft.world.World;
|
import net.minecraft.world.*;
|
[
"net.minecraft.world"
] |
net.minecraft.world;
| 2,230,629
|
/**
 * Test that the getters and setters for Date, Time and Timestamp work as
 * expected when given a Calendar argument, across every pairing of several
 * time zones. Test case for DERBY-4615.
 */
public void testTimeAndDateWithCalendar() throws SQLException {
    // Calendars used with the setter methods: the local zone plus a few
    // fixed zones.
    Calendar[] setterCals = {
        Calendar.getInstance(), // local calendar
        Calendar.getInstance(TimeZone.getTimeZone("GMT")),
        Calendar.getInstance(TimeZone.getTimeZone("Europe/Oslo")),
        Calendar.getInstance(TimeZone.getTimeZone("Asia/Hong_Kong")),
    };
    // Same zones for the getters, but cloned instances so the setter and
    // getter calendars cannot interfere with each other.
    Calendar[] getterCals = new Calendar[setterCals.length];
    for (int i = 0; i < setterCals.length; i++) {
        getterCals[i] = (Calendar) setterCals[i].clone();
    }
    // Exercise every setter/getter calendar combination.
    for (Calendar setterCal : setterCals) {
        for (Calendar getterCal : getterCals) {
            testTimeAndDateWithCalendar(setterCal, getterCal);
        }
    }
}
|
void function() throws SQLException { Calendar[] cal1 = { Calendar.getInstance(), Calendar.getInstance(TimeZone.getTimeZone("GMT")), Calendar.getInstance(TimeZone.getTimeZone(STR)), Calendar.getInstance(TimeZone.getTimeZone(STR)), }; Calendar[] cal2 = (Calendar[]) cal1.clone(); for (int i = 0; i < cal2.length; i++) { cal2[i] = (Calendar) cal2[i].clone(); } for (int i = 0; i < cal1.length; i++) { for (int j = 0; j < cal2.length; j++) { testTimeAndDateWithCalendar(cal1[i], cal2[j]); } } }
|
/**
* Test that the getters and setters for Date, Time and Timestamp work as
* expected when given a Calendar argument. Test case for DERBY-4615.
*/
|
Test that the getters and setters for Date, Time and Timestamp work as expected when given a Calendar argument. Test case for DERBY-4615
|
testTimeAndDateWithCalendar
|
{
"repo_name": "splicemachine/spliceengine",
"path": "db-testing/src/test/java/com/splicemachine/dbTesting/functionTests/tests/jdbcapi/CallableTest.java",
"license": "agpl-3.0",
"size": 43760
}
|
[
"java.sql.SQLException",
"java.util.Calendar",
"java.util.TimeZone"
] |
import java.sql.SQLException; import java.util.Calendar; import java.util.TimeZone;
|
import java.sql.*; import java.util.*;
|
[
"java.sql",
"java.util"
] |
java.sql; java.util;
| 1,559,336
|
/**
 * Check package access on the proxy interfaces that the given proxy class
 * implements. A no-op when no security manager is installed or when the
 * class is not a proxy class.
 *
 * @param clazz Proxy class object
 */
public static void checkProxyPackageAccess(Class<?> clazz) {
    final SecurityManager sm = System.getSecurityManager();
    if (sm == null || !Proxy.isProxyClass(clazz)) {
        return;
    }
    for (Class<?> proxyInterface : clazz.getInterfaces()) {
        checkPackageAccess(proxyInterface);
    }
}
|
static void function(Class<?> clazz) { SecurityManager s = System.getSecurityManager(); if (s != null) { if (Proxy.isProxyClass(clazz)) { for (Class<?> intf : clazz.getInterfaces()) { checkPackageAccess(intf); } } } }
|
/**
* Check package access on the proxy interfaces that the given proxy class
* implements.
*
* @param clazz Proxy class object
*/
|
Check package access on the proxy interfaces that the given proxy class implements
|
checkProxyPackageAccess
|
{
"repo_name": "google/desugar_jdk_libs",
"path": "jdk11/src/libcore/ojluni/src/main/java/sun/reflect/misc/ReflectUtil.java",
"license": "gpl-2.0",
"size": 9917
}
|
[
"java.lang.reflect.Proxy"
] |
import java.lang.reflect.Proxy;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 801,456
|
/**
 * Validate that it is acceptable that the given module classes with the same
 * module name co-exist: they must be related by assignability and carry
 * identical documentation.
 *
 * @param one first module class carrying a {@link StarlarkBuiltin} annotation
 * @param two second module class carrying a {@link StarlarkBuiltin} annotation
 * @throws IllegalStateException if the classes are unrelated, or related but
 *     documented differently
 */
private static void validateCompatibleModules(Class<?> one, Class<?> two) {
    StarlarkBuiltin annotationOne = one.getAnnotation(StarlarkBuiltin.class);
    StarlarkBuiltin annotationTwo = two.getAnnotation(StarlarkBuiltin.class);
    boolean related = one.isAssignableFrom(two) || two.isAssignableFrom(one);
    if (!related) {
        throw new IllegalStateException(
            String.format(
                "%s and %s are unrelated modules with documentation for '%s'",
                one, two, annotationOne.name()));
    }
    if (!annotationOne.doc().equals(annotationTwo.doc())) {
        throw new IllegalStateException(
            String.format(
                "%s and %s are related modules but have mismatching documentation for '%s'",
                one, two, annotationOne.name()));
    }
}
|
static void function(Class<?> one, Class<?> two) { StarlarkBuiltin moduleOne = one.getAnnotation(StarlarkBuiltin.class); StarlarkBuiltin moduleTwo = two.getAnnotation(StarlarkBuiltin.class); if (one.isAssignableFrom(two) two.isAssignableFrom(one)) { if (!moduleOne.doc().equals(moduleTwo.doc())) { throw new IllegalStateException( String.format( STR, one, two, moduleOne.name())); } } else { throw new IllegalStateException( String.format( STR, one, two, moduleOne.name())); } }
|
/**
* Validate that it is acceptable that the given module classes with the same module name
* co-exist.
*/
|
Validate that it is acceptable that the given module classes with the same module name co-exist
|
validateCompatibleModules
|
{
"repo_name": "davidzchen/bazel",
"path": "src/main/java/com/google/devtools/build/docgen/StarlarkDocumentationCollector.java",
"license": "apache-2.0",
"size": 12517
}
|
[
"net.starlark.java.annot.StarlarkBuiltin"
] |
import net.starlark.java.annot.StarlarkBuiltin;
|
import net.starlark.java.annot.*;
|
[
"net.starlark.java"
] |
net.starlark.java;
| 1,098,758
|
/**
 * Returns an unmodifiable view of the intervals for the plot.
 *
 * @return an unmodifiable list of intervals.
 *
 * @see #addInterval(MeterInterval)
 */
public List getIntervals() {
    final List readOnlyView = Collections.unmodifiableList(this.intervals);
    return readOnlyView;
}
|
List function() { return Collections.unmodifiableList(this.intervals); }
|
/**
* Returns an unmodifiable list of the intervals for the plot.
*
* @return A list.
*
* @see #addInterval(MeterInterval)
*/
|
Returns an unmodifiable list of the intervals for the plot
|
getIntervals
|
{
"repo_name": "apetresc/JFreeChart",
"path": "src/main/java/org/jfree/chart/plot/MeterPlot.java",
"license": "lgpl-2.1",
"size": 44804
}
|
[
"java.util.Collections",
"java.util.List"
] |
import java.util.Collections; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,605,347
|
/**
 * Logs an informational message (id 4) indicating the service, represented
 * by the {@code serviceName} parameter, is stopping.
 *
 * @param serviceName the name of the service.
 * @param name an additional name for the service.
 */
@LogMessage(level = INFO)
@Message(id = 4, value = "Stopping %s Service '%s'")
void stoppingService(String serviceName, String name);
//
// @LogMessage(level = ERROR)
// @Message(id = 5, value = "Could not load default persistence provider adaptor module. Management attributes will not be registered for the adaptor")
// void errorPreloadingDefaultProviderAdaptor(@Cause Throwable cause);
|
@LogMessage(level = INFO) @Message(id = 4, value = STR) void stoppingService(String serviceName, String name);
|
/**
* Logs an informational message indicating the service, represented by the {@code serviceName} parameter, is
* stopping.
*
* @param serviceName the name of the service.
* @param name an additional name for the service.
*/
|
Logs an informational message indicating the service, represented by the serviceName parameter, is stopping
|
stoppingService
|
{
"repo_name": "jstourac/wildfly",
"path": "jpa/subsystem/src/main/java/org/jboss/as/jpa/messages/JpaLogger.java",
"license": "lgpl-2.1",
"size": 35947
}
|
[
"org.jboss.logging.annotations.LogMessage",
"org.jboss.logging.annotations.Message"
] |
import org.jboss.logging.annotations.LogMessage; import org.jboss.logging.annotations.Message;
|
import org.jboss.logging.annotations.*;
|
[
"org.jboss.logging"
] |
org.jboss.logging;
| 186,187
|
/**
 * Test method for
 * {@link org.pathwayeditor.notationsubsystem.toolkit.definition.LinkConnectionRules#LinkConnectionRules()}.
 * Tests the uninitialised state: before any rules are configured, no source
 * or target type may be reported as valid.
 */
@Test
public final void testLinkConnectionRules() {
    assertFalse("no valid source set, yet", this.testInstance.isValidSource(typeOne));
    assertFalse("no valid target set, yet", this.testInstance.isValidTarget(typeOne, typeThree));
}
|
final void function() { assertFalse(STR, this.testInstance.isValidSource(typeOne)); assertFalse(STR, this.testInstance.isValidTarget(typeOne, typeThree)); }
|
/**
* Test method for {@link org.pathwayeditor.notationsubsystem.toolkit.definition.LinkConnectionRules#LinkConnectionRules()}.
* Test uninitialised state
*/
|
Test method for <code>org.pathwayeditor.notationsubsystem.toolkit.definition.LinkConnectionRules#LinkConnectionRules()</code>. Test uninitialised state
|
testLinkConnectionRules
|
{
"repo_name": "stumoodie/NotationSubsystemUtils",
"path": "test/org/pathwayeditor/notationsubsystem/toolkit/definition/LinkConnectionRulesTest.java",
"license": "apache-2.0",
"size": 5697
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 496,630
|
/**
 * Returns the create date of this KB comment.
 *
 * @return the create date of this KB comment
 */
@Override
public Date getCreateDate();
|
Date function();
|
/**
* Returns the create date of this k b comment.
*
* @return the create date of this k b comment
*/
|
Returns the create date of this k b comment
|
getCreateDate
|
{
"repo_name": "codyhoag/blade.tools",
"path": "blade.migrate.liferay70/projects/knowledge-base-portlet-6.2.x/docroot/WEB-INF/service/com/liferay/knowledgebase/model/KBCommentModel.java",
"license": "apache-2.0",
"size": 8105
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 256,276
|
/**
 * Setter for address.
 *
 * @param address the billing address to set
 * @return this instance, for fluent chaining
 */
public BillingInfo setAddress(Address address) {
    this.address = address;
    return this;
}
|
BillingInfo function(Address address) { this.address = address; return this; }
|
/**
* Setter for address
*/
|
Setter for address
|
setAddress
|
{
"repo_name": "vincent-wen/ams",
"path": "src/main/java/com/paypal/api/payments/BillingInfo.java",
"license": "gpl-2.0",
"size": 2934
}
|
[
"com.paypal.api.payments.Address"
] |
import com.paypal.api.payments.Address;
|
import com.paypal.api.payments.*;
|
[
"com.paypal.api"
] |
com.paypal.api;
| 2,608,588
|
/**
 * Sets the list of handlers for this InvocationContext.
 *
 * @param handlers the handlers to install
 */
public void setHandlers(List<Handler> handlers) {
    this.handlers = handlers;
}
|
void function(List<Handler> handlers) { this.handlers = handlers; }
|
/**
* Sets the list of hanlders for this InvocationContext
*
* @param list
*/
|
Sets the list of hanlders for this InvocationContext
|
setHandlers
|
{
"repo_name": "arunasujith/wso2-axis2",
"path": "modules/jaxws/src/org/apache/axis2/jaxws/core/InvocationContextImpl.java",
"license": "apache-2.0",
"size": 3265
}
|
[
"java.util.List",
"javax.xml.ws.handler.Handler"
] |
import java.util.List; import javax.xml.ws.handler.Handler;
|
import java.util.*; import javax.xml.ws.handler.*;
|
[
"java.util",
"javax.xml"
] |
java.util; javax.xml;
| 2,036,745
|
/**
 * Number (decayed) of elements added to this quantile digest.
 *
 * @return the weighted count scaled by the decay weight at the current time
 */
public synchronized double getCount() {
    final long nowSeconds = TimeUnit.NANOSECONDS.toSeconds(ticker.read());
    return weightedCount / weight(nowSeconds);
}
|
synchronized double function() { return weightedCount / weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read())); }
|
/**
* Number (decayed) of elements added to this quantile digest
*/
|
Number (decayed) of elements added to this quantile digest
|
getCount
|
{
"repo_name": "11xor6/airlift",
"path": "stats/src/main/java/io/airlift/stats/QuantileDigest.java",
"license": "apache-2.0",
"size": 25121
}
|
[
"java.util.concurrent.TimeUnit"
] |
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.*;
|
[
"java.util"
] |
java.util;
| 1,066,025
|
/**
 * Returns the Config used by Robotium, logging the call when command
 * logging is enabled in that config.
 *
 * @return the Config used by Robotium
 */
public Config getConfig() {
    if (config.commandLogging) {
        Log.d(config.commandLoggingTag, "getConfig()");
    }
    return config;
}
|
Config function(){ if(config.commandLogging){ Log.d(config.commandLoggingTag, STR); } return config; }
|
/**
* Returns the Config used by Robotium.
*
* @return the Config used by Robotium
*/
|
Returns the Config used by Robotium
|
getConfig
|
{
"repo_name": "darker50/robotium",
"path": "robotium-solo/src/main/java/com/robotium/solo/Solo.java",
"license": "apache-2.0",
"size": 124742
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 1,972,415
|
/**
 * Set the value of Divisible, flagging the object as modified only when the
 * value actually changes.
 *
 * @param v new value
 */
public void setDivisible(String v) {
    final boolean changed = !ObjectUtils.equals(this.divisible, v);
    if (changed) {
        this.divisible = v;
        setModified(true);
    }
}
|
void function(String v) { if (!ObjectUtils.equals(this.divisible, v)) { this.divisible = v; setModified(true); } }
|
/**
* Set the value of Divisible
*
* @param v new value
*/
|
Set the value of Divisible
|
setDivisible
|
{
"repo_name": "trackplus/Genji",
"path": "src/main/java/com/aurel/track/persist/BaseTBasket.java",
"license": "gpl-3.0",
"size": 41013
}
|
[
"org.apache.commons.lang.ObjectUtils"
] |
import org.apache.commons.lang.ObjectUtils;
|
import org.apache.commons.lang.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 2,650,715
|
/**
 * Draws the border by filling in the reserved space.
 *
 * @param g2 the graphics device.
 * @param area the area.
 */
public void draw(Graphics2D g2, Rectangle2D area);
|
void function(Graphics2D g2, Rectangle2D area);
|
/**
* Draws the border by filling in the reserved space (in black).
*
* @param g2 the graphics device.
* @param area the area.
*/
|
Draws the border by filling in the reserved space (in black)
|
draw
|
{
"repo_name": "akardapolov/ASH-Viewer",
"path": "jfreechart-fse/src/main/java/org/jfree/chart/block/BlockFrame.java",
"license": "gpl-3.0",
"size": 2384
}
|
[
"java.awt.Graphics2D",
"java.awt.geom.Rectangle2D"
] |
import java.awt.Graphics2D; import java.awt.geom.Rectangle2D;
|
import java.awt.*; import java.awt.geom.*;
|
[
"java.awt"
] |
java.awt;
| 2,182,715
|
/**
 * Get all values of property Director as a ReactorResult of Agent.
 *
 * @return a ReactorResult which can conveniently be converted to iterator,
 *     list or array
 */
public ReactorResult<Agent> getAllbiboDirector_as() {
    final ReactorResult<Agent> directors =
        Base.getAll_as(this.model, this.getResource(), DIRECTOR, Agent.class);
    return directors;
}
|
ReactorResult<Agent> function() { return Base.getAll_as(this.model, this.getResource(), DIRECTOR, Agent.class); }
|
/**
* Get all values of property Director as a ReactorResult of Agent
* @return a ReactorResult of $type which can conveniently be converted to iterator, list or array
*
* [Generated from RDFReactor template rule #get12dynamic-reactorresult]
*/
|
Get all values of property Director as a ReactorResult of Agent
|
getAllbiboDirector_as
|
{
"repo_name": "alexgarciac/biotea",
"path": "src/ws/biotea/ld2rdf/rdf/model/bibo/Audio_visualdocument.java",
"license": "apache-2.0",
"size": 19394
}
|
[
"org.ontoware.rdfreactor.runtime.Base",
"org.ontoware.rdfreactor.runtime.ReactorResult"
] |
import org.ontoware.rdfreactor.runtime.Base; import org.ontoware.rdfreactor.runtime.ReactorResult;
|
import org.ontoware.rdfreactor.runtime.*;
|
[
"org.ontoware.rdfreactor"
] |
org.ontoware.rdfreactor;
| 1,222,545
|
/**
 * Removes the given operation. Also removes the corresponding
 * {@link MessageView} if one exists for it, bundling both removals into a
 * single compound command so they are executed (and undoable) together.
 *
 * @param operation the operation to be removed
 */
public void removeOperation(Operation operation) {
    // Resolve the owning aspect and its editing domain for command execution.
    Aspect aspect = EMFModelUtil.getRootContainerOfType(operation, RamPackage.Literals.ASPECT);
    EditingDomain editingDomain = EMFEditUtil.getEditingDomain(aspect);
    CompoundCommand compoundCommand = new CompoundCommand();
    MessageView messageView = RAMModelUtil.getMessageViewFor(aspect, operation);
    if (messageView != null) {
        // Remove the message view first so it does not dangle.
        compoundCommand.append(AspectController.createRemoveMessageViewCommand(messageView));
    }
    compoundCommand.append(RemoveCommand.create(editingDomain, operation));
    doExecute(editingDomain, compoundCommand);
}
|
void function(Operation operation) { Aspect aspect = EMFModelUtil.getRootContainerOfType(operation, RamPackage.Literals.ASPECT); EditingDomain editingDomain = EMFEditUtil.getEditingDomain(aspect); CompoundCommand compoundCommand = new CompoundCommand(); MessageView messageView = RAMModelUtil.getMessageViewFor(aspect, operation); if (messageView != null) { compoundCommand.append(AspectController.createRemoveMessageViewCommand(messageView)); } compoundCommand.append(RemoveCommand.create(editingDomain, operation)); doExecute(editingDomain, compoundCommand); }
|
/**
* Removes the given operation. Also removes the corresponding {@link MessageView} if it exists.
*
* @param operation the operation to be removed
*/
|
Removes the given operation. Also removes the corresponding <code>MessageView</code> if it exists
|
removeOperation
|
{
"repo_name": "mschoettle/ecse429-fall15-project",
"path": "ca.mcgill.sel.ram.controller/src/ca/mcgill/sel/ram/controller/ClassController.java",
"license": "gpl-2.0",
"size": 35270
}
|
[
"ca.mcgill.sel.commons.emf.util.EMFEditUtil",
"ca.mcgill.sel.commons.emf.util.EMFModelUtil",
"ca.mcgill.sel.ram.Aspect",
"ca.mcgill.sel.ram.MessageView",
"ca.mcgill.sel.ram.Operation",
"ca.mcgill.sel.ram.RamPackage",
"ca.mcgill.sel.ram.util.RAMModelUtil",
"org.eclipse.emf.common.command.CompoundCommand",
"org.eclipse.emf.edit.command.RemoveCommand",
"org.eclipse.emf.edit.domain.EditingDomain"
] |
import ca.mcgill.sel.commons.emf.util.EMFEditUtil; import ca.mcgill.sel.commons.emf.util.EMFModelUtil; import ca.mcgill.sel.ram.Aspect; import ca.mcgill.sel.ram.MessageView; import ca.mcgill.sel.ram.Operation; import ca.mcgill.sel.ram.RamPackage; import ca.mcgill.sel.ram.util.RAMModelUtil; import org.eclipse.emf.common.command.CompoundCommand; import org.eclipse.emf.edit.command.RemoveCommand; import org.eclipse.emf.edit.domain.EditingDomain;
|
import ca.mcgill.sel.commons.emf.util.*; import ca.mcgill.sel.ram.*; import ca.mcgill.sel.ram.util.*; import org.eclipse.emf.common.command.*; import org.eclipse.emf.edit.command.*; import org.eclipse.emf.edit.domain.*;
|
[
"ca.mcgill.sel",
"org.eclipse.emf"
] |
ca.mcgill.sel; org.eclipse.emf;
| 1,142,174
|
final Client client = getClient();
// START SNIPPET: java-doc-admin-cluster-health
final ClusterHealthResponse health =
client.admin().cluster().prepareHealth().setWaitForYellowStatus().get();
final String clusterName = health.getClusterName();
final int numberOfNodes = health.getNumberOfNodes();
// END SNIPPET: java-doc-admin-cluster-health
assertThat(
"cluster [" + clusterName + "] should have at least 1 node",
numberOfNodes,
greaterThan(0));
}
|
final Client client = getClient(); final ClusterHealthResponse health = client.admin().cluster().prepareHealth().setWaitForYellowStatus().get(); final String clusterName = health.getClusterName(); final int numberOfNodes = health.getNumberOfNodes(); assertThat( STR + clusterName + STR, numberOfNodes, greaterThan(0)); }
|
/**
* Check that we are connected to a cluster named "elasticsearch".
*/
|
Check that we are connected to a cluster named "elasticsearch"
|
testSimpleClient
|
{
"repo_name": "jimczi/elasticsearch",
"path": "qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/SmokeTestClientIT.java",
"license": "apache-2.0",
"size": 2854
}
|
[
"org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse",
"org.elasticsearch.client.Client",
"org.hamcrest.Matchers"
] |
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.hamcrest.Matchers;
|
import org.elasticsearch.action.admin.cluster.health.*; import org.elasticsearch.client.*; import org.hamcrest.*;
|
[
"org.elasticsearch.action",
"org.elasticsearch.client",
"org.hamcrest"
] |
org.elasticsearch.action; org.elasticsearch.client; org.hamcrest;
| 2,153,774
|
public static Artifact populateArtifact(OMElement artifactEle) {
if (artifactEle == null) {
return null;
}
Artifact artifact = new Artifact();
// read top level attributes
artifact.setName(readAttribute(artifactEle, Artifact.NAME));
artifact.setVersion(readAttribute(artifactEle, Artifact.VERSION));
artifact.setType(readAttribute(artifactEle, Artifact.TYPE));
artifact.setServerRole(readAttribute(artifactEle, Artifact.SERVER_ROLE));
// read the dependencies
Iterator itr = artifactEle.getChildrenWithLocalName(Artifact.DEPENDENCY);
while (itr.hasNext()) {
OMElement depElement = (OMElement) itr.next();
// create an artifact for each dependency and add to the root artifact
Artifact.Dependency dep = new Artifact.Dependency();
dep.setServerRole(readAttribute(depElement, Artifact.SERVER_ROLE));
dep.setName(readAttribute(depElement, Artifact.ARTIFACT));
dep.setVersion(readAttribute(depElement, Artifact.VERSION));
artifact.addDependency(dep);
}
// read the subArtifacts
OMElement subArtifactsElement = artifactEle
.getFirstChildWithName(new QName(Artifact.SUB_ARTIFACTS));
if (subArtifactsElement != null) {
Iterator subArtItr = subArtifactsElement.getChildrenWithLocalName(Artifact.ARTIFACT);
while (subArtItr.hasNext()) {
// as this is also an artifact, use recursion
Artifact subArtifact = populateArtifact((OMElement) subArtItr.next());
artifact.addSubArtifact(subArtifact);
}
}
// read the files
Iterator fileItr = artifactEle.getChildrenWithLocalName(Artifact.FILE);
while (fileItr.hasNext()) {
OMElement fileElement = (OMElement) fileItr.next();
CappFile tempFile = new CappFile();
tempFile.setName(fileElement.getText());
tempFile.setVersion(readAttribute(fileElement, Artifact.VERSION));
artifact.addFile(tempFile);
}
return artifact;
}
|
static Artifact function(OMElement artifactEle) { if (artifactEle == null) { return null; } Artifact artifact = new Artifact(); artifact.setName(readAttribute(artifactEle, Artifact.NAME)); artifact.setVersion(readAttribute(artifactEle, Artifact.VERSION)); artifact.setType(readAttribute(artifactEle, Artifact.TYPE)); artifact.setServerRole(readAttribute(artifactEle, Artifact.SERVER_ROLE)); Iterator itr = artifactEle.getChildrenWithLocalName(Artifact.DEPENDENCY); while (itr.hasNext()) { OMElement depElement = (OMElement) itr.next(); Artifact.Dependency dep = new Artifact.Dependency(); dep.setServerRole(readAttribute(depElement, Artifact.SERVER_ROLE)); dep.setName(readAttribute(depElement, Artifact.ARTIFACT)); dep.setVersion(readAttribute(depElement, Artifact.VERSION)); artifact.addDependency(dep); } OMElement subArtifactsElement = artifactEle .getFirstChildWithName(new QName(Artifact.SUB_ARTIFACTS)); if (subArtifactsElement != null) { Iterator subArtItr = subArtifactsElement.getChildrenWithLocalName(Artifact.ARTIFACT); while (subArtItr.hasNext()) { Artifact subArtifact = populateArtifact((OMElement) subArtItr.next()); artifact.addSubArtifact(subArtifact); } } Iterator fileItr = artifactEle.getChildrenWithLocalName(Artifact.FILE); while (fileItr.hasNext()) { OMElement fileElement = (OMElement) fileItr.next(); CappFile tempFile = new CappFile(); tempFile.setName(fileElement.getText()); tempFile.setVersion(readAttribute(fileElement, Artifact.VERSION)); artifact.addFile(tempFile); } return artifact; }
|
/**
* Builds the Artifact object when an artifact element is given
*
* @param artifactEle - artifact OMElement
* @return created Artifact object
*/
|
Builds the Artifact object when an artifact element is given
|
populateArtifact
|
{
"repo_name": "madhawa-gunasekara/product-ei",
"path": "components/org.wso2.carbon.micro.integrator.core/src/main/java/org/wso2/carbon/application/deployer/AppDeployerUtils.java",
"license": "apache-2.0",
"size": 31181
}
|
[
"java.util.Iterator",
"javax.xml.namespace.QName",
"org.apache.axiom.om.OMElement",
"org.wso2.carbon.application.deployer.config.Artifact",
"org.wso2.carbon.application.deployer.config.CappFile"
] |
import java.util.Iterator; import javax.xml.namespace.QName; import org.apache.axiom.om.OMElement; import org.wso2.carbon.application.deployer.config.Artifact; import org.wso2.carbon.application.deployer.config.CappFile;
|
import java.util.*; import javax.xml.namespace.*; import org.apache.axiom.om.*; import org.wso2.carbon.application.deployer.config.*;
|
[
"java.util",
"javax.xml",
"org.apache.axiom",
"org.wso2.carbon"
] |
java.util; javax.xml; org.apache.axiom; org.wso2.carbon;
| 1,641,002
|
public void addGridPosition(Vector3i gridPosition, byte blockType) {
if (gridPosition.x < _minBounds.x) {
_minBounds.x = gridPosition.x;
}
if (gridPosition.y < _minBounds.y) {
_minBounds.y = gridPosition.y;
}
if (gridPosition.z < _minBounds.z) {
_minBounds.z = gridPosition.z;
}
if (gridPosition.x > _maxBounds.x) {
_maxBounds.x = gridPosition.x;
}
if (gridPosition.y > _maxBounds.y) {
_maxBounds.y = gridPosition.y;
}
if (gridPosition.z > _maxBounds.z) {
_maxBounds.z = gridPosition.z;
}
_gridPositions.add(new GridPosition(gridPosition, blockType));
}
|
void function(Vector3i gridPosition, byte blockType) { if (gridPosition.x < _minBounds.x) { _minBounds.x = gridPosition.x; } if (gridPosition.y < _minBounds.y) { _minBounds.y = gridPosition.y; } if (gridPosition.z < _minBounds.z) { _minBounds.z = gridPosition.z; } if (gridPosition.x > _maxBounds.x) { _maxBounds.x = gridPosition.x; } if (gridPosition.y > _maxBounds.y) { _maxBounds.y = gridPosition.y; } if (gridPosition.z > _maxBounds.z) { _maxBounds.z = gridPosition.z; } _gridPositions.add(new GridPosition(gridPosition, blockType)); }
|
/**
* Adds a block position to the grid.
*
* @param gridPosition The block position to add
*/
|
Adds a block position to the grid
|
addGridPosition
|
{
"repo_name": "zoneXcoding/Mineworld",
"path": "src/main/java/org/terasology/rendering/world/BlockGrid.java",
"license": "apache-2.0",
"size": 4625
}
|
[
"org.terasology.math.Vector3i"
] |
import org.terasology.math.Vector3i;
|
import org.terasology.math.*;
|
[
"org.terasology.math"
] |
org.terasology.math;
| 478,520
|
private List<String> getRawColumn(int column)
{
List<String> results = new LinkedList<String>();
for(int i=dataOffsetFromTop; i< data.size(); i++)
{
results.add(data.get(i)[column]);
}
return results;
}
|
List<String> function(int column) { List<String> results = new LinkedList<String>(); for(int i=dataOffsetFromTop; i< data.size(); i++) { results.add(data.get(i)[column]); } return results; }
|
/**
* Returns a column consisting of all the data values in a given column
* @param column Column index to return
* @return list of values in string format
*/
|
Returns a column consisting of all the data values in a given column
|
getRawColumn
|
{
"repo_name": "fredizzimo/keyboardlayout",
"path": "smac/src/aeatk/ca/ubc/cs/beta/aeatk/misc/csvhelpers/ConfigCSVFileHelper.java",
"license": "gpl-2.0",
"size": 11574
}
|
[
"java.util.LinkedList",
"java.util.List"
] |
import java.util.LinkedList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,367,614
|
public static SortedBag decorate(SortedBag bag, Predicate predicate) {
return new PredicatedSortedBag(bag, predicate);
}
//-----------------------------------------------------------------------
protected PredicatedSortedBag(SortedBag bag, Predicate predicate) {
super(bag, predicate);
}
|
static SortedBag function(SortedBag bag, Predicate predicate) { return new PredicatedSortedBag(bag, predicate); } protected PredicatedSortedBag(SortedBag bag, Predicate predicate) { super(bag, predicate); }
|
/**
* Factory method to create a predicated (validating) bag.
* <p>
* If there are any elements already in the bag being decorated, they
* are validated.
*
* @param bag the bag to decorate, must not be null
* @param predicate the predicate to use for validation, must not be null
* @return a new predicated SortedBag
* @throws IllegalArgumentException if bag or predicate is null
* @throws IllegalArgumentException if the bag contains invalid elements
*/
|
Factory method to create a predicated (validating) bag. If there are any elements already in the bag being decorated, they are validated
|
decorate
|
{
"repo_name": "ProfilingLabs/Usemon2",
"path": "usemon-agent-commons-java/src/main/java/com/usemon/lib/org/apache/commons/collections/bag/PredicatedSortedBag.java",
"license": "mpl-2.0",
"size": 3692
}
|
[
"com.usemon.lib.org.apache.commons.collections.Predicate",
"com.usemon.lib.org.apache.commons.collections.SortedBag"
] |
import com.usemon.lib.org.apache.commons.collections.Predicate; import com.usemon.lib.org.apache.commons.collections.SortedBag;
|
import com.usemon.lib.org.apache.commons.collections.*;
|
[
"com.usemon.lib"
] |
com.usemon.lib;
| 245,709
|
boolean resolveConflicts(Project project, ChangeConflictSet changeConflict);
|
boolean resolveConflicts(Project project, ChangeConflictSet changeConflict);
|
/**
* Resolves all conflicts between the given change packages "theirs" and the
* given local operations.
*
* @param project
* the project at the time were all local changes are already
* applied and their operations are NOT, in other words the base
* version plus local operations
* @param changeConflict
* the {@link ChangeConflictSet} that needs to be resolved
* @return true if the merge can proceed, false if it has to be cancelled
*/
|
Resolves all conflicts between the given change packages "theirs" and the given local operations
|
resolveConflicts
|
{
"repo_name": "edgarmueller/emfstore-rest",
"path": "bundles/org.eclipse.emf.emfstore.client/src/org/eclipse/emf/emfstore/internal/client/model/changeTracking/merging/ConflictResolver.java",
"license": "epl-1.0",
"size": 1629
}
|
[
"org.eclipse.emf.emfstore.internal.common.model.Project",
"org.eclipse.emf.emfstore.internal.server.conflictDetection.ChangeConflictSet"
] |
import org.eclipse.emf.emfstore.internal.common.model.Project; import org.eclipse.emf.emfstore.internal.server.conflictDetection.ChangeConflictSet;
|
import org.eclipse.emf.emfstore.internal.common.model.*; import org.eclipse.emf.emfstore.internal.server.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 2,773,429
|
@ServiceMethod(returns = ReturnType.SINGLE)
public SyncPoller<PollResult<GalleryImageInner>, GalleryImageInner> beginCreateOrUpdate(
String resourceGroupName,
String galleryName,
String galleryImageName,
GalleryImageInner galleryImage,
Context context) {
return beginCreateOrUpdateAsync(resourceGroupName, galleryName, galleryImageName, galleryImage, context)
.getSyncPoller();
}
|
@ServiceMethod(returns = ReturnType.SINGLE) SyncPoller<PollResult<GalleryImageInner>, GalleryImageInner> function( String resourceGroupName, String galleryName, String galleryImageName, GalleryImageInner galleryImage, Context context) { return beginCreateOrUpdateAsync(resourceGroupName, galleryName, galleryImageName, galleryImage, context) .getSyncPoller(); }
|
/**
* Create or update a gallery Image Definition.
*
* @param resourceGroupName The name of the resource group.
* @param galleryName The name of the Shared Image Gallery in which the Image Definition is to be created.
* @param galleryImageName The name of the gallery Image Definition to be created or updated. The allowed characters
* are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80
* characters.
* @param galleryImage Specifies information about the gallery Image Definition that you want to create or update.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ApiErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return specifies information about the gallery Image Definition that you want to create or update.
*/
|
Create or update a gallery Image Definition
|
beginCreateOrUpdate
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-compute/src/main/java/com/azure/resourcemanager/compute/implementation/GalleryImagesClientImpl.java",
"license": "mit",
"size": 76471
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.management.polling.PollResult",
"com.azure.core.util.Context",
"com.azure.core.util.polling.SyncPoller",
"com.azure.resourcemanager.compute.fluent.models.GalleryImageInner"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.management.polling.PollResult; import com.azure.core.util.Context; import com.azure.core.util.polling.SyncPoller; import com.azure.resourcemanager.compute.fluent.models.GalleryImageInner;
|
import com.azure.core.annotation.*; import com.azure.core.management.polling.*; import com.azure.core.util.*; import com.azure.core.util.polling.*; import com.azure.resourcemanager.compute.fluent.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 2,179,799
|
@Test()
public void testExplodeRDNValidSingleValueNoExclude()
throws Exception
{
String[] s = LDAPDN.explodeRDN("dc=com", false);
assertNotNull(s);
assertEquals(s.length, 1);
assertEquals(s[0], "dc=com");
}
|
@Test() void function() throws Exception { String[] s = LDAPDN.explodeRDN(STR, false); assertNotNull(s); assertEquals(s.length, 1); assertEquals(s[0], STR); }
|
/**
* Tests the {@code explodeRDN} method for a valid RDN with a single value and
* not excluding types.
*
* @throws Exception If an unexpected problem occurs.
*/
|
Tests the explodeRDN method for a valid RDN with a single value and not excluding types
|
testExplodeRDNValidSingleValueNoExclude
|
{
"repo_name": "UnboundID/ldapsdk",
"path": "tests/unit/src/com/unboundid/ldap/sdk/migrate/ldapjdk/LDAPDNTestCase.java",
"license": "gpl-2.0",
"size": 9791
}
|
[
"org.testng.annotations.Test"
] |
import org.testng.annotations.Test;
|
import org.testng.annotations.*;
|
[
"org.testng.annotations"
] |
org.testng.annotations;
| 206,312
|
public void registerTaskComponent(String engineId, TaskComponent engine)
{
if (taskComponents.containsKey(engineId))
{
throw new WorkflowException("Task Component already registered for engine id '" + engineId + "'");
}
taskComponents.put(engineId, engine);
if (logger.isDebugEnabled())
logger.debug("Registered Task Component '" + engineId + "' (" + engine.getClass() + ")");
}
|
void function(String engineId, TaskComponent engine) { if (taskComponents.containsKey(engineId)) { throw new WorkflowException(STR + engineId + "'"); } taskComponents.put(engineId, engine); if (logger.isDebugEnabled()) logger.debug(STR + engineId + STR + engine.getClass() + ")"); }
|
/**
* Register a BPM Engine Task Component
*
* @param engineId engine id
* @param engine implementing engine
*/
|
Register a BPM Engine Task Component
|
registerTaskComponent
|
{
"repo_name": "loftuxab/community-edition-old",
"path": "projects/repository/source/java/org/alfresco/repo/workflow/BPMEngineRegistry.java",
"license": "lgpl-3.0",
"size": 7167
}
|
[
"org.alfresco.service.cmr.workflow.WorkflowException"
] |
import org.alfresco.service.cmr.workflow.WorkflowException;
|
import org.alfresco.service.cmr.workflow.*;
|
[
"org.alfresco.service"
] |
org.alfresco.service;
| 2,913,132
|
public void readEntities(Table table, Session.ResultSetReader reader, boolean orderByPK) throws SQLException {
String sqlQuery = "Select " + filteredSelectionClause(table) + " From " + SQLDialect.dmlTableReference(ENTITY, session) + " E join " + table.getName() + " T on " +
pkEqualsEntityID(table, "T", "E") +
" Where E.birthday>=0 and E.r_entitygraph=" + graphID + " and E.type='" + table.getName() + "'";
long rc;
if (orderByPK) {
String sqlQueryWithOrderBy = sqlQuery +
(orderByPK? " order by " + table.primaryKey.columnList("T.") : "");
rc = session.executeQuery(sqlQueryWithOrderBy, reader, sqlQuery, null, 0);
} else {
rc = session.executeQuery(sqlQuery, reader);
}
ProgressListenerRegistry.getProgressListener().exported(table, rc);
}
|
void function(Table table, Session.ResultSetReader reader, boolean orderByPK) throws SQLException { String sqlQuery = STR + filteredSelectionClause(table) + STR + SQLDialect.dmlTableReference(ENTITY, session) + STR + table.getName() + STR + pkEqualsEntityID(table, "T", "E") + STR + graphID + STR + table.getName() + "'"; long rc; if (orderByPK) { String sqlQueryWithOrderBy = sqlQuery + (orderByPK? STR + table.primaryKey.columnList("T.") : ""); rc = session.executeQuery(sqlQueryWithOrderBy, reader, sqlQuery, null, 0); } else { rc = session.executeQuery(sqlQuery, reader); } ProgressListenerRegistry.getProgressListener().exported(table, rc); }
|
/**
* Reads all entities of a given table.
*
* @param reader for reading the result-set
* @param table the table
* @param orderByPK if <code>true</code>, result will be ordered by primary keys
*/
|
Reads all entities of a given table
|
readEntities
|
{
"repo_name": "pellcorp/jailer",
"path": "src/main/net/sf/jailer/entitygraph/remote/RemoteEntityGraph.java",
"license": "apache-2.0",
"size": 48544
}
|
[
"java.sql.SQLException",
"net.sf.jailer.database.SQLDialect",
"net.sf.jailer.database.Session",
"net.sf.jailer.datamodel.Table",
"net.sf.jailer.progress.ProgressListenerRegistry"
] |
import java.sql.SQLException; import net.sf.jailer.database.SQLDialect; import net.sf.jailer.database.Session; import net.sf.jailer.datamodel.Table; import net.sf.jailer.progress.ProgressListenerRegistry;
|
import java.sql.*; import net.sf.jailer.database.*; import net.sf.jailer.datamodel.*; import net.sf.jailer.progress.*;
|
[
"java.sql",
"net.sf.jailer"
] |
java.sql; net.sf.jailer;
| 51,610
|
public java.util.List<fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI> getSubterm_finiteIntRanges_LessThanOrEqualHLAPI(){
java.util.List<fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI>();
for (Term elemnt : getSubterm()) {
if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.finiteIntRanges.impl.LessThanOrEqualImpl.class)){
retour.add(new fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI(
(fr.lip6.move.pnml.hlpn.finiteIntRanges.LessThanOrEqual)elemnt
));
}
}
return retour;
}
|
java.util.List<fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.finiteIntRanges.impl.LessThanOrEqualImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.finiteIntRanges.hlapi.LessThanOrEqualHLAPI( (fr.lip6.move.pnml.hlpn.finiteIntRanges.LessThanOrEqual)elemnt )); } } return retour; }
|
/**
* This accessor return a list of encapsulated subelement, only of LessThanOrEqualHLAPI kind.
* WARNING : this method can creates a lot of new object in memory.
*/
|
This accessor return a list of encapsulated subelement, only of LessThanOrEqualHLAPI kind. WARNING : this method can creates a lot of new object in memory
|
getSubterm_finiteIntRanges_LessThanOrEqualHLAPI
|
{
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/strings/hlapi/LessThanOrEqualHLAPI.java",
"license": "epl-1.0",
"size": 108661
}
|
[
"fr.lip6.move.pnml.hlpn.terms.Term",
"java.util.ArrayList",
"java.util.List"
] |
import fr.lip6.move.pnml.hlpn.terms.Term; import java.util.ArrayList; import java.util.List;
|
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
|
[
"fr.lip6.move",
"java.util"
] |
fr.lip6.move; java.util;
| 2,815,398
|
Properties getSystemProperties();
|
Properties getSystemProperties();
|
/**
* Gets the system properties to use for interpolation and profile activation. The system properties are collected
* from the runtime environment like {@link System#getProperties()} and environment variables.
*
* @return The system properties, never {@code null}.
*/
|
Gets the system properties to use for interpolation and profile activation. The system properties are collected from the runtime environment like <code>System#getProperties()</code> and environment variables
|
getSystemProperties
|
{
"repo_name": "rogerchina/maven",
"path": "maven-model-builder/src/main/java/org/apache/maven/model/building/ModelBuildingRequest.java",
"license": "apache-2.0",
"size": 13022
}
|
[
"java.util.Properties"
] |
import java.util.Properties;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 432,198
|
ServiceResponse<String> getWhitespace() throws ErrorException, IOException;
|
ServiceResponse<String> getWhitespace() throws ErrorException, IOException;
|
/**
* Get string value with leading and trailing whitespace '<tab><space><space>Now is the time for all good men to come to the aid of their country<tab><space><space>'.
*
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the String object wrapped in {@link ServiceResponse} if successful.
*/
|
Get string value with leading and trailing whitespace '<tab><space><space>Now is the time for all good men to come to the aid of their country<tab><space><space>'
|
getWhitespace
|
{
"repo_name": "vulcansteel/autorest",
"path": "AutoRest/Generators/Java/Java.Tests/src/main/java/fixtures/bodystring/StringOperations.java",
"license": "mit",
"size": 11198
}
|
[
"com.microsoft.rest.ServiceResponse",
"java.io.IOException"
] |
import com.microsoft.rest.ServiceResponse; import java.io.IOException;
|
import com.microsoft.rest.*; import java.io.*;
|
[
"com.microsoft.rest",
"java.io"
] |
com.microsoft.rest; java.io;
| 1,389,728
|
private static boolean isTranslation(AffineTransform at) {
return ( Math.abs(at.getScaleX()-1.0) < THRESH &&
Math.abs(at.getScaleY()-1.0) < THRESH &&
Math.abs(at.getShearX()) < THRESH &&
Math.abs(at.getShearY()) < THRESH );
}
|
static boolean function(AffineTransform at) { return ( Math.abs(at.getScaleX()-1.0) < THRESH && Math.abs(at.getScaleY()-1.0) < THRESH && Math.abs(at.getShearX()) < THRESH && Math.abs(at.getShearY()) < THRESH ); }
|
/**
* Check if the given AffineTransform is a translation
* (within thresholds -- see {@link #THRESH}.
*/
|
Check if the given AffineTransform is a translation (within thresholds -- see <code>#THRESH</code>
|
isTranslation
|
{
"repo_name": "jchildress/Prefux",
"path": "src/main/java/prefux/util/display/BackgroundPainter.java",
"license": "bsd-3-clause",
"size": 8718
}
|
[
"java.awt.geom.AffineTransform"
] |
import java.awt.geom.AffineTransform;
|
import java.awt.geom.*;
|
[
"java.awt"
] |
java.awt;
| 335,373
|
public static Map<String,String> getTermsByName(String name,
String ontologyID)
{
boolean removeObsoleteTerms = false;
Map<String, String> map = null;
try {
QueryService locator = new QueryServiceLocator();
// this requires the axis.jar and jaxrpc.jar
// jaxrpc project is now jax-ws (web services) but the
// JAXWS2.1.5 jar can't replace the jaxrpc.jar.
Query qs = locator.getOntologyQuery();
map = qs.getTermsByName(name, ontologyID, false);
// This would be nice but is FAR TOO SLOW (have to make a SOAP call for every term!).
if (removeObsoleteTerms) {
List<String> obsoleteTermIds = new ArrayList<String>();
for (Iterator i = map.keySet().iterator(); i.hasNext();){
String key = (String) i.next();
if(qs.isObsolete(key, ontologyID)) {
obsoleteTermIds.add(key);
}
}
for(String termId: obsoleteTermIds) {
map.remove(termId);
}
}
} catch (Exception e) {
handleException(e, "getTermsByName");
}
return map;
}
|
static Map<String,String> function(String name, String ontologyID) { boolean removeObsoleteTerms = false; Map<String, String> map = null; try { QueryService locator = new QueryServiceLocator(); Query qs = locator.getOntologyQuery(); map = qs.getTermsByName(name, ontologyID, false); if (removeObsoleteTerms) { List<String> obsoleteTermIds = new ArrayList<String>(); for (Iterator i = map.keySet().iterator(); i.hasNext();){ String key = (String) i.next(); if(qs.isObsolete(key, ontologyID)) { obsoleteTermIds.add(key); } } for(String termId: obsoleteTermIds) { map.remove(termId); } } } catch (Exception e) { handleException(e, STR); } return map; }
|
/**
* Searches a specified ontology for terms that match the partial name
* given. Doesn't remove obsolete terms!
* Returns the results as a Map, with terms identified by their
* Ontology:TermID string. E.g:
* Map key "GO:0000236"
* Map object "mitotic prometaphase"
*
* @param name The partial name of the term. E.g. "mito"
* @param ontologyID The ontology identifier. E.g. "GO"
*
* @return Map of terms
*/
|
Searches a specified ontology for terms that match the partial name given. Doesn't remove obsolete terms! Returns the results as a Map, with terms identified by their Ontology:TermID string. E.g: Map object "mitotic prometaphase"
|
getTermsByName
|
{
"repo_name": "bramalingam/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/editor/util/OntologyLookUp.java",
"license": "gpl-2.0",
"size": 5692
}
|
[
"java.util.ArrayList",
"java.util.Iterator",
"java.util.List",
"java.util.Map",
"uk.ac.ebi.ook.web.services.Query",
"uk.ac.ebi.ook.web.services.QueryService",
"uk.ac.ebi.ook.web.services.QueryServiceLocator"
] |
import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import uk.ac.ebi.ook.web.services.Query; import uk.ac.ebi.ook.web.services.QueryService; import uk.ac.ebi.ook.web.services.QueryServiceLocator;
|
import java.util.*; import uk.ac.ebi.ook.web.services.*;
|
[
"java.util",
"uk.ac.ebi"
] |
java.util; uk.ac.ebi;
| 1,814,397
|
public void onClickOpenAddressButton(View v) {
String addressString = "1600 Amphitheatre Parkway, CA";
Uri.Builder builder = new Uri.Builder();
builder.scheme("geo")
.path("0,0")
.query(addressString);
Uri addressUri = builder.build();
showMap(addressUri);
}
|
void function(View v) { String addressString = STR; Uri.Builder builder = new Uri.Builder(); builder.scheme("geo") .path("0,0") .query(addressString); Uri addressUri = builder.build(); showMap(addressUri); }
|
/**
* This method is called when the Open Location in Map button is clicked. It will open the
* a map to the location represented by the variable addressString using implicit Intents.
*
* @param v Button that was clicked.
*/
|
This method is called when the Open Location in Map button is clicked. It will open the a map to the location represented by the variable addressString using implicit Intents
|
onClickOpenAddressButton
|
{
"repo_name": "pavanpucha/Developing-android-APPS",
"path": "Lesson04b-Webpages-Maps-and-Sharing/T04b.03-Exercise-ShareText/app/src/main/java/com/example/android/implicitintents/MainActivity.java",
"license": "apache-2.0",
"size": 6489
}
|
[
"android.net.Uri",
"android.view.View"
] |
import android.net.Uri; import android.view.View;
|
import android.net.*; import android.view.*;
|
[
"android.net",
"android.view"
] |
android.net; android.view;
| 694,435
|
public int loadTable(final HTable t, final byte[] f) throws IOException {
t.setAutoFlush(false);
byte[] k = new byte[3];
int rowCount = 0;
for (byte b1 = 'a'; b1 <= 'z'; b1++) {
for (byte b2 = 'a'; b2 <= 'z'; b2++) {
for (byte b3 = 'a'; b3 <= 'z'; b3++) {
k[0] = b1;
k[1] = b2;
k[2] = b3;
Put put = new Put(k);
put.add(f, null, k);
t.put(put);
rowCount++;
}
}
}
t.flushCommits();
return rowCount;
}
|
int function(final HTable t, final byte[] f) throws IOException { t.setAutoFlush(false); byte[] k = new byte[3]; int rowCount = 0; for (byte b1 = 'a'; b1 <= 'z'; b1++) { for (byte b2 = 'a'; b2 <= 'z'; b2++) { for (byte b3 = 'a'; b3 <= 'z'; b3++) { k[0] = b1; k[1] = b2; k[2] = b3; Put put = new Put(k); put.add(f, null, k); t.put(put); rowCount++; } } } t.flushCommits(); return rowCount; }
|
/**
* Load table with rows from 'aaa' to 'zzz'.
* @param t Table
* @param f Family
* @return Count of rows loaded.
* @throws IOException
*/
|
Load table with rows from 'aaa' to 'zzz'
|
loadTable
|
{
"repo_name": "ay65535/hbase-0.94.0",
"path": "src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 65900
}
|
[
"java.io.IOException",
"org.apache.hadoop.hbase.client.HTable",
"org.apache.hadoop.hbase.client.Put"
] |
import java.io.IOException; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put;
|
import java.io.*; import org.apache.hadoop.hbase.client.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,359,092
|
/**
 * Reverse the JSONML transformation, producing an XML text from a JSONArray
 * in JsonML form: element 0 is the tag name, optional element 1 is an
 * attribute object, and any remaining elements are child content.
 *
 * @param ja A JSONArray.
 * @return An XML string.
 * @throws JSONException if element 0 is missing or not a valid tag name.
 */
public static String toString(JSONArray ja) throws JSONException {
    int i;
    JSONObject jo;
    String key;
    Iterator<String> keys;
    int length;
    Object object;
    StringBuilder sb = new StringBuilder();
    String tagName;
    String value;

    // Emit "<tagName"; the tag name must not contain whitespace and is escaped.
    tagName = ja.getString(0);
    XML.noSpace(tagName);
    tagName = XML.escape(tagName);
    sb.append('<');
    sb.append(tagName);

    object = ja.opt(1);
    if (object instanceof JSONObject) {
        // Element 1 is the attribute object, so content starts at element 2.
        i = 2;
        jo = (JSONObject)object;

        // Emit each attribute as key="value", escaping both key and value.
        keys = jo.keys();
        while (keys.hasNext()) {
            key = keys.next();
            XML.noSpace(key);
            value = jo.optString(key);
            if (value != null) {
                sb.append(' ');
                sb.append(XML.escape(key));
                sb.append('=');
                sb.append('"');
                sb.append(XML.escape(value));
                sb.append('"');
            }
        }
    } else {
        // No attribute object: content starts at element 1.
        i = 1;
    }

    // Emit content in body; with no children, close as a self-closing tag.
    length = ja.length();
    if (i >= length) {
        sb.append('/');
        sb.append('>');
    } else {
        sb.append('>');
        do {
            object = ja.get(i);
            i += 1;
            if (object != null) {
                if (object instanceof String) {
                    // Plain text child: escape it.
                    sb.append(XML.escape(object.toString()));
                } else if (object instanceof JSONObject) {
                    sb.append(toString((JSONObject)object));
                } else if (object instanceof JSONArray) {
                    // Nested JsonML element: recurse.
                    sb.append(toString((JSONArray)object));
                } else {
                    sb.append(object.toString());
                }
            }
        } while (i < length);
        sb.append('<');
        sb.append('/');
        sb.append(tagName);
        sb.append('>');
    }
    return sb.toString();
}
|
static String function(JSONArray ja) throws JSONException { int i; JSONObject jo; String key; Iterator<String> keys; int length; Object object; StringBuilder sb = new StringBuilder(); String tagName; String value; tagName = ja.getString(0); XML.noSpace(tagName); tagName = XML.escape(tagName); sb.append('<'); sb.append(tagName); object = ja.opt(1); if (object instanceof JSONObject) { i = 2; jo = (JSONObject)object; keys = jo.keys(); while (keys.hasNext()) { key = keys.next(); XML.noSpace(key); value = jo.optString(key); if (value != null) { sb.append(' '); sb.append(XML.escape(key)); sb.append('='); sb.append('STR'); } } } else { i = 1; } length = ja.length(); if (i >= length) { sb.append('/'); sb.append('>'); } else { sb.append('>'); do { object = ja.get(i); i += 1; if (object != null) { if (object instanceof String) { sb.append(XML.escape(object.toString())); } else if (object instanceof JSONObject) { sb.append(toString((JSONObject)object)); } else if (object instanceof JSONArray) { sb.append(toString((JSONArray)object)); } else { sb.append(object.toString()); } } } while (i < length); sb.append('<'); sb.append('/'); sb.append(tagName); sb.append('>'); } return sb.toString(); }
|
/**
* Reverse the JSONML transformation, making an XML text from a JSONArray.
* @param ja A JSONArray.
* @return An XML string.
* @throws JSONException
*/
|
Reverse the JSONML transformation, making an XML text from a JSONArray
|
toString
|
{
"repo_name": "alexeq/datacrown",
"path": "datacrow-core/_source/net/datacrow/core/utilities/json/JSONML.java",
"license": "gpl-3.0",
"size": 17261
}
|
[
"java.util.Iterator"
] |
import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,126,150
|
/**
 * Issues a simple request and prints the static HTML of a fixed page;
 * scripting is disabled so no JavaScript is evaluated.
 *
 * @throws IOException if the request fails
 * @throws SAXException if the response cannot be parsed
 */
public static void getHtmlContent() throws IOException, SAXException {
    // We only want the raw markup, so turn JavaScript evaluation off.
    HttpUnitOptions.setScriptingEnabled(false);
    final WebConversation conversation = new WebConversation();
    final WebResponse response =
            conversation.getResponse("http://www.zhihu.com/people/xie-jun-jie-80-94/following");
    System.out.println(response.getText());
}
|
static void function() throws IOException, SAXException { HttpUnitOptions.setScriptingEnabled(false); WebConversation wc = new WebConversation(); WebResponse wr = wc.getResponse("http: System.out.println(wr.getText()); }
|
/**
* simple to request, just get the static html
*
* @throws IOException
* @throws SAXException
*/
|
simple to request, just get the static html
|
getHtmlContent
|
{
"repo_name": "izhaomeng/spring-poc",
"path": "src/main/java/com/zhaomeng/study/basic/HttpUnitHelper.java",
"license": "apache-2.0",
"size": 3639
}
|
[
"com.meterware.httpunit.HttpUnitOptions",
"com.meterware.httpunit.WebConversation",
"com.meterware.httpunit.WebResponse",
"java.io.IOException",
"org.xml.sax.SAXException"
] |
import com.meterware.httpunit.HttpUnitOptions; import com.meterware.httpunit.WebConversation; import com.meterware.httpunit.WebResponse; import java.io.IOException; import org.xml.sax.SAXException;
|
import com.meterware.httpunit.*; import java.io.*; import org.xml.sax.*;
|
[
"com.meterware.httpunit",
"java.io",
"org.xml.sax"
] |
com.meterware.httpunit; java.io; org.xml.sax;
| 2,623,072
|
/**
 * Accessor for the writer of the protocol file.
 *
 * @return the {@link PrintWriter} used for the protocol file
 */
public PrintWriter getProtocolFileWriter() {
    return this.protocolFileWriter;
}
|
PrintWriter function() { return protocolFileWriter; }
|
/**
* Zugriff auf den <code>protocolFileWriter</code>.
*
* @return {@link PrintWriter} mit Namen der Protokolldatei
*/
|
Zugriff auf den <code>protocolFileWriter</code>
|
getProtocolFileWriter
|
{
"repo_name": "datenverteiler/de.bsvrz.pat.onlprot",
"path": "src/main/java/de/bsvrz/pat/onlprot/standardProtocolModule/ProtocolModule.java",
"license": "gpl-2.0",
"size": 4153
}
|
[
"java.io.PrintWriter"
] |
import java.io.PrintWriter;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,110,547
|
/**
 * Registers a function object and returns the ids assigned to it, keyed by
 * the interface classes the function implements. A null or unsupported
 * object yields an empty map.
 *
 * @param function the function object to register (may be null)
 * @return map of assigned ids keyed by implemented interface class
 */
public Map<Class<?>, Integer> registerFunction(final Object function) {
    final Map<Class<?>, Integer> assignedIds = new HashMap<Class<?>, Integer>();
    // instanceof is false for null, so no separate null check is needed.
    if (function instanceof EventNotificationEnricher) {
        final int id = functionIdSerial.incrementAndGet();
        enrichers.put(id, (EventNotificationEnricher) function);
        assignedIds.put(EventNotificationEnricher.class, id);
    }
    return assignedIds;
}
|
Map<Class<?>, Integer> function(final Object function) { final Map<Class<?>, Integer> ids = new HashMap<Class<?>, Integer>(); if(function!=null) { if(function instanceof EventNotificationEnricher) { final int id = functionIdSerial.incrementAndGet(); enrichers.put(id, (EventNotificationEnricher)function); ids.put(EventNotificationEnricher.class, id); } } return ids; }
|
/**
* Registers a function object
* @param function The function object
* @return a map of the assigned ids for the function keyed by the
* interface classes the function implemented
*/
|
Registers a function object
|
registerFunction
|
{
"repo_name": "nickman/heliosutils",
"path": "src/main/java/com/heliosapm/utils/events/TriggerPipeline.java",
"license": "apache-2.0",
"size": 14794
}
|
[
"java.util.HashMap",
"java.util.Map"
] |
import java.util.HashMap; import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 264,374
|
/**
 * Returns a valid time-period for testing a Sentinel2 provider.
 *
 * <p>Bug fix: the provider name was compared with {@code ==}, which tests
 * reference identity and is almost always false for caller-supplied strings
 * (especially after {@code toUpperCase()} allocates a new String). Compare
 * by value with {@code equals} instead.
 *
 * @param providerName provider name, case-insensitive ("THEIA" or "AWS")
 * @return a two-element array {startDate, endDate}
 * @throws ParseException if a date literal cannot be parsed
 * @throws RuntimeException if the provider name is not recognized
 */
public static Date[] timePeriodSettings(String providerName) throws ParseException {
    providerName = providerName.toUpperCase();
    if ("THEIA".equals(providerName)) {
        final Date startDate = DateUtilities.parseISO("2018-01-01T00:00:00Z");
        final Date endDate = DateUtilities.parseISO("2018-01-03T00:00:00Z");
        return new Date[] {startDate, endDate};
    }
    if ("AWS".equals(providerName)) {
        final Date startDate = DateUtilities.parseISO("2018-01-04T00:00:00Z");
        final Date endDate = DateUtilities.parseISO("2018-01-05T00:00:00Z");
        return new Date[] {startDate, endDate};
    }
    throw new RuntimeException(
        "No valid time-period defined for '" + providerName + "' Sentinel2 provider");
}
|
static Date[] function(String providerName) throws ParseException { providerName = providerName.toUpperCase(); if (providerName == "THEIA") { final Date startDate = DateUtilities.parseISO(STR); final Date endDate = DateUtilities.parseISO(STR); return new Date[] {startDate, endDate}; } if (providerName == "AWS") { final Date startDate = DateUtilities.parseISO(STR); final Date endDate = DateUtilities.parseISO(STR); return new Date[] {startDate, endDate}; } throw new RuntimeException( STR + providerName + STR); }
|
/**
* Returns a valid time-period for testing a Sentinel2 provider
*
* @return
* @throws ParseException
*/
|
Returns a valid time-period for testing a Sentinel2 provider
|
timePeriodSettings
|
{
"repo_name": "locationtech/geowave",
"path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/Tests.java",
"license": "apache-2.0",
"size": 5338
}
|
[
"java.text.ParseException",
"java.util.Date",
"org.locationtech.geowave.adapter.vector.util.DateUtilities"
] |
import java.text.ParseException; import java.util.Date; import org.locationtech.geowave.adapter.vector.util.DateUtilities;
|
import java.text.*; import java.util.*; import org.locationtech.geowave.adapter.vector.util.*;
|
[
"java.text",
"java.util",
"org.locationtech.geowave"
] |
java.text; java.util; org.locationtech.geowave;
| 1,396,212
|
/**
 * Factory method: creates a DocumentFragment owned by this Document.
 *
 * @return a new, empty fragment with this document as its owner
 */
public DocumentFragment createDocumentFragment() {
    final DocumentFragmentImpl fragment = new DocumentFragmentImpl(this);
    return fragment;
}
|
DocumentFragment function() { return new DocumentFragmentImpl(this); }
|
/**
* Factory method; creates a DocumentFragment having this Document
* as its OwnerDoc.
*/
|
Factory method; creates a DocumentFragment having this Document as its OwnerDoc
|
createDocumentFragment
|
{
"repo_name": "TheTypoMaster/Scaper",
"path": "openjdk/jaxp/drop_included/jaxp_src/src/com/sun/org/apache/xerces/internal/dom/CoreDocumentImpl.java",
"license": "gpl-2.0",
"size": 98511
}
|
[
"org.w3c.dom.DocumentFragment"
] |
import org.w3c.dom.DocumentFragment;
|
import org.w3c.dom.*;
|
[
"org.w3c.dom"
] |
org.w3c.dom;
| 251,549
|
/**
 * Runs after each test method: validates Mockito usage (failing fast on
 * problems such as unfinished stubbing) and nulls out every registered
 * field so state cannot leak between tests.
 */
@AfterMethod
public void baseAfterMethod() {
    Mockito.validateMockitoUsage();
    // Reset each registered field (e.g. the one annotated with @InjectMocks).
    for (final String fieldName : this.fieldsToReset) {
        setInstanceValue(this, fieldName, null);
    }
}
|
void function() { Mockito.validateMockitoUsage(); for (String field : this.fieldsToReset) { setInstanceValue(this, field, null); } }
|
/**
* The method will be run after each test method.
* <ul>
* <li>It checks for correct mockito usage in each test run.</li>
* <li>It resets the field annotated with @InjectMocks.</li>
* </ul>
*/
|
The method will be run after each test method. It checks for correct mockito usage in each test run. It resets the field annotated with @InjectMocks.
|
baseAfterMethod
|
{
"repo_name": "andy32323/inspectIT",
"path": "CommonsCS/test/info/novatec/inspectit/testbase/TestBase.java",
"license": "agpl-3.0",
"size": 4467
}
|
[
"org.mockito.Mockito"
] |
import org.mockito.Mockito;
|
import org.mockito.*;
|
[
"org.mockito"
] |
org.mockito;
| 1,805,189
|
/**
 * Accessor for the inventory holding all trinkets of all friends.
 *
 * @return the combined {@link Inventory} of every friend's trinkets
 */
public Inventory getCompleteInventory() {
    return this.completeInventory;
}
|
Inventory function() { return completeInventory; }
|
/**
* Returns the inventory containing all trinkets of all Friends.
* @return Inventory - contains all Trinkets of all Friends.
*/
|
Returns the inventory containing all trinkets of all Friends
|
getCompleteInventory
|
{
"repo_name": "CMPUT301F15T01/YesWeCandroid",
"path": "app/src/main/java/ca/ualberta/trinkettrader/Friends/AllFriendsInventoriesActivity.java",
"license": "apache-2.0",
"size": 5810
}
|
[
"ca.ualberta.trinkettrader.Inventory"
] |
import ca.ualberta.trinkettrader.Inventory;
|
import ca.ualberta.trinkettrader.*;
|
[
"ca.ualberta.trinkettrader"
] |
ca.ualberta.trinkettrader;
| 1,629,416
|
/**
 * Returns the indent level of the given line, measured in tab units.
 * A literal tab counts as one unit; a full run of {@code tabWidth} blanks
 * (per the formatter preference) also counts as one unit. Scanning stops at
 * the first non-indent character; trailing partial runs of blanks are ignored.
 *
 * @param line the text line to measure
 * @return the indent level in tab units
 */
public int computeIndent(String line)
{
    IPreferenceStore preferences = CCPlugin.getDefault().getPreferenceStore();
    int tabWidth = preferences.getInt(DefaultCodeFormatterConstants.FORMATTER_TAB_SIZE);
    int indent = 0;
    int pendingBlanks = 0;
    for (int pos = 0, len = line.length(); pos < len; pos++)
    {
        char ch = line.charAt(pos);
        if (ch == '\t')
        {
            indent++;
            pendingBlanks = 0;
        }
        else if (ch == '\n' || ch == '\r' || !Character.isWhitespace(ch))
        {
            // First non-indent character (or line break): indentation ends here.
            return indent;
        }
        else
        {
            // A blank: once a full tab's worth accumulates, count one unit.
            pendingBlanks++;
            if (pendingBlanks == tabWidth)
            {
                indent++;
                pendingBlanks = 0;
            }
        }
    }
    return indent;
}
|
int function(String line) { IPreferenceStore preferences = CCPlugin.getDefault().getPreferenceStore(); int tabWidth = preferences.getInt(DefaultCodeFormatterConstants.FORMATTER_TAB_SIZE); int result = 0; int blanks = 0; int size = line.length(); for (int i = 0; i < size; i++) { char c = line.charAt(i); if (c == '\t') { result++; blanks = 0; } else if (Character.isWhitespace(c) && !(c == '\n' c == '\r')) { blanks++; if (blanks == tabWidth) { result++; blanks = 0; } } else { return result; } } return result; }
|
/**
* Returns the indent of the given string.
* @param line the text line
* @return indent level
*/
|
Returns the indent of the given string
|
computeIndent
|
{
"repo_name": "mnuessler/commonclipse",
"path": "src/main/java/net/sf/commonclipse/Generator.java",
"license": "apache-2.0",
"size": 13823
}
|
[
"org.eclipse.jdt.core.formatter.DefaultCodeFormatterConstants",
"org.eclipse.jface.preference.IPreferenceStore"
] |
import org.eclipse.jdt.core.formatter.DefaultCodeFormatterConstants; import org.eclipse.jface.preference.IPreferenceStore;
|
import org.eclipse.jdt.core.formatter.*; import org.eclipse.jface.preference.*;
|
[
"org.eclipse.jdt",
"org.eclipse.jface"
] |
org.eclipse.jdt; org.eclipse.jface;
| 906,532
|
/**
 * Selects the rendering settings for the given pixels set, preferring the
 * requesting user's own settings and otherwise falling back to the first
 * settings found for that pixels set (typically the image owner's).
 *
 * @param renderingDefs candidate rendering settings, in preference order
 * @param userId identifier of the requesting user
 * @param pixelsId identifier of the pixels set
 * @return the selected settings, or {@code null} if none match the pixels set
 * @throws ServerError declared for callers; not raised directly here
 */
protected RenderingDef selectRenderingDef(
        List<RenderingDef> renderingDefs, final long userId,
        final long pixelsId)
    throws ServerError {
    RenderingDef ownerFallback = null;
    for (RenderingDef def : renderingDefs) {
        if (def.getPixels().getId() != pixelsId) {
            continue;
        }
        if (def.getDetails().getOwner().getId() == userId) {
            // Settings saved by the requesting user win outright.
            return def;
        }
        if (ownerFallback == null) {
            // Remember the first match for this pixels set as the fallback.
            ownerFallback = def;
        }
    }
    return ownerFallback;
}
|
RenderingDef function( List<RenderingDef> renderingDefs, final long userId, final long pixelsId) throws ServerError { RenderingDef userRenderingDef = renderingDefs .stream() .filter(v -> v.getPixels().getId() == pixelsId) .filter(v -> v.getDetails().getOwner().getId() == userId) .findFirst() .orElse(null); if (userRenderingDef != null) { return userRenderingDef; } return renderingDefs .stream() .filter(v -> v.getPixels().getId() == pixelsId) .findFirst() .orElse(null); }
|
/**
* Selects the correct rendering settings either from the user
* (preferred) or image owner corresponding to the specified
* pixels set.
* @param renderingDefs A list of rendering settings to select from.
* @param pixelsId The identifier of the pixels.
* @return See above.
*/
|
Selects the correct rendering settings either from the user (preferred) or image owner corresponding to the specified pixels set
|
selectRenderingDef
|
{
"repo_name": "glencoesoftware/omero-ms-image-region",
"path": "src/main/java/com/glencoesoftware/omero/ms/image/region/ImageRegionRequestHandler.java",
"license": "gpl-2.0",
"size": 28474
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 43,946
|
/**
 * Returns the meta object for class
 * '{@link org.eclectic.frontend.mappings.AttributeMapping <em>Attribute Mapping</em>}'.
 *
 * @return the meta object for class '<em>Attribute Mapping</em>'.
 * @see org.eclectic.frontend.mappings.AttributeMapping
 * @generated
 */
EClass getAttributeMapping();
|
EClass getAttributeMapping();
|
/**
* Returns the meta object for class '{@link org.eclectic.frontend.mappings.AttributeMapping <em>Attribute Mapping</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Attribute Mapping</em>'.
* @see org.eclectic.frontend.mappings.AttributeMapping
* @generated
*/
|
Returns the meta object for class '<code>org.eclectic.frontend.mappings.AttributeMapping Attribute Mapping</code>'.
|
getAttributeMapping
|
{
"repo_name": "jesusc/eclectic",
"path": "plugins/org.eclectic.frontend.asm/src-gen/org/eclectic/frontend/mappings/MappingsPackage.java",
"license": "gpl-3.0",
"size": 129327
}
|
[
"org.eclipse.emf.ecore.EClass"
] |
import org.eclipse.emf.ecore.EClass;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 228,011
|
/**
 * Like {@link #setText(CharSequence, android.widget.TextView.BufferType)},
 * except that any cursor/selection position is carried over into the new
 * text, clamped to the new text's length.
 *
 * @param text the new text
 * @param type how the text should be buffered
 */
public final void setTextKeepState(CharSequence text, BufferType type)
{
    // Capture the selection before setText() replaces the content.
    final int selStart = getSelectionStart();
    final int selEnd = getSelectionEnd();
    final int newLength = text.length();
    setText(text, type);
    final boolean hadSelection = selStart >= 0 || selEnd >= 0;
    if (hadSelection && mText instanceof Spannable)
    {
        // Clamp the remembered offsets into the bounds of the new text.
        Selection.setSelection((Spannable) mText,
                Math.max(0, Math.min(selStart, newLength)),
                Math.max(0, Math.min(selEnd, newLength)));
    }
}
|
final void function(CharSequence text, BufferType type) { int start = getSelectionStart(); int end = getSelectionEnd(); int len = text.length(); setText(text, type); if (start >= 0 end >= 0) { if (mText instanceof Spannable) { Selection.setSelection((Spannable) mText, Math.max(0, Math.min(start, len)), Math.max(0, Math.min(end, len))); } } }
|
/**
* Like {@link #setText(CharSequence, android.widget.TextView.BufferType)},
* except that the cursor position (if any) is retained in the new text.
*
* @see #setText(CharSequence, android.widget.TextView.BufferType)
*/
|
Like <code>#setText(CharSequence, android.widget.TextView.BufferType)</code>, except that the cursor position (if any) is retained in the new text
|
setTextKeepState
|
{
"repo_name": "haikuowuya/android_system_code",
"path": "src/android/widget/TextView.java",
"license": "apache-2.0",
"size": 271830
}
|
[
"android.text.Selection",
"android.text.Spannable"
] |
import android.text.Selection; import android.text.Spannable;
|
import android.text.*;
|
[
"android.text"
] |
android.text;
| 31,428
|
/**
 * Score a collection of items based on a collection of items (e.g. a
 * shopping basket), with details.
 *
 * @param basket the items to use as the query; must not be null
 * @param items the items to score
 * @return the scores for the items, possibly carrying additional
 *         per-result details
 */
ResultMap scoreRelatedItemsWithDetails(@Nonnull Collection<Long> basket, Collection<Long> items);
|
ResultMap scoreRelatedItemsWithDetails(@Nonnull Collection<Long> basket, Collection<Long> items);
|
/**
* Score a collection of items based on a collection of items (e.g. a shopping basket), with details.
*
* @param basket The items to use as the query.
* @param items The items to score.
* @return The scores for the items, possibly with additional details (will be represented as a
* subclass of {@link Result}.
*/
|
Score a collection of items based on a collection of items (e.g. a shopping basket), with details
|
scoreRelatedItemsWithDetails
|
{
"repo_name": "kluver/lenskit",
"path": "lenskit-api/src/main/java/org/lenskit/api/ItemBasedItemScorer.java",
"license": "lgpl-2.1",
"size": 2892
}
|
[
"java.util.Collection",
"javax.annotation.Nonnull"
] |
import java.util.Collection; import javax.annotation.Nonnull;
|
import java.util.*; import javax.annotation.*;
|
[
"java.util",
"javax.annotation"
] |
java.util; javax.annotation;
| 2,618,394
|
/**
 * Sets the minimum {@link LogLevel} to display; the change takes effect
 * immediately.
 *
 * @param value textual name of the level to resolve and apply
 */
public static void setLogLevel(String value) {
    // Resolve the textual level once, then apply it to both the cached
    // preference and the live logger.
    final LogLevel level = LogLevel.getByString(value);
    sLogLevel = level;
    Log.setLevel(level);
}
|
static void function(String value) { sLogLevel = LogLevel.getByString(value); Log.setLevel(sLogLevel); }
|
/**
* Sets the minimum {@link LogLevel} to display.
* <p/>This change takes effect right away.
*/
|
Sets the minimum <code>LogLevel</code> to display. This change takes effect right away
|
setLogLevel
|
{
"repo_name": "ironmanMA/continuum",
"path": "backened/app/src/main/java/com/hackathon/continuum/ddmlib/DdmPreferences.java",
"license": "gpl-3.0",
"size": 5828
}
|
[
"com.hackathon.continuum.ddmlib.Log"
] |
import com.hackathon.continuum.ddmlib.Log;
|
import com.hackathon.continuum.ddmlib.*;
|
[
"com.hackathon.continuum"
] |
com.hackathon.continuum;
| 223,079
|
/**
 * Inserts a provider at the given 1-based position; an out-of-range
 * position appends the provider at the end.
 *
 * @param provider the provider to insert
 * @param position requested 1-based position
 * @return the 1-based position the provider was actually inserted at
 */
public static synchronized int insertProviderAt(Provider provider, int position) {
    final int count = providers.size();
    final boolean outOfRange = position < 1 || position > count;
    if (outOfRange) {
        // Normalize to "append at the end".
        position = count + 1;
    }
    providers.add(position - 1, provider);
    providersNames.put(provider.getName(), provider);
    setNeedRefresh();
    return position;
}
|
static synchronized int function(Provider provider, int position) { int size = providers.size(); if ((position < 1) (position > size)) { position = size + 1; } providers.add(position - 1, provider); providersNames.put(provider.getName(), provider); setNeedRefresh(); return position; }
|
/**
* Inserts a provider at a specified 1-based position.
*/
|
Inserts a provider at a specified 1-based position
|
insertProviderAt
|
{
"repo_name": "groschovskiy/j2objc",
"path": "jre_emul/android/libcore/luni/src/main/java/org/apache/harmony/security/fortress/Services.java",
"license": "apache-2.0",
"size": 7545
}
|
[
"java.security.Provider"
] |
import java.security.Provider;
|
import java.security.*;
|
[
"java.security"
] |
java.security;
| 2,427,283
|
/**
 * Defines and registers the XSLT Processor helper MBean for the HTTP
 * Adaptor, reusing the existing registration if one is already present.
 *
 * @return the ObjectName the processor is registered under
 * @throws javax.management.JMException if MBean registration fails
 */
private ObjectName createXsltProcessor() throws javax.management.JMException {
    final ObjectName objName = getXsltProcessorName();
    if (!getMBeanServer().isRegistered(objName)) {
        getMBeanServer().registerMBean(new mx4j.tools.adaptor.http.XSLTProcessor(), objName);
        return objName;
    }
    // Already registered (unexpected); report it and reuse the existing MBean.
    logger.info("XsltProcessor already registered as {}", objName);
    return objName;
}
|
ObjectName function() throws javax.management.JMException { ObjectName objName = getXsltProcessorName(); if (getMBeanServer().isRegistered(objName)) { logger.info(STR, objName); return objName; } getMBeanServer().registerMBean(new mx4j.tools.adaptor.http.XSLTProcessor(), objName); return objName; }
|
/**
* Defines and starts the Xslt Processor helper service for the Http Adaptor.
*/
|
Defines and starts the Xslt Processor helper service for the Http Adaptor
|
createXsltProcessor
|
{
"repo_name": "davinash/geode",
"path": "geode-core/src/main/java/org/apache/geode/admin/jmx/internal/AgentImpl.java",
"license": "apache-2.0",
"size": 54557
}
|
[
"javax.management.ObjectName"
] |
import javax.management.ObjectName;
|
import javax.management.*;
|
[
"javax.management"
] |
javax.management;
| 282,489
|
/**
 * Writes out the {@code key=value} properties that were changed into a
 * separate .properties file in UTF-8.
 *
 * <p>Bug fix: the original never closed the writer/stream it opened —
 * {@code Properties.store} does not close its argument — leaking a file
 * handle (and on some platforms keeping the file locked). The stream is
 * now closed via try-with-resources; I/O failures remain silently ignored
 * to preserve the original best-effort contract.
 *
 * @param f destination file, overwritten if it exists
 */
public void save(File f) {
    try (OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream(f), "UTF-8")) {
        props.store(out, null);
    } catch (IOException ignored) {
        // best-effort save: keep the original silent-failure behavior
    }
}
|
void function(File f) { try { props.store(new OutputStreamWriter(new FileOutputStream(f), "UTF-8"), null); } catch (IOException ex) { } }
|
/**
* Writes out the <code>key=value</code> properties that were changed into
 * a separate .[properties] file in UTF8.
*/
|
Writes out the <code>key=value</code> properties that were changed into a separate .[properties] file in UTF8
|
save
|
{
"repo_name": "Minequest/MineQuest-API",
"path": "src/main/java/com/theminequest/api/util/PropertiesFile.java",
"license": "lgpl-3.0",
"size": 17496
}
|
[
"java.io.File",
"java.io.FileOutputStream",
"java.io.IOException",
"java.io.OutputStreamWriter"
] |
import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,545,059
|
/**
 * Is at least one entry in the list contained in the string?
 *
 * @param contains the list of substrings to look for
 * @param string the String to check against
 * @return true if at least one entry of {@code contains} occurs in
 *         {@code string}
 */
private static boolean listContains(List<String> contains, String string) {
    // anyMatch short-circuits on the first hit, exactly like the manual loop.
    return contains.stream().anyMatch(string::contains);
}
|
static boolean function(List<String> contains, String string) { for(String entry : contains) { if (string.contains(entry)) { return true; } } return false; }
|
/**
* Is at least one entry in the list contained in the string?
* @param contains the list of strings to look for
* @param string the String to check against
* @return true if at least one entry in the contains list is contained in the string
*/
|
Is at least one entry in the list contained in the string
|
listContains
|
{
"repo_name": "mohanaraosv/commons-net",
"path": "src/main/java/examples/mail/IMAPImportMbox.java",
"license": "apache-2.0",
"size": 7681
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,350,010
|
/**
 * X-Road SOAP service receiveDocuments: returns documents waiting for the
 * calling organisation. Only service versions v1-v4 are accepted.
 *
 * @param request service request
 * @param messageContext SOAP message context of the current exchange
 * @return service response containing the documents to receive
 * @throws DhxException if an unsupported service version is requested or an
 *         error occurred while receiving the documents
 */
@PayloadRoots({
    @PayloadRoot(localPart = "receiveDocuments", namespace = NAMESPACE_URI_DHX),
    @PayloadRoot(localPart = "receiveDocuments", namespace = NAMESPACE_URI_DVK)
})
@ResponsePayload
@Loggable
public ReceiveDocumentsResponse receiveDocuments(
    @RequestPayload ReceiveDocuments request,
    MessageContext messageContext) throws DhxException {
  // Identify the X-Road client and service from the SOAP headers; the client
  // lookup also copies the X-Road headers onto the response.
  InternalXroadMember client = dhxGateway.getXroadClientAndSetRersponseHeader(messageContext);
  InternalXroadMember service = dhxGateway.getXroadService(messageContext);
  if (!service.getServiceVersion().equals("v4")
      && !service.getServiceVersion().equals("v3")
      && !service.getServiceVersion().equals("v2")
      && !service.getServiceVersion().equals("v1")) {
    throw new DhxException(DhxExceptionEnum.TECHNICAL_ERROR,
        "Only v1,v2,v3,v4 versions of receiveDocuments are supported");
  }
  ReceiveDocumentsResponse response =
      soapService.receiveDocuments(request, client, service);
  // Move the payload out of the body into a SOAP attachment and reference it
  // by the attachment's content id.
  String contentId = WsUtil.addAttachment(messageContext, response.getKeha().getHref());
  response.getKeha().setHrefString(contentId);
  return response;
}
|
@PayloadRoots({ @PayloadRoot(localPart = STR, namespace = NAMESPACE_URI_DHX), @PayloadRoot(localPart = STR, namespace = NAMESPACE_URI_DVK) }) ReceiveDocumentsResponse function( @RequestPayload ReceiveDocuments request, MessageContext messageContext) throws DhxException { InternalXroadMember client = dhxGateway.getXroadClientAndSetRersponseHeader(messageContext); InternalXroadMember service = dhxGateway.getXroadService(messageContext); if (!service.getServiceVersion().equals("v4") && !service.getServiceVersion().equals("v3") && !service.getServiceVersion().equals("v2") && !service.getServiceVersion().equals("v1")) { throw new DhxException(DhxExceptionEnum.TECHNICAL_ERROR, STR); } ReceiveDocumentsResponse response = soapService.receiveDocuments(request, client, service); String contentId = WsUtil.addAttachment(messageContext, response.getKeha().getHref()); response.getKeha().setHrefString(contentId); return response; }
|
/**
* X-road SOAP service receiveDocuments.
*
* @param request - service request
* @param messageContext - SOAP message context
* @return - service response. contains documents to receive
* @throws DhxException - thrown if error occurred while receiving the documents
*/
|
X-road SOAP service receiveDocuments
|
receiveDocuments
|
{
"repo_name": "e-gov/DHX-adapter",
"path": "dhx-adapter-server/src/main/java/ee/ria/dhx/server/endpoint/ServerEndpoint.java",
"license": "mit",
"size": 9521
}
|
[
"ee.ria.dhx.exception.DhxException",
"ee.ria.dhx.exception.DhxExceptionEnum",
"ee.ria.dhx.server.service.util.WsUtil",
"ee.ria.dhx.server.types.ee.riik.xrd.dhl.producers.producer.dhl.ReceiveDocuments",
"ee.ria.dhx.server.types.ee.riik.xrd.dhl.producers.producer.dhl.ReceiveDocumentsResponse",
"ee.ria.dhx.types.InternalXroadMember",
"org.springframework.ws.context.MessageContext",
"org.springframework.ws.server.endpoint.annotation.PayloadRoot",
"org.springframework.ws.server.endpoint.annotation.PayloadRoots",
"org.springframework.ws.server.endpoint.annotation.RequestPayload"
] |
import ee.ria.dhx.exception.DhxException; import ee.ria.dhx.exception.DhxExceptionEnum; import ee.ria.dhx.server.service.util.WsUtil; import ee.ria.dhx.server.types.ee.riik.xrd.dhl.producers.producer.dhl.ReceiveDocuments; import ee.ria.dhx.server.types.ee.riik.xrd.dhl.producers.producer.dhl.ReceiveDocumentsResponse; import ee.ria.dhx.types.InternalXroadMember; import org.springframework.ws.context.MessageContext; import org.springframework.ws.server.endpoint.annotation.PayloadRoot; import org.springframework.ws.server.endpoint.annotation.PayloadRoots; import org.springframework.ws.server.endpoint.annotation.RequestPayload;
|
import ee.ria.dhx.exception.*; import ee.ria.dhx.server.service.util.*; import ee.ria.dhx.server.types.ee.riik.xrd.dhl.producers.producer.dhl.*; import ee.ria.dhx.types.*; import org.springframework.ws.context.*; import org.springframework.ws.server.endpoint.annotation.*;
|
[
"ee.ria.dhx",
"org.springframework.ws"
] |
ee.ria.dhx; org.springframework.ws;
| 422,928
|
/**
 * Rebuilds and returns {@link #tabDescriptors} for the current selection:
 * visiting the selected mesh part populates the tab list as a side effect.
 *
 * @param part the workbench part the properties view is showing
 * @param selection the current selection; must be an
 *        {@link IStructuredSelection} of {@code MeshSelection} elements
 * @return the tab descriptors for the selection (possibly empty)
 */
@Override
public ITabDescriptor[] getTabDescriptors(IWorkbenchPart part,
		ISelection selection) {
	// Start from a clean slate of tabs for this selection.
	tabDescriptors.clear();

	Assert.isTrue(selection instanceof IStructuredSelection);
	IStructuredSelection structured = (IStructuredSelection) selection;

	// TODO Incorporate multiple elements from the selection.
	if (!structured.isEmpty()) {
		Object first = structured.getFirstElement();
		Assert.isTrue(first instanceof MeshSelection);
		MeshSelection meshSelection = (MeshSelection) first;
		// Remember the mesh, then visit the selected part; the visitor
		// callbacks fill in the available tabs.
		mesh = meshSelection.mesh;
		meshSelection.selectedMeshPart.acceptMeshVisitor(this);
	}

	ITabDescriptor[] result = new ITabDescriptor[tabDescriptors.size()];
	return tabDescriptors.toArray(result);
}
|
ITabDescriptor[] function(IWorkbenchPart part, ISelection selection) { tabDescriptors.clear(); Assert.isTrue(selection instanceof IStructuredSelection); IStructuredSelection structuredSelection = (IStructuredSelection) selection; if (!structuredSelection.isEmpty()) { Object element = structuredSelection.getFirstElement(); Assert.isTrue(element instanceof MeshSelection); MeshSelection meshSelection = (MeshSelection) element; mesh = meshSelection.mesh; meshSelection.selectedMeshPart.acceptMeshVisitor(this); } return tabDescriptors .toArray(new ITabDescriptor[tabDescriptors.size()]); }
|
/**
* Sets and returns {@link #tabDescriptors} based on the current selection.
*/
|
Sets and returns <code>#tabDescriptors</code> based on the current selection
|
getTabDescriptors
|
{
"repo_name": "gorindn/ice",
"path": "src/org.eclipse.ice.viz.service.mesh/src/org/eclipse/ice/viz/service/mesh/properties/TabDescriptorProvider.java",
"license": "epl-1.0",
"size": 20711
}
|
[
"org.eclipse.core.runtime.Assert",
"org.eclipse.jface.viewers.ISelection",
"org.eclipse.jface.viewers.IStructuredSelection",
"org.eclipse.ui.IWorkbenchPart",
"org.eclipse.ui.views.properties.tabbed.ITabDescriptor"
] |
import org.eclipse.core.runtime.Assert; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.ui.IWorkbenchPart; import org.eclipse.ui.views.properties.tabbed.ITabDescriptor;
|
import org.eclipse.core.runtime.*; import org.eclipse.jface.viewers.*; import org.eclipse.ui.*; import org.eclipse.ui.views.properties.tabbed.*;
|
[
"org.eclipse.core",
"org.eclipse.jface",
"org.eclipse.ui"
] |
org.eclipse.core; org.eclipse.jface; org.eclipse.ui;
| 963,924
|
/**
 * {@inheritDoc}
 *
 * <p>The default implementation does nothing.</p>
 */
@Override public void exitIdlist(@NotNull PoCoParser.IdlistContext ctx) { }
|
@Override public void exitIdlist(@NotNull PoCoParser.IdlistContext ctx) { }
|
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
|
The default implementation does nothing
|
enterIdlist
|
{
"repo_name": "Corjuh/PoCo-Compiler",
"path": "Parser/gen/PoCoParserBaseListener.java",
"license": "lgpl-2.1",
"size": 18482
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 706,434
|
/**
 * Similar to {@link #maxCardinality(String, int, String...)}, but lets you
 * specify a namespace for the tags; see supported namespaces in
 * {@link AllocationTagNamespaceType}.
 *
 * @param scope the scope of the constraint
 * @param tagNamespace the namespace of these tags
 * @param maxCardinality maximum number of allocations within the scope
 * @param allocationTags allocation tags
 * @return the resulting placement constraint
 */
public static AbstractConstraint maxCardinality(String scope,
    String tagNamespace, int maxCardinality, String... allocationTags) {
  // A pure upper bound is just cardinality with the minimum pinned at zero.
  final int minCardinality = 0;
  return cardinality(scope, tagNamespace, minCardinality, maxCardinality,
      allocationTags);
}
|
static AbstractConstraint function(String scope, String tagNamespace, int maxCardinality, String... allocationTags) { return cardinality(scope, tagNamespace, 0, maxCardinality, allocationTags); }
|
/**
* Similar to {@link #maxCardinality(String, int, String...)}, but let you
* specify a namespace for the tags, see supported namespaces in
* {@link AllocationTagNamespaceType}.
*
* @param scope the scope of the constraint
* @param tagNamespace the namespace of these tags
* @param maxCardinality determines the maximum number of allocations within
* the scope
* @param allocationTags allocation tags
* @return the resulting placement constraint
*/
|
Similar to <code>#maxCardinality(String, int, String...)</code>, but let you specify a namespace for the tags, see supported namespaces in <code>AllocationTagNamespaceType</code>
|
maxCardinality
|
{
"repo_name": "GeLiXin/hadoop",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraints.java",
"license": "apache-2.0",
"size": 14824
}
|
[
"org.apache.hadoop.yarn.api.resource.PlacementConstraint"
] |
import org.apache.hadoop.yarn.api.resource.PlacementConstraint;
|
import org.apache.hadoop.yarn.api.resource.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 2,481,988
|
/**
 * Test of exec, of class CoordinateDimensionFF: a WKT point carrying X, Y
 * and Z ordinates must report a coordinate dimension of 3.
 */
@Test
public void testExec_3Z_Dimension() {
    NodeValue wktPoint = NodeValue.makeNode(
            "<http://www.opengis.net/def/crs/EPSG/0/27700> POINT Z (90 60 30)",
            WKTDatatype.INSTANCE);
    NodeValue expected = NodeValue.makeNodeInteger(3);
    NodeValue actual = new CoordinateDimensionFF().exec(wktPoint);
    assertEquals(expected, actual);
}
|
void function() { NodeValue geometryLiteral = NodeValue.makeNode("<http: CoordinateDimensionFF instance = new CoordinateDimensionFF(); NodeValue expResult = NodeValue.makeNodeInteger(3); NodeValue result = instance.exec(geometryLiteral); assertEquals(expResult, result); }
|
/**
* Test of exec method, of class CoordinateDimensionFF.
*/
|
Test of exec method, of class CoordinateDimensionFF
|
testExec_3Z_Dimension
|
{
"repo_name": "apache/jena",
"path": "jena-geosparql/src/test/java/org/apache/jena/geosparql/geof/topological/filter_functions/geometry_property/CoordinateDimensionFFTest.java",
"license": "apache-2.0",
"size": 3555
}
|
[
"org.apache.jena.sparql.expr.NodeValue",
"org.junit.Assert"
] |
import org.apache.jena.sparql.expr.NodeValue; import org.junit.Assert;
|
import org.apache.jena.sparql.expr.*; import org.junit.*;
|
[
"org.apache.jena",
"org.junit"
] |
org.apache.jena; org.junit;
| 1,651,087
|
/**
 * Returns true for labels that are "reserved selector key words" and not
 * intended to map to actual targets. Currently only the default condition
 * label is reserved.
 *
 * @param label the label to test
 * @return whether {@code label} equals {@code DEFAULT_CONDITION_LABEL}
 */
public static boolean isReservedLabel(Label label) {
return DEFAULT_CONDITION_LABEL.equals(label);
}
}
|
static boolean function(Label label) { return DEFAULT_CONDITION_LABEL.equals(label); } }
|
/**
* Returns true for labels that are "reserved selector key words" and not intended to
* map to actual targets.
*/
|
Returns true for labels that are "reserved selector key words" and not intended to map to actual targets
|
isReservedLabel
|
{
"repo_name": "damienmg/bazel",
"path": "src/main/java/com/google/devtools/build/lib/packages/BuildType.java",
"license": "apache-2.0",
"size": 25026
}
|
[
"com.google.devtools.build.lib.cmdline.Label"
] |
import com.google.devtools.build.lib.cmdline.Label;
|
import com.google.devtools.build.lib.cmdline.*;
|
[
"com.google.devtools"
] |
com.google.devtools;
| 1,352,463
|
/**
 * Gets the disbVchrPersonalCarAmount attribute.
 *
 * <p>NOTE(review): the null guard checks {@code dvPersonalCarMileageAmount},
 * not the field being returned — presumably the car amount is only meaningful
 * once a mileage amount has been entered; confirm this is intentional.</p>
 *
 * @return the disbVchrPersonalCarAmount, or null when no personal car
 *         mileage amount is set
 */
public KualiDecimal getDisbVchrPersonalCarAmount() {
return dvPersonalCarMileageAmount == null ? null : disbVchrPersonalCarAmount;
}
|
KualiDecimal function() { return dvPersonalCarMileageAmount == null ? null : disbVchrPersonalCarAmount; }
|
/**
* Gets the disbVchrPersonalCarAmount attribute.
*
* @return Returns the disbVchrPersonalCarAmount
*/
|
Gets the disbVchrPersonalCarAmount attribute
|
getDisbVchrPersonalCarAmount
|
{
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/fp/businessobject/DisbursementVoucherNonEmployeeTravel.java",
"license": "apache-2.0",
"size": 24746
}
|
[
"org.kuali.rice.core.api.util.type.KualiDecimal"
] |
import org.kuali.rice.core.api.util.type.KualiDecimal;
|
import org.kuali.rice.core.api.util.type.*;
|
[
"org.kuali.rice"
] |
org.kuali.rice;
| 464,199
|
/**
 * Reads the dictionary-encoded dimension value stored at the given index
 * inside a write-step row.
 *
 * @param index position of the dimension within the dictionary-dimension array
 * @param row   the row produced by the write step
 * @return the dictionary surrogate value for that dimension
 */
public static Integer getDimension(int index, Object[] row) {
  return ((Integer[]) row[WriteStepRowUtil.DICTIONARY_DIMENSION])[index];
}
|
static Integer function(int index, Object[] row) { Integer[] dimensions = (Integer[]) row[WriteStepRowUtil.DICTIONARY_DIMENSION]; return dimensions[index]; }
|
/**
* Method to get the required Dimension from obj []
*
* @param index
* @param row
* @return
*/
|
Method to get the required Dimension from obj []
|
getDimension
|
{
"repo_name": "ksimar/incubator-carbondata",
"path": "processing/src/main/java/org/apache/carbondata/processing/util/NonDictionaryUtil.java",
"license": "apache-2.0",
"size": 5339
}
|
[
"org.apache.carbondata.processing.newflow.row.WriteStepRowUtil"
] |
import org.apache.carbondata.processing.newflow.row.WriteStepRowUtil;
|
import org.apache.carbondata.processing.newflow.row.*;
|
[
"org.apache.carbondata"
] |
org.apache.carbondata;
| 949,156
|
/**
 * Amount of users.
 *
 * @return cntUsers
 */
@Schema(required = true, description = "Amount of users")
public Integer getCntUsers() {
return cntUsers;
}
|
@Schema(required = true, description = STR) Integer function() { return cntUsers; }
|
/**
* Amount of users
* @return cntUsers
**/
|
Amount of users
|
getCntUsers
|
{
"repo_name": "iterate-ch/cyberduck",
"path": "dracoon/src/main/java/ch/cyberduck/core/sds/io/swagger/client/model/Group.java",
"license": "gpl-3.0",
"size": 7176
}
|
[
"io.swagger.v3.oas.annotations.media.Schema"
] |
import io.swagger.v3.oas.annotations.media.Schema;
|
import io.swagger.v3.oas.annotations.media.*;
|
[
"io.swagger.v3"
] |
io.swagger.v3;
| 935,589
|
/**
 * Passing a single property name to {@code chainedComparator} should
 * produce a {@link PropertyComparator}.
 */
@Test
public void testPropertyNameAndOrders(){
    Comparator<Object> comparator = BeanComparatorUtil.chainedComparator("name");
    assertTrue(ClassUtil.isInstance(comparator, PropertyComparator.class));
}
|
void function(){ Comparator<Object> chainedComparator = BeanComparatorUtil.chainedComparator("name"); assertTrue(ClassUtil.isInstance(chainedComparator, PropertyComparator.class)); }
|
/**
* Test property name and orders.
*/
|
Test property name and orders
|
testPropertyNameAndOrders
|
{
"repo_name": "venusdrogon/feilong-core",
"path": "src/test/java/com/feilong/core/util/comparator/beancomparatorutiltest/ChainedComparatorTest.java",
"license": "apache-2.0",
"size": 2941
}
|
[
"com.feilong.core.lang.ClassUtil",
"com.feilong.core.util.comparator.BeanComparatorUtil",
"com.feilong.core.util.comparator.PropertyComparator",
"java.util.Comparator",
"org.junit.Assert"
] |
import com.feilong.core.lang.ClassUtil; import com.feilong.core.util.comparator.BeanComparatorUtil; import com.feilong.core.util.comparator.PropertyComparator; import java.util.Comparator; import org.junit.Assert;
|
import com.feilong.core.lang.*; import com.feilong.core.util.comparator.*; import java.util.*; import org.junit.*;
|
[
"com.feilong.core",
"java.util",
"org.junit"
] |
com.feilong.core; java.util; org.junit;
| 2,478,331
|
/**
 * Paints a representation of the table that was set in installUI().
 *
 * <p>When the pane is not frozen, the whole clip region is painted in one
 * pass. When it is frozen, the viewport is split into four quadrants around
 * the division point and each quadrant is painted separately, after which
 * the two frozen divider lines are drawn.</p>
 */
@Override
public void paint(Graphics g, JComponent c) {
Rectangle clip = g.getClipBounds();
Rectangle bounds = table.getBounds();
// account for the fact that the graphics has already been translated
// into the table's bounds
bounds.x = bounds.y = 0;
GridSheetPane gridSheetPane = getGridSheetPane();
// Nothing to paint for an empty grid or a clip outside the table.
if (gridSheetPane.getRowCount() <= 0 || gridSheetPane.getColumnCount() <= 0
|| !bounds.intersects(clip)) {
// paintDropLines(g);
return;
}
// Walk up the component hierarchy to find out whether we sit inside a
// frozen GridSheetScrollPane.
boolean isFrozen = false;
GridSheetScrollPane scrollPane = null;
Container parent = table.getParent(); // should be viewport
if (parent != null) {
parent = parent.getParent(); // should be the scrollpane
if (parent != null && parent instanceof GridSheetScrollPane) {
scrollPane = (GridSheetScrollPane) parent;
isFrozen = scrollPane.isFrozen();
}
}
boolean ltr = table.getComponentOrientation().isLeftToRight();
if (!isFrozen) {
paintRuleAndCells(g, clip, ltr, 0, 0);
} else {
// frozen
Rectangle viewportRect = scrollPane.getViewport().getBounds();
Point frozenPoint = scrollPane.getFrozenPoint();
Point dPoint = scrollPane.getDivisionPoint();
Point scrolledDistance = scrollPane.getViewport().getViewPosition();
int frozenAreaWidth = dPoint.x - frozenPoint.x;
int frozenAreaHeight = dPoint.y - frozenPoint.y;
// Compute the four quadrant rectangles around the division point:
// upper-left stays fixed; the other three shift with the scroll offset.
Rectangle upperLeftRect =
new Rectangle(frozenPoint.x, frozenPoint.y, frozenAreaWidth, frozenAreaHeight);
Rectangle lowerRightRect = new Rectangle(dPoint.x + scrolledDistance.x + frozenPoint.x,
dPoint.y + scrolledDistance.y + frozenPoint.y, viewportRect.width - frozenAreaWidth,
viewportRect.height - frozenAreaHeight);
Rectangle upperRightRect =
new Rectangle(lowerRightRect.x, upperLeftRect.y, lowerRightRect.width, frozenAreaHeight);
Rectangle lowerLeftRect =
new Rectangle(upperLeftRect.x, lowerRightRect.y, frozenAreaWidth, lowerRightRect.height);
// scrolledDistance.x -= frozenPoint.x;
// scrolledDistance.y -= frozenPoint.y;
// Paint cells.
paintFrozenGridAndCells(g, lowerRightRect, clip, ltr, scrolledDistance, frozenPoint, false,
false);
paintFrozenGridAndCells(g, upperRightRect, clip, ltr, scrolledDistance, frozenPoint, false,
true);
paintFrozenGridAndCells(g, lowerLeftRect, clip, ltr, scrolledDistance, frozenPoint, true,
false);
paintFrozenGridAndCells(g, upperLeftRect, clip, ltr, scrolledDistance, frozenPoint, true,
true);
// clip.x -= frozenPoint.x;
// clip.y -= frozenPoint.y;
// paint line.
// Draw the horizontal and vertical divider lines just inside the
// division point, spanning the whole viewport.
Rectangle horizontalLineRect = new Rectangle(scrolledDistance.x,
dPoint.y + scrolledDistance.y - 1 - frozenPoint.y, viewportRect.width, 1);
Rectangle verticalLineRect = new Rectangle(dPoint.x + scrolledDistance.x - 1 - frozenPoint.x,
scrolledDistance.y, 1, viewportRect.height);
paintFrozenLine(g, clip, horizontalLineRect);
paintFrozenLine(g, clip, verticalLineRect);
// g.drawLine(clip.x, clip.y, clip.x + clip.width, clip.y +
// clip.height);
}
}
// private void paintDropLines(Graphics g) {
// GridSheetTable.DropLocation loc = table.getDropLocation();
// if (loc == null) {
// return;
// }
//
// Color color = UIManager.getColor("Table.dropLineColor");
// Color shortColor = UIManager.getColor("Table.dropLineShortColor");
// if (color == null && shortColor == null) {
// return;
// }
//
// Rectangle rect;
//
// rect = getHDropLineRect(loc);
// if (rect != null) {
// int x = rect.x;
// int w = rect.width;
// if (color != null) {
// extendRect(rect, true);
// g.setColor(color);
// g.fillRect(rect.x, rect.y, rect.width, rect.height);
// }
// if (!loc.isInsertColumn() && shortColor != null) {
// g.setColor(shortColor);
// g.fillRect(x, rect.y, w, rect.height);
// }
// }
//
// rect = getVDropLineRect(loc);
// if (rect != null) {
// int y = rect.y;
// int h = rect.height;
// if (color != null) {
// extendRect(rect, false);
// g.setColor(color);
// g.fillRect(rect.x, rect.y, rect.width, rect.height);
// }
// if (!loc.isInsertRow() && shortColor != null) {
// g.setColor(shortColor);
// g.fillRect(rect.x, y, rect.width, h);
// }
// }
// }
|
void function(Graphics g, JComponent c) { Rectangle clip = g.getClipBounds(); Rectangle bounds = table.getBounds(); bounds.x = bounds.y = 0; GridSheetPane gridSheetPane = getGridSheetPane(); if (gridSheetPane.getRowCount() <= 0 gridSheetPane.getColumnCount() <= 0 !bounds.intersects(clip)) { return; } boolean isFrozen = false; GridSheetScrollPane scrollPane = null; Container parent = table.getParent(); if (parent != null) { parent = parent.getParent(); if (parent != null && parent instanceof GridSheetScrollPane) { scrollPane = (GridSheetScrollPane) parent; isFrozen = scrollPane.isFrozen(); } } boolean ltr = table.getComponentOrientation().isLeftToRight(); if (!isFrozen) { paintRuleAndCells(g, clip, ltr, 0, 0); } else { Rectangle viewportRect = scrollPane.getViewport().getBounds(); Point frozenPoint = scrollPane.getFrozenPoint(); Point dPoint = scrollPane.getDivisionPoint(); Point scrolledDistance = scrollPane.getViewport().getViewPosition(); int frozenAreaWidth = dPoint.x - frozenPoint.x; int frozenAreaHeight = dPoint.y - frozenPoint.y; Rectangle upperLeftRect = new Rectangle(frozenPoint.x, frozenPoint.y, frozenAreaWidth, frozenAreaHeight); Rectangle lowerRightRect = new Rectangle(dPoint.x + scrolledDistance.x + frozenPoint.x, dPoint.y + scrolledDistance.y + frozenPoint.y, viewportRect.width - frozenAreaWidth, viewportRect.height - frozenAreaHeight); Rectangle upperRightRect = new Rectangle(lowerRightRect.x, upperLeftRect.y, lowerRightRect.width, frozenAreaHeight); Rectangle lowerLeftRect = new Rectangle(upperLeftRect.x, lowerRightRect.y, frozenAreaWidth, lowerRightRect.height); paintFrozenGridAndCells(g, lowerRightRect, clip, ltr, scrolledDistance, frozenPoint, false, false); paintFrozenGridAndCells(g, upperRightRect, clip, ltr, scrolledDistance, frozenPoint, false, true); paintFrozenGridAndCells(g, lowerLeftRect, clip, ltr, scrolledDistance, frozenPoint, true, false); paintFrozenGridAndCells(g, upperLeftRect, clip, ltr, scrolledDistance, frozenPoint, true, true); 
Rectangle horizontalLineRect = new Rectangle(scrolledDistance.x, dPoint.y + scrolledDistance.y - 1 - frozenPoint.y, viewportRect.width, 1); Rectangle verticalLineRect = new Rectangle(dPoint.x + scrolledDistance.x - 1 - frozenPoint.x, scrolledDistance.y, 1, viewportRect.height); paintFrozenLine(g, clip, horizontalLineRect); paintFrozenLine(g, clip, verticalLineRect); } }
|
/**
* Paint a representation of the <code>table</code> instance that was set in installUI().
*/
|
Paint a representation of the <code>table</code> instance that was set in installUI()
|
paint
|
{
"repo_name": "kohii/smoothcsv",
"path": "smoothcsv-swing/src/main/java/com/smoothcsv/swing/gridsheet/ui/GridSheetTableNoActionUI.java",
"license": "apache-2.0",
"size": 51213
}
|
[
"com.smoothcsv.swing.gridsheet.GridSheetPane",
"com.smoothcsv.swing.gridsheet.GridSheetScrollPane",
"java.awt.Container",
"java.awt.Graphics",
"java.awt.Point",
"java.awt.Rectangle",
"javax.swing.JComponent"
] |
import com.smoothcsv.swing.gridsheet.GridSheetPane; import com.smoothcsv.swing.gridsheet.GridSheetScrollPane; import java.awt.Container; import java.awt.Graphics; import java.awt.Point; import java.awt.Rectangle; import javax.swing.JComponent;
|
import com.smoothcsv.swing.gridsheet.*; import java.awt.*; import javax.swing.*;
|
[
"com.smoothcsv.swing",
"java.awt",
"javax.swing"
] |
com.smoothcsv.swing; java.awt; javax.swing;
| 246,126
|
/**
 * Syncs an inode with the UFS.
 *
 * @param rpcContext the rpc context
 * @param inodePath the Alluxio inode path to sync with UFS; the caller must
 *        hold a WRITE_EDGE lock on it
 * @param syncDescendantType how to sync descendants
 * @param statusCache a pre-populated cache of ufs statuses that can be used
 *        to construct fingerprints
 * @return the result of the sync, including whether the inode was deleted
 *         and which paths still require a metadata load
 */
private SyncResult syncInodeMetadata(RpcContext rpcContext, LockedInodePath inodePath,
DescendantType syncDescendantType, Map<AlluxioURI, UfsStatus> statusCache)
throws FileDoesNotExistException, InvalidPathException, IOException, AccessControlException {
Preconditions.checkState(inodePath.getLockPattern() == LockPattern.WRITE_EDGE);
// Set to true if the given inode was deleted.
boolean deletedInode = false;
// Set of paths to sync
Set<String> pathsToLoad = new HashSet<>();
LOG.debug("Syncing inode metadata {}", inodePath.getUri());
// The options for deleting.
DeleteOptions syncDeleteOptions =
DeleteOptions.defaults().setRecursive(true).setAlluxioOnly(true).setUnchecked(true);
// The requested path already exists in Alluxio.
InodeView inode = inodePath.getInode();
if (inode instanceof InodeFileView && !((InodeFileView) inode).isCompleted()) {
// Do not sync an incomplete file, since the UFS file is expected to not exist.
return SyncResult.defaults();
}
Optional<Scoped> persistingLock = mInodeLockManager.tryAcquirePersistingLock(inode.getId());
if (!persistingLock.isPresent()) {
// Do not sync a file in the process of being persisted, since the UFS file is being
// written.
return SyncResult.defaults();
}
// The lock was only probed to rule out an in-flight persist; release it.
persistingLock.get().close();
MountTable.Resolution resolution = mMountTable.resolve(inodePath.getUri());
AlluxioURI ufsUri = resolution.getUri();
try (CloseableResource<UnderFileSystem> ufsResource = resolution.acquireUfsResource()) {
UnderFileSystem ufs = ufsResource.get();
String ufsFingerprint;
Fingerprint ufsFpParsed;
// Prefer the cached UFS status; fall back to asking the UFS directly.
UfsStatus cachedStatus = statusCache.get(inodePath.getUri());
if (cachedStatus == null) {
// TODO(david): change the interface so that getFingerprint returns a parsed fingerprint
ufsFingerprint = ufs.getFingerprint(ufsUri.toString());
ufsFpParsed = Fingerprint.parse(ufsFingerprint);
} else {
Pair<AccessControlList, DefaultAccessControlList> aclPair
= ufs.getAclPair(ufsUri.toString());
if (aclPair == null || aclPair.getFirst() == null || !aclPair.getFirst().hasExtended()) {
ufsFpParsed = Fingerprint.create(ufs.getUnderFSType(), cachedStatus);
ufsFingerprint = ufsFpParsed.serialize();
} else {
// Include the extended ACL in the fingerprint when one exists.
ufsFpParsed = Fingerprint.create(ufs.getUnderFSType(), cachedStatus,
aclPair.getFirst());
ufsFingerprint = ufsFpParsed.serialize();
}
}
boolean containsMountPoint = mMountTable.containsMountPoint(inodePath.getUri());
UfsSyncUtils.SyncPlan syncPlan =
UfsSyncUtils.computeSyncPlan(inode, ufsFpParsed, containsMountPoint);
if (syncPlan.toUpdateMetaData()) {
// UpdateMetadata is used when a file or a directory only had metadata change.
// It works by calling SetAttributeInternal on the inodePath.
if (ufsFpParsed.isValid()) {
short mode = Short.parseShort(ufsFpParsed.getTag(Tag.MODE));
SetAttributeOptions options =
SetAttributeOptions.defaults().setOwner(ufsFpParsed.getTag(Tag.OWNER))
.setGroup(ufsFpParsed.getTag(Tag.GROUP))
.setMode(mode)
.setUfsFingerprint(ufsFingerprint);
long opTimeMs = System.currentTimeMillis();
setAttributeSingleFile(rpcContext, inodePath, false, opTimeMs, options);
}
}
if (syncPlan.toDelete()) {
try {
deleteInternal(rpcContext, inodePath, syncDeleteOptions);
deletedInode = true;
} catch (DirectoryNotEmptyException | IOException e) {
// Should not happen, since it is an unchecked delete.
LOG.error("Unexpected error for unchecked delete.", e);
}
}
if (syncPlan.toLoadMetadata()) {
AlluxioURI mountUri = new AlluxioURI(mMountTable.getMountPoint(inodePath.getUri()));
pathsToLoad.add(mountUri.getPath());
}
if (syncPlan.toSyncChildren() && inode instanceof InodeDirectory
&& syncDescendantType != DescendantType.NONE) {
InodeDirectoryView inodeDir = (InodeDirectoryView) inode;
// maps child name to inode
Map<String, InodeView> inodeChildren = new HashMap<>();
for (InodeView child : inodeDir.getChildren()) {
inodeChildren.put(child.getName(), child);
}
UfsStatus[] listStatus = ufs.listStatus(ufsUri.toString());
// Iterate over UFS listings and process UFS children.
if (listStatus != null) {
for (UfsStatus ufsChildStatus : listStatus) {
if (!inodeChildren.containsKey(ufsChildStatus.getName()) && !PathUtils
.isTemporaryFileName(ufsChildStatus.getName())) {
// Ufs child exists, but Alluxio child does not. Must load metadata.
AlluxioURI mountUri = new AlluxioURI(mMountTable.getMountPoint(inodePath.getUri()));
pathsToLoad.add(mountUri.getPath());
break;
}
}
}
// Iterate over Alluxio children and process persisted children.
for (Map.Entry<String, InodeView> inodeEntry : inodeChildren.entrySet()) {
if (!inodeEntry.getValue().isPersisted()) {
// Ignore non-persisted inodes.
continue;
}
// Technically we don't need to lock here since inodePath is already write-locked. We can
// improve this by implementing a way to traverse an inode path without locking.
try (LockedInodePath descendant = inodePath.lockDescendant(
inodePath.getUri().joinUnsafe(inodeEntry.getKey()), LockPattern.WRITE_EDGE)) {
// Recursively sync children
if (syncDescendantType != DescendantType.ALL) {
syncDescendantType = DescendantType.NONE;
}
SyncResult syncResult =
syncInodeMetadata(rpcContext, descendant, syncDescendantType, statusCache);
pathsToLoad.addAll(syncResult.getPathsToLoad());
}
}
}
}
return new SyncResult(deletedInode, pathsToLoad);
}
|
SyncResult function(RpcContext rpcContext, LockedInodePath inodePath, DescendantType syncDescendantType, Map<AlluxioURI, UfsStatus> statusCache) throws FileDoesNotExistException, InvalidPathException, IOException, AccessControlException { Preconditions.checkState(inodePath.getLockPattern() == LockPattern.WRITE_EDGE); boolean deletedInode = false; Set<String> pathsToLoad = new HashSet<>(); LOG.debug(STR, inodePath.getUri()); DeleteOptions syncDeleteOptions = DeleteOptions.defaults().setRecursive(true).setAlluxioOnly(true).setUnchecked(true); InodeView inode = inodePath.getInode(); if (inode instanceof InodeFileView && !((InodeFileView) inode).isCompleted()) { return SyncResult.defaults(); } Optional<Scoped> persistingLock = mInodeLockManager.tryAcquirePersistingLock(inode.getId()); if (!persistingLock.isPresent()) { return SyncResult.defaults(); } persistingLock.get().close(); MountTable.Resolution resolution = mMountTable.resolve(inodePath.getUri()); AlluxioURI ufsUri = resolution.getUri(); try (CloseableResource<UnderFileSystem> ufsResource = resolution.acquireUfsResource()) { UnderFileSystem ufs = ufsResource.get(); String ufsFingerprint; Fingerprint ufsFpParsed; UfsStatus cachedStatus = statusCache.get(inodePath.getUri()); if (cachedStatus == null) { ufsFingerprint = ufs.getFingerprint(ufsUri.toString()); ufsFpParsed = Fingerprint.parse(ufsFingerprint); } else { Pair<AccessControlList, DefaultAccessControlList> aclPair = ufs.getAclPair(ufsUri.toString()); if (aclPair == null aclPair.getFirst() == null !aclPair.getFirst().hasExtended()) { ufsFpParsed = Fingerprint.create(ufs.getUnderFSType(), cachedStatus); ufsFingerprint = ufsFpParsed.serialize(); } else { ufsFpParsed = Fingerprint.create(ufs.getUnderFSType(), cachedStatus, aclPair.getFirst()); ufsFingerprint = ufsFpParsed.serialize(); } } boolean containsMountPoint = mMountTable.containsMountPoint(inodePath.getUri()); UfsSyncUtils.SyncPlan syncPlan = UfsSyncUtils.computeSyncPlan(inode, ufsFpParsed, 
containsMountPoint); if (syncPlan.toUpdateMetaData()) { if (ufsFpParsed.isValid()) { short mode = Short.parseShort(ufsFpParsed.getTag(Tag.MODE)); SetAttributeOptions options = SetAttributeOptions.defaults().setOwner(ufsFpParsed.getTag(Tag.OWNER)) .setGroup(ufsFpParsed.getTag(Tag.GROUP)) .setMode(mode) .setUfsFingerprint(ufsFingerprint); long opTimeMs = System.currentTimeMillis(); setAttributeSingleFile(rpcContext, inodePath, false, opTimeMs, options); } } if (syncPlan.toDelete()) { try { deleteInternal(rpcContext, inodePath, syncDeleteOptions); deletedInode = true; } catch (DirectoryNotEmptyException IOException e) { LOG.error(STR, e); } } if (syncPlan.toLoadMetadata()) { AlluxioURI mountUri = new AlluxioURI(mMountTable.getMountPoint(inodePath.getUri())); pathsToLoad.add(mountUri.getPath()); } if (syncPlan.toSyncChildren() && inode instanceof InodeDirectory && syncDescendantType != DescendantType.NONE) { InodeDirectoryView inodeDir = (InodeDirectoryView) inode; Map<String, InodeView> inodeChildren = new HashMap<>(); for (InodeView child : inodeDir.getChildren()) { inodeChildren.put(child.getName(), child); } UfsStatus[] listStatus = ufs.listStatus(ufsUri.toString()); if (listStatus != null) { for (UfsStatus ufsChildStatus : listStatus) { if (!inodeChildren.containsKey(ufsChildStatus.getName()) && !PathUtils .isTemporaryFileName(ufsChildStatus.getName())) { AlluxioURI mountUri = new AlluxioURI(mMountTable.getMountPoint(inodePath.getUri())); pathsToLoad.add(mountUri.getPath()); break; } } } for (Map.Entry<String, InodeView> inodeEntry : inodeChildren.entrySet()) { if (!inodeEntry.getValue().isPersisted()) { continue; } try (LockedInodePath descendant = inodePath.lockDescendant( inodePath.getUri().joinUnsafe(inodeEntry.getKey()), LockPattern.WRITE_EDGE)) { if (syncDescendantType != DescendantType.ALL) { syncDescendantType = DescendantType.NONE; } SyncResult syncResult = syncInodeMetadata(rpcContext, descendant, syncDescendantType, statusCache); 
pathsToLoad.addAll(syncResult.getPathsToLoad()); } } } } return new SyncResult(deletedInode, pathsToLoad); }
|
/**
* Syncs an inode with the UFS.
*
* @param rpcContext the rpc context
* @param inodePath the Alluxio inode path to sync with UFS
* @param syncDescendantType how to sync descendants
* @param statusCache a pre-populated cache of ufs statuses that can be used to construct
* fingerprint
* @return the result of the sync, including if the inode was deleted, and if further load
* metadata is required
*/
|
Syncs an inode with the UFS
|
syncInodeMetadata
|
{
"repo_name": "Reidddddd/alluxio",
"path": "core/server/master/src/main/java/alluxio/master/file/DefaultFileSystemMaster.java",
"license": "apache-2.0",
"size": 188561
}
|
[
"com.google.common.base.Preconditions",
"java.io.IOException",
"java.util.HashMap",
"java.util.HashSet",
"java.util.Map",
"java.util.Optional",
"java.util.Set"
] |
import com.google.common.base.Preconditions; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Optional; import java.util.Set;
|
import com.google.common.base.*; import java.io.*; import java.util.*;
|
[
"com.google.common",
"java.io",
"java.util"
] |
com.google.common; java.io; java.util;
| 2,332,641
|
public Observable<ServiceResponse<Page<VirtualNetworkGatewayInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) {
if (nextPageLink == null) {
throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
}
|
Observable<ServiceResponse<Page<VirtualNetworkGatewayInner>>> function(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException(STR); }
|
/**
* Gets all virtual network gateways by resource group.
*
ServiceResponse<PageImpl<VirtualNetworkGatewayInner>> * @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<VirtualNetworkGatewayInner> object wrapped in {@link ServiceResponse} if successful.
*/
|
Gets all virtual network gateways by resource group
|
listByResourceGroupNextSinglePageAsync
|
{
"repo_name": "navalev/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_08_01/src/main/java/com/microsoft/azure/management/network/v2019_08_01/implementation/VirtualNetworkGatewaysInner.java",
"license": "mit",
"size": 283551
}
|
[
"com.microsoft.azure.Page",
"com.microsoft.rest.ServiceResponse"
] |
import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse;
|
import com.microsoft.azure.*; import com.microsoft.rest.*;
|
[
"com.microsoft.azure",
"com.microsoft.rest"
] |
com.microsoft.azure; com.microsoft.rest;
| 1,140,041
|
/**
 * Convenience overload: lists the first 10 testcase judges starting from the
 * first one, by delegating to the three-argument getJudges overload.
 *
 * @return API response
 * @throws ClientException propagated from the delegate call
 * @throws ConnectionException propagated from the delegate call
 */
public JsonObject getJudges() throws ClientException, ConnectionException
{
return getJudges(10, 0, "testcase");
}
|
JsonObject function() throws ClientException, ConnectionException { return getJudges(10, 0, STR); }
|
/**
* List of first 10 testcase judges starting from the first one
*
* @throws NotAuthorizedException for invalid access token
* @throws ClientException
* @throws ConnectionException
* @return API response
*/
|
List of first 10 testcase judges starting from the first one
|
getJudges
|
{
"repo_name": "sphere-engine/java-client",
"path": "src/com/SphereEngine/Api/ProblemsClientV3.java",
"license": "apache-2.0",
"size": 46337
}
|
[
"com.google.gson.JsonObject"
] |
import com.google.gson.JsonObject;
|
import com.google.gson.*;
|
[
"com.google.gson"
] |
com.google.gson;
| 2,243,608
|
BaseModel item = null;
if(itemType.equals(EVENT)) {
item = Dhis2.getInstance().getDataValueController().getEvent(itemId);
} else if (itemType.equals(ENROLLMENT)) {
item = Dhis2.getInstance().getDataValueController().getEnrollment(itemId);
} else if (itemType.equals(TRACKEDENTITYINSTANCE)) {
item = Dhis2.getInstance().getDataValueController().getTrackedEntityInstance(itemId);
}
return item;
}
|
BaseModel item = null; if(itemType.equals(EVENT)) { item = Dhis2.getInstance().getDataValueController().getEvent(itemId); } else if (itemType.equals(ENROLLMENT)) { item = Dhis2.getInstance().getDataValueController().getEnrollment(itemId); } else if (itemType.equals(TRACKEDENTITYINSTANCE)) { item = Dhis2.getInstance().getDataValueController().getTrackedEntityInstance(itemId); } return item; }
|
/**
* Returns the item for the given FailedItem. Can be cast to either of the model types
* @return
*/
|
Returns the item for the given FailedItem. Can be cast to either of the model types
|
getItem
|
{
"repo_name": "erlingfjelstad/SA-DHIS-SDK",
"path": "app/src/main/java/org/hisp/dhis/android/sdk/persistence/models/FailedItem.java",
"license": "bsd-3-clause",
"size": 4617
}
|
[
"com.raizlabs.android.dbflow.structure.BaseModel",
"org.hisp.dhis.android.sdk.controllers.Dhis2"
] |
import com.raizlabs.android.dbflow.structure.BaseModel; import org.hisp.dhis.android.sdk.controllers.Dhis2;
|
import com.raizlabs.android.dbflow.structure.*; import org.hisp.dhis.android.sdk.controllers.*;
|
[
"com.raizlabs.android",
"org.hisp.dhis"
] |
com.raizlabs.android; org.hisp.dhis;
| 677,254
|
/**
 * Throw a {@link MetricsException} if the given property is not set.
 *
 * @param key the key to validate
 */
private void checkIfPropertyExists(String key) {
  if (properties.containsKey(key)) {
    return;
  }
  throw new MetricsException("Metrics2 configuration is missing " + key
      + " property");
}
|
void function(String key) { if (!properties.containsKey(key)) { throw new MetricsException(STR + key + STR); } }
|
/**
* Throw a {@link MetricsException} if the given property is not set.
*
* @param key the key to validate
*/
|
Throw a <code>MetricsException</code> if the given property is not set
|
checkIfPropertyExists
|
{
"repo_name": "GeLiXin/hadoop",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/RollingFileSystemSink.java",
"license": "apache-2.0",
"size": 35623
}
|
[
"org.apache.hadoop.metrics2.MetricsException"
] |
import org.apache.hadoop.metrics2.MetricsException;
|
import org.apache.hadoop.metrics2.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 1,389,281
|
// </editor-fold>
/**
 * Factory method that creates a new investigation whose unique identifier is
 * produced by UUID.randomUUID().toString(); all contained collections start
 * out empty.
 *
 * @return The new investigation.
 */
public static Investigation factoryNewInvestigation() {
  Investigation investigation = new Investigation();
  investigation.setUniqueIdentifier(UUID.randomUUID().toString());
  investigation.setDataSets(new HashSet<DigitalObject>());
  investigation.setParticipants(new HashSet<Participant>());
  investigation.setMetaDataSchema(new HashSet<MetaDataSchema>());
  return investigation;
}
|
static Investigation function() { Investigation result = new Investigation(); result.setUniqueIdentifier(UUID.randomUUID().toString()); result.setDataSets(new HashSet<DigitalObject>()); result.setParticipants(new HashSet<Participant>()); result.setMetaDataSchema(new HashSet<MetaDataSchema>()); return result; }
|
/**
 * Factory method that creates a new investigation with an auto-generated unique
 * identifier. The identifier is generated using UUID.randomUUID().toString().
*
* @return The new investigation.
*/
|
Factory a new investigation with an auto-generated unique identifier. The identifier is generated using UUID.randomUUID().toString()
|
factoryNewInvestigation
|
{
"repo_name": "kit-data-manager/base",
"path": "MetaDataManagement/MDM-BaseMetaData/src/main/java/edu/kit/dama/mdm/base/Investigation.java",
"license": "apache-2.0",
"size": 23621
}
|
[
"java.util.HashSet",
"java.util.UUID"
] |
import java.util.HashSet; import java.util.UUID;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,829,882
|
/**
 * Creates a default, index-independent instance of the analysis component.
 * Fails fast when the factory requires analysis settings, since no index
 * settings are available in this global scope.
 *
 * @param environment the node environment used to load resources
 * @param name        the name of the analysis component
 * @return a new provider instance bound to the placeholder (NA) index settings
 * @throws IOException if loading resources fails
 * @throws IllegalArgumentException if the provider requires analysis settings
 */
default T get(Environment environment, String name) throws IOException {
if (requiresAnalysisSettings()) {
throw new IllegalArgumentException("Analysis settings required - can't instantiate analysis factory");
}
return get(NA_INDEX_SETTINGS, environment, name, NA_INDEX_SETTINGS.getSettings());
}
|
default T get(Environment environment, String name) throws IOException { if (requiresAnalysisSettings()) { throw new IllegalArgumentException(STR); } return get(NA_INDEX_SETTINGS, environment, name, NA_INDEX_SETTINGS.getSettings()); }
|
/**
 * Creates a new global-scope analysis provider without index-specific settings nor settings for the provider itself.
* This can be used to get a default instance of an analysis factory without binding to an index.
*
* @param environment the nodes environment to load resources from persistent storage
* @param name the name of the analysis component
* @return a new provider instance
* @throws IOException if an {@link IOException} occurs
* @throws IllegalArgumentException if the provider requires analysis settings ie. if {@link #requiresAnalysisSettings()} returns
* <code>true</code>
*/
|
Creates a new global scope analysis provider without index specific settings not settings for the provider itself. This can be used to get a default instance of an analysis factory without binding to an index
|
get
|
{
"repo_name": "jimczi/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java",
"license": "apache-2.0",
"size": 23082
}
|
[
"java.io.IOException",
"org.elasticsearch.env.Environment"
] |
import java.io.IOException; import org.elasticsearch.env.Environment;
|
import java.io.*; import org.elasticsearch.env.*;
|
[
"java.io",
"org.elasticsearch.env"
] |
java.io; org.elasticsearch.env;
| 3,710
|
logger.debug("TaskMapperContext {} in UserDefinedTaskMapper", taskMapperContext);
WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
String taskId = taskMapperContext.getTaskId();
int retryCount = taskMapperContext.getRetryCount();
TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition())
.orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName()))
.orElseThrow(() -> {
String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName());
return new TerminateWorkflowException(reason);
}));
Map<String, Object> input = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(), workflowInstance, taskId, taskDefinition);
Task userDefinedTask = new Task();
userDefinedTask.setTaskType(taskToSchedule.getType());
userDefinedTask.setTaskDefName(taskToSchedule.getName());
userDefinedTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
userDefinedTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
userDefinedTask.setWorkflowType(workflowInstance.getWorkflowName());
userDefinedTask.setCorrelationId(workflowInstance.getCorrelationId());
userDefinedTask.setScheduledTime(System.currentTimeMillis());
userDefinedTask.setTaskId(taskId);
userDefinedTask.setInputData(input);
userDefinedTask.setStatus(Task.Status.SCHEDULED);
userDefinedTask.setRetryCount(retryCount);
userDefinedTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
userDefinedTask.setWorkflowTask(taskToSchedule);
userDefinedTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
userDefinedTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
return Collections.singletonList(userDefinedTask);
}
|
logger.debug(STR, taskMapperContext); WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule(); Workflow workflowInstance = taskMapperContext.getWorkflowInstance(); String taskId = taskMapperContext.getTaskId(); int retryCount = taskMapperContext.getRetryCount(); TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition()) .orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) .orElseThrow(() -> { String reason = String.format(STR, taskToSchedule.getName()); return new TerminateWorkflowException(reason); })); Map<String, Object> input = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(), workflowInstance, taskId, taskDefinition); Task userDefinedTask = new Task(); userDefinedTask.setTaskType(taskToSchedule.getType()); userDefinedTask.setTaskDefName(taskToSchedule.getName()); userDefinedTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); userDefinedTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); userDefinedTask.setWorkflowType(workflowInstance.getWorkflowName()); userDefinedTask.setCorrelationId(workflowInstance.getCorrelationId()); userDefinedTask.setScheduledTime(System.currentTimeMillis()); userDefinedTask.setTaskId(taskId); userDefinedTask.setInputData(input); userDefinedTask.setStatus(Task.Status.SCHEDULED); userDefinedTask.setRetryCount(retryCount); userDefinedTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay()); userDefinedTask.setWorkflowTask(taskToSchedule); userDefinedTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency()); userDefinedTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds()); return Collections.singletonList(userDefinedTask); }
|
/**
* This method maps a {@link WorkflowTask} of type {@link TaskType#USER_DEFINED}
* to a {@link Task} in a {@link Task.Status#SCHEDULED} state
*
* @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
* @return a List with just one User defined task
* @throws TerminateWorkflowException In case if the task definition does not exist
*/
|
This method maps a <code>WorkflowTask</code> of type <code>TaskType#USER_DEFINED</code> to a <code>Task</code> in a <code>Task.Status#SCHEDULED</code> state
|
getMappedTasks
|
{
"repo_name": "grfeng/conductor",
"path": "core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java",
"license": "apache-2.0",
"size": 4805
}
|
[
"com.netflix.conductor.common.metadata.tasks.Task",
"com.netflix.conductor.common.metadata.tasks.TaskDef",
"com.netflix.conductor.common.metadata.workflow.WorkflowTask",
"com.netflix.conductor.common.run.Workflow",
"com.netflix.conductor.core.execution.TerminateWorkflowException",
"java.util.Collections",
"java.util.Map",
"java.util.Optional"
] |
import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.TerminateWorkflowException; import java.util.Collections; import java.util.Map; import java.util.Optional;
|
import com.netflix.conductor.common.metadata.tasks.*; import com.netflix.conductor.common.metadata.workflow.*; import com.netflix.conductor.common.run.*; import com.netflix.conductor.core.execution.*; import java.util.*;
|
[
"com.netflix.conductor",
"java.util"
] |
com.netflix.conductor; java.util;
| 2,191,881
|
public Collection loadImages(long datasetId)
throws DSAccessException, DSOutOfServiceException;
|
Collection function(long datasetId) throws DSAccessException, DSOutOfServiceException;
|
/**
* Loads all the images contained in the specified dataset.
*
* @param datasetId The id of the dataset.
* @return See above.
 * @throws DSOutOfServiceException If the connection is broken, or not logged in
* @throws DSAccessException If an error occurred while trying to
* retrieve data from OMERO service.
*/
|
Loads all the images contained in the specified dataset
|
loadImages
|
{
"repo_name": "chris-allan/openmicroscopy",
"path": "components/tools/OmeroImageJ/Omero_ImageJ/src/ome/ij/data/DataService.java",
"license": "gpl-2.0",
"size": 4031
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,302,681
|
public void setUsername(String username) {
boolean is16bit = StringUtil.hasMultibyte(username);
int encodedByteCount = 3 + username.length() * (is16bit ? 2 : 1);
int paddingSize = DATA_SIZE - encodedByteCount;
if (paddingSize < 0) {
throw new IllegalArgumentException("Name is too long: " + username);
}
field_1_username = username;
}
|
void function(String username) { boolean is16bit = StringUtil.hasMultibyte(username); int encodedByteCount = 3 + username.length() * (is16bit ? 2 : 1); int paddingSize = DATA_SIZE - encodedByteCount; if (paddingSize < 0) { throw new IllegalArgumentException(STR + username); } field_1_username = username; }
|
/**
* set the username for the user that created the report. HSSF uses the
* logged in user.
*
* @param username of the user who is logged in (probably "tomcat" or "apache")
*/
|
set the username for the user that created the report. HSSF uses the logged in user
|
setUsername
|
{
"repo_name": "lvweiwolf/poi-3.16",
"path": "src/java/org/apache/poi/hssf/record/WriteAccessRecord.java",
"license": "apache-2.0",
"size": 4889
}
|
[
"org.apache.poi.util.StringUtil"
] |
import org.apache.poi.util.StringUtil;
|
import org.apache.poi.util.*;
|
[
"org.apache.poi"
] |
org.apache.poi;
| 852,512
|
@POST
@Path("/eval/filter/{functionId}")
public Response evaluateAlertFilterByFunctionId(@PathParam("functionId") long id,
@QueryParam("start") String startTimeIso,
@QueryParam("end") String endTimeIso,
@QueryParam("holidayStarts") @DefaultValue("") String holidayStarts,
@QueryParam("holidayEnds") @DefaultValue("") String holidayEnds) {
long startTime = ISODateTimeFormat.dateTimeParser().parseDateTime(startTimeIso).getMillis();
long endTime = ISODateTimeFormat.dateTimeParser().parseDateTime(endTimeIso).getMillis();
// get anomalies by function id, start time and end time`
AnomalyFunctionDTO anomalyFunctionSpec = DAO_REGISTRY.getAnomalyFunctionDAO().findById(id);
List<MergedAnomalyResultDTO> anomalyResultDTOS =
getMergedAnomaliesRemoveHolidays(id, startTime, endTime, holidayStarts, holidayEnds);
// create alert filter and evaluator
AlertFilter alertFilter = alertFilterFactory.fromSpec(anomalyFunctionSpec.getAlertFilter());
//evaluate current alert filter (calculate current precision and recall)
PrecisionRecallEvaluator evaluator = new PrecisionRecallEvaluator(alertFilter, anomalyResultDTOS);
LOG.info("AlertFilter of Type {}, has been evaluated with precision: {}, recall:{}", alertFilter.getClass().toString(),
evaluator.getWeightedPrecision(), evaluator.getRecall());
return Response.ok(evaluator.toProperties().toString()).build();
}
|
@Path(STR) Response function(@PathParam(STR) long id, @QueryParam("start") String startTimeIso, @QueryParam("end") String endTimeIso, @QueryParam(STR) @DefaultValue(STRholidayEndsSTRSTRAlertFilter of Type {}, has been evaluated with precision: {}, recall:{}", alertFilter.getClass().toString(), evaluator.getWeightedPrecision(), evaluator.getRecall()); return Response.ok(evaluator.toProperties().toString()).build(); }
|
/**
* The endpoint to evaluate alert filter
* @param id: function ID
* @param startTimeIso: startTime of merged anomaly ex: 2016-5-23T00:00:00Z
* @param endTimeIso: endTime of merged anomaly ex: 2016-5-23T00:00:00Z
* @return feedback summary, precision and recall as json object
* @throws Exception when data has no positive label or model has no positive prediction
*/
|
The endpoint to evaluate alert filter
|
evaluateAlertFilterByFunctionId
|
{
"repo_name": "sajavadi/pinot",
"path": "thirdeye/thirdeye-pinot/src/main/java/com/linkedin/thirdeye/dashboard/resources/DetectionJobResource.java",
"license": "apache-2.0",
"size": 53568
}
|
[
"javax.ws.rs.DefaultValue",
"javax.ws.rs.Path",
"javax.ws.rs.PathParam",
"javax.ws.rs.QueryParam",
"javax.ws.rs.core.Response"
] |
import javax.ws.rs.DefaultValue; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response;
|
import javax.ws.rs.*; import javax.ws.rs.core.*;
|
[
"javax.ws"
] |
javax.ws;
| 1,160,419
|
public Path makeQualified(Path path) {
checkPath(path);
return path.makeQualified(this.getUri(), this.getWorkingDirectory());
}
/**
* Get a new delegation token for this file system.
* This is an internal method that should have been declared protected
* but wasn't historically.
* Callers should use {@link #addDelegationTokens(String, Credentials)}
|
Path function(Path path) { checkPath(path); return path.makeQualified(this.getUri(), this.getWorkingDirectory()); } /** * Get a new delegation token for this file system. * This is an internal method that should have been declared protected * but wasn't historically. * Callers should use {@link #addDelegationTokens(String, Credentials)}
|
/**
* Make sure that a path specifies a FileSystem.
* @param path to use
*/
|
Make sure that a path specifies a FileSystem
|
makeQualified
|
{
"repo_name": "Microsoft-CISL/hadoop-prototype",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java",
"license": "apache-2.0",
"size": 116772
}
|
[
"org.apache.hadoop.security.Credentials"
] |
import org.apache.hadoop.security.Credentials;
|
import org.apache.hadoop.security.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 215,800
|
List<FormSchemaJson> getAllFormSchemas(String configName);
|
List<FormSchemaJson> getAllFormSchemas(String configName);
|
/**
 * Retrieves form schemas for all modules from MOTECH database for given configuration.
* @return List of all from schemas
*/
|
Retrieves form schemas for all modules from MOTECH database for given configuration.
|
getAllFormSchemas
|
{
"repo_name": "martokarski/modules",
"path": "commcare/src/main/java/org/motechproject/commcare/service/CommcareSchemaService.java",
"license": "bsd-3-clause",
"size": 2169
}
|
[
"java.util.List",
"org.motechproject.commcare.domain.FormSchemaJson"
] |
import java.util.List; import org.motechproject.commcare.domain.FormSchemaJson;
|
import java.util.*; import org.motechproject.commcare.domain.*;
|
[
"java.util",
"org.motechproject.commcare"
] |
java.util; org.motechproject.commcare;
| 2,674,591
|
public boolean contains(InetAddress inetAddr)
{
boolean rc = false;
BigInteger start = new BigInteger(getSubnetAddress().getAddress());
BigInteger end = new BigInteger(getEndAddress().getAddress());
BigInteger addr = new BigInteger(inetAddr.getAddress());
if ((addr.compareTo(start) >= 0) && (addr.compareTo(end) <= 0)) {
rc = true;
}
return rc;
}
|
boolean function(InetAddress inetAddr) { boolean rc = false; BigInteger start = new BigInteger(getSubnetAddress().getAddress()); BigInteger end = new BigInteger(getEndAddress().getAddress()); BigInteger addr = new BigInteger(inetAddr.getAddress()); if ((addr.compareTo(start) >= 0) && (addr.compareTo(end) <= 0)) { rc = true; } return rc; }
|
/**
* Contains. Test if an IP address falls within a subnet.
*
* @param inetAddr the IP address to check
*
* @return true, if subnet contains the IP address
*/
|
Contains. Test if an IP address falls within a subnet
|
contains
|
{
"repo_name": "jagornet/dhcp",
"path": "Jagornet-DHCP/dhcp-core/src/main/java/com/jagornet/dhcp/core/util/Subnet.java",
"license": "gpl-3.0",
"size": 6915
}
|
[
"java.math.BigInteger",
"java.net.InetAddress"
] |
import java.math.BigInteger; import java.net.InetAddress;
|
import java.math.*; import java.net.*;
|
[
"java.math",
"java.net"
] |
java.math; java.net;
| 2,602,658
|
int decompress(ByteBuffer inCompressed, ByteBuffer outDecompressed)
throws IOException;
|
int decompress(ByteBuffer inCompressed, ByteBuffer outDecompressed) throws IOException;
|
/**
* This method decompresses chunk of data that was compressed using {@link ChunkCompressor}.
*
* @param inCompressed Compressed data
* @param outDecompressed ByteBuffer where the decompressed data is put.
* @return Size of decompressed data.
* @throws IOException
*/
|
This method decompresses chunk of data that was compressed using <code>ChunkCompressor</code>
|
decompress
|
{
"repo_name": "sajavadi/pinot",
"path": "pinot-core/src/main/java/com/linkedin/pinot/core/io/compression/ChunkDecompressor.java",
"license": "apache-2.0",
"size": 1226
}
|
[
"java.io.IOException",
"java.nio.ByteBuffer"
] |
import java.io.IOException; import java.nio.ByteBuffer;
|
import java.io.*; import java.nio.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 832,017
|
@Override
protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
super.collectNewChildDescriptors(newChildDescriptors, object);
}
|
void function(Collection<Object> newChildDescriptors, Object object) { super.collectNewChildDescriptors(newChildDescriptors, object); }
|
/**
* This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children that can be
* created under this object. <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
|
This adds <code>org.eclipse.emf.edit.command.CommandParameter</code>s describing the children that can be created under this object.
|
collectNewChildDescriptors
|
{
"repo_name": "ModelWriter/Source",
"path": "plugins/org.eclipse.mylyn.docs.intent.mapping.emf.edit/src-gen/org/eclipse/mylyn/docs/intent/mapping/provider/TextLocationItemProvider.java",
"license": "epl-1.0",
"size": 5177
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 61,798
|
PackageScanClassResolver getPackageScanClassResolver();
|
PackageScanClassResolver getPackageScanClassResolver();
|
/**
* Returns the package scanning class resolver
*
* @return the resolver
*/
|
Returns the package scanning class resolver
|
getPackageScanClassResolver
|
{
"repo_name": "neoramon/camel",
"path": "camel-core/src/main/java/org/apache/camel/CamelContext.java",
"license": "apache-2.0",
"size": 70125
}
|
[
"org.apache.camel.spi.PackageScanClassResolver"
] |
import org.apache.camel.spi.PackageScanClassResolver;
|
import org.apache.camel.spi.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 1,942,764
|
public void setShowNodeImagesBinding(IBinding ShowNodeImagesBinding) {
m_objShowNodeImagesBinding = ShowNodeImagesBinding;
}
|
void function(IBinding ShowNodeImagesBinding) { m_objShowNodeImagesBinding = ShowNodeImagesBinding; }
|
/**
* Sets the ShowNodeImagesBinding.
* @param ShowNodeImagesBinding The ShowNodeImagesBinding to set
*/
|
Sets the ShowNodeImagesBinding
|
setShowNodeImagesBinding
|
{
"repo_name": "apache/tapestry3",
"path": "tapestry-contrib/src/org/apache/tapestry/contrib/tree/components/TreeNodeView.java",
"license": "apache-2.0",
"size": 13907
}
|
[
"org.apache.tapestry.IBinding"
] |
import org.apache.tapestry.IBinding;
|
import org.apache.tapestry.*;
|
[
"org.apache.tapestry"
] |
org.apache.tapestry;
| 758,023
|
public void startElement(String namespaceURI,
String localName,
String qName,
Attributes atts) throws SAXException {
if (qName.equals(ITEM_TAG)) {
KeyHandler subhandler = new KeyHandler(this.root, this);
this.root.pushSubHandler(subhandler);
}
else if (qName.equals(VALUE_TAG)) {
ValueHandler subhandler = new ValueHandler(this.root, this);
this.root.pushSubHandler(subhandler);
}
else {
throw new SAXException(
"Expected <Item> or <Value>...found " + qName
);
}
}
|
void function(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException { if (qName.equals(ITEM_TAG)) { KeyHandler subhandler = new KeyHandler(this.root, this); this.root.pushSubHandler(subhandler); } else if (qName.equals(VALUE_TAG)) { ValueHandler subhandler = new ValueHandler(this.root, this); this.root.pushSubHandler(subhandler); } else { throw new SAXException( STR + qName ); } }
|
/**
* The start of an element.
*
* @param namespaceURI the namespace.
* @param localName the element name.
* @param qName the element name.
* @param atts the attributes.
*
* @throws SAXException for errors.
*/
|
The start of an element
|
startElement
|
{
"repo_name": "nologic/nabs",
"path": "client/trunk/shared/libraries/jfreechart-1.0.5/source/org/jfree/data/xml/ItemHandler.java",
"license": "gpl-2.0",
"size": 5077
}
|
[
"org.xml.sax.Attributes",
"org.xml.sax.SAXException"
] |
import org.xml.sax.Attributes; import org.xml.sax.SAXException;
|
import org.xml.sax.*;
|
[
"org.xml.sax"
] |
org.xml.sax;
| 1,642,663
|
public void destroy(@Nonnegative final int primaryKey)
{
final T record = records.remove( primaryKey);
if (record != null)
{
if (hasCallbacks())
{
((RecordCallbacks) record).onDestroy();
}
core.fireRecordEvent(RecordListener.RecordEvent.RECORD_DESTROYED, this, record);
}
getStore().destroy(this, primaryKey );
}
|
void function(@Nonnegative final int primaryKey) { final T record = records.remove( primaryKey); if (record != null) { if (hasCallbacks()) { ((RecordCallbacks) record).onDestroy(); } core.fireRecordEvent(RecordListener.RecordEvent.RECORD_DESTROYED, this, record); } getStore().destroy(this, primaryKey ); }
|
/**
* Removes the record with this primaryKey from the record-store and all cache
* @param primaryKey
*/
|
Removes the record with this primaryKey from the record-store and all cache
|
destroy
|
{
"repo_name": "doe300/jactiverecord",
"path": "src/de/doe300/activerecord/RecordBase.java",
"license": "mit",
"size": 19825
}
|
[
"de.doe300.activerecord.record.RecordCallbacks",
"javax.annotation.Nonnegative"
] |
import de.doe300.activerecord.record.RecordCallbacks; import javax.annotation.Nonnegative;
|
import de.doe300.activerecord.record.*; import javax.annotation.*;
|
[
"de.doe300.activerecord",
"javax.annotation"
] |
de.doe300.activerecord; javax.annotation;
| 735,343
|
private int _getStart(ResourceRequest resourceRequest, QueryContext queryContext) {
int page = ParamUtil.getInteger(
resourceRequest, ParameterNames.PAGE, 1);
if (page == 1) {
return 0;
} else {
return (((page-1) * _moduleConfiguration.pageSize()));
}
}
private static final Logger _log =
LoggerFactory.getLogger(GetSearchResultsMVCResourceCommand.class);
@Reference
private GSearch _gSearch;
@Reference
private LocalizationHelper _localizationHelper;
private volatile ModuleConfiguration _moduleConfiguration;
@Reference
private Portal _portal;
@Reference
private QueryContextBuilder _queryContextBuilder;
|
int function(ResourceRequest resourceRequest, QueryContext queryContext) { int page = ParamUtil.getInteger( resourceRequest, ParameterNames.PAGE, 1); if (page == 1) { return 0; } else { return (((page-1) * _moduleConfiguration.pageSize())); } } private static final Logger _log = LoggerFactory.getLogger(GetSearchResultsMVCResourceCommand.class); private GSearch _gSearch; private LocalizationHelper _localizationHelper; private volatile ModuleConfiguration _moduleConfiguration; @Reference private Portal _portal; private QueryContextBuilder _queryContextBuilder;
|
/**
* Get search start.
*
* We are relying here on Semanticweb pagination component,
* but on the core level, still using the "start" param,
* which is better suitable for headless access.
*
* @param resourceRequest
* @param queryContext
* @return
*/
|
Get search start. We are relying here on Semanticweb pagination component, but on the core level, still using the "start" param, which is better suitable for headless access
|
_getStart
|
{
"repo_name": "peerkar/liferay-gsearch",
"path": "liferay-gsearch-workspace/modules/gsearch-react-web/src/main/java/fi/soveltia/liferay/gsearch/react/web/portlet/action/GetSearchResultsMVCResourceCommand.java",
"license": "lgpl-3.0",
"size": 14397
}
|
[
"com.liferay.portal.kernel.util.ParamUtil",
"com.liferay.portal.kernel.util.Portal",
"fi.soveltia.liferay.gsearch.core.api.GSearch",
"fi.soveltia.liferay.gsearch.core.api.constants.ParameterNames",
"fi.soveltia.liferay.gsearch.core.api.query.context.QueryContext",
"fi.soveltia.liferay.gsearch.core.api.query.context.QueryContextBuilder",
"fi.soveltia.liferay.gsearch.localization.api.LocalizationHelper",
"fi.soveltia.liferay.gsearch.react.web.configuration.ModuleConfiguration",
"javax.portlet.ResourceRequest",
"org.osgi.service.component.annotations.Reference",
"org.slf4j.Logger",
"org.slf4j.LoggerFactory"
] |
import com.liferay.portal.kernel.util.ParamUtil; import com.liferay.portal.kernel.util.Portal; import fi.soveltia.liferay.gsearch.core.api.GSearch; import fi.soveltia.liferay.gsearch.core.api.constants.ParameterNames; import fi.soveltia.liferay.gsearch.core.api.query.context.QueryContext; import fi.soveltia.liferay.gsearch.core.api.query.context.QueryContextBuilder; import fi.soveltia.liferay.gsearch.localization.api.LocalizationHelper; import fi.soveltia.liferay.gsearch.react.web.configuration.ModuleConfiguration; import javax.portlet.ResourceRequest; import org.osgi.service.component.annotations.Reference; import org.slf4j.Logger; import org.slf4j.LoggerFactory;
|
import com.liferay.portal.kernel.util.*; import fi.soveltia.liferay.gsearch.core.api.*; import fi.soveltia.liferay.gsearch.core.api.constants.*; import fi.soveltia.liferay.gsearch.core.api.query.context.*; import fi.soveltia.liferay.gsearch.localization.api.*; import fi.soveltia.liferay.gsearch.react.web.configuration.*; import javax.portlet.*; import org.osgi.service.component.annotations.*; import org.slf4j.*;
|
[
"com.liferay.portal",
"fi.soveltia.liferay",
"javax.portlet",
"org.osgi.service",
"org.slf4j"
] |
com.liferay.portal; fi.soveltia.liferay; javax.portlet; org.osgi.service; org.slf4j;
| 2,545,936
|
public WorldInfo loadWorldInfo()
{
File var1 = new File(this.worldDirectory, "level.dat");
NBTTagCompound var2;
NBTTagCompound var3;
if (var1.exists())
{
try
{
var2 = CompressedStreamTools.readCompressed(new FileInputStream(var1));
var3 = var2.getCompoundTag("Data");
return new WorldInfo(var3);
}
catch (Exception var5)
{
var5.printStackTrace();
}
}
var1 = new File(this.worldDirectory, "level.dat_old");
if (var1.exists())
{
try
{
var2 = CompressedStreamTools.readCompressed(new FileInputStream(var1));
var3 = var2.getCompoundTag("Data");
return new WorldInfo(var3);
}
catch (Exception var4)
{
var4.printStackTrace();
}
}
return null;
}
|
WorldInfo function() { File var1 = new File(this.worldDirectory, STR); NBTTagCompound var2; NBTTagCompound var3; if (var1.exists()) { try { var2 = CompressedStreamTools.readCompressed(new FileInputStream(var1)); var3 = var2.getCompoundTag("Data"); return new WorldInfo(var3); } catch (Exception var5) { var5.printStackTrace(); } } var1 = new File(this.worldDirectory, STR); if (var1.exists()) { try { var2 = CompressedStreamTools.readCompressed(new FileInputStream(var1)); var3 = var2.getCompoundTag("Data"); return new WorldInfo(var3); } catch (Exception var4) { var4.printStackTrace(); } } return null; }
|
/**
* Loads and returns the world info
*/
|
Loads and returns the world info
|
loadWorldInfo
|
{
"repo_name": "mviitanen/marsmod",
"path": "mcp/src/minecraft/net/minecraft/world/storage/SaveHandler.java",
"license": "gpl-2.0",
"size": 9678
}
|
[
"java.io.File",
"java.io.FileInputStream",
"net.minecraft.nbt.CompressedStreamTools",
"net.minecraft.nbt.NBTTagCompound"
] |
import java.io.File; import java.io.FileInputStream; import net.minecraft.nbt.CompressedStreamTools; import net.minecraft.nbt.NBTTagCompound;
|
import java.io.*; import net.minecraft.nbt.*;
|
[
"java.io",
"net.minecraft.nbt"
] |
java.io; net.minecraft.nbt;
| 1,868,977
|
public Serializable convertPropertyValue(Object value)
{
if (value == null)
{
return null;
}
if(nodeConverter.isSupported(value))
{
return nodeConverter.convert(value);
}
if (value instanceof Serializable)
{
return (Serializable) value;
}
String msg = messageService.getMessage(ERR_CONVERT_VALUE, value);
throw new WorkflowException(msg);
}
/**
* Performs basic conversion from a property to a
* value that can be uses as activiti variable. If the type of the
* property is known, use {@link #convertValueToPropertyType(Task, Serializable, QName)}
|
Serializable function(Object value) { if (value == null) { return null; } if(nodeConverter.isSupported(value)) { return nodeConverter.convert(value); } if (value instanceof Serializable) { return (Serializable) value; } String msg = messageService.getMessage(ERR_CONVERT_VALUE, value); throw new WorkflowException(msg); } /** * Performs basic conversion from a property to a * value that can be uses as activiti variable. If the type of the * property is known, use {@link #convertValueToPropertyType(Task, Serializable, QName)}
|
/**
* Convert an Activiti variable value to an Alfresco value.
*
* @param value
* activti value
* @return alfresco value
*/
|
Convert an Activiti variable value to an Alfresco value
|
convertPropertyValue
|
{
"repo_name": "nguyentienlong/community-edition",
"path": "projects/repository/source/java/org/alfresco/repo/workflow/activiti/properties/ActivitiPropertyConverter.java",
"license": "lgpl-3.0",
"size": 45750
}
|
[
"java.io.Serializable",
"org.activiti.engine.task.Task",
"org.alfresco.service.cmr.workflow.WorkflowException",
"org.alfresco.service.namespace.QName"
] |
import java.io.Serializable; import org.activiti.engine.task.Task; import org.alfresco.service.cmr.workflow.WorkflowException; import org.alfresco.service.namespace.QName;
|
import java.io.*; import org.activiti.engine.task.*; import org.alfresco.service.cmr.workflow.*; import org.alfresco.service.namespace.*;
|
[
"java.io",
"org.activiti.engine",
"org.alfresco.service"
] |
java.io; org.activiti.engine; org.alfresco.service;
| 89,076
|
public static List<OrgSoftwareEntitlementDto>
listEntitlementsForAllOrgsWithEmptyOrgs(ChannelFamily cf, User user) {
List <OrgSoftwareEntitlementDto> ret =
new LinkedList<OrgSoftwareEntitlementDto>();
List<ChannelOverview> entitlementUsage = ChannelManager.getEntitlementForAllOrgs(
cf.getId());
// Create a mapping of org ID's to the channel overview returned, we'll need this
// when iterating the list of all orgs shortly:
Map<Long, ChannelOverview> orgEntitlementUsage =
new HashMap<Long, ChannelOverview>();
for (ChannelOverview o : entitlementUsage) {
orgEntitlementUsage.put(o.getOrgId(), o);
}
Org satelliteOrg = OrgFactory.getSatelliteOrg();
ChannelOverview satelliteOrgOverview = ChannelManager.getEntitlement(
satelliteOrg.getId(),
cf.getId());
if (satelliteOrgOverview == null) {
throw new RuntimeException("Satellite org does not" +
"appear to have been allocated entitlement:" +
cf.getId());
}
List<Org> allOrgs = OrgManager.allOrgs(user);
for (Org org : allOrgs) {
if (orgEntitlementUsage.containsKey(org.getId())) {
ChannelOverview co = orgEntitlementUsage.get(org.getId());
ret.add(makeOrgSoftwareEntitlement(co, org, satelliteOrgOverview));
}
else {
OrgSoftwareEntitlementDto seDto = new OrgSoftwareEntitlementDto();
seDto.setOrg(org);
seDto.setCurrentMembers(0L);
seDto.setMaxMembers(0L);
seDto.setMaxPossibleAllocation(satelliteOrgOverview.getFreeMembers());
seDto.setCurrentFlex(0L);
seDto.setMaxFlex(0L);
seDto.setMaxPossibleFlexAllocation(satelliteOrgOverview.getFreeFlex());
ret.add(seDto);
}
}
return ret;
}
|
static List<OrgSoftwareEntitlementDto> function(ChannelFamily cf, User user) { List <OrgSoftwareEntitlementDto> ret = new LinkedList<OrgSoftwareEntitlementDto>(); List<ChannelOverview> entitlementUsage = ChannelManager.getEntitlementForAllOrgs( cf.getId()); Map<Long, ChannelOverview> orgEntitlementUsage = new HashMap<Long, ChannelOverview>(); for (ChannelOverview o : entitlementUsage) { orgEntitlementUsage.put(o.getOrgId(), o); } Org satelliteOrg = OrgFactory.getSatelliteOrg(); ChannelOverview satelliteOrgOverview = ChannelManager.getEntitlement( satelliteOrg.getId(), cf.getId()); if (satelliteOrgOverview == null) { throw new RuntimeException(STR + STR + cf.getId()); } List<Org> allOrgs = OrgManager.allOrgs(user); for (Org org : allOrgs) { if (orgEntitlementUsage.containsKey(org.getId())) { ChannelOverview co = orgEntitlementUsage.get(org.getId()); ret.add(makeOrgSoftwareEntitlement(co, org, satelliteOrgOverview)); } else { OrgSoftwareEntitlementDto seDto = new OrgSoftwareEntitlementDto(); seDto.setOrg(org); seDto.setCurrentMembers(0L); seDto.setMaxMembers(0L); seDto.setMaxPossibleAllocation(satelliteOrgOverview.getFreeMembers()); seDto.setCurrentFlex(0L); seDto.setMaxFlex(0L); seDto.setMaxPossibleFlexAllocation(satelliteOrgOverview.getFreeFlex()); ret.add(seDto); } } return ret; }
|
/**
* Given a channel family, this method returns entitlement information on a per org
 * basis. This call will return all organizations, even those that do not have any
 * entitlements on the family.
*
* @param cf the channel family
* @param user the user needed for access privilege
* @return lists the entitlement information for the given channel family for
* all orgs.
*/
|
Given a channel family, this method returns entitlement information on a per org basis. This call will return all organizations, even if it does not have any entitlements on the family
|
listEntitlementsForAllOrgsWithEmptyOrgs
|
{
"repo_name": "dmacvicar/spacewalk",
"path": "java/code/src/com/redhat/rhn/manager/channel/ChannelManager.java",
"license": "gpl-2.0",
"size": 105505
}
|
[
"com.redhat.rhn.domain.channel.ChannelFamily",
"com.redhat.rhn.domain.org.Org",
"com.redhat.rhn.domain.org.OrgFactory",
"com.redhat.rhn.domain.user.User",
"com.redhat.rhn.frontend.dto.ChannelOverview",
"com.redhat.rhn.frontend.dto.OrgSoftwareEntitlementDto",
"com.redhat.rhn.manager.org.OrgManager",
"java.util.HashMap",
"java.util.LinkedList",
"java.util.List",
"java.util.Map"
] |
import com.redhat.rhn.domain.channel.ChannelFamily; import com.redhat.rhn.domain.org.Org; import com.redhat.rhn.domain.org.OrgFactory; import com.redhat.rhn.domain.user.User; import com.redhat.rhn.frontend.dto.ChannelOverview; import com.redhat.rhn.frontend.dto.OrgSoftwareEntitlementDto; import com.redhat.rhn.manager.org.OrgManager; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map;
|
import com.redhat.rhn.domain.channel.*; import com.redhat.rhn.domain.org.*; import com.redhat.rhn.domain.user.*; import com.redhat.rhn.frontend.dto.*; import com.redhat.rhn.manager.org.*; import java.util.*;
|
[
"com.redhat.rhn",
"java.util"
] |
com.redhat.rhn; java.util;
| 1,311,960
|
/**
 * Verifies that Hibernate's DISTINCT_ROOT_ENTITY transformer left the list free
 * of duplicate root entities.
 *
 * @param entities
 *            persistent entity list to check
 */
public static void assertDistinctEntities(
    final List<? extends PersistentEntity<?>> entities)
{
    final boolean allDistinct = isAllEntitiesDistinct(entities);
    assertTrue("DISTINCT_ROOT_ENTITY didn't collocate person entities correctly",
        allDistinct);
}
|
static void function( final List<? extends PersistentEntity<?>> entities) { assertTrue(STR, isAllEntitiesDistinct(entities)); }
|
/**
* Assert that Hibernate's DISTINCT_ROOT_ENTITY collocated root entities.
*
* @param entities
* persistent entity list
*/
|
Assert that Hibernate's DISTINCT_ROOT_ENTITY collocated root entities
|
assertDistinctEntities
|
{
"repo_name": "openfurther/further-open-core",
"path": "core/core-test/src/main/java/edu/utah/further/core/test/util/AssertUtil.java",
"license": "apache-2.0",
"size": 6891
}
|
[
"edu.utah.further.core.api.data.PersistentEntity",
"java.util.List",
"org.junit.Assert"
] |
import edu.utah.further.core.api.data.PersistentEntity; import java.util.List; import org.junit.Assert;
|
import edu.utah.further.core.api.data.*; import java.util.*; import org.junit.*;
|
[
"edu.utah.further",
"java.util",
"org.junit"
] |
edu.utah.further; java.util; org.junit;
| 2,113,409
|
/**
 * Obtains a {@code PaxDate} representing a date in the Pax calendar system
 * from the proleptic-year, month-of-year and day-of-month fields.
 * <p>
 * The day must be valid for the year and month, otherwise an exception is
 * thrown: month 14 only exists in leap years, and in a leap year the Pax
 * month (month 13) is only one week long.
 *
 * @param prolepticYear  the Pax proleptic-year
 * @param month  the Pax month-of-year, from 1 to 14
 * @param dayOfMonth  the Pax day-of-month, from 1 to 28
 * @return the date in the Pax calendar system, not null
 * @throws DateTimeException if the value of any field is out of range,
 *  or if the day-of-month is invalid for the month-year
 */
public static PaxDate of(int prolepticYear, int month, int dayOfMonth) {
    YEAR.checkValidValue(prolepticYear);
    PaxChronology.MONTH_OF_YEAR_RANGE.checkValidValue(month, MONTH_OF_YEAR);
    PaxChronology.DAY_OF_MONTH_RANGE.checkValidValue(dayOfMonth, DAY_OF_MONTH);
    // Month 14 (December of a leap year) only exists when the year is leap.
    if (month == MONTHS_IN_YEAR + 1 && !PaxChronology.INSTANCE.isLeapYear(prolepticYear)) {
        // Fixed: message was missing the space before "is not a leap year".
        throw new DateTimeException("Invalid month 14 as " + prolepticYear + " is not a leap year");
    }
    // In a leap year, the Pax month (month 13) has only DAYS_IN_WEEK days.
    if (dayOfMonth > DAYS_IN_WEEK && month == MONTHS_IN_YEAR && PaxChronology.INSTANCE.isLeapYear(prolepticYear)) {
        throw new DateTimeException("Invalid date during Pax as " + prolepticYear + " is a leap year");
    }
    return new PaxDate(prolepticYear, month, dayOfMonth);
}
/**
* Obtains a {@code PaxDate} from a temporal object.
* <p>
* This obtains a date in the Pax calendar system based on the specified temporal.
* A {@code TemporalAccessor} represents an arbitrary set of date and time information,
* which this factory converts to an instance of {@code PaxDate}.
* <p>
* The conversion typically uses the {@link ChronoField#EPOCH_DAY EPOCH_DAY}
* field, which is standardized across calendar systems.
* <p>
* This method matches the signature of the functional interface {@link TemporalQuery}
* allowing it to be used as a query via method reference, {@code PaxDate::from}.
*
* @param temporal the temporal object to convert, not null
* @return the date in the Pax calendar system, not null
* @throws DateTimeException if unable to convert to a {@code PaxDate}
|
static PaxDate function(int prolepticYear, int month, int dayOfMonth) { YEAR.checkValidValue(prolepticYear); PaxChronology.MONTH_OF_YEAR_RANGE.checkValidValue(month, MONTH_OF_YEAR); PaxChronology.DAY_OF_MONTH_RANGE.checkValidValue(dayOfMonth, DAY_OF_MONTH); if (month == MONTHS_IN_YEAR + 1 && !PaxChronology.INSTANCE.isLeapYear(prolepticYear)) { throw new DateTimeException(STR + prolepticYear + STR); } if (dayOfMonth > DAYS_IN_WEEK && month == MONTHS_IN_YEAR && PaxChronology.INSTANCE.isLeapYear(prolepticYear)) { throw new DateTimeException(STR + prolepticYear + STR); } return new PaxDate(prolepticYear, month, dayOfMonth); } /** * Obtains a {@code PaxDate} from a temporal object. * <p> * This obtains a date in the Pax calendar system based on the specified temporal. * A {@code TemporalAccessor} represents an arbitrary set of date and time information, * which this factory converts to an instance of {@code PaxDate}. * <p> * The conversion typically uses the {@link ChronoField#EPOCH_DAY EPOCH_DAY} * field, which is standardized across calendar systems. * <p> * This method matches the signature of the functional interface {@link TemporalQuery} * allowing it to be used as a query via method reference, {@code PaxDate::from}. * * @param temporal the temporal object to convert, not null * @return the date in the Pax calendar system, not null * @throws DateTimeException if unable to convert to a {@code PaxDate}
|
/**
* Obtains a {@code PaxDate} representing a date in the Pax calendar
* system from the proleptic-year, month-of-year and day-of-month fields.
* <p>
* This returns a {@code PaxDate} with the specified fields.
* The day must be valid for the year and month, otherwise an exception will be thrown.
*
* @param prolepticYear the Pax proleptic-year
* @param month the Pax month-of-year, from 1 to 14
* @param dayOfMonth the Pax day-of-month, from 1 to 28
* @return the date in Pax calendar system, not null
* @throws DateTimeException if the value of any field is out of range,
* or if the day-of-month is invalid for the month-year
*/
|
Obtains a PaxDate representing a date in the Pax calendar system from the proleptic-year, month-of-year and day-of-month fields. This returns a PaxDate with the specified fields. The day must be valid for the year and month, otherwise an exception will be thrown
|
of
|
{
"repo_name": "steve-o/threeten-extra",
"path": "src/main/java/org/threeten/extra/chrono/PaxDate.java",
"license": "bsd-3-clause",
"size": 29707
}
|
[
"java.time.DateTimeException",
"java.time.temporal.ChronoField",
"java.time.temporal.TemporalAccessor",
"java.time.temporal.TemporalQuery"
] |
import java.time.DateTimeException; import java.time.temporal.ChronoField; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalQuery;
|
import java.time.*; import java.time.temporal.*;
|
[
"java.time"
] |
java.time;
| 503,418
|
/**
 * Updates an existing measure in the named cube, changing its aggregation,
 * format string and/or name to match the supplied measure definition.
 *
 * @param cubeName cube to search for the measure
 * @param measureName name of the measure to update; may be a qualified MDX
 *        name such as [Measures].[Quantity], in which case only the last
 *        bracketed segment is used
 * @param measure the updated measure definition
 * @throws ModelerException if the measure cannot be found, the new name
 *         collides with an existing measure, or the update fails
 */
public void updateMeasure( String cubeName, String measureName, MondrianDef.Measure measure ) throws ModelerException {
  if ( StringUtils.isBlank( measureName ) ) {
    throw new ModelerException(
      BaseMessages.getString( MSG_CLASS, "MondrianSchemaHelper.updateMeasure.UNABLE_TO_FIND_MEASURE" )
    );
  }
  // A qualified name like [Measures].[Quantity] is reduced to its last segment.
  if ( measureName.contains( "[" ) ) {
    final int open = measureName.lastIndexOf( "[" );
    final int close = measureName.lastIndexOf( "]" );
    measureName = measureName.substring( open + 1, close );
  }
  try {
    // Renaming must not collide with an existing measure of the target name.
    // (ModelerExceptions thrown here are intentionally re-wrapped by the
    // catch below, preserving the original control flow.)
    Node clash = getMeasureNode( cubeName, measure.name );
    if ( !measureName.equals( measure.name ) && clash != null ) {
      throw new ModelerException(
        BaseMessages.getString( MSG_CLASS, "MondrianSchemaHelper.updateMeasure.MEASURE_ALREADY_EXISTS", measure.name )
      );
    }
    Node target = getMeasureNode( cubeName, measureName );
    if ( target == null ) {
      throw new ModelerException(
        BaseMessages.getString( MSG_CLASS, "MondrianSchemaHelper.updateMeasure.UNABLE_TO_FIND_MEASURE" )
      );
    }
    NamedNodeMap attrs = target.getAttributes();
    // Aggregation is only touched when a non-blank value is supplied.
    if ( !StringUtils.isBlank( measure.aggregator ) ) {
      attrs.getNamedItem( "aggregator" ).setNodeValue( measure.aggregator );
    }
    // Format string is always written, even when blank.
    attrs.getNamedItem( "formatString" ).setNodeValue( measure.formatString );
    // Name is only touched when a non-blank value is supplied.
    if ( !StringUtils.isBlank( measure.name ) ) {
      attrs.getNamedItem( "name" ).setNodeValue( measure.name );
    }
  } catch ( Exception e ) {
    throw new ModelerException( e );
  }
}
|
void function( String cubeName, String measureName, MondrianDef.Measure measure ) throws ModelerException { if ( StringUtils.isBlank( measureName ) ) { throw new ModelerException( BaseMessages.getString( MSG_CLASS, STR ) ); } if ( measureName.contains( "[" ) ) { measureName = measureName.substring( measureName.lastIndexOf( "[" ) + 1, measureName.lastIndexOf( "]" ) ); } try { Node duplicateMeasure = getMeasureNode( cubeName, measure.name ); if ( !measureName.equals( measure.name ) && duplicateMeasure != null ) { throw new ModelerException( BaseMessages.getString( MSG_CLASS, STR, measure.name ) ); } Node measureNode = getMeasureNode( cubeName, measureName ); if ( measureNode == null ) { throw new ModelerException( BaseMessages.getString( MSG_CLASS, STR ) ); } NamedNodeMap measureAttrs = measureNode.getAttributes(); if ( !StringUtils.isBlank( measure.aggregator ) ) { Node aggNode = measureAttrs.getNamedItem( STR ); aggNode.setNodeValue( measure.aggregator ); } Node formatNode = measureAttrs.getNamedItem( STR ); formatNode.setNodeValue( measure.formatString ); if ( !StringUtils.isBlank( measure.name ) ) { Node nameNode = measureAttrs.getNamedItem( "name" ); nameNode.setNodeValue( measure.name ); } } catch ( Exception e ) { throw new ModelerException( e ); } }
|
/**
* Update measure with name and/or aggregation type.
*
* @param cubeName Cube to search for measure
* @param measureName Name of measure to search for
* @param measure The updated measure
* @throws ModelerException
*/
|
Update measure with name and/or aggregation type
|
updateMeasure
|
{
"repo_name": "kolinus/modeler",
"path": "src/org/pentaho/agilebi/modeler/models/annotations/util/MondrianSchemaHandler.java",
"license": "lgpl-2.1",
"size": 11999
}
|
[
"org.apache.commons.lang.StringUtils",
"org.pentaho.agilebi.modeler.ModelerException",
"org.pentaho.di.i18n.BaseMessages",
"org.w3c.dom.NamedNodeMap",
"org.w3c.dom.Node"
] |
import org.apache.commons.lang.StringUtils; import org.pentaho.agilebi.modeler.ModelerException; import org.pentaho.di.i18n.BaseMessages; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node;
|
import org.apache.commons.lang.*; import org.pentaho.agilebi.modeler.*; import org.pentaho.di.i18n.*; import org.w3c.dom.*;
|
[
"org.apache.commons",
"org.pentaho.agilebi",
"org.pentaho.di",
"org.w3c.dom"
] |
org.apache.commons; org.pentaho.agilebi; org.pentaho.di; org.w3c.dom;
| 1,679,947
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.