method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public static void checkState(boolean expression, @Nullable String errorMessageTemplate, @Nullable Object... errorMessageArgs) { if (!expression) { throw new IllegalStateException(format(errorMessageTemplate, errorMessageArgs)); } }
static void function(boolean expression, @Nullable String errorMessageTemplate, @Nullable Object... errorMessageArgs) { if (!expression) { throw new IllegalStateException(format(errorMessageTemplate, errorMessageArgs)); } }
/** * Ensures the truth of an expression involving the state of the calling instance, but not * involving any parameters to the calling method. * * @param expression a boolean expression * @param errorMessageTemplate a template for the exception message should the check fail. The * message is formed by replacing each {@code %s} placeholder in the template with an * argument. These are matched by position - the first {@code %s} gets {@code * errorMessageArgs[0]}, etc. Unmatched arguments will be appended to the formatted message * in square braces. Unmatched placeholders will be left as-is. * @param errorMessageArgs the arguments to be substituted into the message template. Arguments * are converted to strings using {@link String#valueOf(Object)}. * @throws IllegalStateException if {@code expression} is false * @throws NullPointerException if the check fails and either {@code errorMessageTemplate} or * {@code errorMessageArgs} is null (don't let this happen) */
Ensures the truth of an expression involving the state of the calling instance, but not involving any parameters to the calling method
checkState
{ "repo_name": "EvilBT/HDImageView", "path": "library/src/main/java/xyz/zpayh/hdimage/util/Preconditions.java", "license": "apache-2.0", "size": 16852 }
[ "androidx.annotation.Nullable" ]
import androidx.annotation.Nullable;
import androidx.annotation.*;
[ "androidx.annotation" ]
androidx.annotation;
1,103,468
return getAccountById(Account.CURRENT_ID); }
return getAccountById(Account.CURRENT_ID); }
/** * Gets current account of logged user. * * @return current account * @throws com.gooddata.sdk.common.GoodDataException when current account can't be accessed. */
Gets current account of logged user
getCurrent
{ "repo_name": "martiner/gooddata-java", "path": "gooddata-java/src/main/java/com/gooddata/sdk/service/account/AccountService.java", "license": "bsd-3-clause", "size": 8487 }
[ "com.gooddata.sdk.model.account.Account" ]
import com.gooddata.sdk.model.account.Account;
import com.gooddata.sdk.model.account.*;
[ "com.gooddata.sdk" ]
com.gooddata.sdk;
1,568,278
public static void setPiece(TEBase TE, int piece) { int temp = (TE.getData() & ~0x80) | (piece << 7); TE.setData(temp); }
static void function(TEBase TE, int piece) { int temp = (TE.getData() & ~0x80) (piece << 7); TE.setData(temp); }
/** * Sets door piece (top or bottom). */
Sets door piece (top or bottom)
setPiece
{ "repo_name": "Techern/carpentersblocks", "path": "src/main/java/com/carpentersblocks/data/Hinge.java", "license": "lgpl-2.1", "size": 3649 }
[ "com.carpentersblocks.tileentity.TEBase" ]
import com.carpentersblocks.tileentity.TEBase;
import com.carpentersblocks.tileentity.*;
[ "com.carpentersblocks.tileentity" ]
com.carpentersblocks.tileentity;
889,639
public static void main(final String[] args) throws IOException { // set up user arguments Options options = new Options(); options.addOption(OPT_HFILE_NAME, true, "HFile to analyse (REQUIRED)"); options.getOption(OPT_HFILE_NAME).setArgName("FILENAME"); options.addOption(OPT_KV_LIMIT, true, "Maximum number of KeyValues to process. A benchmark stops running " + "after iterating over this many KV pairs."); options.getOption(OPT_KV_LIMIT).setArgName("NUMBER"); options.addOption(OPT_MEASURE_THROUGHPUT, false, "Measure read throughput"); options.addOption(OPT_OMIT_CORRECTNESS_TEST, false, "Omit corectness tests."); options.addOption(OPT_ENCODING_ALGORITHM, true, "What kind of compression algorithm use for comparison."); options.addOption(OPT_BENCHMARK_N_TIMES, true, "Number of times to run each benchmark. Default value: " + DEFAULT_BENCHMARK_N_TIMES); options.addOption(OPT_BENCHMARK_N_OMIT, true, "Number of first runs of every benchmark to exclude from " + "statistics (" + DEFAULT_BENCHMARK_N_OMIT + " by default, so that " + "only the last " + (DEFAULT_BENCHMARK_N_TIMES - DEFAULT_BENCHMARK_N_OMIT) + " times are included in statistics.)"); // parse arguments CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { System.err.println("Could not parse arguments!"); System.exit(-1); return; // avoid warning } int kvLimit = Integer.MAX_VALUE; if (cmd.hasOption(OPT_KV_LIMIT)) { kvLimit = Integer.parseInt(cmd.getOptionValue(OPT_KV_LIMIT)); } // basic argument sanity checks if (!cmd.hasOption(OPT_HFILE_NAME)) { LOG.error("Please specify HFile name using the " + OPT_HFILE_NAME + " option"); printUsage(options); System.exit(-1); } String pathName = cmd.getOptionValue(OPT_HFILE_NAME); String compressionName = DEFAULT_COMPRESSION.getName(); if (cmd.hasOption(OPT_ENCODING_ALGORITHM)) { compressionName = cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase(); } boolean doBenchmark = 
cmd.hasOption(OPT_MEASURE_THROUGHPUT); boolean doVerify = !cmd.hasOption(OPT_OMIT_CORRECTNESS_TEST); if (cmd.hasOption(OPT_BENCHMARK_N_TIMES)) { benchmarkNTimes = Integer.valueOf(cmd.getOptionValue( OPT_BENCHMARK_N_TIMES)); } if (cmd.hasOption(OPT_BENCHMARK_N_OMIT)) { benchmarkNOmit = Integer.valueOf(cmd.getOptionValue(OPT_BENCHMARK_N_OMIT)); } if (benchmarkNTimes < benchmarkNOmit) { LOG.error("The number of times to run each benchmark (" + benchmarkNTimes + ") must be greater than the number of benchmark runs to exclude " + "from statistics (" + benchmarkNOmit + ")"); System.exit(1); } LOG.info("Running benchmark " + benchmarkNTimes + " times. " + "Excluding the first " + benchmarkNOmit + " times from statistics."); final Configuration conf = HBaseConfiguration.create(); try { testCodecs(conf, kvLimit, pathName, compressionName, doBenchmark, doVerify); } finally { (new CacheConfig(conf)).getBlockCache().shutdown(); } }
static void function(final String[] args) throws IOException { Options options = new Options(); options.addOption(OPT_HFILE_NAME, true, STR); options.getOption(OPT_HFILE_NAME).setArgName(STR); options.addOption(OPT_KV_LIMIT, true, STR + STR); options.getOption(OPT_KV_LIMIT).setArgName(STR); options.addOption(OPT_MEASURE_THROUGHPUT, false, STR); options.addOption(OPT_OMIT_CORRECTNESS_TEST, false, STR); options.addOption(OPT_ENCODING_ALGORITHM, true, STR); options.addOption(OPT_BENCHMARK_N_TIMES, true, STR + DEFAULT_BENCHMARK_N_TIMES); options.addOption(OPT_BENCHMARK_N_OMIT, true, STR + STR + DEFAULT_BENCHMARK_N_OMIT + STR + STR + (DEFAULT_BENCHMARK_N_TIMES - DEFAULT_BENCHMARK_N_OMIT) + STR); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { System.err.println(STR); System.exit(-1); return; } int kvLimit = Integer.MAX_VALUE; if (cmd.hasOption(OPT_KV_LIMIT)) { kvLimit = Integer.parseInt(cmd.getOptionValue(OPT_KV_LIMIT)); } if (!cmd.hasOption(OPT_HFILE_NAME)) { LOG.error(STR + OPT_HFILE_NAME + STR); printUsage(options); System.exit(-1); } String pathName = cmd.getOptionValue(OPT_HFILE_NAME); String compressionName = DEFAULT_COMPRESSION.getName(); if (cmd.hasOption(OPT_ENCODING_ALGORITHM)) { compressionName = cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase(); } boolean doBenchmark = cmd.hasOption(OPT_MEASURE_THROUGHPUT); boolean doVerify = !cmd.hasOption(OPT_OMIT_CORRECTNESS_TEST); if (cmd.hasOption(OPT_BENCHMARK_N_TIMES)) { benchmarkNTimes = Integer.valueOf(cmd.getOptionValue( OPT_BENCHMARK_N_TIMES)); } if (cmd.hasOption(OPT_BENCHMARK_N_OMIT)) { benchmarkNOmit = Integer.valueOf(cmd.getOptionValue(OPT_BENCHMARK_N_OMIT)); } if (benchmarkNTimes < benchmarkNOmit) { LOG.error(STR + benchmarkNTimes + STR + STR + benchmarkNOmit + ")"); System.exit(1); } LOG.info(STR + benchmarkNTimes + STR + STR + benchmarkNOmit + STR); final Configuration conf = HBaseConfiguration.create(); try { 
testCodecs(conf, kvLimit, pathName, compressionName, doBenchmark, doVerify); } finally { (new CacheConfig(conf)).getBlockCache().shutdown(); } }
/** * A command line interface to benchmarks. Parses command-line arguments and * runs the appropriate benchmarks. * @param args Should have length at least 1 and holds the file path to HFile. * @throws IOException If you specified the wrong file. */
A command line interface to benchmarks. Parses command-line arguments and runs the appropriate benchmarks
main
{ "repo_name": "throughsky/lywebank", "path": "hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java", "license": "apache-2.0", "size": 27072 }
[ "java.io.IOException", "org.apache.commons.cli.CommandLine", "org.apache.commons.cli.CommandLineParser", "org.apache.commons.cli.Options", "org.apache.commons.cli.ParseException", "org.apache.commons.cli.PosixParser", "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hbase.HBaseConfiguration", "org.apache.hadoop.hbase.io.hfile.CacheConfig" ]
import java.io.IOException; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import java.io.*; import org.apache.commons.cli.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.io.hfile.*;
[ "java.io", "org.apache.commons", "org.apache.hadoop" ]
java.io; org.apache.commons; org.apache.hadoop;
1,795,715
public static ims.clinical.domain.objects.Actions extractActions(ims.domain.ILightweightDomainFactory domainFactory, ims.coe.vo.DischargeActions valueObject) { return extractActions(domainFactory, valueObject, new HashMap()); }
static ims.clinical.domain.objects.Actions function(ims.domain.ILightweightDomainFactory domainFactory, ims.coe.vo.DischargeActions valueObject) { return extractActions(domainFactory, valueObject, new HashMap()); }
/** * Create the domain object from the value object. * @param domainFactory - used to create existing (persistent) domain objects. * @param valueObject - extract the domain object fields from this. */
Create the domain object from the value object
extractActions
{ "repo_name": "open-health-hub/openmaxims-linux", "path": "openmaxims_workspace/ValueObjects/src/ims/coe/vo/domain/DischargeActionsAssembler.java", "license": "agpl-3.0", "size": 18169 }
[ "java.util.HashMap" ]
import java.util.HashMap;
import java.util.*;
[ "java.util" ]
java.util;
1,331,201
@Override public Coordinate createValue(String parameterValue) { String[] coordinatesCSV = parameterValue.split(","); String srs = GOOGLE_EARTH_SRS; if (coordinatesCSV.length > 2) { srs = coordinatesCSV[2]; } // REMOVE THIS!! // ----------------------- String latitude = coordinatesCSV[0]; if (latitude.equals("$[latitude]")) { latitude = "0"; } String longitude = coordinatesCSV[1]; if (longitude.equals("$[longitude]")) { longitude = "0"; } // ----------------------- //TODO : VERY IMPORTANT!!!! // THE ORDER OF THE COORDINATES IS CHANGED HERE SO THAT THE PNG BUG IS CONSISTENT THROUGHOUT THEIR ASSESSMENT!! // CHANGE BACK WHEN THEY ARE FINIHSED!!!!!! Coordinate coord = new Coordinate(Double.parseDouble(longitude), Double.parseDouble(latitude), srs); //Coordinate coord = new Coordinate(Double.parseDouble(coordinatesCSV[0]), Double.parseDouble(coordinatesCSV[1]), srs); PNG BUG IN THIS LINE!! return coord; }
Coordinate function(String parameterValue) { String[] coordinatesCSV = parameterValue.split(","); String srs = GOOGLE_EARTH_SRS; if (coordinatesCSV.length > 2) { srs = coordinatesCSV[2]; } String latitude = coordinatesCSV[0]; if (latitude.equals(STR)) { latitude = "0"; } String longitude = coordinatesCSV[1]; if (longitude.equals(STR)) { longitude = "0"; } Coordinate coord = new Coordinate(Double.parseDouble(longitude), Double.parseDouble(latitude), srs); return coord; }
/** * Expects the coordinate as a String "latitude,longitude" * @param parameterValue the latitude and longitude as "latitude,longitude" * @return Coordinate returns the coordinate generated */
Expects the coordinate as a String "latitude,longitude"
createValue
{ "repo_name": "openforis/collect-earth", "path": "collect-earth/collect-earth-core/src/main/java/org/openforis/collect/earth/core/handlers/CoordinateAttributeHandler.java", "license": "mit", "size": 2132 }
[ "org.openforis.idm.model.Coordinate" ]
import org.openforis.idm.model.Coordinate;
import org.openforis.idm.model.*;
[ "org.openforis.idm" ]
org.openforis.idm;
1,644,885
public static String bigDecimalToString(boolean currency, boolean percentage, boolean useGrouping, int precision, BigDecimal value, Locale locale) { return bigDecimalToString(currency, percentage, useGrouping, precision, value, locale, getCurrencySymbol()); }
static String function(boolean currency, boolean percentage, boolean useGrouping, int precision, BigDecimal value, Locale locale) { return bigDecimalToString(currency, percentage, useGrouping, precision, value, locale, getCurrencySymbol()); }
/** * * Converts a BigDecimal value to a String * * @param currency whether the value represents a currency * @param percentage whether the value represents a percentage * @param useGrouping whether to use a thousand grouping * @param value the value * @param locale the locale to use * @return */
Converts a BigDecimal value to a String
bigDecimalToString
{ "repo_name": "opencirclesolutions/dynamo", "path": "dynamo-frontend/src/main/java/com/ocs/dynamo/ui/utils/VaadinUtils.java", "license": "apache-2.0", "size": 24081 }
[ "java.math.BigDecimal", "java.util.Locale" ]
import java.math.BigDecimal; import java.util.Locale;
import java.math.*; import java.util.*;
[ "java.math", "java.util" ]
java.math; java.util;
1,555,019
public void setOnPreparedListener(MediaPlayer.OnPreparedListener l) { mOnPreparedListener = l; }
void function(MediaPlayer.OnPreparedListener l) { mOnPreparedListener = l; }
/** * Register a callback to be invoked when the media file * is loaded and ready to go. * * @param l The callback that will be run */
Register a callback to be invoked when the media file is loaded and ready to go
setOnPreparedListener
{ "repo_name": "abiles/DBProject2014", "path": "myClassManage/cocos2d/cocos/platform/android/java/src/org/cocos2dx/lib/Cocos2dxVideoView.java", "license": "mit", "size": 23755 }
[ "android.media.MediaPlayer" ]
import android.media.MediaPlayer;
import android.media.*;
[ "android.media" ]
android.media;
1,186,064
@Override public Collection<BuildableRDFSProperty> getProperties() throws RepositoryException { return anno4j.findAll(BuildableRDFSProperty.class); }
Collection<BuildableRDFSProperty> function() throws RepositoryException { return anno4j.findAll(BuildableRDFSProperty.class); }
/** * Returns the extended resource objects of RDFS properties that were found during * the last call to {@link #build()}. * * @return Returns the RDFS properties in the model built. */
Returns the extended resource objects of RDFS properties that were found during the last call to <code>#build()</code>
getProperties
{ "repo_name": "anno4j/anno4j", "path": "anno4j-core/src/main/java/com/github/anno4j/schema_parsing/building/OWLJavaFileGenerator.java", "license": "apache-2.0", "size": 24925 }
[ "com.github.anno4j.schema_parsing.model.BuildableRDFSProperty", "java.util.Collection", "org.openrdf.repository.RepositoryException" ]
import com.github.anno4j.schema_parsing.model.BuildableRDFSProperty; import java.util.Collection; import org.openrdf.repository.RepositoryException;
import com.github.anno4j.schema_parsing.model.*; import java.util.*; import org.openrdf.repository.*;
[ "com.github.anno4j", "java.util", "org.openrdf.repository" ]
com.github.anno4j; java.util; org.openrdf.repository;
1,659,533
public List<Action> getViewMenuActions() { return Arrays.asList(new Action[] { new NormalViewAction(this), new StackedViewAction(this), new PaintViewAction(this) }); }
List<Action> function() { return Arrays.asList(new Action[] { new NormalViewAction(this), new StackedViewAction(this), new PaintViewAction(this) }); }
/** * Return a list of view menu actions for this vision world. * * @return a list of view menu actions for this vision world */
Return a list of view menu actions for this vision world
getViewMenuActions
{ "repo_name": "automenta/java_dann", "path": "src/syncleus/dann/solve/visionworld/VisionWorld.java", "license": "agpl-3.0", "size": 15876 }
[ "java.util.Arrays", "java.util.List", "javax.swing.Action", "org.simbrain.world.visionworld.action.NormalViewAction", "org.simbrain.world.visionworld.action.PaintViewAction", "org.simbrain.world.visionworld.action.StackedViewAction" ]
import java.util.Arrays; import java.util.List; import javax.swing.Action; import org.simbrain.world.visionworld.action.NormalViewAction; import org.simbrain.world.visionworld.action.PaintViewAction; import org.simbrain.world.visionworld.action.StackedViewAction;
import java.util.*; import javax.swing.*; import org.simbrain.world.visionworld.action.*;
[ "java.util", "javax.swing", "org.simbrain.world" ]
java.util; javax.swing; org.simbrain.world;
1,008,905
void sendRegularData(Object data) throws IOException;
void sendRegularData(Object data) throws IOException;
/** * * Sends Regular data. * * @param data The data to send. * @throws IOException Exception */
Sends Regular data
sendRegularData
{ "repo_name": "snoozesoftware/snoozenode", "path": "src/main/java/org/inria/myriads/snoozenode/comunicator/api/Communicator.java", "license": "gpl-2.0", "size": 716 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
418,724
public void setTimeZoneID(String timeZoneId) { setTimeZone(TimeZone.getTimeZone(timeZoneId)); }
void function(String timeZoneId) { setTimeZone(TimeZone.getTimeZone(timeZoneId)); }
/** Set the timezone. * @param timeZoneId TimeZoneId the ID of the zone as used by * TimeZone.getTimeZone(id) */
Set the timezone
setTimeZoneID
{ "repo_name": "HossainKhademian/Studio3", "path": "plugins/com.aptana.jetty.util.epl/src/com/aptana/jetty/util/epl/DateCache.java", "license": "gpl-3.0", "size": 9499 }
[ "java.util.TimeZone" ]
import java.util.TimeZone;
import java.util.*;
[ "java.util" ]
java.util;
565,847
@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final RemarketingActionServiceClient create(RemarketingActionServiceStub stub) { return new RemarketingActionServiceClient(stub); } protected RemarketingActionServiceClient(RemarketingActionServiceSettings settings) throws IOException { this.settings = settings; this.stub = ((RemarketingActionServiceStubSettings) settings.getStubSettings()).createStub(); } @BetaApi("A restructuring of stub classes is planned, so this may break in the future") protected RemarketingActionServiceClient(RemarketingActionServiceStub stub) { this.settings = null; this.stub = stub; }
@BetaApi(STR) static final RemarketingActionServiceClient function(RemarketingActionServiceStub stub) { return new RemarketingActionServiceClient(stub); } protected RemarketingActionServiceClient(RemarketingActionServiceSettings settings) throws IOException { this.settings = settings; this.stub = ((RemarketingActionServiceStubSettings) settings.getStubSettings()).createStub(); } @BetaApi(STR) protected RemarketingActionServiceClient(RemarketingActionServiceStub stub) { this.settings = null; this.stub = stub; }
/** * Constructs an instance of RemarketingActionServiceClient, using the given stub for making * calls. This is for advanced usage - prefer using create(RemarketingActionServiceSettings). */
Constructs an instance of RemarketingActionServiceClient, using the given stub for making calls. This is for advanced usage - prefer using create(RemarketingActionServiceSettings)
create
{ "repo_name": "googleads/google-ads-java", "path": "google-ads-stubs-v8/src/main/java/com/google/ads/googleads/v8/services/RemarketingActionServiceClient.java", "license": "apache-2.0", "size": 15774 }
[ "com.google.ads.googleads.v8.services.stub.RemarketingActionServiceStub", "com.google.ads.googleads.v8.services.stub.RemarketingActionServiceStubSettings", "com.google.api.core.BetaApi", "java.io.IOException" ]
import com.google.ads.googleads.v8.services.stub.RemarketingActionServiceStub; import com.google.ads.googleads.v8.services.stub.RemarketingActionServiceStubSettings; import com.google.api.core.BetaApi; import java.io.IOException;
import com.google.ads.googleads.v8.services.stub.*; import com.google.api.core.*; import java.io.*;
[ "com.google.ads", "com.google.api", "java.io" ]
com.google.ads; com.google.api; java.io;
2,260,087
public static void writeMatrix(Matrix m, String fileName) { MatrixWriter mw = MatrixWriter.createWriter(MatrixType.ZIP, new File(fileName)); long startTime = System.currentTimeMillis(); mw.writeMatrix( m ); logger.info("writeMatrix() "+fileName+", "+m.getRowCount()*m.getColumnCount()*4+" bytes, "+(System.currentTimeMillis()-startTime)/1000.0 +" secs"); }
static void function(Matrix m, String fileName) { MatrixWriter mw = MatrixWriter.createWriter(MatrixType.ZIP, new File(fileName)); long startTime = System.currentTimeMillis(); mw.writeMatrix( m ); logger.info(STR+fileName+STR+m.getRowCount()*m.getColumnCount()*4+STR+(System.currentTimeMillis()-startTime)/1000.0 +STR); }
/** * Write the matrix out to a new zip file */
Write the matrix out to a new zip file
writeMatrix
{ "repo_name": "moeckel/silo", "path": "third-party/common-base/src/java/com/pb/common/matrix/tests/TestZipMatrix.java", "license": "gpl-2.0", "size": 5961 }
[ "com.pb.common.matrix.Matrix", "com.pb.common.matrix.MatrixType", "com.pb.common.matrix.MatrixWriter", "java.io.File" ]
import com.pb.common.matrix.Matrix; import com.pb.common.matrix.MatrixType; import com.pb.common.matrix.MatrixWriter; import java.io.File;
import com.pb.common.matrix.*; import java.io.*;
[ "com.pb.common", "java.io" ]
com.pb.common; java.io;
1,829,312
public static long[] parseVibratePattern(String stringPattern) { ArrayList<Long> arrayListPattern = new ArrayList<Long>(); Long l; String[] splitPattern = stringPattern.split(","); int VIBRATE_PATTERN_MAX_SECONDS = 60000; int VIBRATE_PATTERN_MAX_PATTERN = 100; for (int i = 0; i < splitPattern.length; i++) { try { l = Long.parseLong(splitPattern[i].trim()); } catch (NumberFormatException e) { return null; } if (l > VIBRATE_PATTERN_MAX_SECONDS) { return null; } arrayListPattern.add(l); } int size = arrayListPattern.size(); if (size > 0 && size < VIBRATE_PATTERN_MAX_PATTERN) { long[] pattern = new long[size]; for (int i = 0; i < pattern.length; i++) { pattern[i] = arrayListPattern.get(i); } return pattern; } return null; }
static long[] function(String stringPattern) { ArrayList<Long> arrayListPattern = new ArrayList<Long>(); Long l; String[] splitPattern = stringPattern.split(","); int VIBRATE_PATTERN_MAX_SECONDS = 60000; int VIBRATE_PATTERN_MAX_PATTERN = 100; for (int i = 0; i < splitPattern.length; i++) { try { l = Long.parseLong(splitPattern[i].trim()); } catch (NumberFormatException e) { return null; } if (l > VIBRATE_PATTERN_MAX_SECONDS) { return null; } arrayListPattern.add(l); } int size = arrayListPattern.size(); if (size > 0 && size < VIBRATE_PATTERN_MAX_PATTERN) { long[] pattern = new long[size]; for (int i = 0; i < pattern.length; i++) { pattern[i] = arrayListPattern.get(i); } return pattern; } return null; }
/** * Parse the user provided custom vibrate pattern into a long[] Borrowed * from SMSPopup */
Parse the user provided custom vibrate pattern into a long[] Borrowed from SMSPopup
parseVibratePattern
{ "repo_name": "tstratton/kitchentimer", "path": "app/src/main/java/com/leinardi/kitchentimer/receivers/AlarmReceiver.java", "license": "gpl-3.0", "size": 7097 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
940,235
void setScript(ScriptObject object);
void setScript(ScriptObject object);
/** * Sets the script. * * @param object The object hosting the script. */
Sets the script
setScript
{ "repo_name": "emilroz/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/treeviewer/view/TreeViewer.java", "license": "gpl-2.0", "size": 36185 }
[ "org.openmicroscopy.shoola.env.data.model.ScriptObject" ]
import org.openmicroscopy.shoola.env.data.model.ScriptObject;
import org.openmicroscopy.shoola.env.data.model.*;
[ "org.openmicroscopy.shoola" ]
org.openmicroscopy.shoola;
1,580,539
@Test public void testHashCodeLoadNameNull() throws Exception { int expected_result = 31; int data = loadMetadataDetails.hashCode(); assertEquals(expected_result, data); }
@Test void function() throws Exception { int expected_result = 31; int data = loadMetadataDetails.hashCode(); assertEquals(expected_result, data); }
/** * This method will test Hashcode which will return 31 if we don't set loadName. * * @throws Exception */
This method will test Hashcode which will return 31 if we don't set loadName
testHashCodeLoadNameNull
{ "repo_name": "manishgupta88/carbondata", "path": "core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java", "license": "apache-2.0", "size": 4334 }
[ "junit.framework.Assert", "org.junit.Test" ]
import junit.framework.Assert; import org.junit.Test;
import junit.framework.*; import org.junit.*;
[ "junit.framework", "org.junit" ]
junit.framework; org.junit;
767,450
public static void setDefaultDownloadIndicator(DownloadIndicator indicator) { checkExitClass(); JNLPRuntime.indicator = indicator; }
static void function(DownloadIndicator indicator) { checkExitClass(); JNLPRuntime.indicator = indicator; }
/** * Sets the default download indicator. * * @param indicator where to show progress * @throws IllegalStateException if caller is not the exit class */
Sets the default download indicator
setDefaultDownloadIndicator
{ "repo_name": "GITNE/icedtea-web", "path": "netx/net/sourceforge/jnlp/runtime/JNLPRuntime.java", "license": "gpl-2.0", "size": 32256 }
[ "net.sourceforge.jnlp.cache.DownloadIndicator" ]
import net.sourceforge.jnlp.cache.DownloadIndicator;
import net.sourceforge.jnlp.cache.*;
[ "net.sourceforge.jnlp" ]
net.sourceforge.jnlp;
791,406
Options options = new Options(); for (ResourceInfo resource : descr) { if (resource.getOptionInfo() instanceof SingleOptionInfo) { options.addOption(generate((SingleOptionInfo) resource .getOptionInfo())); } else { options.addOptionGroup(generate((GroupOptionInfo) resource .getOptionInfo())); } } return options; }
Options options = new Options(); for (ResourceInfo resource : descr) { if (resource.getOptionInfo() instanceof SingleOptionInfo) { options.addOption(generate((SingleOptionInfo) resource .getOptionInfo())); } else { options.addOptionGroup(generate((GroupOptionInfo) resource .getOptionInfo())); } } return options; }
/** * Get {@code Options} constructed from the specified class. * * @param descr * the process descriptor. * @return the {@code Options} object. */
Get Options constructed from the specified class
getOptions
{ "repo_name": "SHAF-WORK/shaf", "path": "core/src/main/java/org/shaf/core/process/cmd/CommandOptionHandler.java", "license": "apache-2.0", "size": 2887 }
[ "org.apache.commons.cli.Options", "org.shaf.core.content.GroupOptionInfo", "org.shaf.core.content.ResourceInfo", "org.shaf.core.content.SingleOptionInfo" ]
import org.apache.commons.cli.Options; import org.shaf.core.content.GroupOptionInfo; import org.shaf.core.content.ResourceInfo; import org.shaf.core.content.SingleOptionInfo;
import org.apache.commons.cli.*; import org.shaf.core.content.*;
[ "org.apache.commons", "org.shaf.core" ]
org.apache.commons; org.shaf.core;
2,724,752
public synchronized void push() { for (int i = 0; i < count; i++) { int ix = (start+i)%buffer.length; LogRecord record = buffer[ix]; buffer[ix] = null; target.publish(record); } // Empty the buffer. start = 0; count = 0; }
synchronized void function() { for (int i = 0; i < count; i++) { int ix = (start+i)%buffer.length; LogRecord record = buffer[ix]; buffer[ix] = null; target.publish(record); } start = 0; count = 0; }
/** * Push any buffered output to the target <tt>Handler</tt>. * <p> * The buffer is then cleared. */
Push any buffered output to the target Handler. The buffer is then cleared
push
{ "repo_name": "z4hyoung/codes", "path": "phone/app/src/main/java/free/yonghu/java/logging/MemoryHandler2.java", "license": "apache-2.0", "size": 11914 }
[ "java.util.logging.LogRecord" ]
import java.util.logging.LogRecord;
import java.util.logging.*;
[ "java.util" ]
java.util;
533,020
@Test(expected = IOException.class) public void wlmEndpoints_noAvailableEndpoints() throws Exception { Connector connector = new Connector(TEST_URL, null); List<String> list = new ArrayList<String>(); list.add("otherEndpoint:8010"); Map<String, Object> env = new HashMap<String, Object>(); env.put(JMXConnector.CREDENTIALS, ConnectorSettings.CERTIFICATE_AUTHENTICATION); env.put(ConnectorSettings.WLM_ENDPOINTS, list); connector.connect(env); }
@Test(expected = IOException.class) void function() throws Exception { Connector connector = new Connector(TEST_URL, null); List<String> list = new ArrayList<String>(); list.add(STR); Map<String, Object> env = new HashMap<String, Object>(); env.put(JMXConnector.CREDENTIALS, ConnectorSettings.CERTIFICATE_AUTHENTICATION); env.put(ConnectorSettings.WLM_ENDPOINTS, list); connector.connect(env); }
/** * Test for no available endpoints exception */
Test for no available endpoints exception
wlmEndpoints_noAvailableEndpoints
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.jmx.connector.client.rest/test/com/ibm/ws/jmx/connector/client/rest/internal/ConnectorTest.java", "license": "epl-1.0", "size": 6813 }
[ "com.ibm.websphere.jmx.connector.rest.ConnectorSettings", "java.io.IOException", "java.util.ArrayList", "java.util.HashMap", "java.util.List", "java.util.Map", "javax.management.remote.JMXConnector", "org.junit.Test" ]
import com.ibm.websphere.jmx.connector.rest.ConnectorSettings; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.management.remote.JMXConnector; import org.junit.Test;
import com.ibm.websphere.jmx.connector.rest.*; import java.io.*; import java.util.*; import javax.management.remote.*; import org.junit.*;
[ "com.ibm.websphere", "java.io", "java.util", "javax.management", "org.junit" ]
com.ibm.websphere; java.io; java.util; javax.management; org.junit;
492,856
public Color getColor () { return cache.getColor(); }
Color function () { return cache.getColor(); }
/** Returns the color of this font. Changing the returned color will have no affect, {@link #setColor(Color)} or * {@link #setColor(float, float, float, float)} must be used. */
Returns the color of this font. Changing the returned color will have no affect, <code>#setColor(Color)</code> or
getColor
{ "repo_name": "domix/libgdx", "path": "gdx/src/com/badlogic/gdx/graphics/g2d/BitmapFont.java", "license": "apache-2.0", "size": 38890 }
[ "com.badlogic.gdx.graphics.Color" ]
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.*;
[ "com.badlogic.gdx" ]
com.badlogic.gdx;
1,156,154
@Test public void testModifiedEncryptedDataStructure() throws Exception { WSSecEncrypt builder = new WSSecEncrypt(); builder.setUserInfo("wss40"); builder.setKeyIdentifierType(WSConstants.BST_DIRECT_REFERENCE); builder.setSymmetricEncAlgorithm(WSConstants.TRIPLE_DES); Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG); WSSecHeader secHeader = new WSSecHeader(doc); secHeader.insertSecurityHeader(); Crypto wssCrypto = CryptoFactory.getInstance("wss40.properties"); Document encryptedDoc = builder.build(doc, wssCrypto, secHeader); Element body = WSSecurityUtil.findBodyElement(doc); Element encryptionMethod = XMLUtils.findElement(body, "EncryptionMethod", WSConstants.ENC_NS); encryptionMethod.setAttributeNS(null, "Algorithm", "http://new-algorithm"); String outputString = XMLUtils.prettyDocumentToString(encryptedDoc); if (LOG.isDebugEnabled()) { LOG.debug(outputString); } WSSecurityEngine newEngine = new WSSecurityEngine(); try { newEngine.processSecurityHeader(doc, null, new KeystoreCallbackHandler(), wssCrypto); fail("Failure expected on a modified EncryptedData structure"); } catch (WSSecurityException ex) { assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.INVALID_SECURITY); } }
void function() throws Exception { WSSecEncrypt builder = new WSSecEncrypt(); builder.setUserInfo("wss40"); builder.setKeyIdentifierType(WSConstants.BST_DIRECT_REFERENCE); builder.setSymmetricEncAlgorithm(WSConstants.TRIPLE_DES); Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG); WSSecHeader secHeader = new WSSecHeader(doc); secHeader.insertSecurityHeader(); Crypto wssCrypto = CryptoFactory.getInstance(STR); Document encryptedDoc = builder.build(doc, wssCrypto, secHeader); Element body = WSSecurityUtil.findBodyElement(doc); Element encryptionMethod = XMLUtils.findElement(body, STR, WSConstants.ENC_NS); encryptionMethod.setAttributeNS(null, STR, STRFailure expected on a modified EncryptedData structure"); } catch (WSSecurityException ex) { assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.INVALID_SECURITY); } }
/** * Test for when an EncryptedData structure is modified */
Test for when an EncryptedData structure is modified
testModifiedEncryptedDataStructure
{ "repo_name": "clibois/wss4j", "path": "ws-security-dom/src/test/java/org/apache/wss4j/dom/message/ModifiedRequestTest.java", "license": "apache-2.0", "size": 24939 }
[ "org.apache.wss4j.common.crypto.Crypto", "org.apache.wss4j.common.crypto.CryptoFactory", "org.apache.wss4j.common.ext.WSSecurityException", "org.apache.wss4j.common.util.XMLUtils", "org.apache.wss4j.dom.WSConstants", "org.apache.wss4j.dom.common.SOAPUtil", "org.apache.wss4j.dom.util.WSSecurityUtil", "org.w3c.dom.Document", "org.w3c.dom.Element" ]
import org.apache.wss4j.common.crypto.Crypto; import org.apache.wss4j.common.crypto.CryptoFactory; import org.apache.wss4j.common.ext.WSSecurityException; import org.apache.wss4j.common.util.XMLUtils; import org.apache.wss4j.dom.WSConstants; import org.apache.wss4j.dom.common.SOAPUtil; import org.apache.wss4j.dom.util.WSSecurityUtil; import org.w3c.dom.Document; import org.w3c.dom.Element;
import org.apache.wss4j.common.crypto.*; import org.apache.wss4j.common.ext.*; import org.apache.wss4j.common.util.*; import org.apache.wss4j.dom.*; import org.apache.wss4j.dom.common.*; import org.apache.wss4j.dom.util.*; import org.w3c.dom.*;
[ "org.apache.wss4j", "org.w3c.dom" ]
org.apache.wss4j; org.w3c.dom;
1,124,170
private Map<String, String> getCacheServerAttributesDefaultValues() { Map<String, String> csAttributesDefault = new HashMap<String, String>(); csAttributesDefault.put("bind-address", CacheServer.DEFAULT_BIND_ADDRESS); csAttributesDefault.put("hostname-for-clients", CacheServer.DEFAULT_HOSTNAME_FOR_CLIENTS); csAttributesDefault.put("max-connections", Integer.toString(CacheServer.DEFAULT_MAX_CONNECTIONS)); csAttributesDefault.put("maximum-message-count", Integer.toString(CacheServer.DEFAULT_MAXIMUM_MESSAGE_COUNT)); csAttributesDefault.put("maximum-time-between-pings", Integer.toString(CacheServer.DEFAULT_MAXIMUM_TIME_BETWEEN_PINGS)); csAttributesDefault.put("max-threads", Integer.toString(CacheServer.DEFAULT_MAX_THREADS)); csAttributesDefault.put("message-time-to-live", Integer.toString(CacheServer.DEFAULT_MESSAGE_TIME_TO_LIVE)); csAttributesDefault.put("notify-by-subscription", Boolean.toString(CacheServer.DEFAULT_NOTIFY_BY_SUBSCRIPTION)); csAttributesDefault.put("port", Integer.toString(CacheServer.DEFAULT_PORT)); csAttributesDefault.put(SOCKET_BUFFER_SIZE, Integer.toString(CacheServer.DEFAULT_SOCKET_BUFFER_SIZE)); csAttributesDefault.put("load-poll-interval", Long.toString(CacheServer.DEFAULT_LOAD_POLL_INTERVAL)); return csAttributesDefault; }
Map<String, String> function() { Map<String, String> csAttributesDefault = new HashMap<String, String>(); csAttributesDefault.put(STR, CacheServer.DEFAULT_BIND_ADDRESS); csAttributesDefault.put(STR, CacheServer.DEFAULT_HOSTNAME_FOR_CLIENTS); csAttributesDefault.put(STR, Integer.toString(CacheServer.DEFAULT_MAX_CONNECTIONS)); csAttributesDefault.put(STR, Integer.toString(CacheServer.DEFAULT_MAXIMUM_MESSAGE_COUNT)); csAttributesDefault.put(STR, Integer.toString(CacheServer.DEFAULT_MAXIMUM_TIME_BETWEEN_PINGS)); csAttributesDefault.put(STR, Integer.toString(CacheServer.DEFAULT_MAX_THREADS)); csAttributesDefault.put(STR, Integer.toString(CacheServer.DEFAULT_MESSAGE_TIME_TO_LIVE)); csAttributesDefault.put(STR, Boolean.toString(CacheServer.DEFAULT_NOTIFY_BY_SUBSCRIPTION)); csAttributesDefault.put("port", Integer.toString(CacheServer.DEFAULT_PORT)); csAttributesDefault.put(SOCKET_BUFFER_SIZE, Integer.toString(CacheServer.DEFAULT_SOCKET_BUFFER_SIZE)); csAttributesDefault.put(STR, Long.toString(CacheServer.DEFAULT_LOAD_POLL_INTERVAL)); return csAttributesDefault; }
/*** * Gets the default values for the cache attributes * * @return a map containing the cache server attributes - default values */
Gets the default values for the cache attributes
getCacheServerAttributesDefaultValues
{ "repo_name": "davebarnes97/geode", "path": "geode-gfsh/src/main/java/org/apache/geode/management/internal/cli/functions/GetMemberConfigInformationFunction.java", "license": "apache-2.0", "size": 10414 }
[ "java.util.HashMap", "java.util.Map", "org.apache.geode.cache.server.CacheServer" ]
import java.util.HashMap; import java.util.Map; import org.apache.geode.cache.server.CacheServer;
import java.util.*; import org.apache.geode.cache.server.*;
[ "java.util", "org.apache.geode" ]
java.util; org.apache.geode;
1,226,168
public final void set(Matrix4d matrix) { assert matrix != null : AssertMessages.notNullParameter(); this.m00 = matrix.m00; this.m01 = matrix.m01; this.m02 = matrix.m02; this.m03 = matrix.m03; this.m10 = matrix.m10; this.m11 = matrix.m11; this.m12 = matrix.m12; this.m13 = matrix.m13; this.m20 = matrix.m20; this.m21 = matrix.m21; this.m22 = matrix.m22; this.m23 = matrix.m23; this.m30 = matrix.m30; this.m31 = matrix.m31; this.m32 = matrix.m32; this.m33 = matrix.m33; this.isIdentity = null; }
final void function(Matrix4d matrix) { assert matrix != null : AssertMessages.notNullParameter(); this.m00 = matrix.m00; this.m01 = matrix.m01; this.m02 = matrix.m02; this.m03 = matrix.m03; this.m10 = matrix.m10; this.m11 = matrix.m11; this.m12 = matrix.m12; this.m13 = matrix.m13; this.m20 = matrix.m20; this.m21 = matrix.m21; this.m22 = matrix.m22; this.m23 = matrix.m23; this.m30 = matrix.m30; this.m31 = matrix.m31; this.m32 = matrix.m32; this.m33 = matrix.m33; this.isIdentity = null; }
/** * Sets the value of this matrix to the double value of the Matrix3. * argument. * * @param matrix * the Matrix4f to be converted to double */
Sets the value of this matrix to the double value of the Matrix3. argument
set
{ "repo_name": "tpiotrow/afc", "path": "core/math/src/main/java/org/arakhne/afc/math/matrix/Matrix4d.java", "license": "apache-2.0", "size": 75836 }
[ "org.arakhne.afc.vmutil.asserts.AssertMessages" ]
import org.arakhne.afc.vmutil.asserts.AssertMessages;
import org.arakhne.afc.vmutil.asserts.*;
[ "org.arakhne.afc" ]
org.arakhne.afc;
2,126,246
public void onICE(JSONObject msg);
void function(JSONObject msg);
/** * receive a ICE command */
receive a ICE command
onICE
{ "repo_name": "LexlooWorks/Comic", "path": "src/com/nvapp/video/webrtc/RTCClient.java", "license": "apache-2.0", "size": 3888 }
[ "org.json.JSONObject" ]
import org.json.JSONObject;
import org.json.*;
[ "org.json" ]
org.json;
2,562,150
private void computeAllTotalStats() { Map<String, Map<Date, AddressStatsDTO>> dtoByDateAndAddress = new HashMap<String, Map<Date, AddressStatsDTO>>(); if (dataContainer.getAddressesStats() != null && !dataContainer.getAddressesStats().isEmpty()) { for (Entry<String, List<AddressStatsDTO>> entry : dataContainer.getAddressesStats().entrySet()) { if (entry.getValue() != null && !entry.getValue().isEmpty()) { Map<Date, AddressStatsDTO> dtoByDate = new HashMap<Date, AddressStatsDTO>(); for (AddressStatsDTO statsDto : entry.getValue()) { dtoByDate.put(statsDto.getRefreshTime(), statsDto); } dtoByDateAndAddress.put(entry.getKey(), dtoByDate); } } } Map<Date, AddressStatsDTO> resultMap = new HashMap<Date, AddressStatsDTO>(); for (Map<Date, AddressStatsDTO> singleAddressMap : dtoByDateAndAddress.values()) { for (Entry<Date, AddressStatsDTO> entry : singleAddressMap.entrySet()) { AddressStatsDTO totalStatsForTheGivenDate = resultMap.get(entry.getKey()); AddressStatsDTO singleStatsForTheGivenDate = entry.getValue(); if (totalStatsForTheGivenDate == null) { totalStatsForTheGivenDate = new AddressStatsDTO(); resultMap.put(entry.getKey(), totalStatsForTheGivenDate); } totalStatsForTheGivenDate.setAddress(""); totalStatsForTheGivenDate.setBalance((totalStatsForTheGivenDate.getBalance() == null ? 0 : totalStatsForTheGivenDate.getBalance()) + (singleStatsForTheGivenDate.getBalance() == null ? 0 : singleStatsForTheGivenDate.getBalance())); totalStatsForTheGivenDate.setImmature((totalStatsForTheGivenDate.getImmature() == null ? 0 : totalStatsForTheGivenDate.getImmature()) + (singleStatsForTheGivenDate.getImmature() == null ? 0 : singleStatsForTheGivenDate.getImmature())); totalStatsForTheGivenDate.setLastHourRejectedShares((totalStatsForTheGivenDate.getLastHourRejectedShares() == null ? 0 : totalStatsForTheGivenDate.getLastHourRejectedShares()) + (singleStatsForTheGivenDate.getLastHourRejectedShares() == null ? 
0 : singleStatsForTheGivenDate .getLastHourRejectedShares())); totalStatsForTheGivenDate.setLastHourShares((totalStatsForTheGivenDate.getLastHourShares() == null ? 0 : totalStatsForTheGivenDate .getLastHourShares()) + (singleStatsForTheGivenDate.getLastHourShares() == null ? 0 : singleStatsForTheGivenDate.getLastHourShares())); totalStatsForTheGivenDate.setMegaHashesPerSeconds((totalStatsForTheGivenDate.getMegaHashesPerSeconds() == null ? 0 : totalStatsForTheGivenDate.getMegaHashesPerSeconds()) + (singleStatsForTheGivenDate.getMegaHashesPerSeconds() == null ? 0 : singleStatsForTheGivenDate.getMegaHashesPerSeconds())); totalStatsForTheGivenDate.setPaidOut((totalStatsForTheGivenDate.getPaidOut() == null ? 0 : totalStatsForTheGivenDate.getPaidOut()) + (singleStatsForTheGivenDate.getPaidOut() == null ? 0 : singleStatsForTheGivenDate.getPaidOut())); totalStatsForTheGivenDate.setRejectedMegaHashesPerSeconds((totalStatsForTheGivenDate.getRejectedMegaHashesPerSeconds() == null ? 0 : totalStatsForTheGivenDate.getRejectedMegaHashesPerSeconds()) + (singleStatsForTheGivenDate.getRejectedMegaHashesPerSeconds() == null ? 0 : singleStatsForTheGivenDate .getRejectedMegaHashesPerSeconds())); totalStatsForTheGivenDate.setUnexchanged((totalStatsForTheGivenDate.getUnexchanged() == null ? 0 : totalStatsForTheGivenDate .getUnexchanged()) + (singleStatsForTheGivenDate.getUnexchanged() == null ? 0 : singleStatsForTheGivenDate.getUnexchanged())); totalStatsForTheGivenDate.setRefreshTime(singleStatsForTheGivenDate.getRefreshTime()); totalStatsForTheGivenDate.setUpdateTime(singleStatsForTheGivenDate.getUpdateTime()); } }
void function() { Map<String, Map<Date, AddressStatsDTO>> dtoByDateAndAddress = new HashMap<String, Map<Date, AddressStatsDTO>>(); if (dataContainer.getAddressesStats() != null && !dataContainer.getAddressesStats().isEmpty()) { for (Entry<String, List<AddressStatsDTO>> entry : dataContainer.getAddressesStats().entrySet()) { if (entry.getValue() != null && !entry.getValue().isEmpty()) { Map<Date, AddressStatsDTO> dtoByDate = new HashMap<Date, AddressStatsDTO>(); for (AddressStatsDTO statsDto : entry.getValue()) { dtoByDate.put(statsDto.getRefreshTime(), statsDto); } dtoByDateAndAddress.put(entry.getKey(), dtoByDate); } } } Map<Date, AddressStatsDTO> resultMap = new HashMap<Date, AddressStatsDTO>(); for (Map<Date, AddressStatsDTO> singleAddressMap : dtoByDateAndAddress.values()) { for (Entry<Date, AddressStatsDTO> entry : singleAddressMap.entrySet()) { AddressStatsDTO totalStatsForTheGivenDate = resultMap.get(entry.getKey()); AddressStatsDTO singleStatsForTheGivenDate = entry.getValue(); if (totalStatsForTheGivenDate == null) { totalStatsForTheGivenDate = new AddressStatsDTO(); resultMap.put(entry.getKey(), totalStatsForTheGivenDate); } totalStatsForTheGivenDate.setAddress(""); totalStatsForTheGivenDate.setBalance((totalStatsForTheGivenDate.getBalance() == null ? 0 : totalStatsForTheGivenDate.getBalance()) + (singleStatsForTheGivenDate.getBalance() == null ? 0 : singleStatsForTheGivenDate.getBalance())); totalStatsForTheGivenDate.setImmature((totalStatsForTheGivenDate.getImmature() == null ? 0 : totalStatsForTheGivenDate.getImmature()) + (singleStatsForTheGivenDate.getImmature() == null ? 0 : singleStatsForTheGivenDate.getImmature())); totalStatsForTheGivenDate.setLastHourRejectedShares((totalStatsForTheGivenDate.getLastHourRejectedShares() == null ? 0 : totalStatsForTheGivenDate.getLastHourRejectedShares()) + (singleStatsForTheGivenDate.getLastHourRejectedShares() == null ? 
0 : singleStatsForTheGivenDate .getLastHourRejectedShares())); totalStatsForTheGivenDate.setLastHourShares((totalStatsForTheGivenDate.getLastHourShares() == null ? 0 : totalStatsForTheGivenDate .getLastHourShares()) + (singleStatsForTheGivenDate.getLastHourShares() == null ? 0 : singleStatsForTheGivenDate.getLastHourShares())); totalStatsForTheGivenDate.setMegaHashesPerSeconds((totalStatsForTheGivenDate.getMegaHashesPerSeconds() == null ? 0 : totalStatsForTheGivenDate.getMegaHashesPerSeconds()) + (singleStatsForTheGivenDate.getMegaHashesPerSeconds() == null ? 0 : singleStatsForTheGivenDate.getMegaHashesPerSeconds())); totalStatsForTheGivenDate.setPaidOut((totalStatsForTheGivenDate.getPaidOut() == null ? 0 : totalStatsForTheGivenDate.getPaidOut()) + (singleStatsForTheGivenDate.getPaidOut() == null ? 0 : singleStatsForTheGivenDate.getPaidOut())); totalStatsForTheGivenDate.setRejectedMegaHashesPerSeconds((totalStatsForTheGivenDate.getRejectedMegaHashesPerSeconds() == null ? 0 : totalStatsForTheGivenDate.getRejectedMegaHashesPerSeconds()) + (singleStatsForTheGivenDate.getRejectedMegaHashesPerSeconds() == null ? 0 : singleStatsForTheGivenDate .getRejectedMegaHashesPerSeconds())); totalStatsForTheGivenDate.setUnexchanged((totalStatsForTheGivenDate.getUnexchanged() == null ? 0 : totalStatsForTheGivenDate .getUnexchanged()) + (singleStatsForTheGivenDate.getUnexchanged() == null ? 0 : singleStatsForTheGivenDate.getUnexchanged())); totalStatsForTheGivenDate.setRefreshTime(singleStatsForTheGivenDate.getRefreshTime()); totalStatsForTheGivenDate.setUpdateTime(singleStatsForTheGivenDate.getUpdateTime()); } }
/** * Load all the total stats */
Load all the total stats
computeAllTotalStats
{ "repo_name": "Stratehm/multipool-stats-backend", "path": "src/main/java/strat/mining/multipool/stats/client/mvp/model/middlecoin/MiddlecoinDataManager.java", "license": "gpl-3.0", "size": 31633 }
[ "java.util.Date", "java.util.HashMap", "java.util.List", "java.util.Map" ]
import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
789,449
@Override public Map<K, ValueHolder<V>> bulkComputeIfAbsent(final Set<? extends K> keys, final Function<Iterable<? extends K>, Iterable<? extends Map.Entry<? extends K, ? extends V>>> mappingFunction) throws StoreAccessException { if(mappingFunction instanceof Ehcache.GetAllFunction) { Map<K, ValueHolder<V>> map = new HashMap<>(); for (K key : keys) { ValueHolder<V> value; try { value = getInternal(key); } catch (TimeoutException e) { // This timeout handling is safe **only** in the context of a get/read operation! value = null; } map.put(key, value); } return map; } else { throw new UnsupportedOperationException("This bulkComputeIfAbsent method is not yet capable of handling generic computation functions"); } }
Map<K, ValueHolder<V>> function(final Set<? extends K> keys, final Function<Iterable<? extends K>, Iterable<? extends Map.Entry<? extends K, ? extends V>>> mappingFunction) throws StoreAccessException { if(mappingFunction instanceof Ehcache.GetAllFunction) { Map<K, ValueHolder<V>> map = new HashMap<>(); for (K key : keys) { ValueHolder<V> value; try { value = getInternal(key); } catch (TimeoutException e) { value = null; } map.put(key, value); } return map; } else { throw new UnsupportedOperationException(STR); } }
/** * The assumption is that this method will be invoked only by cache.getAll method. */
The assumption is that this method will be invoked only by cache.getAll method
bulkComputeIfAbsent
{ "repo_name": "rkavanap/ehcache3", "path": "clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java", "license": "apache-2.0", "size": 36384 }
[ "java.util.HashMap", "java.util.Map", "java.util.Set", "java.util.concurrent.TimeoutException", "java.util.function.Function", "org.ehcache.core.Ehcache", "org.ehcache.spi.resilience.StoreAccessException" ]
import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeoutException; import java.util.function.Function; import org.ehcache.core.Ehcache; import org.ehcache.spi.resilience.StoreAccessException;
import java.util.*; import java.util.concurrent.*; import java.util.function.*; import org.ehcache.core.*; import org.ehcache.spi.resilience.*;
[ "java.util", "org.ehcache.core", "org.ehcache.spi" ]
java.util; org.ehcache.core; org.ehcache.spi;
2,883,578
@POST @Override public Response post(Model model) { if (getWebTarget() == null) throw new NotFoundException("Resource URI not supplied"); // cannot throw Exception in constructor: https://github.com/eclipse-ee4j/jersey/issues/4436 if (log.isDebugEnabled()) log.debug("POSTing Dataset to URI: {}", getWebTarget().getUri()); return getWebTarget().request(). accept(getMediaTypes().getReadable(Model.class).toArray(new javax.ws.rs.core.MediaType[0])). post(Entity.entity(model, com.atomgraph.core.MediaType.APPLICATION_NTRIPLES_TYPE)); }
Response function(Model model) { if (getWebTarget() == null) throw new NotFoundException(STR); if (log.isDebugEnabled()) log.debug(STR, getWebTarget().getUri()); return getWebTarget().request(). accept(getMediaTypes().getReadable(Model.class).toArray(new javax.ws.rs.core.MediaType[0])). post(Entity.entity(model, com.atomgraph.core.MediaType.APPLICATION_NTRIPLES_TYPE)); }
/** * Forwards POST request with RDF dataset body and returns RDF response from remote resource. * * @param model * @return response */
Forwards POST request with RDF dataset body and returns RDF response from remote resource
post
{ "repo_name": "AtomGraph/Web-Client", "path": "src/main/java/com/atomgraph/client/model/impl/ProxyResourceBase.java", "license": "apache-2.0", "size": 12551 }
[ "javax.ws.rs.NotFoundException", "javax.ws.rs.client.Entity", "javax.ws.rs.core.MediaType", "javax.ws.rs.core.Response", "org.apache.jena.rdf.model.Model" ]
import javax.ws.rs.NotFoundException; import javax.ws.rs.client.Entity; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.jena.rdf.model.Model;
import javax.ws.rs.*; import javax.ws.rs.client.*; import javax.ws.rs.core.*; import org.apache.jena.rdf.model.*;
[ "javax.ws", "org.apache.jena" ]
javax.ws; org.apache.jena;
2,257,070
public SearchEngineInfo getDefaultSearchEngine() { return mDefaultSearchEngine; }
SearchEngineInfo function() { return mDefaultSearchEngine; }
/** * Get system default search engine. * @return the first item in config file as system default search engine */
Get system default search engine
getDefaultSearchEngine
{ "repo_name": "rex-xxx/mt6572_x201", "path": "mediatek/frameworks/base/search/java/com/mediatek/search/SearchEngineManagerService.java", "license": "gpl-2.0", "size": 10189 }
[ "com.mediatek.common.search.SearchEngineInfo" ]
import com.mediatek.common.search.SearchEngineInfo;
import com.mediatek.common.search.*;
[ "com.mediatek.common" ]
com.mediatek.common;
2,773,506
public static synchronized Iterator<Object[]> getDataFromSpreadsheet(final Class<?> clazz, final String filename, final String sheetName, final int sheetNumber, final String[] fields, final Filter filter, final boolean readHeaders) { return getDataFromSpreadsheet(clazz, filename, sheetName, sheetNumber, fields, filter, readHeaders, true); }
static synchronized Iterator<Object[]> function(final Class<?> clazz, final String filename, final String sheetName, final int sheetNumber, final String[] fields, final Filter filter, final boolean readHeaders) { return getDataFromSpreadsheet(clazz, filename, sheetName, sheetNumber, fields, filter, readHeaders, true); }
/** * Reads data from spreadsheet. If sheetName and sheetNumber both are supplied the sheetName takes precedence. Put * the excel sheet in the same folder as the test case and specify clazz as <code>this.getClass()</code> . */
Reads data from spreadsheet. If sheetName and sheetNumber both are supplied the sheetName takes precedence. Put the excel sheet in the same folder as the test case and specify clazz as <code>this.getClass()</code>
getDataFromSpreadsheet
{ "repo_name": "usmankec/DriveAccessSelenium", "path": "src/main/java/com/seleniumtests/util/SpreadSheetHelper.java", "license": "apache-2.0", "size": 28371 }
[ "com.seleniumtests.core.Filter", "java.util.Iterator" ]
import com.seleniumtests.core.Filter; import java.util.Iterator;
import com.seleniumtests.core.*; import java.util.*;
[ "com.seleniumtests.core", "java.util" ]
com.seleniumtests.core; java.util;
122,013
UserDto getUserByLogin(String login);
UserDto getUserByLogin(String login);
/** * Search and returns a user for a given login. The login is unique for each * user. * * @param login the login for the user to be searched. * @return the user for the login. */
Search and returns a user for a given login. The login is unique for each user
getUserByLogin
{ "repo_name": "dicentim/currency-converter", "path": "src/main/java/edu/currencyconv/api/UserManagerService.java", "license": "apache-2.0", "size": 1815 }
[ "edu.currencyconv.dto.UserDto" ]
import edu.currencyconv.dto.UserDto;
import edu.currencyconv.dto.*;
[ "edu.currencyconv.dto" ]
edu.currencyconv.dto;
1,189,256
public Iterator<Variable> getParameters() { return getD().getParameters(); }
Iterator<Variable> function() { return getD().getParameters(); }
/** * Method to return an Iterator over all Variables marked as parameters on this Cell. * @return an Iterator over all Variables on this Cell. */
Method to return an Iterator over all Variables marked as parameters on this Cell
getParameters
{ "repo_name": "imr/Electric8", "path": "com/sun/electric/database/hierarchy/Cell.java", "license": "gpl-3.0", "size": 185659 }
[ "com.sun.electric.database.variable.Variable", "java.util.Iterator" ]
import com.sun.electric.database.variable.Variable; import java.util.Iterator;
import com.sun.electric.database.variable.*; import java.util.*;
[ "com.sun.electric", "java.util" ]
com.sun.electric; java.util;
476,033
public static RoundedBitmapDrawable getRoundedBitmap(Resources res, byte[] imgInBytes) { Bitmap srcBitmap = BitmapFactory.decodeByteArray(imgInBytes, 0, imgInBytes.length); return getRoundedBitmap(res, srcBitmap); }
static RoundedBitmapDrawable function(Resources res, byte[] imgInBytes) { Bitmap srcBitmap = BitmapFactory.decodeByteArray(imgInBytes, 0, imgInBytes.length); return getRoundedBitmap(res, srcBitmap); }
/** * Return a rounded bitmap version of the image contained in the byte array * * @param res the context resources * @param imgInBytes the byte array with the image * @return the RoundedBitmapDrawable with the rounded bitmap */
Return a rounded bitmap version of the image contained in the byte array
getRoundedBitmap
{ "repo_name": "abicelis/CreditCardExpenseManager", "path": "app/src/main/java/ve/com/abicelis/creditcardexpensemanager/app/utils/ImageUtils.java", "license": "mit", "size": 6232 }
[ "android.content.res.Resources", "android.graphics.Bitmap", "android.graphics.BitmapFactory", "android.support.v4.graphics.drawable.RoundedBitmapDrawable" ]
import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.support.v4.graphics.drawable.RoundedBitmapDrawable;
import android.content.res.*; import android.graphics.*; import android.support.v4.graphics.drawable.*;
[ "android.content", "android.graphics", "android.support" ]
android.content; android.graphics; android.support;
2,882,890
@Override public void run() { int opsProcessed = 0; Op op = null; try { dataXceiverServer.addPeer(peer, Thread.currentThread(), this); peer.setWriteTimeout(datanode.getDnConf().socketWriteTimeout); InputStream input = socketIn; try { IOStreamPair saslStreams = datanode.saslServer.receive(peer, socketOut, socketIn, datanode.getXferAddress().getPort(), datanode.getDatanodeId()); input = new BufferedInputStream(saslStreams.in, HdfsConstants.SMALL_BUFFER_SIZE); socketOut = saslStreams.out; } catch (InvalidMagicNumberException imne) { if (imne.isHandshake4Encryption()) { LOG.info("Failed to read expected encryption handshake from client " + "at " + peer.getRemoteAddressString() + ". Perhaps the client " + "is running an older version of Hadoop which does not support " + "encryption"); } else { LOG.info("Failed to read expected SASL data transfer protection " + "handshake from client at " + peer.getRemoteAddressString() + ". Perhaps the client is running an older version of Hadoop " + "which does not support SASL data transfer protection"); } return; } super.initialize(new DataInputStream(input)); // We process requests in a loop, and stay around for a short timeout. // This optimistic behaviour allows the other end to reuse connections. // Setting keepalive timeout to 0 disable this behavior. do { updateCurrentThreadName("Waiting for operation #" + (opsProcessed + 1)); try { if (opsProcessed != 0) { assert dnConf.socketKeepaliveTimeout > 0; peer.setReadTimeout(dnConf.socketKeepaliveTimeout); } else { peer.setReadTimeout(dnConf.socketTimeout); } op = readOp(); } catch (InterruptedIOException ignored) { // Time out while we wait for client rpc break; } catch (IOException err) { // Since we optimistically expect the next op, it's quite normal to get EOF here. 
if (opsProcessed > 0 && (err instanceof EOFException || err instanceof ClosedChannelException)) { if (LOG.isDebugEnabled()) { LOG.debug("Cached " + peer + " closing after " + opsProcessed + " ops"); } } else { incrDatanodeNetworkErrors(); throw err; } break; } // restore normal timeout if (opsProcessed != 0) { peer.setReadTimeout(dnConf.socketTimeout); } opStartTime = monotonicNow(); processOp(op); ++opsProcessed; } while ((peer != null) && (!peer.isClosed() && dnConf.socketKeepaliveTimeout > 0)); } catch (Throwable t) { String s = datanode.getDisplayName() + ":DataXceiver error processing " + ((op == null) ? "unknown" : op.name()) + " operation " + " src: " + remoteAddress + " dst: " + localAddress; if (op == Op.WRITE_BLOCK && t instanceof ReplicaAlreadyExistsException) { // For WRITE_BLOCK, it is okay if the replica already exists since // client and replication may write the same block to the same datanode // at the same time. if (LOG.isTraceEnabled()) { LOG.trace(s, t); } else { LOG.info(s + "; " + t); } } else if (op == Op.READ_BLOCK && t instanceof SocketTimeoutException) { String s1 = "Likely the client has stopped reading, disconnecting it"; s1 += " (" + s + ")"; if (LOG.isTraceEnabled()) { LOG.trace(s1, t); } else { LOG.info(s1 + "; " + t); } } else { LOG.error(s, t); } } finally { if (LOG.isDebugEnabled()) { LOG.debug(datanode.getDisplayName() + ":Number of active connections is: " + datanode.getXceiverCount()); } updateCurrentThreadName("Cleaning up"); if (peer != null) { dataXceiverServer.closePeer(peer); IOUtils.closeStream(in); } } }
void function() { int opsProcessed = 0; Op op = null; try { dataXceiverServer.addPeer(peer, Thread.currentThread(), this); peer.setWriteTimeout(datanode.getDnConf().socketWriteTimeout); InputStream input = socketIn; try { IOStreamPair saslStreams = datanode.saslServer.receive(peer, socketOut, socketIn, datanode.getXferAddress().getPort(), datanode.getDatanodeId()); input = new BufferedInputStream(saslStreams.in, HdfsConstants.SMALL_BUFFER_SIZE); socketOut = saslStreams.out; } catch (InvalidMagicNumberException imne) { if (imne.isHandshake4Encryption()) { LOG.info(STR + STR + peer.getRemoteAddressString() + STR + STR + STR); } else { LOG.info(STR + STR + peer.getRemoteAddressString() + STR + STR); } return; } super.initialize(new DataInputStream(input)); do { updateCurrentThreadName(STR + (opsProcessed + 1)); try { if (opsProcessed != 0) { assert dnConf.socketKeepaliveTimeout > 0; peer.setReadTimeout(dnConf.socketKeepaliveTimeout); } else { peer.setReadTimeout(dnConf.socketTimeout); } op = readOp(); } catch (InterruptedIOException ignored) { break; } catch (IOException err) { if (opsProcessed > 0 && (err instanceof EOFException err instanceof ClosedChannelException)) { if (LOG.isDebugEnabled()) { LOG.debug(STR + peer + STR + opsProcessed + STR); } } else { incrDatanodeNetworkErrors(); throw err; } break; } if (opsProcessed != 0) { peer.setReadTimeout(dnConf.socketTimeout); } opStartTime = monotonicNow(); processOp(op); ++opsProcessed; } while ((peer != null) && (!peer.isClosed() && dnConf.socketKeepaliveTimeout > 0)); } catch (Throwable t) { String s = datanode.getDisplayName() + STR + ((op == null) ? 
STR : op.name()) + STR + STR + remoteAddress + STR + localAddress; if (op == Op.WRITE_BLOCK && t instanceof ReplicaAlreadyExistsException) { if (LOG.isTraceEnabled()) { LOG.trace(s, t); } else { LOG.info(s + STR + t); } } else if (op == Op.READ_BLOCK && t instanceof SocketTimeoutException) { String s1 = STR; s1 += STR + s + ")"; if (LOG.isTraceEnabled()) { LOG.trace(s1, t); } else { LOG.info(s1 + STR + t); } } else { LOG.error(s, t); } } finally { if (LOG.isDebugEnabled()) { LOG.debug(datanode.getDisplayName() + STR + datanode.getXceiverCount()); } updateCurrentThreadName(STR); if (peer != null) { dataXceiverServer.closePeer(peer); IOUtils.closeStream(in); } } }
/** * Read/write data from/to the DataXceiverServer. */
Read/write data from/to the DataXceiverServer
run
{ "repo_name": "kristiantokarim/GIK-Hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java", "license": "apache-2.0", "size": 51934 }
[ "java.io.BufferedInputStream", "java.io.DataInputStream", "java.io.EOFException", "java.io.IOException", "java.io.InputStream", "java.io.InterruptedIOException", "java.net.SocketTimeoutException", "java.nio.channels.ClosedChannelException", "org.apache.hadoop.hdfs.protocol.HdfsConstants", "org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair", "org.apache.hadoop.hdfs.protocol.datatransfer.Op", "org.apache.hadoop.hdfs.protocol.datatransfer.sasl.InvalidMagicNumberException", "org.apache.hadoop.io.IOUtils", "org.apache.hadoop.util.Time" ]
import java.io.BufferedInputStream; import java.io.DataInputStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; import java.net.SocketTimeoutException; import java.nio.channels.ClosedChannelException; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import org.apache.hadoop.hdfs.protocol.datatransfer.Op; import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.InvalidMagicNumberException; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.Time;
import java.io.*; import java.net.*; import java.nio.channels.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.protocol.datatransfer.*; import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.*; import org.apache.hadoop.io.*; import org.apache.hadoop.util.*;
[ "java.io", "java.net", "java.nio", "org.apache.hadoop" ]
java.io; java.net; java.nio; org.apache.hadoop;
2,796,823
/**
 * Writes every entry of the given map as URL-encoded form data to the request body.
 *
 * @param values  name/value pairs to write; an empty map writes nothing
 * @param charset character set used to encode each entry
 * @return this request, for chaining
 * @throws HttpRequestException if writing the form data fails
 */
public HttpRequest form(final Map<?, ?> values, final String charset) throws HttpRequestException {
  if (values.isEmpty()) {
    return this;
  }
  for (final Entry<?, ?> pair : values.entrySet()) {
    form(pair, charset);
  }
  return this;
}
HttpRequest function(final Map<?, ?> values, final String charset) throws HttpRequestException { if (!values.isEmpty()) for (Entry<?, ?> entry : values.entrySet()) form(entry, charset); return this; }
/** * Write the values in the map as encoded form data to the request body * * @param values * @param charset * @return this request * @throws HttpRequestException */
Write the values in the map as encoded form data to the request body
form
{ "repo_name": "Tomucha/gae-java-proxy", "path": "src/main/java/cz/tomucha/gae/proxy/HttpRequest.java", "license": "apache-2.0", "size": 81841 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,815,464
/**
 * Returns the dialog window backing this interface.
 *
 * @return the {@link Dialog} instance held by this activity;
 *         presumably null until the dialog is first created — TODO confirm lifecycle
 */
public Dialog getDialog() { return dialog; }
Dialog function() { return dialog; }
/** * This method will return a dialog window for the interface. * * @return a dialog window */
This method will return a dialog window for the interface
getDialog
{ "repo_name": "CMPUT301W15T06/Project", "path": "App/src/ca/ualberta/CMPUT301W15T06/MainActivity.java", "license": "apache-2.0", "size": 6680 }
[ "android.app.Dialog" ]
import android.app.Dialog;
import android.app.*;
[ "android.app" ]
android.app;
1,255,044
/**
 * Writes the string to the given file, overwriting any existing content.
 * Shorthand for {@code write(s, f, OVERWRITE, create)}.
 *
 * @param s      the text to write
 * @param f      the destination file
 * @param create whether the file should be created if it does not already exist
 * @throws IOException if the write fails
 */
public static void write(String s, File f, boolean create) throws IOException{ write(s, f, OVERWRITE, create); }
static void function(String s, File f, boolean create) throws IOException{ write(s, f, OVERWRITE, create); }
/** * Shorthand for write(s, f, OVERWRITE, create) */
Shorthand for write(s, f, OVERWRITE, create)
write
{ "repo_name": "KamranMackey/CommandHelper", "path": "src/main/java/com/laytonsmith/PureUtilities/Common/FileUtil.java", "license": "mit", "size": 12481 }
[ "java.io.File", "java.io.IOException" ]
import java.io.File; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
693,484
/**
 * Encodes the given characters with the supplied charset and writes the
 * resulting bytes to the output stream. A null array is silently ignored.
 *
 * @param data     characters to write; null means "write nothing"
 * @param output   destination stream to write to
 * @param encoding charset to use; null falls back to the platform default via Charsets.toCharset
 * @throws IOException if writing to the stream fails
 */
public static void write(char[] data, OutputStream output, Charset encoding) throws IOException {
    if (data == null) {
        return;
    }
    final String text = new String(data);
    output.write(StringCodingUtils.getBytes(text, Charsets.toCharset(encoding)));
}
static void function(char[] data, OutputStream output, Charset encoding) throws IOException { if (data != null) { output.write(StringCodingUtils.getBytes(new String(data), Charsets.toCharset(encoding))); } }
/** * Writes chars from a <code>char[]</code> to bytes on an * <code>OutputStream</code> using the specified character encoding. * <p/> * This method uses {@link String#String(char[])} and * {@link String#getBytes(String)}. * * @param data the char array to write, do not modify during output, * null ignored * @param output the <code>OutputStream</code> to write to * @param encoding the encoding to use, null means platform default * @throws NullPointerException if output is null * @throws IOException if an I/O error occurs * @since 2.3 */
Writes chars from a <code>char[]</code> to bytes on an <code>OutputStream</code> using the specified character encoding. This method uses <code>String#String(char[])</code> and <code>String#getBytes(String)</code>
write
{ "repo_name": "solaris0403/SeleneDemo", "path": "common_lib/src/main/java/com/tony/selene/common/trinea/android/common/io/IOUtil.java", "license": "gpl-2.0", "size": 95443 }
[ "java.io.IOException", "java.io.OutputStream", "java.nio.charset.Charset" ]
import java.io.IOException; import java.io.OutputStream; import java.nio.charset.Charset;
import java.io.*; import java.nio.charset.*;
[ "java.io", "java.nio" ]
java.io; java.nio;
347,759
/**
 * Issues an HTTP DELETE for this item against the service.
 *
 * @return the resulting {@link BuiltInIdentityProvider} if the service returns a body on deletion, otherwise null
 * @throws ClientException if there was an exception during the delete operation
 */
@Nullable public BuiltInIdentityProvider delete() throws ClientException { return send(HttpMethod.DELETE, null); }
BuiltInIdentityProvider function() throws ClientException { return send(HttpMethod.DELETE, null); }
/** * Delete this item from the service * @return the resulting response if the service returns anything on deletion * * @throws ClientException if there was an exception during the delete operation */
Delete this item from the service
delete
{ "repo_name": "microsoftgraph/msgraph-sdk-java", "path": "src/main/java/com/microsoft/graph/requests/BuiltInIdentityProviderRequest.java", "license": "mit", "size": 6379 }
[ "com.microsoft.graph.core.ClientException", "com.microsoft.graph.http.HttpMethod", "com.microsoft.graph.models.BuiltInIdentityProvider" ]
import com.microsoft.graph.core.ClientException; import com.microsoft.graph.http.HttpMethod; import com.microsoft.graph.models.BuiltInIdentityProvider;
import com.microsoft.graph.core.*; import com.microsoft.graph.http.*; import com.microsoft.graph.models.*;
[ "com.microsoft.graph" ]
com.microsoft.graph;
2,768,759
AuthenticationException tE; tE = new AuthenticationException(); assertNull("getMessage() must return null", tE.getMessage()); assertNull("getCause() must return null", tE.getCause()); }
AuthenticationException tE; tE = new AuthenticationException(); assertNull(STR, tE.getMessage()); assertNull(STR, tE.getCause()); }
/** * Test for <code>AuthenticationException()</code> constructor * Assertion: constructs AuthenticationException with null message and * null root exception. */
Test for <code>AuthenticationException()</code> constructor Assertion: constructs AuthenticationException with null message and null root exception
testAuthenticationException01
{ "repo_name": "freeVM/freeVM", "path": "enhanced/archive/classlib/java6/modules/auth/src/test/java/common/org/apache/harmony/auth/tests/javax/security/sasl/AuthenticationExceptionTest.java", "license": "apache-2.0", "size": 7268 }
[ "javax.security.sasl.AuthenticationException" ]
import javax.security.sasl.AuthenticationException;
import javax.security.sasl.*;
[ "javax.security" ]
javax.security;
1,018,971
/**
 * Deletes the given filter group when the current user either holds the
 * DELETE_FILTER right or owns the group; otherwise a warning is shown.
 *
 * @param group the filter group to delete
 */
public void delete(ScriptFilterGroup group) {
    final boolean allowed = security.hasRight(AccessRight.DELETE_FILTER) || security.isOwner(group);
    if (allowed) {
        new ScriptFilterGroupDao().delete(group);
        refresh();
    } else {
        messages.warn("You don't have permission to delete this filter group.");
    }
}
void function(ScriptFilterGroup group) { if (!security.hasRight(AccessRight.DELETE_FILTER) && !security.isOwner(group)) { messages.warn(STR); } else { new ScriptFilterGroupDao().delete(group); refresh(); } }
/** * Code for deleting a filter group goes here. * * @param event */
Code for deleting a filter group goes here
delete
{ "repo_name": "rkadle/Tank", "path": "web/web_support/src/main/java/com/intuit/tank/filter/FilterGroupBean.java", "license": "epl-1.0", "size": 3075 }
[ "com.intuit.tank.dao.ScriptFilterGroupDao", "com.intuit.tank.project.ScriptFilterGroup", "com.intuit.tank.vm.settings.AccessRight" ]
import com.intuit.tank.dao.ScriptFilterGroupDao; import com.intuit.tank.project.ScriptFilterGroup; import com.intuit.tank.vm.settings.AccessRight;
import com.intuit.tank.dao.*; import com.intuit.tank.project.*; import com.intuit.tank.vm.settings.*;
[ "com.intuit.tank" ]
com.intuit.tank;
336,175
/**
 * Generates a base64-encoded cipher key string from the given text.
 *
 * <p>Useful for turning user-provided text into a key that can be placed in
 * configuration files: the text is converted to its UTF-8 bytes and the
 * result is the base64 encoding of those bytes.</p>
 *
 * @param text the text from which the key bytes are generated; null or empty
 *             text yields the encoding of an empty byte array
 * @return base64 encoding of the UTF-8 bytes generated from the given text
 */
public static String getKey( final String text ) {
  byte[] bytes = new byte[0];
  if ( ( text != null ) && ( text.length() > 0 ) ) {
    // The Charset overload of getBytes never throws, unlike getBytes(String);
    // UTF-8 is guaranteed to be available on every JVM, so the previous
    // catch-and-printStackTrace of UnsupportedEncodingException was dead code.
    bytes = text.getBytes( java.nio.charset.StandardCharsets.UTF_8 );
  }
  return new String( encode( bytes ) );
}
static String function( final String text ) { byte[] bytes = new byte[0]; if ( ( text != null ) && ( text.length() > 0 ) ) { try { bytes = text.getBytes( UTF_8 ); } catch ( final UnsupportedEncodingException e ) { e.printStackTrace(); } } return new String( encode( bytes ) ); }
/** * Generate a string representation of a cipher key based on the given text. * * <p>This method is useful for generating a cipher key based on text the * user may provide. The result is a base64 encoding of the bytes which can * be placed in configuration files. * * @param text The text from which the bytes are generated * * @return Base64 encoding of the bytes generated from the given text. */
Generate a string representation of a cipher key based on the given text. This method is useful for generating a cipher key based on text the user may provide. The result is a base64 encoding of the bytes which can be placed in configuration files
getKey
{ "repo_name": "sdcote/loader", "path": "src/main/java/coyote/commons/CipherUtil.java", "license": "mit", "size": 26258 }
[ "java.io.UnsupportedEncodingException" ]
import java.io.UnsupportedEncodingException;
import java.io.*;
[ "java.io" ]
java.io;
178,737
/**
 * Shows a short {@link Snackbar} message anchored to the given view.
 *
 * @param view    the view to find a parent from
 * @param message the message to show; ignored when null or empty
 */
protected void showSnackbar(@NonNull View view, String message) {
    // TextUtils.isEmpty already returns true for null, so the previous
    // explicit null check was redundant.
    if (!TextUtils.isEmpty(message)) {
        Snackbar.make(view, message, Snackbar.LENGTH_SHORT).show();
    }
}
void function(@NonNull View view, String message) { if (null != message && !TextUtils.isEmpty(message)) { Snackbar.make(view, message, Snackbar.LENGTH_SHORT).show(); } }
/** * Shows a {@link android.support.design.widget.Snackbar} message. * * @param view The view to find a parent from. * @param message An string representing a message to be shown. */
Shows a <code>android.support.design.widget.Snackbar</code> message
showSnackbar
{ "repo_name": "katsura122/DemoArchitecture", "path": "library/src/main/java/com/katsuraf/library/base/BaseAppCompatActivity.java", "license": "mit", "size": 6048 }
[ "android.support.annotation.NonNull", "android.support.design.widget.Snackbar", "android.text.TextUtils", "android.view.View" ]
import android.support.annotation.NonNull; import android.support.design.widget.Snackbar; import android.text.TextUtils; import android.view.View;
import android.support.annotation.*; import android.support.design.widget.*; import android.text.*; import android.view.*;
[ "android.support", "android.text", "android.view" ]
android.support; android.text; android.view;
2,799,907
/**
 * Verifies DFSUtil.getInvalidateWorkPctPerIteration(): the default value is
 * positive, explicitly configured values of 0.5f and 1.0f are returned as
 * set, and configuring 0.0f raises an IllegalArgumentException (expected via
 * the JUnit ExpectedException rule on the final call).
 */
@Test public void testGetInvalidateWorkPctPerIteration() { Configuration conf = new Configuration(); float blocksInvalidateWorkPct = DFSUtil .getInvalidateWorkPctPerIteration(conf); assertTrue(blocksInvalidateWorkPct > 0); conf.set(DFSConfigKeys.DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "0.5f"); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); assertEquals(blocksInvalidateWorkPct, 0.5f, blocksInvalidateWorkPct * 1e-7); conf.set(DFSConfigKeys. DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "1.0f"); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); assertEquals(blocksInvalidateWorkPct, 1.0f, blocksInvalidateWorkPct * 1e-7); conf.set(DFSConfigKeys. DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "0.0f"); exception.expect(IllegalArgumentException.class); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); }
void function() { Configuration conf = new Configuration(); float blocksInvalidateWorkPct = DFSUtil .getInvalidateWorkPctPerIteration(conf); assertTrue(blocksInvalidateWorkPct > 0); conf.set(DFSConfigKeys.DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "0.5f"); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); assertEquals(blocksInvalidateWorkPct, 0.5f, blocksInvalidateWorkPct * 1e-7); conf.set(DFSConfigKeys. DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "1.0f"); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); assertEquals(blocksInvalidateWorkPct, 1.0f, blocksInvalidateWorkPct * 1e-7); conf.set(DFSConfigKeys. DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "0.0f"); exception.expect(IllegalArgumentException.class); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); }
/** * This testcase tests whether the default value returned by * DFSUtil.getInvalidateWorkPctPerIteration() is positive, * and whether an IllegalArgumentException will be thrown * when 0.0f is retrieved */
This testcase tests whether the default value returned by DFSUtil.getInvalidateWorkPctPerIteration() is positive, and whether an IllegalArgumentException will be thrown when 0.0f is retrieved
testGetInvalidateWorkPctPerIteration
{ "repo_name": "jsrudani/HadoopHDFSProject", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicy.java", "license": "apache-2.0", "size": 41261 }
[ "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hdfs.DFSConfigKeys", "org.apache.hadoop.hdfs.DFSUtil", "org.junit.Assert" ]
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.junit.Assert;
import org.apache.hadoop.conf.*; import org.apache.hadoop.hdfs.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
1,173,285
/**
 * Serializes a MandatoryManyTransition (constraint: val=ID): reports a
 * missing-value diagnostic if the mandatory 'val' feature is transient, then
 * feeds the value to the sequencer as an ID terminal and finishes the feeder.
 * NOTE(review): Xtext-generated code — regenerate rather than hand-edit.
 */
protected void sequence_MandatoryManyTransition(ISerializationContext context, MandatoryManyTransition semanticObject) { if (errorAcceptor != null) { if (transientValues.isValueTransient(semanticObject, SyntacticsequencertestPackage.Literals.MANDATORY_MANY_TRANSITION__VAL) == ValueTransient.YES) errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, SyntacticsequencertestPackage.Literals.MANDATORY_MANY_TRANSITION__VAL)); } SequenceFeeder feeder = createSequencerFeeder(context, semanticObject); feeder.accept(grammarAccess.getMandatoryManyTransitionAccess().getValIDTerminalRuleCall_2_0(), semanticObject.getVal()); feeder.finish(); }
void function(ISerializationContext context, MandatoryManyTransition semanticObject) { if (errorAcceptor != null) { if (transientValues.isValueTransient(semanticObject, SyntacticsequencertestPackage.Literals.MANDATORY_MANY_TRANSITION__VAL) == ValueTransient.YES) errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, SyntacticsequencertestPackage.Literals.MANDATORY_MANY_TRANSITION__VAL)); } SequenceFeeder feeder = createSequencerFeeder(context, semanticObject); feeder.accept(grammarAccess.getMandatoryManyTransitionAccess().getValIDTerminalRuleCall_2_0(), semanticObject.getVal()); feeder.finish(); }
/** * Contexts: * MandatoryManyTransition returns MandatoryManyTransition * * Constraint: * val=ID */
Contexts: MandatoryManyTransition returns MandatoryManyTransition Constraint: val=ID
sequence_MandatoryManyTransition
{ "repo_name": "miklossy/xtext-core", "path": "org.eclipse.xtext.tests/src-gen/org/eclipse/xtext/serializer/serializer/AbstractSyntacticSequencerTestLanguageSemanticSequencer.java", "license": "epl-1.0", "size": 25300 }
[ "org.eclipse.xtext.serializer.ISerializationContext", "org.eclipse.xtext.serializer.acceptor.SequenceFeeder", "org.eclipse.xtext.serializer.sequencer.ITransientValueService", "org.eclipse.xtext.serializer.syntacticsequencertest.MandatoryManyTransition", "org.eclipse.xtext.serializer.syntacticsequencertest.SyntacticsequencertestPackage" ]
import org.eclipse.xtext.serializer.ISerializationContext; import org.eclipse.xtext.serializer.acceptor.SequenceFeeder; import org.eclipse.xtext.serializer.sequencer.ITransientValueService; import org.eclipse.xtext.serializer.syntacticsequencertest.MandatoryManyTransition; import org.eclipse.xtext.serializer.syntacticsequencertest.SyntacticsequencertestPackage;
import org.eclipse.xtext.serializer.*; import org.eclipse.xtext.serializer.acceptor.*; import org.eclipse.xtext.serializer.sequencer.*; import org.eclipse.xtext.serializer.syntacticsequencertest.*;
[ "org.eclipse.xtext" ]
org.eclipse.xtext;
1,261,583
/**
 * Spring initialisation callback: verifies that every required collaborator
 * (nodeService, tenantService, authenticationService, searchService,
 * behaviourFilter, policyComponent) was injected, registers the BeforeLock
 * class policy, then binds lock-checking behaviours for the cm:lockable
 * aspect: onCreateChildAssociation, beforeUpdateNode, beforeDeleteNode,
 * onMoveNode, the copy callback, and onCreateVersion (BeforeCreateVersion is
 * intentionally absent so nodes can be versioned regardless of lock state,
 * see ALF-16540 in the original source).
 */
public void init() { PropertyCheck.mandatory(this, "nodeService", nodeService); PropertyCheck.mandatory(this, "tenantService", tenantService); PropertyCheck.mandatory(this, "authenticationService", authenticationService); PropertyCheck.mandatory(this, "searchService", searchService); PropertyCheck.mandatory(this, "behaviourFilter", behaviourFilter); PropertyCheck.mandatory(this, "policyComponent", policyComponent); // Register the policies beforeLock = policyComponent.registerClassPolicy(LockServicePolicies.BeforeLock.class); // Register the various class behaviours to enable lock checking this.policyComponent.bindAssociationBehaviour( NodeServicePolicies.OnCreateChildAssociationPolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, "onCreateChildAssociation")); this.policyComponent.bindClassBehaviour( NodeServicePolicies.BeforeUpdateNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, "beforeUpdateNode")); this.policyComponent.bindClassBehaviour( NodeServicePolicies.BeforeDeleteNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, "beforeDeleteNode")); this.policyComponent.bindClassBehaviour( NodeServicePolicies.OnMoveNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, "onMoveNode")); // Register copy class behaviour this.policyComponent.bindClassBehaviour( CopyServicePolicies.OnCopyNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, "getCopyCallback")); // Register the onCreateVersion behavior for the version aspect // BeforeCreateVersion behavior was removed // we should be able to version a node regardless of its lock state, see ALF-16540 this.policyComponent.bindClassBehaviour( VersionServicePolicies.OnCreateVersionPolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, "onCreateVersion")); }
void function() { PropertyCheck.mandatory(this, STR, nodeService); PropertyCheck.mandatory(this, STR, tenantService); PropertyCheck.mandatory(this, STR, authenticationService); PropertyCheck.mandatory(this, STR, searchService); PropertyCheck.mandatory(this, STR, behaviourFilter); PropertyCheck.mandatory(this, STR, policyComponent); beforeLock = policyComponent.registerClassPolicy(LockServicePolicies.BeforeLock.class); this.policyComponent.bindAssociationBehaviour( NodeServicePolicies.OnCreateChildAssociationPolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, STR)); this.policyComponent.bindClassBehaviour( NodeServicePolicies.BeforeUpdateNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, STR)); this.policyComponent.bindClassBehaviour( NodeServicePolicies.BeforeDeleteNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, STR)); this.policyComponent.bindClassBehaviour( NodeServicePolicies.OnMoveNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, STR)); this.policyComponent.bindClassBehaviour( CopyServicePolicies.OnCopyNodePolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, STR)); this.policyComponent.bindClassBehaviour( VersionServicePolicies.OnCreateVersionPolicy.QNAME, ContentModel.ASPECT_LOCKABLE, new JavaBehaviour(this, STR)); }
/** * Initialise methods called by Spring framework */
Initialise methods called by Spring framework
init
{ "repo_name": "loftuxab/alfresco-community-loftux", "path": "projects/repository/source/java/org/alfresco/repo/lock/LockServiceImpl.java", "license": "lgpl-3.0", "size": 39531 }
[ "org.alfresco.model.ContentModel", "org.alfresco.repo.copy.CopyServicePolicies", "org.alfresco.repo.lock.LockServicePolicies", "org.alfresco.repo.node.NodeServicePolicies", "org.alfresco.repo.policy.JavaBehaviour", "org.alfresco.repo.version.VersionServicePolicies", "org.alfresco.util.PropertyCheck" ]
import org.alfresco.model.ContentModel; import org.alfresco.repo.copy.CopyServicePolicies; import org.alfresco.repo.lock.LockServicePolicies; import org.alfresco.repo.node.NodeServicePolicies; import org.alfresco.repo.policy.JavaBehaviour; import org.alfresco.repo.version.VersionServicePolicies; import org.alfresco.util.PropertyCheck;
import org.alfresco.model.*; import org.alfresco.repo.copy.*; import org.alfresco.repo.lock.*; import org.alfresco.repo.node.*; import org.alfresco.repo.policy.*; import org.alfresco.repo.version.*; import org.alfresco.util.*;
[ "org.alfresco.model", "org.alfresco.repo", "org.alfresco.util" ]
org.alfresco.model; org.alfresco.repo; org.alfresco.util;
2,171,689
private void setCogxels( DynamicArrayMap<Cogxel> cogxels) { if ( cogxels == null ) { // Error: Bad cogxels. throw new NullPointerException("The cogxels cannot be null."); } this.cogxels = cogxels; }
void function( DynamicArrayMap<Cogxel> cogxels) { if ( cogxels == null ) { throw new NullPointerException(STR); } this.cogxels = cogxels; }
/** * Sets the cogxels in the state. * * @param cogxels The new cogxels. */
Sets the cogxels in the state
setCogxels
{ "repo_name": "codeaudit/Foundry", "path": "Components/FrameworkCore/Source/gov/sandia/cognition/framework/lite/CogxelStateLite.java", "license": "bsd-3-clause", "size": 8407 }
[ "gov.sandia.cognition.collection.DynamicArrayMap", "gov.sandia.cognition.framework.Cogxel" ]
import gov.sandia.cognition.collection.DynamicArrayMap; import gov.sandia.cognition.framework.Cogxel;
import gov.sandia.cognition.collection.*; import gov.sandia.cognition.framework.*;
[ "gov.sandia.cognition" ]
gov.sandia.cognition;
96,074
/**
 * Gets the total count of weixin messages of the given message type for an
 * account, counting distinct values of the named property.
 *
 * @param msgType          the message type to count
 * @param wxAccountId      id of the weixin account whose messages are counted
 * @param distinctPropName property name used for the distinct count
 * @return the total number of distinct messages
 */
Long getCount(WxMessageType msgType, String wxAccountId, String distinctPropName);
Long getCount(WxMessageType msgType, String wxAccountId, String distinctPropName);
/** * Get total count of weixin message of special message type, distinct by * property name. * * @param msgType * @param wxAccountId * @param distinctPropName * @return */
Get total count of weixin message of special message type, distinct by property name
getCount
{ "repo_name": "jarvisji/Demo-Java-RestService", "path": "src/main/java/net/freecoder/restdemo/dao/WxMessageDao.java", "license": "apache-2.0", "size": 1874 }
[ "net.freecoder.restdemo.constant.WxMessageType" ]
import net.freecoder.restdemo.constant.WxMessageType;
import net.freecoder.restdemo.constant.*;
[ "net.freecoder.restdemo" ]
net.freecoder.restdemo;
276,280
/**
 * Assertions entry point for a BigDecimal {@link Offset} to use with
 * isCloseTo assertions; delegates to {@code Assertions.byLessThan}.
 *
 * @param value the value of the offset
 * @return the created {@code Offset}
 * @throws NullPointerException if the given value is null
 * @throws IllegalArgumentException if the given value is negative
 */
default Offset<BigDecimal> byLessThan(BigDecimal value) { return Assertions.byLessThan(value); }
default Offset<BigDecimal> byLessThan(BigDecimal value) { return Assertions.byLessThan(value); }
/** * Assertions entry point for BigDecimal {@link Offset} to use with isCloseTo assertions. * <p> * Typical usage : * <pre><code class='java'> assertThat(BigDecimal.TEN).isCloseTo(new BigDecimal("10.5"), byLessThan(BigDecimal.ONE));</code></pre> * * @param value the value of the offset. * @return the created {@code Offset}. * @throws NullPointerException if the given value is {@code null}. * @throws IllegalArgumentException if the given value is negative. * @since 3.9.0 */
Assertions entry point for BigDecimal <code>Offset</code> to use with isCloseTo assertions. Typical usage : <code> assertThat(BigDecimal.TEN).isCloseTo(new BigDecimal("10.5"), byLessThan(BigDecimal.ONE));</code>
byLessThan
{ "repo_name": "ChrisA89/assertj-core", "path": "src/main/java/org/assertj/core/api/WithAssertions.java", "license": "apache-2.0", "size": 102131 }
[ "java.math.BigDecimal", "org.assertj.core.data.Offset" ]
import java.math.BigDecimal; import org.assertj.core.data.Offset;
import java.math.*; import org.assertj.core.data.*;
[ "java.math", "org.assertj.core" ]
java.math; org.assertj.core;
242,327
/**
 * Returns the current top of this thread's parent stack without removing it.
 *
 * @return the top {@code StackItem}, or null when the stack is empty
 */
private StackItem getParent() {
    Stack<StackItem> stack = parentStack.get();
    if (stack.isEmpty()) {
        return null;
    }
    // Reuse the stack already fetched instead of doing a second
    // parentStack.get() lookup as the original code did.
    return stack.peek();
}
StackItem function() { Stack<StackItem> stack = parentStack.get(); if (stack.size() != 0) { return parentStack.get().peek(); } else { return null; } }
/** * method comments here * @return */
method comments here
getParent
{ "repo_name": "hundsun/fui", "path": "fui.template/src/main/java/com/hundsun/jres/fui/ftl/FBaseFreemarkerSupport.java", "license": "mit", "size": 6906 }
[ "java.util.Stack" ]
import java.util.Stack;
import java.util.*;
[ "java.util" ]
java.util;
1,633,109
/**
 * Creates the main figure for this edit part: builds the node plate, gives it
 * a StackLayout, adds the node shape, and records the content pane derived
 * from that shape.
 * NOTE(review): GMF-generated code — body does not depend on the generation
 * model, so it may be modified, per the original generated tag.
 *
 * @return the assembled {@link NodeFigure}
 */
protected NodeFigure createMainFigure() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new StackLayout()); IFigure shape = createNodeShape(); figure.add(shape); contentPane = setupContentPane(shape); return figure; }
NodeFigure function() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new StackLayout()); IFigure shape = createNodeShape(); figure.add(shape); contentPane = setupContentPane(shape); return figure; }
/** * Creates figure for this edit part. * * Body of this method does not depend on settings in generation model * so you may safely remove <i>generated</i> tag and modify it. * * @generated */
Creates figure for this edit part. Body of this method does not depend on settings in generation model so you may safely remove generated tag and modify it
createMainFigure
{ "repo_name": "prabushi/devstudio-tooling-esb", "path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/parts/SendMediatorEditPart.java", "license": "apache-2.0", "size": 21509 }
[ "org.eclipse.draw2d.IFigure", "org.eclipse.draw2d.StackLayout", "org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure" ]
import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.StackLayout; import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.draw2d.*; import org.eclipse.gmf.runtime.gef.ui.figures.*;
[ "org.eclipse.draw2d", "org.eclipse.gmf" ]
org.eclipse.draw2d; org.eclipse.gmf;
2,328,801
/**
 * Loads the Notes for the note target on this Document, once the target is
 * ready.
 *
 * <p>Notes are fetched by the note target's remote object id. When the
 * document uses the BUSINESS_OBJECT note type and its workflow document was
 * disapproved, notes linked to the document header's object id are also
 * included, because notes created on 'disapprove' are attached to the doc
 * header rather than the business object.</p>
 *
 * @param document the document for which to load the notes
 */
protected void loadNotes(final Document document) { if (isNoteTargetReady(document)) { Object legacyObjectClass; if (document instanceof MaintenanceDocument) { MaintenanceDocument mdoc = (MaintenanceDocument) document; legacyObjectClass = ((Maintainable) org.apache.commons.lang.ObjectUtils.defaultIfNull(mdoc.getOldMaintainableObject(), mdoc.getNewMaintainableObject())).getDataObjectClass(); } else { legacyObjectClass = document.getClass(); } List<Note> notes = new ArrayList<Note>(); if (StringUtils.isNotBlank(document.getNoteTarget().getObjectId())) { notes.addAll(getNoteService().getByRemoteObjectId(document.getNoteTarget().getObjectId())); } //notes created on 'disapprove' are linked to Doc Header, so this checks that even if notetype = BO if (document.getNoteType().equals(NoteType.BUSINESS_OBJECT) && document.getDocumentHeader() .getWorkflowDocument().isDisapproved()) { notes.addAll(getNoteService().getByRemoteObjectId(document.getDocumentHeader().getObjectId())); } document.setNotes(notes); } }
void function(final Document document) { if (isNoteTargetReady(document)) { Object legacyObjectClass; if (document instanceof MaintenanceDocument) { MaintenanceDocument mdoc = (MaintenanceDocument) document; legacyObjectClass = ((Maintainable) org.apache.commons.lang.ObjectUtils.defaultIfNull(mdoc.getOldMaintainableObject(), mdoc.getNewMaintainableObject())).getDataObjectClass(); } else { legacyObjectClass = document.getClass(); } List<Note> notes = new ArrayList<Note>(); if (StringUtils.isNotBlank(document.getNoteTarget().getObjectId())) { notes.addAll(getNoteService().getByRemoteObjectId(document.getNoteTarget().getObjectId())); } if (document.getNoteType().equals(NoteType.BUSINESS_OBJECT) && document.getDocumentHeader() .getWorkflowDocument().isDisapproved()) { notes.addAll(getNoteService().getByRemoteObjectId(document.getDocumentHeader().getObjectId())); } document.setNotes(notes); } }
/** * Loads the Notes for the note target on this Document. * * @param document the document for which to load the notes */
Loads the Notes for the note target on this Document
loadNotes
{ "repo_name": "sonamuthu/rice-1", "path": "rice-framework/krad-service-impl/src/main/java/org/kuali/rice/krad/service/impl/DocumentServiceImpl.java", "license": "apache-2.0", "size": 54756 }
[ "java.util.ArrayList", "java.util.List", "org.apache.commons.lang.StringUtils", "org.kuali.rice.krad.bo.Note", "org.kuali.rice.krad.document.Document", "org.kuali.rice.krad.maintenance.Maintainable", "org.kuali.rice.krad.maintenance.MaintenanceDocument", "org.kuali.rice.krad.util.NoteType" ]
import java.util.ArrayList; import java.util.List; import org.apache.commons.lang.StringUtils; import org.kuali.rice.krad.bo.Note; import org.kuali.rice.krad.document.Document; import org.kuali.rice.krad.maintenance.Maintainable; import org.kuali.rice.krad.maintenance.MaintenanceDocument; import org.kuali.rice.krad.util.NoteType;
import java.util.*; import org.apache.commons.lang.*; import org.kuali.rice.krad.bo.*; import org.kuali.rice.krad.document.*; import org.kuali.rice.krad.maintenance.*; import org.kuali.rice.krad.util.*;
[ "java.util", "org.apache.commons", "org.kuali.rice" ]
java.util; org.apache.commons; org.kuali.rice;
862,192
/**
 * Prunes running widget-preview loader tasks whose page falls outside the
 * window around the current page (cancelling and removing them), and
 * re-prioritises the surviving tasks by page distance.
 * NOTE(review): this record appears truncated — the method body is cut off
 * before its closing brace; the remainder presumably creates and executes the
 * new AsyncTask described in the original comment. Verify against the full
 * source before relying on this snippet.
 */
private void prepareLoadWidgetPreviewsTask(int page,ArrayList<Object> widgets, int cellWidth,int cellHeight,int cellCountX){ // Prune all tasks that are no longer needed Iterator<AppsCustomizeAsyncTask> iter=mRunningTasks.iterator(); while(iter.hasNext()){ AppsCustomizeAsyncTask task=iter.next(); int taskPage=task.page; if(taskPage < getAssociatedLowerPageBound(mCurrentPage) || taskPage > getAssociatedUpperPageBound(mCurrentPage)){ task.cancel(false); iter.remove(); }else{ task.setThreadPriority(getThreadPriorityForPage(taskPage)); } }
void function(int page,ArrayList<Object> widgets, int cellWidth,int cellHeight,int cellCountX){ Iterator<AppsCustomizeAsyncTask> iter=mRunningTasks.iterator(); while(iter.hasNext()){ AppsCustomizeAsyncTask task=iter.next(); int taskPage=task.page; if(taskPage < getAssociatedLowerPageBound(mCurrentPage) taskPage > getAssociatedUpperPageBound(mCurrentPage)){ task.cancel(false); iter.remove(); }else{ task.setThreadPriority(getThreadPriorityForPage(taskPage)); } }
/** * Creates and executes a new AsyncTask to load a page of widget previews. */
Creates and executes a new AsyncTask to load a page of widget previews
prepareLoadWidgetPreviewsTask
{ "repo_name": "hikelee/projector", "path": "android/master/src/com/android/launcher3/AppsCustomizePagedView.java", "license": "mit", "size": 68088 }
[ "java.util.ArrayList", "java.util.Iterator" ]
import java.util.ArrayList; import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
1,687,705
/**
 * Tells whether the intent carries the specified flag.
 *
 * @param intent the intent whose flags are inspected
 * @param flag   the flag bit (or bits) to test for
 * @return true if the flag is set on the intent, otherwise false
 */
public static boolean intent_hasFlag(Intent intent, int flag){
    // Return the bit test directly instead of branching to literal true/false.
    return (intent.getFlags() & flag) != 0;
}
static boolean function(Intent intent, int flag){ if ((intent.getFlags() & flag) != 0){ return true; } return false; }
/** * Returns TRUE if the intent has the specified FLAG, otherwise FALSE. * * @param intent * @param flag * @return */
Returns TRUE if the intent has the specified FLAG, otherwise FALSE
intent_hasFlag
{ "repo_name": "javocsoft/javocsoft-toolbox", "path": "src/es/javocsoft/android/lib/toolbox/ToolBox.java", "license": "gpl-3.0", "size": 316451 }
[ "android.content.Intent" ]
import android.content.Intent;
import android.content.*;
[ "android.content" ]
android.content;
227,917
/**
 * Returns the Ledger used in this instance.
 *
 * @return the {@link Ledger} held by this core; nullability depends on
 *         initialisation order — TODO confirm it is set before first use
 */
public Ledger getLedger() { return ledger; }
Ledger function() { return ledger; }
/** * Returns the Ledger used in this instance * * @return Ledger used in this instance */
Returns the Ledger used in this instance
getLedger
{ "repo_name": "evan-cleary/Battlegrounds", "path": "src/com/division/battlegrounds/core/BattlegroundCore.java", "license": "gpl-3.0", "size": 7584 }
[ "com.division.battlegrounds.economy.Ledger" ]
import com.division.battlegrounds.economy.Ledger;
import com.division.battlegrounds.economy.*;
[ "com.division.battlegrounds" ]
com.division.battlegrounds;
1,002,237
/**
 * Tests the deletion of a group without members: authenticates as the
 * manager, deletes the group found by its expected alias, and asserts that
 * the group can no longer be found. Depends on the alias-generation test and
 * the groupUpdate TestNG group having run first.
 *
 * @throws Exception if the test fails
 */
@Test(dependsOnMethods = { "testCreateGroupAliasGenerationWhenAliasExists" }, dependsOnGroups = { "groupUpdate" }) public void testDeleteGroupWithoutMember() throws Exception { AuthenticationTestUtils.setSecurityContext(manager); Long groupId = UserAndGroupTestUtils.findGroup(GROUP_3_ALIAS_EXPECTED).getId(); getGroupManagement().deleteGroup(groupId); Assert.assertNull(UserAndGroupTestUtils.findGroup(GROUP_3_ALIAS_EXPECTED)); }
@Test(dependsOnMethods = { STR }, dependsOnGroups = { STR }) void function() throws Exception { AuthenticationTestUtils.setSecurityContext(manager); Long groupId = UserAndGroupTestUtils.findGroup(GROUP_3_ALIAS_EXPECTED).getId(); getGroupManagement().deleteGroup(groupId); Assert.assertNull(UserAndGroupTestUtils.findGroup(GROUP_3_ALIAS_EXPECTED)); }
/** * Tests the deletion of a group without members. * * @throws Exception * if the test fails */
Tests the deletion of a group without members
testDeleteGroupWithoutMember
{ "repo_name": "Communote/communote-server", "path": "communote/tests/all-versions/integration/src/test/java/com/communote/server/core/user/group/GroupManagementTest.java", "license": "apache-2.0", "size": 43228 }
[ "com.communote.server.test.util.AuthenticationTestUtils", "com.communote.server.test.util.UserAndGroupTestUtils", "org.testng.Assert", "org.testng.annotations.Test" ]
import com.communote.server.test.util.AuthenticationTestUtils; import com.communote.server.test.util.UserAndGroupTestUtils; import org.testng.Assert; import org.testng.annotations.Test;
import com.communote.server.test.util.*; import org.testng.*; import org.testng.annotations.*;
[ "com.communote.server", "org.testng", "org.testng.annotations" ]
com.communote.server; org.testng; org.testng.annotations;
661,914
public static boolean validatePassword(String password, String correctHash) throws NoSuchAlgorithmException, InvalidKeySpecException { return validatePassword(password.toCharArray(), correctHash); }
static boolean function(String password, String correctHash) throws NoSuchAlgorithmException, InvalidKeySpecException { return validatePassword(password.toCharArray(), correctHash); }
/** * Validates a password using a hash. * * @param password the password to check * @param correctHash the hash of the valid password * @return true if the password is correct, false if not */
Validates a password using a hash
validatePassword
{ "repo_name": "ludjer/ninja-acl", "path": "src/main/java/com/ludgerpeters/acl/utility/PasswordHash.java", "license": "apache-2.0", "size": 8797 }
[ "java.security.NoSuchAlgorithmException", "java.security.spec.InvalidKeySpecException" ]
import java.security.NoSuchAlgorithmException; import java.security.spec.InvalidKeySpecException;
import java.security.*; import java.security.spec.*;
[ "java.security" ]
java.security;
1,304,456
public List<String> getContactNotes(int id) { List<String> notes = new ArrayList<String>(); ContentResolver cr = ArsApplication.getInstance().getApplicationContext().getContentResolver(); String where = ContactsContract.Data.CONTACT_ID + " = ? AND " + ContactsContract.Data.MIMETYPE + " = ?"; String[] whereParameters = new String[] { String.valueOf(id), ContactsContract.CommonDataKinds.Note.CONTENT_ITEM_TYPE }; Cursor noteCur = cr.query(ContactsContract.Data.CONTENT_URI, null, where, whereParameters, null); if (noteCur.moveToFirst()) { String note = noteCur.getString(noteCur.getColumnIndex(ContactsContract.CommonDataKinds.Note.NOTE)); if (note.length() > 0) { notes.add(note); } } noteCur.close(); return notes; }
List<String> function(int id) { List<String> notes = new ArrayList<String>(); ContentResolver cr = ArsApplication.getInstance().getApplicationContext().getContentResolver(); String where = ContactsContract.Data.CONTACT_ID + STR + ContactsContract.Data.MIMETYPE + STR; String[] whereParameters = new String[] { String.valueOf(id), ContactsContract.CommonDataKinds.Note.CONTENT_ITEM_TYPE }; Cursor noteCur = cr.query(ContactsContract.Data.CONTENT_URI, null, where, whereParameters, null); if (noteCur.moveToFirst()) { String note = noteCur.getString(noteCur.getColumnIndex(ContactsContract.CommonDataKinds.Note.NOTE)); if (note.length() > 0) { notes.add(note); } } noteCur.close(); return notes; }
/** * Returns a list of contact notes. * @param id ID of the contact * @return List of contact notes */
Returns a list of contact notes
getContactNotes
{ "repo_name": "vityokkv73/android_retrieval_system", "path": "src/net/deerhunter/ars/contact_structs/ContactsManager.java", "license": "gpl-3.0", "size": 9342 }
[ "android.content.ContentResolver", "android.database.Cursor", "android.provider.ContactsContract", "java.util.ArrayList", "java.util.List", "net.deerhunter.ars.application.ArsApplication" ]
import android.content.ContentResolver; import android.database.Cursor; import android.provider.ContactsContract; import java.util.ArrayList; import java.util.List; import net.deerhunter.ars.application.ArsApplication;
import android.content.*; import android.database.*; import android.provider.*; import java.util.*; import net.deerhunter.ars.application.*;
[ "android.content", "android.database", "android.provider", "java.util", "net.deerhunter.ars" ]
android.content; android.database; android.provider; java.util; net.deerhunter.ars;
118,697
protected TempFile doCreatePDFFile(String spec, HttpServletRequest httpServletRequest) throws IOException, DocumentException, ServletException, InterruptedException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Generating PDF for spec=" + spec); } if (SPEC_LOGGER.isInfoEnabled()) { SPEC_LOGGER.info(spec); } PJsonObject specJson = MapPrinter.parseSpec(spec); if (specJson.has("app")) { app = specJson.getString("app"); } else { app = null; } MapPrinter mapPrinter = getMapPrinter(app); Map<String, String> headers = new HashMap<String, String>(); TreeSet<String> configHeaders = mapPrinter.getConfig().getHeaders(); if (configHeaders == null) { configHeaders = new TreeSet<String>(); configHeaders.add("Referer"); configHeaders.add("Cookie"); } for (Iterator<String> header_iter = configHeaders.iterator() ; header_iter.hasNext() ; ) { String header = header_iter.next(); if (httpServletRequest.getHeader(header) != null) { headers.put(header, httpServletRequest.getHeader(header)); } } final OutputFormat outputFormat = mapPrinter.getOutputFormat(specJson); //create a temporary file that will contain the PDF final File tempJavaFile = File.createTempFile(TEMP_FILE_PREFIX, "."+outputFormat.getFileSuffix()+TEMP_FILE_SUFFIX, getTempDir()); TempFile tempFile = new TempFile(tempJavaFile, specJson, outputFormat); FileOutputStream out = null; try { out = new FileOutputStream(tempFile); mapPrinter.print(specJson, out, headers); return tempFile; } catch (IOException e) { deleteFile(tempFile); throw e; } catch (DocumentException e) { deleteFile(tempFile); throw e; } catch (InterruptedException e) { deleteFile(tempFile); throw e; } finally { if (out != null) { out.close(); } } }
TempFile function(String spec, HttpServletRequest httpServletRequest) throws IOException, DocumentException, ServletException, InterruptedException { if (LOGGER.isDebugEnabled()) { LOGGER.debug(STR + spec); } if (SPEC_LOGGER.isInfoEnabled()) { SPEC_LOGGER.info(spec); } PJsonObject specJson = MapPrinter.parseSpec(spec); if (specJson.has("app")) { app = specJson.getString("app"); } else { app = null; } MapPrinter mapPrinter = getMapPrinter(app); Map<String, String> headers = new HashMap<String, String>(); TreeSet<String> configHeaders = mapPrinter.getConfig().getHeaders(); if (configHeaders == null) { configHeaders = new TreeSet<String>(); configHeaders.add(STR); configHeaders.add(STR); } for (Iterator<String> header_iter = configHeaders.iterator() ; header_iter.hasNext() ; ) { String header = header_iter.next(); if (httpServletRequest.getHeader(header) != null) { headers.put(header, httpServletRequest.getHeader(header)); } } final OutputFormat outputFormat = mapPrinter.getOutputFormat(specJson); final File tempJavaFile = File.createTempFile(TEMP_FILE_PREFIX, "."+outputFormat.getFileSuffix()+TEMP_FILE_SUFFIX, getTempDir()); TempFile tempFile = new TempFile(tempJavaFile, specJson, outputFormat); FileOutputStream out = null; try { out = new FileOutputStream(tempFile); mapPrinter.print(specJson, out, headers); return tempFile; } catch (IOException e) { deleteFile(tempFile); throw e; } catch (DocumentException e) { deleteFile(tempFile); throw e; } catch (InterruptedException e) { deleteFile(tempFile); throw e; } finally { if (out != null) { out.close(); } } }
/** * Do the actual work of creating the PDF temporary file. * @throws InterruptedException */
Do the actual work of creating the PDF temporary file
doCreatePDFFile
{ "repo_name": "jbjonesjr/mapfish-print", "path": "src/main/java/org/mapfish/print/servlet/MapPrinterServlet.java", "license": "gpl-3.0", "size": 22407 }
[ "com.itextpdf.text.DocumentException", "java.io.File", "java.io.FileOutputStream", "java.io.IOException", "java.util.HashMap", "java.util.Iterator", "java.util.Map", "java.util.TreeSet", "javax.servlet.ServletException", "javax.servlet.http.HttpServletRequest", "org.mapfish.print.MapPrinter", "org.mapfish.print.output.OutputFormat", "org.mapfish.print.utils.PJsonObject" ]
import com.itextpdf.text.DocumentException; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.TreeSet; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.mapfish.print.MapPrinter; import org.mapfish.print.output.OutputFormat; import org.mapfish.print.utils.PJsonObject;
import com.itextpdf.text.*; import java.io.*; import java.util.*; import javax.servlet.*; import javax.servlet.http.*; import org.mapfish.print.*; import org.mapfish.print.output.*; import org.mapfish.print.utils.*;
[ "com.itextpdf.text", "java.io", "java.util", "javax.servlet", "org.mapfish.print" ]
com.itextpdf.text; java.io; java.util; javax.servlet; org.mapfish.print;
1,674,857
private RequestStageContainer doUpdateResources(final RequestStageContainer stages, final Request request, Predicate predicate, boolean performQueryEvaluation) throws UnsupportedPropertyException, SystemException, NoSuchResourceException, NoSuchParentResourceException { final Set<ServiceComponentHostRequest> requests = new HashSet<ServiceComponentHostRequest>(); final boolean runSmokeTest = "true".equals(getQueryParameterValue( QUERY_PARAMETERS_RUN_SMOKE_TEST_ID, predicate)); Set<String> queryIds = Collections.singleton(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID); Request queryRequest = PropertyHelper.getReadRequest(queryIds); // will take care of 404 exception Set<Resource> matchingResources = getResourcesForUpdate(queryRequest, predicate); for (Resource queryResource : matchingResources) { //todo: predicate evaluation was removed for BUG-28737 and the removal of this breaks //todo: the new "add hosts" api. BUG-4818 is the root cause and needs to be addressed //todo: and then this predicate evaluation should always be performed and the //todo: temporary performQueryEvaluation flag hack should be removed. if (! performQueryEvaluation || predicate.evaluate(queryResource)) { Map<String, Object> updateRequestProperties = new HashMap<String, Object>(); // add props from query resource updateRequestProperties.putAll(PropertyHelper.getProperties(queryResource)); // add properties from update request //todo: should we flag value size > 1? if (request.getProperties() != null && request.getProperties().size() != 0) { updateRequestProperties.putAll(request.getProperties().iterator().next()); } requests.add(changeRequest(updateRequestProperties)); } }
RequestStageContainer function(final RequestStageContainer stages, final Request request, Predicate predicate, boolean performQueryEvaluation) throws UnsupportedPropertyException, SystemException, NoSuchResourceException, NoSuchParentResourceException { final Set<ServiceComponentHostRequest> requests = new HashSet<ServiceComponentHostRequest>(); final boolean runSmokeTest = "true".equals(getQueryParameterValue( QUERY_PARAMETERS_RUN_SMOKE_TEST_ID, predicate)); Set<String> queryIds = Collections.singleton(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID); Request queryRequest = PropertyHelper.getReadRequest(queryIds); Set<Resource> matchingResources = getResourcesForUpdate(queryRequest, predicate); for (Resource queryResource : matchingResources) { if (! performQueryEvaluation predicate.evaluate(queryResource)) { Map<String, Object> updateRequestProperties = new HashMap<String, Object>(); updateRequestProperties.putAll(PropertyHelper.getProperties(queryResource)); if (request.getProperties() != null && request.getProperties().size() != 0) { updateRequestProperties.putAll(request.getProperties().iterator().next()); } requests.add(changeRequest(updateRequestProperties)); } }
/** * Update resources. * * @param stages request stage container * @param request request * @param predicate request predicate * @param performQueryEvaluation should query be evaluated for matching resource set * @return * @throws UnsupportedPropertyException an unsupported property was specified in the request * @throws SystemException an unknown exception occurred * @throws NoSuchResourceException the query didn't match any resources * @throws NoSuchParentResourceException a specified parent resource doesn't exist */
Update resources
doUpdateResources
{ "repo_name": "zouzhberk/ambaridemo", "path": "demo-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java", "license": "apache-2.0", "size": 44721 }
[ "java.util.Collections", "java.util.HashMap", "java.util.HashSet", "java.util.Map", "java.util.Set", "org.apache.ambari.server.controller.ServiceComponentHostRequest", "org.apache.ambari.server.controller.spi.NoSuchParentResourceException", "org.apache.ambari.server.controller.spi.NoSuchResourceException", "org.apache.ambari.server.controller.spi.Predicate", "org.apache.ambari.server.controller.spi.Request", "org.apache.ambari.server.controller.spi.Resource", "org.apache.ambari.server.controller.spi.SystemException", "org.apache.ambari.server.controller.spi.UnsupportedPropertyException", "org.apache.ambari.server.controller.utilities.PropertyHelper" ]
import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.ambari.server.controller.ServiceComponentHostRequest; import org.apache.ambari.server.controller.spi.NoSuchParentResourceException; import org.apache.ambari.server.controller.spi.NoSuchResourceException; import org.apache.ambari.server.controller.spi.Predicate; import org.apache.ambari.server.controller.spi.Request; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.controller.spi.SystemException; import org.apache.ambari.server.controller.spi.UnsupportedPropertyException; import org.apache.ambari.server.controller.utilities.PropertyHelper;
import java.util.*; import org.apache.ambari.server.controller.*; import org.apache.ambari.server.controller.spi.*; import org.apache.ambari.server.controller.utilities.*;
[ "java.util", "org.apache.ambari" ]
java.util; org.apache.ambari;
2,240,898
public static Expression simplifyGivenEquality(Expression disequality, Expression variable, Expression otherTerm) { Expression result; if (disequality.getArguments().contains(variable) && disequality.getArguments().contains(otherTerm)) { result = Expressions.FALSE; } else { result = disequality; } return result; }
static Expression function(Expression disequality, Expression variable, Expression otherTerm) { Expression result; if (disequality.getArguments().contains(variable) && disequality.getArguments().contains(otherTerm)) { result = Expressions.FALSE; } else { result = disequality; } return result; }
/** * Returns an expression equivalent to disequality (and perhaps simpler) given equality between a variable and another term. */
Returns an expression equivalent to disequality (and perhaps simpler) given equality between a variable and another term
simplifyGivenEquality
{ "repo_name": "aic-sri-international/aic-expresso", "path": "src/main/java/com/sri/ai/grinder/library/Disequality.java", "license": "bsd-3-clause", "size": 7484 }
[ "com.sri.ai.expresso.api.Expression", "com.sri.ai.expresso.helper.Expressions" ]
import com.sri.ai.expresso.api.Expression; import com.sri.ai.expresso.helper.Expressions;
import com.sri.ai.expresso.api.*; import com.sri.ai.expresso.helper.*;
[ "com.sri.ai" ]
com.sri.ai;
1,602,604
//----------------------------------------------------------------------- public LongShort getLongShort() { return _longShort; }
LongShort function() { return _longShort; }
/** * Gets the long/short type. * @return the value of the property, not null */
Gets the long/short type
getLongShort
{ "repo_name": "jeorme/OG-Platform", "path": "projects/OG-FinancialTypes/src/main/java/com/opengamma/financial/security/option/NonDeliverableFXOptionSecurity.java", "license": "apache-2.0", "size": 24557 }
[ "com.opengamma.financial.security.LongShort" ]
import com.opengamma.financial.security.LongShort;
import com.opengamma.financial.security.*;
[ "com.opengamma.financial" ]
com.opengamma.financial;
1,905,382
public Request addRequestHeader(String header, String value) { if (header == null) { throw new NullPointerException("header cannot be null"); } if (header.contains(":")) { throw new IllegalArgumentException("header may not contain ':'"); } if (value == null) { value = ""; } requestHeaders.add(Pair.create(header, value)); return this; }
Request function(String header, String value) { if (header == null) { throw new NullPointerException(STR); } if (header.contains(":")) { throw new IllegalArgumentException(STR); } if (value == null) { value = ""; } requestHeaders.add(Pair.create(header, value)); return this; }
/** * Add an HTTP header to be included with the download request. The header will be added to * the end of the list. * * @param header HTTP header name * @param value header value * @return this object * @see <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2">HTTP/1.1 * Message Headers</a> */
Add an HTTP header to be included with the download request. The header will be added to the end of the list
addRequestHeader
{ "repo_name": "xufeifandj/download-manager", "path": "library/src/main/java/com/novoda/downloadmanager/lib/Request.java", "license": "apache-2.0", "size": 19438 }
[ "android.util.Pair" ]
import android.util.Pair;
import android.util.*;
[ "android.util" ]
android.util;
713,900
@Test public void testMaintUpgrade() throws Exception { UpgradeHelper upgradeHelper = createNiceMock(UpgradeHelper.class); ConfigHelper configHelper = createNiceMock(ConfigHelper.class); UpgradePack upgradePack = createNiceMock(UpgradePack.class); expect(m_clusterVersionSummary.getAvailableServiceNames()).andReturn( Sets.newHashSet(HDFS_SERVICE_NAME)).once(); expect(m_targetRepositoryVersion.getType()).andReturn(RepositoryType.MAINT).atLeastOnce(); expect(m_targetRepositoryVersion.getRepositoryXml()).andReturn(m_vdfXml).once(); expect(upgradeHelper.suggestUpgradePack(EasyMock.anyString(), EasyMock.anyObject(StackId.class), EasyMock.anyObject(StackId.class), EasyMock.anyObject(Direction.class), EasyMock.anyObject(UpgradeType.class), EasyMock.anyString())).andReturn(upgradePack).once(); // make the cluster have 2 services - one is already upgraded to a new // enough version expect(m_cluster.getService(ZOOKEEPER_SERVICE_NAME)).andReturn(m_zookeeperService).anyTimes(); m_services.put(ZOOKEEPER_SERVICE_NAME, m_zookeeperService); assertEquals(2, m_services.size()); replayAll(); Map<String, Object> requestMap = new HashMap<>(); requestMap.put(UpgradeResourceProvider.UPGRADE_TYPE, UpgradeType.NON_ROLLING.name()); requestMap.put(UpgradeResourceProvider.UPGRADE_DIRECTION, Direction.UPGRADE.name()); requestMap.put(UpgradeResourceProvider.UPGRADE_REPO_VERSION_ID, m_targetRepositoryVersion.getId().toString()); requestMap.put(UpgradeResourceProvider.UPGRADE_SKIP_PREREQUISITE_CHECKS, "true"); UpgradeContext context = new UpgradeContext(m_cluster, requestMap, null, upgradeHelper, m_upgradeDAO, m_repositoryVersionDAO, configHelper); assertEquals(Direction.UPGRADE, context.getDirection()); assertEquals(RepositoryType.MAINT, context.getOrchestrationType()); assertEquals(1, context.getSupportedServices().size()); assertFalse(context.isPatchRevert()); verifyAll(); }
void function() throws Exception { UpgradeHelper upgradeHelper = createNiceMock(UpgradeHelper.class); ConfigHelper configHelper = createNiceMock(ConfigHelper.class); UpgradePack upgradePack = createNiceMock(UpgradePack.class); expect(m_clusterVersionSummary.getAvailableServiceNames()).andReturn( Sets.newHashSet(HDFS_SERVICE_NAME)).once(); expect(m_targetRepositoryVersion.getType()).andReturn(RepositoryType.MAINT).atLeastOnce(); expect(m_targetRepositoryVersion.getRepositoryXml()).andReturn(m_vdfXml).once(); expect(upgradeHelper.suggestUpgradePack(EasyMock.anyString(), EasyMock.anyObject(StackId.class), EasyMock.anyObject(StackId.class), EasyMock.anyObject(Direction.class), EasyMock.anyObject(UpgradeType.class), EasyMock.anyString())).andReturn(upgradePack).once(); expect(m_cluster.getService(ZOOKEEPER_SERVICE_NAME)).andReturn(m_zookeeperService).anyTimes(); m_services.put(ZOOKEEPER_SERVICE_NAME, m_zookeeperService); assertEquals(2, m_services.size()); replayAll(); Map<String, Object> requestMap = new HashMap<>(); requestMap.put(UpgradeResourceProvider.UPGRADE_TYPE, UpgradeType.NON_ROLLING.name()); requestMap.put(UpgradeResourceProvider.UPGRADE_DIRECTION, Direction.UPGRADE.name()); requestMap.put(UpgradeResourceProvider.UPGRADE_REPO_VERSION_ID, m_targetRepositoryVersion.getId().toString()); requestMap.put(UpgradeResourceProvider.UPGRADE_SKIP_PREREQUISITE_CHECKS, "true"); UpgradeContext context = new UpgradeContext(m_cluster, requestMap, null, upgradeHelper, m_upgradeDAO, m_repositoryVersionDAO, configHelper); assertEquals(Direction.UPGRADE, context.getDirection()); assertEquals(RepositoryType.MAINT, context.getOrchestrationType()); assertEquals(1, context.getSupportedServices().size()); assertFalse(context.isPatchRevert()); verifyAll(); }
/** * Tests that the {@link UpgradeContext} for a maintenance upgrade. * Maintenance upgrades will only upgrade services which require it by * examining the versions included in the VDF. * * @throws Exception */
Tests that the <code>UpgradeContext</code> for a maintenance upgrade. Maintenance upgrades will only upgrade services which require it by examining the versions included in the VDF
testMaintUpgrade
{ "repo_name": "arenadata/ambari", "path": "ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java", "license": "apache-2.0", "size": 21074 }
[ "com.google.common.collect.Sets", "java.util.HashMap", "java.util.Map", "junit.framework.Assert", "org.apache.ambari.server.controller.internal.UpgradeResourceProvider", "org.apache.ambari.server.state.stack.UpgradePack", "org.apache.ambari.server.state.stack.upgrade.Direction", "org.apache.ambari.server.state.stack.upgrade.UpgradeType", "org.easymock.EasyMock" ]
import com.google.common.collect.Sets; import java.util.HashMap; import java.util.Map; import junit.framework.Assert; import org.apache.ambari.server.controller.internal.UpgradeResourceProvider; import org.apache.ambari.server.state.stack.UpgradePack; import org.apache.ambari.server.state.stack.upgrade.Direction; import org.apache.ambari.server.state.stack.upgrade.UpgradeType; import org.easymock.EasyMock;
import com.google.common.collect.*; import java.util.*; import junit.framework.*; import org.apache.ambari.server.controller.internal.*; import org.apache.ambari.server.state.stack.*; import org.apache.ambari.server.state.stack.upgrade.*; import org.easymock.*;
[ "com.google.common", "java.util", "junit.framework", "org.apache.ambari", "org.easymock" ]
com.google.common; java.util; junit.framework; org.apache.ambari; org.easymock;
636,298
public void setTheme(Theme theme) { this.theme = theme; texture = GravityGame.getInstance().getAssets().getBackground(theme.getBackground()); int width = texture.getWidth(); int height = texture.getHeight(); float aspectRatio = camera.viewportWidth / camera.viewportHeight; int srcViewWidth = theme.getSrcViewWidth(); int srcViewHeight = (int) (srcViewWidth / aspectRatio); int x = (width - srcViewWidth) / 2; int y = (height - srcViewHeight) / 2; region = new TextureRegion(texture, x, y, srcViewWidth, srcViewHeight); }
void function(Theme theme) { this.theme = theme; texture = GravityGame.getInstance().getAssets().getBackground(theme.getBackground()); int width = texture.getWidth(); int height = texture.getHeight(); float aspectRatio = camera.viewportWidth / camera.viewportHeight; int srcViewWidth = theme.getSrcViewWidth(); int srcViewHeight = (int) (srcViewWidth / aspectRatio); int x = (width - srcViewWidth) / 2; int y = (height - srcViewHeight) / 2; region = new TextureRegion(texture, x, y, srcViewWidth, srcViewHeight); }
/** * Sets the background texture to that specified by the given theme and sets * the region to be the source view width of the theme. * * @param theme */
Sets the background texture to that specified by the given theme and sets the region to be the source view width of the theme
setTheme
{ "repo_name": "Sawrr/gravity-game", "path": "core/src/com/sawyerharris/gravitygame/screen/ParallaxBackground.java", "license": "gpl-3.0", "size": 2897 }
[ "com.badlogic.gdx.graphics.g2d.TextureRegion", "com.sawyerharris.gravitygame.game.GravityGame", "com.sawyerharris.gravitygame.game.Theme" ]
import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.sawyerharris.gravitygame.game.GravityGame; import com.sawyerharris.gravitygame.game.Theme;
import com.badlogic.gdx.graphics.g2d.*; import com.sawyerharris.gravitygame.game.*;
[ "com.badlogic.gdx", "com.sawyerharris.gravitygame" ]
com.badlogic.gdx; com.sawyerharris.gravitygame;
1,249,623
public License getLicense() { return license; }
License function() { return license; }
/** * Returns the license for the target. */
Returns the license for the target
getLicense
{ "repo_name": "dslomov/bazel", "path": "src/main/java/com/google/devtools/build/lib/analysis/LicensesProvider.java", "license": "apache-2.0", "size": 2819 }
[ "com.google.devtools.build.lib.packages.License" ]
import com.google.devtools.build.lib.packages.License;
import com.google.devtools.build.lib.packages.*;
[ "com.google.devtools" ]
com.google.devtools;
1,050,108
public static void notEmpty(@Nullable Collection<?> collection, Supplier<String> messageSupplier) { if (CollectionUtils.isEmpty(collection)) { throw new IllegalArgumentException(nullSafeGet(messageSupplier)); } } /** * Assert that a collection contains elements; that is, it must not be * {@code null} and must contain at least one element. * * @deprecated as of 4.3.7, in favor of {@link #notEmpty(Collection, String)}
static void function(@Nullable Collection<?> collection, Supplier<String> messageSupplier) { if (CollectionUtils.isEmpty(collection)) { throw new IllegalArgumentException(nullSafeGet(messageSupplier)); } } /** * Assert that a collection contains elements; that is, it must not be * {@code null} and must contain at least one element. * * @deprecated as of 4.3.7, in favor of {@link #notEmpty(Collection, String)}
/** * Assert that a collection contains elements; that is, it must not be * {@code null} and must contain at least one element. * * <pre class="code"> * Assert.notEmpty(collection, () -&gt; "The " + collectionType + " collection must contain elements"); * </pre> * * @param collection the collection to check * @param messageSupplier a supplier for the exception message to use if the * assertion fails * @throws IllegalArgumentException if the collection is {@code null} or * contains no elements * @since 5.0 */
Assert that a collection contains elements; that is, it must not be null and must contain at least one element. Assert.notEmpty(collection, () -&gt; "The " + collectionType + " collection must contain elements"); </code>
notEmpty
{ "repo_name": "emacslisp/Java", "path": "SpringFrameworkReading/src/org/springframework/util/Assert.java", "license": "mit", "size": 29412 }
[ "java.util.Collection", "java.util.function.Supplier", "org.springframework.lang.Nullable" ]
import java.util.Collection; import java.util.function.Supplier; import org.springframework.lang.Nullable;
import java.util.*; import java.util.function.*; import org.springframework.lang.*;
[ "java.util", "org.springframework.lang" ]
java.util; org.springframework.lang;
192,150
public void notifyGroupChange(String nodeName, GroupChangeType opType) throws DatabaseException { RepGroupImpl repGroup = repNode.getGroup(); GroupChange changeEvent = getProtocol(repGroup).new GroupChange(repGroup, nodeName, opType); refreshMonitors(repGroup, changeEvent); }
void function(String nodeName, GroupChangeType opType) throws DatabaseException { RepGroupImpl repGroup = repNode.getGroup(); GroupChange changeEvent = getProtocol(repGroup).new GroupChange(repGroup, nodeName, opType); refreshMonitors(repGroup, changeEvent); }
/** * Fire a GroupChangeEvent. */
Fire a GroupChangeEvent
notifyGroupChange
{ "repo_name": "prat0318/dbms", "path": "mini_dbms/je-5.0.103/src/com/sleepycat/je/rep/impl/node/MonitorEventManager.java", "license": "mit", "size": 5187 }
[ "com.sleepycat.je.DatabaseException", "com.sleepycat.je.rep.impl.RepGroupImpl", "com.sleepycat.je.rep.monitor.GroupChangeEvent", "com.sleepycat.je.rep.monitor.Protocol" ]
import com.sleepycat.je.DatabaseException; import com.sleepycat.je.rep.impl.RepGroupImpl; import com.sleepycat.je.rep.monitor.GroupChangeEvent; import com.sleepycat.je.rep.monitor.Protocol;
import com.sleepycat.je.*; import com.sleepycat.je.rep.impl.*; import com.sleepycat.je.rep.monitor.*;
[ "com.sleepycat.je" ]
com.sleepycat.je;
1,850,763
protected int runPrivileged(Configuration config, Configuration dynamicProperties) { ActorSystem actorSystem = null; WebMonitor webMonitor = null; MesosArtifactServer artifactServer = null; ScheduledExecutorService futureExecutor = null; ExecutorService ioExecutor = null; MesosServices mesosServices = null; HighAvailabilityServices highAvailabilityServices = null; MetricRegistryImpl metricRegistry = null; try { // ------- (1) load and parse / validate all configurations ------- final String appMasterHostname = config.getString( JobManagerOptions.ADDRESS, InetAddress.getLocalHost().getHostName()); LOG.info("App Master Hostname to use: {}", appMasterHostname); // Mesos configuration final MesosConfiguration mesosConfig = MesosEntrypointUtils.createMesosSchedulerConfiguration(config, appMasterHostname); // JM configuration int numberProcessors = Hardware.getNumberCPUCores(); futureExecutor = Executors.newScheduledThreadPool( numberProcessors, new ExecutorThreadFactory("mesos-jobmanager-future")); ioExecutor = Executors.newFixedThreadPool( numberProcessors, new ExecutorThreadFactory("mesos-jobmanager-io")); mesosServices = MesosServicesUtils.createMesosServices(config, appMasterHostname); // TM configuration final MesosTaskManagerParameters taskManagerParameters = MesosEntrypointUtils.createTmParameters(config, LOG); // JM endpoint, which should be explicitly configured based on acquired net resources final int listeningPort = config.getInteger(JobManagerOptions.PORT); checkState(listeningPort >= 0 && listeningPort <= 65536, "Config parameter \"" + JobManagerOptions.PORT.key() + "\" is invalid, it must be between 0 and 65536"); // ----------------- (2) start the actor system ------------------- // try to start the actor system, JobManager and JobManager actor system // using the configured address and ports actorSystem = BootstrapTools.startActorSystem(config, appMasterHostname, listeningPort, LOG); Address address = AkkaUtils.getAddress(actorSystem); final String 
akkaHostname = address.host().get(); final int akkaPort = (Integer) address.port().get(); LOG.info("Actor system bound to hostname {}.", akkaHostname); // try to start the artifact server LOG.debug("Starting Artifact Server"); artifactServer = mesosServices.getArtifactServer(); // ----------------- (3) Generate the configuration for the TaskManagers ------------------- // generate a container spec which conveys the artifacts/vars needed to launch a TM ContainerSpecification taskManagerContainerSpec = new ContainerSpecification(); // propagate the AM dynamic configuration to the TM taskManagerContainerSpec.getDynamicConfiguration().addAll(dynamicProperties); // propagate newly-generated configuration elements final Configuration taskManagerConfig = BootstrapTools.generateTaskManagerConfiguration( new Configuration(), akkaHostname, akkaPort, taskManagerParameters.containeredParameters().numSlots(), TASKMANAGER_REGISTRATION_TIMEOUT); taskManagerContainerSpec.getDynamicConfiguration().addAll(taskManagerConfig); // apply the overlays MesosEntrypointUtils.applyOverlays(config, taskManagerContainerSpec); // configure the artifact server to serve the specified artifacts LaunchableMesosWorker.configureArtifactServer(artifactServer, taskManagerContainerSpec); // ----------------- (4) start the actors ------------------- // 1) JobManager & Archive (in non-HA case, the leader service takes this) // 2) Web Monitor (we need its port to register) // 3) Resource Master for Mesos // 4) Process reapers for the JobManager and Resource Master // 0: Start the JobManager services highAvailabilityServices = HighAvailabilityServicesUtils.createHighAvailabilityServices( config, ioExecutor, HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION); // 1: the web monitor LOG.debug("Starting Web Frontend"); Time webMonitorTimeout = Time.milliseconds(config.getLong(WebOptions.TIMEOUT)); webMonitor = BootstrapTools.startWebMonitorIfConfigured( config, highAvailabilityServices, new 
AkkaJobManagerRetriever(actorSystem, webMonitorTimeout, 10, Time.milliseconds(50L)), new AkkaQueryServiceRetriever(actorSystem, webMonitorTimeout), webMonitorTimeout, new ScheduledExecutorServiceAdapter(futureExecutor), LOG); if (webMonitor != null) { final URL webMonitorURL = new URL(webMonitor.getRestAddress()); mesosConfig.frameworkInfo().setWebuiUrl(webMonitorURL.toExternalForm()); } // 2: the JobManager LOG.debug("Starting JobManager actor"); metricRegistry = new MetricRegistryImpl( MetricRegistryConfiguration.fromConfiguration(config)); metricRegistry.startQueryService(actorSystem, null); // we start the JobManager with its standard name ActorRef jobManager = JobManager.startJobManagerActors( config, actorSystem, futureExecutor, ioExecutor, highAvailabilityServices, metricRegistry, webMonitor != null ? Option.apply(webMonitor.getRestAddress()) : Option.empty(), Option.apply(JobMaster.JOB_MANAGER_NAME), Option.apply(JobMaster.ARCHIVE_NAME), getJobManagerClass(), getArchivistClass())._1(); // 3: Flink's Mesos ResourceManager LOG.debug("Starting Mesos Flink Resource Manager"); // create the worker store to persist task information across restarts MesosWorkerStore workerStore = mesosServices.createMesosWorkerStore( config, ioExecutor); Props resourceMasterProps = MesosFlinkResourceManager.createActorProps( getResourceManagerClass(), config, mesosConfig, workerStore, highAvailabilityServices.getJobManagerLeaderRetriever(HighAvailabilityServices.DEFAULT_JOB_ID), taskManagerParameters, taskManagerContainerSpec, artifactServer, LOG); ActorRef resourceMaster = actorSystem.actorOf(resourceMasterProps, "Mesos_Resource_Master"); // 4: Process reapers // The process reapers ensure that upon unexpected actor death, the process exits // and does not stay lingering around unresponsive LOG.debug("Starting process reapers for JobManager"); actorSystem.actorOf( Props.create(ProcessReaper.class, resourceMaster, LOG, ACTOR_DIED_EXIT_CODE), "Mesos_Resource_Master_Process_Reaper"); 
actorSystem.actorOf( Props.create(ProcessReaper.class, jobManager, LOG, ACTOR_DIED_EXIT_CODE), "JobManager_Process_Reaper"); } catch (Throwable t) { // make sure that everything whatever ends up in the log LOG.error("Mesos JobManager initialization failed", t); if (webMonitor != null) { try { webMonitor.stop(); } catch (Throwable ignored) { LOG.warn("Failed to stop the web frontend", ignored); } } if (actorSystem != null) { try { actorSystem.shutdown(); } catch (Throwable tt) { LOG.error("Error shutting down actor system", tt); } } if (futureExecutor != null) { try { futureExecutor.shutdownNow(); } catch (Throwable tt) { LOG.error("Error shutting down future executor", tt); } } if (ioExecutor != null) { try { ioExecutor.shutdownNow(); } catch (Throwable tt) { LOG.error("Error shutting down io executor", tt); } } if (mesosServices != null) { try { mesosServices.close(false); } catch (Throwable tt) { LOG.error("Error closing the mesos services.", tt); } } return INIT_ERROR_EXIT_CODE; } // everything started, we can wait until all is done or the process is killed LOG.info("Mesos JobManager started"); // wait until everything is done actorSystem.awaitTermination(); // if we get here, everything work out jolly all right, and we even exited smoothly if (webMonitor != null) { try { webMonitor.stop(); } catch (Throwable t) { LOG.error("Failed to stop the web frontend", t); } } if (highAvailabilityServices != null) { try { highAvailabilityServices.close(); } catch (Throwable t) { LOG.error("Could not properly stop the high availability services."); } } if (metricRegistry != null) { try { metricRegistry.shutdown().get(); } catch (Throwable t) { LOG.error("Could not shut down metric registry.", t); } } ExecutorUtils.gracefulShutdown( AkkaUtils.getTimeout(config).toMillis(), TimeUnit.MILLISECONDS, futureExecutor, ioExecutor); try { mesosServices.close(true); } catch (Throwable t) { LOG.error("Failed to clean up and close MesosServices.", t); } return 0; } // 
------------------------------------------------------------------------ // For testing, this allows to override the actor classes used for // JobManager and the archive of completed jobs // ------------------------------------------------------------------------
int function(Configuration config, Configuration dynamicProperties) { ActorSystem actorSystem = null; WebMonitor webMonitor = null; MesosArtifactServer artifactServer = null; ScheduledExecutorService futureExecutor = null; ExecutorService ioExecutor = null; MesosServices mesosServices = null; HighAvailabilityServices highAvailabilityServices = null; MetricRegistryImpl metricRegistry = null; try { final String appMasterHostname = config.getString( JobManagerOptions.ADDRESS, InetAddress.getLocalHost().getHostName()); LOG.info(STR, appMasterHostname); final MesosConfiguration mesosConfig = MesosEntrypointUtils.createMesosSchedulerConfiguration(config, appMasterHostname); int numberProcessors = Hardware.getNumberCPUCores(); futureExecutor = Executors.newScheduledThreadPool( numberProcessors, new ExecutorThreadFactory(STR)); ioExecutor = Executors.newFixedThreadPool( numberProcessors, new ExecutorThreadFactory(STR)); mesosServices = MesosServicesUtils.createMesosServices(config, appMasterHostname); final MesosTaskManagerParameters taskManagerParameters = MesosEntrypointUtils.createTmParameters(config, LOG); final int listeningPort = config.getInteger(JobManagerOptions.PORT); checkState(listeningPort >= 0 && listeningPort <= 65536, STRSTR\STR); actorSystem = BootstrapTools.startActorSystem(config, appMasterHostname, listeningPort, LOG); Address address = AkkaUtils.getAddress(actorSystem); final String akkaHostname = address.host().get(); final int akkaPort = (Integer) address.port().get(); LOG.info(STR, akkaHostname); LOG.debug(STR); artifactServer = mesosServices.getArtifactServer(); ContainerSpecification taskManagerContainerSpec = new ContainerSpecification(); taskManagerContainerSpec.getDynamicConfiguration().addAll(dynamicProperties); final Configuration taskManagerConfig = BootstrapTools.generateTaskManagerConfiguration( new Configuration(), akkaHostname, akkaPort, taskManagerParameters.containeredParameters().numSlots(), TASKMANAGER_REGISTRATION_TIMEOUT); 
taskManagerContainerSpec.getDynamicConfiguration().addAll(taskManagerConfig); MesosEntrypointUtils.applyOverlays(config, taskManagerContainerSpec); LaunchableMesosWorker.configureArtifactServer(artifactServer, taskManagerContainerSpec); highAvailabilityServices = HighAvailabilityServicesUtils.createHighAvailabilityServices( config, ioExecutor, HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION); LOG.debug(STR); Time webMonitorTimeout = Time.milliseconds(config.getLong(WebOptions.TIMEOUT)); webMonitor = BootstrapTools.startWebMonitorIfConfigured( config, highAvailabilityServices, new AkkaJobManagerRetriever(actorSystem, webMonitorTimeout, 10, Time.milliseconds(50L)), new AkkaQueryServiceRetriever(actorSystem, webMonitorTimeout), webMonitorTimeout, new ScheduledExecutorServiceAdapter(futureExecutor), LOG); if (webMonitor != null) { final URL webMonitorURL = new URL(webMonitor.getRestAddress()); mesosConfig.frameworkInfo().setWebuiUrl(webMonitorURL.toExternalForm()); } LOG.debug(STR); metricRegistry = new MetricRegistryImpl( MetricRegistryConfiguration.fromConfiguration(config)); metricRegistry.startQueryService(actorSystem, null); ActorRef jobManager = JobManager.startJobManagerActors( config, actorSystem, futureExecutor, ioExecutor, highAvailabilityServices, metricRegistry, webMonitor != null ? 
Option.apply(webMonitor.getRestAddress()) : Option.empty(), Option.apply(JobMaster.JOB_MANAGER_NAME), Option.apply(JobMaster.ARCHIVE_NAME), getJobManagerClass(), getArchivistClass())._1(); LOG.debug(STR); MesosWorkerStore workerStore = mesosServices.createMesosWorkerStore( config, ioExecutor); Props resourceMasterProps = MesosFlinkResourceManager.createActorProps( getResourceManagerClass(), config, mesosConfig, workerStore, highAvailabilityServices.getJobManagerLeaderRetriever(HighAvailabilityServices.DEFAULT_JOB_ID), taskManagerParameters, taskManagerContainerSpec, artifactServer, LOG); ActorRef resourceMaster = actorSystem.actorOf(resourceMasterProps, STR); LOG.debug(STR); actorSystem.actorOf( Props.create(ProcessReaper.class, resourceMaster, LOG, ACTOR_DIED_EXIT_CODE), STR); actorSystem.actorOf( Props.create(ProcessReaper.class, jobManager, LOG, ACTOR_DIED_EXIT_CODE), STR); } catch (Throwable t) { LOG.error(STR, t); if (webMonitor != null) { try { webMonitor.stop(); } catch (Throwable ignored) { LOG.warn(STR, ignored); } } if (actorSystem != null) { try { actorSystem.shutdown(); } catch (Throwable tt) { LOG.error(STR, tt); } } if (futureExecutor != null) { try { futureExecutor.shutdownNow(); } catch (Throwable tt) { LOG.error(STR, tt); } } if (ioExecutor != null) { try { ioExecutor.shutdownNow(); } catch (Throwable tt) { LOG.error(STR, tt); } } if (mesosServices != null) { try { mesosServices.close(false); } catch (Throwable tt) { LOG.error(STR, tt); } } return INIT_ERROR_EXIT_CODE; } LOG.info(STR); actorSystem.awaitTermination(); if (webMonitor != null) { try { webMonitor.stop(); } catch (Throwable t) { LOG.error(STR, t); } } if (highAvailabilityServices != null) { try { highAvailabilityServices.close(); } catch (Throwable t) { LOG.error(STR); } } if (metricRegistry != null) { try { metricRegistry.shutdown().get(); } catch (Throwable t) { LOG.error(STR, t); } } ExecutorUtils.gracefulShutdown( AkkaUtils.getTimeout(config).toMillis(), TimeUnit.MILLISECONDS, 
futureExecutor, ioExecutor); try { mesosServices.close(true); } catch (Throwable t) { LOG.error(STR, t); } return 0; }
/** * The main work method, must run as a privileged action. * * @return The return code for the Java process. */
The main work method, must run as a privileged action
runPrivileged
{ "repo_name": "zhangminglei/flink", "path": "flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/MesosApplicationMasterRunner.java", "license": "apache-2.0", "size": 16993 }
[ "java.net.InetAddress", "java.util.concurrent.ExecutorService", "java.util.concurrent.Executors", "java.util.concurrent.ScheduledExecutorService", "java.util.concurrent.TimeUnit", "org.apache.flink.api.common.time.Time", "org.apache.flink.configuration.Configuration", "org.apache.flink.configuration.JobManagerOptions", "org.apache.flink.configuration.WebOptions", "org.apache.flink.mesos.entrypoint.MesosEntrypointUtils", "org.apache.flink.mesos.runtime.clusterframework.services.MesosServices", "org.apache.flink.mesos.runtime.clusterframework.services.MesosServicesUtils", "org.apache.flink.mesos.runtime.clusterframework.store.MesosWorkerStore", "org.apache.flink.mesos.util.MesosArtifactServer", "org.apache.flink.mesos.util.MesosConfiguration", "org.apache.flink.runtime.akka.AkkaUtils", "org.apache.flink.runtime.clusterframework.BootstrapTools", "org.apache.flink.runtime.clusterframework.ContainerSpecification", "org.apache.flink.runtime.concurrent.ScheduledExecutorServiceAdapter", "org.apache.flink.runtime.highavailability.HighAvailabilityServices", "org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils", "org.apache.flink.runtime.jobmanager.JobManager", "org.apache.flink.runtime.jobmaster.JobMaster", "org.apache.flink.runtime.metrics.MetricRegistryConfiguration", "org.apache.flink.runtime.metrics.MetricRegistryImpl", "org.apache.flink.runtime.process.ProcessReaper", "org.apache.flink.runtime.util.ExecutorThreadFactory", "org.apache.flink.runtime.util.Hardware", "org.apache.flink.runtime.webmonitor.WebMonitor", "org.apache.flink.runtime.webmonitor.retriever.impl.AkkaJobManagerRetriever", "org.apache.flink.runtime.webmonitor.retriever.impl.AkkaQueryServiceRetriever", "org.apache.flink.util.ExecutorUtils", "org.apache.flink.util.Preconditions" ]
import java.net.InetAddress; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.flink.api.common.time.Time; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.configuration.WebOptions; import org.apache.flink.mesos.entrypoint.MesosEntrypointUtils; import org.apache.flink.mesos.runtime.clusterframework.services.MesosServices; import org.apache.flink.mesos.runtime.clusterframework.services.MesosServicesUtils; import org.apache.flink.mesos.runtime.clusterframework.store.MesosWorkerStore; import org.apache.flink.mesos.util.MesosArtifactServer; import org.apache.flink.mesos.util.MesosConfiguration; import org.apache.flink.runtime.akka.AkkaUtils; import org.apache.flink.runtime.clusterframework.BootstrapTools; import org.apache.flink.runtime.clusterframework.ContainerSpecification; import org.apache.flink.runtime.concurrent.ScheduledExecutorServiceAdapter; import org.apache.flink.runtime.highavailability.HighAvailabilityServices; import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils; import org.apache.flink.runtime.jobmanager.JobManager; import org.apache.flink.runtime.jobmaster.JobMaster; import org.apache.flink.runtime.metrics.MetricRegistryConfiguration; import org.apache.flink.runtime.metrics.MetricRegistryImpl; import org.apache.flink.runtime.process.ProcessReaper; import org.apache.flink.runtime.util.ExecutorThreadFactory; import org.apache.flink.runtime.util.Hardware; import org.apache.flink.runtime.webmonitor.WebMonitor; import org.apache.flink.runtime.webmonitor.retriever.impl.AkkaJobManagerRetriever; import org.apache.flink.runtime.webmonitor.retriever.impl.AkkaQueryServiceRetriever; import org.apache.flink.util.ExecutorUtils; import org.apache.flink.util.Preconditions;
import java.net.*; import java.util.concurrent.*; import org.apache.flink.api.common.time.*; import org.apache.flink.configuration.*; import org.apache.flink.mesos.entrypoint.*; import org.apache.flink.mesos.runtime.clusterframework.services.*; import org.apache.flink.mesos.runtime.clusterframework.store.*; import org.apache.flink.mesos.util.*; import org.apache.flink.runtime.akka.*; import org.apache.flink.runtime.clusterframework.*; import org.apache.flink.runtime.concurrent.*; import org.apache.flink.runtime.highavailability.*; import org.apache.flink.runtime.jobmanager.*; import org.apache.flink.runtime.jobmaster.*; import org.apache.flink.runtime.metrics.*; import org.apache.flink.runtime.process.*; import org.apache.flink.runtime.util.*; import org.apache.flink.runtime.webmonitor.*; import org.apache.flink.runtime.webmonitor.retriever.impl.*; import org.apache.flink.util.*;
[ "java.net", "java.util", "org.apache.flink" ]
java.net; java.util; org.apache.flink;
1,035,588
public final void finishFragmentByStep(int step) { android.support.v4.app.FragmentActivity activity = getActivity(); if (activity == null) { throw new IllegalStateException("Fragment " + this + " not attached to Activity"); } List<android.support.v4.app.Fragment> list = getFragmentManager().getFragments(); if (list == null || list.size() < step) { throw new IllegalStateException("There is not enough Fragment to finish."); } for (int i = 0; i < step; i++) { activity.onBackPressed(); } }
final void function(int step) { android.support.v4.app.FragmentActivity activity = getActivity(); if (activity == null) { throw new IllegalStateException(STR + this + STR); } List<android.support.v4.app.Fragment> list = getFragmentManager().getFragments(); if (list == null list.size() < step) { throw new IllegalStateException(STR); } for (int i = 0; i < step; i++) { activity.onBackPressed(); } }
/** * close several fragment by step * * @param step the number of the fragments which will be finished. */
close several fragment by step
finishFragmentByStep
{ "repo_name": "snowdream/SnowdreamFramework", "path": "modules/core/src/main/java/com/github/snowdream/android/support/v4/app/Fragment.java", "license": "apache-2.0", "size": 4568 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,852,678
public static void setRegistryProperty(String propertyName, String propertyValue, String path, String tenantDomain) throws APIManagementException { UserRegistry registry = getRegistry(tenantDomain); PrivilegedCarbonContext.startTenantFlow(); if (tenantDomain != null && StringUtils.isNotEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext() .setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } try { Resource resource = registry.get(path); // add or update property if (resource.getProperty(propertyName) != null) { resource.setProperty(propertyName, propertyValue); } else { resource.addProperty(propertyName, propertyValue); } registry.put(resource.getPath(), resource); resource.discard(); } catch (RegistryException e) { throw new APIManagementException("Error while reading registry resource " + path + " for tenant " + tenantDomain); } finally { PrivilegedCarbonContext.endTenantFlow(); } }
static void function(String propertyName, String propertyValue, String path, String tenantDomain) throws APIManagementException { UserRegistry registry = getRegistry(tenantDomain); PrivilegedCarbonContext.startTenantFlow(); if (tenantDomain != null && StringUtils.isNotEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext() .setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } try { Resource resource = registry.get(path); if (resource.getProperty(propertyName) != null) { resource.setProperty(propertyName, propertyValue); } else { resource.addProperty(propertyName, propertyValue); } registry.put(resource.getPath(), resource); resource.discard(); } catch (RegistryException e) { throw new APIManagementException(STR + path + STR + tenantDomain); } finally { PrivilegedCarbonContext.endTenantFlow(); } }
/** * Add/Update the given registry property from the given tenant registry * path * * @param propertyName property name * @param propertyValue property value * @param path resource path * @param tenantDomain * @throws APIManagementException */
Add/Update the given registry property from the given tenant registry path
setRegistryProperty
{ "repo_name": "harsha89/carbon-apimgt", "path": "components/apimgt/org.wso2.carbon.apimgt.gateway/src/main/java/org/wso2/carbon/apimgt/gateway/utils/GatewayUtils.java", "license": "apache-2.0", "size": 38195 }
[ "org.apache.commons.lang3.StringUtils", "org.wso2.carbon.apimgt.api.APIManagementException", "org.wso2.carbon.context.PrivilegedCarbonContext", "org.wso2.carbon.registry.core.Resource", "org.wso2.carbon.registry.core.exceptions.RegistryException", "org.wso2.carbon.registry.core.session.UserRegistry", "org.wso2.carbon.utils.multitenancy.MultitenantConstants" ]
import org.apache.commons.lang3.StringUtils; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.apache.commons.lang3.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.context.*; import org.wso2.carbon.registry.core.*; import org.wso2.carbon.registry.core.exceptions.*; import org.wso2.carbon.registry.core.session.*; import org.wso2.carbon.utils.multitenancy.*;
[ "org.apache.commons", "org.wso2.carbon" ]
org.apache.commons; org.wso2.carbon;
1,668,641
protected void customValidate(ValidationContext validationContext, Collection<ValidationResult> validationResults) { }
void function(ValidationContext validationContext, Collection<ValidationResult> validationResults) { }
/** * Sub-classes can add custom validation by implementing this method. * @param validationContext the validation context * @param validationResults add custom validation result to this collection */
Sub-classes can add custom validation by implementing this method
customValidate
{ "repo_name": "mcgilman/nifi", "path": "nifi-nar-bundles/nifi-extension-utils/nifi-processor-utils/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java", "license": "apache-2.0", "size": 46751 }
[ "java.util.Collection", "org.apache.nifi.components.ValidationContext", "org.apache.nifi.components.ValidationResult" ]
import java.util.Collection; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult;
import java.util.*; import org.apache.nifi.components.*;
[ "java.util", "org.apache.nifi" ]
java.util; org.apache.nifi;
2,705,302
public List<String> getAllDescription() { List<String> result = new ArrayList<String>(); List<Node> nodes = childNode.get("description"); for (Node node : nodes) { result.add(node.getText()); } return result; }
List<String> function() { List<String> result = new ArrayList<String>(); List<Node> nodes = childNode.get(STR); for (Node node : nodes) { result.add(node.getText()); } return result; }
/** * Returns all <code>description</code> elements * @return list of <code>description</code> */
Returns all <code>description</code> elements
getAllDescription
{ "repo_name": "forge/javaee-descriptors", "path": "impl/src/main/java/org/jboss/shrinkwrap/descriptor/impl/facelettaglibrary22/FaceletTaglibTagAttributeTypeImpl.java", "license": "epl-1.0", "size": 14291 }
[ "java.util.ArrayList", "java.util.List", "org.jboss.shrinkwrap.descriptor.spi.node.Node" ]
import java.util.ArrayList; import java.util.List; import org.jboss.shrinkwrap.descriptor.spi.node.Node;
import java.util.*; import org.jboss.shrinkwrap.descriptor.spi.node.*;
[ "java.util", "org.jboss.shrinkwrap" ]
java.util; org.jboss.shrinkwrap;
953,341
byte[] childPrefix(Object value) { Preconditions.checkState(parent == null, "Not a parent index."); return buildKey(name, toParentKey(value)); }
byte[] childPrefix(Object value) { Preconditions.checkState(parent == null, STR); return buildKey(name, toParentKey(value)); }
/** * Creates a key prefix for child indices of this index. This allows the prefix to be * calculated only once, avoiding redundant work when multiple child indices of the * same parent index exist. */
Creates a key prefix for child indices of this index. This allows the prefix to be calculated only once, avoiding redundant work when multiple child indices of the same parent index exist
childPrefix
{ "repo_name": "bravo-zhang/spark", "path": "common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java", "license": "apache-2.0", "size": 17396 }
[ "com.google.common.base.Preconditions" ]
import com.google.common.base.Preconditions;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
2,574,462
public static MenuScroller setScrollerFor(JPopupMenu menu) { return new MenuScroller(menu); }
static MenuScroller function(JPopupMenu menu) { return new MenuScroller(menu); }
/** * Registers a popup menu to be scrolled with the default number of items to display at a time and the default scrolling interval. * <p> * @param menu the popup menu * @return the MenuScroller */
Registers a popup menu to be scrolled with the default number of items to display at a time and the default scrolling interval.
setScrollerFor
{ "repo_name": "AlexFalappa/hcc", "path": "af-toolkit/src/main/java/net/falappa/swing/menu/MenuScroller.java", "license": "apache-2.0", "size": 20429 }
[ "javax.swing.JPopupMenu" ]
import javax.swing.JPopupMenu;
import javax.swing.*;
[ "javax.swing" ]
javax.swing;
2,820,567
Collection<IMessage> receiveDeferredMessageBatch(Collection<Long> sequenceNumbers) throws InterruptedException, ServiceBusException;
Collection<IMessage> receiveDeferredMessageBatch(Collection<Long> sequenceNumbers) throws InterruptedException, ServiceBusException;
/** * Receives a batch of deferred {@link Message}. * * @param sequenceNumbers The sequence numbers of desired deferred messages. * @return List of messages received. Returns null if no message is found. * @throws InterruptedException if the current thread was interrupted while waiting * @throws ServiceBusException if receive failed */
Receives a batch of deferred <code>Message</code>
receiveDeferredMessageBatch
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/servicebus/microsoft-azure-servicebus/src/main/java/com/microsoft/azure/servicebus/IMessageReceiver.java", "license": "mit", "size": 31069 }
[ "com.microsoft.azure.servicebus.primitives.ServiceBusException", "java.util.Collection" ]
import com.microsoft.azure.servicebus.primitives.ServiceBusException; import java.util.Collection;
import com.microsoft.azure.servicebus.primitives.*; import java.util.*;
[ "com.microsoft.azure", "java.util" ]
com.microsoft.azure; java.util;
792,919
void onViewTap(View view, float x, float y); } private class AnimatedZoomRunnable implements Runnable { // These are 'postScale' values, means they're compounded each iteration static final float ANIMATION_SCALE_PER_ITERATION_IN = 1.07f; static final float ANIMATION_SCALE_PER_ITERATION_OUT = 0.93f; private final float mFocalX, mFocalY; private final float mTargetZoom; private final float mDeltaScale; public AnimatedZoomRunnable(final float currentZoom, final float targetZoom, final float focalX, final float focalY) { mTargetZoom = targetZoom; mFocalX = focalX; mFocalY = focalY; if (currentZoom < targetZoom) { mDeltaScale = ANIMATION_SCALE_PER_ITERATION_IN; } else { mDeltaScale = ANIMATION_SCALE_PER_ITERATION_OUT; } }
void onViewTap(View view, float x, float y); } private class AnimatedZoomRunnable implements Runnable { static final float ANIMATION_SCALE_PER_ITERATION_IN = 1.07f; static final float ANIMATION_SCALE_PER_ITERATION_OUT = 0.93f; private final float mFocalX, mFocalY; private final float mTargetZoom; private final float mDeltaScale; public AnimatedZoomRunnable(final float currentZoom, final float targetZoom, final float focalX, final float focalY) { mTargetZoom = targetZoom; mFocalX = focalX; mFocalY = focalY; if (currentZoom < targetZoom) { mDeltaScale = ANIMATION_SCALE_PER_ITERATION_IN; } else { mDeltaScale = ANIMATION_SCALE_PER_ITERATION_OUT; } }
/** * A callback to receive where the user taps on a ImageView. You will * receive a callback if the user taps anywhere on the view, tapping on * 'whitespace' will not be ignored. * * @param view * - View the user tapped. * @param x * - where the user tapped from the left of the View. * @param y * - where the user tapped from the top of the View. */
A callback to receive where the user taps on a ImageView. You will receive a callback if the user taps anywhere on the view, tapping on 'whitespace' will not be ignored
onViewTap
{ "repo_name": "shucc/FunctionClass", "path": "mylibrary/src/main/java/cchao/org/mylibrary/photo/zoom/PhotoViewAttacher.java", "license": "apache-2.0", "size": 26121 }
[ "android.view.View" ]
import android.view.View;
import android.view.*;
[ "android.view" ]
android.view;
2,127,820
EReference getResources_Resources();
EReference getResources_Resources();
/** * Returns the meta object for the containment reference list '{@link io.opensemantics.semiotics.model.assessment.Resources#getResources <em>Resources</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference list '<em>Resources</em>'. * @see io.opensemantics.semiotics.model.assessment.Resources#getResources() * @see #getResources() * @generated */
Returns the meta object for the containment reference list '<code>io.opensemantics.semiotics.model.assessment.Resources#getResources Resources</code>'.
getResources_Resources
{ "repo_name": "CoastalHacking/semiotics-main", "path": "bundles/io.opensemantics.semiotics.model.assessment/src-gen/io/opensemantics/semiotics/model/assessment/AssessmentPackage.java", "license": "apache-2.0", "size": 151116 }
[ "org.eclipse.emf.ecore.EReference" ]
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,795,343
private boolean canEditContentTopics(SecuritySession securitySession){ String[] restrictRoles = Aksess.getConfiguration().getStrings("restrict.editing.content.topics"); return noRestriction(restrictRoles) || securitySession.isUserInRole(restrictRoles); }
boolean function(SecuritySession securitySession){ String[] restrictRoles = Aksess.getConfiguration().getStrings(STR); return noRestriction(restrictRoles) securitySession.isUserInRole(restrictRoles); }
/** * Editing Topics can be restricted to certain roles * @return true if editing is allowed */
Editing Topics can be restricted to certain roles
canEditContentTopics
{ "repo_name": "kantega/Flyt-cms", "path": "modules/core/src/main/java/no/kantega/publishing/admin/content/action/AbstractContentAction.java", "license": "apache-2.0", "size": 4761 }
[ "no.kantega.publishing.common.Aksess", "no.kantega.publishing.security.SecuritySession" ]
import no.kantega.publishing.common.Aksess; import no.kantega.publishing.security.SecuritySession;
import no.kantega.publishing.common.*; import no.kantega.publishing.security.*;
[ "no.kantega.publishing" ]
no.kantega.publishing;
2,196,604
protected ProfileSchemaDto createProfileSchema() throws TException, IOException { return createProfileSchema(null); }
ProfileSchemaDto function() throws TException, IOException { return createProfileSchema(null); }
/** * Creates the profile schema. * * @return the profile schema dto * @throws TException the t exception * @throws IOException Signals that an I/O exception has occurred. */
Creates the profile schema
createProfileSchema
{ "repo_name": "vzhukovskyi/kaa", "path": "server/control/src/test/java/org/kaaproject/kaa/server/control/AbstractTestControlServer.java", "license": "apache-2.0", "size": 41435 }
[ "java.io.IOException", "org.apache.thrift.TException", "org.kaaproject.kaa.common.dto.ProfileSchemaDto" ]
import java.io.IOException; import org.apache.thrift.TException; import org.kaaproject.kaa.common.dto.ProfileSchemaDto;
import java.io.*; import org.apache.thrift.*; import org.kaaproject.kaa.common.dto.*;
[ "java.io", "org.apache.thrift", "org.kaaproject.kaa" ]
java.io; org.apache.thrift; org.kaaproject.kaa;
2,522,587
public static GroupBucket createSelectGroupBucket( TrafficTreatment treatment, short weight) { if (weight == 0) { return null; } return new DefaultGroupBucket(GroupDescription.Type.SELECT, treatment, weight, null, null); }
static GroupBucket function( TrafficTreatment treatment, short weight) { if (weight == 0) { return null; } return new DefaultGroupBucket(GroupDescription.Type.SELECT, treatment, weight, null, null); }
/** * Creates select group bucket with specified weight. * * @param treatment traffic treatment associated with group bucket * @param weight weight associated with group bucket * @return select group bucket object */
Creates select group bucket with specified weight
createSelectGroupBucket
{ "repo_name": "sonu283304/onos", "path": "core/api/src/main/java/org/onosproject/net/group/DefaultGroupBucket.java", "license": "apache-2.0", "size": 8851 }
[ "org.onosproject.net.flow.TrafficTreatment" ]
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flow.*;
[ "org.onosproject.net" ]
org.onosproject.net;
567,859
@Override public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) { if (itemPropertyDescriptors == null) { super.getPropertyDescriptors(object); } return itemPropertyDescriptors; }
List<IItemPropertyDescriptor> function(Object object) { if (itemPropertyDescriptors == null) { super.getPropertyDescriptors(object); } return itemPropertyDescriptors; }
/** * This returns the property descriptors for the adapted class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This returns the property descriptors for the adapted class.
getPropertyDescriptors
{ "repo_name": "KAMP-Research/KAMP", "path": "bundles/Toometa/toometa.archoptions.edit/src/archoptions/provider/IntroduceNewComponentItemProvider.java", "license": "apache-2.0", "size": 2662 }
[ "java.util.List", "org.eclipse.emf.edit.provider.IItemPropertyDescriptor" ]
import java.util.List; import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import java.util.*; import org.eclipse.emf.edit.provider.*;
[ "java.util", "org.eclipse.emf" ]
java.util; org.eclipse.emf;
1,106,332
@Override protected boolean handleMovement(final RPEntity entity) { if (rand.nextDouble() < probability) { doDamage(entity); } return true; }
boolean function(final RPEntity entity) { if (rand.nextDouble() < probability) { doDamage(entity); } return true; }
/** * Apply actions done while moving. * * @param entity * The RPEntity that moved. * * @return <code>false</code> if this entity should be removed from * further processing, <code>true</code> otherwise. */
Apply actions done while moving
handleMovement
{ "repo_name": "AntumDeluge/arianne-stendhal", "path": "src/games/stendhal/server/entity/mapstuff/area/DamagingArea.java", "license": "gpl-2.0", "size": 4994 }
[ "games.stendhal.server.entity.RPEntity" ]
import games.stendhal.server.entity.RPEntity;
import games.stendhal.server.entity.*;
[ "games.stendhal.server" ]
games.stendhal.server;
274,650
public void setWindowPos(int windowPos) throws CmsRuntimeException { checkFrozen(); m_windowPos = windowPos; }
void function(int windowPos) throws CmsRuntimeException { checkFrozen(); m_windowPos = windowPos; }
/** * Sets the start position of the current display.<p> * * This is a count of "windows" that * consist of viewable text with "windowSize" lines of text (for a non-standard log file) or * log-entries (for a standard log file).<p> * * @param windowPos the start position of the current display to set * * @throws CmsRuntimeException if the configuration of this instance has been frozen * ({@link #setFrozen(boolean)}) */
Sets the start position of the current display. This is a count of "windows" that consist of viewable text with "windowSize" lines of text (for a non-standard log file) or log-entries (for a standard log file)
setWindowPos
{ "repo_name": "mediaworx/opencms-core", "path": "src/org/opencms/util/CmsRfsFileViewer.java", "license": "lgpl-2.1", "size": 27366 }
[ "org.opencms.main.CmsRuntimeException" ]
import org.opencms.main.CmsRuntimeException;
import org.opencms.main.*;
[ "org.opencms.main" ]
org.opencms.main;
1,138,213
public void execute(Project module) { pi.execute(module); eventBus.fireEvent(new ProjectAnalysisEvent(module, true)); executeInitializersPhase(); // Index and lock the filesystem indexFs(); // Log detected languages and their profiles after FS is indexed and languages detected profileVerifier.execute(); // Initialize issue exclusions initIssueExclusions(); sensorsExecutor.execute(sensorContext); if (module.isRoot()) { if (analysisMode.isPreview()) { localIssueTracking(); issuesCallback(); } issuesReport(); publishReportJob(); postJobsExecutor.execute(sensorContext); } cleanMemory(); eventBus.fireEvent(new ProjectAnalysisEvent(module, false)); }
void function(Project module) { pi.execute(module); eventBus.fireEvent(new ProjectAnalysisEvent(module, true)); executeInitializersPhase(); indexFs(); profileVerifier.execute(); initIssueExclusions(); sensorsExecutor.execute(sensorContext); if (module.isRoot()) { if (analysisMode.isPreview()) { localIssueTracking(); issuesCallback(); } issuesReport(); publishReportJob(); postJobsExecutor.execute(sensorContext); } cleanMemory(); eventBus.fireEvent(new ProjectAnalysisEvent(module, false)); }
/** * Executed on each module */
Executed on each module
execute
{ "repo_name": "julien-sobczak/sonarqube", "path": "sonar-batch/src/main/java/org/sonar/batch/phases/PhaseExecutor.java", "license": "lgpl-3.0", "size": 6112 }
[ "org.sonar.api.resources.Project" ]
import org.sonar.api.resources.Project;
import org.sonar.api.resources.*;
[ "org.sonar.api" ]
org.sonar.api;
2,530,494
public TunnelService tunnel() { return tunnelService; }
TunnelService function() { return tunnelService; }
/** * Returns a reference to the tunnel service. * * @return tunnel service reference */
Returns a reference to the tunnel service
tunnel
{ "repo_name": "LorenzReinhart/ONOSnew", "path": "web/gui/src/main/java/org/onosproject/ui/impl/topo/util/ServicesBundle.java", "license": "apache-2.0", "size": 5424 }
[ "org.onosproject.incubator.net.tunnel.TunnelService" ]
import org.onosproject.incubator.net.tunnel.TunnelService;
import org.onosproject.incubator.net.tunnel.*;
[ "org.onosproject.incubator" ]
org.onosproject.incubator;
2,531,938
RuleType type();
RuleType type();
/** * Type of the issue. */
Type of the issue
type
{ "repo_name": "Godin/sonar", "path": "sonar-plugin-api/src/main/java/org/sonar/api/batch/sensor/issue/ExternalIssue.java", "license": "lgpl-3.0", "size": 1478 }
[ "org.sonar.api.rules.RuleType" ]
import org.sonar.api.rules.RuleType;
import org.sonar.api.rules.*;
[ "org.sonar.api" ]
org.sonar.api;
2,473,219
public static void buildProjectJsonK(Context context) { ObjectMapper mapper = new ObjectMapper(); Survey survey = SurveyGenerator.createWanderingMindSurvey(); // Session Session session = new Session(); session.setDurationUnits(24L * 4L); // 4days session.setDurationMeasure("hours"); // session.setStartDate(new Calendar()) List<Task> tasks = new ArrayList<Task>(); Task wifiSensor = TaskGenerator.createWifiConnectionSensor(mapper, 1000, new String[] { "AppleBS4" }); tasks.add(wifiSensor); Task accSensor = TaskGenerator.createAccelerometerSensor(mapper, 20, 10000, 5000); tasks.add(accSensor); Task dataSink = TaskGenerator.createDataSink(mapper, 60); tasks.add(dataSink); Task gpsSensor = TaskGenerator.createGpsSensor(mapper, 1000L * 30L); tasks.add(gpsSensor); Task audioSensor = TaskGenerator.createAudioSensor(mapper, 44100, 1000 * 25); // rate: 44100Hz, duration: 25 seconds tasks.add(audioSensor); Task audioSink = TaskGenerator.createTaskWithPeriod(mapper, "AudioSink", TaskType.AudioSink, 1000); tasks.add(audioSink); Condition ifTimerSaysSo = TaskGenerator.createCondition("value", GeneralTrigger.DataType.BOOLEAN.name(), GeneralTrigger.booleanOperators[0]); // "is true" ArrayList<Condition> conditions = new ArrayList<Condition>(); conditions.add(ifTimerSaysSo); Task audioTrigger = TaskGenerator.createTrigger(mapper, "AudioTrigger", 1000, GeneralTrigger.matches[0], conditions); tasks.add(audioTrigger); Task timerSensor = TaskGenerator.createTimerSensor(mapper, 1000, 1000 * 60 * 60 * 1); // each 3 hour tasks.add(timerSensor); List<TaskRelation> relations = Arrays .asList(new TaskRelation[] { new TaskRelation(timerSensor.getName(), audioTrigger .getName()), new TaskRelation(audioTrigger.getName(), audioSensor .getName()), new TaskRelation(audioSensor.getName(), audioSink .getName()), new TaskRelation(wifiSensor.getName(), dataSink .getName()), new TaskRelation(accSensor.getName(), dataSink .getName()), new TaskRelation(gpsSensor.getName(), dataSink .getName()), }); 
session.setTasks(tasks); session.setRelations(relations); Project project = new Project(); project.setSessionsSize(1); project.put("mainSession", session); project.setSurveysSize(1); project.put("mainSurvey", survey); writeProject(context, mapper, project); }
static void function(Context context) { ObjectMapper mapper = new ObjectMapper(); Survey survey = SurveyGenerator.createWanderingMindSurvey(); Session session = new Session(); session.setDurationUnits(24L * 4L); session.setDurationMeasure("hours"); List<Task> tasks = new ArrayList<Task>(); Task wifiSensor = TaskGenerator.createWifiConnectionSensor(mapper, 1000, new String[] { STR }); tasks.add(wifiSensor); Task accSensor = TaskGenerator.createAccelerometerSensor(mapper, 20, 10000, 5000); tasks.add(accSensor); Task dataSink = TaskGenerator.createDataSink(mapper, 60); tasks.add(dataSink); Task gpsSensor = TaskGenerator.createGpsSensor(mapper, 1000L * 30L); tasks.add(gpsSensor); Task audioSensor = TaskGenerator.createAudioSensor(mapper, 44100, 1000 * 25); tasks.add(audioSensor); Task audioSink = TaskGenerator.createTaskWithPeriod(mapper, STR, TaskType.AudioSink, 1000); tasks.add(audioSink); Condition ifTimerSaysSo = TaskGenerator.createCondition("value", GeneralTrigger.DataType.BOOLEAN.name(), GeneralTrigger.booleanOperators[0]); ArrayList<Condition> conditions = new ArrayList<Condition>(); conditions.add(ifTimerSaysSo); Task audioTrigger = TaskGenerator.createTrigger(mapper, STR, 1000, GeneralTrigger.matches[0], conditions); tasks.add(audioTrigger); Task timerSensor = TaskGenerator.createTimerSensor(mapper, 1000, 1000 * 60 * 60 * 1); tasks.add(timerSensor); List<TaskRelation> relations = Arrays .asList(new TaskRelation[] { new TaskRelation(timerSensor.getName(), audioTrigger .getName()), new TaskRelation(audioTrigger.getName(), audioSensor .getName()), new TaskRelation(audioSensor.getName(), audioSink .getName()), new TaskRelation(wifiSensor.getName(), dataSink .getName()), new TaskRelation(accSensor.getName(), dataSink .getName()), new TaskRelation(gpsSensor.getName(), dataSink .getName()), }); session.setTasks(tasks); session.setRelations(relations); Project project = new Project(); project.setSessionsSize(1); project.put(STR, session); project.setSurveysSize(1); 
project.put(STR, survey); writeProject(context, mapper, project); }
/** * GPS + Wifi + Acc + audio * * @param resources */
GPS + Wifi + Acc + audio
buildProjectJsonK
{ "repo_name": "alanmtz1503/InCense", "path": "src/edu/incense/android/test/ProjectGenerator.java", "license": "mit", "size": 77362 }
[ "android.content.Context", "edu.incense.android.datatask.model.Task", "edu.incense.android.datatask.model.TaskRelation", "edu.incense.android.datatask.model.TaskType", "edu.incense.android.datatask.trigger.Condition", "edu.incense.android.datatask.trigger.GeneralTrigger", "edu.incense.android.project.Project", "edu.incense.android.session.Session", "edu.incense.android.survey.Survey", "java.util.ArrayList", "java.util.Arrays", "java.util.List", "org.codehaus.jackson.map.ObjectMapper" ]
import android.content.Context; import edu.incense.android.datatask.model.Task; import edu.incense.android.datatask.model.TaskRelation; import edu.incense.android.datatask.model.TaskType; import edu.incense.android.datatask.trigger.Condition; import edu.incense.android.datatask.trigger.GeneralTrigger; import edu.incense.android.project.Project; import edu.incense.android.session.Session; import edu.incense.android.survey.Survey; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.codehaus.jackson.map.ObjectMapper;
import android.content.*; import edu.incense.android.datatask.model.*; import edu.incense.android.datatask.trigger.*; import edu.incense.android.project.*; import edu.incense.android.session.*; import edu.incense.android.survey.*; import java.util.*; import org.codehaus.jackson.map.*;
[ "android.content", "edu.incense.android", "java.util", "org.codehaus.jackson" ]
android.content; edu.incense.android; java.util; org.codehaus.jackson;
1,303,308
@Bean public JdbcTemplate metadataJdbcTemplate( @Qualifier("metadataDataSource") final DataSource mySqlDataSource) { return new JdbcTemplate(mySqlDataSource); }
JdbcTemplate function( @Qualifier(STR) final DataSource mySqlDataSource) { return new JdbcTemplate(mySqlDataSource); }
/** * mySql metadata JDBC template. * * @param mySqlDataSource metadata data source * @return metadata JDBC template */
mySql metadata JDBC template
metadataJdbcTemplate
{ "repo_name": "tgianos/metacat", "path": "metacat-metadata-mysql/src/main/java/com/netflix/metacat/metadata/mysql/MySqlUserMetadataConfig.java", "license": "apache-2.0", "size": 5601 }
[ "javax.sql.DataSource", "org.springframework.beans.factory.annotation.Qualifier", "org.springframework.jdbc.core.JdbcTemplate" ]
import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.*; import org.springframework.beans.factory.annotation.*; import org.springframework.jdbc.core.*;
[ "javax.sql", "org.springframework.beans", "org.springframework.jdbc" ]
javax.sql; org.springframework.beans; org.springframework.jdbc;
929,153
public void authenticate(Context context, String consumerKey, String consumerSecret, SocializeAuthListener authListener);
void function(Context context, String consumerKey, String consumerSecret, SocializeAuthListener authListener);
/** * Authenticates the application against the API as an anonymous user. * @param context The current context. * @param consumerKey The consumer key, obtained from registration at http://www.getsocialize.com. * @param consumerSecret The consumer secret, obtained from registration at http://www.getsocialize.com. * @param authListener The callback for authentication outcomes. */
Authenticates the application against the API as an anonymous user
authenticate
{ "repo_name": "dylanmaryk/InsanityRadio-Android", "path": "socialize/src/com/socialize/SocializeService.java", "license": "mit", "size": 12949 }
[ "android.content.Context", "com.socialize.listener.SocializeAuthListener" ]
import android.content.Context; import com.socialize.listener.SocializeAuthListener;
import android.content.*; import com.socialize.listener.*;
[ "android.content", "com.socialize.listener" ]
android.content; com.socialize.listener;
1,229,662
public void deleteEverything(MailboxSession mailboxSession) throws MailboxException { final HBaseMailboxMapper mapper = (HBaseMailboxMapper) getMapperFactory().getMailboxMapper(mailboxSession); mapper.execute(new TransactionalMapper.VoidTransaction() {
void function(MailboxSession mailboxSession) throws MailboxException { final HBaseMailboxMapper mapper = (HBaseMailboxMapper) getMapperFactory().getMailboxMapper(mailboxSession); mapper.execute(new TransactionalMapper.VoidTransaction() {
/** * Delete all mailboxes * * @param mailboxSession * @throws MailboxException */
Delete all mailboxes
deleteEverything
{ "repo_name": "aduprat/james-mailbox", "path": "hbase/src/main/java/org/apache/james/mailbox/hbase/HBaseMailboxManager.java", "license": "apache-2.0", "size": 4214 }
[ "org.apache.james.mailbox.MailboxSession", "org.apache.james.mailbox.exception.MailboxException", "org.apache.james.mailbox.hbase.mail.HBaseMailboxMapper", "org.apache.james.mailbox.store.transaction.TransactionalMapper" ]
import org.apache.james.mailbox.MailboxSession; import org.apache.james.mailbox.exception.MailboxException; import org.apache.james.mailbox.hbase.mail.HBaseMailboxMapper; import org.apache.james.mailbox.store.transaction.TransactionalMapper;
import org.apache.james.mailbox.*; import org.apache.james.mailbox.exception.*; import org.apache.james.mailbox.hbase.mail.*; import org.apache.james.mailbox.store.transaction.*;
[ "org.apache.james" ]
org.apache.james;
956,402
private static Map<String, String> convertHeaders(Header[] headers) { Map<String, String> result = new HashMap<String, String>(); for (int i = 0; i < headers.length; i++) { result.put(headers[i].getName(), headers[i].getValue()); } return result; }
static Map<String, String> function(Header[] headers) { Map<String, String> result = new HashMap<String, String>(); for (int i = 0; i < headers.length; i++) { result.put(headers[i].getName(), headers[i].getValue()); } return result; }
/** * Converts Headers[] to Map<String, String>. */
Converts Headers[] to Map
convertHeaders
{ "repo_name": "benniaobuguai/android-project-wo2b", "path": "wo2b-tp-android-volley/src/com/android/volley/toolbox/BasicNetwork.java", "license": "apache-2.0", "size": 10008 }
[ "java.util.HashMap", "java.util.Map", "org.apache.http.Header" ]
import java.util.HashMap; import java.util.Map; import org.apache.http.Header;
import java.util.*; import org.apache.http.*;
[ "java.util", "org.apache.http" ]
java.util; org.apache.http;
128,076
private RowFilter createVersionLimitFilter(SingleColumnValueFilter filter) { return RowFilter.newBuilder() .setCellsPerColumnLimitFilter( filter.getLatestVersionOnly() ? 1 : Integer.MAX_VALUE) .build(); }
RowFilter function(SingleColumnValueFilter filter) { return RowFilter.newBuilder() .setCellsPerColumnLimitFilter( filter.getLatestVersionOnly() ? 1 : Integer.MAX_VALUE) .build(); }
/** * Emit a filter that will limit the number of cell versions that will be emitted. */
Emit a filter that will limit the number of cell versions that will be emitted
createVersionLimitFilter
{ "repo_name": "waprin/cloud-bigtable-client", "path": "bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/adapters/filters/SingleColumnValueFilterAdapter.java", "license": "apache-2.0", "size": 4809 }
[ "com.google.bigtable.v1.RowFilter", "org.apache.hadoop.hbase.filter.SingleColumnValueFilter" ]
import com.google.bigtable.v1.RowFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import com.google.bigtable.v1.*; import org.apache.hadoop.hbase.filter.*;
[ "com.google.bigtable", "org.apache.hadoop" ]
com.google.bigtable; org.apache.hadoop;
58,887
protected void renderReport(JasperPrint populatedReport, Map model, HttpServletResponse response) throws Exception { String format = (String) model.get(this.formatKey); if (format == null) { throw new IllegalArgumentException("No format format found in model"); } if (logger.isDebugEnabled()) { logger.debug("Rendering report using format mapping key [" + format + "]"); } Class viewClass = (Class) this.formatMappings.get(format); if (viewClass == null) { throw new IllegalArgumentException("Format discriminator [" + format + "] is not a configured mapping"); } if (logger.isDebugEnabled()) { logger.debug("Rendering report using view class [" + viewClass.getName() + "]"); } AbstractJasperReportsView view = (AbstractJasperReportsView) BeanUtils.instantiateClass(viewClass); // Copy appropriate properties across. view.setExporterParameters(getExporterParameters()); // Can skip most initialization since all relevant URL processing // has been done - just need to convert parameters on the sub view. view.convertExporterParameters(); // Prepare response and render report. populateContentDispositionIfNecessary(response, format); view.renderReport(populatedReport, model, response); }
void function(JasperPrint populatedReport, Map model, HttpServletResponse response) throws Exception { String format = (String) model.get(this.formatKey); if (format == null) { throw new IllegalArgumentException(STR); } if (logger.isDebugEnabled()) { logger.debug(STR + format + "]"); } Class viewClass = (Class) this.formatMappings.get(format); if (viewClass == null) { throw new IllegalArgumentException(STR + format + STR); } if (logger.isDebugEnabled()) { logger.debug(STR + viewClass.getName() + "]"); } AbstractJasperReportsView view = (AbstractJasperReportsView) BeanUtils.instantiateClass(viewClass); view.setExporterParameters(getExporterParameters()); view.convertExporterParameters(); populateContentDispositionIfNecessary(response, format); view.renderReport(populatedReport, model, response); }
/** * Locates the format key in the model using the configured discriminator key and uses this * key to lookup the appropriate view class from the mappings. The rendering of the * report is then delegated to an instance of that view class. */
Locates the format key in the model using the configured discriminator key and uses this key to lookup the appropriate view class from the mappings. The rendering of the report is then delegated to an instance of that view class
renderReport
{ "repo_name": "mattxia/spring-2.5-analysis", "path": "src/org/springframework/web/servlet/view/jasperreports/JasperReportsMultiFormatView.java", "license": "apache-2.0", "size": 8313 }
[ "java.util.Map", "javax.servlet.http.HttpServletResponse", "net.sf.jasperreports.engine.JasperPrint", "org.springframework.beans.BeanUtils" ]
import java.util.Map; import javax.servlet.http.HttpServletResponse; import net.sf.jasperreports.engine.JasperPrint; import org.springframework.beans.BeanUtils;
import java.util.*; import javax.servlet.http.*; import net.sf.jasperreports.engine.*; import org.springframework.beans.*;
[ "java.util", "javax.servlet", "net.sf.jasperreports", "org.springframework.beans" ]
java.util; javax.servlet; net.sf.jasperreports; org.springframework.beans;
365,013
private TypeInformation<KEY> validateKeyType(TypeInformation<KEY> keyType) { Stack<TypeInformation<?>> stack = new Stack<>(); stack.push(keyType); List<TypeInformation<?>> unsupportedTypes = new ArrayList<>(); while (!stack.isEmpty()) { TypeInformation<?> typeInfo = stack.pop(); if (!validateKeyTypeIsHashable(typeInfo)) { unsupportedTypes.add(typeInfo); } if (typeInfo instanceof TupleTypeInfoBase) { for (int i = 0; i < typeInfo.getArity(); i++) { stack.push(((TupleTypeInfoBase) typeInfo).getTypeAt(i)); } } } if (!unsupportedTypes.isEmpty()) { throw new InvalidProgramException("Type " + keyType + " cannot be used as key. Contained " + "UNSUPPORTED key types: " + StringUtils.join(unsupportedTypes, ", ") + ". Look " + "at the keyBy() documentation for the conditions a type has to satisfy in order to be " + "eligible for a key."); } return keyType; }
TypeInformation<KEY> function(TypeInformation<KEY> keyType) { Stack<TypeInformation<?>> stack = new Stack<>(); stack.push(keyType); List<TypeInformation<?>> unsupportedTypes = new ArrayList<>(); while (!stack.isEmpty()) { TypeInformation<?> typeInfo = stack.pop(); if (!validateKeyTypeIsHashable(typeInfo)) { unsupportedTypes.add(typeInfo); } if (typeInfo instanceof TupleTypeInfoBase) { for (int i = 0; i < typeInfo.getArity(); i++) { stack.push(((TupleTypeInfoBase) typeInfo).getTypeAt(i)); } } } if (!unsupportedTypes.isEmpty()) { throw new InvalidProgramException(STR + keyType + STR + STR + StringUtils.join(unsupportedTypes, STR) + STR + STR + STR); } return keyType; }
/** * Validates that a given type of element (as encoded by the provided {@link TypeInformation}) can be * used as a key in the {@code DataStream.keyBy()} operation. This is done by searching depth-first the * key type and checking if each of the composite types satisfies the required conditions * (see {@link #validateKeyTypeIsHashable(TypeInformation)}). * * @param keyType The {@link TypeInformation} of the key. */
Validates that a given type of element (as encoded by the provided <code>TypeInformation</code>) can be used as a key in the DataStream.keyBy() operation. This is done by searching depth-first the key type and checking if each of the composite types satisfies the required conditions (see <code>#validateKeyTypeIsHashable(TypeInformation)</code>)
validateKeyType
{ "repo_name": "oscarceballos/flink-1.3.2", "path": "flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java", "license": "apache-2.0", "size": 34096 }
[ "java.util.ArrayList", "java.util.List", "java.util.Stack", "org.apache.commons.lang3.StringUtils", "org.apache.flink.api.common.InvalidProgramException", "org.apache.flink.api.common.typeinfo.TypeInformation", "org.apache.flink.api.java.typeutils.TupleTypeInfoBase" ]
import java.util.ArrayList; import java.util.List; import java.util.Stack; import org.apache.commons.lang3.StringUtils; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.TupleTypeInfoBase;
import java.util.*; import org.apache.commons.lang3.*; import org.apache.flink.api.common.*; import org.apache.flink.api.common.typeinfo.*; import org.apache.flink.api.java.typeutils.*;
[ "java.util", "org.apache.commons", "org.apache.flink" ]
java.util; org.apache.commons; org.apache.flink;
1,862,997
public static Headers of(Map<String, String> headers) { if (headers == null) throw new NullPointerException("headers == null"); // Make a defensive copy and clean it up. String[] namesAndValues = new String[headers.size() * 2]; int i = 0; for (Map.Entry<String, String> header : headers.entrySet()) { if (header.getKey() == null || header.getValue() == null) { throw new IllegalArgumentException("Headers cannot be null"); } String name = header.getKey().trim(); String value = header.getValue().trim(); if (name.length() == 0 || name.indexOf('\0') != -1 || value.indexOf('\0') != -1) { throw new IllegalArgumentException("Unexpected header: " + name + ": " + value); } namesAndValues[i] = name; namesAndValues[i + 1] = value; i += 2; } return new Headers(namesAndValues); }
static Headers function(Map<String, String> headers) { if (headers == null) throw new NullPointerException(STR); String[] namesAndValues = new String[headers.size() * 2]; int i = 0; for (Map.Entry<String, String> header : headers.entrySet()) { if (header.getKey() == null header.getValue() == null) { throw new IllegalArgumentException(STR); } String name = header.getKey().trim(); String value = header.getValue().trim(); if (name.length() == 0 name.indexOf('\0') != -1 value.indexOf('\0') != -1) { throw new IllegalArgumentException(STR + name + STR + value); } namesAndValues[i] = name; namesAndValues[i + 1] = value; i += 2; } return new Headers(namesAndValues); }
/** * Returns headers for the header names and values in the {@link Map}. */
Returns headers for the header names and values in the <code>Map</code>
of
{ "repo_name": "apptik/jus", "path": "jus-java/src/main/java/io/apptik/comm/jus/http/Headers.java", "license": "apache-2.0", "size": 17438 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,634,728
int updateByPrimaryKeyWithBLOBs(CommentWithBLOBs record);
int updateByPrimaryKeyWithBLOBs(CommentWithBLOBs record);
/** * This method was generated by MyBatis Generator. * This method corresponds to the database table m_comment * * @mbggenerated Thu Jul 16 10:50:11 ICT 2015 */
This method was generated by MyBatis Generator. This method corresponds to the database table m_comment
updateByPrimaryKeyWithBLOBs
{ "repo_name": "uniteddiversity/mycollab", "path": "mycollab-services/src/main/java/com/esofthead/mycollab/common/dao/CommentMapper.java", "license": "agpl-3.0", "size": 4737 }
[ "com.esofthead.mycollab.common.domain.CommentWithBLOBs" ]
import com.esofthead.mycollab.common.domain.CommentWithBLOBs;
import com.esofthead.mycollab.common.domain.*;
[ "com.esofthead.mycollab" ]
com.esofthead.mycollab;
1,070,154
@Override public void show() { if (localLOGV) Log.v(TAG, "SHOW: " + this); mHandler.post(mShow); }
void function() { if (localLOGV) Log.v(TAG, STR + this); mHandler.post(mShow); }
/** * schedule handleShow into the right thread */
schedule handleShow into the right thread
show
{ "repo_name": "OmniEvo/android_frameworks_base", "path": "core/java/android/widget/Toast.java", "license": "gpl-3.0", "size": 16620 }
[ "android.util.Log" ]
import android.util.Log;
import android.util.*;
[ "android.util" ]
android.util;
290,564
byte[] getConnectedPubKeyScript() { byte[] result = checkNotNull(getConnectedOutput().getScriptBytes()); checkState(result.length > 0); return result; }
byte[] getConnectedPubKeyScript() { byte[] result = checkNotNull(getConnectedOutput().getScriptBytes()); checkState(result.length > 0); return result; }
/** * Returns the pubkey script from the connected output. */
Returns the pubkey script from the connected output
getConnectedPubKeyScript
{ "repo_name": "denwaotoko/devcoinj", "path": "core/src/main/java/com/google/devcoin/core/TransactionOutPoint.java", "license": "apache-2.0", "size": 7218 }
[ "com.google.common.base.Preconditions" ]
import com.google.common.base.Preconditions;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
2,625,113