method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Returns the <code>Animator</code> object that drives this ScreenTransition.
 *
 * @return the Animator that drives this ScreenTransition
 */
public Animator getAnimator() {
    return this.animator;
}
|
Animator function() { return animator; }
|
/**
* Returns <code>Animator</code> object that drives this ScreenTransition.
*
* @return the Animator that drives this ScreenTransition
*/
|
Returns <code>Animator</code> object that drives this ScreenTransition
|
getAnimator
|
{
"repo_name": "borisbrodski/jmockit",
"path": "samples/AnimatedTransitions/src/org/jdesktop/animation/transitions/ScreenTransition.java",
"license": "mit",
"size": 17174
}
|
[
"org.jdesktop.animation.timing.Animator"
] |
import org.jdesktop.animation.timing.Animator;
|
import org.jdesktop.animation.timing.*;
|
[
"org.jdesktop.animation"
] |
org.jdesktop.animation;
| 216,469
|
/**
 * If a patient doesn't have data, the concept column data should not be
 * printed — blank cells are exported instead.
 *
 * @throws Exception on any failure while generating or reading the export
 */
@Test
public void shouldExportBlankCellsForPatientsWithoutMatchingData() throws Exception {
    executeDataSet("org/openmrs/reporting/export/include/DataExportTest-patients.xml");
    executeDataSet("org/openmrs/reporting/export/include/DataExportTest-obs.xml");

    RowPerObsDataExportReportObject dataExport = new RowPerObsDataExportReportObject();
    dataExport.setName("Given names export");
    dataExport.addSimpleColumn("PATIENT_ID", "$!{fn.patientId}");
    dataExport.addSimpleColumn("Name", "$!{fn.getPatientAttr('PersonName', 'givenName')}");
    dataExport.setRowPerObsColumn("Weight (KG)", "5089", new String[] { "location" });

    // Patients 6 and 8 have no matching obs and should get blank cells.
    Cohort cohort = new Cohort();
    for (int patientId : new int[] { 2, 6, 7, 8 }) {
        cohort.addMember(patientId);
    }

    DataExportUtil.generateExport(dataExport, cohort, "\t", null);
    File generatedFile = DataExportUtil.getGeneratedFile(dataExport);

    String expectedOutput = "PATIENT_ID Name Weight (KG) Weight (KG)_location\n2 John 10.0 Test Location\n2 John 9.0 Test Location\n2 John 8.0 Test Location\n2 John 7.0 Test Location\n2 John 6.0 Test Location\n2 John 5.0 Test Location\n2 John 4.0 Test Location\n2 John 3.0 Test Location\n2 John 2.0 Test Location\n2 John 1.0 Test Location\n6 Johnny \n7 Collet 61.0 Xanadu\n8 Anet \n";
    String actualOutput = OpenmrsUtil.getFileAsString(generatedFile);
    generatedFile.delete();

    assertEquals("The output is not right.", expectedOutput, actualOutput);
}
|
void function() throws Exception { executeDataSet(STR); executeDataSet(STR); RowPerObsDataExportReportObject export = new RowPerObsDataExportReportObject(); export.setName(STR); export.addSimpleColumn(STR, STR); export.addSimpleColumn("Name", STR); export.setRowPerObsColumn(STR, "5089", new String[] { STR }); Cohort patients = new Cohort(); patients.addMember(2); patients.addMember(6); patients.addMember(7); patients.addMember(8); DataExportUtil.generateExport(export, patients, "\t", null); File exportFile = DataExportUtil.getGeneratedFile(export); String expectedOutput = STR; String output = OpenmrsUtil.getFileAsString(exportFile); exportFile.delete(); assertEquals(STR, expectedOutput, output); }
|
/**
* If a patient doesn't have data, the concept column data should not be printed
*
* @throws Exception
*/
|
If a patient doesn't have data, the concept column data should not be printed
|
shouldExportBlankCellsForPatientsWithoutMatchingData
|
{
"repo_name": "Winbobob/openmrs-core",
"path": "api/src/test/java/org/openmrs/reporting/export/RowPerObsDataExportTest.java",
"license": "mpl-2.0",
"size": 4256
}
|
[
"java.io.File",
"org.junit.Assert",
"org.openmrs.Cohort",
"org.openmrs.util.OpenmrsUtil"
] |
import java.io.File; import org.junit.Assert; import org.openmrs.Cohort; import org.openmrs.util.OpenmrsUtil;
|
import java.io.*; import org.junit.*; import org.openmrs.*; import org.openmrs.util.*;
|
[
"java.io",
"org.junit",
"org.openmrs",
"org.openmrs.util"
] |
java.io; org.junit; org.openmrs; org.openmrs.util;
| 2,318,346
|
/**
 * Returns a ResultSet with all users registered in the database
 * (every row of the "usuari" table).
 *
 * @return the query's ResultSet, or null if the query failed (the SQL
 *         state is printed to stdout in that case)
 */
public static ResultSet selectAllUsers(){
    ResultSet rs = null;
    // Fix: use the string literal directly instead of the redundant
    // `new String(...)` wrapper object.
    String query = "SELECT * FROM usuari";
    try {
        s = (Statement) conn.createStatement();
        rs = s.executeQuery(query);
    } catch (SQLException ex) {
        System.out.println("Problema al Recuperar les dades --> " + ex.getSQLState());
    }
    // NOTE(review): the caller is responsible for closing the ResultSet and
    // the statement `s` stored in the class field — confirm against callers.
    return rs;
}
|
static ResultSet function(){ ResultSet rs = null; String query = new String(STR); try { s =(Statement) conn.createStatement(); rs = s.executeQuery (query); } catch (SQLException ex) { System.out.println(STR + ex.getSQLState()); } return rs; }
|
/**
* retorna un result set amb tots els usuaris registrats a la base de dades.
*/
|
retorna un result set amb tots els usuaris registrats a la base de dades
|
selectAllUsers
|
{
"repo_name": "NuriaValls/MemoTournament.Server",
"path": "src/network/ConectorDB.java",
"license": "mit",
"size": 6393
}
|
[
"java.sql.ResultSet",
"java.sql.SQLException",
"java.sql.Statement"
] |
import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 363,875
|
/**
 * Test of the doOCR method, of class Tesseract1, on a BufferedImage
 * decoded from a PNG fixture.
 *
 * @throws Exception while processing the image.
 */
@Test
public void testDoOCR_BufferedImage() throws Exception {
    logger.info("doOCR on a buffered image of a PNG");
    File imageFile = new File(this.testResourcesDataPath, "eurotext.png");
    BufferedImage image = ImageIO.read(imageFile);
    String expResult = "The (quick) [brown] {fox} jumps!\nOver the $43,456.78 <lazy> #90 dog";
    String recognizedText = instance.doOCR(image);
    logger.info(recognizedText);
    // Only the leading portion is compared; trailing OCR output is ignored.
    assertEquals(expResult, recognizedText.substring(0, expResult.length()));
}
|
void function() throws Exception { logger.info(STR); File imageFile = new File(this.testResourcesDataPath, STR); BufferedImage bi = ImageIO.read(imageFile); String expResult = STR; String result = instance.doOCR(bi); logger.info(result); assertEquals(expResult, result.substring(0, expResult.length())); }
|
/**
* Test of doOCR method, of class Tesseract1.
*
* @throws Exception while processing image.
*/
|
Test of doOCR method, of class Tesseract1
|
testDoOCR_BufferedImage
|
{
"repo_name": "wcecil/tess4j",
"path": "src/test/java/net/sourceforge/tess4j/Tesseract1Test.java",
"license": "apache-2.0",
"size": 11199
}
|
[
"java.awt.image.BufferedImage",
"java.io.File",
"javax.imageio.ImageIO",
"org.junit.Assert"
] |
import java.awt.image.BufferedImage; import java.io.File; import javax.imageio.ImageIO; import org.junit.Assert;
|
import java.awt.image.*; import java.io.*; import javax.imageio.*; import org.junit.*;
|
[
"java.awt",
"java.io",
"javax.imageio",
"org.junit"
] |
java.awt; java.io; javax.imageio; org.junit;
| 964,949
|
/**
 * Searches the attribute subset space using an exhaustive search.
 *
 * @param ASEval the attribute evaluator to guide the search
 * @param data the training instances
 * @return an array (not necessarily ordered) of selected attribute indexes
 * @throws Exception if ASEval is not a SubsetEvaluator, or if subset
 *         evaluation fails
 */
public int[] search (ASEvaluation ASEval, Instances data)
    throws Exception {
    double best_merit;
    double tempMerit;
    boolean done = false;
    int sizeOfBest;
    int tempSize;
    // Position in the search space; each bit of 'space' marks one attribute.
    BigInteger space = BigInteger.ZERO;
    m_evaluations = 0;
    m_numAttribs = data.numAttributes();
    m_bestGroup = new BitSet(m_numAttribs);
    if (!(ASEval instanceof SubsetEvaluator)) {
        throw new Exception(ASEval.getClass().getName()
                            + " is not a "
                            + "Subset evaluator!");
    }
    // Unsupervised evaluators have no class attribute to exclude.
    if (ASEval instanceof UnsupervisedSubsetEvaluator) {
        m_hasClass = false;
    }
    else {
        m_hasClass = true;
        m_classIndex = data.classIndex();
    }
    SubsetEvaluator ASEvaluator = (SubsetEvaluator)ASEval;
    m_numAttribs = data.numAttributes();
    // Baseline merit: the (empty) starting group.
    best_merit = ASEvaluator.evaluateSubset(m_bestGroup);
    m_evaluations++;
    sizeOfBest = countFeatures(m_bestGroup);
    BitSet tempGroup = new BitSet(m_numAttribs);
    tempMerit = ASEvaluator.evaluateSubset(tempGroup);
    if (m_verbose) {
        System.out.println("Zero feature subset ("
                           +Utils.doubleToString(Math.
                                                 abs(tempMerit),8,5)
                           +")");
    }
    // On equal merit, prefer the subset with fewer features.
    if (tempMerit >= best_merit) {
        tempSize = countFeatures(tempGroup);
        if (tempMerit > best_merit ||
            (tempSize < sizeOfBest)) {
            best_merit = tempMerit;
            m_bestGroup = (BitSet)(tempGroup.clone());
            sizeOfBest = tempSize;
        }
    }
    // The class attribute (when present) is excluded from the enumeration.
    int numatts = (m_hasClass)
        ? m_numAttribs - 1
        : m_numAttribs;
    // 2^numatts - 1: the mask of the last subset in the search space.
    // (BigInteger.ONE.add(BigInteger.ONE) is simply the constant 2.)
    BigInteger searchSpaceEnd =
        BigInteger.ONE.add(BigInteger.ONE).pow(numatts).subtract(BigInteger.ONE);
    while (!done) {
        // the next subset
        space = space.add(BigInteger.ONE);
        if (space.equals(searchSpaceEnd)) {
            done = true;
        }
        tempGroup.clear();
        // Translate the bit mask into a BitSet, skipping over the class index.
        for (int i = 0; i < numatts; i++) {
            if (space.testBit(i)) {
                if (!m_hasClass) {
                    tempGroup.set(i);
                } else {
                    int j = (i >= m_classIndex)
                        ? i + 1
                        : i;
                    tempGroup.set(j);
                }
            }
        }
        tempMerit = ASEvaluator.evaluateSubset(tempGroup);
        m_evaluations++;
        // Same tie-breaking as above: strictly better merit, or equal merit
        // with fewer features, replaces the incumbent.
        if (tempMerit >= best_merit) {
            tempSize = countFeatures(tempGroup);
            if (tempMerit > best_merit ||
                (tempSize < sizeOfBest)) {
                best_merit = tempMerit;
                m_bestGroup = (BitSet)(tempGroup.clone());
                sizeOfBest = tempSize;
                if (m_verbose) {
                    System.out.println("New best subset ("
                                       +Utils.doubleToString(Math.
                                                             abs(best_merit),8,5)
                                       +"): "+printSubset(m_bestGroup));
                }
            }
        }
    }
    m_bestMerit = best_merit;
    return attributeList(m_bestGroup);
}
|
int[] function (ASEvaluation ASEval, Instances data) throws Exception { double best_merit; double tempMerit; boolean done = false; int sizeOfBest; int tempSize; BigInteger space = BigInteger.ZERO; m_evaluations = 0; m_numAttribs = data.numAttributes(); m_bestGroup = new BitSet(m_numAttribs); if (!(ASEval instanceof SubsetEvaluator)) { throw new Exception(ASEval.getClass().getName() + STR + STR); } if (ASEval instanceof UnsupervisedSubsetEvaluator) { m_hasClass = false; } else { m_hasClass = true; m_classIndex = data.classIndex(); } SubsetEvaluator ASEvaluator = (SubsetEvaluator)ASEval; m_numAttribs = data.numAttributes(); best_merit = ASEvaluator.evaluateSubset(m_bestGroup); m_evaluations++; sizeOfBest = countFeatures(m_bestGroup); BitSet tempGroup = new BitSet(m_numAttribs); tempMerit = ASEvaluator.evaluateSubset(tempGroup); if (m_verbose) { System.out.println(STR +Utils.doubleToString(Math. abs(tempMerit),8,5) +")"); } if (tempMerit >= best_merit) { tempSize = countFeatures(tempGroup); if (tempMerit > best_merit (tempSize < sizeOfBest)) { best_merit = tempMerit; m_bestGroup = (BitSet)(tempGroup.clone()); sizeOfBest = tempSize; } } int numatts = (m_hasClass) ? m_numAttribs - 1 : m_numAttribs; BigInteger searchSpaceEnd = BigInteger.ONE.add(BigInteger.ONE).pow(numatts).subtract(BigInteger.ONE); while (!done) { space = space.add(BigInteger.ONE); if (space.equals(searchSpaceEnd)) { done = true; } tempGroup.clear(); for (int i = 0; i < numatts; i++) { if (space.testBit(i)) { if (!m_hasClass) { tempGroup.set(i); } else { int j = (i >= m_classIndex) ? i + 1 : i; tempGroup.set(j); } } } tempMerit = ASEvaluator.evaluateSubset(tempGroup); m_evaluations++; if (tempMerit >= best_merit) { tempSize = countFeatures(tempGroup); if (tempMerit > best_merit (tempSize < sizeOfBest)) { best_merit = tempMerit; m_bestGroup = (BitSet)(tempGroup.clone()); sizeOfBest = tempSize; if (m_verbose) { System.out.println(STR +Utils.doubleToString(Math. 
abs(best_merit),8,5) +STR+printSubset(m_bestGroup)); } } } } m_bestMerit = best_merit; return attributeList(m_bestGroup); }
|
/**
* Searches the attribute subset space using an exhaustive search.
*
* @param ASEval the attribute evaluator to guide the search
* @param data the training instances.
* @return an array (not necessarily ordered) of selected attribute indexes
* @throws Exception if the search can't be completed
*/
|
Searches the attribute subset space using an exhaustive search
|
search
|
{
"repo_name": "goddesss/DataModeling",
"path": "src/weka/attributeSelection/ExhaustiveSearch.java",
"license": "gpl-2.0",
"size": 11655
}
|
[
"java.math.BigInteger",
"java.util.BitSet"
] |
import java.math.BigInteger; import java.util.BitSet;
|
import java.math.*; import java.util.*;
|
[
"java.math",
"java.util"
] |
java.math; java.util;
| 1,064,699
|
/**
 * Returns the serialVersionUID for the given class, or 0 when no
 * ObjectStreamClass_1_3_1 descriptor can be looked up for it.
 *
 * @param clazz the class whose serialVersionUID is wanted
 * @return the serialVersionUID, or 0 if the lookup returned null
 */
public static final long getSerialVersionUID( java.lang.Class<?> clazz) {
    final ObjectStreamClass_1_3_1 descriptor = ObjectStreamClass_1_3_1.lookup(clazz);
    return (descriptor == null) ? 0 : descriptor.getSerialVersionUID();
}
|
static final long function( java.lang.Class<?> clazz) { ObjectStreamClass_1_3_1 theosc = ObjectStreamClass_1_3_1.lookup( clazz ); if( theosc != null ) { return theosc.getSerialVersionUID( ); } return 0; }
|
/**
* Return the serialVersionUID for this class.
* The serialVersionUID defines a set of classes all with the same name
* that have evolved from a common root class and agree to be serialized
* and deserialized using a common format.
*/
|
Return the serialVersionUID for this class. The serialVersionUID defines a set of classes all with the same name that have evolved from a common root class and agree to be serialized and deserialized using a common format
|
getSerialVersionUID
|
{
"repo_name": "tadamski/openjdk-orb",
"path": "src/share/classes/com/sun/corba/se/impl/orbutil/ObjectStreamClass_1_3_1.java",
"license": "gpl-2.0",
"size": 44238
}
|
[
"com.sun.corba.se.impl.io.ObjectStreamClass"
] |
import com.sun.corba.se.impl.io.ObjectStreamClass;
|
import com.sun.corba.se.impl.io.*;
|
[
"com.sun.corba"
] |
com.sun.corba;
| 1,911,995
|
/**
 * Get the reference to the memory (containing helper functions and
 * processing information).
 *
 * @return the memory
 */
public Memory getMemory() {
    return this.memory;
}
|
Memory function() { return memory; }
|
/**
* Get the reference to the memory (containing helper functions and processing information)
* @return The memory
*/
|
Get the reference to the memory (containing helper functions and processing information)
|
getMemory
|
{
"repo_name": "dimaki/enhydrator",
"path": "enhydrator/src/main/java/com/airhacks/enhydrator/in/Row.java",
"license": "apache-2.0",
"size": 12014
}
|
[
"com.airhacks.enhydrator.transform.Memory"
] |
import com.airhacks.enhydrator.transform.Memory;
|
import com.airhacks.enhydrator.transform.*;
|
[
"com.airhacks.enhydrator"
] |
com.airhacks.enhydrator;
| 1,786,233
|
/**
 * Builds a key for mPendingUploads from the account and file to upload:
 * the account name concatenated with the file's remote path.
 *
 * @param account account where the file to upload is stored
 * @param file    file to upload
 * @return the concatenated key
 */
private String buildRemoteName(Account account, OCFile file) {
    final String remotePath = file.getRemotePath();
    return account.name + remotePath;
}
|
String function(Account account, OCFile file) { return account.name + file.getRemotePath(); }
|
/**
* Builds a key for mPendingUploads from the account and file to upload
*
* @param account Account where the file to upload is stored
* @param file File to upload
*/
|
Builds a key for mPendingUploads from the account and file to upload
|
buildRemoteName
|
{
"repo_name": "duke8804/Iluq-Cloud",
"path": "src/com/owncloud/android/files/services/FileUploader.java",
"license": "gpl-2.0",
"size": 42288
}
|
[
"android.accounts.Account",
"com.iluq_cloud.android.datamodel.OCFile"
] |
import android.accounts.Account; import com.iluq_cloud.android.datamodel.OCFile;
|
import android.accounts.*; import com.iluq_cloud.android.datamodel.*;
|
[
"android.accounts",
"com.iluq_cloud.android"
] |
android.accounts; com.iluq_cloud.android;
| 2,598,037
|
//-----------------------------------------------------------------------
/**
 * Obtains an instance of {@code LocalTime} from a text string such as
 * {@code 10:15}, parsed with {@link DateTimeFormatter#ISO_LOCAL_TIME}.
 *
 * @param text the text to parse such as "10:15:30", not null
 * @return the parsed local time, not null
 * @throws DateTimeParseException if the text cannot be parsed
 */
public static LocalTime parse(CharSequence text) {
    final DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_TIME;
    return parse(text, formatter);
}
|
static LocalTime function(CharSequence text) { return parse(text, DateTimeFormatter.ISO_LOCAL_TIME); }
|
/**
* Obtains an instance of {@code LocalTime} from a text string such as {@code 10:15}.
* <p>
* The string must represent a valid time and is parsed using
* {@link java.time.format.DateTimeFormatter#ISO_LOCAL_TIME}.
*
* @param text the text to parse such as "10:15:30", not null
* @return the parsed local time, not null
* @throws DateTimeParseException if the text cannot be parsed
*/
|
Obtains an instance of LocalTime from a text string such as 10:15. The string must represent a valid time and is parsed using <code>java.time.format.DateTimeFormatter#ISO_LOCAL_TIME</code>
|
parse
|
{
"repo_name": "lukhnos/j2objc",
"path": "jre_emul/android/platform/libcore/ojluni/src/main/java/java/time/LocalTime.java",
"license": "apache-2.0",
"size": 71791
}
|
[
"java.time.format.DateTimeFormatter"
] |
import java.time.format.DateTimeFormatter;
|
import java.time.format.*;
|
[
"java.time"
] |
java.time;
| 2,056,070
|
/**
 * Starts the siphon process, first reading in the config file and aborting
 * (with a SiphonFailure) if there are issues with it, then entering the
 * main loop.
 */
public void begin() {
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(configFile));
        Yaml yaml = new Yaml(new SafeConstructor());
        @SuppressWarnings("unchecked")
        Map<String, Object> config = (Map<String, Object>) yaml.load(reader);
        if (config != null) {
            this.config = config;
        } else {
            throw new SiphonFailure("Unable to load YAML config.");
        }
    } catch (FileNotFoundException fnfe) {
        throw new SiphonFailure(fnfe);
    } finally {
        // Fix: the original leaked the reader once the YAML was parsed;
        // close it here on both the success and the failure path.
        if (reader != null) {
            try {
                reader.close();
            } catch (java.io.IOException ignored) {
                // best-effort close; the config (or the failure) is already decided
            }
        }
    }
    // Extra debug output?
    this.debug = (Boolean) this.config.get("debug");
    // NOTE(review): the unboxing casts below NPE if a key is absent —
    // presumably the config file must define all of them; confirm.
    // Slices: # of slices to split the day into while extracting.
    this.slices = (Integer) this.config.get("slices");
    // # of minutes to delay between extracts.
    this.delay = (Integer) this.config.get("delay");
    // # of seconds in between checks on completion of tasks.
    this.checkDelay = (Integer) this.config.get("checkDelay");
    // Max amount of concurrency.
    this.concurrency = (Integer) this.config.get("concurrency");
    // Where to put the file.
    this.targetFolderString = (String) this.config.get("targetFolder");
    // Where to stage the file.
    this.tmpFolderString = (String) this.config.get("tmpFolder");
    // Wrap tar with su -c?
    this.wrapAccumulate = (Boolean) this.config.get("wrapAccumulate");
    System.out.println("Wrapping accumulate with su -c to handle Java's weird shell invocation stuff");
    this.targetOwner = (String) this.config.get("targetOwner");
    System.out.println("Owner of backups set to " + targetOwner);
    // Validate required numeric settings before continuing.
    if (this.slices == null || this.slices < 0) {
        throw new SiphonFailure("'slices' must be present and non-negative");
    }
    if (this.delay == null || this.delay < 0) {
        throw new SiphonFailure("'delay' must be present and non-negative");
    }
    if (this.checkDelay == null || this.checkDelay < 1) {
        throw new SiphonFailure("'checkDelay' must be present and non-zero positive");
    }
    this.fuzz = (Integer) this.config.get("fuzz");
    this.buffer = (Integer) this.config.get("buffer");
    if (this.fuzz == null || this.fuzz < 0) {
        throw new SiphonFailure("'fuzz' must be present and non-negative");
    }
    if (this.buffer == null || this.buffer < 0) {
        throw new SiphonFailure("'buffer' must be present and non-negative");
    }
    active = true;
    // Are we listening for user input?
    this.attached = (Boolean) this.config.get("attached");
    // Database connection settings live in their own sub-map.
    @SuppressWarnings("unchecked")
    Map<String, Object> database = (Map<String, Object>) this.config.get("database");
    String host = (String) database.get("host");
    int port = (Integer) database.get("port");
    String db = (String) database.get("database");
    String user = (String) database.get("user");
    String password = (String) database.get("password");
    this.databaseTmpFolderString = (String) database.get("tmpFolder");
    this.database = new SiphonDatabase(host, port, db, user, password);
    doMainLoop();
}

// Console commands recognized by the main loop.
private static final String STOP = "stop";
private static final String PAUSE = "pause";
private static final String START = "start";
|
void function() { BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(configFile)); Yaml yaml = new Yaml(new SafeConstructor()); @SuppressWarnings(STR) Map<String, Object> config = (Map<String, Object>) yaml.load(reader); if (config != null) { this.config = config; } else { throw new SiphonFailure(STR); } } catch (FileNotFoundException fnfe) { throw new SiphonFailure(fnfe); } this.debug = (Boolean) this.config.get("debug"); this.slices = (Integer) this.config.get(STR); this.delay = (Integer) this.config.get("delay"); this.checkDelay = (Integer) this.config.get(STR); this.concurrency = (Integer) this.config.get(STR); this.targetFolderString = (String) this.config.get(STR); this.tmpFolderString = (String) this.config.get(STR); this.wrapAccumulate = (Boolean) this.config.get(STR); System.out.println(STR); this.targetOwner = (String) this.config.get(STR); System.out.println(STR + targetOwner); if (this.slices == null this.slices < 0) { throw new SiphonFailure(STR); } if (this.delay == null this.delay < 0) { throw new SiphonFailure(STR); } if (this.checkDelay == null this.checkDelay < 1) { throw new SiphonFailure(STR); } this.fuzz = (Integer) this.config.get("fuzz"); this.buffer = (Integer) this.config.get(STR); if (this.fuzz == null this.fuzz < 0) { throw new SiphonFailure(STR); } if (this.buffer == null this.buffer < 0) { throw new SiphonFailure(STR); } active = true; this.attached = (Boolean) this.config.get(STR); @SuppressWarnings(STR) Map<String, Object> database = (Map<String, Object>) this.config.get(STR); String host = (String) database.get("host"); int port = (Integer) database.get("port"); String db = (String) database.get(STR); String user = (String) database.get("user"); String password = (String) database.get(STR); this.databaseTmpFolderString = (String) database.get(STR); this.database = new SiphonDatabase(host, port, db, user, password); doMainLoop(); } private static final String STOP = "stop"; private static final String PAUSE 
= "pause"; private static final String START = "start";
|
/**
* Starts the siphon process, first reading in the config file and aborting if there are issues with it.
*/
|
Starts the siphon process, first reading in the config file and aborting if there are issues with it
|
begin
|
{
"repo_name": "ProgrammerDan/Devotion",
"path": "src/com/programmerdan/minecraft/devotion/siphon/Siphon.java",
"license": "bsd-3-clause",
"size": 8573
}
|
[
"java.io.BufferedReader",
"java.io.FileNotFoundException",
"java.io.FileReader",
"java.util.Map",
"org.yaml.snakeyaml.Yaml",
"org.yaml.snakeyaml.constructor.SafeConstructor"
] |
import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.util.Map; import org.yaml.snakeyaml.Yaml; import org.yaml.snakeyaml.constructor.SafeConstructor;
|
import java.io.*; import java.util.*; import org.yaml.snakeyaml.*; import org.yaml.snakeyaml.constructor.*;
|
[
"java.io",
"java.util",
"org.yaml.snakeyaml"
] |
java.io; java.util; org.yaml.snakeyaml;
| 2,017,970
|
/**
 * Write out the period as a Systems XML document: start/end times, interval
 * sizing, model info, and one interval element per analyzed interval, then
 * marshal it via writeToXML.
 *
 * @throws AdeException propagated from the XML construction/writing helpers
 */
private void writePeriod() throws AdeException {
    Systems systems = new Systems();
    systems.setVersion(XML_VERSION);
    // Containing period start, converted through the shared calendar m_gc.
    Date startTime = PeriodUtils.getContainingPeriodStart(m_periodStartDate);
    m_gc.setTimeInMillis(startTime.getTime());
    XMLGregorianCalendar startXMLDate = s_dataTypeFactory.newXMLGregorianCalendar(m_gc);
    systems.setStartTime(startXMLDate);
    // The next period's start serves as this period's end time.
    Date endTime = PeriodUtils.getNextPeriodStart(m_periodStartDate);
    m_gc.setTimeInMillis(endTime.getTime());
    XMLGregorianCalendar endXMLDate = s_dataTypeFactory.newXMLGregorianCalendar(m_gc);
    systems.setEndTime(endXMLDate);
    systems.setGmtOffset(m_xmlMetaData.getGMTOffset(m_source.getSourceId()));
    systems.setIntervalSize(m_xmlMetaData.getIntervalLengthInSeconds());
    // Interval-count element derived from the framing flow type.
    NumberIntervals numberOfIntervals = new NumberIntervals();
    systems.setNumberIntervals(numberOfIntervals);
    numberOfIntervals.setValue(XMLUtil.getNumberOfSnapshots(m_framingFlowType));
    numberOfIntervals.setAnalysisSnapshotSize(XMLUtil.getXMLHardenedDurationInSeconds(m_framingFlowType));
    // Model metadata block.
    ModelInfo modelInfo = new ModelInfo();
    systems.setModelInfo(modelInfo);
    modelInfo.setAnalysisGroup(m_xmlMetaData.getAnalysisGroupName());
    modelInfo.setModelCreationDate(m_xmlMetaData.getModelCreationDate());
    modelInfo.setTrainingPeriod(m_xmlMetaData.getNumberOfDaysInTraining());
    SystemsSystemType system = new SystemsSystemType();
    systems.setSystem(system);
    system.setLogType(m_xmlMetaData.getLogType());
    system.setSysId(m_source.getSourceId());
    List<SystemsIntervalType> listOfInterval = system.getInterval();
    // One interval element per analyzed interval in m_aiVec.
    for (int i = 0; i < m_numIntervals; ++i) {
        SystemsIntervalType intervalType = new SystemsIntervalType();
        listOfInterval.add(intervalType);
        // An interval with no results file is marked missing.
        if (m_aiVec[i].m_results_file != null && m_aiVec[i].m_results_file.length() > 0) {
            intervalType.setMissing(false);
        } else {
            intervalType.setMissing(true);
            intervalType.setMissingReason(MISSING_INTERVAL_REASON_NO_CONNECTION);
        }
        intervalType.setIndex(i);
        // Score scaled to a percentage and round-tripped through
        // SingleDigitFormatter before being stored as a double.
        double value = Double.parseDouble(SingleDigitFormatter.format(m_aiVec[i].m_anomaly_score * 100));
        intervalType.setAnomalyScore(value);
        intervalType.setNumUniqueMsgIds(m_aiVec[i].m_num_unique_msg_ids);
        intervalType.setNumNewMessages(m_aiVec[i].m_numNewMessages);
        intervalType.setNumNeverSeenBeforeMessages(m_aiVec[i].m_numNeverSeenBeforeMessages);
        intervalType.setLimitedModel(m_aiVec[i].m_modelQualityIndicator.toString());
    }
    writeToXML(systems, s_marshaller);
}
|
void function() throws AdeException { Systems systems = new Systems(); systems.setVersion(XML_VERSION); Date startTime = PeriodUtils.getContainingPeriodStart(m_periodStartDate); m_gc.setTimeInMillis(startTime.getTime()); XMLGregorianCalendar startXMLDate = s_dataTypeFactory.newXMLGregorianCalendar(m_gc); systems.setStartTime(startXMLDate); Date endTime = PeriodUtils.getNextPeriodStart(m_periodStartDate); m_gc.setTimeInMillis(endTime.getTime()); XMLGregorianCalendar endXMLDate = s_dataTypeFactory.newXMLGregorianCalendar(m_gc); systems.setEndTime(endXMLDate); systems.setGmtOffset(m_xmlMetaData.getGMTOffset(m_source.getSourceId())); systems.setIntervalSize(m_xmlMetaData.getIntervalLengthInSeconds()); NumberIntervals numberOfIntervals = new NumberIntervals(); systems.setNumberIntervals(numberOfIntervals); numberOfIntervals.setValue(XMLUtil.getNumberOfSnapshots(m_framingFlowType)); numberOfIntervals.setAnalysisSnapshotSize(XMLUtil.getXMLHardenedDurationInSeconds(m_framingFlowType)); ModelInfo modelInfo = new ModelInfo(); systems.setModelInfo(modelInfo); modelInfo.setAnalysisGroup(m_xmlMetaData.getAnalysisGroupName()); modelInfo.setModelCreationDate(m_xmlMetaData.getModelCreationDate()); modelInfo.setTrainingPeriod(m_xmlMetaData.getNumberOfDaysInTraining()); SystemsSystemType system = new SystemsSystemType(); systems.setSystem(system); system.setLogType(m_xmlMetaData.getLogType()); system.setSysId(m_source.getSourceId()); List<SystemsIntervalType> listOfInterval = system.getInterval(); for (int i = 0; i < m_numIntervals; ++i) { SystemsIntervalType intervalType = new SystemsIntervalType(); listOfInterval.add(intervalType); if (m_aiVec[i].m_results_file != null && m_aiVec[i].m_results_file.length() > 0) { intervalType.setMissing(false); } else { intervalType.setMissing(true); intervalType.setMissingReason(MISSING_INTERVAL_REASON_NO_CONNECTION); } intervalType.setIndex(i); double value = Double.parseDouble(SingleDigitFormatter.format(m_aiVec[i].m_anomaly_score * 100)); 
intervalType.setAnomalyScore(value); intervalType.setNumUniqueMsgIds(m_aiVec[i].m_num_unique_msg_ids); intervalType.setNumNewMessages(m_aiVec[i].m_numNewMessages); intervalType.setNumNeverSeenBeforeMessages(m_aiVec[i].m_numNeverSeenBeforeMessages); intervalType.setLimitedModel(m_aiVec[i].m_modelQualityIndicator.toString()); } writeToXML(systems, s_marshaller); }
|
/**
* Write out the period
* @param outputFileName
* @throws AdeException
*/
|
Write out the period
|
writePeriod
|
{
"repo_name": "openmainframeproject/ade",
"path": "ade-ext/src/main/java/org/openmainframe/ade/ext/output/ExtJaxbAnalyzedPeriodV2XmlStorer.java",
"license": "gpl-3.0",
"size": 29538
}
|
[
"java.util.Date",
"java.util.List",
"javax.xml.datatype.XMLGregorianCalendar",
"org.openmainframe.ade.exceptions.AdeException",
"org.openmainframe.ade.ext.xml.v2.Systems",
"org.openmainframe.ade.ext.xml.v2.SystemsIntervalType",
"org.openmainframe.ade.ext.xml.v2.SystemsSystemType",
"org.openmainframe.ade.impl.data.PeriodUtils"
] |
import java.util.Date; import java.util.List; import javax.xml.datatype.XMLGregorianCalendar; import org.openmainframe.ade.exceptions.AdeException; import org.openmainframe.ade.ext.xml.v2.Systems; import org.openmainframe.ade.ext.xml.v2.SystemsIntervalType; import org.openmainframe.ade.ext.xml.v2.SystemsSystemType; import org.openmainframe.ade.impl.data.PeriodUtils;
|
import java.util.*; import javax.xml.datatype.*; import org.openmainframe.ade.exceptions.*; import org.openmainframe.ade.ext.xml.v2.*; import org.openmainframe.ade.impl.data.*;
|
[
"java.util",
"javax.xml",
"org.openmainframe.ade"
] |
java.util; javax.xml; org.openmainframe.ade;
| 2,366,527
|
/**
 * Returns the initialization data for the given sitemap.<p>
 *
 * @param sitemapUri the site relative path
 *
 * @return the initialization data
 *
 * @throws CmsRpcException if something goes wrong
 */
CmsSitemapData prefetch(String sitemapUri) throws CmsRpcException;
|
CmsSitemapData prefetch(String sitemapUri) throws CmsRpcException;
|
/**
* Returns the initialization data for the given sitemap.<p>
*
* @param sitemapUri the site relative path
*
* @return the initialization data
*
* @throws CmsRpcException if something goes wrong
*/
|
Returns the initialization data for the given sitemap
|
prefetch
|
{
"repo_name": "victos/opencms-core",
"path": "src/org/opencms/ade/sitemap/shared/rpc/I_CmsSitemapService.java",
"license": "lgpl-2.1",
"size": 10751
}
|
[
"org.opencms.ade.sitemap.shared.CmsSitemapData",
"org.opencms.gwt.CmsRpcException"
] |
import org.opencms.ade.sitemap.shared.CmsSitemapData; import org.opencms.gwt.CmsRpcException;
|
import org.opencms.ade.sitemap.shared.*; import org.opencms.gwt.*;
|
[
"org.opencms.ade",
"org.opencms.gwt"
] |
org.opencms.ade; org.opencms.gwt;
| 2,339,208
|
public Observable<ServiceResponse<Page<ServiceEndpointPolicyInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) {
if (nextPageLink == null) {
throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
}
|
Observable<ServiceResponse<Page<ServiceEndpointPolicyInner>>> function(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException(STR); }
|
/**
* Gets all service endpoint Policies in a resource group.
*
* @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<ServiceEndpointPolicyInner> object wrapped in {@link ServiceResponse} if successful.
*/
|
Gets all service endpoint Policies in a resource group
|
listByResourceGroupNextSinglePageAsync
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2018_08_01/src/main/java/com/microsoft/azure/management/network/v2018_08_01/implementation/ServiceEndpointPoliciesInner.java",
"license": "mit",
"size": 81753
}
|
[
"com.microsoft.azure.Page",
"com.microsoft.rest.ServiceResponse"
] |
import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse;
|
import com.microsoft.azure.*; import com.microsoft.rest.*;
|
[
"com.microsoft.azure",
"com.microsoft.rest"
] |
com.microsoft.azure; com.microsoft.rest;
| 1,382,394
|
/**
 * Inform the view that the activity is resumed; the event is forwarded to
 * the GL thread.
 */
public void onResume() {
    Log.i("GLView", "Resume called");
    mGLThread.onResume();
}
|
void function() { Log.i(STR, STR); mGLThread.onResume(); }
|
/**
* Inform the view that the activity is resumed.
*/
|
Inform the view that the activity is resumed
|
onResume
|
{
"repo_name": "kuhnmi/jukefox",
"path": "JukefoxAndroid/src/ch/ethz/dcg/pancho3/view/tabs/opengl/GLView.java",
"license": "gpl-3.0",
"size": 13949
}
|
[
"ch.ethz.dcg.jukefox.commons.utils.Log"
] |
import ch.ethz.dcg.jukefox.commons.utils.Log;
|
import ch.ethz.dcg.jukefox.commons.utils.*;
|
[
"ch.ethz.dcg"
] |
ch.ethz.dcg;
| 94,631
|
/**
 * Always returns {@code null}.
 */
@Override
public QName getQName() {
    return null;
}
|
QName function() { return null; }
|
/**
* Always returns null.
*/
|
Always returns null
|
getQName
|
{
"repo_name": "lbchen/odl-mod",
"path": "opendaylight/sal/yang-prototype/code-generator/yang-model-parser-impl/src/main/java/org/opendaylight/controller/yang/parser/builder/impl/AugmentationSchemaBuilderImpl.java",
"license": "epl-1.0",
"size": 13232
}
|
[
"org.opendaylight.controller.yang.common.QName"
] |
import org.opendaylight.controller.yang.common.QName;
|
import org.opendaylight.controller.yang.common.*;
|
[
"org.opendaylight.controller"
] |
org.opendaylight.controller;
| 2,258,936
|
/**
 * Takes a set of record alignments per line of text and returns an array of
 * alignments per word. Lines of text that correspond to more than one record
 * are segmented automatically using domain-based heuristics (based on Percy
 * Liang's original alignment code).
 *
 * @param name       identifier of the example (unused here)
 * @param alignments one record alignment per line of text
 * @param text       the lines of text
 * @return one alignment entry per word
 */
private String[] mapGoldStandardAlignmentsWeatherGov(String name, Alignment[] alignments, String[] text)
{
    List<String> result = new ArrayList<>();
    for(int line = 0; line < alignments.length; line++)
    {
        Alignment current = alignments[line];
        if(current.size() != 1)
        {
            // Multi-record line: segment with the domain heuristics.
            result.addAll(Arrays.asList(segmentUsingPatternsWeatherGov(current, text[line])));
        }
        else if(current.getElements()[0].equals("-1"))
        {
            // "none" event line: (wrongly) assign it the alignment of the
            // next (or previous) non-none line.
            result.addAll(Arrays.asList(fillWithOneRecord(findNextNonNullAlignment(alignments, line), text[line])));
        }
        else
        {
            result.addAll(Arrays.asList(fillWithOneRecord(current, text[line])));
        }
    }
    return result.toArray(new String[0]);
}
|
String[] function(String name, Alignment[] alignments, String[] text) { List<String> out = new ArrayList<>(); for(int i = 0; i < alignments.length; i++) { Alignment alignment = alignments[i]; if(alignment.size() == 1) { if(alignment.getElements()[0].equals("-1")) { out.addAll(Arrays.asList(fillWithOneRecord(findNextNonNullAlignment(alignments, i), text[i]))); } else { out.addAll(Arrays.asList(fillWithOneRecord(alignment, text[i]))); } } else { out.addAll(Arrays.asList(segmentUsingPatternsWeatherGov(alignment, text[i]))); } } return out.toArray(new String[0]); }
|
/**
* Takes a set of record alignments per line of text and returns an array of alignments per word.
* We automatically segment lines of text that correspond to more than one records, using
* domain-based heuristics (based on Percy Liang's original alignment code).
* @param name
* @param alignments
* @param text
* @return
*/
|
Takes a set of record alignments per line of text and returns an array of alignments per word. We automatically segment lines of text that correspond to more than one records, using domain-based heuristics (based on Percy Liang's original alignment code)
|
mapGoldStandardAlignmentsWeatherGov
|
{
"repo_name": "sinantie/Generator",
"path": "src/induction/utils/ExportExamplesToEdusFile.java",
"license": "gpl-3.0",
"size": 32689
}
|
[
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List"
] |
import java.util.ArrayList; import java.util.Arrays; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,502,641
|
private void loadStoreDefinition
(Configuration storeDefinition,
Hashtable storesClass,
Hashtable storesParameters,
Hashtable childStores)
throws ConfigurationException, SlideException {
String storeName = storeDefinition.getAttribute("name");
String storeClassname = defaultStoreClassname;
try {
storeClassname = storeDefinition.getAttribute("classname");
} catch (ConfigurationException e) {
}
Enumeration storeParametersDefinitions =
storeDefinition.getConfigurations("parameter");
// Load descriptors store class
Class storeClass = null;
try {
storeClass = Class.forName(storeClassname);
} catch (Exception e) {
getLogger().log(e,LOG_CHANNEL, Logger.ERROR);
throw new SlideException(e.getMessage());
}
storesClass.put(storeName, storeClass);
// Load descriptor store parameters
Hashtable storeParameters = new Hashtable();
while (storeParametersDefinitions.hasMoreElements()) {
Configuration parameterDefinition = (Configuration)
storeParametersDefinitions.nextElement();
String parameterName = parameterDefinition.getAttribute("name");
String parameterValue = parameterDefinition.getValue();
storeParameters.put(parameterName, parameterValue);
}
storesParameters.put(storeName, storeParameters);
// Now reading the "child" stores
Hashtable currentStoreChildStores = new Hashtable();
// Loading node store (if any)
getChildStore (storeDefinition, NODE_STORE, currentStoreChildStores, storeParameters);
// Loading security store (if any)
getChildStore (storeDefinition, SECURITY_STORE, currentStoreChildStores, storeParameters);
// Loading lock store (if any)
getChildStore (storeDefinition, LOCK_STORE, currentStoreChildStores, storeParameters);
// Loading revision descriptors store (if any)
getChildStore (storeDefinition, REVISION_DESCRIPTORS_STORE, currentStoreChildStores, storeParameters);
// Loading revision descriptor store (if any)
getChildStore (storeDefinition, REVISION_DESCRIPTOR_STORE, currentStoreChildStores, storeParameters);
// Loading content store (if any)
getChildStore (storeDefinition, CONTENT_STORE, currentStoreChildStores, storeParameters);
// Loading descriptorindexstore store (if any)
getChildStore (storeDefinition, PROPERTIES_INDEX_STORE, currentStoreChildStores, storeParameters);
// Loading contentindexstore store (if any)
getChildStore (storeDefinition, CONTENT_INDEX_STORE, currentStoreChildStores, storeParameters);
// load default indexer, if no indexer defined
// Loading sequence store (if any)
getChildStore (storeDefinition, SEQUENCE_STORE, currentStoreChildStores, storeParameters);
childStores.put(storeName, currentStoreChildStores);
}
|
void function (Configuration storeDefinition, Hashtable storesClass, Hashtable storesParameters, Hashtable childStores) throws ConfigurationException, SlideException { String storeName = storeDefinition.getAttribute("name"); String storeClassname = defaultStoreClassname; try { storeClassname = storeDefinition.getAttribute(STR); } catch (ConfigurationException e) { } Enumeration storeParametersDefinitions = storeDefinition.getConfigurations(STR); Class storeClass = null; try { storeClass = Class.forName(storeClassname); } catch (Exception e) { getLogger().log(e,LOG_CHANNEL, Logger.ERROR); throw new SlideException(e.getMessage()); } storesClass.put(storeName, storeClass); Hashtable storeParameters = new Hashtable(); while (storeParametersDefinitions.hasMoreElements()) { Configuration parameterDefinition = (Configuration) storeParametersDefinitions.nextElement(); String parameterName = parameterDefinition.getAttribute("name"); String parameterValue = parameterDefinition.getValue(); storeParameters.put(parameterName, parameterValue); } storesParameters.put(storeName, storeParameters); Hashtable currentStoreChildStores = new Hashtable(); getChildStore (storeDefinition, NODE_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, SECURITY_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, LOCK_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, REVISION_DESCRIPTORS_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, REVISION_DESCRIPTOR_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, CONTENT_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, PROPERTIES_INDEX_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, CONTENT_INDEX_STORE, currentStoreChildStores, storeParameters); getChildStore (storeDefinition, SEQUENCE_STORE, currentStoreChildStores, storeParameters); 
childStores.put(storeName, currentStoreChildStores); }
|
/**
* Parse the store definition.
*
* @param storeDefinition store definition
* @param storesClass Class names of the stores
* @param storesParameters Parameters of the stores
* @param childStores Child stores
* @exception ConfigurationException Error parsing configuration file
* @exception SlideException Error loading the specified class
*/
|
Parse the store definition
|
loadStoreDefinition
|
{
"repo_name": "integrated/jakarta-slide-server",
"path": "src/share/org/apache/slide/common/Namespace.java",
"license": "apache-2.0",
"size": 43228
}
|
[
"java.util.Enumeration",
"java.util.Hashtable",
"org.apache.slide.util.conf.Configuration",
"org.apache.slide.util.conf.ConfigurationException",
"org.apache.slide.util.logger.Logger"
] |
import java.util.Enumeration; import java.util.Hashtable; import org.apache.slide.util.conf.Configuration; import org.apache.slide.util.conf.ConfigurationException; import org.apache.slide.util.logger.Logger;
|
import java.util.*; import org.apache.slide.util.conf.*; import org.apache.slide.util.logger.*;
|
[
"java.util",
"org.apache.slide"
] |
java.util; org.apache.slide;
| 1,366,550
|
public CmsUserSettingsBean loadSettings() {
CmsUserSettingsBean result = new CmsUserSettingsBean();
CmsDefaultUserSettings currentSettings = new CmsDefaultUserSettings();
currentSettings.init(m_preferences.getUserSettings().getUser());
for (I_CmsPreference pref : OpenCms.getWorkplaceManager().getDefaultUserSettings().getPreferences().values()) {
String tab = pref.getTab();
if (CmsGwtConstants.TAB_HIDDEN.equals(tab)) {
continue;
}
CmsXmlContentProperty prop2 = pref.getPropertyDefinition(m_cms);
String value = pref.getValue(currentSettings);
CmsXmlContentProperty resolvedProp = CmsXmlContentPropertyHelper.resolveMacrosInProperty(
prop2.withDefaultWidget("string"),
m_macroResolver);
result.addSetting(value, resolvedProp, CmsGwtConstants.TAB_BASIC.equals(tab));
}
addAccountInfo(result);
return result;
}
|
CmsUserSettingsBean function() { CmsUserSettingsBean result = new CmsUserSettingsBean(); CmsDefaultUserSettings currentSettings = new CmsDefaultUserSettings(); currentSettings.init(m_preferences.getUserSettings().getUser()); for (I_CmsPreference pref : OpenCms.getWorkplaceManager().getDefaultUserSettings().getPreferences().values()) { String tab = pref.getTab(); if (CmsGwtConstants.TAB_HIDDEN.equals(tab)) { continue; } CmsXmlContentProperty prop2 = pref.getPropertyDefinition(m_cms); String value = pref.getValue(currentSettings); CmsXmlContentProperty resolvedProp = CmsXmlContentPropertyHelper.resolveMacrosInProperty( prop2.withDefaultWidget(STR), m_macroResolver); result.addSetting(value, resolvedProp, CmsGwtConstants.TAB_BASIC.equals(tab)); } addAccountInfo(result); return result; }
|
/**
* Loads the current user's preferences into a CmsUserSettingsBean.<p>
*
* @return the bean representing the current user's preferences
*/
|
Loads the current user's preferences into a CmsUserSettingsBean
|
loadSettings
|
{
"repo_name": "sbonoc/opencms-core",
"path": "src/org/opencms/gwt/CmsClientUserSettingConverter.java",
"license": "lgpl-2.1",
"size": 8618
}
|
[
"org.opencms.configuration.CmsDefaultUserSettings",
"org.opencms.gwt.shared.CmsGwtConstants",
"org.opencms.gwt.shared.CmsUserSettingsBean",
"org.opencms.main.OpenCms",
"org.opencms.xml.content.CmsXmlContentProperty",
"org.opencms.xml.content.CmsXmlContentPropertyHelper"
] |
import org.opencms.configuration.CmsDefaultUserSettings; import org.opencms.gwt.shared.CmsGwtConstants; import org.opencms.gwt.shared.CmsUserSettingsBean; import org.opencms.main.OpenCms; import org.opencms.xml.content.CmsXmlContentProperty; import org.opencms.xml.content.CmsXmlContentPropertyHelper;
|
import org.opencms.configuration.*; import org.opencms.gwt.shared.*; import org.opencms.main.*; import org.opencms.xml.content.*;
|
[
"org.opencms.configuration",
"org.opencms.gwt",
"org.opencms.main",
"org.opencms.xml"
] |
org.opencms.configuration; org.opencms.gwt; org.opencms.main; org.opencms.xml;
| 1,483,704
|
EClass getChoice();
|
EClass getChoice();
|
/**
* Returns the meta object for class '{@link br.ufpe.ines.decode.decode.artifacts.questionnaire.Choice <em>Choice</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Choice</em>'.
* @see br.ufpe.ines.decode.decode.artifacts.questionnaire.Choice
* @generated
*/
|
Returns the meta object for class '<code>br.ufpe.ines.decode.decode.artifacts.questionnaire.Choice Choice</code>'.
|
getChoice
|
{
"repo_name": "netuh/DecodePlatformPlugin",
"path": "br.ufpe.ines.decode/bundles/br.ufpe.ines.decode.model/src/br/ufpe/ines/decode/decode/artifacts/questionnaire/QuestionnairePackage.java",
"license": "gpl-3.0",
"size": 38583
}
|
[
"org.eclipse.emf.ecore.EClass"
] |
import org.eclipse.emf.ecore.EClass;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,290,525
|
@Deployment
public void testSimpleAutomaticSubProcess() {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("simpleSubProcessAutomatic");
assertTrue(pi.isEnded());
assertProcessEnded(pi.getId());
}
|
void function() { ProcessInstance pi = runtimeService.startProcessInstanceByKey(STR); assertTrue(pi.isEnded()); assertProcessEnded(pi.getId()); }
|
/**
* Same test case as before, but now with all automatic steps
*/
|
Same test case as before, but now with all automatic steps
|
testSimpleAutomaticSubProcess
|
{
"repo_name": "roberthafner/flowable-engine",
"path": "modules/flowable-engine/src/test/java/org/activiti/engine/test/bpmn/subprocess/SubProcessTest.java",
"license": "apache-2.0",
"size": 19307
}
|
[
"org.activiti.engine.runtime.ProcessInstance"
] |
import org.activiti.engine.runtime.ProcessInstance;
|
import org.activiti.engine.runtime.*;
|
[
"org.activiti.engine"
] |
org.activiti.engine;
| 1,697,944
|
public static final String getCONTENT_ITEM_TYPE() {
return ContentResolver.CURSOR_ITEM_BASE_TYPE + "/vnd.jsonsyncadapter.comments";
}
|
static final String function() { return ContentResolver.CURSOR_ITEM_BASE_TYPE + STR; }
|
/**
* MIME type for individual entries.
*/
|
MIME type for individual entries
|
getCONTENT_ITEM_TYPE
|
{
"repo_name": "MaxSmile/JsonSyncAdapter",
"path": "JsonSyncAdapter/Application/src/main/java/com/vasilkoff/android/Sync/model/CommentObject.java",
"license": "mit",
"size": 997
}
|
[
"android.content.ContentResolver"
] |
import android.content.ContentResolver;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 2,747,761
|
public void readDatabases(JobMeta jobMeta, boolean overWriteShared) throws KettleException {
try {
ObjectId dbids[] = repository.getDatabaseIDs(false);
for (int i = 0; i < dbids.length; i++) {
DatabaseMeta databaseMeta = repository.loadDatabaseMeta(dbids[i], null); // reads last version
databaseMeta.shareVariablesWith(jobMeta);
// See if there already is one in the transformation
//
DatabaseMeta check = jobMeta.findDatabase(databaseMeta.getName());
// We only add, never overwrite database connections.
//
if (check == null || overWriteShared)
{
if (databaseMeta.getName() != null) {
jobMeta.addOrReplaceDatabase(databaseMeta);
if (!overWriteShared)
databaseMeta.setChanged(false);
}
}
}
jobMeta.setChanged(false);
} catch (KettleDatabaseException dbe) {
throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Log.UnableToReadDatabaseIDSFromRepository"), dbe); //$NON-NLS-1$
} catch (KettleException ke) {
throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Log.UnableToReadDatabasesFromRepository"), ke); //$NON-NLS-1$
}
}
|
void function(JobMeta jobMeta, boolean overWriteShared) throws KettleException { try { ObjectId dbids[] = repository.getDatabaseIDs(false); for (int i = 0; i < dbids.length; i++) { DatabaseMeta databaseMeta = repository.loadDatabaseMeta(dbids[i], null); databaseMeta.shareVariablesWith(jobMeta); { if (databaseMeta.getName() != null) { jobMeta.addOrReplaceDatabase(databaseMeta); if (!overWriteShared) databaseMeta.setChanged(false); } } } jobMeta.setChanged(false); } catch (KettleDatabaseException dbe) { throw new KettleException(BaseMessages.getString(PKG, STR), dbe); } catch (KettleException ke) { throw new KettleException(BaseMessages.getString(PKG, STR), ke); } }
|
/**
* Read the database connections in the repository and add them to this job
* if they are not yet present.
*
* @param jobMeta the job to put the database connections in
* @param overWriteShared set to true if you want to overwrite shared connections while loading.
* @throws KettleException
*/
|
Read the database connections in the repository and add them to this job if they are not yet present
|
readDatabases
|
{
"repo_name": "lihongqiang/kettle-4.4.0-stable",
"path": "src/org/pentaho/di/repository/kdr/delegates/KettleDatabaseRepositoryJobDelegate.java",
"license": "apache-2.0",
"size": 40026
}
|
[
"org.pentaho.di.core.database.DatabaseMeta",
"org.pentaho.di.core.exception.KettleDatabaseException",
"org.pentaho.di.core.exception.KettleException",
"org.pentaho.di.i18n.BaseMessages",
"org.pentaho.di.job.JobMeta",
"org.pentaho.di.repository.ObjectId"
] |
import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.repository.ObjectId;
|
import org.pentaho.di.core.database.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.i18n.*; import org.pentaho.di.job.*; import org.pentaho.di.repository.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 2,155,843
|
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<WorkspaceInner> createAsync(
String resourceGroupName, String workspaceName, WorkspaceCreateParameters parameters) {
return beginCreateAsync(resourceGroupName, workspaceName, parameters)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<WorkspaceInner> function( String resourceGroupName, String workspaceName, WorkspaceCreateParameters parameters) { return beginCreateAsync(resourceGroupName, workspaceName, parameters) .last() .flatMap(this.client::getLroFinalResultOrError); }
|
/**
* Creates a Workspace.
*
* @param resourceGroupName Name of the resource group to which the resource belongs.
* @param workspaceName The name of the workspace. Workspace names can only contain a combination of alphanumeric
* characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long.
* @param parameters Workspace creation parameters.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return batch AI Workspace information.
*/
|
Creates a Workspace
|
createAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/batchai/azure-resourcemanager-batchai/src/main/java/com/azure/resourcemanager/batchai/implementation/WorkspacesClientImpl.java",
"license": "mit",
"size": 77030
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.batchai.fluent.models.WorkspaceInner",
"com.azure.resourcemanager.batchai.models.WorkspaceCreateParameters"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.batchai.fluent.models.WorkspaceInner; import com.azure.resourcemanager.batchai.models.WorkspaceCreateParameters;
|
import com.azure.core.annotation.*; import com.azure.resourcemanager.batchai.fluent.models.*; import com.azure.resourcemanager.batchai.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 1,404,192
|
void sendRegistrationSyncSubtree(RegistrationSubtree regSubtree, BatchingTask batchingTask) {
Preconditions.checkState(internalScheduler.isRunningOnThread(), "Not on internal thread");
batcher.addRegSubtree(regSubtree);
logger.info("Adding subtree: %s", regSubtree);
batchingTask.ensureScheduled("Send-reg-sync");
}
|
void sendRegistrationSyncSubtree(RegistrationSubtree regSubtree, BatchingTask batchingTask) { Preconditions.checkState(internalScheduler.isRunningOnThread(), STR); batcher.addRegSubtree(regSubtree); logger.info(STR, regSubtree); batchingTask.ensureScheduled(STR); }
|
/**
* Sends a single registration subtree to the server.
*
* @param regSubtree subtree to send
*/
|
Sends a single registration subtree to the server
|
sendRegistrationSyncSubtree
|
{
"repo_name": "windyuuy/opera",
"path": "chromium/src/third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/ProtocolHandler.java",
"license": "bsd-3-clause",
"size": 31235
}
|
[
"com.google.common.base.Preconditions",
"com.google.ipc.invalidation.ticl.InvalidationClientCore",
"com.google.protos.ipc.invalidation.ClientProtocol"
] |
import com.google.common.base.Preconditions; import com.google.ipc.invalidation.ticl.InvalidationClientCore; import com.google.protos.ipc.invalidation.ClientProtocol;
|
import com.google.common.base.*; import com.google.ipc.invalidation.ticl.*; import com.google.protos.ipc.invalidation.*;
|
[
"com.google.common",
"com.google.ipc",
"com.google.protos"
] |
com.google.common; com.google.ipc; com.google.protos;
| 1,762,028
|
void onMessageCreated(Message message, Session session, Exchange exchange, Throwable cause);
|
void onMessageCreated(Message message, Session session, Exchange exchange, Throwable cause);
|
/**
* Callback when the JMS message has <i>just</i> been created, which allows custom modifications afterwards.
*
* @param exchange the current exchange
* @param session the JMS session used to create the message
* @param cause optional exception occurred that should be sent as reply instead of a regular body
*/
|
Callback when the JMS message has just been created, which allows custom modifications afterwards
|
onMessageCreated
|
{
"repo_name": "nikhilvibhav/camel",
"path": "components/camel-jms/src/main/java/org/apache/camel/component/jms/MessageCreatedStrategy.java",
"license": "apache-2.0",
"size": 1712
}
|
[
"javax.jms.Message",
"javax.jms.Session",
"org.apache.camel.Exchange"
] |
import javax.jms.Message; import javax.jms.Session; import org.apache.camel.Exchange;
|
import javax.jms.*; import org.apache.camel.*;
|
[
"javax.jms",
"org.apache.camel"
] |
javax.jms; org.apache.camel;
| 1,116,986
|
public void testEntireObjectNestedSearch2() throws Exception
{
OrderLine searchObject = new OrderLine();
Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.onetoone.bidirectional.OrderLine",searchObject );
assertNotNull(results);
assertEquals(5,results.size());
for(Iterator i = results.iterator();i.hasNext();)
{
OrderLine result = (OrderLine)i.next();
toXML(result);
validateClassElements(result);
validateAttribute(result,"id",result.getId());
validateAttribute(result,"name",result.getName());
assertTrue(validateXMLData(result, searchObject.getClass()));
OrderLine result2 = (OrderLine)fromXML(result);
assertNotNull(result2);
assertNotNull(result2.getId());
assertNotNull(result2.getName());
}
}
|
void function() throws Exception { OrderLine searchObject = new OrderLine(); Collection results = getApplicationService().search(STR,searchObject ); assertNotNull(results); assertEquals(5,results.size()); for(Iterator i = results.iterator();i.hasNext();) { OrderLine result = (OrderLine)i.next(); toXML(result); validateClassElements(result); validateAttribute(result,"id",result.getId()); validateAttribute(result,"name",result.getName()); assertTrue(validateXMLData(result, searchObject.getClass())); OrderLine result2 = (OrderLine)fromXML(result); assertNotNull(result2); assertNotNull(result2.getId()); assertNotNull(result2.getName()); } }
|
/**
* Uses Nested Search Criteria for search
* Verifies that the results are returned
* Verifies size of the result set
* Verifies that none of the attribute is null
*
* @throws Exception
*/
|
Uses Nested Search Criteria for search Verifies that the results are returned Verifies size of the result set Verifies that none of the attribute is null
|
testEntireObjectNestedSearch2
|
{
"repo_name": "NCIP/cacore-sdk",
"path": "sdk-toolkit/example-project/junit/src/test/xml/data/O2OBidirectionalXMLDataTest.java",
"license": "bsd-3-clause",
"size": 11189
}
|
[
"gov.nih.nci.cacoresdk.domain.onetoone.bidirectional.OrderLine",
"java.util.Collection",
"java.util.Iterator"
] |
import gov.nih.nci.cacoresdk.domain.onetoone.bidirectional.OrderLine; import java.util.Collection; import java.util.Iterator;
|
import gov.nih.nci.cacoresdk.domain.onetoone.bidirectional.*; import java.util.*;
|
[
"gov.nih.nci",
"java.util"
] |
gov.nih.nci; java.util;
| 528,643
|
public void settings (View view)
{
cVibration.activateVibrationShort ();
Intent settings = new Intent(this, Settings.class);
settings.addFlags (Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity (settings);
}
|
void function (View view) { cVibration.activateVibrationShort (); Intent settings = new Intent(this, Settings.class); settings.addFlags (Intent.FLAG_ACTIVITY_CLEAR_TOP); startActivity (settings); }
|
/**
* Vibrates short, creates a new instance linked to settings, starts the
* activity
*
* @param view : the current view the device is in
*/
|
Vibrates short, creates a new instance linked to settings, starts the activity
|
settings
|
{
"repo_name": "JormungandrGames/Spritandroidimals",
"path": "app/src/main/java/cs/pacificu/edu/spritandroidimals/title_screen.java",
"license": "gpl-2.0",
"size": 5945
}
|
[
"android.content.Intent",
"android.view.View"
] |
import android.content.Intent; import android.view.View;
|
import android.content.*; import android.view.*;
|
[
"android.content",
"android.view"
] |
android.content; android.view;
| 2,171,328
|
public boolean isFirstTime()
{
// If returns false we set it to false and return true
if(!this.sharedPreferences.contains(this.context.getString(R.string.pref_is_first_time)))
{
this.preferenceEditor.putBoolean(this.context.getString(R.string.pref_is_first_time), false);
this.preferenceEditor.commit();
Log.d("glowing-smote", "Is first time..");
return true;
}
Log.d("glowing-smote", "Isn't first time.");
return false;
}
|
boolean function() { if(!this.sharedPreferences.contains(this.context.getString(R.string.pref_is_first_time))) { this.preferenceEditor.putBoolean(this.context.getString(R.string.pref_is_first_time), false); this.preferenceEditor.commit(); Log.d(STR, STR); return true; } Log.d(STR, STR); return false; }
|
/**
* Method to check if it is the users first time
*
* @return boolean isFirstTime
*/
|
Method to check if it is the users first time
|
isFirstTime
|
{
"repo_name": "RyanDawkins/glowing-smote",
"path": "app/src/main/java/com/ryanddawkins/glowing_smote/Settings.java",
"license": "apache-2.0",
"size": 2802
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 711,886
|
public static Logger getLoggerForShortName(String category) {
return Hierarchy.getDefaultHierarchy().getLoggerFor(removePrefix(category));
}
|
static Logger function(String category) { return Hierarchy.getDefaultHierarchy().getLoggerFor(removePrefix(category)); }
|
/**
* Get the Logger for a class.
*
* @param category - the full name of the logger category, this will have the prefix removed.
*
* @return Logger
*/
|
Get the Logger for a class
|
getLoggerForShortName
|
{
"repo_name": "ubikfsabbe/jmeter",
"path": "src/jorphan/org/apache/jorphan/logging/LoggingManager.java",
"license": "apache-2.0",
"size": 14455
}
|
[
"org.apache.log.Hierarchy",
"org.apache.log.Logger"
] |
import org.apache.log.Hierarchy; import org.apache.log.Logger;
|
import org.apache.log.*;
|
[
"org.apache.log"
] |
org.apache.log;
| 1,521,606
|
//-------------//
// headHasStem //
//-------------//
private boolean headHasStem (Inter head)
{
if (head.isVip()) {
logger.info("VIP checkHeadHasStem for {}", head);
}
// Check if the head has a stem relation
if (!sig.hasRelation(head, HeadStemRelation.class)) {
if (head.isVip()) {
logger.info("VIP no stem for {}", head);
}
return false;
}
return true;
}
|
boolean function (Inter head) { if (head.isVip()) { logger.info(STR, head); } if (!sig.hasRelation(head, HeadStemRelation.class)) { if (head.isVip()) { logger.info(STR, head); } return false; } return true; }
|
/**
* Check the (stem) head has a link to a stem
*
* @param head the head to check
* @return true if OK
*/
|
Check the (stem) head has a link to a stem
|
headHasStem
|
{
"repo_name": "Audiveris/audiveris",
"path": "src/main/org/audiveris/omr/sig/SigReducer.java",
"license": "agpl-3.0",
"size": 72261
}
|
[
"org.audiveris.omr.sig.inter.Inter",
"org.audiveris.omr.sig.relation.HeadStemRelation"
] |
import org.audiveris.omr.sig.inter.Inter; import org.audiveris.omr.sig.relation.HeadStemRelation;
|
import org.audiveris.omr.sig.inter.*; import org.audiveris.omr.sig.relation.*;
|
[
"org.audiveris.omr"
] |
org.audiveris.omr;
| 1,990,054
|
static Status buildStatus(ParameterSet options) {
Status command = new Status();
command.setLimit(parseInt(options, "limit", 50));
command.setOffset(parseInt(options, "offset", 0));
return command;
}
|
static Status buildStatus(ParameterSet options) { Status command = new Status(); command.setLimit(parseInt(options, "limit", 50)); command.setOffset(parseInt(options, STR, 0)); return command; }
|
/**
* Builds the {@link Status} command.
*
* @param options the parameter set
* @return the built command
*/
|
Builds the <code>Status</code> command
|
buildStatus
|
{
"repo_name": "markles/GeoGit",
"path": "src/web/api/src/main/java/org/geogit/web/api/CommandBuilder.java",
"license": "bsd-3-clause",
"size": 20421
}
|
[
"org.geogit.web.api.commands.Status"
] |
import org.geogit.web.api.commands.Status;
|
import org.geogit.web.api.commands.*;
|
[
"org.geogit.web"
] |
org.geogit.web;
| 1,038,086
|
@Override
public String getHtmlData(RequestModel req) {
SessionModel session = req.getSession();
String html = "";
if (authorized(req.getAuthzLvl(), AUTH_CONTEXT_CERT_MGMNT)) {
if (req.getAction().equals("loadMain")) {
return getCertMgmntAreaHtml(req);
}
if (req.getAction().equals("loadElement")) {
return loadElementData(req);
}
//Called by an iFrame in page load to retrieve the iFrame content
if (req.getAction().equals("loadFrameInfo")) {
return getCertMgmntAreaHtml(req);
}
if (req.getAction().equals("frameLoad")) {
String parameter = req.getParameter();
if (req.getId().equals(VIEW_BUTTON)) {
setViewButtonParameter(parameter, req);
}
}
} else {
html = "You are not authorized to access this information";
}
return html;
}
|
String function(RequestModel req) { SessionModel session = req.getSession(); String html = STRloadMainSTRloadElementSTRloadFrameInfoSTRframeLoadSTRYou are not authorized to access this information"; } return html; }
|
/**
* Generates the html code for functions under the Certificate Management
* menu
*
* @param req Data related to the Http request from the client.
* @return html response data
*/
|
Generates the html code for functions under the Certificate Management menu
|
getHtmlData
|
{
"repo_name": "elegnamnden/tsl-trust",
"path": "admin-weblogic/src/main/java/se/tillvaxtverket/tsltrust/weblogic/workareas/CertManagementArea.java",
"license": "gpl-3.0",
"size": 15799
}
|
[
"se.tillvaxtverket.tsltrust.weblogic.models.RequestModel",
"se.tillvaxtverket.tsltrust.weblogic.models.SessionModel"
] |
import se.tillvaxtverket.tsltrust.weblogic.models.RequestModel; import se.tillvaxtverket.tsltrust.weblogic.models.SessionModel;
|
import se.tillvaxtverket.tsltrust.weblogic.models.*;
|
[
"se.tillvaxtverket.tsltrust"
] |
se.tillvaxtverket.tsltrust;
| 1,123,939
|
OAuth20RefreshToken create(Service service, Authentication authentication,
TicketGrantingTicket ticketGrantingTicket,
Collection<String> scopes,
String clientId,
String accessToken,
Map<String, Map<String, Object>> requestClaims);
|
OAuth20RefreshToken create(Service service, Authentication authentication, TicketGrantingTicket ticketGrantingTicket, Collection<String> scopes, String clientId, String accessToken, Map<String, Map<String, Object>> requestClaims);
|
/**
* Create a refresh token.
*
* @param service the service
* @param authentication the authentication
* @param ticketGrantingTicket the ticket granting ticket
* @param scopes the scopes
* @param clientId the client id
* @param accessToken the access token created with this refresh token
* @param requestClaims the request claims
* @return the refresh token
*/
|
Create a refresh token
|
create
|
{
"repo_name": "pdrados/cas",
"path": "support/cas-server-support-oauth-api/src/main/java/org/apereo/cas/ticket/refreshtoken/OAuth20RefreshTokenFactory.java",
"license": "apache-2.0",
"size": 1340
}
|
[
"java.util.Collection",
"java.util.Map",
"org.apereo.cas.authentication.Authentication",
"org.apereo.cas.authentication.principal.Service",
"org.apereo.cas.ticket.TicketGrantingTicket"
] |
import java.util.Collection; import java.util.Map; import org.apereo.cas.authentication.Authentication; import org.apereo.cas.authentication.principal.Service; import org.apereo.cas.ticket.TicketGrantingTicket;
|
import java.util.*; import org.apereo.cas.authentication.*; import org.apereo.cas.authentication.principal.*; import org.apereo.cas.ticket.*;
|
[
"java.util",
"org.apereo.cas"
] |
java.util; org.apereo.cas;
| 2,269,459
|
@SmallTest
@Feature({"Android-WebView"})
public void testNoProxy() throws Exception {
checkMapping("ftp://example.com/", "DIRECT");
checkMapping("http://example.com/", "DIRECT");
checkMapping("https://example.com/", "DIRECT");
}
|
@Feature({STR}) void function() throws Exception { checkMapping(STRhttp: checkMapping("https: }
|
/**
* Test direct mapping when no proxy defined.
*
* @throws Exception
*/
|
Test direct mapping when no proxy defined
|
testNoProxy
|
{
"repo_name": "junmin-zhu/chromium-rivertrail",
"path": "net/android/javatests/src/org/chromium/net/AndroidProxySelectorTest.java",
"license": "bsd-3-clause",
"size": 11126
}
|
[
"org.chromium.base.test.util.Feature"
] |
import org.chromium.base.test.util.Feature;
|
import org.chromium.base.test.util.*;
|
[
"org.chromium.base"
] |
org.chromium.base;
| 2,260,615
|
private void startDistributedUpgradeIfNeeded() throws IOException {
UpgradeManagerDatanode um = DataNode.getDataNode().upgradeManager;
assert um != null : "DataNode.upgradeManager is null.";
if(!um.getUpgradeState())
return;
um.setUpgradeState(false, um.getUpgradeVersion());
um.startUpgrade();
return;
}
|
void function() throws IOException { UpgradeManagerDatanode um = DataNode.getDataNode().upgradeManager; assert um != null : STR; if(!um.getUpgradeState()) return; um.setUpgradeState(false, um.getUpgradeVersion()); um.startUpgrade(); return; }
|
/**
* Start distributed upgrade if it should be initiated by the data-node.
*/
|
Start distributed upgrade if it should be initiated by the data-node
|
startDistributedUpgradeIfNeeded
|
{
"repo_name": "sbyoun/i-mapreduce",
"path": "src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java",
"license": "apache-2.0",
"size": 57854
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,899,774
|
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public int testEncode() {
Dictionary dictionary = new Dictionary(dictionaryVector, new DictionaryEncoding(1L, false, null));
final ValueVector encoded = DictionaryEncoder.encode(vector, dictionary);
encoded.close();
return 0;
}
|
@BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) int function() { Dictionary dictionary = new Dictionary(dictionaryVector, new DictionaryEncoding(1L, false, null)); final ValueVector encoded = DictionaryEncoder.encode(vector, dictionary); encoded.close(); return 0; }
|
/**
* Test encode for {@link DictionaryEncoder}.
* @return useless. To avoid DCE by JIT.
*/
|
Test encode for <code>DictionaryEncoder</code>
|
testEncode
|
{
"repo_name": "cpcloud/arrow",
"path": "java/performance/src/test/java/org/apache/arrow/vector/dictionary/DictionaryEncoderBenchmarks.java",
"license": "apache-2.0",
"size": 4471
}
|
[
"java.util.concurrent.TimeUnit",
"org.apache.arrow.vector.ValueVector",
"org.apache.arrow.vector.types.pojo.DictionaryEncoding",
"org.openjdk.jmh.annotations.BenchmarkMode",
"org.openjdk.jmh.annotations.Mode",
"org.openjdk.jmh.annotations.OutputTimeUnit"
] |
import java.util.concurrent.TimeUnit; import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.types.pojo.DictionaryEncoding; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit;
|
import java.util.concurrent.*; import org.apache.arrow.vector.*; import org.apache.arrow.vector.types.pojo.*; import org.openjdk.jmh.annotations.*;
|
[
"java.util",
"org.apache.arrow",
"org.openjdk.jmh"
] |
java.util; org.apache.arrow; org.openjdk.jmh;
| 2,672,353
|
void onFailedToOpen(RewardedAd ad, String reason);
|
void onFailedToOpen(RewardedAd ad, String reason);
|
/**
* Called when a rewarded ad has failed to open.
*
* @param ad the rewarded ad
* @param reason the reason for the failure
*/
|
Called when a rewarded ad has failed to open
|
onFailedToOpen
|
{
"repo_name": "deltaDNA/android-smartads-sdk",
"path": "library/src/main/java/com/deltadna/android/sdk/ads/listeners/RewardedAdsListener.java",
"license": "apache-2.0",
"size": 1825
}
|
[
"com.deltadna.android.sdk.ads.RewardedAd"
] |
import com.deltadna.android.sdk.ads.RewardedAd;
|
import com.deltadna.android.sdk.ads.*;
|
[
"com.deltadna.android"
] |
com.deltadna.android;
| 2,806,079
|
public String getUniqueUserId()
{
// If we don't have a cached id - see if we previously
// saved one and load it in. Otherwise generate a new
// one now and save it.
if ( mUniqueUserId == null )
{
SharedPreferences sharedPreferences = mApplicationContext.getSharedPreferences( SHARED_PREFERENCES_NAME, Context.MODE_PRIVATE );
String uniqueUserId = sharedPreferences.getString( SHARED_PREFERENCES_KEY_UNIQUE_USER_ID, null );
if ( uniqueUserId == null )
{
uniqueUserId = UUID.randomUUID().toString();
sharedPreferences
.edit()
.putString( SHARED_PREFERENCES_KEY_UNIQUE_USER_ID, uniqueUserId )
.commit();
}
mUniqueUserId = uniqueUserId;
}
return ( mUniqueUserId );
}
|
String function() { if ( mUniqueUserId == null ) { SharedPreferences sharedPreferences = mApplicationContext.getSharedPreferences( SHARED_PREFERENCES_NAME, Context.MODE_PRIVATE ); String uniqueUserId = sharedPreferences.getString( SHARED_PREFERENCES_KEY_UNIQUE_USER_ID, null ); if ( uniqueUserId == null ) { uniqueUserId = UUID.randomUUID().toString(); sharedPreferences .edit() .putString( SHARED_PREFERENCES_KEY_UNIQUE_USER_ID, uniqueUserId ) .commit(); } mUniqueUserId = uniqueUserId; } return ( mUniqueUserId ); }
|
/*****************************************************
*
* Returns a unique id representing the user. This is
* generated and then persisted.
*
*****************************************************/
|
Returns a unique id representing the user. This is generated and then persisted
|
getUniqueUserId
|
{
"repo_name": "bearprada/Android-Print-SDK",
"path": "KitePrintSDK/src/main/java/ly/kite/KiteSDK.java",
"license": "mit",
"size": 26068
}
|
[
"android.content.Context",
"android.content.SharedPreferences",
"java.util.UUID"
] |
import android.content.Context; import android.content.SharedPreferences; import java.util.UUID;
|
import android.content.*; import java.util.*;
|
[
"android.content",
"java.util"
] |
android.content; java.util;
| 261,302
|
@Override
public boolean onPrepareActionMode(ActionMode actionMode, Menu menu) {
actionMode.setTitle(Integer.toString(getSelectedItemCount()));
MenuItem archiveItem = menu.findItem(R.id.action_archive);
MenuItem unarchiveItem = menu.findItem(R.id.action_unarchive);
MenuItem deleteItem = menu.findItem(R.id.action_delete);
MenuItem restoreItem = menu.findItem(R.id.action_restore);
Note.Folder currentFolder = getCurrentFolder();
if (currentFolder == Note.Folder.FOLDER_LIVE) {
archiveItem.setVisible(true);
unarchiveItem.setVisible(false);
deleteItem.setVisible(true);
restoreItem.setVisible(false);
} else if (currentFolder == Note.Folder.FOLDER_ARCHIVE) {
archiveItem.setVisible(false);
unarchiveItem.setVisible(true);
deleteItem.setVisible(true);
restoreItem.setVisible(false);
} else if (currentFolder == Note.Folder.FOLDER_TRASH) {
archiveItem.setVisible(false);
unarchiveItem.setVisible(false);
deleteItem.setVisible(true);
restoreItem.setVisible(true);
} else if (currentFolder == Note.Folder.FOLDER_TAG) {
archiveItem.setVisible(true);
unarchiveItem.setVisible(false);
deleteItem.setVisible(true);
restoreItem.setVisible(false);
}
return true;
}
|
boolean function(ActionMode actionMode, Menu menu) { actionMode.setTitle(Integer.toString(getSelectedItemCount())); MenuItem archiveItem = menu.findItem(R.id.action_archive); MenuItem unarchiveItem = menu.findItem(R.id.action_unarchive); MenuItem deleteItem = menu.findItem(R.id.action_delete); MenuItem restoreItem = menu.findItem(R.id.action_restore); Note.Folder currentFolder = getCurrentFolder(); if (currentFolder == Note.Folder.FOLDER_LIVE) { archiveItem.setVisible(true); unarchiveItem.setVisible(false); deleteItem.setVisible(true); restoreItem.setVisible(false); } else if (currentFolder == Note.Folder.FOLDER_ARCHIVE) { archiveItem.setVisible(false); unarchiveItem.setVisible(true); deleteItem.setVisible(true); restoreItem.setVisible(false); } else if (currentFolder == Note.Folder.FOLDER_TRASH) { archiveItem.setVisible(false); unarchiveItem.setVisible(false); deleteItem.setVisible(true); restoreItem.setVisible(true); } else if (currentFolder == Note.Folder.FOLDER_TAG) { archiveItem.setVisible(true); unarchiveItem.setVisible(false); deleteItem.setVisible(true); restoreItem.setVisible(false); } return true; }
|
/**
* Called when menu is invalidated. Updates the title that shows
* count of selected items.
*/
|
Called when menu is invalidated. Updates the title that shows count of selected items
|
onPrepareActionMode
|
{
"repo_name": "vishesh/sealnote",
"path": "Sealnote/src/main/java/com/twistedplane/sealnote/internal/MultiChoiceCallback.java",
"license": "mit",
"size": 7831
}
|
[
"android.view.ActionMode",
"android.view.Menu",
"android.view.MenuItem",
"com.twistedplane.sealnote.data.Note"
] |
import android.view.ActionMode; import android.view.Menu; import android.view.MenuItem; import com.twistedplane.sealnote.data.Note;
|
import android.view.*; import com.twistedplane.sealnote.data.*;
|
[
"android.view",
"com.twistedplane.sealnote"
] |
android.view; com.twistedplane.sealnote;
| 2,652,797
|
protected HashMap<String, Object> getHttpHeaders(final Environment environment) {
val headersPassed = getPropertyFromEnvironment(environment, "headers");
val headers = new HashMap<String, Object>();
if (StringUtils.isNotBlank(headersPassed)) {
Arrays.stream(headersPassed.split(";")).forEach(headerAndValue -> {
val values = Splitter.on(":").splitToList(headerAndValue);
if (values.size() == 2) {
headers.put(values.get(0), values.get(1));
}
});
}
return headers;
}
|
HashMap<String, Object> function(final Environment environment) { val headersPassed = getPropertyFromEnvironment(environment, STR); val headers = new HashMap<String, Object>(); if (StringUtils.isNotBlank(headersPassed)) { Arrays.stream(headersPassed.split(";")).forEach(headerAndValue -> { val values = Splitter.on(":").splitToList(headerAndValue); if (values.size() == 2) { headers.put(values.get(0), values.get(1)); } }); } return headers; }
|
/**
* Gets http headers.
*
* @param environment the environment
* @return the http headers
*/
|
Gets http headers
|
getHttpHeaders
|
{
"repo_name": "pdrados/cas",
"path": "support/cas-server-support-configuration-cloud-rest/src/main/java/org/apereo/cas/config/RestfulPropertySourceLocator.java",
"license": "apache-2.0",
"size": 3860
}
|
[
"com.google.common.base.Splitter",
"java.util.Arrays",
"java.util.HashMap",
"org.apache.commons.lang3.StringUtils",
"org.springframework.core.env.Environment"
] |
import com.google.common.base.Splitter; import java.util.Arrays; import java.util.HashMap; import org.apache.commons.lang3.StringUtils; import org.springframework.core.env.Environment;
|
import com.google.common.base.*; import java.util.*; import org.apache.commons.lang3.*; import org.springframework.core.env.*;
|
[
"com.google.common",
"java.util",
"org.apache.commons",
"org.springframework.core"
] |
com.google.common; java.util; org.apache.commons; org.springframework.core;
| 1,021,067
|
try {
ModelFactory theModelFactory = (ModelFactory)EPackage.Registry.INSTANCE.getEFactory(ModelPackage.eNS_URI);
if (theModelFactory != null) {
return theModelFactory;
}
}
catch (Exception exception) {
EcorePlugin.INSTANCE.log(exception);
}
return new ModelFactoryImpl();
}
public ModelFactoryImpl() {
super();
}
|
try { ModelFactory theModelFactory = (ModelFactory)EPackage.Registry.INSTANCE.getEFactory(ModelPackage.eNS_URI); if (theModelFactory != null) { return theModelFactory; } } catch (Exception exception) { EcorePlugin.INSTANCE.log(exception); } return new ModelFactoryImpl(); } public ModelFactoryImpl() { super(); }
|
/**
* Creates the default factory implementation.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
Creates the default factory implementation.
|
init
|
{
"repo_name": "Nasdanika/server",
"path": "org.nasdanika.osgi.model/src/org/nasdanika/osgi/model/impl/ModelFactoryImpl.java",
"license": "epl-1.0",
"size": 6606
}
|
[
"org.eclipse.emf.ecore.EPackage",
"org.eclipse.emf.ecore.plugin.EcorePlugin",
"org.nasdanika.osgi.model.ModelFactory",
"org.nasdanika.osgi.model.ModelPackage"
] |
import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.plugin.EcorePlugin; import org.nasdanika.osgi.model.ModelFactory; import org.nasdanika.osgi.model.ModelPackage;
|
import org.eclipse.emf.ecore.*; import org.eclipse.emf.ecore.plugin.*; import org.nasdanika.osgi.model.*;
|
[
"org.eclipse.emf",
"org.nasdanika.osgi"
] |
org.eclipse.emf; org.nasdanika.osgi;
| 2,050,021
|
public CDEValue inverseTranslate(CDEValue orig);
|
CDEValue function(CDEValue orig);
|
/**
* Translates the local site's SDE {@link CDEValue} back into its CDE
* {@link CDEValue}.
*
* @param orig
* The local site's SDE {@link CDEValue} to translate back into
* a CDE.
* @return The translated {@link CDEValue}.
*/
|
Translates the local site's SDE <code>CDEValue</code> back into its CDE <code>CDEValue</code>
|
inverseTranslate
|
{
"repo_name": "OSBI/oodt",
"path": "xmlps/src/main/java/org/apache/oodt/xmlps/mapping/funcs/MappingFunc.java",
"license": "apache-2.0",
"size": 2015
}
|
[
"org.apache.oodt.xmlps.structs.CDEValue"
] |
import org.apache.oodt.xmlps.structs.CDEValue;
|
import org.apache.oodt.xmlps.structs.*;
|
[
"org.apache.oodt"
] |
org.apache.oodt;
| 1,466,825
|
static BlockReaderLocalLegacy newBlockReader(DFSClient.Conf conf,
UserGroupInformation userGroupInformation,
Configuration configuration, String file, ExtendedBlock blk,
Token<BlockTokenIdentifier> token, DatanodeInfo node,
long startOffset, long length, StorageType storageType)
throws IOException {
LocalDatanodeInfo localDatanodeInfo = getLocalDatanodeInfo(node
.getIpcPort());
// check the cache first
BlockLocalPathInfo pathinfo = localDatanodeInfo.getBlockLocalPathInfo(blk);
if (pathinfo == null) {
if (userGroupInformation == null) {
userGroupInformation = UserGroupInformation.getCurrentUser();
}
pathinfo = getBlockPathInfo(userGroupInformation, blk, node,
configuration, conf.socketTimeout, token,
conf.connectToDnViaHostname, storageType);
}
// check to see if the file exists. It may so happen that the
// HDFS file has been deleted and this block-lookup is occurring
// on behalf of a new HDFS file. This time, the block file could
// be residing in a different portion of the fs.data.dir directory.
// In this case, we remove this entry from the cache. The next
// call to this method will re-populate the cache.
FileInputStream dataIn = null;
FileInputStream checksumIn = null;
BlockReaderLocalLegacy localBlockReader = null;
boolean skipChecksumCheck = conf.skipShortCircuitChecksums ||
storageType.isTransient();
try {
// get a local file system
File blkfile = new File(pathinfo.getBlockPath());
dataIn = new FileInputStream(blkfile);
if (LOG.isDebugEnabled()) {
LOG.debug("New BlockReaderLocalLegacy for file " + blkfile + " of size "
+ blkfile.length() + " startOffset " + startOffset + " length "
+ length + " short circuit checksum " + !skipChecksumCheck);
}
if (!skipChecksumCheck) {
// get the metadata file
File metafile = new File(pathinfo.getMetaPath());
checksumIn = new FileInputStream(metafile);
final DataChecksum checksum = BlockMetadataHeader.readDataChecksum(
new DataInputStream(checksumIn), blk);
long firstChunkOffset = startOffset
- (startOffset % checksum.getBytesPerChecksum());
localBlockReader = new BlockReaderLocalLegacy(conf, file, blk, token,
startOffset, length, pathinfo, checksum, true, dataIn,
firstChunkOffset, checksumIn);
} else {
localBlockReader = new BlockReaderLocalLegacy(conf, file, blk, token,
startOffset, length, pathinfo, dataIn);
}
} catch (IOException e) {
// remove from cache
localDatanodeInfo.removeBlockLocalPathInfo(blk);
DFSClient.LOG.warn("BlockReaderLocalLegacy: Removing " + blk
+ " from cache because local file " + pathinfo.getBlockPath()
+ " could not be opened.");
throw e;
} finally {
if (localBlockReader == null) {
if (dataIn != null) {
dataIn.close();
}
if (checksumIn != null) {
checksumIn.close();
}
}
}
return localBlockReader;
}
|
static BlockReaderLocalLegacy newBlockReader(DFSClient.Conf conf, UserGroupInformation userGroupInformation, Configuration configuration, String file, ExtendedBlock blk, Token<BlockTokenIdentifier> token, DatanodeInfo node, long startOffset, long length, StorageType storageType) throws IOException { LocalDatanodeInfo localDatanodeInfo = getLocalDatanodeInfo(node .getIpcPort()); BlockLocalPathInfo pathinfo = localDatanodeInfo.getBlockLocalPathInfo(blk); if (pathinfo == null) { if (userGroupInformation == null) { userGroupInformation = UserGroupInformation.getCurrentUser(); } pathinfo = getBlockPathInfo(userGroupInformation, blk, node, configuration, conf.socketTimeout, token, conf.connectToDnViaHostname, storageType); } FileInputStream dataIn = null; FileInputStream checksumIn = null; BlockReaderLocalLegacy localBlockReader = null; boolean skipChecksumCheck = conf.skipShortCircuitChecksums storageType.isTransient(); try { File blkfile = new File(pathinfo.getBlockPath()); dataIn = new FileInputStream(blkfile); if (LOG.isDebugEnabled()) { LOG.debug(STR + blkfile + STR + blkfile.length() + STR + startOffset + STR + length + STR + !skipChecksumCheck); } if (!skipChecksumCheck) { File metafile = new File(pathinfo.getMetaPath()); checksumIn = new FileInputStream(metafile); final DataChecksum checksum = BlockMetadataHeader.readDataChecksum( new DataInputStream(checksumIn), blk); long firstChunkOffset = startOffset - (startOffset % checksum.getBytesPerChecksum()); localBlockReader = new BlockReaderLocalLegacy(conf, file, blk, token, startOffset, length, pathinfo, checksum, true, dataIn, firstChunkOffset, checksumIn); } else { localBlockReader = new BlockReaderLocalLegacy(conf, file, blk, token, startOffset, length, pathinfo, dataIn); } } catch (IOException e) { localDatanodeInfo.removeBlockLocalPathInfo(blk); DFSClient.LOG.warn(STR + blk + STR + pathinfo.getBlockPath() + STR); throw e; } finally { if (localBlockReader == null) { if (dataIn != null) { dataIn.close(); } if 
(checksumIn != null) { checksumIn.close(); } } } return localBlockReader; }
|
/**
* The only way this object can be instantiated.
*/
|
The only way this object can be instantiated
|
newBlockReader
|
{
"repo_name": "Bizyroth/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocalLegacy.java",
"license": "apache-2.0",
"size": 27040
}
|
[
"java.io.DataInputStream",
"java.io.File",
"java.io.FileInputStream",
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.StorageType",
"org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo",
"org.apache.hadoop.hdfs.protocol.DatanodeInfo",
"org.apache.hadoop.hdfs.protocol.ExtendedBlock",
"org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier",
"org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader",
"org.apache.hadoop.security.UserGroupInformation",
"org.apache.hadoop.security.token.Token",
"org.apache.hadoop.util.DataChecksum"
] |
import java.io.DataInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum;
|
import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.security.token.block.*; import org.apache.hadoop.hdfs.server.datanode.*; import org.apache.hadoop.security.*; import org.apache.hadoop.security.token.*; import org.apache.hadoop.util.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 2,177,350
|
public Vector<Account> listContactAccounts(boolean includeReadOnlyAccounts,
boolean excludeImportedAccounts) {
Log.trace(TAG_LOG, "listContactAccounts: " + includeReadOnlyAccounts);
AccountManager am = AccountManager.get(context);
Account[] accounts = am.getAccounts();
SyncAdapterType[] syncs = ContentResolver.getSyncAdapterTypes();
Vector<Account> contactAccounts = new Vector<Account>();
Vector<String> contactAccountTypes = new Vector<String>();
for (SyncAdapterType sync : syncs) {
if (ContactsContract.AUTHORITY.equals(sync.authority) &&
(includeReadOnlyAccounts || sync.supportsUploading())) {
contactAccountTypes.add(sync.accountType);
}
}
String funType = context.getString(R.string.account_type);
for (Account acct: accounts) {
// Exclude Funambol accounts
if (!funType.equals(acct.type) &&
contactAccountTypes.contains(acct.type)) {
if(excludeImportedAccounts) {
Vector<Account> importedAccounts = loadImportedAccounts();
boolean isImported = false;
for(int i=0; i<importedAccounts.size(); i++) {
Account account = importedAccounts.elementAt(i);
if(account.name.equals(acct.name) &&
account.type.equals(acct.type)) {
isImported = true;
}
}
if(!isImported) {
contactAccounts.add(acct);
}
} else {
contactAccounts.add(acct);
}
}
}
return contactAccounts;
}
|
Vector<Account> function(boolean includeReadOnlyAccounts, boolean excludeImportedAccounts) { Log.trace(TAG_LOG, STR + includeReadOnlyAccounts); AccountManager am = AccountManager.get(context); Account[] accounts = am.getAccounts(); SyncAdapterType[] syncs = ContentResolver.getSyncAdapterTypes(); Vector<Account> contactAccounts = new Vector<Account>(); Vector<String> contactAccountTypes = new Vector<String>(); for (SyncAdapterType sync : syncs) { if (ContactsContract.AUTHORITY.equals(sync.authority) && (includeReadOnlyAccounts sync.supportsUploading())) { contactAccountTypes.add(sync.accountType); } } String funType = context.getString(R.string.account_type); for (Account acct: accounts) { if (!funType.equals(acct.type) && contactAccountTypes.contains(acct.type)) { if(excludeImportedAccounts) { Vector<Account> importedAccounts = loadImportedAccounts(); boolean isImported = false; for(int i=0; i<importedAccounts.size(); i++) { Account account = importedAccounts.elementAt(i); if(account.name.equals(acct.name) && account.type.equals(acct.type)) { isImported = true; } } if(!isImported) { contactAccounts.add(acct); } } else { contactAccounts.add(acct); } } } return contactAccounts; }
|
/**
* Lists the accounts which support contact items.
*
* @param includeReadOnlyAccounts
* @param excludeImportedAccounts
* @return The Vector of the accounts
*/
|
Lists the accounts which support contact items
|
listContactAccounts
|
{
"repo_name": "zhangdakun/funasyn",
"path": "src/com/funambol/android/ExternalAccountManager.java",
"license": "agpl-3.0",
"size": 46041
}
|
[
"android.accounts.Account",
"android.accounts.AccountManager",
"android.content.ContentResolver",
"android.content.SyncAdapterType",
"android.provider.ContactsContract",
"com.funambol.util.Log",
"java.util.Vector"
] |
import android.accounts.Account; import android.accounts.AccountManager; import android.content.ContentResolver; import android.content.SyncAdapterType; import android.provider.ContactsContract; import com.funambol.util.Log; import java.util.Vector;
|
import android.accounts.*; import android.content.*; import android.provider.*; import com.funambol.util.*; import java.util.*;
|
[
"android.accounts",
"android.content",
"android.provider",
"com.funambol.util",
"java.util"
] |
android.accounts; android.content; android.provider; com.funambol.util; java.util;
| 1,545,013
|
public BufferedReader bufferedReader() throws HttpRequestException {
return bufferedReader(charset());
}
|
BufferedReader function() throws HttpRequestException { return bufferedReader(charset()); }
|
/**
* Get buffered reader to response body using the character set returned
* from {@link #charset()} and the configured buffer size
*
* @see #bufferSize(int)
* @return reader
* @throws HttpRequestException
*/
|
Get buffered reader to response body using the character set returned from <code>#charset()</code> and the configured buffer size
|
bufferedReader
|
{
"repo_name": "sindhunaydu/web-perf-analyzer",
"path": "HttpRequest.java",
"license": "mit",
"size": 83766
}
|
[
"java.io.BufferedReader"
] |
import java.io.BufferedReader;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 333,857
|
public static final String getDefaultEncoding() {
return PasswordCipherUtil.getSupportedCryptoAlgorithms()[0];
}
|
static final String function() { return PasswordCipherUtil.getSupportedCryptoAlgorithms()[0]; }
|
/**
* Return the default algorithm for the encoding or decoding.
*
* @return The default algorithm.
*/
|
Return the default algorithm for the encoding or decoding
|
getDefaultEncoding
|
{
"repo_name": "kgibm/open-liberty",
"path": "dev/com.ibm.ws.crypto.passwordutil/src/com/ibm/websphere/crypto/PasswordUtil.java",
"license": "epl-1.0",
"size": 29500
}
|
[
"com.ibm.ws.crypto.util.PasswordCipherUtil"
] |
import com.ibm.ws.crypto.util.PasswordCipherUtil;
|
import com.ibm.ws.crypto.util.*;
|
[
"com.ibm.ws"
] |
com.ibm.ws;
| 971,641
|
public final void setOnDismissListener(PopupWindow.OnDismissListener onDismissListener) {
mPopupWindow.setOnDismissListener(onDismissListener);
}
|
final void function(PopupWindow.OnDismissListener onDismissListener) { mPopupWindow.setOnDismissListener(onDismissListener); }
|
/**
* Set a listener to receive a callback when the popup is dismissed.
*
* @param onDismissListener Listener that will be notified when the popup is dismissed.
*/
|
Set a listener to receive a callback when the popup is dismissed
|
setOnDismissListener
|
{
"repo_name": "LSPOoO/com.lspooo.example",
"path": "plugin_common/src/main/java/com/lspooo/plugin/common/view/CCPListPopupWindow.java",
"license": "mit",
"size": 44273
}
|
[
"android.widget.PopupWindow"
] |
import android.widget.PopupWindow;
|
import android.widget.*;
|
[
"android.widget"
] |
android.widget;
| 1,201,825
|
public void deleteDevice(@NonNull ApiCallback<Void> callback) {
makeDeleteCall(callback);
}
|
void function(@NonNull ApiCallback<Void> callback) { makeDeleteCall(callback); }
|
/**
* Delete a single device.
*
* @param callback result callback
*/
|
Delete a single device
|
deleteDevice
|
{
"repo_name": "fitpay/fitpay-android-sdk",
"path": "fitpay/src/main/java/com/fitpay/android/api/models/device/Device.java",
"license": "mit",
"size": 22454
}
|
[
"androidx.annotation.NonNull",
"com.fitpay.android.api.callbacks.ApiCallback"
] |
import androidx.annotation.NonNull; import com.fitpay.android.api.callbacks.ApiCallback;
|
import androidx.annotation.*; import com.fitpay.android.api.callbacks.*;
|
[
"androidx.annotation",
"com.fitpay.android"
] |
androidx.annotation; com.fitpay.android;
| 13,754
|
public Icon getIcon() {
return (Icon)getValue(SMALL_ICON);
}
|
Icon function() { return (Icon)getValue(SMALL_ICON); }
|
/**
* Returns the icon for this action.
*
* @return The icon.
*/
|
Returns the icon for this action
|
getIcon
|
{
"repo_name": "reqT/reqT-syntax",
"path": "src/org/fife/ui/rtextarea/RecordableTextAction.java",
"license": "bsd-3-clause",
"size": 7526
}
|
[
"javax.swing.Icon"
] |
import javax.swing.Icon;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 2,482,107
|
private byte[][] getBytecodesFromClasses(Source source, String fullClassName)
{
if (fullClassName == null)
return null;
String xslFileName = getStylesheetFileName(source);
File xslFile = null;
if (xslFileName != null)
xslFile = new File(xslFileName);
// Find the base name of the translet
final String transletName;
int lastDotIndex = fullClassName.lastIndexOf('.');
if (lastDotIndex > 0)
transletName = fullClassName.substring(lastDotIndex+1);
else
transletName = fullClassName;
// Construct the path name for the translet class file
String transletPath = fullClassName.replace('.', '/');
if (_destinationDirectory != null) {
transletPath = _destinationDirectory + "/" + transletPath + ".class";
}
else {
if (xslFile != null && xslFile.getParent() != null)
transletPath = xslFile.getParent() + "/" + transletPath + ".class";
else
transletPath = transletPath + ".class";
}
// Return null if the translet class file does not exist.
File transletFile = new File(transletPath);
if (!transletFile.exists())
return null;
// Compare the timestamps of the translet and the xsl file.
// If the translet is older than the xsl file, return null
// so that the xsl file is used for the transformation and
// the translet is regenerated.
if (xslFile != null && xslFile.exists()) {
long xslTimestamp = xslFile.lastModified();
long transletTimestamp = transletFile.lastModified();
if (transletTimestamp < xslTimestamp)
return null;
}
// Load the translet into a bytecode array.
Vector bytecodes = new Vector();
int fileLength = (int)transletFile.length();
if (fileLength > 0) {
FileInputStream input;
try {
input = new FileInputStream(transletFile);
}
catch (FileNotFoundException e) {
return null;
}
byte[] bytes = new byte[fileLength];
try {
readFromInputStream(bytes, input, fileLength);
input.close();
}
catch (IOException e) {
return null;
}
bytecodes.addElement(bytes);
}
else
return null;
// Find the parent directory of the translet.
String transletParentDir = transletFile.getParent();
if (transletParentDir == null)
transletParentDir = SecuritySupport.getSystemProperty("user.dir");
File transletParentFile = new File(transletParentDir);
|
byte[][] function(Source source, String fullClassName) { if (fullClassName == null) return null; String xslFileName = getStylesheetFileName(source); File xslFile = null; if (xslFileName != null) xslFile = new File(xslFileName); final String transletName; int lastDotIndex = fullClassName.lastIndexOf('.'); if (lastDotIndex > 0) transletName = fullClassName.substring(lastDotIndex+1); else transletName = fullClassName; String transletPath = fullClassName.replace('.', '/'); if (_destinationDirectory != null) { transletPath = _destinationDirectory + "/" + transletPath + STR; } else { if (xslFile != null && xslFile.getParent() != null) transletPath = xslFile.getParent() + "/" + transletPath + STR; else transletPath = transletPath + STR; } File transletFile = new File(transletPath); if (!transletFile.exists()) return null; if (xslFile != null && xslFile.exists()) { long xslTimestamp = xslFile.lastModified(); long transletTimestamp = transletFile.lastModified(); if (transletTimestamp < xslTimestamp) return null; } Vector bytecodes = new Vector(); int fileLength = (int)transletFile.length(); if (fileLength > 0) { FileInputStream input; try { input = new FileInputStream(transletFile); } catch (FileNotFoundException e) { return null; } byte[] bytes = new byte[fileLength]; try { readFromInputStream(bytes, input, fileLength); input.close(); } catch (IOException e) { return null; } bytecodes.addElement(bytes); } else return null; String transletParentDir = transletFile.getParent(); if (transletParentDir == null) transletParentDir = SecuritySupport.getSystemProperty(STR); File transletParentFile = new File(transletParentDir);
|
/**
* Load the translet classes from local .class files and return
* the bytecode array.
*
* @param source The xsl source
* @param fullClassName The full name of the translet
* @return The bytecode array
*/
|
Load the translet classes from local .class files and return the bytecode array
|
getBytecodesFromClasses
|
{
"repo_name": "JetBrains/jdk8u_jaxp",
"path": "src/com/sun/org/apache/xalan/internal/xsltc/trax/TransformerFactoryImpl.java",
"license": "gpl-2.0",
"size": 59454
}
|
[
"com.sun.org.apache.xalan.internal.utils.SecuritySupport",
"java.io.File",
"java.io.FileInputStream",
"java.io.FileNotFoundException",
"java.io.IOException",
"java.util.Vector",
"javax.xml.transform.Source"
] |
import com.sun.org.apache.xalan.internal.utils.SecuritySupport; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Vector; import javax.xml.transform.Source;
|
import com.sun.org.apache.xalan.internal.utils.*; import java.io.*; import java.util.*; import javax.xml.transform.*;
|
[
"com.sun.org",
"java.io",
"java.util",
"javax.xml"
] |
com.sun.org; java.io; java.util; javax.xml;
| 2,554,400
|
/**
 * Converts a database {@link Cursor} into a list of {@link CssNode} objects.
 * Reads identity, status and type columns for every row.
 *
 * @param c the cursor to read; may be null
 * @return the list of nodes, or null when the cursor is null, empty,
 *         or a read error occurs (the error is printed, not rethrown)
 */
public static List<CssNode> cursor2Node(Cursor c){
    List<CssNode> nodes = new ArrayList<CssNode>();
    try {
        if (c == null || c.getCount() == 0) {
            return null;
        }
        c.moveToFirst();
        while (true) {
            CssNode current = new CssNode();
            current.setIdentity(c.getString(c.getColumnIndex(CssNodeTable.KEY_IDENTITY)));
            current.setStatus(c.getInt(c.getColumnIndex(CssNodeTable.KEY_STATUS)));
            current.setType(c.getInt(c.getColumnIndex(CssNodeTable.KEY_TYPE)));
            nodes.add(current);
            if (!c.moveToNext()) {
                break;
            }
        }
        return nodes;
    } catch (Exception ex) {
        // Best-effort conversion: log the failure and fall through to null.
        ex.printStackTrace();
    }
    return null;
}
|
static List<CssNode> function(Cursor c){ List<CssNode> list = new ArrayList<CssNode>(); try{ if (c==null) return null; if (c.getCount()==0) return null; c.moveToFirst(); do{ CssNode node = new CssNode(); node.setIdentity(c.getString(c.getColumnIndex(CssNodeTable.KEY_IDENTITY))); node.setStatus(c.getInt(c.getColumnIndex(CssNodeTable.KEY_STATUS))); node.setType(c.getInt(c.getColumnIndex(CssNodeTable.KEY_TYPE))); list.add(node); }while (c.moveToNext()); return list; }catch(Exception ex){ ex.printStackTrace(); } return null; }
|
/**
* Transform generic Cursor Result in a list of CSSNode
* @param c
* @return
*/
|
Transform generic Cursor Result in a list of CSSNode
|
cursor2Node
|
{
"repo_name": "EPapadopoulou/PersoNIS",
"path": "api/android/archive/internal/src/main/java/org/societies/android/api/internal/contentproviders/CssUtils.java",
"license": "bsd-2-clause",
"size": 5296
}
|
[
"android.database.Cursor",
"java.util.ArrayList",
"java.util.List",
"org.societies.api.schema.cssmanagement.CssNode"
] |
import android.database.Cursor; import java.util.ArrayList; import java.util.List; import org.societies.api.schema.cssmanagement.CssNode;
|
import android.database.*; import java.util.*; import org.societies.api.schema.cssmanagement.*;
|
[
"android.database",
"java.util",
"org.societies.api"
] |
android.database; java.util; org.societies.api;
| 2,355,068
|
/**
 * Tears down rendering when this view is removed from the window.
 * This method is used as part of the View class and is not normally
 * called or subclassed by clients of MyGLSurfaceView.
 * Must not be called before a renderer has been set.
 */
@Override
protected void onDetachedFromWindow() {
    if (LOG_ATTACH_DETACH) {
        Log.d(TAG, "onDetachedFromWindow");
    }
    // Stop the GL thread and block until it has fully exited, so no frame
    // is rendered against a surface that is about to be destroyed.
    if (mGLThread != null) {
        mGLThread.requestExitAndWait();
    }
    // Record the detach; presumably checked on a later re-attach to restart
    // the GL thread -- TODO confirm against onAttachedToWindow.
    mDetached = true;
    super.onDetachedFromWindow();
}
|
void function() { if (LOG_ATTACH_DETACH) { Log.d(TAG, STR); } if (mGLThread != null) { mGLThread.requestExitAndWait(); } mDetached = true; super.onDetachedFromWindow(); }
|
/**
* This method is used as part of the View class and is not normally
* called or subclassed by clients of MyGLSurfaceView.
* Must not be called before a renderer has been set.
*/
|
This method is used as part of the View class and is not normally called or subclassed by clients of MyGLSurfaceView. Must not be called before a renderer has been set
|
onDetachedFromWindow
|
{
"repo_name": "nvllsvm/GZDoom-Android",
"path": "doom/src/main/java/net/nullsum/doom/MyGLSurfaceView.java",
"license": "gpl-2.0",
"size": 78962
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 281,590
|
/**
 * Build an XML string (including a carriage return) for a certain tag
 * binary (byte[]) value. Convenience overload that delegates to the
 * three-argument variant with the carriage-return flag enabled.
 *
 * @param tag the XML tag
 * @param val the binary value of the tag
 * @return the XML String for the tag
 * @throws IOException in case there is a Base64 or GZip encoding problem
 */
public static String addTagValue( String tag, byte[] val ) throws IOException {
    return addTagValue( tag, val, true );
}
|
static String function( String tag, byte[] val ) throws IOException { return addTagValue( tag, val, true ); }
|
/**
* Build an XML string (including a carriage return) for a certain tag binary (byte[]) value
*
* @param tag
* The XML tag
* @param val
* The binary value of the tag
* @return The XML String for the tag.
* @throws IOException
* in case there is an Base64 or GZip encoding problem
*/
|
Build an XML string (including a carriage return) for a certain tag binary (byte[]) value
|
addTagValue
|
{
"repo_name": "IvanNikolaychuk/pentaho-kettle",
"path": "core/src/org/pentaho/di/core/xml/XMLHandler.java",
"license": "apache-2.0",
"size": 37069
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,200,363
|
/**
 * Sets the statistics of the TSM.
 *
 * @param statistics the statistics to associate with this response
 */
public void setStatistics(Statistics statistics)
{
    this.statistics = statistics;
}
|
void function(Statistics statistics) { this.statistics = statistics; }
|
/**
* Sets the statistics of the TSM
*
* @param statistics
*/
|
Sets the statistics of the TSM
|
setStatistics
|
{
"repo_name": "GaloisInc/KOA",
"path": "infrastructure/source/WebVotingSystem/src/ie/ucd/srg/koa/soap/response/SuspendResponse.java",
"license": "gpl-2.0",
"size": 1869
}
|
[
"ie.ucd.srg.koa.soap.response.Statistics"
] |
import ie.ucd.srg.koa.soap.response.Statistics;
|
import ie.ucd.srg.koa.soap.response.*;
|
[
"ie.ucd.srg"
] |
ie.ucd.srg;
| 2,487,316
|
/**
 * Creates a fully initialised helper. You must provide all three values.
 *
 * @param endpoint destination host for the requests (stored lower-cased)
 * @param awsAccessKeyId your AWS Access Key ID
 * @param awsSecretKey your AWS Secret Key, used to sign requests
 * @return a helper whose HMAC-SHA256 signer is primed with the secret key
 * @throws IllegalArgumentException if any argument is null or empty
 * @throws NoSuchAlgorithmException if HMAC-SHA256 is unavailable
 * @throws InvalidKeyException if the secret key cannot initialise the MAC
 */
public static SignedRequestsHelper getInstance(String endpoint, String awsAccessKeyId, String awsSecretKey)
    throws IllegalArgumentException, UnsupportedEncodingException, NoSuchAlgorithmException, InvalidKeyException {
    requireNonEmpty(endpoint, "endpoint is null or empty");
    requireNonEmpty(awsAccessKeyId, "awsAccessKeyId is null or empty");
    requireNonEmpty(awsSecretKey, "awsSecretKey is null or empty");

    SignedRequestsHelper helper = new SignedRequestsHelper();
    helper.endpoint = endpoint.toLowerCase();
    helper.awsAccessKeyId = awsAccessKeyId;
    helper.awsSecretKey = awsSecretKey;

    // Prime the HMAC-SHA256 signer with the secret key once, up front.
    byte[] keyBytes = helper.awsSecretKey.getBytes(UTF8_CHARSET);
    helper.secretKeySpec = new SecretKeySpec(keyBytes, HMAC_SHA256_ALGORITHM);
    helper.mac = Mac.getInstance(HMAC_SHA256_ALGORITHM);
    helper.mac.init(helper.secretKeySpec);
    return helper;
}

/** Throws IllegalArgumentException with the given message when the value is null or empty. */
private static void requireNonEmpty(String value, String message) {
    if (null == value || value.length() == 0) {
        throw new IllegalArgumentException(message);
    }
}

/** Instances are obtained via {@link #getInstance}; direct construction is not allowed. */
private SignedRequestsHelper() {
}
|
static SignedRequestsHelper function(String endpoint, String awsAccessKeyId, String awsSecretKey) throws IllegalArgumentException, UnsupportedEncodingException, NoSuchAlgorithmException, InvalidKeyException { if (null == endpoint endpoint.length() == 0) { throw new IllegalArgumentException(STR); } if (null == awsAccessKeyId awsAccessKeyId.length() == 0) { throw new IllegalArgumentException(STR); } if (null == awsSecretKey awsSecretKey.length() == 0) { throw new IllegalArgumentException(STR); } SignedRequestsHelper instance = new SignedRequestsHelper(); instance.endpoint = endpoint.toLowerCase(); instance.awsAccessKeyId = awsAccessKeyId; instance.awsSecretKey = awsSecretKey; byte[] secretyKeyBytes = instance.awsSecretKey.getBytes(UTF8_CHARSET); instance.secretKeySpec = new SecretKeySpec(secretyKeyBytes, HMAC_SHA256_ALGORITHM); instance.mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); instance.mac.init(instance.secretKeySpec); return instance; } private SignedRequestsHelper() { }
|
/**
* You must provide the three values below to initialize the helper.
*
* @param endpoint
* Destination for the requests.
* @param awsAccessKeyId
* Your AWS Access Key ID
* @param awsSecretKey
* Your AWS Secret Key
*/
|
You must provide the three values below to initialize the helper
|
getInstance
|
{
"repo_name": "fei-chen/AmberAlert",
"path": "backend/src/main/java/com/highform/affiliates/amazon/SignedRequestsHelper.java",
"license": "apache-2.0",
"size": 9125
}
|
[
"java.io.UnsupportedEncodingException",
"java.security.InvalidKeyException",
"java.security.NoSuchAlgorithmException",
"javax.crypto.Mac",
"javax.crypto.spec.SecretKeySpec"
] |
import java.io.UnsupportedEncodingException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import javax.crypto.Mac; import javax.crypto.spec.SecretKeySpec;
|
import java.io.*; import java.security.*; import javax.crypto.*; import javax.crypto.spec.*;
|
[
"java.io",
"java.security",
"javax.crypto"
] |
java.io; java.security; javax.crypto;
| 1,420,401
|
/**
 * Draws the currently visible portion of the data series defined by
 * {@link #fun(float)} to the canvas. Drawing is NOT clipped here; callers
 * are expected to clip the canvas themselves first.
 *
 * @param canvas the canvas to draw the polyline onto
 */
private void drawDataSeriesUnclipped(Canvas canvas) {
    // Seed a degenerate first segment at the left edge of the viewport.
    mSeriesLinesBuffer[0] = mContentRect.left;
    mSeriesLinesBuffer[1] = getDrawY(fun(mCurrentViewport.left));
    mSeriesLinesBuffer[2] = mSeriesLinesBuffer[0];
    mSeriesLinesBuffer[3] = mSeriesLinesBuffer[1];
    for (int step = 1; step <= DRAW_STEPS; step++) {
        int base = step * 4;
        // Each segment starts where the previous one ended (buffer holds
        // x0,y0,x1,y1 quadruples for Canvas.drawLines).
        mSeriesLinesBuffer[base] = mSeriesLinesBuffer[base - 2];
        mSeriesLinesBuffer[base + 1] = mSeriesLinesBuffer[base - 1];
        float sampleX = (mCurrentViewport.left + (mCurrentViewport.width() / DRAW_STEPS * step));
        mSeriesLinesBuffer[base + 2] = getDrawX(sampleX);
        mSeriesLinesBuffer[base + 3] = getDrawY(fun(sampleX));
    }
    canvas.drawLines(mSeriesLinesBuffer, mDataPaint);
}
|
void function(Canvas canvas) { mSeriesLinesBuffer[0] = mContentRect.left; mSeriesLinesBuffer[1] = getDrawY(fun(mCurrentViewport.left)); mSeriesLinesBuffer[2] = mSeriesLinesBuffer[0]; mSeriesLinesBuffer[3] = mSeriesLinesBuffer[1]; float x; for (int i = 1; i <= DRAW_STEPS; i++) { mSeriesLinesBuffer[i * 4 + 0] = mSeriesLinesBuffer[(i - 1) * 4 + 2]; mSeriesLinesBuffer[i * 4 + 1] = mSeriesLinesBuffer[(i - 1) * 4 + 3]; x = (mCurrentViewport.left + (mCurrentViewport.width() / DRAW_STEPS * i)); mSeriesLinesBuffer[i * 4 + 2] = getDrawX(x); mSeriesLinesBuffer[i * 4 + 3] = getDrawY(fun(x)); } canvas.drawLines(mSeriesLinesBuffer, mDataPaint); }
|
/**
* Draws the currently visible portion of the data series defined by {@link #fun(float)} to the
 * canvas. This method does not clip its drawing, so users should call
 * {@link Canvas#clipRect} before calling this method.
*/
|
Draws the currently visible portion of the data series defined by <code>#fun(float)</code> to the canvas. This method does not clip its drawing, so users should call <code>Canvas#clipRect</code> before calling this method
|
drawDataSeriesUnclipped
|
{
"repo_name": "indashnet/InDashNet.Open.UN2000",
"path": "android/development/samples/training/InteractiveChart/src/com/example/android/interactivechart/InteractiveLineGraphView.java",
"license": "apache-2.0",
"size": 46674
}
|
[
"android.graphics.Canvas"
] |
import android.graphics.Canvas;
|
import android.graphics.*;
|
[
"android.graphics"
] |
android.graphics;
| 2,056,142
|
/**
 * Returns all shapes whose absolute bounds contain the given point.
 *
 * @param point the point in absolute coordinates; may be null
 * @return the matching shapes; empty when point is null or nothing matches
 */
public List<S> getShapesAtPosition(Point point) {
    List<S> hits = new ArrayList<S>();
    if (point == null) {
        return hits;
    }
    for (S candidate : this.getAllShapesReadOnly()) {
        if (candidate.getAbsoluteBounds().isPointIncluded(point)) {
            hits.add(candidate);
        }
    }
    return hits;
}
|
List<S> function(Point point) { List<S> shapes = new ArrayList<S>(); if (point == null) return shapes; for (S shape : this.getAllShapesReadOnly()) { if (shape.getAbsoluteBounds().isPointIncluded(point)) shapes.add(shape); } return shapes; }
|
/**
* Returns all shapes that include the given point (in absolute coordinates).
*
* @param point
* @return
*/
|
Returns all shapes that include the given point (in absolute coordinates)
|
getShapesAtPosition
|
{
"repo_name": "KarnYong/BPaaS-modeling",
"path": "platform extensions/diagram core/src/org/oryxeditor/server/diagram/generic/GenericDiagram.java",
"license": "gpl-3.0",
"size": 10319
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.oryxeditor.server.diagram.Point"
] |
import java.util.ArrayList; import java.util.List; import org.oryxeditor.server.diagram.Point;
|
import java.util.*; import org.oryxeditor.server.diagram.*;
|
[
"java.util",
"org.oryxeditor.server"
] |
java.util; org.oryxeditor.server;
| 2,785,220
|
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods: registers a new container in the database, optionally launches
 * it through docker, returns its data as JSON and fires an FCM push.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    try {
        // Register the new container and obtain its assigned id and ports.
        DB base_datos = new DB();
        base_datos.conectar();
        Pc usuario = base_datos.insertar();
        base_datos.desconectar();
        if (Util.docker) {
            // COMANDO DOCKER
            // docker run -d --rm -p [PuertoPHP]:80 -p [PuertoSQL]:3306 --name=server[ID] xxdrackleroxx/test
            // NOTE(review): the command is built by string concatenation and run via
            // Runtime.exec(String); if port/id values ever come from user input this is
            // a command-injection risk -- prefer ProcessBuilder with an argument list.
            Runtime shell = Runtime.getRuntime();
            Process proceso = shell.exec("docker run -d --rm -p " + usuario.getPuertoPHP() + ":80 -p " + usuario.getPuertoSQL() + ":3306 --name=server" + usuario.getId() + " xxdrackleroxx/test");
            proceso.waitFor();
        }
        // Return the new container's data to the caller as JSON.
        writeJson(response, new Gson().toJson(usuario));
        notifyNewContainer(usuario);
    } catch (Exception e) {
        // Any failure (DB, docker, push) is reported as a generic error payload.
        writeJson(response, new Gson().toJson("ERROR BD"));
    }
}

/**
 * Writes the given JSON payload as a UTF-8 "application/json" response body.
 * The writer is closed even when the write itself fails.
 */
private void writeJson(HttpServletResponse response, String json) throws IOException {
    response.setContentType("application/json");
    response.setCharacterEncoding("UTF-8");
    Writer salida = response.getWriter();
    try {
        salida.write(json);
    } finally {
        salida.close();
    }
}

/**
 * Sends a Firebase Cloud Messaging push notification announcing the new container.
 */
private void notifyNewContainer(Pc usuario) throws IOException {
    // SECURITY NOTE(review): the FCM server key ("Authorization" header) and the
    // target device token ("to" property) are hard-coded below. They should be
    // moved to configuration and kept out of source control / revoked if leaked.
    CloseableHttpClient httpclient = HttpClients.createDefault();
    HttpPost httppost = new HttpPost("https://fcm.googleapis.com/fcm/send");
    // Headers
    httppost.addHeader("Authorization", " key=AAAAZ3CLFVs:APA91bHg6SSZ0Xq6hzcPX9Q7g37sIRNH4HP4BW_eFQblN__2jax9ZjsaptJYfAugyVo12sfnRFTk4o57XwiDpV_1-DEBnPYmH-ETTB-nc3DqlOBqV98AtsW99sujS04HhMhzRzE9xIfQ");
    httppost.addHeader("Content-Type", "application/json");
    JsonObject mensaje = new JsonObject();
    mensaje.addProperty("to", "c8Yt-yQouZI:APA91bEkZ0QLt1jESKmLwkDHD1gu7s-VJ8ThRv8JsHFhpgk3RbfwooiDnvHvhYpdWkvIqLveEHD_tAkoQvm5EyYVE3AumkF_cQwDO27M_rTSTQslpGySbDbY2N9S50gKUAa2ADRS-GPH");
    mensaje.addProperty("priority", "high");
    JsonObject notificacion = new JsonObject();
    notificacion.addProperty("title", "Nuevo contenedor");
    notificacion.addProperty("body", usuario.toString());
    mensaje.add("notification", notificacion);
    httppost.setEntity(new StringEntity(mensaje.toString(), "UTF-8"));
    System.out.println("[LOG] Mensaje: " + mensaje);
    HttpResponse respuesta = httpclient.execute(httppost);
    System.out.println(respuesta.getStatusLine());
}
|
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { DB base_datos = new DB(); base_datos.conectar(); Pc usuario = base_datos.insertar(); base_datos.desconectar(); if (Util.docker) { Process proceso; Runtime shell = Runtime.getRuntime(); proceso = shell.exec(STR + usuario.getPuertoPHP() + STR + usuario.getPuertoSQL() + STR + usuario.getId() + STR); proceso.waitFor(); } String json = new Gson().toJson(usuario); response.setContentType(STR); response.setCharacterEncoding("UTF-8"); Writer salida = null; salida = response.getWriter(); salida.write(json); salida.close(); CloseableHttpClient httpclient = HttpClients.createDefault(); HttpPost httppost = new HttpPost(STRAuthorizationSTR key=AAAAZ3CLFVs:APA91bHg6SSZ0Xq6hzcPX9Q7g37sIRNH4HP4BW_eFQblN__2jax9ZjsaptJYfAugyVo12sfnRFTk4o57XwiDpV_1-DEBnPYmH-ETTB-nc3DqlOBqV98AtsW99sujS04HhMhzRzE9xIfQSTRContent-Type", STR); JsonObject mensaje = new JsonObject(); mensaje.addProperty("toSTRc8Yt-yQouZI:APA91bEkZ0QLt1jESKmLwkDHD1gu7s-VJ8ThRv8JsHFhpgk3RbfwooiDnvHvhYpdWkvIqLveEHD_tAkoQvm5EyYVE3AumkF_cQwDO27M_rTSTQslpGySbDbY2N9S50gKUAa2ADRS-GPHSTRprioritySTRhighSTRtitleSTRNuevo contenedorSTRbodySTRnotificationSTRUTF-8STR[LOG] Mensaje: STRERROR BD"); response.setContentType(STR); response.setCharacterEncoding("UTF-8"); Writer salida = null; salida = response.getWriter(); salida.write(json); salida.close(); } }
|
/**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code>
* methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
|
Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods
|
processRequest
|
{
"repo_name": "AmauryOrtega/Sem-Update",
"path": "3-Corte/Proyecto_gson_FireBase/src/java/controladores/ServidorIniciar.java",
"license": "mit",
"size": 5076
}
|
[
"com.google.gson.Gson",
"com.google.gson.JsonObject",
"java.io.IOException",
"java.io.Writer",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"org.apache.http.client.methods.HttpPost",
"org.apache.http.impl.client.CloseableHttpClient",
"org.apache.http.impl.client.HttpClients"
] |
import com.google.gson.Gson; import com.google.gson.JsonObject; import java.io.IOException; import java.io.Writer; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients;
|
import com.google.gson.*; import java.io.*; import javax.servlet.*; import javax.servlet.http.*; import org.apache.http.client.methods.*; import org.apache.http.impl.client.*;
|
[
"com.google.gson",
"java.io",
"javax.servlet",
"org.apache.http"
] |
com.google.gson; java.io; javax.servlet; org.apache.http;
| 2,569,929
|
/**
 * Keep a count of acks that are received by the leader for a particular
 * proposal, and commit the proposal once a quorum of acks has arrived.
 *
 * @param sid the server id of the follower sending the ack
 * @param zxid the zxid of the proposal sent out
 * @param followerAddr address of the acking follower, used for logging only
 */
synchronized public void processAck(long sid, long zxid, SocketAddress followerAddr) {
    if (LOG.isTraceEnabled()) {
        LOG.trace("Ack zxid: 0x{}", Long.toHexString(zxid));
        for (Proposal p : outstandingProposals.values()) {
            long packetZxid = p.packet.getZxid();
            LOG.trace("outstanding proposal: 0x{}",
                    Long.toHexString(packetZxid));
        }
        LOG.trace("outstanding proposals all");
    }
    // Low 32 bits of the zxid are zero: presumably the epoch-start proposal,
    // which needs no commit accounting here -- TODO confirm against the
    // leader election / NEWLEADER handling.
    if ((zxid & 0xffffffffL) == 0) {
        return;
    }
    // Nothing in flight, so this ack cannot match any pending proposal.
    if (outstandingProposals.size() == 0) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("outstanding is 0");
        }
        return;
    }
    // Late ack for something already committed: ignore.
    if (lastCommitted >= zxid) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("proposal has already been committed, pzxid: 0x{} zxid: 0x{}",
                    Long.toHexString(lastCommitted), Long.toHexString(zxid));
        }
        // The proposal has already been committed
        return;
    }
    Proposal p = outstandingProposals.get(zxid);
    if (p == null) {
        // Ack for a zxid we have not proposed (beyond lastCommitted but not
        // outstanding): log and drop.
        LOG.warn("Trying to commit future proposal: zxid 0x{} from {}",
                Long.toHexString(zxid), followerAddr);
        return;
    }
    // Record this follower's ack against the proposal.
    p.ackSet.add(sid);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Count for zxid: 0x{} is {}",
                Long.toHexString(zxid), p.ackSet.size());
    }
    // Once a quorum of servers has acked, the proposal can be committed.
    if (self.getQuorumVerifier().containsQuorum(p.ackSet)){
        if (zxid != lastCommitted+1) {
            // Commits are expected in zxid order; log loudly on a gap.
            LOG.warn("Commiting zxid 0x{} from {} not first!",
                    Long.toHexString(zxid), followerAddr);
            LOG.warn("First is 0x{}", Long.toHexString(lastCommitted + 1));
        }
        outstandingProposals.remove(zxid);
        if (p.request != null) {
            toBeApplied.add(p);
        }
        if (p.request == null) {
            LOG.warn("Going to commmit null request for proposal: {}", p);
        }
        // Tell followers to commit, inform observers, and hand the request
        // to the local commit processor.
        commit(zxid);
        inform(p);
        zk.commitProcessor.commit(p.request);
        // Release any sync requests that were waiting on this zxid.
        if(pendingSyncs.containsKey(zxid)){
            for(LearnerSyncRequest r: pendingSyncs.remove(zxid)) {
                sendSync(r);
            }
        }
    }
}
static class ToBeAppliedRequestProcessor implements RequestProcessor {
private RequestProcessor next;
private ConcurrentLinkedQueue<Proposal> toBeApplied;
ToBeAppliedRequestProcessor(RequestProcessor next,
ConcurrentLinkedQueue<Proposal> toBeApplied) {
if (!(next instanceof FinalRequestProcessor)) {
throw new RuntimeException(ToBeAppliedRequestProcessor.class
.getName()
+ " must be connected to "
+ FinalRequestProcessor.class.getName()
+ " not "
+ next.getClass().getName());
}
this.toBeApplied = toBeApplied;
this.next = next;
}
|
synchronized void function(long sid, long zxid, SocketAddress followerAddr) { if (LOG.isTraceEnabled()) { LOG.trace(STR, Long.toHexString(zxid)); for (Proposal p : outstandingProposals.values()) { long packetZxid = p.packet.getZxid(); LOG.trace(STR, Long.toHexString(packetZxid)); } LOG.trace(STR); } if ((zxid & 0xffffffffL) == 0) { return; } if (outstandingProposals.size() == 0) { if (LOG.isDebugEnabled()) { LOG.debug(STR); } return; } if (lastCommitted >= zxid) { if (LOG.isDebugEnabled()) { LOG.debug(STR, Long.toHexString(lastCommitted), Long.toHexString(zxid)); } return; } Proposal p = outstandingProposals.get(zxid); if (p == null) { LOG.warn(STR, Long.toHexString(zxid), followerAddr); return; } p.ackSet.add(sid); if (LOG.isDebugEnabled()) { LOG.debug(STR, Long.toHexString(zxid), p.ackSet.size()); } if (self.getQuorumVerifier().containsQuorum(p.ackSet)){ if (zxid != lastCommitted+1) { LOG.warn(STR, Long.toHexString(zxid), followerAddr); LOG.warn(STR, Long.toHexString(lastCommitted + 1)); } outstandingProposals.remove(zxid); if (p.request != null) { toBeApplied.add(p); } if (p.request == null) { LOG.warn(STR, p); } commit(zxid); inform(p); zk.commitProcessor.commit(p.request); if(pendingSyncs.containsKey(zxid)){ for(LearnerSyncRequest r: pendingSyncs.remove(zxid)) { sendSync(r); } } } } static class ToBeAppliedRequestProcessor implements RequestProcessor { private RequestProcessor next; private ConcurrentLinkedQueue<Proposal> toBeApplied; ToBeAppliedRequestProcessor(RequestProcessor next, ConcurrentLinkedQueue<Proposal> toBeApplied) { if (!(next instanceof FinalRequestProcessor)) { throw new RuntimeException(ToBeAppliedRequestProcessor.class .getName() + STR + FinalRequestProcessor.class.getName() + STR + next.getClass().getName()); } this.toBeApplied = toBeApplied; this.next = next; }
|
/**
* Keep a count of acks that are received by the leader for a particular
* proposal
*
* @param zxid
* the zxid of the proposal sent out
* @param followerAddr
*/
|
Keep a count of acks that are received by the leader for a particular proposal
|
processAck
|
{
"repo_name": "fzsens/zookeeper",
"path": "src/java/main/org/apache/zookeeper/server/quorum/Leader.java",
"license": "apache-2.0",
"size": 36214
}
|
[
"java.net.SocketAddress",
"java.util.concurrent.ConcurrentLinkedQueue",
"org.apache.zookeeper.server.FinalRequestProcessor",
"org.apache.zookeeper.server.RequestProcessor"
] |
import java.net.SocketAddress; import java.util.concurrent.ConcurrentLinkedQueue; import org.apache.zookeeper.server.FinalRequestProcessor; import org.apache.zookeeper.server.RequestProcessor;
|
import java.net.*; import java.util.concurrent.*; import org.apache.zookeeper.server.*;
|
[
"java.net",
"java.util",
"org.apache.zookeeper"
] |
java.net; java.util; org.apache.zookeeper;
| 33,825
|
/**
 * Informs the installed DHTOverlayAnalyzers about the finish of a lookup
 * with the responsible contact(s).
 *
 * @param contact the contact of the peer who initiated the lookup
 * @param key the key that was looked up
 * @param responsibleContact the contacts of the peers responsible for the key
 * @param hops the number of hops needed for the lookup
 */
public void dhtLookupFinished(OverlayContact<?> contact,
        DHTKey<?> key, List<OverlayContact<?>> responsibleContact, int hops);
|
void function(OverlayContact<?> contact, DHTKey<?> key, List<OverlayContact<?>> responsibleContact, int hops);
|
/**
* Informs the installed DHTOverlayAnalyzers about the finish of a lookup
* with the responsible contact.
*
* @param contact
* the contact of the peer who has initiated the lookup
* @param key
* the key to lookup
* @param responsibleContact
* the contact of the peer who is responsible for the key
* @param hops
* the number of hops needed for the lookup
*/
|
Informs the installed DHTOverlayAnalyzers about the finish of a lookup with the responsible contact
|
dhtLookupFinished
|
{
"repo_name": "flyroom/PeerfactSimKOM_Clone",
"path": "src/org/peerfact/api/common/Monitor.java",
"license": "gpl-2.0",
"size": 16925
}
|
[
"java.util.List",
"org.peerfact.api.overlay.OverlayContact",
"org.peerfact.api.overlay.dht.DHTKey"
] |
import java.util.List; import org.peerfact.api.overlay.OverlayContact; import org.peerfact.api.overlay.dht.DHTKey;
|
import java.util.*; import org.peerfact.api.overlay.*; import org.peerfact.api.overlay.dht.*;
|
[
"java.util",
"org.peerfact.api"
] |
java.util; org.peerfact.api;
| 1,322,433
|
/**
 * Find the mapping from predicate leaves to columns.
 *
 * @param sargLeaves the search argument leaves that we need to map
 * @param evolution the mapping from reader to file schema
 * @return an array mapping each sarg leaf to a concrete column number in
 *         the file (entries are pre-filled with -1)
 */
public static int[] mapSargColumnsToOrcInternalColIdx(
    List<PredicateLeaf> sargLeaves,
    SchemaEvolution evolution) {
  final int leafCount = sargLeaves.size();
  int[] columnIds = new int[leafCount];
  Arrays.fill(columnIds, -1);
  for (int leaf = 0; leaf < leafCount; ++leaf) {
    String columnName = sargLeaves.get(leaf).getColumnName();
    columnIds[leaf] = findColumns(evolution, columnName);
  }
  return columnIds;
}
|
static int[] function( List<PredicateLeaf> sargLeaves, SchemaEvolution evolution) { int[] result = new int[sargLeaves.size()]; Arrays.fill(result, -1); for(int i=0; i < result.length; ++i) { String colName = sargLeaves.get(i).getColumnName(); result[i] = findColumns(evolution, colName); } return result; }
|
/**
* Find the mapping from predicate leaves to columns.
* @param sargLeaves the search argument that we need to map
* @param evolution the mapping from reader to file schema
* @return an array mapping the sarg leaves to concrete column numbers in the
* file
*/
|
Find the mapping from predicate leaves to columns
|
mapSargColumnsToOrcInternalColIdx
|
{
"repo_name": "majetideepak/orc",
"path": "java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java",
"license": "apache-2.0",
"size": 58099
}
|
[
"java.util.Arrays",
"java.util.List",
"org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf"
] |
import java.util.Arrays; import java.util.List; import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
|
import java.util.*; import org.apache.hadoop.hive.ql.io.sarg.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 373,229
|
/**
 * Sets the {@link com.mucommander.commons.file.icon.FileIconProvider} instance
 * that provides system file icons.
 *
 * @param fip the FileIconProvider instance that provides system file icons
 */
private static void setSystemFileIconProvider(FileIconProvider fip) {
    systemFileIconProvider = fip;
}
|
static void function(FileIconProvider fip) { systemFileIconProvider = fip; }
|
/**
 * Sets the {@link com.mucommander.commons.file.icon.FileIconProvider} instance that provides system file icons.
 *
 * @param fip the FileIconProvider instance that provides system file icons
*/
|
Sets the <code>com.mucommander.commons.file.icon.FileIconProvider</code> instance that provides 'custom' file icons
|
setSystemFileIconProvider
|
{
"repo_name": "trol73/mucommander",
"path": "src/main/com/mucommander/ui/icon/FileIcons.java",
"license": "gpl-3.0",
"size": 14336
}
|
[
"com.mucommander.commons.file.icon.FileIconProvider"
] |
import com.mucommander.commons.file.icon.FileIconProvider;
|
import com.mucommander.commons.file.icon.*;
|
[
"com.mucommander.commons"
] |
com.mucommander.commons;
| 125,126
|
/**
 * Set the error stream that can be used by the shell to write its errors.
 *
 * @param err the {@link OutputStream} used by the shell to write its errors
 */
void setErrorStream(OutputStream err);
|
void setErrorStream(OutputStream err);
|
/**
* Set the error stream that can be used by the shell to write its errors.
*
* @param err The {@link OutputStream} used by the shell to write its errors
*/
|
Set the error stream that can be used by the shell to write its errors
|
setErrorStream
|
{
"repo_name": "Niky4000/UsefulUtils",
"path": "projects/ssh/apache_mina/apache-sshd-1.2.0/sshd-core/src/main/java/org/apache/sshd/server/Command.java",
"license": "gpl-3.0",
"size": 2308
}
|
[
"java.io.OutputStream"
] |
import java.io.OutputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 882,309
|
/**
 * Invalid URL: a null value must make the client throw a RuntimeException.
 *
 * @throws RuntimeException
 * @throws SOAPException
 */
public void testException3() throws RuntimeException, SOAPException {
    try {
        new SOAPClientImpl().send(request.getSoapMessage(), null);
        fail("Should not reach this");
    } catch (RuntimeException expected) {
        // OK -- the null URL was rejected as required
    }
}
|
void function() throws RuntimeException, SOAPException { try { SOAPClient client = new SOAPClientImpl(); client.send(request.getSoapMessage(), null); fail(STR); } catch (RuntimeException ex) { } }
|
/**
* Invalid URL. Null value.
*
* @throws RuntimeException
* @throws SOAPException
*/
|
Invalid URL. Null value
|
testException3
|
{
"repo_name": "petkivim/xrd4j",
"path": "src/client/src/test/java/com/pkrete/xrd4j/client/SOAPClientTest.java",
"license": "mit",
"size": 6538
}
|
[
"javax.xml.soap.SOAPException"
] |
import javax.xml.soap.SOAPException;
|
import javax.xml.soap.*;
|
[
"javax.xml"
] |
javax.xml;
| 2,694,272
|
private static Pref cachePrinterName = null;
|
private static Pref cachePrinterName = null;
|
/**
* Method to tell whether to plot the date in PostScript output by default.
* @return whether to plot the date in PostScript output by default.
*/
|
Method to tell whether to plot the date in PostScript output by default
|
isFactoryPlotDate
|
{
"repo_name": "imr/Electric8",
"path": "com/sun/electric/tool/io/IOTool.java",
"license": "gpl-3.0",
"size": 68905
}
|
[
"com.sun.electric.database.text.Pref"
] |
import com.sun.electric.database.text.Pref;
|
import com.sun.electric.database.text.*;
|
[
"com.sun.electric"
] |
com.sun.electric;
| 259,591
|
/**
 * Create a size-constrained buffer in terms of the maximum number of bytes
 * it will use.
 *
 * @param byteLimit the maximum number of bytes the buffer may use
 * @return a buffer config enforcing the byte limit with no record-count limit
 */
static BufferConfig<?> maxBytes(final long byteLimit) {
    return new EagerBufferConfigImpl(Long.MAX_VALUE, byteLimit);
}
|
static BufferConfig<?> maxBytes(final long byteLimit) { return new EagerBufferConfigImpl(Long.MAX_VALUE, byteLimit); }
|
/**
* Create a size-constrained buffer in terms of the maximum number of bytes it will use.
*/
|
Create a size-constrained buffer in terms of the maximum number of bytes it will use
|
maxBytes
|
{
"repo_name": "gf53520/kafka",
"path": "streams/src/main/java/org/apache/kafka/streams/kstream/Suppressed.java",
"license": "apache-2.0",
"size": 8205
}
|
[
"org.apache.kafka.streams.kstream.internals.suppress.EagerBufferConfigImpl"
] |
import org.apache.kafka.streams.kstream.internals.suppress.EagerBufferConfigImpl;
|
import org.apache.kafka.streams.kstream.internals.suppress.*;
|
[
"org.apache.kafka"
] |
org.apache.kafka;
| 2,857,000
|
/**
 * Sets this field in a copy of the TimeOfDay to a parsed text value.
 * <p>
 * The TimeOfDay attached to this property is unchanged by this call;
 * a new instance is returned instead.
 *
 * @param text the text value to set
 * @param locale optional locale to use for selecting a text symbol
 * @return a copy of the TimeOfDay with the field value changed
 * @throws IllegalArgumentException if the text value isn't valid
 */
public TimeOfDay setCopy(String text, Locale locale) {
    int[] currentValues = iTimeOfDay.getValues();
    int[] updatedValues = getField().set(iTimeOfDay, iFieldIndex, currentValues, text, locale);
    return new TimeOfDay(iTimeOfDay, updatedValues);
}
|
TimeOfDay function(String text, Locale locale) { int[] newValues = iTimeOfDay.getValues(); newValues = getField().set(iTimeOfDay, iFieldIndex, newValues, text, locale); return new TimeOfDay(iTimeOfDay, newValues); }
|
/**
* Sets this field in a copy of the TimeOfDay to a parsed text value.
* <p>
* The TimeOfDay attached to this property is unchanged by this call.
* Instead, a new instance is returned.
*
* @param text the text value to set
* @param locale optional locale to use for selecting a text symbol
* @return a copy of the TimeOfDay with the field value changed
* @throws IllegalArgumentException if the text value isn't valid
*/
|
Sets this field in a copy of the TimeOfDay to a parsed text value. The TimeOfDay attached to this property is unchanged by this call. Instead, a new instance is returned
|
setCopy
|
{
"repo_name": "charles-cooper/idylfin",
"path": "src/org/joda/time/TimeOfDay.java",
"license": "apache-2.0",
"size": 48905
}
|
[
"java.util.Locale"
] |
import java.util.Locale;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,531,221
|
/**
 * This method initializes optionsGroup: the group box containing the
 * "enable filter" check box and its wrapped explanatory hint label.
 */
private void createOptionsGroup(Composite parent) {
    // Layout data for the group itself: fill the cell in both directions,
    // but do not grab extra horizontal space.
    GridData optionsGroupLayoutData = new GridData();
    optionsGroupLayoutData.grabExcessHorizontalSpace = false;
    optionsGroupLayoutData.verticalAlignment = GridData.FILL;
    optionsGroupLayoutData.horizontalAlignment = GridData.FILL;
    // Layout data for the check box: take all remaining space, centered vertically.
    GridData checkBoxLayout = new GridData();
    checkBoxLayout.horizontalAlignment = GridData.FILL;
    checkBoxLayout.grabExcessHorizontalSpace = true;
    checkBoxLayout.grabExcessVerticalSpace = true;
    checkBoxLayout.verticalAlignment = GridData.CENTER;
    GridLayout optionsGroupLayout = new GridLayout();
    optionsGroupLayout.makeColumnsEqualWidth = true;
    // Group text, check box label and hint are externalized strings.
    optionsGroup = new Group(parent, SWT.NONE);
    optionsGroup.setText(Messages.getString("AlphabetsPreferencePage.9")); //$NON-NLS-1$
    optionsGroup.setLayoutData(optionsGroupLayoutData);
    optionsGroup.setLayout(optionsGroupLayout);
    enableFilterCheckBox = new Button(optionsGroup, SWT.CHECK);
    enableFilterCheckBox.setText(Messages.getString("AlphabetsPreferencePage.10")); //$NON-NLS-1$
    enableFilterCheckBox.setLayoutData(checkBoxLayout);
    // Wrapping hint label below the check box; widthHint forces wrapping at 250 px.
    lblFilterHint = new Label(optionsGroup, SWT.WRAP);
    GridData layoutData = new GridData(SWT.FILL, SWT.CENTER, true, false);
    layoutData.widthHint = 250;
    lblFilterHint.setLayoutData(layoutData);
    lblFilterHint.setText(Messages.getString("AlphabetsPreferencePage.12")); //$NON-NLS-1$
}
|
void function(Composite parent) { GridData optionsGroupLayoutData = new GridData(); optionsGroupLayoutData.grabExcessHorizontalSpace = false; optionsGroupLayoutData.verticalAlignment = GridData.FILL; optionsGroupLayoutData.horizontalAlignment = GridData.FILL; GridData checkBoxLayout = new GridData(); checkBoxLayout.horizontalAlignment = GridData.FILL; checkBoxLayout.grabExcessHorizontalSpace = true; checkBoxLayout.grabExcessVerticalSpace = true; checkBoxLayout.verticalAlignment = GridData.CENTER; GridLayout optionsGroupLayout = new GridLayout(); optionsGroupLayout.makeColumnsEqualWidth = true; optionsGroup = new Group(parent, SWT.NONE); optionsGroup.setText(Messages.getString(STR)); optionsGroup.setLayoutData(optionsGroupLayoutData); optionsGroup.setLayout(optionsGroupLayout); enableFilterCheckBox = new Button(optionsGroup, SWT.CHECK); enableFilterCheckBox.setText(Messages.getString(STR)); enableFilterCheckBox.setLayoutData(checkBoxLayout); lblFilterHint = new Label(optionsGroup, SWT.WRAP); GridData layoutData = new GridData(SWT.FILL, SWT.CENTER, true, false); layoutData.widthHint = 250; lblFilterHint.setLayoutData(layoutData); lblFilterHint.setText(Messages.getString(STR)); }
|
/**
* This method initializes optionsGroup.
*
*/
|
This method initializes optionsGroup
|
createOptionsGroup
|
{
"repo_name": "ChristophSonnberger/crypto",
"path": "org.jcryptool.crypto.classic.alphabets/src/org/jcryptool/crypto/classic/alphabets/preferences/AlphabetsPreferencePage.java",
"license": "epl-1.0",
"size": 23139
}
|
[
"org.eclipse.swt.layout.GridData",
"org.eclipse.swt.layout.GridLayout",
"org.eclipse.swt.widgets.Button",
"org.eclipse.swt.widgets.Composite",
"org.eclipse.swt.widgets.Group",
"org.eclipse.swt.widgets.Label"
] |
import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label;
|
import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
|
[
"org.eclipse.swt"
] |
org.eclipse.swt;
| 946,544
|
/**
 * Generate an X509 attribute certificate, based on the current issuer and
 * subject, using the passed in provider for the signing and the supplied
 * source of randomness, if required.
 *
 * @param key      the private key used to sign the certificate
 * @param provider the JCA provider performing the signature
 * @param random   source of randomness, used if the signer requires one
 * @return the generated attribute certificate
 * @throws NoSuchProviderException if the named provider is unavailable
 * @throws SignatureException if the signature cannot be created
 * @throws InvalidKeyException if the key is unsuitable for signing
 * @throws SecurityException wrapping any other GeneralSecurityException
 * @deprecated use generate()
 */
public X509AttributeCertificate generateCertificate(
    PrivateKey key,
    String provider,
    SecureRandom random)
    throws NoSuchProviderException, SecurityException, SignatureException, InvalidKeyException
{
    try
    {
        return generate(key, provider, random);
    }
    catch (NoSuchProviderException e)
    {
        throw e;
    }
    catch (SignatureException e)
    {
        throw e;
    }
    catch (InvalidKeyException e)
    {
        throw e;
    }
    catch (GeneralSecurityException e)
    {
        // Fix: preserve the underlying cause instead of discarding it so
        // callers can inspect the full stack trace; message is unchanged.
        throw new SecurityException("exception creating certificate: " + e, e);
    }
}
|
X509AttributeCertificate function( PrivateKey key, String provider, SecureRandom random) throws NoSuchProviderException, SecurityException, SignatureException, InvalidKeyException { try { return generate(key, provider, random); } catch (NoSuchProviderException e) { throw e; } catch (SignatureException e) { throw e; } catch (InvalidKeyException e) { throw e; } catch (GeneralSecurityException e) { throw new SecurityException(STR + e); } }
|
/**
* generate an X509 certificate, based on the current issuer and subject,
* using the passed in provider for the signing and the supplied source
* of randomness, if required.
* @deprecated use generate()
*/
|
generate an X509 certificate, based on the current issuer and subject, using the passed in provider for the signing and the supplied source of randomness, if required
|
generateCertificate
|
{
"repo_name": "sake/bouncycastle-java",
"path": "src/org/bouncycastle/x509/X509V2AttributeCertificateGenerator.java",
"license": "mit",
"size": 8251
}
|
[
"java.security.GeneralSecurityException",
"java.security.InvalidKeyException",
"java.security.NoSuchProviderException",
"java.security.PrivateKey",
"java.security.SecureRandom",
"java.security.SignatureException"
] |
import java.security.GeneralSecurityException; import java.security.InvalidKeyException; import java.security.NoSuchProviderException; import java.security.PrivateKey; import java.security.SecureRandom; import java.security.SignatureException;
|
import java.security.*;
|
[
"java.security"
] |
java.security;
| 2,234,849
|
EReference getBTSLemmaEntry_Translations();
|
EReference getBTSLemmaEntry_Translations();
|
/**
* Returns the meta object for the containment reference '{@link org.bbaw.bts.corpus.btsCorpusModel.BTSLemmaEntry#getTranslations <em>Translations</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference '<em>Translations</em>'.
* @see org.bbaw.bts.corpus.btsCorpusModel.BTSLemmaEntry#getTranslations()
* @see #getBTSLemmaEntry()
* @generated
*/
|
Returns the meta object for the containment reference '<code>org.bbaw.bts.corpus.btsCorpusModel.BTSLemmaEntry#getTranslations Translations</code>'.
|
getBTSLemmaEntry_Translations
|
{
"repo_name": "JKatzwinkel/bts",
"path": "org.bbaw.bts.model.corpus/src/org/bbaw/bts/corpus/btsCorpusModel/BtsCorpusModelPackage.java",
"license": "lgpl-3.0",
"size": 201277
}
|
[
"org.eclipse.emf.ecore.EReference"
] |
import org.eclipse.emf.ecore.EReference;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,781,801
|
/**
 * Check that persistent authentication isn't enabled (preference value 0)
 * when the preference isn't present.
 */
@Test
public void testPersistentAuthDisabled() {
    assertEquals(0, getPrefer());
    assertEquals(0, getPrefer("x", "y"));
    // repeated on purpose — presumably verifies the result is stable across
    // repeated lookups (e.g. caching); TODO confirm against getPrefer impl
    assertEquals(0, getPrefer("x", "y"));
}
|
void function() { assertEquals(0, getPrefer()); assertEquals(0, getPrefer("x", "y")); assertEquals(0, getPrefer("x", "y")); }
|
/**
* Check that the persistent authentication isn't enabled if the preference isn't present.
*/
|
Check that the persistent authentication isn't enabled if the preference isn't present
|
testPersistentAuthDisabled
|
{
"repo_name": "OpenUniversity/ovirt-engine",
"path": "backend/manager/modules/aaa/src/test/java/org/ovirt/engine/core/aaa/filters/FiltersHelperTest.java",
"license": "apache-2.0",
"size": 4232
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 2,101,497
|
/**
 * Creates a new instance of {@link ZonedDateTimeAssert} for the given value.
 *
 * @param actual the actual value.
 * @return the created assertion object.
 */
@CheckReturnValue
public static AbstractZonedDateTimeAssert<?> assertThat(ZonedDateTime actual) {
    return AssertionsForClassTypes.assertThat(actual);
}
|
static AbstractZonedDateTimeAssert<?> function(ZonedDateTime actual) { return AssertionsForClassTypes.assertThat(actual); }
|
/**
* Creates a new instance of <code>{@link ZonedDateTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
|
Creates a new instance of <code>ZonedDateTimeAssert</code>
|
assertThat
|
{
"repo_name": "ChrisA89/assertj-core",
"path": "src/main/java/org/assertj/core/api/Assertions.java",
"license": "apache-2.0",
"size": 107527
}
|
[
"java.time.ZonedDateTime"
] |
import java.time.ZonedDateTime;
|
import java.time.*;
|
[
"java.time"
] |
java.time;
| 792,677
|
/**
 * Add a mouse event callback to this element, lazily creating the backing
 * list on first registration.
 *
 * @param callback the mouseEventCallback to register
 */
public void addMouseEventCallback(MouseEventCallback callback) {
    if (mouseEventCallbacks == null) {
        // created on demand so elements without callbacks pay no cost
        mouseEventCallbacks = new ArrayList<MouseEventCallback>();
    }
    mouseEventCallbacks.add(callback);
}
|
void function(MouseEventCallback callback) { if (mouseEventCallbacks == null) { mouseEventCallbacks = new ArrayList<MouseEventCallback>(); } mouseEventCallbacks.add(callback); }
|
/**Add a mouse event callback to a element
*
* @param callback
* mouseEventCallback
*/
|
Add a mouse event callback to a element
|
addMouseEventCallback
|
{
"repo_name": "casmi/casmi",
"path": "src/main/java/casmi/graphics/element/Element.java",
"license": "lgpl-3.0",
"size": 19551
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,304,286
|
/**
 * Associates a TTemplatePerson object with this object through the
 * TTemplatePerson foreign key attribute; the back-reference on the
 * child is set as well so both sides stay consistent.
 *
 * @param l the TTemplatePerson to associate
 * @throws TorqueException if the associated collection cannot be loaded
 */
public void addTTemplatePerson(TTemplatePerson l) throws TorqueException
{
    getTTemplatePersons().add(l);
    // maintain the inverse side of the association
    l.setTPerson((TPerson) this);
}
|
void function(TTemplatePerson l) throws TorqueException { getTTemplatePersons().add(l); l.setTPerson((TPerson) this); }
|
/**
* Method called to associate a TTemplatePerson object to this object
* through the TTemplatePerson foreign key attribute
*
* @param l TTemplatePerson
* @throws TorqueException
*/
|
Method called to associate a TTemplatePerson object to this object through the TTemplatePerson foreign key attribute
|
addTTemplatePerson
|
{
"repo_name": "trackplus/Genji",
"path": "src/main/java/com/aurel/track/persist/BaseTPerson.java",
"license": "gpl-3.0",
"size": 1013508
}
|
[
"com.aurel.track.persist.TPerson",
"org.apache.torque.TorqueException"
] |
import com.aurel.track.persist.TPerson; import org.apache.torque.TorqueException;
|
import com.aurel.track.persist.*; import org.apache.torque.*;
|
[
"com.aurel.track",
"org.apache.torque"
] |
com.aurel.track; org.apache.torque;
| 1,206,977
|
/**
 * Test a tcp ACL with a specific destination port: the computed flow-mod
 * hint is expected to stop wildcarding ethertype, IP protocol, and the
 * transport destination port.
 */
@Test
public void testTcpDstPort() throws Exception {
    VNSAccessControlList acl = new VNSAccessControlList("testAcl");
    // "eq 40" on tcp from any source — matches destination port 40
    VNSAccessControlListEntry tcpAcl = tpAclEntry(40, "tcp", "any", "eq");
    acl.addAclEntry(tcpAcl);
    testHintInternal(acl, (Ethernet) tcpPacket1, defaultHint &
            ~(OFMatch.OFPFW_DL_TYPE | OFMatch.OFPFW_NW_PROTO | OFMatch.OFPFW_TP_DST));
}
|
void function() throws Exception { VNSAccessControlList acl = new VNSAccessControlList(STR); VNSAccessControlListEntry tcpAcl = tpAclEntry(40, "tcp", "any", "eq"); acl.addAclEntry(tcpAcl); testHintInternal(acl, (Ethernet) tcpPacket1, defaultHint & ~(OFMatch.OFPFW_DL_TYPE OFMatch.OFPFW_NW_PROTO OFMatch.OFPFW_TP_DST)); }
|
/**
* Test tcp ACL with specific destination port
*/
|
Test tcp ACL with specific destination port
|
testTcpDstPort
|
{
"repo_name": "mandeepdhami/netvirt-ctrl",
"path": "sdnplatform/src/test/java/org/sdnplatform/netvirt/virtualrouting/internal/VirtualRoutingHintTest.java",
"license": "epl-1.0",
"size": 19772
}
|
[
"org.openflow.protocol.OFMatch",
"org.sdnplatform.netvirt.core.VNSAccessControlList",
"org.sdnplatform.netvirt.core.VNSAccessControlListEntry",
"org.sdnplatform.packet.Ethernet"
] |
import org.openflow.protocol.OFMatch; import org.sdnplatform.netvirt.core.VNSAccessControlList; import org.sdnplatform.netvirt.core.VNSAccessControlListEntry; import org.sdnplatform.packet.Ethernet;
|
import org.openflow.protocol.*; import org.sdnplatform.netvirt.core.*; import org.sdnplatform.packet.*;
|
[
"org.openflow.protocol",
"org.sdnplatform.netvirt",
"org.sdnplatform.packet"
] |
org.openflow.protocol; org.sdnplatform.netvirt; org.sdnplatform.packet;
| 1,501,612
|
/**
 * Path computation where every device supports the SR capability
 * (SR_WITHOUT_SIGNALLING): with a cost constraint as well, the
 * cost-optimal path link1 -> link2 (cost 50 + 20 = 70) must be chosen
 * over the link3 -> link4 alternative.
 */
@Test
public void testpathComputationCase11() {
    Link link1 = addLink(DEVICE1, 10, DEVICE2, 20, true, 50);
    Link link2 = addLink(DEVICE2, 30, DEVICE4, 40, true, 20);
    Link link3 = addLink(DEVICE1, 80, DEVICE3, 70, true, 100);
    Link link4 = addLink(DEVICE3, 60, DEVICE4, 50, true, 80);
    CapabilityConstraint capabilityConst = CapabilityConstraint
            .of(CapabilityConstraint.CapabilityType.SR_WITHOUT_SIGNALLING);
    List<Constraint> constraints = new LinkedList<>();
    constraints.add(capabilityConst);
    CostConstraint costConst = CostConstraint.of(COST);
    constraints.add(costConst);
    //Device1
    DefaultAnnotations.Builder builder = DefaultAnnotations.builder();
    builder.set(AnnotationKeys.TYPE, L3);
    builder.set(LSRID, "1.1.1.1");
    addDevice(DEVICE1, builder);
    DeviceCapability device1Cap = netConfigRegistry.addConfig(DeviceId.deviceId("1.1.1.1"), DeviceCapability.class);
    device1Cap.setLabelStackCap(true)
            .setLocalLabelCap(false)
            .setSrCap(true)
            .apply();
    //Device2
    builder = DefaultAnnotations.builder();
    builder.set(AnnotationKeys.TYPE, L3);
    builder.set(LSRID, "2.2.2.2");
    addDevice(DEVICE2, builder);
    DeviceCapability device2Cap = netConfigRegistry.addConfig(DeviceId.deviceId("2.2.2.2"), DeviceCapability.class);
    device2Cap.setLabelStackCap(true)
            .setLocalLabelCap(false)
            .setSrCap(true)
            .apply();
    //Device3
    builder = DefaultAnnotations.builder();
    builder.set(AnnotationKeys.TYPE, L3);
    builder.set(LSRID, "3.3.3.3");
    addDevice(DEVICE3, builder);
    DeviceCapability device3Cap = netConfigRegistry.addConfig(DeviceId.deviceId("3.3.3.3"), DeviceCapability.class);
    device3Cap.setLabelStackCap(true)
            .setLocalLabelCap(false)
            .setSrCap(true)
            .apply();
    //Device4
    builder = DefaultAnnotations.builder();
    builder.set(AnnotationKeys.TYPE, L3);
    builder.set(LSRID, "4.4.4.4");
    addDevice(DEVICE4, builder);
    DeviceCapability device4Cap = netConfigRegistry.addConfig(DeviceId.deviceId("4.4.4.4"), DeviceCapability.class);
    device4Cap.setLabelStackCap(true)
            .setLocalLabelCap(false)
            .setSrCap(true)
            .apply();
    Set<Path> paths = computePath(link1, link2, link3, link4, constraints);
    // expected: the cheaper two-hop path DEVICE1 -> DEVICE2 -> DEVICE4
    List<Link> links = new LinkedList<>();
    links.add(link1);
    links.add(link2);
    assertThat(paths.iterator().next().links(), is(links));
    assertThat(paths.iterator().next().cost(), is((double) 70));
}
|
void function() { Link link1 = addLink(DEVICE1, 10, DEVICE2, 20, true, 50); Link link2 = addLink(DEVICE2, 30, DEVICE4, 40, true, 20); Link link3 = addLink(DEVICE1, 80, DEVICE3, 70, true, 100); Link link4 = addLink(DEVICE3, 60, DEVICE4, 50, true, 80); CapabilityConstraint capabilityConst = CapabilityConstraint .of(CapabilityConstraint.CapabilityType.SR_WITHOUT_SIGNALLING); List<Constraint> constraints = new LinkedList<>(); constraints.add(capabilityConst); CostConstraint costConst = CostConstraint.of(COST); constraints.add(costConst); DefaultAnnotations.Builder builder = DefaultAnnotations.builder(); builder.set(AnnotationKeys.TYPE, L3); builder.set(LSRID, STR); addDevice(DEVICE1, builder); DeviceCapability device1Cap = netConfigRegistry.addConfig(DeviceId.deviceId(STR), DeviceCapability.class); device1Cap.setLabelStackCap(true) .setLocalLabelCap(false) .setSrCap(true) .apply(); builder = DefaultAnnotations.builder(); builder.set(AnnotationKeys.TYPE, L3); builder.set(LSRID, STR); addDevice(DEVICE2, builder); DeviceCapability device2Cap = netConfigRegistry.addConfig(DeviceId.deviceId(STR), DeviceCapability.class); device2Cap.setLabelStackCap(true) .setLocalLabelCap(false) .setSrCap(true) .apply(); builder = DefaultAnnotations.builder(); builder.set(AnnotationKeys.TYPE, L3); builder.set(LSRID, STR); addDevice(DEVICE3, builder); DeviceCapability device3Cap = netConfigRegistry.addConfig(DeviceId.deviceId(STR), DeviceCapability.class); device3Cap.setLabelStackCap(true) .setLocalLabelCap(false) .setSrCap(true) .apply(); builder = DefaultAnnotations.builder(); builder.set(AnnotationKeys.TYPE, L3); builder.set(LSRID, STR); addDevice(DEVICE4, builder); DeviceCapability device4Cap = netConfigRegistry.addConfig(DeviceId.deviceId(STR), DeviceCapability.class); device4Cap.setLabelStackCap(true) .setLocalLabelCap(false) .setSrCap(true) .apply(); Set<Path> paths = computePath(link1, link2, link3, link4, constraints); List<Link> links = new LinkedList<>(); links.add(link1); 
links.add(link2); assertThat(paths.iterator().next().links(), is(links)); assertThat(paths.iterator().next().cost(), is((double) 70)); }
|
/**
* Device supporting SR capability.
*/
|
Device supporting SR capability
|
testpathComputationCase11
|
{
"repo_name": "Shashikanth-Huawei/bmp",
"path": "apps/pce/app/src/test/java/org/onosproject/pce/pceservice/PathComputationTest.java",
"license": "apache-2.0",
"size": 50167
}
|
[
"com.google.common.collect.ImmutableSet",
"java.util.LinkedList",
"java.util.List",
"java.util.Set",
"org.hamcrest.MatcherAssert",
"org.hamcrest.core.Is",
"org.onosproject.net.AnnotationKeys",
"org.onosproject.net.DefaultAnnotations",
"org.onosproject.net.DeviceId",
"org.onosproject.net.Link",
"org.onosproject.net.Path",
"org.onosproject.net.intent.Constraint",
"org.onosproject.pce.pceservice.constraint.CapabilityConstraint",
"org.onosproject.pce.pceservice.constraint.CostConstraint",
"org.onosproject.pcep.api.DeviceCapability"
] |
import com.google.common.collect.ImmutableSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import org.hamcrest.MatcherAssert; import org.hamcrest.core.Is; import org.onosproject.net.AnnotationKeys; import org.onosproject.net.DefaultAnnotations; import org.onosproject.net.DeviceId; import org.onosproject.net.Link; import org.onosproject.net.Path; import org.onosproject.net.intent.Constraint; import org.onosproject.pce.pceservice.constraint.CapabilityConstraint; import org.onosproject.pce.pceservice.constraint.CostConstraint; import org.onosproject.pcep.api.DeviceCapability;
|
import com.google.common.collect.*; import java.util.*; import org.hamcrest.*; import org.hamcrest.core.*; import org.onosproject.net.*; import org.onosproject.net.intent.*; import org.onosproject.pce.pceservice.constraint.*; import org.onosproject.pcep.api.*;
|
[
"com.google.common",
"java.util",
"org.hamcrest",
"org.hamcrest.core",
"org.onosproject.net",
"org.onosproject.pce",
"org.onosproject.pcep"
] |
com.google.common; java.util; org.hamcrest; org.hamcrest.core; org.onosproject.net; org.onosproject.pce; org.onosproject.pcep;
| 226,829
|
/**
 * Returns the collected routing values.
 *
 * @return the routes list backing this trace
 */
public List<String> getRoutes() {
    return mRoutes;
}
|
List<String> function() { return mRoutes; }
|
/**
* For routing values
*
* @return Routes
*/
|
For routing values
|
getRoutes
|
{
"repo_name": "qiujuer/Genius-Android",
"path": "caprice/kit-cmd/src/main/java/net/qiujuer/genius/kit/cmd/TraceRoute.java",
"license": "apache-2.0",
"size": 12161
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,536,192
|
if(arg0.getActionCommand().equals(okCommand))
{
ok=true;
operationTypes=new HashMap<Operation, Class<?>>();
for(OperationTypeComponent component:components)
operationTypes.put(component.getOperation(), component.getOperationType());
}
dispose();
}
|
if(arg0.getActionCommand().equals(okCommand)) { ok=true; operationTypes=new HashMap<Operation, Class<?>>(); for(OperationTypeComponent component:components) operationTypes.put(component.getOperation(), component.getOperationType()); } dispose(); }
|
/**
* If OK button was pressed, gets what type of business operation was
* selected to be created for a given operation. If OK or Cancel button
* were pressed closes the dialog.
*/
|
If OK button was pressed, gets what type of business operation was selected to be created for a given operation. If OK or Cancel button were pressed closes the dialog
|
actionPerformed
|
{
"repo_name": "farkas-arpad/KROKI-mockup-tool",
"path": "KrokiMockupTool/src/kroki/app/utils/uml/OperationsTypeDialog.java",
"license": "mit",
"size": 4955
}
|
[
"java.util.HashMap",
"org.eclipse.uml2.uml.Operation"
] |
import java.util.HashMap; import org.eclipse.uml2.uml.Operation;
|
import java.util.*; import org.eclipse.uml2.uml.*;
|
[
"java.util",
"org.eclipse.uml2"
] |
java.util; org.eclipse.uml2;
| 1,473,627
|
/**
 * Put a row on the destination rowsets.
 *
 * @param row  the metadata describing the row layout
 * @param data the row data to send to the destination steps
 * @throws KettleException if the row cannot be delivered
 */
public void putRow(RowMetaInterface row, Object data[]) throws KettleException;
|
void function(RowMetaInterface row, Object data[]) throws KettleException;
|
/**
* Put a row on the destination rowsets.
* @param row The row to send to the destinations steps
*/
|
Put a row on the destination rowsets
|
putRow
|
{
"repo_name": "soluvas/pdi-ce",
"path": "src/org/pentaho/di/trans/step/StepInterface.java",
"license": "apache-2.0",
"size": 10245
}
|
[
"org.pentaho.di.core.exception.KettleException",
"org.pentaho.di.core.row.RowMetaInterface"
] |
import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface;
|
import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 605,364
|
/**
 * Reads the data for a GridDataManager into a pre-constructed list of
 * IDataProviders (SFRComponents).
 *
 * @param providers list of IDataProviders to read the data into; null
 *                  entries (positions with no pin/rod set) are skipped
 * @param groupId   ID of the parent HDF5 Group, which should be open
 * @throws NullPointerException
 * @throws HDF5Exception
 */
private void readGridData(List<SFRComponent> providers, int groupId)
        throws NullPointerException, HDF5Exception {
    // Each position's data lives in a subgroup of "GridData" named by index.
    int gridDataGroupId = openGroup(groupId, "GridData");

    int size = providers.size();
    for (int index = 0; index < size; index++) {
        SFRComponent provider = providers.get(index);
        // Skip non-existent providers (places where no pin/rod is set).
        if (provider == null) {
            continue;
        }
        int positionGroupId = openGroup(gridDataGroupId, Integer.toString(index));
        readDataProvider(provider, positionGroupId);
        closeGroup(positionGroupId);
    }

    closeGroup(gridDataGroupId);
}
|
void function(List<SFRComponent> providers, int groupId) throws NullPointerException, HDF5Exception { int gridDataGroupId = openGroup(groupId, STR); for (int i = 0; i < providers.size(); i++) { SFRComponent provider = providers.get(i); if (provider != null) { int providerGroupId = openGroup(gridDataGroupId, Integer.toString(i)); readDataProvider(provider, providerGroupId); closeGroup(providerGroupId); } } closeGroup(gridDataGroupId); return; }
|
/**
* Reads in the data for a GridDataManager into a pre-constructed List of
* IDataProviders (SFRComponents).
*
* @param providers
* A List of IDataProviders to read the data into.
* @param groupId
* The ID of the parent HDF5 Group, which should be open.
* @throws NullPointerException
* @throws HDF5Exception
*/
|
Reads in the data for a GridDataManager into a pre-constructed List of IDataProviders (SFRComponents)
|
readGridData
|
{
"repo_name": "gorindn/ice",
"path": "src/org.eclipse.ice.reactor.sfr/src/org/eclipse/ice/reactor/sfr/base/SFReactorIOHandler.java",
"license": "epl-1.0",
"size": 64613
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 688,589
|
Date value = (Date) getValue();
return value != null ? new LocalDateTime(value).toString(formatter) : "";
}
|
Date value = (Date) getValue(); return value != null ? new LocalDateTime(value).toString(formatter) : ""; }
|
/**
* Format the YearMonthDay as String, using the specified format.
*
* @return DateTime formatted string
*/
|
Format the YearMonthDay as String, using the specified format
|
getAsText
|
{
"repo_name": "julienguiraud/jhipster",
"path": "src/main/java/com/mycompany/myapp/web/propertyeditors/LocaleDateTimeEditor.java",
"license": "apache-2.0",
"size": 1997
}
|
[
"java.util.Date",
"org.joda.time.LocalDateTime"
] |
import java.util.Date; import org.joda.time.LocalDateTime;
|
import java.util.*; import org.joda.time.*;
|
[
"java.util",
"org.joda.time"
] |
java.util; org.joda.time;
| 2,012,900
|
/**
 * Called when the panel key is pushed down.
 *
 * @param featureId The feature ID of the relevant panel (defaults to FEATURE_OPTIONS_PANEL).
 * @param event The key event.
 * @return Whether the key was handled.
 */
public final boolean onKeyDownPanel(int featureId, KeyEvent event) {
    final int keyCode = event.getKeyCode();
    // only react to the initial press, not key-repeat events
    if (event.getRepeatCount() == 0) {
        // The panel key was pushed, so set the chording key
        mPanelChordingKey = keyCode;
        PanelFeatureState st = getPanelState(featureId, true);
        if (!st.isOpen) {
            // panel not open yet: preparing it decides whether we handled the key
            return preparePanel(st, event);
        }
    }
    return false;
}
|
final boolean function(int featureId, KeyEvent event) { final int keyCode = event.getKeyCode(); if (event.getRepeatCount() == 0) { mPanelChordingKey = keyCode; PanelFeatureState st = getPanelState(featureId, true); if (!st.isOpen) { return preparePanel(st, event); } } return false; }
|
/**
* Called when the panel key is pushed down.
* @param featureId The feature ID of the relevant panel (defaults to FEATURE_OPTIONS_PANEL}.
* @param event The key event.
* @return Whether the key was handled.
*/
|
Called when the panel key is pushed down
|
onKeyDownPanel
|
{
"repo_name": "rex-xxx/mt6572_x201",
"path": "frameworks/base/policy/src/com/android/internal/policy/impl/PhoneWindow.java",
"license": "gpl-2.0",
"size": 140433
}
|
[
"android.view.KeyEvent"
] |
import android.view.KeyEvent;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 838,347
|
/**
 * Converts full-notation {@link QName} keys to their prefixed string form,
 * keeping the associated values unchanged.
 *
 * @param properties the properties map keyed by full QNames
 * @return a new map with the same values keyed by prefixed strings
 */
protected Map<String, Serializable> toPrefixedProperties(Map<QName, Serializable> properties) {
    Map<String, Serializable> prefixed = new HashMap<String, Serializable>(properties.size());
    for (Entry<QName, Serializable> property : properties.entrySet()) {
        String prefixedKey = property.getKey().toPrefixString(getNamespaceService());
        prefixed.put(prefixedKey, property.getValue());
    }
    return prefixed;
}
|
Map<String, Serializable> function(Map<QName, Serializable> properties) { Map<String, Serializable> result = new HashMap<String, Serializable>(properties.size()); for (Entry<QName, Serializable> nextProp : properties.entrySet()) { result.put(nextProp.getKey().toPrefixString(getNamespaceService()), nextProp.getValue()); } return result; }
|
/**
* Converts full notation {@link QName} to prefixed string with the same
* value.
*
* @param properties
* is the properties map
* @return the updated keys map
*/
|
Converts full notation <code>QName</code> to prefixed string with the same value
|
toPrefixedProperties
|
{
"repo_name": "SirmaITT/conservation-space-1.7.0",
"path": "docker/sep-alfresco/alfresco-emf-integration/alfresco-cmf/src/main/java/com/sirma/itt/cmf/integration/webscript/BaseAlfrescoScript.java",
"license": "lgpl-3.0",
"size": 26195
}
|
[
"java.io.Serializable",
"java.util.HashMap",
"java.util.Map",
"org.alfresco.service.namespace.QName"
] |
import java.io.Serializable; import java.util.HashMap; import java.util.Map; import org.alfresco.service.namespace.QName;
|
import java.io.*; import java.util.*; import org.alfresco.service.namespace.*;
|
[
"java.io",
"java.util",
"org.alfresco.service"
] |
java.io; java.util; org.alfresco.service;
| 1,561,769
|
/**
 * Applies a regex to a Spannable, turning each accepted match into a link.
 *
 * @param spannable Spannable whose text is to be marked-up with links.
 * @param pattern Regex pattern to be used for finding links.
 * @param defaultScheme The default scheme to be prepended to links if the link does not
 *                      start with one of the <code>schemes</code> given; null is
 *                      treated as the empty string.
 * @param schemes Array of schemes (eg <code>http://</code>) to check if the link found
 *                contains a scheme. Passing a null or empty value means prepend
 *                defaultScheme to all links.
 * @param matchFilter The filter that is used to allow the client code additional control
 *                    over which pattern matches are to be converted into links.
 * @param transformFilter Filter to allow the client code to update the link found.
 *
 * @return True if at least one link is found and applied.
 */
public static final boolean addLinks(@NonNull Spannable spannable, @NonNull Pattern pattern,
        @Nullable String defaultScheme, @Nullable String[] schemes,
        @Nullable MatchFilter matchFilter, @Nullable TransformFilter transformFilter) {
    final String[] schemesCopy;
    if (defaultScheme == null) defaultScheme = "";
    if (schemes == null || schemes.length < 1) {
        schemes = EmptyArray.STRING;
    }
    // Slot 0 holds the default scheme; the remaining slots are lower-cased
    // copies of the caller-supplied schemes (Locale.ROOT keeps the casing
    // locale-independent).
    schemesCopy = new String[schemes.length + 1];
    schemesCopy[0] = defaultScheme.toLowerCase(Locale.ROOT);
    for (int index = 0; index < schemes.length; index++) {
        String scheme = schemes[index];
        schemesCopy[index + 1] = (scheme == null) ? "" : scheme.toLowerCase(Locale.ROOT);
    }
    boolean hasMatches = false;
    Matcher m = pattern.matcher(spannable);
    while (m.find()) {
        int start = m.start();
        int end = m.end();
        boolean allowed = true;
        if (matchFilter != null) {
            allowed = matchFilter.acceptMatch(spannable, start, end);
        }
        if (allowed) {
            // makeUrl presumably normalizes the scheme and applies the
            // transform filter — confirm against its definition
            String url = makeUrl(m.group(0), schemesCopy, m, transformFilter);
            applyLink(url, start, end, spannable);
            hasMatches = true;
        }
    }
    return hasMatches;
}
|
static final boolean function(@NonNull Spannable spannable, @NonNull Pattern pattern, @Nullable String defaultScheme, @Nullable String[] schemes, @Nullable MatchFilter matchFilter, @Nullable TransformFilter transformFilter) { final String[] schemesCopy; if (defaultScheme == null) defaultScheme = STR" : scheme.toLowerCase(Locale.ROOT); } boolean hasMatches = false; Matcher m = pattern.matcher(spannable); while (m.find()) { int start = m.start(); int end = m.end(); boolean allowed = true; if (matchFilter != null) { allowed = matchFilter.acceptMatch(spannable, start, end); } if (allowed) { String url = makeUrl(m.group(0), schemesCopy, m, transformFilter); applyLink(url, start, end, spannable); hasMatches = true; } } return hasMatches; }
|
/**
* Applies a regex to a Spannable turning the matches into links.
*
* @param spannable Spannable whose text is to be marked-up with links.
* @param pattern Regex pattern to be used for finding links.
* @param defaultScheme The default scheme to be prepended to links if the link does not
* start with one of the <code>schemes</code> given.
* @param schemes Array of schemes (eg <code>http://</code>) to check if the link found
* contains a scheme. Passing a null or empty value means prepend defaultScheme
* to all links.
* @param matchFilter The filter that is used to allow the client code additional control
* over which pattern matches are to be converted into links.
* @param transformFilter Filter to allow the client code to update the link found.
*
* @return True if at least one link is found and applied.
*/
|
Applies a regex to a Spannable turning the matches into links
|
addLinks
|
{
"repo_name": "xorware/android_frameworks_base",
"path": "core/java/android/text/util/Linkify.java",
"license": "apache-2.0",
"size": 24052
}
|
[
"android.annotation.NonNull",
"android.annotation.Nullable",
"android.text.Spannable",
"java.util.Locale",
"java.util.regex.Matcher",
"java.util.regex.Pattern"
] |
import android.annotation.NonNull; import android.annotation.Nullable; import android.text.Spannable; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern;
|
import android.annotation.*; import android.text.*; import java.util.*; import java.util.regex.*;
|
[
"android.annotation",
"android.text",
"java.util"
] |
android.annotation; android.text; java.util;
| 2,018,435
|
void selectPage(ClasspathPagePresenter property);
interface ActionDelegate {
|
void selectPage(ClasspathPagePresenter property); interface ActionDelegate {
|
/**
* Selects classpath configuration page
*
* @param property
* chosen page
*/
|
Selects classpath configuration page
|
selectPage
|
{
"repo_name": "gazarenkov/che-sketch",
"path": "plugins/plugin-java/che-plugin-java-ext-lang-client/src/main/java/org/eclipse/che/ide/ext/java/client/project/classpath/ProjectClasspathView.java",
"license": "epl-1.0",
"size": 2479
}
|
[
"org.eclipse.che.ide.ext.java.client.project.classpath.valueproviders.pages.ClasspathPagePresenter"
] |
import org.eclipse.che.ide.ext.java.client.project.classpath.valueproviders.pages.ClasspathPagePresenter;
|
import org.eclipse.che.ide.ext.java.client.project.classpath.valueproviders.pages.*;
|
[
"org.eclipse.che"
] |
org.eclipse.che;
| 350,587
|
/**
 * Divides a (sub)sentence recursively by token frequency as a cover of the
 * NST method, appending one context/child sequence per step.
 *
 * @param tokens (sub)tokens of the original sentence
 * @param cxt    accumulated context string for this subtree
 * @param seqs   output list collecting the generated sequences
 */
private void byFreq(String[] tokens, String cxt, ArrayList<String> seqs) {
    // Truncate the accumulated context to its last (order - 1) tokens.
    String[] cxtTokens = cxt.split(" ");
    int from = Math.max(0, cxtTokens.length - this.order + 1);
    String realCxt = StringUtils.join(Arrays.copyOfRange(cxtTokens, from, cxtTokens.length), " ");

    // Base case: no more tokens — emit the end marker for this context.
    if (tokens.length == 0) {
        seqs.add(realCxt + " <E>");
        return;
    }

    // Pick the most frequent token in this span as the split point.
    String child = "";
    int best = 0;
    for (String token : tokens) {
        int freq = this.dist.get(token);
        if (freq > best) {
            best = freq;
            child = token;
        }
    }
    seqs.add(realCxt + " " + child);

    // Recurse on the left and right halves around the chosen token.
    int split = Arrays.asList(tokens).indexOf(child);
    byFreq(Arrays.copyOfRange(tokens, 0, split), cxt + " " + child + "-L", seqs);
    byFreq(Arrays.copyOfRange(tokens, split + 1, tokens.length), cxt + " " + child + "-R", seqs);
}
|
void function(String[] tokens,String cxt,ArrayList<String> seqs){ if(tokens.length==0){ String[] cxtTokens = cxt.split(" STR "); seqs.add(realCxt+STR); return; } String child = STR STR STR STR STR-LSTR STR-R",seqs); }
|
/**
* Divide a (sub)sentence recursively by frequncy as a cover of NST method
*
* @param subtree subtree of a NST.
* @param tokens (sub)tokens of original sentence.
**/
|
Divide a (sub)sentence recursively by frequncy as a cover of NST method
|
byFreq
|
{
"repo_name": "aisophie/HWS",
"path": "src/structures/NST.java",
"license": "gpl-3.0",
"size": 8981
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,306,216
|
/**
 * Add generic resources for this component, merging them into any
 * resources already present in the component configuration.
 *
 * @param resources resource name to amount map; a null map is a no-op
 * @return this declarer, for chaining
 */
@Override
public T addResources(Map<String, Double> resources) {
    if (resources != null) {
        // fetch (or lazily create) the component's resource map, then merge
        Map<String, Double> currentResources = (Map<String, Double>) getComponentConfiguration().computeIfAbsent(
                Config.TOPOLOGY_COMPONENT_RESOURCES_MAP, (k) -> new HashMap<>());
        currentResources.putAll(resources);
    }
    return (T) this;
}
|
T function(Map<String, Double> resources) { if (resources != null) { Map<String, Double> currentResources = (Map<String, Double>) getComponentConfiguration().computeIfAbsent( Config.TOPOLOGY_COMPONENT_RESOURCES_MAP, (k) -> new HashMap<>()); currentResources.putAll(resources); } return (T) this; }
|
/**
* Add generic resources for this component.
*/
|
Add generic resources for this component
|
addResources
|
{
"repo_name": "kishorvpatil/incubator-storm",
"path": "storm-client/src/jvm/org/apache/storm/topology/BaseConfigurationDeclarer.java",
"license": "apache-2.0",
"size": 4105
}
|
[
"java.util.HashMap",
"java.util.Map",
"org.apache.storm.Config"
] |
import java.util.HashMap; import java.util.Map; import org.apache.storm.Config;
|
import java.util.*; import org.apache.storm.*;
|
[
"java.util",
"org.apache.storm"
] |
java.util; org.apache.storm;
| 2,698,380
|
public static BrowserStartupController get(Context context, int libraryProcessType) {
assert ThreadUtils.runningOnUiThread() : "Tried to start the browser on the wrong thread.";
ThreadUtils.assertOnUiThread();
if (sInstance == null) {
assert LibraryProcessType.PROCESS_BROWSER == libraryProcessType
|| LibraryProcessType.PROCESS_WEBVIEW == libraryProcessType;
sInstance = new BrowserStartupController(context, libraryProcessType);
}
assert sInstance.mLibraryProcessType == libraryProcessType : "Wrong process type";
return sInstance;
}
|
static BrowserStartupController function(Context context, int libraryProcessType) { assert ThreadUtils.runningOnUiThread() : STR; ThreadUtils.assertOnUiThread(); if (sInstance == null) { assert LibraryProcessType.PROCESS_BROWSER == libraryProcessType LibraryProcessType.PROCESS_WEBVIEW == libraryProcessType; sInstance = new BrowserStartupController(context, libraryProcessType); } assert sInstance.mLibraryProcessType == libraryProcessType : STR; return sInstance; }
|
/**
* Get BrowserStartupController instance, create a new one if no existing.
*
* @param context the application context.
* @param libraryProcessType the type of process the shared library is loaded. it must be
* LibraryProcessType.PROCESS_BROWSER or
* LibraryProcessType.PROCESS_WEBVIEW.
* @return BrowserStartupController instance.
*/
|
Get BrowserStartupController instance, create a new one if no existing
|
get
|
{
"repo_name": "TheTypoMaster/chromium-crosswalk",
"path": "content/public/android/java/src/org/chromium/content/browser/BrowserStartupController.java",
"license": "bsd-3-clause",
"size": 13473
}
|
[
"android.content.Context",
"org.chromium.base.ThreadUtils",
"org.chromium.base.library_loader.LibraryProcessType"
] |
import android.content.Context; import org.chromium.base.ThreadUtils; import org.chromium.base.library_loader.LibraryProcessType;
|
import android.content.*; import org.chromium.base.*; import org.chromium.base.library_loader.*;
|
[
"android.content",
"org.chromium.base"
] |
android.content; org.chromium.base;
| 2,021,201
|
public default <V> IgniteFunction<T, V> andThen(IgniteFunction<? super R, ? extends V> after) {
Objects.requireNonNull(after);
return (T t) -> after.apply(apply(t));
}
|
default <V> IgniteFunction<T, V> function(IgniteFunction<? super R, ? extends V> after) { Objects.requireNonNull(after); return (T t) -> after.apply(apply(t)); }
|
/**
* Compose this function and given function.
*
* @param after Function to compose with.
* @param <V> Type of value which result of {@code after} extends.
* @return Functions composition.
*/
|
Compose this function and given function
|
andThen
|
{
"repo_name": "samaitra/ignite",
"path": "modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteFunction.java",
"license": "apache-2.0",
"size": 2209
}
|
[
"java.util.Objects"
] |
import java.util.Objects;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,300,960
|
protected void uninstallDefaults()
{
if (textComponent.getCaretColor() instanceof UIResource)
textComponent.setCaretColor(null);
if (textComponent.getSelectionColor() instanceof UIResource)
textComponent.setSelectionColor(null);
if (textComponent.getDisabledTextColor() instanceof UIResource)
textComponent.setDisabledTextColor(null);
if (textComponent.getSelectedTextColor() instanceof UIResource)
textComponent.setSelectedTextColor(null);
LookAndFeel.uninstallBorder(textComponent);
if (textComponent.getMargin() instanceof UIResource)
textComponent.setMargin(null);
}
|
void function() { if (textComponent.getCaretColor() instanceof UIResource) textComponent.setCaretColor(null); if (textComponent.getSelectionColor() instanceof UIResource) textComponent.setSelectionColor(null); if (textComponent.getDisabledTextColor() instanceof UIResource) textComponent.setDisabledTextColor(null); if (textComponent.getSelectedTextColor() instanceof UIResource) textComponent.setSelectedTextColor(null); LookAndFeel.uninstallBorder(textComponent); if (textComponent.getMargin() instanceof UIResource) textComponent.setMargin(null); }
|
/**
* Uninstalls all default properties that have previously been installed by
* this UI.
*/
|
Uninstalls all default properties that have previously been installed by this UI
|
uninstallDefaults
|
{
"repo_name": "SanDisk-Open-Source/SSD_Dashboard",
"path": "uefi/gcc/gcc-4.6.3/libjava/classpath/javax/swing/plaf/basic/BasicTextUI.java",
"license": "gpl-2.0",
"size": 47133
}
|
[
"javax.swing.LookAndFeel",
"javax.swing.plaf.UIResource"
] |
import javax.swing.LookAndFeel; import javax.swing.plaf.UIResource;
|
import javax.swing.*; import javax.swing.plaf.*;
|
[
"javax.swing"
] |
javax.swing;
| 1,580,222
|
public String getQueryMethodArgumentFieldsJsString() {
String queryMethodArgsJs = "[";
for (String methodArg : queryMethodArgumentFieldList) {
if (!StringUtils.equals(queryMethodArgsJs, "{")) {
queryMethodArgsJs += ",";
}
queryMethodArgsJs += "\"" + methodArg + "\"";
}
queryMethodArgsJs += "]";
return queryMethodArgsJs;
}
|
String function() { String queryMethodArgsJs = "["; for (String methodArg : queryMethodArgumentFieldList) { if (!StringUtils.equals(queryMethodArgsJs, "{")) { queryMethodArgsJs += ","; } queryMethodArgsJs += "\"STR\STR]"; return queryMethodArgsJs; }
|
/**
* Builds String for passing the queryMethodArgumentFieldList as a Javascript array
*
* @return String js parameter string
*/
|
Builds String for passing the queryMethodArgumentFieldList as a Javascript array
|
getQueryMethodArgumentFieldsJsString
|
{
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-kns/src/main/java/org/kuali/kfs/krad/uif/field/AttributeQuery.java",
"license": "agpl-3.0",
"size": 15331
}
|
[
"org.apache.commons.lang.StringUtils"
] |
import org.apache.commons.lang.StringUtils;
|
import org.apache.commons.lang.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 1,487,873
|
public void updateApplicationRegistration(String state, String keyType, int appId) throws APIManagementException {
Connection conn = null;
PreparedStatement ps = null;
String sqlStmt = "UPDATE AM_APPLICATION_KEY_MAPPING " +
"SET STATE = ? WHERE APPLICATION_ID = ? AND KEY_TYPE = ?";
try {
conn = APIMgtDBUtil.getConnection();
conn.setAutoCommit(false);
ps = conn.prepareStatement(sqlStmt);
ps.setString(1, state);
ps.setInt(2, appId);
ps.setString(3, keyType);
ps.execute();
conn.commit();
} catch (SQLException e) {
handleException("Error while updating registration entry.", e);
} finally {
APIMgtDBUtil.closeAllConnections(ps, conn, null);
}
}
|
void function(String state, String keyType, int appId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; String sqlStmt = STR + STR; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); ps = conn.prepareStatement(sqlStmt); ps.setString(1, state); ps.setInt(2, appId); ps.setString(3, keyType); ps.execute(); conn.commit(); } catch (SQLException e) { handleException(STR, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } }
|
/**
* Updates the state of the Application Registration.
* @param state State of the registration.
* @param keyType PRODUCTION | SANDBOX
* @param appId ID of the Application.
* @throws APIManagementException if updating fails.
*/
|
Updates the state of the Application Registration
|
updateApplicationRegistration
|
{
"repo_name": "madusankapremaratne/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/dao/ApiMgtDAO.java",
"license": "apache-2.0",
"size": 400222
}
|
[
"java.sql.Connection",
"java.sql.PreparedStatement",
"java.sql.SQLException",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil"
] |
import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil;
|
import java.sql.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.impl.utils.*;
|
[
"java.sql",
"org.wso2.carbon"
] |
java.sql; org.wso2.carbon;
| 1,620,813
|
// ===========================================================================
public void _unresolve(CommandContext ci)
{
gluer.unresolve(getIds(ci), true);
}
|
void function(CommandContext ci) { gluer.unresolve(getIds(ci), true); }
|
/**
* Requests the framework to unresolve one or more services.
* It requires 1 parameter:
*
* - a partial class name. All classes matching the given name will be unresolved.
*
* The stop will unglue() them from the framework and unresolve them.
*
* @param ci The current context.
*/
|
Requests the framework to unresolve one or more services. It requires 1 parameter: - a partial class name. All classes matching the given name will be unresolved. The stop will unglue() them from the framework and unresolve them
|
_unresolve
|
{
"repo_name": "gevaerts/Gluewine",
"path": "imp/src/java/org/gluewine/console/impl/SystemCommandProvider.java",
"license": "apache-2.0",
"size": 27950
}
|
[
"org.gluewine.console.CommandContext"
] |
import org.gluewine.console.CommandContext;
|
import org.gluewine.console.*;
|
[
"org.gluewine.console"
] |
org.gluewine.console;
| 674,528
|
protected void newTransformation() {
TransMeta newTransMeta = new TransMeta();
newTransMeta.getDatabases().addAll( jobMeta.getDatabases() );
newTransMeta.setRepository( rep );
newTransMeta.setRepositoryDirectory( jobMeta.getRepositoryDirectory() );
newTransMeta.setMetaStore( metaStore );
TransDialog transDialog = new TransDialog( shell, SWT.NONE, newTransMeta, rep );
if ( transDialog.open() != null ) {
Spoon spoon = Spoon.getInstance();
spoon.addTransGraph( newTransMeta );
boolean saved = false;
try {
if ( rep != null ) {
if ( !Utils.isEmpty( newTransMeta.getName() ) ) {
wName.setText( newTransMeta.getName() );
}
saved = spoon.saveToRepository( newTransMeta, false );
if ( rep.getRepositoryMeta().getRepositoryCapabilities().supportsReferences() ) {
specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE;
referenceObjectId = newTransMeta.getObjectId();
} else {
specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
}
} else {
saved = spoon.saveToFile( newTransMeta );
specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
}
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Error saving new transformation", e );
}
if ( saved ) {
setRadioButtons();
switch ( specificationMethod ) {
case FILENAME:
wFilename.setText( Const.NVL( newTransMeta.getFilename(), "" ) );
break;
case REPOSITORY_BY_NAME:
wTransname.setText( Const.NVL( newTransMeta.getName(), "" ) );
wDirectory.setText( newTransMeta.getRepositoryDirectory().getPath() );
break;
case REPOSITORY_BY_REFERENCE:
getByReferenceData( newTransMeta.getObjectId() );
break;
default:
break;
}
getParameters( newTransMeta );
}
}
}
|
void function() { TransMeta newTransMeta = new TransMeta(); newTransMeta.getDatabases().addAll( jobMeta.getDatabases() ); newTransMeta.setRepository( rep ); newTransMeta.setRepositoryDirectory( jobMeta.getRepositoryDirectory() ); newTransMeta.setMetaStore( metaStore ); TransDialog transDialog = new TransDialog( shell, SWT.NONE, newTransMeta, rep ); if ( transDialog.open() != null ) { Spoon spoon = Spoon.getInstance(); spoon.addTransGraph( newTransMeta ); boolean saved = false; try { if ( rep != null ) { if ( !Utils.isEmpty( newTransMeta.getName() ) ) { wName.setText( newTransMeta.getName() ); } saved = spoon.saveToRepository( newTransMeta, false ); if ( rep.getRepositoryMeta().getRepositoryCapabilities().supportsReferences() ) { specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE; referenceObjectId = newTransMeta.getObjectId(); } else { specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME; } } else { saved = spoon.saveToFile( newTransMeta ); specificationMethod = ObjectLocationSpecificationMethod.FILENAME; } } catch ( Exception e ) { new ErrorDialog( shell, "Error", STR, e ); } if ( saved ) { setRadioButtons(); switch ( specificationMethod ) { case FILENAME: wFilename.setText( Const.NVL( newTransMeta.getFilename(), STR" ) ); wDirectory.setText( newTransMeta.getRepositoryDirectory().getPath() ); break; case REPOSITORY_BY_REFERENCE: getByReferenceData( newTransMeta.getObjectId() ); break; default: break; } getParameters( newTransMeta ); } } }
|
/**
* Ask the user to fill in the details...
*/
|
Ask the user to fill in the details..
|
newTransformation
|
{
"repo_name": "denisprotopopov/pentaho-kettle",
"path": "ui/src/org/pentaho/di/ui/job/entries/trans/JobEntryTransDialog.java",
"license": "apache-2.0",
"size": 65814
}
|
[
"org.pentaho.di.core.Const",
"org.pentaho.di.core.ObjectLocationSpecificationMethod",
"org.pentaho.di.core.util.Utils",
"org.pentaho.di.trans.TransMeta",
"org.pentaho.di.ui.core.dialog.ErrorDialog",
"org.pentaho.di.ui.spoon.Spoon",
"org.pentaho.di.ui.trans.dialog.TransDialog"
] |
import org.pentaho.di.core.Const; import org.pentaho.di.core.ObjectLocationSpecificationMethod; import org.pentaho.di.core.util.Utils; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.trans.dialog.TransDialog;
|
import org.pentaho.di.core.*; import org.pentaho.di.core.util.*; import org.pentaho.di.trans.*; import org.pentaho.di.ui.core.dialog.*; import org.pentaho.di.ui.spoon.*; import org.pentaho.di.ui.trans.dialog.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 1,070,571
|
EReference getSystemOfRecords_Guest();
|
EReference getSystemOfRecords_Guest();
|
/**
* Returns the meta object for the containment reference '{@link org.nasdanika.examples.bank.SystemOfRecords#getGuest <em>Guest</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference '<em>Guest</em>'.
* @see org.nasdanika.examples.bank.SystemOfRecords#getGuest()
* @see #getSystemOfRecords()
* @generated
*/
|
Returns the meta object for the containment reference '<code>org.nasdanika.examples.bank.SystemOfRecords#getGuest Guest</code>'.
|
getSystemOfRecords_Guest
|
{
"repo_name": "Nasdanika/examples",
"path": "org.nasdanika.examples.bank/src/org/nasdanika/examples/bank/BankPackage.java",
"license": "epl-1.0",
"size": 69780
}
|
[
"org.eclipse.emf.ecore.EReference"
] |
import org.eclipse.emf.ecore.EReference;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,165,100
|
@Test
public void testContextSpecificServletFilterWhenInitThrowsException()
throws Exception {
Configuration conf = new Configuration();
HttpServer http = createTestServer(conf);
HttpServer.defineFilter(http.webAppContext,
"ErrorFilter", ErrorFilter.class.getName(),
null, null);
try {
http.start();
fail("expecting exception");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
}
}
|
void function() throws Exception { Configuration conf = new Configuration(); HttpServer http = createTestServer(conf); HttpServer.defineFilter(http.webAppContext, STR, ErrorFilter.class.getName(), null, null); try { http.start(); fail(STR); } catch (IOException e) { GenericTestUtils.assertExceptionContains(STR, e); } }
|
/**
* Similar to the above test case, except that it uses a different API to add the
* filter. Regression test for HADOOP-8786.
*/
|
Similar to the above test case, except that it uses a different API to add the filter. Regression test for HADOOP-8786
|
testContextSpecificServletFilterWhenInitThrowsException
|
{
"repo_name": "intel-hadoop/hbase-rhino",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java",
"license": "apache-2.0",
"size": 6584
}
|
[
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.GenericTestUtils"
] |
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.GenericTestUtils;
|
import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 694,588
|
boolean check(@NonNull T sender) throws Exception;
|
boolean check(@NonNull T sender) throws Exception;
|
/**
* Check listener.
* @param sender
* @throws Exception
*/
|
Check listener
|
check
|
{
"repo_name": "fjalvingh/domui",
"path": "to.etc.domui/src/main/java/to/etc/domui/component/event/ICheckCallback.java",
"license": "lgpl-2.1",
"size": 247
}
|
[
"org.eclipse.jdt.annotation.NonNull"
] |
import org.eclipse.jdt.annotation.NonNull;
|
import org.eclipse.jdt.annotation.*;
|
[
"org.eclipse.jdt"
] |
org.eclipse.jdt;
| 53,502
|
public void recvTensor(org.tensorflow.distruntime.RecvTensorRequest request,
io.grpc.stub.StreamObserver<org.tensorflow.distruntime.RecvTensorResponse> responseObserver) {
asyncUnaryCall(
getChannel().newCall(METHOD_RECV_TENSOR, getCallOptions()), request, responseObserver);
}
|
void function(org.tensorflow.distruntime.RecvTensorRequest request, io.grpc.stub.StreamObserver<org.tensorflow.distruntime.RecvTensorResponse> responseObserver) { asyncUnaryCall( getChannel().newCall(METHOD_RECV_TENSOR, getCallOptions()), request, responseObserver); }
|
/**
* <pre>
* See worker.proto for details.
* </pre>
*/
|
<code> See worker.proto for details. </code>
|
recvTensor
|
{
"repo_name": "nubbel/swift-tensorflow",
"path": "JavaGenerated/org/tensorflow/distruntime/WorkerServiceGrpc.java",
"license": "mit",
"size": 35911
}
|
[
"io.grpc.stub.ClientCalls",
"io.grpc.stub.ServerCalls"
] |
import io.grpc.stub.ClientCalls; import io.grpc.stub.ServerCalls;
|
import io.grpc.stub.*;
|
[
"io.grpc.stub"
] |
io.grpc.stub;
| 287,799
|
public boolean setSocket(String socketHost, int socketPort) {
// Terminate previous sender
if (gpsSender != null) {
gpsSender.terminate();
}
Toolkit tk = Toolkit.getDefaultToolkit();
Dimension d = tk.getScreenSize();
int port = socketPort;
String host = socketHost;
// get Host & port
SocketDialog dialog = new SocketDialog(caller, "Enter Host & Port",
socketPort, socketHost);
dialog.setLocation(d.width * 3 / 8 + 50, d.height * 3 / 8 - 30);
dialog.show();
if (!dialog.isValid()) {
System.out
.println("SenderManager.setSocket() dialog not valid, returning.");
return false;
}
host = dialog.getHostName();
port = dialog.getPort();
simulator.saveSocket(host, port);
gpsSender = new SocketGPSSender(host, port, gpsSenderListener);
gpsSender.start();
return true;
}
|
boolean function(String socketHost, int socketPort) { if (gpsSender != null) { gpsSender.terminate(); } Toolkit tk = Toolkit.getDefaultToolkit(); Dimension d = tk.getScreenSize(); int port = socketPort; String host = socketHost; SocketDialog dialog = new SocketDialog(caller, STR, socketPort, socketHost); dialog.setLocation(d.width * 3 / 8 + 50, d.height * 3 / 8 - 30); dialog.show(); if (!dialog.isValid()) { System.out .println(STR); return false; } host = dialog.getHostName(); port = dialog.getPort(); simulator.saveSocket(host, port); gpsSender = new SocketGPSSender(host, port, gpsSenderListener); gpsSender.start(); return true; }
|
/**
* Connect to a remote socket host.
* @param socketHost
* @param socketPort
* @return
*/
|
Connect to a remote socket host
|
setSocket
|
{
"repo_name": "wayfinder/Wayfinder-Server",
"path": "Server/Tools/BTGPSSimulator/SenderManager.java",
"license": "bsd-3-clause",
"size": 4804
}
|
[
"java.awt.Dimension",
"java.awt.Toolkit"
] |
import java.awt.Dimension; import java.awt.Toolkit;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 2,224,280
|
public static ReplicaPlan.ForPaxosWrite forPaxos(Keyspace keyspace, DecoratedKey key, ConsistencyLevel consistencyForPaxos) throws UnavailableException
{
Token tk = key.getToken();
ReplicaLayout.ForTokenWrite liveAndDown = ReplicaLayout.forTokenWriteLiveAndDown(keyspace, tk);
Replicas.temporaryAssertFull(liveAndDown.all()); // TODO CASSANDRA-14547
if (consistencyForPaxos == ConsistencyLevel.LOCAL_SERIAL)
{
// TODO: we should cleanup our semantics here, as we're filtering ALL nodes to localDC which is unexpected for ReplicaPlan
// Restrict natural and pending to node in the local DC only
liveAndDown = liveAndDown.filter(InOurDcTester.replicas());
}
ReplicaLayout.ForTokenWrite live = liveAndDown.filter(FailureDetector.isReplicaAlive);
// TODO: this should use assureSufficientReplicas
int participants = liveAndDown.all().size();
int requiredParticipants = participants / 2 + 1; // See CASSANDRA-8346, CASSANDRA-833
EndpointsForToken contacts = live.all();
if (contacts.size() < requiredParticipants)
throw UnavailableException.create(consistencyForPaxos, requiredParticipants, contacts.size());
// We cannot allow CAS operations with 2 or more pending endpoints, see #8346.
// Note that we fake an impossible number of required nodes in the unavailable exception
// to nail home the point that it's an impossible operation no matter how many nodes are live.
if (liveAndDown.pending().size() > 1)
throw new UnavailableException(String.format("Cannot perform LWT operation as there is more than one (%d) pending range movement", liveAndDown.all().size()),
consistencyForPaxos,
participants + 1,
contacts.size());
return new ReplicaPlan.ForPaxosWrite(keyspace, consistencyForPaxos, liveAndDown.pending(), liveAndDown.all(), live.all(), contacts, requiredParticipants);
}
|
static ReplicaPlan.ForPaxosWrite function(Keyspace keyspace, DecoratedKey key, ConsistencyLevel consistencyForPaxos) throws UnavailableException { Token tk = key.getToken(); ReplicaLayout.ForTokenWrite liveAndDown = ReplicaLayout.forTokenWriteLiveAndDown(keyspace, tk); Replicas.temporaryAssertFull(liveAndDown.all()); if (consistencyForPaxos == ConsistencyLevel.LOCAL_SERIAL) { liveAndDown = liveAndDown.filter(InOurDcTester.replicas()); } ReplicaLayout.ForTokenWrite live = liveAndDown.filter(FailureDetector.isReplicaAlive); int participants = liveAndDown.all().size(); int requiredParticipants = participants / 2 + 1; EndpointsForToken contacts = live.all(); if (contacts.size() < requiredParticipants) throw UnavailableException.create(consistencyForPaxos, requiredParticipants, contacts.size()); if (liveAndDown.pending().size() > 1) throw new UnavailableException(String.format(STR, liveAndDown.all().size()), consistencyForPaxos, participants + 1, contacts.size()); return new ReplicaPlan.ForPaxosWrite(keyspace, consistencyForPaxos, liveAndDown.pending(), liveAndDown.all(), live.all(), contacts, requiredParticipants); }
|
/**
* Construct the plan for a paxos round - NOT the write or read consistency level for either the write or comparison,
* but for the paxos linearisation agreement.
*
* This will select all live nodes as the candidates for the operation. Only the required number of participants
*/
|
Construct the plan for a paxos round - NOT the write or read consistency level for either the write or comparison, but for the paxos linearisation agreement. This will select all live nodes as the candidates for the operation. Only the required number of participants
|
forPaxos
|
{
"repo_name": "jasonstack/cassandra",
"path": "src/java/org/apache/cassandra/locator/ReplicaPlans.java",
"license": "apache-2.0",
"size": 32854
}
|
[
"com.google.common.collect.Iterables",
"org.apache.cassandra.db.ConsistencyLevel",
"org.apache.cassandra.db.DecoratedKey",
"org.apache.cassandra.db.Keyspace",
"org.apache.cassandra.dht.Token",
"org.apache.cassandra.exceptions.UnavailableException",
"org.apache.cassandra.gms.FailureDetector"
] |
import com.google.common.collect.Iterables; import org.apache.cassandra.db.ConsistencyLevel; import org.apache.cassandra.db.DecoratedKey; import org.apache.cassandra.db.Keyspace; import org.apache.cassandra.dht.Token; import org.apache.cassandra.exceptions.UnavailableException; import org.apache.cassandra.gms.FailureDetector;
|
import com.google.common.collect.*; import org.apache.cassandra.db.*; import org.apache.cassandra.dht.*; import org.apache.cassandra.exceptions.*; import org.apache.cassandra.gms.*;
|
[
"com.google.common",
"org.apache.cassandra"
] |
com.google.common; org.apache.cassandra;
| 1,050,256
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.