method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public long getNumDocs() { return numDocs; } static class LoadedMetadata { final Map<String, StoreFileMetaData> fileMetadata; final Map<String, String> userData; final long numDocs; LoadedMetadata(Map<String, StoreFileMetaData> fileMetadata, Map<String, String> userData, long numDocs) { this.fileMetadata = fileMetadata; this.userData = userData; this.numDocs = numDocs; } }
long function() { return numDocs; } static class LoadedMetadata { final Map<String, StoreFileMetaData> fileMetadata; final Map<String, String> userData; final long numDocs; LoadedMetadata(Map<String, StoreFileMetaData> fileMetadata, Map<String, String> userData, long numDocs) { this.fileMetadata = fileMetadata; this.userData = userData; this.numDocs = numDocs; } }
/** * Returns the number of documents in this store snapshot */
Returns the number of documents in this store snapshot
getNumDocs
{ "repo_name": "strapdata/elassandra5-rc", "path": "core/src/main/java/org/elasticsearch/index/store/Store.java", "license": "apache-2.0", "size": 66803 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,111,534
@Test public void ConsumeMangledJWTTests_nonce() throws Exception { builder.setClaim(PayloadConstants.NONCE, someString); String jwtToken = buildToken(); Expectations expectations = addGoodResponseAndClaimsExpectations(currentAction, builder); expectations = updateExpectationsForJsonAttribute(expectations, PayloadConstants.NONCE, someString); Page response = consumeToken(jwtToken); validationUtils.validateResult(response, currentAction, expectations); }
void function() throws Exception { builder.setClaim(PayloadConstants.NONCE, someString); String jwtToken = buildToken(); Expectations expectations = addGoodResponseAndClaimsExpectations(currentAction, builder); expectations = updateExpectationsForJsonAttribute(expectations, PayloadConstants.NONCE, someString); Page response = consumeToken(jwtToken); validationUtils.validateResult(response, currentAction, expectations); }
/** * Create a JWT token with "nonce" * The request should succeed as we should not look at the claim * * @throws Exception */
Create a JWT token with "nonce" The request should succeed as we should not look at the claim
ConsumeMangledJWTTests_nonce
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.security.fat.common.jwt/fat/src/com/ibm/ws/security/fat/common/jwt/sharedTests/ConsumeMangledJWTTests.java", "license": "epl-1.0", "size": 34559 }
[ "com.gargoylesoftware.htmlunit.Page", "com.ibm.ws.security.fat.common.expectations.Expectations", "com.ibm.ws.security.fat.common.jwt.PayloadConstants" ]
import com.gargoylesoftware.htmlunit.Page; import com.ibm.ws.security.fat.common.expectations.Expectations; import com.ibm.ws.security.fat.common.jwt.PayloadConstants;
import com.gargoylesoftware.htmlunit.*; import com.ibm.ws.security.fat.common.expectations.*; import com.ibm.ws.security.fat.common.jwt.*;
[ "com.gargoylesoftware.htmlunit", "com.ibm.ws" ]
com.gargoylesoftware.htmlunit; com.ibm.ws;
67,958
public Map<String, ModelBLikenessCriterion> getBLikenessModel() { if (this.bLikenessModel == null) { this.bLikenessModel = new HashMap<String, ModelBLikenessCriterion>(); DataHandle handle = inputConfig.getInput().getHandle(); for (int col = 0; col < handle.getNumColumns(); col++) { String attribute = handle.getAttributeName(col); bLikenessModel.put(attribute, new ModelBLikenessCriterion(attribute)); } } return bLikenessModel; }
Map<String, ModelBLikenessCriterion> function() { if (this.bLikenessModel == null) { this.bLikenessModel = new HashMap<String, ModelBLikenessCriterion>(); DataHandle handle = inputConfig.getInput().getHandle(); for (int col = 0; col < handle.getNumColumns(); col++) { String attribute = handle.getAttributeName(col); bLikenessModel.put(attribute, new ModelBLikenessCriterion(attribute)); } } return bLikenessModel; }
/** * Returns the b-Likeness privacy model. * * @return */
Returns the b-Likeness privacy model
getBLikenessModel
{ "repo_name": "arx-deidentifier/arx", "path": "src/gui/org/deidentifier/arx/gui/model/Model.java", "license": "apache-2.0", "size": 57645 }
[ "java.util.HashMap", "java.util.Map", "org.deidentifier.arx.DataHandle" ]
import java.util.HashMap; import java.util.Map; import org.deidentifier.arx.DataHandle;
import java.util.*; import org.deidentifier.arx.*;
[ "java.util", "org.deidentifier.arx" ]
java.util; org.deidentifier.arx;
866,909
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { PifSerializationUtil.read(in, this); } private void readObjectNoData() throws ObjectStreamException {}
void function(ObjectInputStream in) throws IOException, ClassNotFoundException { PifSerializationUtil.read(in, this); } void functionNoData() throws ObjectStreamException {}
/** * Read into this object from the input stream. * * @param in {@link ObjectInputStream} to read from. * @throws IOException if thrown while reading the stream. * @throws ClassNotFoundException if thrown while reading the stream. */
Read into this object from the input stream
readObject
{ "repo_name": "CitrineInformatics/jpif", "path": "src/main/java/io/citrine/jpif/obj/common/DisplayItem.java", "license": "apache-2.0", "size": 4035 }
[ "io.citrine.jpif.util.PifSerializationUtil", "java.io.IOException", "java.io.ObjectInputStream", "java.io.ObjectStreamException" ]
import io.citrine.jpif.util.PifSerializationUtil; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectStreamException;
import io.citrine.jpif.util.*; import java.io.*;
[ "io.citrine.jpif", "java.io" ]
io.citrine.jpif; java.io;
2,548,795
public final void setAttribute(String name, Object value) { if (attributes == null) attributes = new Hashtable(); attributes.put(name, value); }
final void function(String name, Object value) { if (attributes == null) attributes = new Hashtable(); attributes.put(name, value); }
/** * Sets a named attribute to the given value. * @param name the name of the attribute * @param value the value to set the attribute */
Sets a named attribute to the given value
setAttribute
{ "repo_name": "yzhnasa/TASSEL-iRods", "path": "src/net/maizegenetics/taxa/tree/SimpleNode.java", "license": "mit", "size": 12749 }
[ "java.util.Hashtable" ]
import java.util.Hashtable;
import java.util.*;
[ "java.util" ]
java.util;
585,044
public static double[][] toSquareData(DenseMatrix64F A) { final int numRows = A.numRows; final int numCols = A.numCols; final double[][] out = new double[numRows][]; for (int i = 0, pos = 0; i < numRows; i++, pos += numRows) { out[i] = new double[numCols]; System.arraycopy(A.data, pos, out[i], 0, numCols); } return out; }
static double[][] function(DenseMatrix64F A) { final int numRows = A.numRows; final int numCols = A.numCols; final double[][] out = new double[numRows][]; for (int i = 0, pos = 0; i < numRows; i++, pos += numRows) { out[i] = new double[numCols]; System.arraycopy(A.data, pos, out[i], 0, numCols); } return out; }
/** * Convert a dense matrix to a row/column format. * * @param A the matrix * @return the row/column format */
Convert a dense matrix to a row/column format
toSquareData
{ "repo_name": "aherbert/GDSC-SMLM", "path": "src/main/java/uk/ac/sussex/gdsc/smlm/fitting/linear/EjmlLinearSolver.java", "license": "gpl-3.0", "size": 40872 }
[ "org.ejml.data.DenseMatrix64F" ]
import org.ejml.data.DenseMatrix64F;
import org.ejml.data.*;
[ "org.ejml.data" ]
org.ejml.data;
2,223,343
private static void addRealPageStoreTestsLongRunning(List<Class<?>> suite, Collection<Class> ignoredTests) { // Basic PageMemory tests. GridTestUtils.addTestIfNeeded(suite, IgnitePdsPageReplacementTest.class, ignoredTests); }
static void function(List<Class<?>> suite, Collection<Class> ignoredTests) { GridTestUtils.addTestIfNeeded(suite, IgnitePdsPageReplacementTest.class, ignoredTests); }
/** * Fills {@code suite} with PDS test subset, which operates with real page store, but requires long time to * execute. * * @param suite suite to add tests into. * @param ignoredTests Ignored tests. */
Fills suite with PDS test subset, which operates with real page store, but requires long time to execute
addRealPageStoreTestsLongRunning
{ "repo_name": "ilantukh/ignite", "path": "modules/core/src/test/java/org/apache/ignite/testsuites/IgnitePdsTestSuite.java", "license": "apache-2.0", "size": 12143 }
[ "java.util.Collection", "java.util.List", "org.apache.ignite.internal.processors.cache.persistence.db.file.IgnitePdsPageReplacementTest", "org.apache.ignite.testframework.GridTestUtils" ]
import java.util.Collection; import java.util.List; import org.apache.ignite.internal.processors.cache.persistence.db.file.IgnitePdsPageReplacementTest; import org.apache.ignite.testframework.GridTestUtils;
import java.util.*; import org.apache.ignite.internal.processors.cache.persistence.db.file.*; import org.apache.ignite.testframework.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
115,742
@SimpleEvent(description = "Called when the touch sensor is pressed.") public void Released() { EventDispatcher.dispatchEvent(this, "Released"); }
@SimpleEvent(description = STR) void function() { EventDispatcher.dispatchEvent(this, STR); }
/** * Called when the touch sensor is pressed. */
Called when the touch sensor is pressed
Released
{ "repo_name": "kkashi01/appinventor-sources", "path": "appinventor/components/src/com/google/appinventor/components/runtime/Ev3TouchSensor.java", "license": "apache-2.0", "size": 6102 }
[ "com.google.appinventor.components.annotations.SimpleEvent" ]
import com.google.appinventor.components.annotations.SimpleEvent;
import com.google.appinventor.components.annotations.*;
[ "com.google.appinventor" ]
com.google.appinventor;
2,705,315
@Override public List<Extractor> extractors() { return this.extractors.stream() .map(ExtractorConfig::getExtractor) .filter(Objects::nonNull) .collect(Collectors.toList()); }
List<Extractor> function() { return this.extractors.stream() .map(ExtractorConfig::getExtractor) .filter(Objects::nonNull) .collect(Collectors.toList()); }
/** * Returns a list of extractor classes that should be used for * the extraction run! * * @return List of named extractors. */
Returns a list of extractor classes that should be used for the extraction run
extractors
{ "repo_name": "dbisUnibas/cineast", "path": "cineast-runtime/src/main/java/org/vitrivr/cineast/standalone/config/IngestConfig.java", "license": "mit", "size": 12865 }
[ "java.util.List", "java.util.Objects", "java.util.stream.Collectors", "org.vitrivr.cineast.core.features.extractor.Extractor" ]
import java.util.List; import java.util.Objects; import java.util.stream.Collectors; import org.vitrivr.cineast.core.features.extractor.Extractor;
import java.util.*; import java.util.stream.*; import org.vitrivr.cineast.core.features.extractor.*;
[ "java.util", "org.vitrivr.cineast" ]
java.util; org.vitrivr.cineast;
1,926,024
public boolean evaluate(Context context, Writer writer, String logTag, Reader reader);
boolean function(Context context, Writer writer, String logTag, Reader reader);
/** * Renders the input reader using the context into the output writer. * To be used when a template is dynamically constructed, or want to * use Velocity as a token replacer. * * @param context context to use in rendering input string * @param writer Writer in which to render the output * @param logTag string to be used as the template name for log messages * in case of error * @param reader Reader containing the VTL to be rendered * * @return true if successful, false otherwise. If false, see * Velocity runtime log * @throws ParseErrorException The template could not be parsed. * @throws MethodInvocationException A method on a context object could not be invoked. * @throws ResourceNotFoundException A referenced resource could not be loaded. * @since Velocity 1.6 */
Renders the input reader using the context into the output writer. To be used when a template is dynamically constructed, or want to use Velocity as a token replacer
evaluate
{ "repo_name": "zhiqinghuang/core", "path": "src/org/apache/velocity/runtime/RuntimeServices.java", "license": "gpl-3.0", "size": 18767 }
[ "java.io.Reader", "java.io.Writer", "org.apache.velocity.context.Context" ]
import java.io.Reader; import java.io.Writer; import org.apache.velocity.context.Context;
import java.io.*; import org.apache.velocity.context.*;
[ "java.io", "org.apache.velocity" ]
java.io; org.apache.velocity;
531,786
public IPTunnelDatagramSocket getUnicastSocket() { return unicastSocket; }
IPTunnelDatagramSocket function() { return unicastSocket; }
/** * Retrieves the value of unicastSocket. * * @return The value of unicastSocket */
Retrieves the value of unicastSocket
getUnicastSocket
{ "repo_name": "fraunhoferfokus/fokus-upnp", "path": "upnp-gateways/src/main/java/de/fraunhofer/fokus/lsf/gateway/common/tunnel/TunnelLSFSocketStructure.java", "license": "gpl-3.0", "size": 5342 }
[ "de.fraunhofer.fokus.upnp.util.tunnel.common.ip.IPTunnelDatagramSocket" ]
import de.fraunhofer.fokus.upnp.util.tunnel.common.ip.IPTunnelDatagramSocket;
import de.fraunhofer.fokus.upnp.util.tunnel.common.ip.*;
[ "de.fraunhofer.fokus" ]
de.fraunhofer.fokus;
1,275,688
void removeAwaitingResume(DccFileTransfer transfer) { _awaitingResume.removeElement(transfer); } private PircBot _bot; private Vector _awaitingResume = new Vector();
void removeAwaitingResume(DccFileTransfer transfer) { _awaitingResume.removeElement(transfer); } private PircBot _bot; private Vector _awaitingResume = new Vector();
/** * Remove this transfer from the list of those awaiting resuming. */
Remove this transfer from the list of those awaiting resuming
removeAwaitingResume
{ "repo_name": "GHOSTnew/DarkIRC", "path": "org/jibble/pircbot/DccManager.java", "license": "gpl-3.0", "size": 5838 }
[ "java.util.Vector" ]
import java.util.Vector;
import java.util.*;
[ "java.util" ]
java.util;
2,602,642
final String where = MediaStore.MediaColumns.DATA + "=?"; final String[] selectionArgs = new String[]{ file.getAbsolutePath() }; final ContentResolver contentResolver = context.getContentResolver(); final Uri filesUri = MediaStore.Files.getContentUri("external"); // Delete the entry from the media database. This will actually delete media files. contentResolver.delete(filesUri, where, selectionArgs); // If the file is not a media file, create a new entry. if (file.exists()) { final ContentValues values = new ContentValues(); values.put(MediaStore.MediaColumns.DATA, file.getAbsolutePath()); contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values); // Delete the created entry, such that content provider will delete the file. contentResolver.delete(filesUri, where, selectionArgs); } return !file.exists(); }
final String where = MediaStore.MediaColumns.DATA + "=?"; final String[] selectionArgs = new String[]{ file.getAbsolutePath() }; final ContentResolver contentResolver = context.getContentResolver(); final Uri filesUri = MediaStore.Files.getContentUri(STR); contentResolver.delete(filesUri, where, selectionArgs); if (file.exists()) { final ContentValues values = new ContentValues(); values.put(MediaStore.MediaColumns.DATA, file.getAbsolutePath()); contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values); contentResolver.delete(filesUri, where, selectionArgs); } return !file.exists(); }
/** * Deletes the file. Returns true if the file has been successfully deleted or otherwise does * not exist. This operation is not recursive. */
Deletes the file. Returns true if the file has been successfully deleted or otherwise does not exist. This operation is not recursive
delete
{ "repo_name": "j3l11234/swiftp", "path": "app/src/main/java/be/ppareit/swiftp/utils/MediaStoreHack.java", "license": "gpl-3.0", "size": 11081 }
[ "android.content.ContentResolver", "android.content.ContentValues", "android.net.Uri", "android.provider.MediaStore" ]
import android.content.ContentResolver; import android.content.ContentValues; import android.net.Uri; import android.provider.MediaStore;
import android.content.*; import android.net.*; import android.provider.*;
[ "android.content", "android.net", "android.provider" ]
android.content; android.net; android.provider;
283,991
public static synchronized Integer save(ILabelBean labelBean, Locale locale) { TSeverityBean severityBean = (TSeverityBean)labelBean; boolean isNew = severityBean.getObjectID()==null; if (severityBean.getSortorder()==null) { Integer sortOrder = severityDAO.getNextSortOrder(); severityBean.setSortorder(sortOrder); } Integer objectID = severityDAO.save(severityBean); if (isNew) { severityBean.setObjectID(objectID); addFilterAssignments(objectID); LocalizedListIndexer.getInstance().addLabelBean(severityBean, LuceneUtil.LOOKUPENTITYTYPES.SEVERITY, isNew); } else { LocalizedListIndexer.getInstance().updateLabelBean(severityBean, LuceneUtil.LOOKUPENTITYTYPES.SEVERITY); } LocalizeBL.saveSystemFieldLocalizedResource( LocalizeBL.RESOURCE_TYPES.SEVERITY, objectID, severityBean.getLabel(), locale); LookupContainer.resetLookupMap(SystemFields.INTEGER_SEVERITY); //cache and possible lucene update in other cluster nodes ClusterMarkChangesBL.markDirtySystemListEntryInCluster(SystemFields.INTEGER_SEVERITY, objectID, ClusterMarkChangesBL.getChangeTypeByAddOrUpdate(isNew)); return objectID; }
static synchronized Integer function(ILabelBean labelBean, Locale locale) { TSeverityBean severityBean = (TSeverityBean)labelBean; boolean isNew = severityBean.getObjectID()==null; if (severityBean.getSortorder()==null) { Integer sortOrder = severityDAO.getNextSortOrder(); severityBean.setSortorder(sortOrder); } Integer objectID = severityDAO.save(severityBean); if (isNew) { severityBean.setObjectID(objectID); addFilterAssignments(objectID); LocalizedListIndexer.getInstance().addLabelBean(severityBean, LuceneUtil.LOOKUPENTITYTYPES.SEVERITY, isNew); } else { LocalizedListIndexer.getInstance().updateLabelBean(severityBean, LuceneUtil.LOOKUPENTITYTYPES.SEVERITY); } LocalizeBL.saveSystemFieldLocalizedResource( LocalizeBL.RESOURCE_TYPES.SEVERITY, objectID, severityBean.getLabel(), locale); LookupContainer.resetLookupMap(SystemFields.INTEGER_SEVERITY); ClusterMarkChangesBL.markDirtySystemListEntryInCluster(SystemFields.INTEGER_SEVERITY, objectID, ClusterMarkChangesBL.getChangeTypeByAddOrUpdate(isNew)); return objectID; }
/** * Saves a new/modified list entry * If sortOrder is not set it will be set to be the next available sortOrder * @param labelBean * @param copy * @param personID * @param locale * @return */
Saves a new/modified list entry If sortOrder is not set it will be set to be the next available sortOrder
save
{ "repo_name": "trackplus/Genji", "path": "src/main/java/com/aurel/track/admin/customize/lists/systemOption/SeverityBL.java", "license": "gpl-3.0", "size": 14612 }
[ "com.aurel.track.admin.customize.localize.LocalizeBL", "com.aurel.track.beans.ILabelBean", "com.aurel.track.beans.TSeverityBean", "com.aurel.track.cluster.ClusterMarkChangesBL", "com.aurel.track.fieldType.constants.SystemFields", "com.aurel.track.fieldType.runtime.base.LookupContainer", "com.aurel.track.lucene.LuceneUtil", "com.aurel.track.lucene.index.listFields.LocalizedListIndexer", "java.util.Locale" ]
import com.aurel.track.admin.customize.localize.LocalizeBL; import com.aurel.track.beans.ILabelBean; import com.aurel.track.beans.TSeverityBean; import com.aurel.track.cluster.ClusterMarkChangesBL; import com.aurel.track.fieldType.constants.SystemFields; import com.aurel.track.fieldType.runtime.base.LookupContainer; import com.aurel.track.lucene.LuceneUtil; import com.aurel.track.lucene.index.listFields.LocalizedListIndexer; import java.util.Locale;
import com.aurel.track.*; import com.aurel.track.admin.customize.localize.*; import com.aurel.track.beans.*; import com.aurel.track.cluster.*; import com.aurel.track.lucene.*; import com.aurel.track.lucene.index.*; import java.util.*;
[ "com.aurel.track", "java.util" ]
com.aurel.track; java.util;
2,720,101
public void addInitializers(ApplicationContextInitializer<?>... initializers) { this.initializers.addAll(Arrays.asList(initializers)); }
void function(ApplicationContextInitializer<?>... initializers) { this.initializers.addAll(Arrays.asList(initializers)); }
/** * Add {@link ApplicationContextInitializer}s to be applied to the Spring * {@link ApplicationContext}. * @param initializers the initializers to add */
Add <code>ApplicationContextInitializer</code>s to be applied to the Spring <code>ApplicationContext</code>
addInitializers
{ "repo_name": "Buzzardo/spring-boot", "path": "spring-boot-project/spring-boot/src/main/java/org/springframework/boot/SpringApplication.java", "license": "apache-2.0", "size": 51014 }
[ "java.util.Arrays", "org.springframework.context.ApplicationContextInitializer" ]
import java.util.Arrays; import org.springframework.context.ApplicationContextInitializer;
import java.util.*; import org.springframework.context.*;
[ "java.util", "org.springframework.context" ]
java.util; org.springframework.context;
2,048,940
protected ConnectionFactory retrieveConnectionFactory(String name) throws JMSException { if (name == null && defaultConnectionFactory != null) { return defaultConnectionFactory; // injected } else { try { // autowiring did not occur return (ConnectionFactory)SpringAppContext.getInstance().getBean(name); } catch (Exception ex) { logger.error(ex.getMessage(), ex); throw new JMSException("JMS ConnectionFactory not found: " + name); } } }
ConnectionFactory function(String name) throws JMSException { if (name == null && defaultConnectionFactory != null) { return defaultConnectionFactory; } else { try { return (ConnectionFactory)SpringAppContext.getInstance().getBean(name); } catch (Exception ex) { logger.error(ex.getMessage(), ex); throw new JMSException(STR + name); } } }
/** * Pooling and remote queues are configured via Spring in broker config XML. */
Pooling and remote queues are configured via Spring in broker config XML
retrieveConnectionFactory
{ "repo_name": "CenturyLinkCloud/mdw", "path": "mdw-common/src/com/centurylink/mdw/container/plugin/activemq/ActiveMqJms.java", "license": "apache-2.0", "size": 3687 }
[ "com.centurylink.mdw.spring.SpringAppContext", "javax.jms.ConnectionFactory", "javax.jms.JMSException" ]
import com.centurylink.mdw.spring.SpringAppContext; import javax.jms.ConnectionFactory; import javax.jms.JMSException;
import com.centurylink.mdw.spring.*; import javax.jms.*;
[ "com.centurylink.mdw", "javax.jms" ]
com.centurylink.mdw; javax.jms;
2,657,865
@WorkerThread File startFile(String key, long position, long length) throws CacheException;
File startFile(String key, long position, long length) throws CacheException;
/** * Obtains a cache file into which data can be written. Must only be called when holding a * corresponding hole {@link CacheSpan} obtained from {@link #startReadWrite(String, long, long)}. * * <p>This method may be slow and shouldn't normally be called on the main thread. * * @param key The cache key of the resource being written. * @param position The starting position in the resource from which data will be written. * @param length The length of the data being written, or {@link C#LENGTH_UNSET} if unknown. Used * only to ensure that there is enough space in the cache. * @return The file into which data should be written. * @throws CacheException If an error is encountered. */
Obtains a cache file into which data can be written. Must only be called when holding a corresponding hole <code>CacheSpan</code> obtained from <code>#startReadWrite(String, long, long)</code>. This method may be slow and shouldn't normally be called on the main thread
startFile
{ "repo_name": "google/ExoPlayer", "path": "library/datasource/src/main/java/com/google/android/exoplayer2/upstream/cache/Cache.java", "license": "apache-2.0", "size": 13274 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
1,934,184
private void preProcessXmlType(JavaClass javaClass, TypeInfo info, NamespaceInfo packageNamespace) { org.eclipse.persistence.jaxb.xmlmodel.XmlType xmlType = new org.eclipse.persistence.jaxb.xmlmodel.XmlType(); // 14 xmlType=XmlType - default settings: name=null, namespace=null, factoryClass=null, factoryMethod=null, propOrder=null. if (helper.isAnnotationPresent(javaClass, XmlType.class)) { XmlType typeAnnotation = (XmlType) helper.getAnnotation(javaClass, XmlType.class); // 15 typeAnnotation=com.sun.proxy.$Proxy6"@javax.xml.bind.annotation.XmlType(factoryMethod=, name=OneClass, propOrder=[car], factoryClass=class javax.xml.bind.annotation.XmlType$DEFAULT, namespace=##default)" // set name xmlType.setName(typeAnnotation.name()); // 16 XmlType - name="OneClass // set namespace xmlType.setNamespace(typeAnnotation.namespace()); // 17 xmlType - namespace="##default" // set propOrder String[] propOrder = typeAnnotation.propOrder(); // 18 propOrder = ["car"] // initializes xmlType.propOrder to an empty ArrayList if (propOrder != null) { xmlType.getPropOrder(); // 19 OK, so this only initializes xmlType.propOrder to an empty ArrayList } for (String prop : propOrder) { xmlType.getPropOrder().add(prop); // 20 - puts "car" into xmlType.propOrder } // set factoryClass Class factoryClass = typeAnnotation.factoryClass(); // 21 factoryClass=java.lang.Class"class javax.xml.bind.annotation.XmlType$DEFAULT" if (factoryClass == DEFAULT.class) { xmlType.setFactoryClass("javax.xml.bind.annotation.XmlType.DEFAULT"); // 22 } else { xmlType.setFactoryClass(factoryClass.getCanonicalName()); } // set factoryMethodName xmlType.setFactoryMethod(typeAnnotation.factoryMethod()); // 23 defaults to factoryMethod="" } else { // set defaults xmlType.setNamespace(packageNamespace.getNamespace()); } info.setXmlType(xmlType); // 24 }
void function(JavaClass javaClass, TypeInfo info, NamespaceInfo packageNamespace) { org.eclipse.persistence.jaxb.xmlmodel.XmlType xmlType = new org.eclipse.persistence.jaxb.xmlmodel.XmlType(); if (helper.isAnnotationPresent(javaClass, XmlType.class)) { XmlType typeAnnotation = (XmlType) helper.getAnnotation(javaClass, XmlType.class); xmlType.setName(typeAnnotation.name()); xmlType.setNamespace(typeAnnotation.namespace()); String[] propOrder = typeAnnotation.propOrder(); if (propOrder != null) { xmlType.getPropOrder(); } for (String prop : propOrder) { xmlType.getPropOrder().add(prop); } Class factoryClass = typeAnnotation.factoryClass(); if (factoryClass == DEFAULT.class) { xmlType.setFactoryClass(STR); } else { xmlType.setFactoryClass(factoryClass.getCanonicalName()); } xmlType.setFactoryMethod(typeAnnotation.factoryMethod()); } else { xmlType.setNamespace(packageNamespace.getNamespace()); } info.setXmlType(xmlType); }
/** * Process @XmlType annotation on a given JavaClass and update the TypeInfo * for pre-processing. Note that if no @XmlType annotation is present we * still create a new XmlType an set it on the TypeInfo. * * @param javaClass * @param info * @param packageNamespace */
Process @XmlType annotation on a given JavaClass and update the TypeInfo for pre-processing. Note that if no @XmlType annotation is present we still create a new XmlType an set it on the TypeInfo
preProcessXmlType
{ "repo_name": "RallySoftware/eclipselink.runtime", "path": "moxy/org.eclipse.persistence.moxy/src/org/eclipse/persistence/jaxb/compiler/AnnotationsProcessor.java", "license": "epl-1.0", "size": 252702 }
[ "javax.xml.bind.annotation.XmlType", "org.eclipse.persistence.jaxb.javamodel.JavaClass" ]
import javax.xml.bind.annotation.XmlType; import org.eclipse.persistence.jaxb.javamodel.JavaClass;
import javax.xml.bind.annotation.*; import org.eclipse.persistence.jaxb.javamodel.*;
[ "javax.xml", "org.eclipse.persistence" ]
javax.xml; org.eclipse.persistence;
1,265,783
public java.util.List<fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI> getSubterm_dots_DotConstantHLAPI(){ java.util.List<fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.dots.impl.DotConstantImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI( (fr.lip6.move.pnml.hlpn.dots.DotConstant)elemnt )); } } return retour; }
java.util.List<fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.dots.impl.DotConstantImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.dots.hlapi.DotConstantHLAPI( (fr.lip6.move.pnml.hlpn.dots.DotConstant)elemnt )); } } return retour; }
/** * This accessor return a list of encapsulated subelement, only of DotConstantHLAPI kind. * WARNING : this method can creates a lot of new object in memory. */
This accessor return a list of encapsulated subelement, only of DotConstantHLAPI kind. WARNING : this method can creates a lot of new object in memory
getSubterm_dots_DotConstantHLAPI
{ "repo_name": "lhillah/pnmlframework", "path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/multisets/hlapi/EmptyHLAPI.java", "license": "epl-1.0", "size": 113920 }
[ "fr.lip6.move.pnml.hlpn.terms.Term", "java.util.ArrayList", "java.util.List" ]
import fr.lip6.move.pnml.hlpn.terms.Term; import java.util.ArrayList; import java.util.List;
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
[ "fr.lip6.move", "java.util" ]
fr.lip6.move; java.util;
1,442,494
public ApiResponse<V1RuntimeClass> createRuntimeClassWithHttpInfo( V1RuntimeClass body, String pretty, String dryRun, String fieldManager, String fieldValidation) throws ApiException { okhttp3.Call localVarCall = createRuntimeClassValidateBeforeCall( body, pretty, dryRun, fieldManager, fieldValidation, null); Type localVarReturnType = new TypeToken<V1RuntimeClass>() {}.getType(); return localVarApiClient.execute(localVarCall, localVarReturnType); }
ApiResponse<V1RuntimeClass> function( V1RuntimeClass body, String pretty, String dryRun, String fieldManager, String fieldValidation) throws ApiException { okhttp3.Call localVarCall = createRuntimeClassValidateBeforeCall( body, pretty, dryRun, fieldManager, fieldValidation, null); Type localVarReturnType = new TypeToken<V1RuntimeClass>() {}.getType(); return localVarApiClient.execute(localVarCall, localVarReturnType); }
/** * create a RuntimeClass * * @param body (required) * @param pretty If &#39;true&#39;, then the output is pretty printed. (optional) * @param dryRun When present, indicates that modifications should not be persisted. An invalid or * unrecognized dryRun directive will result in an error response and no further processing of * the request. Valid values are: - All: all dry run stages will be processed (optional) * @param fieldManager fieldManager is a name associated with the actor or entity that is making * these changes. The value must be less than or 128 characters long, and only contain * printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. (optional) * @param fieldValidation fieldValidation determines how the server should respond to * unknown/duplicate fields in the object in the request. Introduced as alpha in 1.23, older * servers or servers with the &#x60;ServerSideFieldValidation&#x60; feature disabled will * discard valid values specified in this param and not perform any server side field * validation. Valid values are: - Ignore: ignores unknown/duplicate fields. - Warn: responds * with a warning for each unknown/duplicate field, but successfully serves the request. - * Strict: fails the request on unknown/duplicate fields. (optional) * @return ApiResponse&lt;V1RuntimeClass&gt; * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the * response body * @http.response.details * <table summary="Response Details" border="1"> * <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr> * <tr><td> 200 </td><td> OK </td><td> - </td></tr> * <tr><td> 201 </td><td> Created </td><td> - </td></tr> * <tr><td> 202 </td><td> Accepted </td><td> - </td></tr> * <tr><td> 401 </td><td> Unauthorized </td><td> - </td></tr> * </table> */
create a RuntimeClass
createRuntimeClassWithHttpInfo
{ "repo_name": "kubernetes-client/java", "path": "kubernetes/src/main/java/io/kubernetes/client/openapi/apis/NodeV1Api.java", "license": "apache-2.0", "size": 128012 }
[ "com.google.gson.reflect.TypeToken", "io.kubernetes.client.openapi.ApiException", "io.kubernetes.client.openapi.ApiResponse", "io.kubernetes.client.openapi.models.V1RuntimeClass", "java.lang.reflect.Type" ]
import com.google.gson.reflect.TypeToken; import io.kubernetes.client.openapi.ApiException; import io.kubernetes.client.openapi.ApiResponse; import io.kubernetes.client.openapi.models.V1RuntimeClass; import java.lang.reflect.Type;
import com.google.gson.reflect.*; import io.kubernetes.client.openapi.*; import io.kubernetes.client.openapi.models.*; import java.lang.reflect.*;
[ "com.google.gson", "io.kubernetes.client", "java.lang" ]
com.google.gson; io.kubernetes.client; java.lang;
2,165,497
@Override public T visitPrimaryNoNewArray_lfno_primary_lf_arrayAccess_lfno_primary(@NotNull Java8Parser.PrimaryNoNewArray_lfno_primary_lf_arrayAccess_lfno_primaryContext ctx) { return visitChildren(ctx); }
@Override public T visitPrimaryNoNewArray_lfno_primary_lf_arrayAccess_lfno_primary(@NotNull Java8Parser.PrimaryNoNewArray_lfno_primary_lf_arrayAccess_lfno_primaryContext ctx) { return visitChildren(ctx); }
/** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */
The default implementation returns the result of calling <code>#visitChildren</code> on ctx
visitMethodInvocation_lf_primary
{ "repo_name": "IsThisThePayneResidence/intellidots", "path": "src/main/java/ua/edu/hneu/ast/parsers/Java8BaseVisitor.java", "license": "gpl-3.0", "size": 65479 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
1,462,704
@Generated @Selector("standardDeviation") public native double standardDeviation();
@Selector(STR) native double function();
/** * [@property] standardDeviation * <p> * An double representation of the standard deviation of the distribution. * <p> * This value is negative an unknown number of samples was used to compute the standard deviation. */
[@property] standardDeviation An double representation of the standard deviation of the distribution. This value is negative an unknown number of samples was used to compute the standard deviation
standardDeviation
{ "repo_name": "multi-os-engine/moe-core", "path": "moe.apple/moe.platform.ios/src/main/java/apple/metrickit/MXAverage.java", "license": "apache-2.0", "size": 5976 }
[ "org.moe.natj.objc.ann.Selector" ]
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.ann.*;
[ "org.moe.natj" ]
org.moe.natj;
1,363,551
public static RowsBuilder of(final Schema schema) { RowsBuilder builder = new RowsBuilder(); builder.type = schema; return builder; }
static RowsBuilder function(final Schema schema) { RowsBuilder builder = new RowsBuilder(); builder.type = schema; return builder; }
/** * Create a RowsBuilder with the specified row type info. * * <p>For example: * * <pre>{@code * TestUtils.RowsBuilder.of( * schema * ) * }</pre> */
Create a RowsBuilder with the specified row type info. For example: <code>TestUtils.RowsBuilder.of( schema ) </code>
of
{ "repo_name": "lukecwik/incubator-beam", "path": "sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java", "license": "apache-2.0", "size": 7184 }
[ "org.apache.beam.sdk.schemas.Schema" ]
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.schemas.*;
[ "org.apache.beam" ]
org.apache.beam;
653,228
protected static Vector getClassnames(String superclass, Vector packages) { Vector result; Vector names; int i; int n; result = new Vector(); for (i = 0; i < packages.size(); i++) { names = ClassDiscovery.find(superclass, (String) packages.get(i)); for (n = 0; n < names.size(); n++) { // skip non-public classes if (isValidClassname((String) names.get(n))) result.add(names.get(n)); } } return result; }
static Vector function(String superclass, Vector packages) { Vector result; Vector names; int i; int n; result = new Vector(); for (i = 0; i < packages.size(); i++) { names = ClassDiscovery.find(superclass, (String) packages.get(i)); for (n = 0; n < names.size(); n++) { if (isValidClassname((String) names.get(n))) result.add(names.get(n)); } } return result; }
/** * determines all the classes derived from the given superclass in the * specified packages * * @param superclass the class to find subclasses for * @param pacakges the packages to search in for subclasses * @return the classes that were found */
determines all the classes derived from the given superclass in the specified packages
getClassnames
{ "repo_name": "dsibournemouth/autoweka", "path": "weka-3.7.7/src/test/java/weka/test/WekaTestSuite.java", "license": "gpl-3.0", "size": 8710 }
[ "java.util.Vector" ]
import java.util.Vector;
import java.util.*;
[ "java.util" ]
java.util;
2,350,410
public void testSortRows2() throws Exception { KettleEnvironment.init(); // // Create a new transformation... // TransMeta transMeta = new TransMeta(); transMeta.setName("sortrowstest"); PluginRegistry registry = PluginRegistry.getInstance(); // // create an injector step... // String injectorStepname = "injector step"; InjectorMeta im = new InjectorMeta(); // Set the information of the injector. String injectorPid = registry.getPluginId(StepPluginType.class, im); StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, (StepMetaInterface)im); transMeta.addStep(injectorStep); // // Create a sort rows step // String sortRowsStepname = "sort rows step"; SortRowsMeta srm = new SortRowsMeta(); srm.setSortSize(Integer.toString(MAX_COUNT/10)); String [] sortFields = { "KEY1", "KEY2" }; boolean [] ascendingFields = { false, false }; boolean [] caseSensitive = { true, true }; srm.setFieldName(sortFields); srm.setAscending(ascendingFields); srm.setCaseSensitive(caseSensitive); srm.setPrefix("SortRowsTest"); srm.setDirectory("."); String sortRowsStepPid = registry.getPluginId(StepPluginType.class, srm); StepMeta sortRowsStep = new StepMeta(sortRowsStepPid, sortRowsStepname, (StepMetaInterface)srm); transMeta.addStep(sortRowsStep); TransHopMeta hi = new TransHopMeta(injectorStep, sortRowsStep); transMeta.addTransHop(hi); // // Create a dummy step // String dummyStepname = "dummy step"; DummyTransMeta dm = new DummyTransMeta(); String dummyPid = registry.getPluginId(StepPluginType.class, dm); StepMeta dummyStep = new StepMeta(dummyPid, dummyStepname, (StepMetaInterface)dm); transMeta.addStep(dummyStep); TransHopMeta hi3 = new TransHopMeta(sortRowsStep, dummyStep); transMeta.addTransHop(hi3); // Now execute the transformation... 
Trans trans = new Trans(transMeta); trans.prepareExecution(null); StepInterface si = trans.getStepInterface(dummyStepname, 0); RowStepCollector dummyRc = new RowStepCollector(); si.addRowListener(dummyRc); RowProducer rp = trans.addRowProducer(injectorStepname, 0); trans.startThreads(); // add rows List<RowMetaAndData> inputList = createIntegerData(); for ( RowMetaAndData rm : inputList ) { rp.putRow(rm.getRowMeta(), rm.getData()); } rp.finished(); trans.waitUntilFinished(); List<RowMetaAndData> resultRows = dummyRc.getRowsWritten(); checkRows(resultRows, false); }
void function() throws Exception { KettleEnvironment.init(); TransMeta transMeta = new TransMeta(); transMeta.setName(STR); PluginRegistry registry = PluginRegistry.getInstance(); String injectorStepname = STR; InjectorMeta im = new InjectorMeta(); String injectorPid = registry.getPluginId(StepPluginType.class, im); StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, (StepMetaInterface)im); transMeta.addStep(injectorStep); String sortRowsStepname = STR; SortRowsMeta srm = new SortRowsMeta(); srm.setSortSize(Integer.toString(MAX_COUNT/10)); String [] sortFields = { "KEY1", "KEY2" }; boolean [] ascendingFields = { false, false }; boolean [] caseSensitive = { true, true }; srm.setFieldName(sortFields); srm.setAscending(ascendingFields); srm.setCaseSensitive(caseSensitive); srm.setPrefix(STR); srm.setDirectory("."); String sortRowsStepPid = registry.getPluginId(StepPluginType.class, srm); StepMeta sortRowsStep = new StepMeta(sortRowsStepPid, sortRowsStepname, (StepMetaInterface)srm); transMeta.addStep(sortRowsStep); TransHopMeta hi = new TransHopMeta(injectorStep, sortRowsStep); transMeta.addTransHop(hi); String dummyStepname = STR; DummyTransMeta dm = new DummyTransMeta(); String dummyPid = registry.getPluginId(StepPluginType.class, dm); StepMeta dummyStep = new StepMeta(dummyPid, dummyStepname, (StepMetaInterface)dm); transMeta.addStep(dummyStep); TransHopMeta hi3 = new TransHopMeta(sortRowsStep, dummyStep); transMeta.addTransHop(hi3); Trans trans = new Trans(transMeta); trans.prepareExecution(null); StepInterface si = trans.getStepInterface(dummyStepname, 0); RowStepCollector dummyRc = new RowStepCollector(); si.addRowListener(dummyRc); RowProducer rp = trans.addRowProducer(injectorStepname, 0); trans.startThreads(); List<RowMetaAndData> inputList = createIntegerData(); for ( RowMetaAndData rm : inputList ) { rp.putRow(rm.getRowMeta(), rm.getData()); } rp.finished(); trans.waitUntilFinished(); List<RowMetaAndData> resultRows = 
dummyRc.getRowsWritten(); checkRows(resultRows, false); }
/** * Test case for sorting step .. descending order on "numeric" data. */
Test case for sorting step .. descending order on "numeric" data
testSortRows2
{ "repo_name": "jjeb/kettle-trunk", "path": "test/org/pentaho/di/trans/steps/sort/SortRowsTest.java", "license": "apache-2.0", "size": 11259 }
[ "java.util.List", "org.pentaho.di.core.KettleEnvironment", "org.pentaho.di.core.RowMetaAndData", "org.pentaho.di.core.plugins.PluginRegistry", "org.pentaho.di.core.plugins.StepPluginType", "org.pentaho.di.trans.RowProducer", "org.pentaho.di.trans.RowStepCollector", "org.pentaho.di.trans.Trans", "org.pentaho.di.trans.TransHopMeta", "org.pentaho.di.trans.TransMeta", "org.pentaho.di.trans.step.StepInterface", "org.pentaho.di.trans.step.StepMeta", "org.pentaho.di.trans.step.StepMetaInterface", "org.pentaho.di.trans.steps.dummytrans.DummyTransMeta", "org.pentaho.di.trans.steps.injector.InjectorMeta" ]
import java.util.List; import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.plugins.StepPluginType; import org.pentaho.di.trans.RowProducer; import org.pentaho.di.trans.RowStepCollector; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransHopMeta; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.trans.steps.dummytrans.DummyTransMeta; import org.pentaho.di.trans.steps.injector.InjectorMeta;
import java.util.*; import org.pentaho.di.core.*; import org.pentaho.di.core.plugins.*; import org.pentaho.di.trans.*; import org.pentaho.di.trans.step.*; import org.pentaho.di.trans.steps.dummytrans.*; import org.pentaho.di.trans.steps.injector.*;
[ "java.util", "org.pentaho.di" ]
java.util; org.pentaho.di;
82,327
void closeSocket(QSocket socket) { if (_throttle != null) _throttle.close(socket); }
void closeSocket(QSocket socket) { if (_throttle != null) _throttle.close(socket); }
/** * Notification when a socket closes. */
Notification when a socket closes
closeSocket
{ "repo_name": "christianchristensen/resin", "path": "modules/resin/src/com/caucho/network/listen/SocketLinkListener.java", "license": "gpl-2.0", "size": 40512 }
[ "com.caucho.vfs.QSocket" ]
import com.caucho.vfs.QSocket;
import com.caucho.vfs.*;
[ "com.caucho.vfs" ]
com.caucho.vfs;
2,550,185
public void setAsymmetric(boolean value) throws OntologyChangeException { try { OWLAxiom axiom = ontology.dataFactory.getOWLAsymmetricObjectPropertyAxiom(instance); if (value) ontology.ontologyManager.addAxiom(ontology.ontology, axiom); else ontology.ontologyManager.removeAxiom(ontology.ontology, axiom); } catch (OWLOntologyChangeException e) { throw new OntologyChangeException("Unable to add the characteristic axiom for \"" + getName() + "\""); } }
void function(boolean value) throws OntologyChangeException { try { OWLAxiom axiom = ontology.dataFactory.getOWLAsymmetricObjectPropertyAxiom(instance); if (value) ontology.ontologyManager.addAxiom(ontology.ontology, axiom); else ontology.ontologyManager.removeAxiom(ontology.ontology, axiom); } catch (OWLOntologyChangeException e) { throw new OntologyChangeException(STRSTR\""); } }
/** * Sets the asymmetric characteristic of the property. * @param value * The boolean value of the characteristic. * @throws OntologyChangeException * If the axiom cannot be added. **/
Sets the asymmetric characteristic of the property
setAsymmetric
{ "repo_name": "SPDSS/adss", "path": "it.polito.security.ontologies/src/it/polito/security/ontologies/OntologyObjectProperty.java", "license": "epl-1.0", "size": 31178 }
[ "it.polito.security.ontologies.exceptions.OntologyChangeException", "org.semanticweb.owlapi.model.OWLAxiom", "org.semanticweb.owlapi.model.OWLOntologyChangeException" ]
import it.polito.security.ontologies.exceptions.OntologyChangeException; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLOntologyChangeException;
import it.polito.security.ontologies.exceptions.*; import org.semanticweb.owlapi.model.*;
[ "it.polito.security", "org.semanticweb.owlapi" ]
it.polito.security; org.semanticweb.owlapi;
731,012
return Utils.pythagF(x - other.x, y - other.y, z - other.z); }
return Utils.pythagF(x - other.x, y - other.y, z - other.z); }
/** * Computes the distance from this Point to another Point. * * @param other * The other Point to measure to. * @return The distance between the two points. */
Computes the distance from this Point to another Point
distanceTo
{ "repo_name": "drmercer/Schooner-3D", "path": "Schooner 3D/src/com/supermercerbros/gameengine/collision/Point.java", "license": "apache-2.0", "size": 2183 }
[ "com.supermercerbros.gameengine.util.Utils" ]
import com.supermercerbros.gameengine.util.Utils;
import com.supermercerbros.gameengine.util.*;
[ "com.supermercerbros.gameengine" ]
com.supermercerbros.gameengine;
1,262,679
public void createPartControl(Composite parent) { viewer = new CTabFolder(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL ); tabs = new LinkedList<CTabItem>(); CTabItem itemStateFormulas = new CTabItem(viewer, SWT.CLOSE); itemStateFormulas.setText("State Formulas"); Text text = new Text(viewer, SWT.MULTI); text.setText("Content for Item State Formulas"); itemStateFormulas.setControl(text); tabs.add(itemStateFormulas); // new TableViewer(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL); // viewer.setContentProvider(new ViewContentProvider()); // viewer.setLabelProvider(new ViewLabelProvider()); // viewer.setSorter(new NameSorter()); // viewer.setInput(getViewSite()); // Create the help context id for the viewer's control // PlatformUI.getWorkbench().getHelpSystem().setHelp(viewer.getControl(), "org.cmg.ml.sam.viewer"); // makeActions(); // hookContextMenu(); // hookDoubleClickAction(); // contributeToActionBars(); } // private void hookContextMenu() { // MenuManager menuMgr = new MenuManager("#PopupMenu"); // menuMgr.setRemoveAllWhenShown(true); // menuMgr.addMenuListener(new IMenuListener() { // public void menuAboutToShow(IMenuManager manager) { // SAMModelCheckingView.this.fillContextMenu(manager); // } // }); //// Menu menu = menuMgr.createContextMenu(viewer.getControl()); //// viewer.getControl().setMenu(menu); //// getSite().registerContextMenu(menuMgr, viewer); // } // private void contributeToActionBars() { // IActionBars bars = getViewSite().getActionBars(); // fillLocalPullDown(bars.getMenuManager()); // fillLocalToolBar(bars.getToolBarManager()); // } // private void fillLocalPullDown(IMenuManager manager) { // manager.add(action1); // manager.add(new Separator()); // manager.add(action2); // } // // private void fillContextMenu(IMenuManager manager) { // manager.add(action1); // manager.add(action2); // // Other plug-ins can contribute there actions here // manager.add(new Separator(IWorkbenchActionConstants.MB_ADDITIONS)); // } // // private void 
fillLocalToolBar(IToolBarManager manager) { // manager.add(action1); // manager.add(action2); // } // // private void makeActions() { // action1 = new Action() { // public void run() { // // for (CTabItem cTabItem : tabs) { // cTabItem.dispose(); // } // tabs = new LinkedList<CTabItem>(); // // } // }; // action1.setText("Action 1"); // action1.setToolTipText("Action 1 tooltip"); // action1.setImageDescriptor(PlatformUI.getWorkbench().getSharedImages(). // getImageDescriptor(ISharedImages.IMG_OBJS_INFO_TSK)); // // action2 = new Action() { // public void run() { // CTabItem itemStateFormulas = new CTabItem(viewer, SWT.CLOSE); // itemStateFormulas.setText("State Formulas "+tabs.size()); // Text text = new Text(viewer, SWT.MULTI); // text.setText("Content for Item State Formulas"); // itemStateFormulas.setControl(text); // tabs.add(itemStateFormulas); // } // }; // action2.setText("Action 2"); // action2.setToolTipText("Action 2 tooltip"); // action2.setImageDescriptor(PlatformUI.getWorkbench().getSharedImages(). // getImageDescriptor(ISharedImages.IMG_OBJS_INFO_TSK)); // doubleClickAction = new Action() { // public void run() { //// ISelection selection = viewer.getSelection(); //// Object obj = ((IStructuredSelection)selection).getFirstElement(); //// showMessage("Double-click detected on "+obj.toString()); // } // }; // } // private void hookDoubleClickAction() { //// viewer.addDoubleClickListener(new IDoubleClickListener() { //// public void doubleClick(DoubleClickEvent event) { //// doubleClickAction.run(); //// } //// }); // } // private void showMessage(String message) { // MessageDialog.openInformation( // viewer.getShell(), //// viewer.getControl().getShell(), // "Model checking View", // message); // }
void function(Composite parent) { viewer = new CTabFolder(parent, SWT.MULTI SWT.H_SCROLL SWT.V_SCROLL ); tabs = new LinkedList<CTabItem>(); CTabItem itemStateFormulas = new CTabItem(viewer, SWT.CLOSE); itemStateFormulas.setText(STR); Text text = new Text(viewer, SWT.MULTI); text.setText(STR); itemStateFormulas.setControl(text); tabs.add(itemStateFormulas); }
/** * This is a callback that will allow us * to create the viewer and initialize it. */
This is a callback that will allow us to create the viewer and initialize it
createPartControl
{ "repo_name": "Quanticol/jSAM", "path": "PLUGIN/org.cmg.ml.sam/src/org/cmg/ml/sam/views/SAMModelCheckingView.java", "license": "epl-1.0", "size": 11327 }
[ "java.util.LinkedList", "org.eclipse.swt.custom.CTabFolder", "org.eclipse.swt.custom.CTabItem", "org.eclipse.swt.widgets.Composite", "org.eclipse.swt.widgets.Text" ]
import java.util.LinkedList; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Text;
import java.util.*; import org.eclipse.swt.custom.*; import org.eclipse.swt.widgets.*;
[ "java.util", "org.eclipse.swt" ]
java.util; org.eclipse.swt;
2,041,074
public SrfSloIndicatorTarget getEvidencesBySLO(int sloID) { List<ReportSynthesisSrfProgressTargetCases> targetCasesTemp = new ArrayList<>(); SrfSloIndicatorTarget sloTarget = new SrfSloIndicatorTarget(); GlobalUnit globalUnit = loggedCrp; if (sloID != 0) { // Get the list of liaison institutions Flagships and PMU. List<LiaisonInstitution> liaisonInstitutionsFg = globalUnit.getLiaisonInstitutions().stream() .filter(c -> c.getCrpProgram() != null && c.isActive() && c.getCrpProgram().getProgramType() == ProgramType.FLAGSHIP_PROGRAM_TYPE.getValue()) .collect(Collectors.toList()); if (liaisonInstitutionsFg != null && !liaisonInstitutionsFg.isEmpty()) { liaisonInstitutionsFg.sort(Comparator.comparing(LiaisonInstitution::getAcronym)); for (LiaisonInstitution li : liaisonInstitutionsFg) { ReportSynthesis reportSynthesisFP = reportSynthesisManager.findSynthesis(this.getActualPhase().getId(), li.getId()); // Fill sloTargets List sloTarget = srfSloIndicatorTargetManager.getSrfSloIndicatorTargetById(Long.parseLong(sloID + "")); if (sloTarget != null) { // Get value for 'no new evidence' check button ReportSynthesisSrfProgressTargetContribution sloContributionTemp = new ReportSynthesisSrfProgressTargetContribution(); if (reportSynthesisSrfProgressTargetContributionManager.findBySloTargetID(sloTarget.getId()) != null) { sloContributionTemp = reportSynthesisSrfProgressTargetContributionManager.findBySloTargetID(sloTarget.getId()).get(0); } if (sloContributionTemp != null) { sloTarget.setHasEvidence(sloContributionTemp.isHasEvidence()); } List<ReportSynthesisSrfProgressTargetCases> targetCases = new ArrayList<>(); if (reportSynthesisFP != null && reportSynthesisFP.getId() != null && sloTarget != null && sloTarget.getId() != null && reportSynthesisSrfProgressTargetCasesManager .getReportSynthesisSrfProgressId(reportSynthesisFP.getId(), sloTarget.getId()) != null) { targetCases = reportSynthesisSrfProgressTargetCasesManager .getReportSynthesisSrfProgressId(reportSynthesisFP.getId(), 
sloTarget.getId()); } if (targetCases != null && !targetCases.isEmpty()) { // Fill target cases for (ReportSynthesisSrfProgressTargetCases targetCase : targetCases) { List<ProgressTargetCaseGeographicScope> targetCaseGeographicScopes; // Geographic scope targetCaseGeographicScopes = progressTargetCaseGeographicScopeManager.findGeographicScopeByTargetCase(targetCase.getId()); if (targetCaseGeographicScopes != null) { targetCase.setGeographicScopes(targetCaseGeographicScopes); } // Geographic regions List<ProgressTargetCaseGeographicRegion> targetCaseGeographicRegions; targetCaseGeographicRegions = progressTargetCaseGeographicRegionManager.findGeographicRegionByTargetCase(targetCase.getId()); if (targetCaseGeographicRegions != null) { targetCase.setGeographicRegions(targetCaseGeographicRegions); } targetCase.setLiaisonInstitution(li); if (li.getCrpProgram() != null && li.getCrpProgram().getId() != null && targetCase.getLiaisonInstitution() != null) { CrpProgram crpProgram = crpProgramManager.getCrpProgramById(li.getCrpProgram().getId()); if (crpProgram != null) { targetCase.getLiaisonInstitution().setCrpProgram(crpProgram); } } // Geographic countries List<ProgressTargetCaseGeographicCountry> targetCaseGeographicCountries; targetCaseGeographicCountries = progressTargetCaseGeographicCountryManager.findGeographicCountryByTargetCase(targetCase.getId()); if (targetCaseGeographicCountries != null) { targetCase.setGeographicCountries(targetCaseGeographicCountries); } } targetCasesTemp.addAll(targetCases); } } } if (sloTarget != null) { sloTarget.setTargetCases(targetCasesTemp); } } } return sloTarget; }
SrfSloIndicatorTarget function(int sloID) { List<ReportSynthesisSrfProgressTargetCases> targetCasesTemp = new ArrayList<>(); SrfSloIndicatorTarget sloTarget = new SrfSloIndicatorTarget(); GlobalUnit globalUnit = loggedCrp; if (sloID != 0) { List<LiaisonInstitution> liaisonInstitutionsFg = globalUnit.getLiaisonInstitutions().stream() .filter(c -> c.getCrpProgram() != null && c.isActive() && c.getCrpProgram().getProgramType() == ProgramType.FLAGSHIP_PROGRAM_TYPE.getValue()) .collect(Collectors.toList()); if (liaisonInstitutionsFg != null && !liaisonInstitutionsFg.isEmpty()) { liaisonInstitutionsFg.sort(Comparator.comparing(LiaisonInstitution::getAcronym)); for (LiaisonInstitution li : liaisonInstitutionsFg) { ReportSynthesis reportSynthesisFP = reportSynthesisManager.findSynthesis(this.getActualPhase().getId(), li.getId()); sloTarget = srfSloIndicatorTargetManager.getSrfSloIndicatorTargetById(Long.parseLong(sloID + "")); if (sloTarget != null) { ReportSynthesisSrfProgressTargetContribution sloContributionTemp = new ReportSynthesisSrfProgressTargetContribution(); if (reportSynthesisSrfProgressTargetContributionManager.findBySloTargetID(sloTarget.getId()) != null) { sloContributionTemp = reportSynthesisSrfProgressTargetContributionManager.findBySloTargetID(sloTarget.getId()).get(0); } if (sloContributionTemp != null) { sloTarget.setHasEvidence(sloContributionTemp.isHasEvidence()); } List<ReportSynthesisSrfProgressTargetCases> targetCases = new ArrayList<>(); if (reportSynthesisFP != null && reportSynthesisFP.getId() != null && sloTarget != null && sloTarget.getId() != null && reportSynthesisSrfProgressTargetCasesManager .getReportSynthesisSrfProgressId(reportSynthesisFP.getId(), sloTarget.getId()) != null) { targetCases = reportSynthesisSrfProgressTargetCasesManager .getReportSynthesisSrfProgressId(reportSynthesisFP.getId(), sloTarget.getId()); } if (targetCases != null && !targetCases.isEmpty()) { for (ReportSynthesisSrfProgressTargetCases targetCase : targetCases) { 
List<ProgressTargetCaseGeographicScope> targetCaseGeographicScopes; targetCaseGeographicScopes = progressTargetCaseGeographicScopeManager.findGeographicScopeByTargetCase(targetCase.getId()); if (targetCaseGeographicScopes != null) { targetCase.setGeographicScopes(targetCaseGeographicScopes); } List<ProgressTargetCaseGeographicRegion> targetCaseGeographicRegions; targetCaseGeographicRegions = progressTargetCaseGeographicRegionManager.findGeographicRegionByTargetCase(targetCase.getId()); if (targetCaseGeographicRegions != null) { targetCase.setGeographicRegions(targetCaseGeographicRegions); } targetCase.setLiaisonInstitution(li); if (li.getCrpProgram() != null && li.getCrpProgram().getId() != null && targetCase.getLiaisonInstitution() != null) { CrpProgram crpProgram = crpProgramManager.getCrpProgramById(li.getCrpProgram().getId()); if (crpProgram != null) { targetCase.getLiaisonInstitution().setCrpProgram(crpProgram); } } List<ProgressTargetCaseGeographicCountry> targetCaseGeographicCountries; targetCaseGeographicCountries = progressTargetCaseGeographicCountryManager.findGeographicCountryByTargetCase(targetCase.getId()); if (targetCaseGeographicCountries != null) { targetCase.setGeographicCountries(targetCaseGeographicCountries); } } targetCasesTemp.addAll(targetCases); } } } if (sloTarget != null) { sloTarget.setTargetCases(targetCasesTemp); } } } return sloTarget; }
/** * Get the List of target cases for each SLO and flagship * * @param sloID * @return SrfSloIndicatorTarget list */
Get the List of target cases for each SLO and flagship
getEvidencesBySLO
{ "repo_name": "CCAFS/MARLO", "path": "marlo-web/src/main/java/org/cgiar/ccafs/marlo/action/annualReport/y2018/SrfProgressAction.java", "license": "gpl-3.0", "size": 69506 }
[ "java.util.ArrayList", "java.util.Comparator", "java.util.List", "java.util.stream.Collectors", "org.cgiar.ccafs.marlo.data.model.CrpProgram", "org.cgiar.ccafs.marlo.data.model.GlobalUnit", "org.cgiar.ccafs.marlo.data.model.LiaisonInstitution", "org.cgiar.ccafs.marlo.data.model.ProgramType", "org.cgiar.ccafs.marlo.data.model.ProgressTargetCaseGeographicCountry", "org.cgiar.ccafs.marlo.data.model.ProgressTargetCaseGeographicRegion", "org.cgiar.ccafs.marlo.data.model.ProgressTargetCaseGeographicScope", "org.cgiar.ccafs.marlo.data.model.ReportSynthesis", "org.cgiar.ccafs.marlo.data.model.ReportSynthesisSrfProgressTargetCases", "org.cgiar.ccafs.marlo.data.model.ReportSynthesisSrfProgressTargetContribution", "org.cgiar.ccafs.marlo.data.model.SrfSloIndicatorTarget" ]
import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; import org.cgiar.ccafs.marlo.data.model.CrpProgram; import org.cgiar.ccafs.marlo.data.model.GlobalUnit; import org.cgiar.ccafs.marlo.data.model.LiaisonInstitution; import org.cgiar.ccafs.marlo.data.model.ProgramType; import org.cgiar.ccafs.marlo.data.model.ProgressTargetCaseGeographicCountry; import org.cgiar.ccafs.marlo.data.model.ProgressTargetCaseGeographicRegion; import org.cgiar.ccafs.marlo.data.model.ProgressTargetCaseGeographicScope; import org.cgiar.ccafs.marlo.data.model.ReportSynthesis; import org.cgiar.ccafs.marlo.data.model.ReportSynthesisSrfProgressTargetCases; import org.cgiar.ccafs.marlo.data.model.ReportSynthesisSrfProgressTargetContribution; import org.cgiar.ccafs.marlo.data.model.SrfSloIndicatorTarget;
import java.util.*; import java.util.stream.*; import org.cgiar.ccafs.marlo.data.model.*;
[ "java.util", "org.cgiar.ccafs" ]
java.util; org.cgiar.ccafs;
829,092
public static String getXmlHeader1_0(String encoding) { return String.format("<?xml version=\"1.0\" encoding=\"%s\"?>", encoding.replaceAll(StringConstants.UNDERSCORE, StringConstants.HYPHEN_MINUS)); }
static String function(String encoding) { return String.format(STR1.0\STR%s\"?>", encoding.replaceAll(StringConstants.UNDERSCORE, StringConstants.HYPHEN_MINUS)); }
/** * Returns an XML header, e.g. * * <pre> * <?xml version="1.0" encoding="UTF-8"?> * </pre> * * @param encoding * the encoding to specify in the XML header * @return an XML heading line */
Returns an XML header, e.g. <code> </code>
getXmlHeader1_0
{ "repo_name": "UCDenver-ccp/common", "path": "src/main/java/edu/ucdenver/ccp/common/xml/XmlUtil.java", "license": "bsd-3-clause", "size": 4629 }
[ "edu.ucdenver.ccp.common.string.StringConstants" ]
import edu.ucdenver.ccp.common.string.StringConstants;
import edu.ucdenver.ccp.common.string.*;
[ "edu.ucdenver.ccp" ]
edu.ucdenver.ccp;
2,340,978
public void setForeground(Color color) { if (areEqual(color, getForeground())) return; foreground = color; fireStateChanged(); }
void function(Color color) { if (areEqual(color, getForeground())) return; foreground = color; fireStateChanged(); }
/** * Sets the foreground color of this <code>ColorHighlighter</code> and notifies * registered ChangeListeners. * * @param color the foreground color of this <code>ColorHighlighter</code>, * or null, to clear any existing foreground color */
Sets the foreground color of this <code>ColorHighlighter</code> and notifies registered ChangeListeners
setForeground
{ "repo_name": "syncer/swingx", "path": "swingx-core/src/main/java/org/jdesktop/swingx/decorator/ColorHighlighter.java", "license": "lgpl-2.1", "size": 9792 }
[ "java.awt.Color" ]
import java.awt.Color;
import java.awt.*;
[ "java.awt" ]
java.awt;
2,335,707
public void testFloor() { ConcurrentSkipListSet q = set5(); Object e1 = q.floor(three); assertEquals(three, e1); Object e2 = q.floor(six); assertEquals(five, e2); Object e3 = q.floor(one); assertEquals(one, e3); Object e4 = q.floor(zero); assertNull(e4); }
void function() { ConcurrentSkipListSet q = set5(); Object e1 = q.floor(three); assertEquals(three, e1); Object e2 = q.floor(six); assertEquals(five, e2); Object e3 = q.floor(one); assertEquals(one, e3); Object e4 = q.floor(zero); assertNull(e4); }
/** * floor returns preceding element */
floor returns preceding element
testFloor
{ "repo_name": "YouDiSN/OpenJDK-Research", "path": "jdk9/jdk/test/java/util/concurrent/tck/ConcurrentSkipListSetTest.java", "license": "gpl-2.0", "size": 31925 }
[ "java.util.concurrent.ConcurrentSkipListSet" ]
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
2,348,901
public static <T extends TNumber> Operand<T> intersection( Scope scope, Operand<T> a, Operand<T> b) { return setOperation(scope, a, b, Operation.INTERSECTION); }
static <T extends TNumber> Operand<T> function( Scope scope, Operand<T> a, Operand<T> b) { return setOperation(scope, a, b, Operation.INTERSECTION); }
/** * Computes set intersection of elements in last dimension of <code>a</code> and <code>b</code>. * * @param scope current scope * @param a The first operand representing set <code>a</code> * @param b The other operand representing set <code>b</code> * @param <T>the data type for the sets * @return An Operand with the same rank as <code>a</code> and <code>b</code>, and all but the * last dimension the * same. Elements along the last dimension contain the results of the set * operation. */
Computes set intersection of elements in last dimension of <code>a</code> and <code>b</code>
intersection
{ "repo_name": "tensorflow/java", "path": "tensorflow-framework/src/main/java/org/tensorflow/framework/op/sets/Sets.java", "license": "apache-2.0", "size": 6074 }
[ "org.tensorflow.Operand", "org.tensorflow.op.Scope", "org.tensorflow.types.family.TNumber" ]
import org.tensorflow.Operand; import org.tensorflow.op.Scope; import org.tensorflow.types.family.TNumber;
import org.tensorflow.*; import org.tensorflow.op.*; import org.tensorflow.types.family.*;
[ "org.tensorflow", "org.tensorflow.op", "org.tensorflow.types" ]
org.tensorflow; org.tensorflow.op; org.tensorflow.types;
253,203
BOp getQuery();
BOp getQuery();
/** * The query. */
The query
getQuery
{ "repo_name": "smalyshev/blazegraph", "path": "bigdata/src/java/com/bigdata/bop/engine/IRunningQuery.java", "license": "gpl-2.0", "size": 6805 }
[ "com.bigdata.bop.BOp" ]
import com.bigdata.bop.BOp;
import com.bigdata.bop.*;
[ "com.bigdata.bop" ]
com.bigdata.bop;
1,594,914
public List<ClusterNode> get(int part);
List<ClusterNode> function(int part);
/** * Get affinity nodes for partition. * * @param part Partition. * @return Affinity nodes. */
Get affinity nodes for partition
get
{ "repo_name": "ilantukh/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/affinity/AffinityAssignment.java", "license": "apache-2.0", "size": 3844 }
[ "java.util.List", "org.apache.ignite.cluster.ClusterNode" ]
import java.util.List; import org.apache.ignite.cluster.ClusterNode;
import java.util.*; import org.apache.ignite.cluster.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
2,632,743
InferenceStepResult applyAutomatedInferenceRule(@NonNull InferenceTargets targets);
InferenceStepResult applyAutomatedInferenceRule(@NonNull InferenceTargets targets);
/** * Applies this inference rule on the given targets. * * @param targets the statements on which to apply the inference rule. * * @return the result of the inference rule application. */
Applies this inference rule on the given targets
applyAutomatedInferenceRule
{ "repo_name": "urbas/mixr", "path": "devel/MixR/src/mixr/logic/AutomatedInferenceRule.java", "license": "mit", "size": 2082 }
[ "org.netbeans.api.annotations.common.NonNull" ]
import org.netbeans.api.annotations.common.NonNull;
import org.netbeans.api.annotations.common.*;
[ "org.netbeans.api" ]
org.netbeans.api;
2,518,377
public void disableAutoClose() throws SQLException { }
void function() throws SQLException { }
/** * * Disables autoclosing of connections and result sets. This is * not supported by tinySQL. * @see java.sql.Connection#disableAutoClose * */
Disables autoclosing of connections and result sets. This is not supported by tinySQL
disableAutoClose
{ "repo_name": "ryangoodrich/GT_CS4420_Spring_2013_TinySQL", "path": "com/sqlmagic/tinysql/tinySQLConnection.java", "license": "lgpl-2.1", "size": 15638 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,659,319
public void setTextSize(float textSize, int unit) { float rawTextSize = TypedValue.applyDimension( unit, textSize, getResources().getDisplayMetrics()); mTextPaint.setTextSize(rawTextSize); }
void function(float textSize, int unit) { float rawTextSize = TypedValue.applyDimension( unit, textSize, getResources().getDisplayMetrics()); mTextPaint.setTextSize(rawTextSize); }
/** * Set the default text size to a given unit and value. See {@link * TypedValue} for the possible dimension units. * * @param textSize The desired size in the given units. * @param unit The desired dimension unit. */
Set the default text size to a given unit and value. See <code>TypedValue</code> for the possible dimension units
setTextSize
{ "repo_name": "lsjwzh/FastTextView", "path": "widget.FastTextView/src/main/java/com/lsjwzh/widget/text/SingleLineTextView.java", "license": "apache-2.0", "size": 4507 }
[ "android.util.TypedValue" ]
import android.util.TypedValue;
import android.util.*;
[ "android.util" ]
android.util;
377,816
@GET @Path("/owner-resources/{id}") @AuthNimbusOp(value = "getOwnerResourceSummaries") @Produces("application/json") public Response getOwnerResource(@PathParam("id") String id, @QueryParam(callbackParameterName) String callback) throws TException { getOwnerResourceSummariesMeter.mark(); try (NimbusClient nimbusClient = NimbusClient.getConfiguredClient(config)) { return UIHelpers.makeStandardResponse( UIHelpers.getOwnerResourceSummary( nimbusClient.getClient().getOwnerResourceSummaries(id), nimbusClient.getClient(), id, config), callback ); } }
@Path(STR) @AuthNimbusOp(value = STR) @Produces(STR) Response function(@PathParam("id") String id, @QueryParam(callbackParameterName) String callback) throws TException { getOwnerResourceSummariesMeter.mark(); try (NimbusClient nimbusClient = NimbusClient.getConfiguredClient(config)) { return UIHelpers.makeStandardResponse( UIHelpers.getOwnerResourceSummary( nimbusClient.getClient().getOwnerResourceSummaries(id), nimbusClient.getClient(), id, config), callback ); } }
/** * /api/v1/owner-resources/:id -> owner resources. */
api/v1/owner-resources/:id -> owner resources
getOwnerResource
{ "repo_name": "kishorvpatil/incubator-storm", "path": "storm-webapp/src/main/java/org/apache/storm/daemon/ui/resources/StormApiResource.java", "license": "apache-2.0", "size": 30891 }
[ "javax.ws.rs.Path", "javax.ws.rs.PathParam", "javax.ws.rs.Produces", "javax.ws.rs.QueryParam", "javax.ws.rs.core.Response", "org.apache.storm.daemon.ui.UIHelpers", "org.apache.storm.thrift.TException", "org.apache.storm.utils.NimbusClient" ]
import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response; import org.apache.storm.daemon.ui.UIHelpers; import org.apache.storm.thrift.TException; import org.apache.storm.utils.NimbusClient;
import javax.ws.rs.*; import javax.ws.rs.core.*; import org.apache.storm.daemon.ui.*; import org.apache.storm.thrift.*; import org.apache.storm.utils.*;
[ "javax.ws", "org.apache.storm" ]
javax.ws; org.apache.storm;
2,365,174
CompletionStage<Void> repeatMenuAsync(); } public static final String DICATION_ENGINE_PLUGIN_NAME = "dictationEngine"; public static final String GRAMMAR_ENGINE_PLUGIN_NAME = "grammarEngine"; public static final String JSGF_PARSER_ENGINE_PLUGIN_NAME = "jsgfParser"; public static final String KEYWORD_ENGINE_PLUGIN_NAME = "keywordEngine"; public static final String SYNTHESIS_ENGINE_PLUGIN_NAME = "synthesisEngine"; public static final String SPEAKER_PLUGIN_NAME = "speaker"; public static final String MICROPHONE_PLUGIN_NAME = "microphone"; private static final Pattern JSGF_SPECIAL = Pattern.compile("([;=|*+<>()\\[\\]{}/\\\\])");
CompletionStage<Void> repeatMenuAsync(); } public static final String DICATION_ENGINE_PLUGIN_NAME = STR; public static final String GRAMMAR_ENGINE_PLUGIN_NAME = STR; public static final String JSGF_PARSER_ENGINE_PLUGIN_NAME = STR; public static final String KEYWORD_ENGINE_PLUGIN_NAME = STR; public static final String SYNTHESIS_ENGINE_PLUGIN_NAME = STR; public static final String SPEAKER_PLUGIN_NAME = STR; public static final String MICROPHONE_PLUGIN_NAME = STR; private static final Pattern JSGF_SPECIAL = Pattern.compile(STR);
/** * Repeat this menu asynchronously. */
Repeat this menu asynchronously
repeatMenuAsync
{ "repo_name": "candrews/commander", "path": "api/src/main/java/com/integralblue/commander/Manager.java", "license": "agpl-3.0", "size": 6114 }
[ "java.util.concurrent.CompletionStage", "java.util.regex.Pattern" ]
import java.util.concurrent.CompletionStage; import java.util.regex.Pattern;
import java.util.concurrent.*; import java.util.regex.*;
[ "java.util" ]
java.util;
170,347
private DataCollectionConfigDao getDataCollectionConfigDao() { if (m_dataCollectionConfigDao == null) { setDataCollectionConfigDao(DataCollectionConfigFactory.getInstance()); } return m_dataCollectionConfigDao; }
DataCollectionConfigDao function() { if (m_dataCollectionConfigDao == null) { setDataCollectionConfigDao(DataCollectionConfigFactory.getInstance()); } return m_dataCollectionConfigDao; }
/** * Gets the data collection configuration DAO. * * @return the data collection configuration DAO */
Gets the data collection configuration DAO
getDataCollectionConfigDao
{ "repo_name": "aihua/opennms", "path": "opennms-services/src/main/java/org/opennms/netmgt/collectd/SnmpPropertyExtenderProcessor.java", "license": "agpl-3.0", "size": 5581 }
[ "org.opennms.netmgt.config.DataCollectionConfigFactory", "org.opennms.netmgt.config.api.DataCollectionConfigDao" ]
import org.opennms.netmgt.config.DataCollectionConfigFactory; import org.opennms.netmgt.config.api.DataCollectionConfigDao;
import org.opennms.netmgt.config.*; import org.opennms.netmgt.config.api.*;
[ "org.opennms.netmgt" ]
org.opennms.netmgt;
2,469,849
public static Criteria buildCriteriaFromMap(Map fieldValues, Object businessObject) { Criteria criteria = new Criteria(); BusinessObjectEntry entry = (BusinessObjectEntry) KRADServiceLocatorWeb.getDataDictionaryService().getDataDictionary().getBusinessObjectEntry(businessObject.getClass().getName()); //FieldDefinition lookupField = entry.getLookupDefinition().getLookupField(attributeName); //System.out.println(entry.getTitleAttribute()); try { Iterator propsIter = fieldValues.keySet().iterator(); while (propsIter.hasNext()) { String propertyName = (String) propsIter.next(); Object propertyValueObject = fieldValues.get(propertyName); String propertyValue = ""; FieldDefinition lookupField = (entry != null) ? entry.getLookupDefinition().getLookupField(propertyName) : null; if (lookupField != null && lookupField.isTreatWildcardsAndOperatorsAsLiteral()) { propertyValue = (propertyValueObject != null) ? StringUtils.replace(propertyValueObject.toString().trim(), "*", "\\*") : ""; } else { //propertyValue = (propertyValueObject != null) ? propertyValueObject.toString().trim() : ""; propertyValue = (propertyValueObject != null) ? StringUtils.replace(propertyValueObject.toString().trim(), "*", "%") : ""; } // if searchValue is empty and the key is not a valid property ignore boolean isCreated = createCriteria(businessObject, propertyValue, propertyName, criteria); if (!isCreated) { continue; } } } catch (Exception e) { LOG.error("OJBUtility.buildCriteriaFromMap()" + e); } return criteria; }
static Criteria function(Map fieldValues, Object businessObject) { Criteria criteria = new Criteria(); BusinessObjectEntry entry = (BusinessObjectEntry) KRADServiceLocatorWeb.getDataDictionaryService().getDataDictionary().getBusinessObjectEntry(businessObject.getClass().getName()); try { Iterator propsIter = fieldValues.keySet().iterator(); while (propsIter.hasNext()) { String propertyName = (String) propsIter.next(); Object propertyValueObject = fieldValues.get(propertyName); String propertyValue = STR*STR\\*STRSTR*STR%STRSTROJBUtility.buildCriteriaFromMap()" + e); } return criteria; }
/** * This method builds an OJB query criteria based on the input field map * * @param fieldValues the input field map * @param businessObject the given business object * @return an OJB query criteria */
This method builds an OJB query criteria based on the input field map
buildCriteriaFromMap
{ "repo_name": "Ariah-Group/Finance", "path": "af_webapp/src/main/java/org/kuali/kfs/gl/OJBUtility.java", "license": "apache-2.0", "size": 7869 }
[ "java.util.Iterator", "java.util.Map", "org.apache.ojb.broker.query.Criteria", "org.kuali.rice.kns.datadictionary.BusinessObjectEntry", "org.kuali.rice.krad.service.KRADServiceLocatorWeb" ]
import java.util.Iterator; import java.util.Map; import org.apache.ojb.broker.query.Criteria; import org.kuali.rice.kns.datadictionary.BusinessObjectEntry; import org.kuali.rice.krad.service.KRADServiceLocatorWeb;
import java.util.*; import org.apache.ojb.broker.query.*; import org.kuali.rice.kns.datadictionary.*; import org.kuali.rice.krad.service.*;
[ "java.util", "org.apache.ojb", "org.kuali.rice" ]
java.util; org.apache.ojb; org.kuali.rice;
1,597,847
public boolean canBeUsed(Context ctx, @Nullable Activity activity) { UserRecoverableErrorHandler errorHandler = activity != null ? new UserRecoverableErrorHandler.ModalDialog(activity) : new UserRecoverableErrorHandler.SystemNotification(); return ExternalAuthUtils.getInstance().canUseGooglePlayServices(ctx, errorHandler); }
boolean function(Context ctx, @Nullable Activity activity) { UserRecoverableErrorHandler errorHandler = activity != null ? new UserRecoverableErrorHandler.ModalDialog(activity) : new UserRecoverableErrorHandler.SystemNotification(); return ExternalAuthUtils.getInstance().canUseGooglePlayServices(ctx, errorHandler); }
/** * Returns whether the AccountIdProvider can be used. * Since the AccountIdProvider queries Google Play services, this basically checks whether * Google Play services is available. * * @param activity If an activity is provided, it will be used to show a Modal Dialog notifying * the user to update Google Play services, else a System notification is shown. */
Returns whether the AccountIdProvider can be used. Since the AccountIdProvider queries Google Play services, this basically checks whether Google Play services is available
canBeUsed
{ "repo_name": "CapOM/ChromiumGStreamerBackend", "path": "chrome/android/java/src/org/chromium/chrome/browser/signin/AccountIdProvider.java", "license": "bsd-3-clause", "size": 3178 }
[ "android.app.Activity", "android.content.Context", "javax.annotation.Nullable", "org.chromium.chrome.browser.externalauth.ExternalAuthUtils", "org.chromium.chrome.browser.externalauth.UserRecoverableErrorHandler" ]
import android.app.Activity; import android.content.Context; import javax.annotation.Nullable; import org.chromium.chrome.browser.externalauth.ExternalAuthUtils; import org.chromium.chrome.browser.externalauth.UserRecoverableErrorHandler;
import android.app.*; import android.content.*; import javax.annotation.*; import org.chromium.chrome.browser.externalauth.*;
[ "android.app", "android.content", "javax.annotation", "org.chromium.chrome" ]
android.app; android.content; javax.annotation; org.chromium.chrome;
2,814,873
@Nonnull Context setHost(@Nonnull String host);
@Nonnull Context setHost(@Nonnull String host);
/** * Set the host (without the port value). * * Please keep in mind this method doesn't alter/modify the <code>host</code> header. * * @param host Host value. * @return This context. */
Set the host (without the port value). Please keep in mind this method doesn't alter/modify the <code>host</code> header
setHost
{ "repo_name": "jooby-project/jooby", "path": "jooby/src/main/java/io/jooby/Context.java", "license": "apache-2.0", "size": 41138 }
[ "javax.annotation.Nonnull" ]
import javax.annotation.Nonnull;
import javax.annotation.*;
[ "javax.annotation" ]
javax.annotation;
2,850,239
@Override public void onPause(boolean multitasking) { if (sleepOnPause) { stopIoManager(); if (port != null) { try { port.close(); } catch (IOException e) { // Ignore } port = null; } } }
void function(boolean multitasking) { if (sleepOnPause) { stopIoManager(); if (port != null) { try { port.close(); } catch (IOException e) { } port = null; } } }
/** * Paused activity handler * @see org.apache.cordova.CordovaPlugin#onPause(boolean) */
Paused activity handler
onPause
{ "repo_name": "XisoDev/xisoPos", "path": "plugins/fr.drangies.cordova.serial/src/android/fr/drangies/cordova/serial/Serial.java", "license": "gpl-3.0", "size": 19469 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,933,914
private static void initOrientation(Configuration config) { int orientation = UIDevice.currentDevice().getOrientation(); switch (orientation) { case UIDeviceOrientation.Portrait: case UIDeviceOrientation.PortraitUpsideDown: case UIDeviceOrientation.FaceUp: case UIDeviceOrientation.FaceDown: config.orientation = Configuration.ORIENTATION_PORTRAIT; break; case UIDeviceOrientation.LandscapeLeft: case UIDeviceOrientation.LandscapeRight: config.orientation = Configuration.ORIENTATION_LANDSCAPE; break; default: config.orientation = Configuration.ORIENTATION_UNDEFINED; break; } }
static void function(Configuration config) { int orientation = UIDevice.currentDevice().getOrientation(); switch (orientation) { case UIDeviceOrientation.Portrait: case UIDeviceOrientation.PortraitUpsideDown: case UIDeviceOrientation.FaceUp: case UIDeviceOrientation.FaceDown: config.orientation = Configuration.ORIENTATION_PORTRAIT; break; case UIDeviceOrientation.LandscapeLeft: case UIDeviceOrientation.LandscapeRight: config.orientation = Configuration.ORIENTATION_LANDSCAPE; break; default: config.orientation = Configuration.ORIENTATION_UNDEFINED; break; } }
/** * * Initialize orientation * * @param config * The configuration to be initialized. * */
Initialize orientation
initOrientation
{ "repo_name": "shannah/cn1", "path": "Ports/iOSPort/xmlvm/src/android2iphone/android/internal/ConfigurationFactory.java", "license": "gpl-2.0", "size": 5387 }
[ "android.content.res.Configuration", "org.xmlvm.iphone.UIDevice", "org.xmlvm.iphone.UIDeviceOrientation" ]
import android.content.res.Configuration; import org.xmlvm.iphone.UIDevice; import org.xmlvm.iphone.UIDeviceOrientation;
import android.content.res.*; import org.xmlvm.iphone.*;
[ "android.content", "org.xmlvm.iphone" ]
android.content; org.xmlvm.iphone;
762,658
public PDBorderStyleDictionary getBorderStyle() { COSDictionary bs = (COSDictionary) annot.getDictionaryObject(COSName.BS); if (bs != null) { return new PDBorderStyleDictionary(bs); } else { return null; } }
PDBorderStyleDictionary function() { COSDictionary bs = (COSDictionary) annot.getDictionaryObject(COSName.BS); if (bs != null) { return new PDBorderStyleDictionary(bs); } else { return null; } }
/** * This will retrieve the border style dictionary, specifying the width and dash pattern used in drawing the * annotation. * * @return the border style dictionary. */
This will retrieve the border style dictionary, specifying the width and dash pattern used in drawing the annotation
getBorderStyle
{ "repo_name": "mathieufortin01/pdfbox", "path": "pdfbox/src/main/java/org/apache/pdfbox/pdmodel/fdf/FDFAnnotation.java", "license": "apache-2.0", "size": 27582 }
[ "org.apache.pdfbox.cos.COSDictionary", "org.apache.pdfbox.cos.COSName", "org.apache.pdfbox.pdmodel.interactive.annotation.PDBorderStyleDictionary" ]
import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.pdmodel.interactive.annotation.PDBorderStyleDictionary;
import org.apache.pdfbox.cos.*; import org.apache.pdfbox.pdmodel.interactive.annotation.*;
[ "org.apache.pdfbox" ]
org.apache.pdfbox;
523,521
private Map<Integer, List<HostDetailsEntityVO>> getAllHostDetailsFromDB(HttpServletRequest req) throws DemoPortalException { Map<Integer, List<HostDetailsEntityVO>> map =new HashMap<Integer, List<HostDetailsEntityVO>>(); //Get List of all host available. List<HostDetailsEntityVO> listOfVos = demoPortalServices.getHostListFromDB(getAttestationService(req,AttestationService.class)); int no_row_per_page = Integer.parseInt(TDPConfig.getConfiguration().getString("mtwilson.tdbp.paginationRowCount")); //Divide List of all host into a subList based on the value of host per page. List<List<HostDetailsEntityVO>> list = Lists.partition(listOfVos, no_row_per_page); //Creating a Map view of host list based on the Page No. int i=1; for (List<HostDetailsEntityVO> listForMap : list) { map.put(i, listForMap); i++; } //setting map into session attribute; HttpSession session = req.getSession(); session.setAttribute("HostVoList", map); return map; }
Map<Integer, List<HostDetailsEntityVO>> function(HttpServletRequest req) throws DemoPortalException { Map<Integer, List<HostDetailsEntityVO>> map =new HashMap<Integer, List<HostDetailsEntityVO>>(); List<HostDetailsEntityVO> listOfVos = demoPortalServices.getHostListFromDB(getAttestationService(req,AttestationService.class)); int no_row_per_page = Integer.parseInt(TDPConfig.getConfiguration().getString(STR)); List<List<HostDetailsEntityVO>> list = Lists.partition(listOfVos, no_row_per_page); int i=1; for (List<HostDetailsEntityVO> listForMap : list) { map.put(i, listForMap); i++; } HttpSession session = req.getSession(); session.setAttribute(STR, map); return map; }
/** * This method is used as a utility method to get Map Views for all host based on Page_no value. * * @param req * @return Map<Integer, List<HostDetailsEntityVO>> * @throws DemoPortalException */
This method is used as a utility method to get Map Views for all host based on Page_no value
getAllHostDetailsFromDB
{ "repo_name": "xe1gyq/OpenAttestation", "path": "portals/TrustDashBoard/src/main/java/com/intel/mountwilson/controller/DemoPortalDataController.java", "license": "bsd-3-clause", "size": 23437 }
[ "com.google.common.collect.Lists", "com.intel.mountwilson.common.DemoPortalException", "com.intel.mountwilson.common.TDPConfig", "com.intel.mountwilson.datamodel.HostDetailsEntityVO", "com.intel.mtwilson.AttestationService", "java.util.HashMap", "java.util.List", "java.util.Map", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpSession" ]
import com.google.common.collect.Lists; import com.intel.mountwilson.common.DemoPortalException; import com.intel.mountwilson.common.TDPConfig; import com.intel.mountwilson.datamodel.HostDetailsEntityVO; import com.intel.mtwilson.AttestationService; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession;
import com.google.common.collect.*; import com.intel.mountwilson.common.*; import com.intel.mountwilson.datamodel.*; import com.intel.mtwilson.*; import java.util.*; import javax.servlet.http.*;
[ "com.google.common", "com.intel.mountwilson", "com.intel.mtwilson", "java.util", "javax.servlet" ]
com.google.common; com.intel.mountwilson; com.intel.mtwilson; java.util; javax.servlet;
739,199
public static void close(URLConnection conn) { if (conn != null) { try { if (conn instanceof JarURLConnection) { JarURLConnection juc = (JarURLConnection) conn; JarFile jf = juc.getJarFile(); jf.close(); jf = null; } else if (conn instanceof HttpURLConnection) { ((HttpURLConnection) conn).disconnect(); } } catch (IOException exc) { //ignore } } }
static void function(URLConnection conn) { if (conn != null) { try { if (conn instanceof JarURLConnection) { JarURLConnection juc = (JarURLConnection) conn; JarFile jf = juc.getJarFile(); jf.close(); jf = null; } else if (conn instanceof HttpURLConnection) { ((HttpURLConnection) conn).disconnect(); } } catch (IOException exc) { } } }
/** * Closes an URLConnection if its concrete implementation provides * a way to close it that Ant knows of. * * @param conn connection, can be null * @since Ant 1.8.0 */
Closes an URLConnection if its concrete implementation provides a way to close it that Ant knows of
close
{ "repo_name": "BIORIMP/biorimp", "path": "BIO-RIMP/test_data/code/antapache/src/main/org/apache/tools/ant/util/FileUtils.java", "license": "gpl-2.0", "size": 67096 }
[ "java.io.IOException", "java.net.HttpURLConnection", "java.net.JarURLConnection", "java.net.URLConnection", "java.util.jar.JarFile" ]
import java.io.IOException; import java.net.HttpURLConnection; import java.net.JarURLConnection; import java.net.URLConnection; import java.util.jar.JarFile;
import java.io.*; import java.net.*; import java.util.jar.*;
[ "java.io", "java.net", "java.util" ]
java.io; java.net; java.util;
2,842,547
public boolean doRemoteCriteria(StringBuffer strbFilter, boolean bIncludeFileName, Vector<BaseField> vParamList, Document doc) { if (m_convFlag != null) if (!m_convFlag.getState()) return super.doRemoteCriteria(strbFilter, bIncludeFileName, vParamList, doc); // Flag not set, don't process it BaseField recordField = m_fldToCheck; if (m_fldToCheck == null) if (fieldNameToCheck != null) recordField = this.getOwner().getField(fieldNameToCheck); //.getText(recordString); if (m_fldToCompare != null) if (m_bDontFilterIfNullCompare) if ((m_fldToCompare.isNull()) && (m_fldToCompare.isNullable())) // Null field return super.doRemoteCriteria(strbFilter, bIncludeFileName, vParamList, doc); // Dont skip this record boolean bDontSkip; if (m_fldToCompare != null) bDontSkip = this.fieldCompare(recordField, m_fldToCompare, m_strSeekSign, strbFilter, bIncludeFileName, vParamList, doc); else bDontSkip = this.fieldCompare(recordField, m_strToCompare, m_strSeekSign, strbFilter, bIncludeFileName, vParamList, doc); if (strbFilter != null) bDontSkip = true; // Don't need to compare, if I'm creating a filter to pass to SQL if (bDontSkip) return super.doRemoteCriteria(strbFilter, bIncludeFileName, vParamList, doc); // Dont skip this record else return false; // Skip this one }
boolean function(StringBuffer strbFilter, boolean bIncludeFileName, Vector<BaseField> vParamList, Document doc) { if (m_convFlag != null) if (!m_convFlag.getState()) return super.doRemoteCriteria(strbFilter, bIncludeFileName, vParamList, doc); BaseField recordField = m_fldToCheck; if (m_fldToCheck == null) if (fieldNameToCheck != null) recordField = this.getOwner().getField(fieldNameToCheck); if (m_fldToCompare != null) if (m_bDontFilterIfNullCompare) if ((m_fldToCompare.isNull()) && (m_fldToCompare.isNullable())) return super.doRemoteCriteria(strbFilter, bIncludeFileName, vParamList, doc); boolean bDontSkip; if (m_fldToCompare != null) bDontSkip = this.fieldCompare(recordField, m_fldToCompare, m_strSeekSign, strbFilter, bIncludeFileName, vParamList, doc); else bDontSkip = this.fieldCompare(recordField, m_strToCompare, m_strSeekSign, strbFilter, bIncludeFileName, vParamList, doc); if (strbFilter != null) bDontSkip = true; if (bDontSkip) return super.doRemoteCriteria(strbFilter, bIncludeFileName, vParamList, doc); else return false; }
/** * Set up/do the remote criteria. * @param strbFilter The SQL query string to add to. * @param bIncludeFileName Include the file name with this query? * @param vParamList The param list to add the raw data to (for prepared statements). * @param doc * @return True if you should not skip this record (does a check on the local data). */
Set up/do the remote criteria
doRemoteCriteria
{ "repo_name": "jbundle/jbundle", "path": "base/base/src/main/java/org/jbundle/base/db/filter/CompareFileFilter.java", "license": "gpl-3.0", "size": 13232 }
[ "java.util.Vector", "org.bson.Document", "org.jbundle.base.field.BaseField" ]
import java.util.Vector; import org.bson.Document; import org.jbundle.base.field.BaseField;
import java.util.*; import org.bson.*; import org.jbundle.base.field.*;
[ "java.util", "org.bson", "org.jbundle.base" ]
java.util; org.bson; org.jbundle.base;
1,874,000
public static void expect(final Buffer buffer, final char ch) throws SipParseException { try { final int i = buffer.readUnsignedByte(); if (i != ch) { throw new SipParseException(buffer.getReaderIndex(), "Expected '" + ch + "' got '" + i + "'"); } } catch (final IOException e) { throw new SipParseException(buffer.getReaderIndex(), UNABLE_TO_READ_FROM_STREAM, e); } }
static void function(final Buffer buffer, final char ch) throws SipParseException { try { final int i = buffer.readUnsignedByte(); if (i != ch) { throw new SipParseException(buffer.getReaderIndex(), STR + ch + STR + i + "'"); } } catch (final IOException e) { throw new SipParseException(buffer.getReaderIndex(), UNABLE_TO_READ_FROM_STREAM, e); } }
/** * Check so that the next byte in the passed in buffer is the expected one. * * @param buffer * the buffer that we will check. * @param ch * the expected char * @throws SipParseException * in case the expected char is not the next char in the buffer * or if there is an error reading from the underlying stream */
Check so that the next byte in the passed in buffer is the expected one
expect
{ "repo_name": "aboutsip/pkts", "path": "pkts-sip/src/main/java/io/pkts/packet/sip/impl/SipParser.java", "license": "mit", "size": 86189 }
[ "io.pkts.buffer.Buffer", "io.pkts.packet.sip.SipParseException", "java.io.IOException" ]
import io.pkts.buffer.Buffer; import io.pkts.packet.sip.SipParseException; import java.io.IOException;
import io.pkts.buffer.*; import io.pkts.packet.sip.*; import java.io.*;
[ "io.pkts.buffer", "io.pkts.packet", "java.io" ]
io.pkts.buffer; io.pkts.packet; java.io;
2,310,511
@Override public void release(SocketWrapper<Long> socket, Processor<Long> processor, boolean isSocketClosing, boolean addToPoller) { processor.recycle(isSocketClosing); recycledProcessors.offer(processor); if (addToPoller) { ((AprEndpoint)proto.endpoint).getPoller().add( socket.getSocket().longValue(), proto.endpoint.getKeepAliveTimeout(), AprEndpoint.Poller.FLAGS_READ); } }
void function(SocketWrapper<Long> socket, Processor<Long> processor, boolean isSocketClosing, boolean addToPoller) { processor.recycle(isSocketClosing); recycledProcessors.offer(processor); if (addToPoller) { ((AprEndpoint)proto.endpoint).getPoller().add( socket.getSocket().longValue(), proto.endpoint.getKeepAliveTimeout(), AprEndpoint.Poller.FLAGS_READ); } }
/** * Expected to be used by the handler once the processor is no longer * required. */
Expected to be used by the handler once the processor is no longer required
release
{ "repo_name": "GazeboHub/ghub-portal-doc", "path": "doc/modelio/GHub Portal/mda/JavaDesigner/res/java/tomcat/java/org/apache/coyote/ajp/AjpAprProtocol.java", "license": "epl-1.0", "size": 5081 }
[ "org.apache.coyote.Processor", "org.apache.tomcat.util.net.AprEndpoint", "org.apache.tomcat.util.net.SocketWrapper" ]
import org.apache.coyote.Processor; import org.apache.tomcat.util.net.AprEndpoint; import org.apache.tomcat.util.net.SocketWrapper;
import org.apache.coyote.*; import org.apache.tomcat.util.net.*;
[ "org.apache.coyote", "org.apache.tomcat" ]
org.apache.coyote; org.apache.tomcat;
2,303,748
public void setDisplayOrder(Number value) { setAttributeInternal(DISPLAYORDER, value); }
void function(Number value) { setAttributeInternal(DISPLAYORDER, value); }
/** * * Sets <code>value</code> as attribute value for DISPLAY_ORDER using the alias name DisplayOrder */
Sets <code>value</code> as attribute value for DISPLAY_ORDER using the alias name DisplayOrder
setDisplayOrder
{ "repo_name": "CBIIT/cadsr-util", "path": "cadsrutil/src/java/gov/nih/nci/ncicb/cadsr/common/persistence/bc4j/ModulesForAFormViewRowImpl.java", "license": "bsd-3-clause", "size": 11301 }
[ "oracle.jbo.domain.Number" ]
import oracle.jbo.domain.Number;
import oracle.jbo.domain.*;
[ "oracle.jbo.domain" ]
oracle.jbo.domain;
2,022,899
public static boolean deleteDirectory(File file) { int tries = 0; int maxTries = 5; boolean exists = true; while (exists && (tries < maxTries)) { recursivelyDeleteDirectory(file); tries++; exists = file.exists(); if (exists) { try { Thread.sleep(1000); } catch (InterruptedException e) { // Ignore } } } return !exists; }
static boolean function(File file) { int tries = 0; int maxTries = 5; boolean exists = true; while (exists && (tries < maxTries)) { recursivelyDeleteDirectory(file); tries++; exists = file.exists(); if (exists) { try { Thread.sleep(1000); } catch (InterruptedException e) { } } } return !exists; }
/** * Recursively delete a directory, useful to zapping test data * * @param file the directory to be deleted * @return <tt>false</tt> if error deleting directory */
Recursively delete a directory, useful to zapping test data
deleteDirectory
{ "repo_name": "onders86/camel", "path": "components/camel-testng/src/main/java/org/apache/camel/testng/TestSupport.java", "license": "apache-2.0", "size": 18934 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
462,790
public static void validateConsumerConfiguration(Properties config) { checkNotNull(config, "config can not be null"); validateAwsConfiguration(config); if (config.containsKey(ConsumerConfigConstants.STREAM_INITIAL_POSITION)) { String initPosType = config.getProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION); // specified initial position in stream must be either LATEST, TRIM_HORIZON or AT_TIMESTAMP try { InitialPosition.valueOf(initPosType); } catch (IllegalArgumentException e) { StringBuilder sb = new StringBuilder(); for (InitialPosition pos : InitialPosition.values()) { sb.append(pos.toString()).append(", "); } throw new IllegalArgumentException("Invalid initial position in stream set in config. Valid values are: " + sb.toString()); } // specified initial timestamp in stream when using AT_TIMESTAMP if (InitialPosition.valueOf(initPosType) == InitialPosition.AT_TIMESTAMP) { if (!config.containsKey(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP)) { throw new IllegalArgumentException("Please set value for initial timestamp ('" + ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP + "') when using AT_TIMESTAMP initial position."); } validateOptionalDateProperty(config, ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, config.getProperty(ConsumerConfigConstants.STREAM_TIMESTAMP_DATE_FORMAT, ConsumerConfigConstants.DEFAULT_STREAM_TIMESTAMP_DATE_FORMAT), "Invalid value given for initial timestamp for AT_TIMESTAMP initial position in stream. " + "Must be a valid format: yyyy-MM-dd'T'HH:mm:ss.SSSXXX or non-negative double value. For example, 2016-04-04T19:58:46.480-00:00 or 1459799926.480 ."); } } validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_MAX, "Invalid value given for maximum records per getRecords shard operation. 
Must be a valid non-negative integer value."); validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_RETRIES, "Invalid value given for maximum retry attempts for getRecords shard operation. Must be a valid non-negative integer value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_BASE, "Invalid value given for get records operation base backoff milliseconds. Must be a valid non-negative long value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_MAX, "Invalid value given for get records operation max backoff milliseconds. Must be a valid non-negative long value."); validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_EXPONENTIAL_CONSTANT, "Invalid value given for get records operation backoff exponential constant. Must be a valid non-negative double value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS, "Invalid value given for getRecords sleep interval in milliseconds. Must be a valid non-negative long value."); validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_RETRIES, "Invalid value given for maximum retry attempts for getShardIterator shard operation. Must be a valid non-negative integer value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_BASE, "Invalid value given for get shard iterator operation base backoff milliseconds. Must be a valid non-negative long value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_MAX, "Invalid value given for get shard iterator operation max backoff milliseconds. 
Must be a valid non-negative long value."); validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_EXPONENTIAL_CONSTANT, "Invalid value given for get shard iterator operation backoff exponential constant. Must be a valid non-negative double value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_DISCOVERY_INTERVAL_MILLIS, "Invalid value given for shard discovery sleep interval in milliseconds. Must be a valid non-negative long value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_BASE, "Invalid value given for describe stream operation base backoff milliseconds. Must be a valid non-negative long value."); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_MAX, "Invalid value given for describe stream operation max backoff milliseconds. Must be a valid non-negative long value."); validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_EXPONENTIAL_CONSTANT, "Invalid value given for describe stream operation backoff exponential constant. Must be a valid non-negative double value."); if (config.containsKey(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS)) { checkArgument( Long.parseLong(config.getProperty(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS)) < ConsumerConfigConstants.MAX_SHARD_GETRECORDS_INTERVAL_MILLIS, "Invalid value given for getRecords sleep interval in milliseconds. Must be lower than " + ConsumerConfigConstants.MAX_SHARD_GETRECORDS_INTERVAL_MILLIS + " milliseconds." ); } }
static void function(Properties config) { checkNotNull(config, STR); validateAwsConfiguration(config); if (config.containsKey(ConsumerConfigConstants.STREAM_INITIAL_POSITION)) { String initPosType = config.getProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION); try { InitialPosition.valueOf(initPosType); } catch (IllegalArgumentException e) { StringBuilder sb = new StringBuilder(); for (InitialPosition pos : InitialPosition.values()) { sb.append(pos.toString()).append(STR); } throw new IllegalArgumentException(STR + sb.toString()); } if (InitialPosition.valueOf(initPosType) == InitialPosition.AT_TIMESTAMP) { if (!config.containsKey(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP)) { throw new IllegalArgumentException(STR + ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP + STR); } validateOptionalDateProperty(config, ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, config.getProperty(ConsumerConfigConstants.STREAM_TIMESTAMP_DATE_FORMAT, ConsumerConfigConstants.DEFAULT_STREAM_TIMESTAMP_DATE_FORMAT), STR + STR); } } validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_MAX, STR); validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_RETRIES, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_BASE, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_MAX, STR); validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_EXPONENTIAL_CONSTANT, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS, STR); validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_RETRIES, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_BASE, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_MAX, STR); 
validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_EXPONENTIAL_CONSTANT, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_DISCOVERY_INTERVAL_MILLIS, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_BASE, STR); validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_MAX, STR); validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_EXPONENTIAL_CONSTANT, STR); if (config.containsKey(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS)) { checkArgument( Long.parseLong(config.getProperty(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS)) < ConsumerConfigConstants.MAX_SHARD_GETRECORDS_INTERVAL_MILLIS, STR + ConsumerConfigConstants.MAX_SHARD_GETRECORDS_INTERVAL_MILLIS + STR ); } }
/** * Validate configuration properties for {@link FlinkKinesisConsumer}. */
Validate configuration properties for <code>FlinkKinesisConsumer</code>
validateConsumerConfiguration
{ "repo_name": "zimmermatt/flink", "path": "flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/util/KinesisConfigUtil.java", "license": "apache-2.0", "size": 14690 }
[ "java.util.Properties", "org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants", "org.apache.flink.util.Preconditions" ]
import java.util.Properties; import org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants; import org.apache.flink.util.Preconditions;
import java.util.*; import org.apache.flink.streaming.connectors.kinesis.config.*; import org.apache.flink.util.*;
[ "java.util", "org.apache.flink" ]
java.util; org.apache.flink;
483,324
@Test(expected=NoSuchElementException.class) public void testNextWhenExhausted() { int sampleSize = 2; List<String> input = Arrays.asList("A", "B", "C", "D"); Iterable<List<String>> iterable = new CombinationIterable<String>(sampleSize, input); Iterator<List<String>> iterator = iterable.iterator(); while (iterator.hasNext()) { iterator.next(); } // This call should throw the NoSuchElementException iterator.next(); }
@Test(expected=NoSuchElementException.class) void function() { int sampleSize = 2; List<String> input = Arrays.asList("A", "B", "C", "D"); Iterable<List<String>> iterable = new CombinationIterable<String>(sampleSize, input); Iterator<List<String>> iterator = iterable.iterator(); while (iterator.hasNext()) { iterator.next(); } iterator.next(); }
/** * Test whether the 'next()' method of the iterator throws a * NoSuchElementException when the iterator is exhausted */
Test whether the 'next()' method of the iterator throws a NoSuchElementException when the iterator is exhausted
testNextWhenExhausted
{ "repo_name": "javagl/Combinatorics", "path": "src/main/test/de/javagl/utils/math/combinatorics/CombinationIterableTest.java", "license": "mit", "size": 3010 }
[ "java.util.Arrays", "java.util.Iterator", "java.util.List", "java.util.NoSuchElementException", "org.junit.Test" ]
import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import org.junit.Test;
import java.util.*; import org.junit.*;
[ "java.util", "org.junit" ]
java.util; org.junit;
2,429,482
SaslServerHandler createSaslHandler(ChannelAuthenticationScheme scheme) throws SaslException;
SaslServerHandler createSaslHandler(ChannelAuthenticationScheme scheme) throws SaslException;
/** * Creates server-side Sasl handler for given scheme. * * @param scheme the authentication scheme * @return the created {@link SaslServerHandler} instance * @throws SaslException */
Creates server-side Sasl handler for given scheme
createSaslHandler
{ "repo_name": "EvilMcJerkface/alluxio", "path": "core/common/src/main/java/alluxio/security/authentication/AuthenticationServer.java", "license": "apache-2.0", "size": 2087 }
[ "javax.security.sasl.SaslException" ]
import javax.security.sasl.SaslException;
import javax.security.sasl.*;
[ "javax.security" ]
javax.security;
203,131
@Test public void listToString() { class TestCase { List<Object> mInput; String mExpected; public TestCase(String expected, Object... objs) { mExpected = expected; mInput = Arrays.asList(objs); } } List<TestCase> testCases = new LinkedList<>(); testCases.add(new TestCase("")); testCases.add(new TestCase("foo", "foo")); testCases.add(new TestCase("foo bar", "foo", "bar")); testCases.add(new TestCase("1", 1)); testCases.add(new TestCase("1 2 3", 1, 2, 3)); for (TestCase testCase : testCases) { Assert.assertEquals(testCase.mExpected, CommonUtils.listToString(testCase.mInput)); } }
void function() { class TestCase { List<Object> mInput; String mExpected; public TestCase(String expected, Object... objs) { mExpected = expected; mInput = Arrays.asList(objs); } } List<TestCase> testCases = new LinkedList<>(); testCases.add(new TestCase(STRfooSTRfooSTRfoo barSTRfooSTRbarSTR1STR1 2 3", 1, 2, 3)); for (TestCase testCase : testCases) { Assert.assertEquals(testCase.mExpected, CommonUtils.listToString(testCase.mInput)); } }
/** * Tests the {@link CommonUtils#listToString(List)} method. */
Tests the <code>CommonUtils#listToString(List)</code> method
listToString
{ "repo_name": "WilliamZapata/alluxio", "path": "core/common/src/test/java/alluxio/util/CommonUtilsTest.java", "license": "apache-2.0", "size": 11284 }
[ "java.util.Arrays", "java.util.LinkedList", "java.util.List", "org.junit.Assert" ]
import java.util.Arrays; import java.util.LinkedList; import java.util.List; import org.junit.Assert;
import java.util.*; import org.junit.*;
[ "java.util", "org.junit" ]
java.util; org.junit;
906,386
private CoapResponse makeResponse(CoapRequest request) { CoapResponse response = new CoapResponse(CoapStatus.VALID); response.setToken(request.getToken()); return response; }
CoapResponse function(CoapRequest request) { CoapResponse response = new CoapResponse(CoapStatus.VALID); response.setToken(request.getToken()); return response; }
/** * API for making response to Resource * * @param request * ChannelHandlerContext of request message */
API for making response to Resource
makeResponse
{ "repo_name": "santais/iotivity_1.1.0", "path": "cloud/interface/src/main/java/org/iotivity/cloud/ciserver/resources/KeepAliveResource.java", "license": "apache-2.0", "size": 6723 }
[ "org.iotivity.cloud.base.protocols.coap.CoapRequest", "org.iotivity.cloud.base.protocols.coap.CoapResponse", "org.iotivity.cloud.base.protocols.coap.enums.CoapStatus" ]
import org.iotivity.cloud.base.protocols.coap.CoapRequest; import org.iotivity.cloud.base.protocols.coap.CoapResponse; import org.iotivity.cloud.base.protocols.coap.enums.CoapStatus;
import org.iotivity.cloud.base.protocols.coap.*; import org.iotivity.cloud.base.protocols.coap.enums.*;
[ "org.iotivity.cloud" ]
org.iotivity.cloud;
2,566,617
public final List<Edge> getEdges() { Set<Edge> list = new HashSet<>(); for (Triangle t : this) { list.addAll(t.getEdges()); } return new ArrayList<>(list); }
final List<Edge> function() { Set<Edge> list = new HashSet<>(); for (Triangle t : this) { list.addAll(t.getEdges()); } return new ArrayList<>(list); }
/** * Searches for all the edges contained in the mesh. * @return the list of all edges, without double */
Searches for all the edges contained in the mesh
getEdges
{ "repo_name": "Nantes1900/Nantes-1900", "path": "src/fr/nantes1900/models/basis/Mesh.java", "license": "gpl-3.0", "size": 19942 }
[ "java.util.ArrayList", "java.util.HashSet", "java.util.List", "java.util.Set" ]
import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
2,207,782
public boolean clearContentsAndStylesOfCellsInAWorksheet(String range) throws InvalidKeyException, NoSuchAlgorithmException, IOException { boolean isContentsAndStylesOfCellsClearedSuccessfully = false; if(fileName == null || fileName.length() == 0) { throw new IllegalArgumentException("File name cannot be null or empty"); } if(worksheetName == null || worksheetName.length() == 0) { throw new IllegalArgumentException("Worksheet name cannot be null or empty"); } if(range == null || range.length() == 0) { throw new IllegalArgumentException("Range cannot be null or empty"); } String strURL = CELLS_URI + fileName + "/worksheets/" + worksheetName + "/cells/clearcontents?range=" + range; //sign URL String signedURL = Utils.sign(strURL); InputStream responseStream = Utils.processCommand(signedURL, "POST"); String responseJSONString = Utils.streamToString(responseStream); //Parsing JSON Gson gson = new Gson(); BaseResponse baseResponse = gson.fromJson(responseJSONString, BaseResponse.class); if (baseResponse.getCode().equals("200") && baseResponse.getStatus().equals("OK")) { isContentsAndStylesOfCellsClearedSuccessfully = true; } return isContentsAndStylesOfCellsClearedSuccessfully; }
boolean function(String range) throws InvalidKeyException, NoSuchAlgorithmException, IOException { boolean isContentsAndStylesOfCellsClearedSuccessfully = false; if(fileName == null fileName.length() == 0) { throw new IllegalArgumentException(STR); } if(worksheetName == null worksheetName.length() == 0) { throw new IllegalArgumentException(STR); } if(range == null range.length() == 0) { throw new IllegalArgumentException(STR); } String strURL = CELLS_URI + fileName + STR + worksheetName + STR + range; String signedURL = Utils.sign(strURL); InputStream responseStream = Utils.processCommand(signedURL, "POST"); String responseJSONString = Utils.streamToString(responseStream); Gson gson = new Gson(); BaseResponse baseResponse = gson.fromJson(responseJSONString, BaseResponse.class); if (baseResponse.getCode().equals("200") && baseResponse.getStatus().equals("OK")) { isContentsAndStylesOfCellsClearedSuccessfully = true; } return isContentsAndStylesOfCellsClearedSuccessfully; }
/** * Clear contents and styles of cells in excel worksheet * @param range Update the specified cells area e.g. A1:B3 * @throws java.security.InvalidKeyException If initialization fails because the provided key is null. * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any provider. * @throws java.io.IOException If there is an IO error * @return Boolean variable that indicates whether contents and styles of cell cleared successfully */
Clear contents and styles of cells in excel worksheet
clearContentsAndStylesOfCellsInAWorksheet
{ "repo_name": "asposeforcloud/Aspose_Cloud_SDK_For_Android", "path": "asposecloudsdk/src/main/java/com/aspose/cloud/sdk/cells/api/Cell.java", "license": "mit", "size": 34616 }
[ "com.aspose.cloud.sdk.common.BaseResponse", "com.aspose.cloud.sdk.common.Utils", "com.google.gson.Gson", "java.io.IOException", "java.io.InputStream", "java.security.InvalidKeyException", "java.security.NoSuchAlgorithmException" ]
import com.aspose.cloud.sdk.common.BaseResponse; import com.aspose.cloud.sdk.common.Utils; import com.google.gson.Gson; import java.io.IOException; import java.io.InputStream; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException;
import com.aspose.cloud.sdk.common.*; import com.google.gson.*; import java.io.*; import java.security.*;
[ "com.aspose.cloud", "com.google.gson", "java.io", "java.security" ]
com.aspose.cloud; com.google.gson; java.io; java.security;
34,789
protected void doDeployment(Deployment d) { Collection<QName> procs; try { procs = store.deploy(d.deployDir); _deployed.add(d); } catch (Exception ex) { if (d.expectedException == null) { ex.printStackTrace(); failure(d, "DEPLOY: Unexpected exception: " + ex, ex); } else if (!d.expectedException.isAssignableFrom(ex.getClass())) { ex.printStackTrace(); failure(d, "DEPLOY: Wrong exception; expected " + d.expectedException + " but got " + ex.getClass(), ex); } return; } try { for (QName procName : procs) { ProcessConfImpl conf = (ProcessConfImpl) store.getProcessConfiguration(procName); // Test processes always run with in-mem DAOs conf.setTransient(true); _server.register(conf); } } catch (Exception ex) { if (d.expectedException == null) failure(d, "REGISTER: Unexpected exception: " + ex, ex); else if (!d.expectedException.isAssignableFrom(ex.getClass())) failure(d, "REGISTER: Wrong exception; expected " + d.expectedException + " but got " + ex.getClass(), ex); } }
void function(Deployment d) { Collection<QName> procs; try { procs = store.deploy(d.deployDir); _deployed.add(d); } catch (Exception ex) { if (d.expectedException == null) { ex.printStackTrace(); failure(d, STR + ex, ex); } else if (!d.expectedException.isAssignableFrom(ex.getClass())) { ex.printStackTrace(); failure(d, STR + d.expectedException + STR + ex.getClass(), ex); } return; } try { for (QName procName : procs) { ProcessConfImpl conf = (ProcessConfImpl) store.getProcessConfiguration(procName); conf.setTransient(true); _server.register(conf); } } catch (Exception ex) { if (d.expectedException == null) failure(d, STR + ex, ex); else if (!d.expectedException.isAssignableFrom(ex.getClass())) failure(d, STR + d.expectedException + STR + ex.getClass(), ex); } }
/** * Do all the registered deployments. * * @param d */
Do all the registered deployments
doDeployment
{ "repo_name": "riftsaw/riftsaw-ode", "path": "bpel-test/src/main/java/org/apache/ode/test/BPELTestAbstract.java", "license": "apache-2.0", "size": 25659 }
[ "java.util.Collection", "javax.xml.namespace.QName", "org.apache.ode.store.ProcessConfImpl" ]
import java.util.Collection; import javax.xml.namespace.QName; import org.apache.ode.store.ProcessConfImpl;
import java.util.*; import javax.xml.namespace.*; import org.apache.ode.store.*;
[ "java.util", "javax.xml", "org.apache.ode" ]
java.util; javax.xml; org.apache.ode;
144,373
Assert.fail("Test 'ECRFStatusTypeDaoTransformTest.testToECRFStatusTypeVO' not implemented!"); }
Assert.fail(STR); }
/** * Test for method ECRFStatusTypeDao.toECRFStatusTypeVO * * @see org.phoenixctms.ctsms.domain.ECRFStatusTypeDao#toECRFStatusTypeVO(org.phoenixctms.ctsms.domain.ECRFStatusType source, org.phoenixctms.ctsms.vo.ECRFStatusTypeVO target) */
Test for method ECRFStatusTypeDao.toECRFStatusTypeVO
testToECRFStatusTypeVO
{ "repo_name": "phoenixctms/ctsms", "path": "core/src/test/java/org/phoenixctms/ctsms/domain/test/ECRFStatusTypeDaoTransformTest.java", "license": "lgpl-2.1", "size": 1321 }
[ "org.testng.Assert" ]
import org.testng.Assert;
import org.testng.*;
[ "org.testng" ]
org.testng;
594,735
@ServiceMethod(returns = ReturnType.SINGLE) private Mono<LabInner> updateAsync(String resourceGroupName, String labName, LabUpdate body) { return beginUpdateAsync(resourceGroupName, labName, body).last().flatMap(this.client::getLroFinalResultOrError); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<LabInner> function(String resourceGroupName, String labName, LabUpdate body) { return beginUpdateAsync(resourceGroupName, labName, body).last().flatMap(this.client::getLroFinalResultOrError); }
/** * Operation to update a lab resource. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param labName The name of the lab that uniquely identifies it within containing lab account. Used in resource * URIs. * @param body The request body. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the lab resource on successful completion of {@link Mono}. */
Operation to update a lab resource
updateAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/labservices/azure-resourcemanager-labservices/src/main/java/com/azure/resourcemanager/labservices/implementation/LabsClientImpl.java", "license": "mit", "size": 103945 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.resourcemanager.labservices.fluent.models.LabInner", "com.azure.resourcemanager.labservices.models.LabUpdate" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.labservices.fluent.models.LabInner; import com.azure.resourcemanager.labservices.models.LabUpdate;
import com.azure.core.annotation.*; import com.azure.resourcemanager.labservices.fluent.models.*; import com.azure.resourcemanager.labservices.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
1,882,402
public List<Users> findByEmailnotification(Integer emailnotification);
List<Users> function(Integer emailnotification);
/** * Find Users by emailnotification */
Find Users by emailnotification
findByEmailnotification
{ "repo_name": "faramir/ZawodyWeb", "path": "database/src/main/java/pl/umk/mat/zawodyweb/database/UsersDAO.java", "license": "bsd-2-clause", "size": 1818 }
[ "java.util.List", "pl.umk.mat.zawodyweb.database.pojo.Users" ]
import java.util.List; import pl.umk.mat.zawodyweb.database.pojo.Users;
import java.util.*; import pl.umk.mat.zawodyweb.database.pojo.*;
[ "java.util", "pl.umk.mat" ]
java.util; pl.umk.mat;
1,435,017
public static DocumentBuilderFactory createXMLDBFactory() { if (dbf == null) { dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); dbf.setValidating(false); dbf.setIgnoringComments(false); dbf.setIgnoringElementContentWhitespace(false); dbf.setExpandEntityReferences(true); } return dbf; }
static DocumentBuilderFactory function() { if (dbf == null) { dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); dbf.setValidating(false); dbf.setIgnoringComments(false); dbf.setIgnoringElementContentWhitespace(false); dbf.setExpandEntityReferences(true); } return dbf; }
/** * DOCUMENT ME! * * @return DOCUMENT ME! */
DOCUMENT ME
createXMLDBFactory
{ "repo_name": "andyglow/binxml", "path": "src/org/binxml/util/XmlUtil.java", "license": "lgpl-2.1", "size": 4117 }
[ "javax.xml.parsers.DocumentBuilderFactory" ]
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.*;
[ "javax.xml" ]
javax.xml;
1,896,934
MatrixData getMatrix();
MatrixData getMatrix();
/** * return internal and device size * * @return */
return internal and device size
getMatrix
{ "repo_name": "arneboe/PixelController", "path": "pixelcontroller-core/src/main/java/com/neophob/sematrix/core/api/PixelController.java", "license": "gpl-3.0", "size": 3109 }
[ "com.neophob.sematrix.core.visual.MatrixData" ]
import com.neophob.sematrix.core.visual.MatrixData;
import com.neophob.sematrix.core.visual.*;
[ "com.neophob.sematrix" ]
com.neophob.sematrix;
958,093
public void setType(ResourceType type) { this.type = type; }
void function(ResourceType type) { this.type = type; }
/** * Sets resource type * @param type resource type */
Sets resource type
setType
{ "repo_name": "radicalbit/ambari", "path": "ambari-server/src/main/java/org/apache/ambari/server/controller/PrivilegeResponse.java", "license": "apache-2.0", "size": 4207 }
[ "org.apache.ambari.server.security.authorization.ResourceType" ]
import org.apache.ambari.server.security.authorization.ResourceType;
import org.apache.ambari.server.security.authorization.*;
[ "org.apache.ambari" ]
org.apache.ambari;
2,090,918
public static String optionallyQuoteStringByOS( String string ) { String quote = getQuoteCharByOS(); if ( Utils.isEmpty( string ) ) { return quote; } // If the field already contains quotes, we don't touch it anymore, just // return the same string... // also return it if no spaces are found if ( string.indexOf( quote ) >= 0 || ( string.indexOf( ' ' ) < 0 && string.indexOf( '=' ) < 0 ) ) { return string; } else { return quote + string + quote; } }
static String function( String string ) { String quote = getQuoteCharByOS(); if ( Utils.isEmpty( string ) ) { return quote; } if ( string.indexOf( quote ) >= 0 ( string.indexOf( ' ' ) < 0 && string.indexOf( '=' ) < 0 ) ) { return string; } else { return quote + string + quote; } }
/** * Quote a string depending on the OS. Often used for shell calls. * * @return quoted string */
Quote a string depending on the OS. Often used for shell calls
optionallyQuoteStringByOS
{ "repo_name": "mkambol/pentaho-kettle", "path": "core/src/main/java/org/pentaho/di/core/Const.java", "license": "apache-2.0", "size": 121415 }
[ "org.pentaho.di.core.util.Utils" ]
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.util.*;
[ "org.pentaho.di" ]
org.pentaho.di;
2,908,870
public static void startMiniClusterWithConfig( MiniClusterConfigurationCallback miniClusterCallback) throws Exception { File baseDir = new File(System.getProperty("user.dir") + "/target/mini-tests"); assertTrue(baseDir.mkdirs() || baseDir.isDirectory()); // Make a shared MAC instance instead of spinning up one per test method MiniClusterHarness harness = new MiniClusterHarness(); if (TRUE.equals(System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION))) { krb = new TestingKdc(); krb.start(); // Enabled krb auth Configuration conf = new Configuration(false); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); UserGroupInformation.setConfiguration(conf); // Login as the client ClusterUser rootUser = krb.getRootUser(); // Get the krb token UserGroupInformation.loginUserFromKeytab(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath()); token = new KerberosToken(); } else { rootPassword = "rootPasswordShared1"; token = new PasswordToken(rootPassword); } cluster = harness.create(SharedMiniClusterBase.class.getName(), System.currentTimeMillis() + "_" + new SecureRandom().nextInt(Short.MAX_VALUE), token, miniClusterCallback, krb); cluster.start(); if (krb != null) { final String traceTable = Property.TRACE_TABLE.getDefaultValue(); final ClusterUser systemUser = krb.getAccumuloServerUser(), rootUser = krb.getRootUser(); // Login as the trace user // Open a client as the system user (ensures the user will exist for us to assign // permissions to) UserGroupInformation.loginUserFromKeytab(systemUser.getPrincipal(), systemUser.getKeytab().getAbsolutePath()); AuthenticationToken tempToken = new KerberosToken(); try (AccumuloClient c = cluster.createAccumuloClient(systemUser.getPrincipal(), tempToken)) { c.securityOperations().authenticateUser(systemUser.getPrincipal(), tempToken); } // Then, log back in as the "root" user and do the grant UserGroupInformation.loginUserFromKeytab(rootUser.getPrincipal(), 
rootUser.getKeytab().getAbsolutePath()); try (AccumuloClient c = cluster.createAccumuloClient(principal, token)) { // Create the trace table c.tableOperations().create(traceTable); // Trace user (which is the same kerberos principal as the system user, but using a normal // KerberosToken) needs // to have the ability to read, write and alter the trace table c.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.READ); c.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.WRITE); c.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.ALTER_TABLE); } } }
static void function( MiniClusterConfigurationCallback miniClusterCallback) throws Exception { File baseDir = new File(System.getProperty(STR) + STR); assertTrue(baseDir.mkdirs() baseDir.isDirectory()); MiniClusterHarness harness = new MiniClusterHarness(); if (TRUE.equals(System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION))) { krb = new TestingKdc(); krb.start(); Configuration conf = new Configuration(false); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, STR); UserGroupInformation.setConfiguration(conf); ClusterUser rootUser = krb.getRootUser(); UserGroupInformation.loginUserFromKeytab(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath()); token = new KerberosToken(); } else { rootPassword = STR; token = new PasswordToken(rootPassword); } cluster = harness.create(SharedMiniClusterBase.class.getName(), System.currentTimeMillis() + "_" + new SecureRandom().nextInt(Short.MAX_VALUE), token, miniClusterCallback, krb); cluster.start(); if (krb != null) { final String traceTable = Property.TRACE_TABLE.getDefaultValue(); final ClusterUser systemUser = krb.getAccumuloServerUser(), rootUser = krb.getRootUser(); UserGroupInformation.loginUserFromKeytab(systemUser.getPrincipal(), systemUser.getKeytab().getAbsolutePath()); AuthenticationToken tempToken = new KerberosToken(); try (AccumuloClient c = cluster.createAccumuloClient(systemUser.getPrincipal(), tempToken)) { c.securityOperations().authenticateUser(systemUser.getPrincipal(), tempToken); } UserGroupInformation.loginUserFromKeytab(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath()); try (AccumuloClient c = cluster.createAccumuloClient(principal, token)) { c.tableOperations().create(traceTable); c.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.READ); c.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.WRITE); 
c.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.ALTER_TABLE); } } }
/** * Starts a MiniAccumuloCluster instance with the default configuration but also provides the * caller the opportunity to update the configuration before the MiniAccumuloCluster is started. * * @param miniClusterCallback * A callback to configure the minicluster before it is started. */
Starts a MiniAccumuloCluster instance with the default configuration but also provides the caller the opportunity to update the configuration before the MiniAccumuloCluster is started
startMiniClusterWithConfig
{ "repo_name": "lstav/accumulo", "path": "test/src/main/java/org/apache/accumulo/harness/SharedMiniClusterBase.java", "license": "apache-2.0", "size": 8993 }
[ "java.io.File", "java.security.SecureRandom", "org.apache.accumulo.cluster.ClusterUser", "org.apache.accumulo.core.client.AccumuloClient", "org.apache.accumulo.core.client.security.tokens.AuthenticationToken", "org.apache.accumulo.core.client.security.tokens.KerberosToken", "org.apache.accumulo.core.client.security.tokens.PasswordToken", "org.apache.accumulo.core.conf.Property", "org.apache.accumulo.core.security.TablePermission", "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.fs.CommonConfigurationKeysPublic", "org.apache.hadoop.security.UserGroupInformation", "org.junit.Assert" ]
import java.io.File; import java.security.SecureRandom; import org.apache.accumulo.cluster.ClusterUser; import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.security.tokens.AuthenticationToken; import org.apache.accumulo.core.client.security.tokens.KerberosToken; import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.accumulo.core.conf.Property; import org.apache.accumulo.core.security.TablePermission; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.security.UserGroupInformation; import org.junit.Assert;
import java.io.*; import java.security.*; import org.apache.accumulo.cluster.*; import org.apache.accumulo.core.client.*; import org.apache.accumulo.core.client.security.tokens.*; import org.apache.accumulo.core.conf.*; import org.apache.accumulo.core.security.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.security.*; import org.junit.*;
[ "java.io", "java.security", "org.apache.accumulo", "org.apache.hadoop", "org.junit" ]
java.io; java.security; org.apache.accumulo; org.apache.hadoop; org.junit;
1,112,230
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<RelationInner> list(String resourceGroupName, String workspaceName, String bookmarkId);
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<RelationInner> list(String resourceGroupName, String workspaceName, String bookmarkId);
/** * Gets all bookmark relations. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param bookmarkId Bookmark ID. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all bookmark relations. */
Gets all bookmark relations
list
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/securityinsights/azure-resourcemanager-securityinsights/src/main/java/com/azure/resourcemanager/securityinsights/fluent/BookmarkRelationsClient.java", "license": "mit", "size": 8187 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.PagedIterable", "com.azure.resourcemanager.securityinsights.fluent.models.RelationInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.resourcemanager.securityinsights.fluent.models.RelationInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.securityinsights.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
1,483,259
private void writeQNameAttribute(final java.lang.String namespace, final java.lang.String attName, final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { final java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = this.registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { this.registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } }
void function(final java.lang.String namespace, final java.lang.String attName, final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { final java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = this.registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { this.registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } }
/** * Util method to write an attribute without the ns prefix */
Util method to write an attribute without the ns prefix
writeQNameAttribute
{ "repo_name": "caiocteodoro/nfe", "path": "src/main/java/com/fincatto/documentofiscal/nfe310/webservices/downloadnf/NfeDownloadNFStub.java", "license": "apache-2.0", "size": 91779 }
[ "javax.xml.namespace.QName" ]
import javax.xml.namespace.QName;
import javax.xml.namespace.*;
[ "javax.xml" ]
javax.xml;
2,599,753
public boolean isHandledType(int type) { final Set<Integer> typeSet = typeHandlers.keySet(); return typeSet.contains(type); }
boolean function(int type) { final Set<Integer> typeSet = typeHandlers.keySet(); return typeSet.contains(type); }
/** * Returns true if this type (form TokenTypes) is handled. * * @param type type from TokenTypes * @return true if handler is registered, false otherwise */
Returns true if this type (form TokenTypes) is handled
isHandledType
{ "repo_name": "sharang108/checkstyle", "path": "src/main/java/com/puppycrawl/tools/checkstyle/checks/indentation/HandlerFactory.java", "license": "lgpl-2.1", "size": 7179 }
[ "java.util.Set" ]
import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
2,209,446
private String convertLevel(Scheme tdtscheme, Level tdtlevel, String input, Map<String, String> inputParameters, LevelTypeList outboundlevel) { debugprintln("convertLevel (line 820) - 19:12 21st October 2010"); debugprintln("==============================================="); debugprintln("CONVERT "+input+" to "+outboundlevel.toString()); String outboundstring; Map<String, String> extraparams = // new NoisyMap (new HashMap<String, String>(inputParameters)); // get the scheme's option key, which is the name of a // parameter whose value is matched to the option key of the // level. String optionValue; String optionkey = tdtscheme.getOptionKey(); debugprintln("optionkey for scheme = "+optionkey); debugprintln("tdtlevel.getType() = "+tdtlevel.getType().toString()); if (!((tdtlevel.getType() == LevelTypeList.TAG_ENCODING) || (tdtlevel.getType() == LevelTypeList.PURE_IDENTITY) || (tdtlevel.getType() == LevelTypeList.BINARY) )) { optionValue = inputParameters.get(optionkey); } else { optionValue=null; } debugprintln("optionValue = "+optionValue); // the name of a parameter which allows the appropriate option // to be selected // now consider the various options within the scheme and // level for each option element inside the level, check // whether the pattern attribute matches as a regular // expression String matchingOptionKey = null; Option matchingOption = null; Matcher prefixMatcher = null; Map<String,Option> pattern_map = new HashMap<String,Option>(); Map<String,Matcher> matcher_map = new HashMap<String,Matcher>(); debugprintln("line 858 input = "+input); for (Option opt : tdtlevel.getOption()) { if (optionValue == null || optionValue.equals(opt.getOptionKey())) { // possible match debugprintln("optionValue = "+optionValue); debugprintln("opt.getOptionKey() = "+opt.getOptionKey()); debugprintln("Pattern = "+opt.getPattern()); Matcher matcher = Pattern.compile("^"+opt.getPattern()).matcher(input); debugprintln("lookingAt ^"+opt.getPattern()); if 
(matcher.lookingAt()) { debugprintln("MATCHED!"); pattern_map.put(opt.getOptionKey(),opt); matcher_map.put(opt.getOptionKey(),matcher); } } } debugprintln("Size of pattern_map is "+pattern_map.size()); if (pattern_map.isEmpty()) { debugprintln("***EXCEPTION: No patterns matched (line 879)"); throw new TDTException("No patterns matched (line 880)"); } if (pattern_map.size() > 1) { debugprintln("optionkey = "+optionkey); debugprintln("extraparams.get("+optionkey+") = "+extraparams.get(optionkey)); debugprintln("optionValue = "+optionValue); if (pattern_map.containsKey(optionValue)) { debugprintln("matchingOptionKey = "+optionValue); debugprintln("matchingOption has pattern = "+pattern_map.get(optionValue).getPattern()); matchingOptionKey = optionValue; matchingOption=pattern_map.get(optionValue); prefixMatcher=matcher_map.get(optionValue); } } if (pattern_map.size() == 1) { debugprintln("matchingOptionKey = "+pattern_map.keySet().iterator().next()); matchingOptionKey = pattern_map.keySet().iterator().next().toString(); debugprintln("matchingOption has pattern = "+pattern_map.get(matchingOptionKey).getPattern()); matchingOption=pattern_map.get(matchingOptionKey); prefixMatcher=matcher_map.get(matchingOptionKey); } optionValue = matchingOptionKey; debugprintln("optionValue = "+optionValue); Level tdtoutlevel = findLevel(tdtscheme, outboundlevel); debugprint("tdtoutlevel prefixMatch = "); if (tdtoutlevel.getPrefixMatch() != null) { debugprintln(tdtoutlevel.getPrefixMatch().toString()); } else { debugprintln("null"); } Level tdttagurilevel = findLevel(tdtscheme, LevelTypeList.TAG_ENCODING); Level tdtbinarylevel = findLevel(tdtscheme, LevelTypeList.BINARY); debugprint("tdttagurilevel prefixMatch = "); debugprintln(tdttagurilevel.getPrefixMatch().toString()); Option tdtoutoption = findOption(tdtoutlevel, optionValue); debugprint("tdtoutoption pattern = "); if (tdtoutoption.getPattern() != null) { debugprintln(tdtoutoption.getPattern().toString()); } else { 
debugprintln("null"); } Option tdttagurioption = findOption(tdttagurilevel, optionValue); Option tdtbinaryoption = findOption(tdtbinarylevel, optionValue); debugprint("tdttagurioption pattern = "); debugprintln(tdttagurioption.getPattern().toString()); // EXTRACTION of values or each of the fields. // consider all fields within the matching option for (Field field : matchingOption.getField()) { BigInteger seq = field.getSeq(); String strfieldname = field.getName(); PadDirectionList padDir = field.getPadDir(); PadDirectionList taguriPadDir; PadDirectionList bitPadDir = field.getBitPadDir(); String padChar = field.getPadChar(); String taguriPadChar; String outPadChar; int requiredLength = -1; // -1 indicates that no length is specified if (field.getLength() != null) { requiredLength = field.getLength().intValue(); } debugprintln("---------------------------------------------------------"); debugprintln("fieldname = "+strfieldname); String strfieldvaluematched = prefixMatcher.group(seq.intValue()); debugprintln("strfieldvaluematched = "+strfieldvaluematched); debugprintln("---------------------------------------------------------"); Field outputfield = findField(tdtoutoption, strfieldname, tdtoutlevel); // debugprintln("outputfield characterset = "+outputfield.getCharacterSet().toString()); Field tagurifield = findField(tdttagurioption, strfieldname, tdttagurilevel); Field binaryfield = findField(tdtbinaryoption, strfieldname, tdtbinarylevel); if (tdtlevel.getType() == LevelTypeList.BINARY ) { debugprintln("Converting from BINARY to NON-BINARY - see Figure 9b"); String result9blayer1; String result9blayer2; String result9blayer3; if (binaryfield.getCompaction() != null) { if (binaryfield.getBitPadDir() != null) { // strip leading/trailing bits at the bitPadDir edge until a multiple of compaction bits is obtained int intcompaction = -1; String strCompaction = binaryfield.getCompaction(); if (strCompaction.equals("5-bit")) { intcompaction = 5; } if 
(strCompaction.equals("6-bit")) { intcompaction = 6; } if (strCompaction.equals("7-bit")) { intcompaction = 7; } if (strCompaction.equals("8-bit")) { intcompaction = 8; } if (intcompaction > -1) { result9blayer1 = stripbinarypadding(strfieldvaluematched, binaryfield.getBitPadDir(), intcompaction); } else { result9blayer1 = strfieldvaluematched; debugprintln("Invalid value for compaction"); } } else { // do nothing result9blayer1 = strfieldvaluematched; } // convert the sequence of bits into characters, considering that each byte may have been compacted, as indicated by the compaction attribute result9blayer2 = binaryToString(result9blayer1,binaryfield.getCompaction()); // check that the string value only contains characters from the permitted character set debugprintln("9b: Checking that result "+result9blayer2+" is within character set "+tagurifield.getCharacterSet()); checkWithinCharacterSet(strfieldname, result9blayer2, tagurifield.getCharacterSet()); } else { if (binaryfield.getBitPadDir() != null) { // strip leading/trailing bits at the bitPadDir edge until the first non-zero bit is encountered result9blayer1 = stripbinarypadding(strfieldvaluematched, binaryfield.getBitPadDir(), 0); } else { // do nothing result9blayer1 = strfieldvaluematched; } // consider the sequence of bits as an unsigned integer and convert this integer into a numeric string result9blayer2 = bin2dec(result9blayer1); debugprintln("9b: Intermediate results at layer 2="+result9blayer2); // check that the numeric value is not less than the specified minimum nor greater than the specified maximum / debugprintln("Processing RULE elements of type 'EXTRACT'"); int seq = 0; for (Rule tdtrule : tdtlevel.getRule()) { if (tdtrule.getType() == ModeList.EXTRACT) { debugprintln("Rule #"+tdtrule.getSeq().intValue()+": "+tdtrule.getNewFieldName()); assert seq < tdtrule.getSeq().intValue() : "Rule out of sequence order"; seq = tdtrule.getSeq().intValue(); processRules(extraparams, tdtrule); } } 
debugprintln("Finished processing 'EXTRACT' rules"); // Level tdtoutlevel = findLevel(tdtscheme, outboundlevel); // Option tdtoutoption = findOption(tdtoutlevel, optionValue); debugprintln("Processing RULE elements of type 'FORMAT'"); seq = 0; for (Rule tdtrule : tdtoutlevel.getRule()) { if (tdtrule.getType() == ModeList.FORMAT) { debugprintln("Rule #"+tdtrule.getSeq().intValue()+": "+tdtrule.getNewFieldName()); assert seq < tdtrule.getSeq().intValue() : "Rule out of sequence order"; seq = tdtrule.getSeq().intValue(); processRules(extraparams, tdtrule); } } debugprintln("Finished processing 'FORMAT' rules"); if (tdtoutlevel.getType() == LevelTypeList.BINARY ) { debugprintln("Converting output fields from NON-BINARY to BINARY - see Figure 9a"); for (Field field : tdtoutoption.getField()) { String strfieldname = field.getName(); Field tagurifield = findField(tdttagurioption, strfieldname, tdttagurilevel); Field binaryfield = findField(tdtbinaryoption, strfieldname, tdtbinarylevel); String strfieldvaluematched = extraparams.get(strfieldname); debugprintln("Output field: "+strfieldname+" had value "+strfieldvaluematched); String result9alayer1; if (tagurifield !=null) { if (tagurifield.getPadChar() != null) { if (binaryfield.getPadChar() != null) { debugprintln("9a Invalid TDT definition file"); result9alayer1=""; } else { // Strip non-binary field of any successive pad characters tagurifield.getPadChar() at edge tagurifield.getPadDir() result9alayer1 = stripPadChar(strfieldvaluematched,tagurifield.getPadDir(),tagurifield.getPadChar()); } } else { if (binaryfield.getPadChar() != null) { // Pad the non-binary field with pad characters binaryfield.getPadChar() at the edge binaryfield.getPadDir() to reach a total length of binaryfield.getLength() characters result9alayer1 = applyPadChar(strfieldvaluematched, binaryfield.getPadDir(), binaryfield.getPadChar(), binaryfield.getLength().intValue()); } else { // do not pad this field at the non-binary level result9alayer1 = 
strfieldvaluematched; } } debugprintln("\tIntermediate Result for Fig 9a at layer 1="+result9alayer1); String result9alayer2; if (binaryfield.getCompaction() != null) { // treat the field as an alphanumeric field // check that all of its characters are within the allowed character set checkWithinCharacterSet(strfieldname, result9alayer1, tagurifield.getCharacterSet()); // convert to binary using the compaction method specified for that field at binary level result9alayer2 = stringToBinary(result9alayer1,binaryfield.getCompaction().toString()); } else { // check that the non-binary value is not less than the minimum nor greater than the maximum value permitted if (result9alayer1.length() > 0) { checkMinimum(strfieldname, new BigInteger(result9alayer1), tagurifield.getDecimalMinimum()); checkMaximum(strfieldname, new BigInteger(result9alayer1), tagurifield.getDecimalMaximum()); } // treat the numeric field as as an unsigned integer and convert this integer into a sequence of bits result9alayer2 = dec2bin(result9alayer1); } debugprintln("\tIntermediate Result for Fig 9a at layer 2="+result9alayer2); String result9alayer3; if (binaryfield.getBitPadDir() != null) { debugprintln("9a Pad with leading/trailing bits at the "+binaryfield.getBitPadDir()+" edge to reach a total of "+binaryfield.getBitLength()+" bits"); result9alayer3 = applyPadChar(result9alayer2, binaryfield.getBitPadDir(), "0", binaryfield.getBitLength().intValue()); } else { debugprintln("9a Don't pad at binary level"); result9alayer3 = result9alayer2; } debugprintln("\tFinal Result for Fig 9a at layer 3="+result9alayer3); debugprintln("Need to put this value into extraparams as the value for key "+strfieldname); debugprintln("binaryfield.getBitLength() = "+binaryfield.getBitLength()); if ((binaryfield.getBitLength() != null) && (binaryfield.getBitLength().intValue() == 0)) { extraparams.put(strfieldname,""); } else { extraparams.put(strfieldname,result9alayer3); } } else { String result9alayer3; if 
(binaryfield.getBitPadDir() != null) { debugprintln("9a Pad with leading/trailing bits at the "+binaryfield.getBitPadDir()+" edge to reach a total of "+binaryfield.getBitLength()+" bits"); result9alayer3 = applyPadChar(dec2bin(strfieldvaluematched), binaryfield.getBitPadDir(), "0", binaryfield.getBitLength().intValue()); } else { debugprintln("9a Don't pad at binary level"); result9alayer3 = dec2bin(strfieldvaluematched); } debugprintln("binaryfield.getBitLength() = "+binaryfield.getBitLength()); if ((binaryfield.getBitLength() != null) && (binaryfield.getBitLength().intValue() == 0)) { extraparams.put(strfieldname,""); } else { extraparams.put(strfieldname,result9alayer3); } } } } debugprintln("Building final grammar"); // *** need to do check min/max just before building grammar - not earlier // *** may need to pass additional fields into buildGrammar in order to do this // *** logic is flawed here. We cannot test for fields that do not appear in the grammar string // *** instead we need to extract these from the grammar string and check against constraints expressed in either the rules of type="FORMAT" or the field in tdtoutoption. 
for (Field testfield : tdtoutoption.getField()) { String testfieldname = testfield.getName(); debugprintln("Field to be checked: "+testfieldname+" = "+extraparams.get(testfieldname)); if (outboundlevel == LevelTypeList.BINARY) { Field tagurifield = findField(tdttagurioption, testfieldname, tdttagurilevel); if (tagurifield.getDecimalMinimum() != null) { debugprintln("Decimal minimum = "+tagurifield.getDecimalMinimum()); checkMinimum(testfieldname, new BigInteger(bin2dec(extraparams.get(testfieldname))), testfield.getDecimalMinimum()); } if (tagurifield.getDecimalMaximum() != null) { debugprintln("Decimal maximum = "+tagurifield.getDecimalMaximum()); checkMaximum(testfieldname, new BigInteger(bin2dec(extraparams.get(testfieldname))), testfield.getDecimalMaximum()); } } else { if (testfield.getDecimalMinimum() != null) { debugprintln("Decimal minimum = "+testfield.getDecimalMinimum()); checkMinimum(testfieldname, new BigInteger(extraparams.get(testfieldname)), testfield.getDecimalMinimum()); } if (testfield.getDecimalMaximum() != null) { debugprintln("Decimal maximum = "+testfield.getDecimalMaximum()); checkMaximum(testfieldname, new BigInteger(extraparams.get(testfieldname)), testfield.getDecimalMaximum()); } if (testfield.getCharacterSet() != null) { debugprintln("Character set = "+testfield.getCharacterSet()); checkWithinCharacterSet(testfieldname, extraparams.get(testfieldname), testfield.getCharacterSet()); } } } // need to get fields for tdtoutoption // then check each one for min/max, charSet outboundstring = buildGrammar(tdtoutoption.getGrammar(), extraparams, outboundlevel); // debugprintln("final extraparams = " + extraparams); debugprintln("RESULT after building grammar = " + outboundstring); debugprintln("==============================================================================="); debugprintln(""); return outboundstring; }
String function(Scheme tdtscheme, Level tdtlevel, String input, Map<String, String> inputParameters, LevelTypeList outboundlevel) { debugprintln(STR); debugprintln(STR); debugprintln(STR+input+STR+outboundlevel.toString()); String outboundstring; Map<String, String> extraparams = (new HashMap<String, String>(inputParameters)); String optionValue; String optionkey = tdtscheme.getOptionKey(); debugprintln(STR+optionkey); debugprintln(STR+tdtlevel.getType().toString()); if (!((tdtlevel.getType() == LevelTypeList.TAG_ENCODING) (tdtlevel.getType() == LevelTypeList.PURE_IDENTITY) (tdtlevel.getType() == LevelTypeList.BINARY) )) { optionValue = inputParameters.get(optionkey); } else { optionValue=null; } debugprintln(STR+optionValue); String matchingOptionKey = null; Option matchingOption = null; Matcher prefixMatcher = null; Map<String,Option> pattern_map = new HashMap<String,Option>(); Map<String,Matcher> matcher_map = new HashMap<String,Matcher>(); debugprintln(STR+input); for (Option opt : tdtlevel.getOption()) { if (optionValue == null optionValue.equals(opt.getOptionKey())) { debugprintln(STR+optionValue); debugprintln(STR+opt.getOptionKey()); debugprintln(STR+opt.getPattern()); Matcher matcher = Pattern.compile("^"+opt.getPattern()).matcher(input); debugprintln(STR+opt.getPattern()); if (matcher.lookingAt()) { debugprintln(STR); pattern_map.put(opt.getOptionKey(),opt); matcher_map.put(opt.getOptionKey(),matcher); } } } debugprintln(STR+pattern_map.size()); if (pattern_map.isEmpty()) { debugprintln(STR); throw new TDTException(STR); } if (pattern_map.size() > 1) { debugprintln(STR+optionkey); debugprintln(STR+optionkey+STR+extraparams.get(optionkey)); debugprintln(STR+optionValue); if (pattern_map.containsKey(optionValue)) { debugprintln(STR+optionValue); debugprintln(STR+pattern_map.get(optionValue).getPattern()); matchingOptionKey = optionValue; matchingOption=pattern_map.get(optionValue); prefixMatcher=matcher_map.get(optionValue); } } if (pattern_map.size() == 1) 
{ debugprintln(STR+pattern_map.keySet().iterator().next()); matchingOptionKey = pattern_map.keySet().iterator().next().toString(); debugprintln(STR+pattern_map.get(matchingOptionKey).getPattern()); matchingOption=pattern_map.get(matchingOptionKey); prefixMatcher=matcher_map.get(matchingOptionKey); } optionValue = matchingOptionKey; debugprintln(STR+optionValue); Level tdtoutlevel = findLevel(tdtscheme, outboundlevel); debugprint(STR); if (tdtoutlevel.getPrefixMatch() != null) { debugprintln(tdtoutlevel.getPrefixMatch().toString()); } else { debugprintln("null"); } Level tdttagurilevel = findLevel(tdtscheme, LevelTypeList.TAG_ENCODING); Level tdtbinarylevel = findLevel(tdtscheme, LevelTypeList.BINARY); debugprint(STR); debugprintln(tdttagurilevel.getPrefixMatch().toString()); Option tdtoutoption = findOption(tdtoutlevel, optionValue); debugprint(STR); if (tdtoutoption.getPattern() != null) { debugprintln(tdtoutoption.getPattern().toString()); } else { debugprintln("null"); } Option tdttagurioption = findOption(tdttagurilevel, optionValue); Option tdtbinaryoption = findOption(tdtbinarylevel, optionValue); debugprint(STR); debugprintln(tdttagurioption.getPattern().toString()); for (Field field : matchingOption.getField()) { BigInteger seq = field.getSeq(); String strfieldname = field.getName(); PadDirectionList padDir = field.getPadDir(); PadDirectionList taguriPadDir; PadDirectionList bitPadDir = field.getBitPadDir(); String padChar = field.getPadChar(); String taguriPadChar; String outPadChar; int requiredLength = -1; if (field.getLength() != null) { requiredLength = field.getLength().intValue(); } debugprintln(STR); debugprintln(STR+strfieldname); String strfieldvaluematched = prefixMatcher.group(seq.intValue()); debugprintln(STR+strfieldvaluematched); debugprintln(STR); Field outputfield = findField(tdtoutoption, strfieldname, tdtoutlevel); Field tagurifield = findField(tdttagurioption, strfieldname, tdttagurilevel); Field binaryfield = findField(tdtbinaryoption, 
strfieldname, tdtbinarylevel); if (tdtlevel.getType() == LevelTypeList.BINARY ) { debugprintln(STR); String result9blayer1; String result9blayer2; String result9blayer3; if (binaryfield.getCompaction() != null) { if (binaryfield.getBitPadDir() != null) { int intcompaction = -1; String strCompaction = binaryfield.getCompaction(); if (strCompaction.equals("5-bit")) { intcompaction = 5; } if (strCompaction.equals("6-bit")) { intcompaction = 6; } if (strCompaction.equals("7-bit")) { intcompaction = 7; } if (strCompaction.equals("8-bit")) { intcompaction = 8; } if (intcompaction > -1) { result9blayer1 = stripbinarypadding(strfieldvaluematched, binaryfield.getBitPadDir(), intcompaction); } else { result9blayer1 = strfieldvaluematched; debugprintln(STR); } } else { result9blayer1 = strfieldvaluematched; } result9blayer2 = binaryToString(result9blayer1,binaryfield.getCompaction()); debugprintln(STR+result9blayer2+STR+tagurifield.getCharacterSet()); checkWithinCharacterSet(strfieldname, result9blayer2, tagurifield.getCharacterSet()); } else { if (binaryfield.getBitPadDir() != null) { result9blayer1 = stripbinarypadding(strfieldvaluematched, binaryfield.getBitPadDir(), 0); } else { result9blayer1 = strfieldvaluematched; } result9blayer2 = bin2dec(result9blayer1); debugprintln(STR+result9blayer2); / debugprintln(STR); int seq = 0; for (Rule tdtrule : tdtlevel.getRule()) { if (tdtrule.getType() == ModeList.EXTRACT) { debugprintln(STR+tdtrule.getSeq().intValue()+STR+tdtrule.getNewFieldName()); assert seq < tdtrule.getSeq().intValue() : STR; seq = tdtrule.getSeq().intValue(); processRules(extraparams, tdtrule); } } debugprintln(STR); debugprintln(STR); seq = 0; for (Rule tdtrule : tdtoutlevel.getRule()) { if (tdtrule.getType() == ModeList.FORMAT) { debugprintln(STR+tdtrule.getSeq().intValue()+STR+tdtrule.getNewFieldName()); assert seq < tdtrule.getSeq().intValue() : STR; seq = tdtrule.getSeq().intValue(); processRules(extraparams, tdtrule); } } debugprintln(STR); if 
(tdtoutlevel.getType() == LevelTypeList.BINARY ) { debugprintln(STR); for (Field field : tdtoutoption.getField()) { String strfieldname = field.getName(); Field tagurifield = findField(tdttagurioption, strfieldname, tdttagurilevel); Field binaryfield = findField(tdtbinaryoption, strfieldname, tdtbinarylevel); String strfieldvaluematched = extraparams.get(strfieldname); debugprintln(STR+strfieldname+STR+strfieldvaluematched); String result9alayer1; if (tagurifield !=null) { if (tagurifield.getPadChar() != null) { if (binaryfield.getPadChar() != null) { debugprintln(STR); result9alayer1=STR\tIntermediate Result for Fig 9a at layer 1=STR\tIntermediate Result for Fig 9a at layer 2=STR9a Pad with leading/trailing bits at the STR edge to reach a total of STR bitsSTR0STR9a Don't pad at binary levelSTR\tFinal Result for Fig 9a at layer 3=STRNeed to put this value into extraparams as the value for key STRbinaryfield.getBitLength() = STRSTR9a Pad with leading/trailing bits at the STR edge to reach a total of STR bitsSTR0STR9a Don't pad at binary levelSTRbinaryfield.getBitLength() = STRSTRBuilding final grammarSTRField to be checked: STR = STRDecimal minimum = STRDecimal maximum = STRDecimal minimum = STRDecimal maximum = STRCharacter set = STRRESULT after building grammar = STR===============================================================================STR"); return outboundstring; }
/** * convert from a particular scheme / level */
convert from a particular scheme / level
convertLevel
{ "repo_name": "Auto-ID-Lab-Japan/fosstrak-tdt", "path": "src/main/java/org/fosstrak/tdt/TDTEngine.java", "license": "lgpl-2.1", "size": 96390 }
[ "java.math.BigInteger", "java.util.HashMap", "java.util.Map", "java.util.regex.Matcher", "java.util.regex.Pattern", "org.epcglobalinc.tdt.Field", "org.epcglobalinc.tdt.Level", "org.epcglobalinc.tdt.LevelTypeList", "org.epcglobalinc.tdt.ModeList", "org.epcglobalinc.tdt.Option", "org.epcglobalinc.tdt.PadDirectionList", "org.epcglobalinc.tdt.Rule", "org.epcglobalinc.tdt.Scheme" ]
import java.math.BigInteger; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.epcglobalinc.tdt.Field; import org.epcglobalinc.tdt.Level; import org.epcglobalinc.tdt.LevelTypeList; import org.epcglobalinc.tdt.ModeList; import org.epcglobalinc.tdt.Option; import org.epcglobalinc.tdt.PadDirectionList; import org.epcglobalinc.tdt.Rule; import org.epcglobalinc.tdt.Scheme;
import java.math.*; import java.util.*; import java.util.regex.*; import org.epcglobalinc.tdt.*;
[ "java.math", "java.util", "org.epcglobalinc.tdt" ]
java.math; java.util; org.epcglobalinc.tdt;
2,440,602
public void setContentPane(Container contentPanel) { if (contentPanel == null) { throw new IllegalArgumentException("Content pane can't be null"); } if (wrapper != null) { //these next two lines are as they are because if I try to remove //the "wrapper" component directly, then super.remove(comp) ends up //calling remove(int), which is overridden in this class, leading to //improper behavior. assert super.getComponent(0) == wrapper; super.remove(0); } wrapper = new WrapperContainer(contentPanel); super.addImpl(wrapper, BorderLayout.CENTER, -1); }
void function(Container contentPanel) { if (contentPanel == null) { throw new IllegalArgumentException(STR); } if (wrapper != null) { assert super.getComponent(0) == wrapper; super.remove(0); } wrapper = new WrapperContainer(contentPanel); super.addImpl(wrapper, BorderLayout.CENTER, -1); }
/** * Sets the content pane of this JXCollapsiblePane. Components must be added * to this content pane, not to the JXCollapsiblePane. * * @param contentPanel * @throws IllegalArgumentException * if contentPanel is null */
Sets the content pane of this JXCollapsiblePane. Components must be added to this content pane, not to the JXCollapsiblePane
setContentPane
{ "repo_name": "charlycoste/TreeD", "path": "src/org/jdesktop/swingx/JXCollapsiblePane.java", "license": "gpl-2.0", "size": 20625 }
[ "java.awt.BorderLayout", "java.awt.Container" ]
import java.awt.BorderLayout; import java.awt.Container;
import java.awt.*;
[ "java.awt" ]
java.awt;
540,634
@Test(expected = IllegalArgumentException.class) public final void testIsNotEmptyThrowsIllegalArgumentExceptionIfInputParamterIsAnEmptyArray() { final Collection<String> testList = newArrayList(); ParamValidator.isNotEmpty(testList.toArray(), ParamValidatorTest.DEFAULT_PARAM_NAME); }
@Test(expected = IllegalArgumentException.class) final void function() { final Collection<String> testList = newArrayList(); ParamValidator.isNotEmpty(testList.toArray(), ParamValidatorTest.DEFAULT_PARAM_NAME); }
/** * Test method for {@link ParamValidator#isNotEmpty(Object[], String)}. */
Test method for <code>ParamValidator#isNotEmpty(Object[], String)</code>
testIsNotEmptyThrowsIllegalArgumentExceptionIfInputParamterIsAnEmptyArray
{ "repo_name": "harmenweber/space-project", "path": "src/test/java/ch/harmen/util/ParamValidatorTest.java", "license": "mit", "size": 14983 }
[ "com.google.common.collect.Lists", "java.util.Collection", "org.junit.Test" ]
import com.google.common.collect.Lists; import java.util.Collection; import org.junit.Test;
import com.google.common.collect.*; import java.util.*; import org.junit.*;
[ "com.google.common", "java.util", "org.junit" ]
com.google.common; java.util; org.junit;
3,085
public static void setCompressionLevel(final int compressionLevel) { if (compressionLevel < Deflater.NO_COMPRESSION || compressionLevel > Deflater.BEST_COMPRESSION) { throw new IllegalArgumentException("Invalid compression level: " + compressionLevel); } IOUtil.compressionLevel = compressionLevel; }
static void function(final int compressionLevel) { if (compressionLevel < Deflater.NO_COMPRESSION compressionLevel > Deflater.BEST_COMPRESSION) { throw new IllegalArgumentException(STR + compressionLevel); } IOUtil.compressionLevel = compressionLevel; }
/** * Sets the GZip compression level for subsequent GZIPOutputStream object creation. * @param compressionLevel 0 <= compressionLevel <= 9 */
Sets the GZip compression level for subsequent GZIPOutputStream object creation
setCompressionLevel
{ "repo_name": "xubo245/CloudSW", "path": "src/main/java/htsjdk/samtools/util/IOUtil.java", "license": "gpl-2.0", "size": 35992 }
[ "java.util.zip.Deflater" ]
import java.util.zip.Deflater;
import java.util.zip.*;
[ "java.util" ]
java.util;
2,869,255
@Test public void testConstructorWithFilename() throws IOException, SettingsException, Error { Auth auth = new Auth("config/config.min.properties"); assertTrue(auth.getSettings() != null); Saml2Settings settings = new SettingsBuilder().fromFile("config/config.min.properties").build(); assertEquals(settings.getIdpEntityId(), auth.getSettings().getIdpEntityId()); assertEquals(settings.getSpEntityId(), auth.getSettings().getSpEntityId()); }
void function() throws IOException, SettingsException, Error { Auth auth = new Auth(STR); assertTrue(auth.getSettings() != null); Saml2Settings settings = new SettingsBuilder().fromFile(STR).build(); assertEquals(settings.getIdpEntityId(), auth.getSettings().getIdpEntityId()); assertEquals(settings.getSpEntityId(), auth.getSettings().getSpEntityId()); }
/** * Tests the constructor of Auth * Case: filename path provided * * @throws SettingsException * @throws IOException * @throws Error * * @see com.onelogin.saml2.Auth */
Tests the constructor of Auth Case: filename path provided
testConstructorWithFilename
{ "repo_name": "onelogin/java-saml", "path": "toolkit/src/test/java/com/onelogin/saml2/test/AuthTest.java", "license": "mit", "size": 108047 }
[ "com.onelogin.saml2.Auth", "com.onelogin.saml2.exception.Error", "com.onelogin.saml2.exception.SettingsException", "com.onelogin.saml2.settings.Saml2Settings", "com.onelogin.saml2.settings.SettingsBuilder", "java.io.IOException", "org.junit.Assert" ]
import com.onelogin.saml2.Auth; import com.onelogin.saml2.exception.Error; import com.onelogin.saml2.exception.SettingsException; import com.onelogin.saml2.settings.Saml2Settings; import com.onelogin.saml2.settings.SettingsBuilder; import java.io.IOException; import org.junit.Assert;
import com.onelogin.saml2.*; import com.onelogin.saml2.exception.*; import com.onelogin.saml2.settings.*; import java.io.*; import org.junit.*;
[ "com.onelogin.saml2", "java.io", "org.junit" ]
com.onelogin.saml2; java.io; org.junit;
378,816
protected Result append(final HRegion region, final Mutate mutate) throws IOException { long before = EnvironmentEdgeManager.currentTimeMillis(); Append append = ProtobufUtil.toAppend(mutate); Result r = null; if (region.getCoprocessorHost() != null) { r = region.getCoprocessorHost().preAppend(append); } if (r == null) { Integer lock = getLockFromId(append.getLockId()); r = region.append(append, lock, append.getWriteToWAL()); if (region.getCoprocessorHost() != null) { region.getCoprocessorHost().postAppend(append, r); } } metricsRegionServer.updateAppend(EnvironmentEdgeManager.currentTimeMillis() - before); return r; }
Result function(final HRegion region, final Mutate mutate) throws IOException { long before = EnvironmentEdgeManager.currentTimeMillis(); Append append = ProtobufUtil.toAppend(mutate); Result r = null; if (region.getCoprocessorHost() != null) { r = region.getCoprocessorHost().preAppend(append); } if (r == null) { Integer lock = getLockFromId(append.getLockId()); r = region.append(append, lock, append.getWriteToWAL()); if (region.getCoprocessorHost() != null) { region.getCoprocessorHost().postAppend(append, r); } } metricsRegionServer.updateAppend(EnvironmentEdgeManager.currentTimeMillis() - before); return r; }
/** * Execute an append mutation. * * @param region * @param mutate * @return the Result * @throws IOException */
Execute an append mutation
append
{ "repo_name": "daidong/DominoHBase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java", "license": "apache-2.0", "size": 156547 }
[ "java.io.IOException", "org.apache.hadoop.hbase.client.Append", "org.apache.hadoop.hbase.client.Result", "org.apache.hadoop.hbase.protobuf.ProtobufUtil", "org.apache.hadoop.hbase.protobuf.generated.ClientProtos", "org.apache.hadoop.hbase.util.EnvironmentEdgeManager" ]
import java.io.IOException; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import java.io.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.protobuf.*; import org.apache.hadoop.hbase.protobuf.generated.*; import org.apache.hadoop.hbase.util.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,963,608
public static Long getLong(final LdapEntry ctx, final String attribute, final Long nullValue) { final String v = getString(ctx, attribute, nullValue.toString()); if (v != null && NumberUtils.isNumber(v)) { return Long.valueOf(v); } return nullValue; }
static Long function(final LdapEntry ctx, final String attribute, final Long nullValue) { final String v = getString(ctx, attribute, nullValue.toString()); if (v != null && NumberUtils.isNumber(v)) { return Long.valueOf(v); } return nullValue; }
/** * Reads a Long value from the LdapEntry. * * @param ctx the ldap entry * @param attribute the attribute name * @param nullValue the value which should be returning in case of a null value * @return the long value */
Reads a Long value from the LdapEntry
getLong
{ "repo_name": "0be1/cas", "path": "cas-server-support-ldap/src/main/java/org/jasig/cas/util/LdapUtils.java", "license": "apache-2.0", "size": 4999 }
[ "org.apache.commons.lang.math.NumberUtils", "org.ldaptive.LdapEntry" ]
import org.apache.commons.lang.math.NumberUtils; import org.ldaptive.LdapEntry;
import org.apache.commons.lang.math.*; import org.ldaptive.*;
[ "org.apache.commons", "org.ldaptive" ]
org.apache.commons; org.ldaptive;
1,995,239
public void unimplementedCall(com.google.protobuf.EmptyProtos.Empty request, io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty> responseObserver) { asyncUnimplementedUnaryCall(getUnimplementedCallMethod(), responseObserver); }
void function(com.google.protobuf.EmptyProtos.Empty request, io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty> responseObserver) { asyncUnimplementedUnaryCall(getUnimplementedCallMethod(), responseObserver); }
/** * <pre> * The test server will not implement this method. It will be used * to test the behavior when clients call unimplemented methods. * </pre> */
<code> The test server will not implement this method. It will be used to test the behavior when clients call unimplemented methods. </code>
unimplementedCall
{ "repo_name": "rmichela/grpc-java", "path": "interop-testing/src/generated/main/grpc/io/grpc/testing/integration/TestServiceGrpc.java", "license": "apache-2.0", "size": 43307 }
[ "io.grpc.stub.ServerCalls" ]
import io.grpc.stub.ServerCalls;
import io.grpc.stub.*;
[ "io.grpc.stub" ]
io.grpc.stub;
1,641,810
public static APIProduct getAPIProduct(GovernanceArtifact artifact, Registry registry) throws APIManagementException { APIProduct apiProduct; try { String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String productName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String productVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIProductIdentifier apiProductIdentifier = new APIProductIdentifier(providerName, productName, productVersion); apiProductIdentifier.setUUID(artifact.getId()); apiProduct = new APIProduct(apiProductIdentifier); apiProduct.setUuid(artifact.getId()); apiProduct.setRating(Float.toString(getAverageRating(apiProductIdentifier))); ApiMgtDAO.getInstance().setAPIProductFromDB(apiProduct); setResourceProperties(apiProduct, registry, artifactPath); //set uuid apiProduct.setUuid(artifact.getId()); apiProduct.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); apiProduct.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); apiProduct.setState(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS)); apiProduct.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); apiProduct.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); apiProduct.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); apiProduct.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); apiProduct.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); apiProduct.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); apiProduct.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); apiProduct.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); 
apiProduct.setSubscriptionAvailability(artifact.getAttribute( APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); apiProduct.setSubscriptionAvailableTenants(artifact.getAttribute( APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); apiProduct.setEnvironments(extractEnvironmentsForAPI(environments)); apiProduct.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); apiProduct.setApiSecurity(artifact.getAttribute(APIConstants.API_OVERVIEW_API_SECURITY)); apiProduct.setAuthorizationHeader(artifact.getAttribute(APIConstants.API_OVERVIEW_AUTHORIZATION_HEADER)); apiProduct.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); apiProduct.setCreatedTime(registry.get(artifactPath).getCreatedTime()); apiProduct.setLastUpdated(registry.get(artifactPath).getLastModified()); apiProduct.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); apiProduct.setTenantDomain(tenantDomainName); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, productName); apiProduct.setAvailableTiers(availableTier); // We set the context template here apiProduct.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); apiProduct.setEnableSchemaValidation(Boolean.parseBoolean(artifact.getAttribute( APIConstants.API_OVERVIEW_ENABLE_JSON_SCHEMA))); apiProduct.setEnableStore(Boolean.parseBoolean(artifact.getAttribute( APIConstants.API_OVERVIEW_ENABLE_STORE))); apiProduct.setTestKey(artifact.getAttribute(APIConstants.API_OVERVIEW_TESTKEY)); 
apiProduct.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); int cacheTimeout = APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { cacheTimeout = Integer.parseInt(artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT)); } catch (NumberFormatException e) { if (log.isDebugEnabled()) { log.debug("Error in converting cache time out due to " + e.getMessage()); } } apiProduct.setCacheTimeout(cacheTimeout); List<APIProductResource> resources = ApiMgtDAO.getInstance(). getAPIProductResourceMappings(apiProductIdentifier); Map<String, Scope> uniqueAPIProductScopeKeyMappings = new LinkedHashMap<>(); for (APIProductResource resource : resources) { List<Scope> resourceScopes = resource.getUriTemplate().retrieveAllScopes(); ListIterator it = resourceScopes.listIterator(); while (it.hasNext()) { Scope resourceScope = (Scope) it.next(); String scopeKey = resourceScope.getKey(); if (!uniqueAPIProductScopeKeyMappings.containsKey(scopeKey)) { resourceScope = getScopeByName(scopeKey, tenantDomainName); uniqueAPIProductScopeKeyMappings.put(scopeKey, resourceScope); } else { resourceScope = uniqueAPIProductScopeKeyMappings.get(scopeKey); } it.set(resourceScope); } } Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } apiProduct.addTags(tags); for (APIProductResource resource : resources) { String apiPath = APIUtil.getAPIPath(resource.getApiIdentifier()); Resource productResource = null; try { // Handles store and publisher visibility issue when associated apis have different visibility // restrictions. 
productResource = registry.get(apiPath); } catch (RegistryException e) { if (e.getClass().equals(AuthorizationFailedException.class)) { if (log.isDebugEnabled()) { log.debug("User is not authorized to access the resource " + apiPath); } continue; } else { String msg = "Failed to get product resource"; throw new APIManagementException(msg, e); } } String artifactId = productResource.getUUID(); resource.setApiId(artifactId); GenericArtifactManager artifactManager = getArtifactManager(registry, APIConstants.API_KEY); GenericArtifact apiArtifact = artifactManager.getGenericArtifact(resource.getApiId()); API api = getAPI(apiArtifact, registry); resource.setEndpointConfig(api.getEndpointConfig()); resource.setEndpointSecurityMap(setEndpointSecurityForAPIProduct(api)); } apiProduct.setProductResources(resources); //set data and status related to monetization apiProduct.setMonetizationStatus(Boolean.parseBoolean(artifact.getAttribute (APIConstants.Monetization.API_MONETIZATION_STATUS))); String monetizationInfo = artifact.getAttribute(APIConstants.Monetization.API_MONETIZATION_PROPERTIES); if (StringUtils.isNotBlank(monetizationInfo)) { JSONParser parser = new JSONParser(); JSONObject jsonObj = (JSONObject) parser.parse(monetizationInfo); apiProduct.setMonetizationProperties(jsonObj); } apiProduct.setApiCategories(getAPICategoriesFromAPIGovernanceArtifact(artifact, tenantId)); } catch (GovernanceException e) { String msg = "Failed to get API Product for artifact "; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = "Failed to get User Realm of API Product Provider"; throw new APIManagementException(msg, e); } catch (ParseException e) { String msg = "Failed to get parse monetization information."; throw new APIManagementException(msg, e); } return apiProduct; }
static APIProduct function(GovernanceArtifact artifact, Registry registry) throws APIManagementException { APIProduct apiProduct; try { String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String productName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String productVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIProductIdentifier apiProductIdentifier = new APIProductIdentifier(providerName, productName, productVersion); apiProductIdentifier.setUUID(artifact.getId()); apiProduct = new APIProduct(apiProductIdentifier); apiProduct.setUuid(artifact.getId()); apiProduct.setRating(Float.toString(getAverageRating(apiProductIdentifier))); ApiMgtDAO.getInstance().setAPIProductFromDB(apiProduct); setResourceProperties(apiProduct, registry, artifactPath); apiProduct.setUuid(artifact.getId()); apiProduct.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); apiProduct.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); apiProduct.setState(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS)); apiProduct.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); apiProduct.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); apiProduct.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); apiProduct.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); apiProduct.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); apiProduct.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); apiProduct.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); apiProduct.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); apiProduct.setSubscriptionAvailability(artifact.getAttribute( 
APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); apiProduct.setSubscriptionAvailableTenants(artifact.getAttribute( APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); apiProduct.setEnvironments(extractEnvironmentsForAPI(environments)); apiProduct.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); apiProduct.setApiSecurity(artifact.getAttribute(APIConstants.API_OVERVIEW_API_SECURITY)); apiProduct.setAuthorizationHeader(artifact.getAttribute(APIConstants.API_OVERVIEW_AUTHORIZATION_HEADER)); apiProduct.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); apiProduct.setCreatedTime(registry.get(artifactPath).getCreatedTime()); apiProduct.setLastUpdated(registry.get(artifactPath).getLastModified()); apiProduct.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); apiProduct.setTenantDomain(tenantDomainName); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, productName); apiProduct.setAvailableTiers(availableTier); apiProduct.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); apiProduct.setEnableSchemaValidation(Boolean.parseBoolean(artifact.getAttribute( APIConstants.API_OVERVIEW_ENABLE_JSON_SCHEMA))); apiProduct.setEnableStore(Boolean.parseBoolean(artifact.getAttribute( APIConstants.API_OVERVIEW_ENABLE_STORE))); apiProduct.setTestKey(artifact.getAttribute(APIConstants.API_OVERVIEW_TESTKEY)); apiProduct.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); int cacheTimeout = 
APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { cacheTimeout = Integer.parseInt(artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT)); } catch (NumberFormatException e) { if (log.isDebugEnabled()) { log.debug(STR + e.getMessage()); } } apiProduct.setCacheTimeout(cacheTimeout); List<APIProductResource> resources = ApiMgtDAO.getInstance(). getAPIProductResourceMappings(apiProductIdentifier); Map<String, Scope> uniqueAPIProductScopeKeyMappings = new LinkedHashMap<>(); for (APIProductResource resource : resources) { List<Scope> resourceScopes = resource.getUriTemplate().retrieveAllScopes(); ListIterator it = resourceScopes.listIterator(); while (it.hasNext()) { Scope resourceScope = (Scope) it.next(); String scopeKey = resourceScope.getKey(); if (!uniqueAPIProductScopeKeyMappings.containsKey(scopeKey)) { resourceScope = getScopeByName(scopeKey, tenantDomainName); uniqueAPIProductScopeKeyMappings.put(scopeKey, resourceScope); } else { resourceScope = uniqueAPIProductScopeKeyMappings.get(scopeKey); } it.set(resourceScope); } } Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } apiProduct.addTags(tags); for (APIProductResource resource : resources) { String apiPath = APIUtil.getAPIPath(resource.getApiIdentifier()); Resource productResource = null; try { productResource = registry.get(apiPath); } catch (RegistryException e) { if (e.getClass().equals(AuthorizationFailedException.class)) { if (log.isDebugEnabled()) { log.debug(STR + apiPath); } continue; } else { String msg = STR; throw new APIManagementException(msg, e); } } String artifactId = productResource.getUUID(); resource.setApiId(artifactId); GenericArtifactManager artifactManager = getArtifactManager(registry, APIConstants.API_KEY); GenericArtifact apiArtifact = artifactManager.getGenericArtifact(resource.getApiId()); API api = getAPI(apiArtifact, registry); resource.setEndpointConfig(api.getEndpointConfig()); 
resource.setEndpointSecurityMap(setEndpointSecurityForAPIProduct(api)); } apiProduct.setProductResources(resources); apiProduct.setMonetizationStatus(Boolean.parseBoolean(artifact.getAttribute (APIConstants.Monetization.API_MONETIZATION_STATUS))); String monetizationInfo = artifact.getAttribute(APIConstants.Monetization.API_MONETIZATION_PROPERTIES); if (StringUtils.isNotBlank(monetizationInfo)) { JSONParser parser = new JSONParser(); JSONObject jsonObj = (JSONObject) parser.parse(monetizationInfo); apiProduct.setMonetizationProperties(jsonObj); } apiProduct.setApiCategories(getAPICategoriesFromAPIGovernanceArtifact(artifact, tenantId)); } catch (GovernanceException e) { String msg = STR; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = STR; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = STR; throw new APIManagementException(msg, e); } catch (ParseException e) { String msg = STR; throw new APIManagementException(msg, e); } return apiProduct; }
/** * Retrieves api product artifact from registry * * @param artifact * @param registry * @return APIProduct * @throws org.wso2.carbon.apimgt.api.APIManagementException */
Retrieves api product artifact from registry
getAPIProduct
{ "repo_name": "tharikaGitHub/carbon-apimgt", "path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/utils/APIUtil.java", "license": "apache-2.0", "size": 563590 }
[ "java.util.HashSet", "java.util.LinkedHashMap", "java.util.List", "java.util.ListIterator", "java.util.Map", "java.util.Set", "org.apache.commons.lang3.StringUtils", "org.json.simple.JSONObject", "org.json.simple.parser.JSONParser", "org.json.simple.parser.ParseException", "org.wso2.carbon.apimgt.api.APIManagementException", "org.wso2.carbon.apimgt.api.model.APIProduct", "org.wso2.carbon.apimgt.api.model.APIProductIdentifier", "org.wso2.carbon.apimgt.api.model.APIProductResource", "org.wso2.carbon.apimgt.api.model.Scope", "org.wso2.carbon.apimgt.api.model.Tier", "org.wso2.carbon.apimgt.impl.APIConstants", "org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO", "org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder", "org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact", "org.wso2.carbon.governance.api.exception.GovernanceException", "org.wso2.carbon.governance.api.generic.GenericArtifactManager", "org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact", "org.wso2.carbon.governance.api.util.GovernanceUtils", "org.wso2.carbon.registry.core.Registry", "org.wso2.carbon.registry.core.Resource", "org.wso2.carbon.registry.core.Tag", "org.wso2.carbon.registry.core.exceptions.RegistryException", "org.wso2.carbon.registry.core.secure.AuthorizationFailedException", "org.wso2.carbon.user.api.UserStoreException", "org.wso2.carbon.utils.multitenancy.MultitenantUtils" ]
import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.GenericArtifactManager; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.governance.api.util.GovernanceUtils; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.Tag; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.secure.AuthorizationFailedException; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.util.*; import org.apache.commons.lang3.*; import org.json.simple.*; import org.json.simple.parser.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.impl.*; import org.wso2.carbon.apimgt.impl.dao.*; import org.wso2.carbon.apimgt.impl.internal.*; import org.wso2.carbon.governance.api.common.dataobjects.*; import org.wso2.carbon.governance.api.exception.*; import org.wso2.carbon.governance.api.generic.*; import org.wso2.carbon.governance.api.generic.dataobjects.*; import org.wso2.carbon.governance.api.util.*; import org.wso2.carbon.registry.core.*; import org.wso2.carbon.registry.core.exceptions.*; import org.wso2.carbon.registry.core.secure.*; import org.wso2.carbon.user.api.*; import org.wso2.carbon.utils.multitenancy.*;
[ "java.util", "org.apache.commons", "org.json.simple", "org.wso2.carbon" ]
java.util; org.apache.commons; org.json.simple; org.wso2.carbon;
1,323,857
private IMarker createMarker(ITextEditor editor, String message, Variant variant) throws CoreException { IFile inputFile = getEditorFile(editor); if (inputFile != null) { IMarker marker = inputFile.createMarker(LOCATION_MARKER); marker.setAttribute(IMarker.MESSAGE, message); if (variant != null) { marker.setAttribute(LOCATION_MARKER_ATTRIBUTE_VARIANT, VariantRegistry.register(variant)); } return marker; } else { logger.warn("Editor is not handling a file"); return null; } }
IMarker function(ITextEditor editor, String message, Variant variant) throws CoreException { IFile inputFile = getEditorFile(editor); if (inputFile != null) { IMarker marker = inputFile.createMarker(LOCATION_MARKER); marker.setAttribute(IMarker.MESSAGE, message); if (variant != null) { marker.setAttribute(LOCATION_MARKER_ATTRIBUTE_VARIANT, VariantRegistry.register(variant)); } return marker; } else { logger.warn(STR); return null; } }
/** * Highlight a code location with a given message in an editor. * * To gain access to an editor, use {@link JavaEditorConnector#openEditor(JavaSoftwareElement)}. * * To reset existing highlighting, use * {@link JavaEditorConnector#resetLocationHighlighting(ITextEditor)}.<br> * Note: It is up to you to decide when to reset of you want to highlight several locations at * the same time. * * @param editor * The editor to set the highlighting in. * @param message * The message to mark the text with. * @return The marker for the given message. * @throws CoreException * Throws {@link CoreException} for invalid resources. */
Highlight a code location with a given message in an editor. To gain access to an editor, use <code>JavaEditorConnector#openEditor(JavaSoftwareElement)</code>. To reset existing highlighting, use <code>JavaEditorConnector#resetLocationHighlighting(ITextEditor)</code>. Note: It is up to you to decide when to reset of you want to highlight several locations at the same time
createMarker
{ "repo_name": "kopl/SPLevo", "path": "UI/org.splevo.ui/src/org/splevo/ui/sourceconnection/jdt/JavaEditorConnector.java", "license": "epl-1.0", "size": 13125 }
[ "org.eclipse.core.resources.IFile", "org.eclipse.core.resources.IMarker", "org.eclipse.core.runtime.CoreException", "org.eclipse.ui.texteditor.ITextEditor", "org.splevo.vpm.variability.Variant" ]
import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.runtime.CoreException; import org.eclipse.ui.texteditor.ITextEditor; import org.splevo.vpm.variability.Variant;
import org.eclipse.core.resources.*; import org.eclipse.core.runtime.*; import org.eclipse.ui.texteditor.*; import org.splevo.vpm.variability.*;
[ "org.eclipse.core", "org.eclipse.ui", "org.splevo.vpm" ]
org.eclipse.core; org.eclipse.ui; org.splevo.vpm;
791,357
public void updateBreadcrumbs(LabeledLink newLink) { logger.trace("updateBreadcrumbs (LabeledLink): {}", newLink.toString()); // List<LabeledLink> breadcrumbs = Collections.synchronizedList(this.breadcrumbs); synchronized (breadcrumbs) { // Always add the home page if there are no breadcrumbs if (breadcrumbs.isEmpty()) { resetBreadcrumbs(); } // logger.trace("Adding breadcrumb: {} ({})", newLink.getUrl(), newLink.getWeight()); // Determine the position at which to add the new link int position = breadcrumbs.size(); for (int i = 0; i < breadcrumbs.size(); ++i) { LabeledLink link = breadcrumbs.get(i); // logger.trace("existing breadcrumb: {}", link.toString()); if (link.getWeight() >= newLink.getWeight()) { position = i; break; } } try { // To avoid duplicate breadcrumbs while flipping pages, the LabeledLink.equals() method will prevent multiple breadcrumbs with the same name if (breadcrumbs.contains(newLink)) { logger.trace("Breadcrumb is already in the list: '{}'", newLink); } breadcrumbs.add(position, newLink); } finally { // Remove any following links, even if the proposed link is a duplicate if (position < breadcrumbs.size()) { try { breadcrumbs.subList(position + 1, breadcrumbs.size()).clear(); } catch (NullPointerException e) { // This throws a NPE sometimes } } // logger.trace("breadcrumbs: " + breadcrumbs.size() + " " + // breadcrumbs.toString()); } } }
void function(LabeledLink newLink) { logger.trace(STR, newLink.toString()); synchronized (breadcrumbs) { if (breadcrumbs.isEmpty()) { resetBreadcrumbs(); } int position = breadcrumbs.size(); for (int i = 0; i < breadcrumbs.size(); ++i) { LabeledLink link = breadcrumbs.get(i); if (link.getWeight() >= newLink.getWeight()) { position = i; break; } } try { if (breadcrumbs.contains(newLink)) { logger.trace(STR, newLink); } breadcrumbs.add(position, newLink); } finally { if (position < breadcrumbs.size()) { try { breadcrumbs.subList(position + 1, breadcrumbs.size()).clear(); } catch (NullPointerException e) { } } } } }
/** * Attaches a new link to the breadcrumb list at the appropriate position (depending on the link's weight). * * @param newLink The breadcrumb link to add. * @should always remove breadcrumbs coming after the proposed breadcrumb */
Attaches a new link to the breadcrumb list at the appropriate position (depending on the link's weight)
updateBreadcrumbs
{ "repo_name": "intranda/goobi-viewer-core", "path": "goobi-viewer-core/src/main/java/io/goobi/viewer/managedbeans/BreadcrumbBean.java", "license": "gpl-2.0", "size": 24161 }
[ "io.goobi.viewer.model.viewer.LabeledLink" ]
import io.goobi.viewer.model.viewer.LabeledLink;
import io.goobi.viewer.model.viewer.*;
[ "io.goobi.viewer" ]
io.goobi.viewer;
2,135,845
public void copyNode(RenderableNode node) { BehaviorTreeData data = new BehaviorTreeData(); data.setRoot(node.getNode()); BehaviorTreeFormat loader = new BehaviorTreeFormat(); ByteArrayOutputStream os = new ByteArrayOutputStream(10000); try { loader.save(os, data); BehaviorTreeData copy = loader.load(new ByteArrayInputStream(os.toByteArray())); Port.OutputPort parent = node.getInputPort().getTargetPort(); copy.createRenderable(behaviorNodeFactory); RenderableNode copyRenderable = copy.getRenderableNode(copy.getRoot()); addNode(copyRenderable); RenderableNode nodeToLayout; if (parent != null && copyRenderable.getInputPort() != null) { parent.setTarget(copyRenderable.getInputPort()); nodeToLayout = parent.node; } else { nodeToLayout = copyRenderable; } Vector2f oldPos = nodeToLayout.getPosition(); tree.layout(nodeToLayout); oldPos.sub(nodeToLayout.getPosition()); nodeToLayout.move(oldPos); } catch (IOException e) { logger.error("Failed to copy node", e); } }
void function(RenderableNode node) { BehaviorTreeData data = new BehaviorTreeData(); data.setRoot(node.getNode()); BehaviorTreeFormat loader = new BehaviorTreeFormat(); ByteArrayOutputStream os = new ByteArrayOutputStream(10000); try { loader.save(os, data); BehaviorTreeData copy = loader.load(new ByteArrayInputStream(os.toByteArray())); Port.OutputPort parent = node.getInputPort().getTargetPort(); copy.createRenderable(behaviorNodeFactory); RenderableNode copyRenderable = copy.getRenderableNode(copy.getRoot()); addNode(copyRenderable); RenderableNode nodeToLayout; if (parent != null && copyRenderable.getInputPort() != null) { parent.setTarget(copyRenderable.getInputPort()); nodeToLayout = parent.node; } else { nodeToLayout = copyRenderable; } Vector2f oldPos = nodeToLayout.getPosition(); tree.layout(nodeToLayout); oldPos.sub(nodeToLayout.getPosition()); nodeToLayout.move(oldPos); } catch (IOException e) { logger.error(STR, e); } }
/** * Copy the given node. The new copy replaces the given one, so you should manipulate the original node, instead of the copy. * This is especially useful in an interaction listener. */
Copy the given node. The new copy replaces the given one, so you should manipulate the original node, instead of the copy. This is especially useful in an interaction listener
copyNode
{ "repo_name": "leelib/Terasology", "path": "engine/src/main/java/org/terasology/logic/behavior/nui/BehaviorEditor.java", "license": "apache-2.0", "size": 11356 }
[ "java.io.ByteArrayInputStream", "java.io.ByteArrayOutputStream", "java.io.IOException", "org.terasology.logic.behavior.asset.BehaviorTreeData", "org.terasology.logic.behavior.asset.BehaviorTreeFormat", "org.terasology.math.geom.Vector2f" ]
import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import org.terasology.logic.behavior.asset.BehaviorTreeData; import org.terasology.logic.behavior.asset.BehaviorTreeFormat; import org.terasology.math.geom.Vector2f;
import java.io.*; import org.terasology.logic.behavior.asset.*; import org.terasology.math.geom.*;
[ "java.io", "org.terasology.logic", "org.terasology.math" ]
java.io; org.terasology.logic; org.terasology.math;
1,969,967
public static void close(@Nullable ResultSet rs) { if (rs != null) { try { rs.close(); } catch (SQLException e) { log.trace("Could not close JDBC result set", e); } catch (Exception e) { log.trace("Unexpected exception when closing JDBC result set", e); } } }
static void function(@Nullable ResultSet rs) { if (rs != null) { try { rs.close(); } catch (SQLException e) { log.trace(STR, e); } catch (Exception e) { log.trace(STR, e); } } }
/** * Closes the given result set and just logs any exception. * * @param rs The {@link java.sql.ResultSet} to close. */
Closes the given result set and just logs any exception
close
{ "repo_name": "alancnet/artifactory", "path": "storage/db/src/main/java/org/artifactory/storage/db/util/DbUtils.java", "license": "apache-2.0", "size": 8671 }
[ "java.sql.ResultSet", "java.sql.SQLException", "javax.annotation.Nullable" ]
import java.sql.ResultSet; import java.sql.SQLException; import javax.annotation.Nullable;
import java.sql.*; import javax.annotation.*;
[ "java.sql", "javax.annotation" ]
java.sql; javax.annotation;
545,132
public RelDataType createNewRowType(RelDataTypeFactory factory) { return factory.createStructType(types, fieldNames); } }
RelDataType function(RelDataTypeFactory factory) { return factory.createStructType(types, fieldNames); } }
/** * Creates new row type based on stores types and field names. * * @param factory factory for data type descriptors. * @return new row type */
Creates new row type based on stores types and field names
createNewRowType
{ "repo_name": "kkhatua/drill", "path": "exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillRelOptUtil.java", "license": "apache-2.0", "size": 28226 }
[ "org.apache.calcite.rel.type.RelDataType", "org.apache.calcite.rel.type.RelDataTypeFactory" ]
import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.*;
[ "org.apache.calcite" ]
org.apache.calcite;
626,928
public void saveTotalScores(List data, PublishedAssessmentIfc pub) { try { GradingService service = new GradingService(); ArrayList list = new ArrayList(data); service.saveTotalScores(list, pub); } catch (Exception ex) { throw new GradingServiceException(ex); } } /* public void saveItemScores(List data, HashMap map, PublishedAssessmentIfc pub) { try { GradingService service = new GradingService(); ArrayList list = new ArrayList(data); service.saveItemScores(list, map, pub); } catch (Exception ex) { throw new GradingServiceException(ex); } }
void function(List data, PublishedAssessmentIfc pub) { try { GradingService service = new GradingService(); ArrayList list = new ArrayList(data); service.saveTotalScores(list, pub); } catch (Exception ex) { throw new GradingServiceException(ex); } } /* public void saveItemScores(List data, HashMap map, PublishedAssessmentIfc pub) { try { GradingService service = new GradingService(); ArrayList list = new ArrayList(data); service.saveItemScores(list, map, pub); } catch (Exception ex) { throw new GradingServiceException(ex); } }
/** * Save the total scores. * @param data List of AssessmentGradingDataIfcs */
Save the total scores
saveTotalScores
{ "repo_name": "harfalm/Sakai-10.1", "path": "samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/shared/impl/grading/GradingServiceImpl.java", "license": "apache-2.0", "size": 10727 }
[ "java.util.ArrayList", "java.util.List", "org.sakaiproject.tool.assessment.data.ifc.assessment.PublishedAssessmentIfc", "org.sakaiproject.tool.assessment.services.GradingService", "org.sakaiproject.tool.assessment.services.GradingServiceException" ]
import java.util.ArrayList; import java.util.List; import org.sakaiproject.tool.assessment.data.ifc.assessment.PublishedAssessmentIfc; import org.sakaiproject.tool.assessment.services.GradingService; import org.sakaiproject.tool.assessment.services.GradingServiceException;
import java.util.*; import org.sakaiproject.tool.assessment.data.ifc.assessment.*; import org.sakaiproject.tool.assessment.services.*;
[ "java.util", "org.sakaiproject.tool" ]
java.util; org.sakaiproject.tool;
652,106
ServiceFuture<Void> put500Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
ServiceFuture<Void> put500Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
/** * Return 500 status code, then 200 after retry. * * @param booleanValue Simple boolean value true * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link ServiceFuture} object */
Return 500 status code, then 200 after retry
put500Async
{ "repo_name": "anudeepsharma/autorest", "path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/http/HttpRetrys.java", "license": "mit", "size": 13569 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
1,469,738
Call<ResponseBody> getArrayEmptyAsync(final ServiceCallback<Map<String, List<String>>> serviceCallback);
Call<ResponseBody> getArrayEmptyAsync(final ServiceCallback<Map<String, List<String>>> serviceCallback);
/** * Get an empty dictionary {}. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */
Get an empty dictionary {}
getArrayEmptyAsync
{ "repo_name": "matt-gibbs/AutoRest", "path": "AutoRest/Generators/Java/Java.Tests/src/main/java/fixtures/bodydictionary/Dictionary.java", "license": "mit", "size": 63614 }
[ "com.microsoft.rest.ServiceCallback", "com.squareup.okhttp.ResponseBody", "java.util.List", "java.util.Map" ]
import com.microsoft.rest.ServiceCallback; import com.squareup.okhttp.ResponseBody; import java.util.List; import java.util.Map;
import com.microsoft.rest.*; import com.squareup.okhttp.*; import java.util.*;
[ "com.microsoft.rest", "com.squareup.okhttp", "java.util" ]
com.microsoft.rest; com.squareup.okhttp; java.util;
2,661,726
int getRetries(); /** * Returns the message of the exception that occurred, the last time the job was executed. Returns null when no exception occurred. * * To get the full exception stacktrace, use {@link ManagementService#getJobExceptionStacktrace(String)}
int getRetries(); /** * Returns the message of the exception that occurred, the last time the job was executed. Returns null when no exception occurred. * * To get the full exception stacktrace, use {@link ManagementService#getJobExceptionStacktrace(String)}
/** * Returns the number of retries this job has left. Whenever the jobexecutor fails to execute the job, this value is decremented. When it hits zero, the job is supposed to be dead and not retried * again (ie a manual retry is required then). */
Returns the number of retries this job has left. Whenever the jobexecutor fails to execute the job, this value is decremented. When it hits zero, the job is supposed to be dead and not retried again (ie a manual retry is required then)
getRetries
{ "repo_name": "roberthafner/flowable-engine", "path": "modules/flowable-engine/src/main/java/org/activiti/engine/runtime/Job.java", "license": "apache-2.0", "size": 2431 }
[ "org.activiti.engine.ManagementService" ]
import org.activiti.engine.ManagementService;
import org.activiti.engine.*;
[ "org.activiti.engine" ]
org.activiti.engine;
2,544,801
public void testNestedEmptyDirs() throws Exception { final List<Event> evtList = new ArrayList<>(); final int evtsCnt = 2 + 1; final CountDownLatch latch = new CountDownLatch(evtsCnt);
void function() throws Exception { final List<Event> evtList = new ArrayList<>(); final int evtsCnt = 2 + 1; final CountDownLatch latch = new CountDownLatch(evtsCnt);
/** * Checks events on CRUD operations with multiple * empty directories. * * @throws Exception If failed. */
Checks events on CRUD operations with multiple empty directories
testNestedEmptyDirs
{ "repo_name": "psadusumilli/ignite", "path": "modules/core/src/test/java/org/apache/ignite/igfs/IgfsEventsAbstractSelfTest.java", "license": "apache-2.0", "size": 29327 }
[ "java.util.ArrayList", "java.util.List", "java.util.concurrent.CountDownLatch", "org.apache.ignite.events.Event" ]
import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; import org.apache.ignite.events.Event;
import java.util.*; import java.util.concurrent.*; import org.apache.ignite.events.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
989,755
public void batchProcessURL(URL url, Network network) { if (url == null) { return; } log("Input", Level.FINE, url); Element root = parseURL(url); if (root != null) { int attempt = 0; Exception failure = null; while (attempt < RETRY) { attempt++; try { processRoot(root, url, network); network.save(); break; } catch (Exception failed) { failure = failed; log(failed.toString(), Level.WARNING); log("Retrying", Level.WARNING); } } if (attempt == RETRY) { log("Retry failed", Level.WARNING); log(failure); } } } public Http() { this.domains = new HashMap<String, Http>(); }
void function(URL url, Network network) { if (url == null) { return; } log("Input", Level.FINE, url); Element root = parseURL(url); if (root != null) { int attempt = 0; Exception failure = null; while (attempt < RETRY) { attempt++; try { processRoot(root, url, network); network.save(); break; } catch (Exception failed) { failure = failed; log(failed.toString(), Level.WARNING); log(STR, Level.WARNING); } } if (attempt == RETRY) { log(STR, Level.WARNING); log(failure); } } } public Http() { this.domains = new HashMap<String, Http>(); }
/** * Process the URL as part of a batch. */
Process the URL as part of a batch
batchProcessURL
{ "repo_name": "BOTlibre/BOTlibre", "path": "micro-ai-engine/android/source/org/botlibre/sense/http/Http.java", "license": "epl-1.0", "size": 46787 }
[ "java.util.HashMap", "java.util.logging.Level", "org.botlibre.api.knowledge.Network", "org.w3c.dom.Element" ]
import java.util.HashMap; import java.util.logging.Level; import org.botlibre.api.knowledge.Network; import org.w3c.dom.Element;
import java.util.*; import java.util.logging.*; import org.botlibre.api.knowledge.*; import org.w3c.dom.*;
[ "java.util", "org.botlibre.api", "org.w3c.dom" ]
java.util; org.botlibre.api; org.w3c.dom;
460,949
public void performMoveDown(PaletteEntry entry) { PaletteContainer parent = entry.getParent(); if (!parent.moveDown(entry)) { // This is the case of a PaletteEntry that is its parent's last // child // and will have to move down into the next slot in the grandparent PaletteEntry parentSibling = null; PaletteContainer newParent = parent.getParent(); int insertionIndex = 0; if (canAdd(newParent, entry)) insertionIndex = newParent.getChildren().indexOf(parent) + 1; else { List parents = newParent.getChildren(); for (int i = parents.indexOf(parent) + 1; i < parents.size(); i++) { parentSibling = (PaletteEntry) parents.get(i); if (parentSibling instanceof PaletteContainer) { newParent = (PaletteContainer) parentSibling; if (canAdd(newParent, entry)) break; } } } parent.remove(entry); newParent.add(insertionIndex, entry); } }
void function(PaletteEntry entry) { PaletteContainer parent = entry.getParent(); if (!parent.moveDown(entry)) { PaletteEntry parentSibling = null; PaletteContainer newParent = parent.getParent(); int insertionIndex = 0; if (canAdd(newParent, entry)) insertionIndex = newParent.getChildren().indexOf(parent) + 1; else { List parents = newParent.getChildren(); for (int i = parents.indexOf(parent) + 1; i < parents.size(); i++) { parentSibling = (PaletteEntry) parents.get(i); if (parentSibling instanceof PaletteContainer) { newParent = (PaletteContainer) parentSibling; if (canAdd(newParent, entry)) break; } } } parent.remove(entry); newParent.add(insertionIndex, entry); } }
/** * Updates the model by moving the entry down. <br> * Called when the "Move Down" action in the PaletteCustomizerDialog is * invoked. * * @param entry * The selected palette entry (it'll never be <code>null</code>) * * @see #canMoveDown(PaletteEntry) */
Updates the model by moving the entry down. Called when the "Move Down" action in the PaletteCustomizerDialog is invoked
performMoveDown
{ "repo_name": "archimatetool/archi", "path": "org.eclipse.gef/src/org/eclipse/gef/ui/palette/PaletteCustomizer.java", "license": "mit", "size": 12307 }
[ "java.util.List", "org.eclipse.gef.palette.PaletteContainer", "org.eclipse.gef.palette.PaletteEntry" ]
import java.util.List; import org.eclipse.gef.palette.PaletteContainer; import org.eclipse.gef.palette.PaletteEntry;
import java.util.*; import org.eclipse.gef.palette.*;
[ "java.util", "org.eclipse.gef" ]
java.util; org.eclipse.gef;
2,320,404
DrawingAttributes getSelectionDrawingAttributes();
DrawingAttributes getSelectionDrawingAttributes();
/** * Return a set of drawing attributes that match what the presenter is using * for selection; * * @return DrawingAttributes containing rendering info for selected items. */
Return a set of drawing attributes that match what the presenter is using for selection
getSelectionDrawingAttributes
{ "repo_name": "d2fn/passage", "path": "src/main/java/com/bbn/openmap/gui/event/EventPresenter.java", "license": "mit", "size": 2256 }
[ "com.bbn.openmap.omGraphics.DrawingAttributes" ]
import com.bbn.openmap.omGraphics.DrawingAttributes;
import com.bbn.openmap.*;
[ "com.bbn.openmap" ]
com.bbn.openmap;
1,764,284
Intent intent = new Intent(DISPLAY_MESSAGE_ACTION); intent.putExtra(EXTRA_MESSAGE, message); context.sendBroadcast(intent); }
Intent intent = new Intent(DISPLAY_MESSAGE_ACTION); intent.putExtra(EXTRA_MESSAGE, message); context.sendBroadcast(intent); }
/** * Notifies UI to display a message. * <p> * This method is defined in the common helper because it's used both by * the UI and the background service. * * @param context application's context. * @param message message to be displayed. */
Notifies UI to display a message. This method is defined in the common helper because it's used both by the UI and the background service
displayMessage
{ "repo_name": "imalpasha/cj", "path": "carijodoh/src/main/java/com/fly/cj/ui/activity/PushNotification/CommonUtilities.java", "license": "mit", "size": 1524 }
[ "android.content.Intent" ]
import android.content.Intent;
import android.content.*;
[ "android.content" ]
android.content;
1,535,552
@Test(expected = IllegalArgumentException.class) public void shouldThrowIllegalArgumentException4() throws Exception { Maven.configureResolver() .withClassPathResolution(false).withMavenCentralRepo(false) .withRemoteRepo("jboss", "wrong://repository.jboss.org/nexus/content/repositories/releases/", "default") .loadPomFromFile("pom.xml").resolve("org.hornetq:hornetq-core:2.0.0.GA") .withoutTransitivity().asSingle(File.class); }
@Test(expected = IllegalArgumentException.class) void function() throws Exception { Maven.configureResolver() .withClassPathResolution(false).withMavenCentralRepo(false) .withRemoteRepo("jboss", STRpom.xmlSTRorg.hornetq:hornetq-core:2.0.0.GA") .withoutTransitivity().asSingle(File.class); }
/** * Test behaviour with an invalid URL */
Test behaviour with an invalid URL
shouldThrowIllegalArgumentException4
{ "repo_name": "oliveti/resolver", "path": "impl-maven/src/test/java/org/jboss/shrinkwrap/resolver/impl/maven/integration/AdditionalRemoteRepositoryTestCase.java", "license": "apache-2.0", "size": 11024 }
[ "java.io.File", "org.jboss.shrinkwrap.resolver.api.maven.Maven", "org.junit.Test" ]
import java.io.File; import org.jboss.shrinkwrap.resolver.api.maven.Maven; import org.junit.Test;
import java.io.*; import org.jboss.shrinkwrap.resolver.api.maven.*; import org.junit.*;
[ "java.io", "org.jboss.shrinkwrap", "org.junit" ]
java.io; org.jboss.shrinkwrap; org.junit;
2,600,917
public void serialize( OutputStream out ) throws Exception { Properties props = new Properties(); props .setProperty( "edition", "vZome" ); props .setProperty( "version", "5.0" ); this .serialize( out, props ); }
void function( OutputStream out ) throws Exception { Properties props = new Properties(); props .setProperty( STR, "vZome" ); props .setProperty( STR, "5.0" ); this .serialize( out, props ); }
/** * For backward-compatibility * @param out * @throws Exception */
For backward-compatibility
serialize
{ "repo_name": "david-hall/vzome-core", "path": "src/main/java/com/vzome/core/editor/DocumentModel.java", "license": "apache-2.0", "size": 55079 }
[ "java.io.OutputStream", "java.util.Properties" ]
import java.io.OutputStream; import java.util.Properties;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
1,995,509
static String replyToString(byte[] reply) { String result; try { result = new String(reply, DEFAULT_ENCODING); } catch (UnsupportedEncodingException uee) { uee.printStackTrace(); // not expected result = ""; } return result; }
static String replyToString(byte[] reply) { String result; try { result = new String(reply, DEFAULT_ENCODING); } catch (UnsupportedEncodingException uee) { uee.printStackTrace(); result = ""; } return result; }
/** * Converts an ADB reply to a string. */
Converts an ADB reply to a string
replyToString
{ "repo_name": "z7z8th/aster", "path": "src/com/android/ddmlib/AdbHelper.java", "license": "apache-2.0", "size": 29534 }
[ "java.io.UnsupportedEncodingException" ]
import java.io.UnsupportedEncodingException;
import java.io.*;
[ "java.io" ]
java.io;
2,303,329