signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class Group { /** * Search for package name in the sorted regular expression
* list , if found return the group name . If not , return null .
* @ param pkgName Name of package to be found in the regular
* expression list . */
String regExpGroupName ( String pkgName ) { } }
|
for ( int j = 0 ; j < sortedRegExpList . size ( ) ; j ++ ) { String regexp = sortedRegExpList . get ( j ) ; if ( pkgName . startsWith ( regexp ) ) { return regExpGroupMap . get ( regexp ) ; } } return null ;
|
public class WSubMenu { /** * Indicates whether this sub menu is disabled in the given context .
* @ return true if this sub menu is disabled . */
@ Override public boolean isDisabled ( ) { } }
|
if ( isFlagSet ( ComponentModel . DISABLED_FLAG ) ) { return true ; } MenuContainer container = WebUtilities . getAncestorOfClass ( MenuContainer . class , this ) ; if ( container instanceof Disableable && ( ( Disableable ) container ) . isDisabled ( ) ) { return true ; } return false ;
|
public class FacesConfigConverterTypeImpl { /** * If not already created , a new < code > attribute < / code > element will be created and returned .
* Otherwise , the first existing < code > attribute < / code > element will be returned .
* @ return the instance defined for the element < code > attribute < / code > */
public FacesConfigAttributeType < FacesConfigConverterType < T > > getOrCreateAttribute ( ) { } }
|
List < Node > nodeList = childNode . get ( "attribute" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FacesConfigAttributeTypeImpl < FacesConfigConverterType < T > > ( this , "attribute" , childNode , nodeList . get ( 0 ) ) ; } return createAttribute ( ) ;
|
public class BusItinerary { /** * Remove a road segment from this itinerary .
* < p > The bus halts on the segment will also be removed .
* @ param segmentIndex is the index of the segment to remove .
* @ return < code > true < / code > if the segment was successfully removed , otherwise < code > false < / code > */
public boolean removeRoadSegment ( int segmentIndex ) { } }
|
if ( segmentIndex >= 0 && segmentIndex < this . roadSegments . getRoadSegmentCount ( ) ) { final RoadSegment segment = this . roadSegments . getRoadSegmentAt ( segmentIndex ) ; if ( segment != null ) { // Invalidate the bus halts on the segment
final Map < BusItineraryHalt , RoadSegment > segmentMap = new TreeMap < > ( ( obj1 , obj2 ) -> Integer . compare ( System . identityHashCode ( obj1 ) , System . identityHashCode ( obj2 ) ) ) ; final Iterator < BusItineraryHalt > haltIterator = this . validHalts . iterator ( ) ; while ( haltIterator . hasNext ( ) ) { final BusItineraryHalt halt = haltIterator . next ( ) ; final int sgmtIndex = halt . getRoadSegmentIndex ( ) ; if ( sgmtIndex == segmentIndex ) { segmentMap . put ( halt , null ) ; } else { final RoadSegment sgmt = this . roadSegments . getRoadSegmentAt ( sgmtIndex ) ; segmentMap . put ( halt , sgmt ) ; } } // Remove the road segment itself on the segment
this . roadSegments . removeRoadSegmentAt ( segmentIndex ) ; // Force the road segment indexes
for ( final Entry < BusItineraryHalt , RoadSegment > entry : segmentMap . entrySet ( ) ) { final BusItineraryHalt halt = entry . getKey ( ) ; final RoadSegment sgmt = entry . getValue ( ) ; if ( sgmt == null ) { halt . setRoadSegmentIndex ( - 1 ) ; halt . setPositionOnSegment ( Float . NaN ) ; halt . checkPrimitiveValidity ( ) ; } else { final int sgmtIndex = halt . getRoadSegmentIndex ( ) ; final int idx = this . roadSegments . indexOf ( sgmt ) ; if ( idx != sgmtIndex ) { halt . setRoadSegmentIndex ( idx ) ; halt . checkPrimitiveValidity ( ) ; } } } // Change the road network reference
if ( this . roadSegments . isEmpty ( ) && this . roadNetwork != null ) { final RoadNetwork network = this . roadNetwork . get ( ) ; if ( network != null ) { network . removeRoadNetworkListener ( this ) ; } this . roadNetwork = null ; } fireShapeChanged ( new BusChangeEvent ( this , BusChangeEventType . SEGMENT_REMOVED , segment , segmentIndex , "shape" , // $ NON - NLS - 1 $
null , null ) ) ; checkPrimitiveValidity ( ) ; return true ; } } return false ;
|
public class SnappyCodec { /** * Create a { @ link CompressionOutputStream } that will write to the given
* { @ link OutputStream } with the given { @ link Compressor } .
* @ param out
* the location for the final output stream
* @ param compressor
* compressor to use
* @ return a stream the user can write uncompressed data to have it
* compressed
* @ throws IOException */
@ Override public CompressionOutputStream createOutputStream ( OutputStream out , Compressor compressor ) throws IOException { } }
|
if ( ! isNativeSnappyLoaded ( conf ) ) { throw new CodecUnavailableException ( "native snappy library not available" ) ; } int bufferSize = conf . getInt ( IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY , IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT ) ; int compressionOverhead = ( bufferSize / 6 ) + 32 ; return new BlockCompressorStream ( out , compressor , bufferSize , compressionOverhead ) ;
|
public class Scanner { /** * Returns a charset object for the given charset name .
* @ throws NullPointerException is csn is null
* @ throws IllegalArgumentException if the charset is not supported */
private static Charset toCharset ( String csn ) { } }
|
Objects . requireNonNull ( csn , "charsetName" ) ; try { return Charset . forName ( csn ) ; } catch ( IllegalCharsetNameException | UnsupportedCharsetException e ) { // IllegalArgumentException should be thrown
throw new IllegalArgumentException ( e ) ; }
|
public class RepositoryMockAgent { /** * If the belief its a count of some sort his counting its increased by one .
* @ param bName
* - the name of the belief count . */
private void increaseBeliefCount ( String bName ) { } }
|
Object belief = this . getBelief ( bName ) ; int count = 0 ; if ( belief != null ) { count = ( Integer ) belief ; } this . setBelief ( bName , count + 1 ) ;
|
public class InternalWindowProcessFunction { /** * Initialization method for the function . It is called before the actual working methods . */
public void open ( Context < K , W > ctx ) throws Exception { } }
|
this . ctx = ctx ; this . windowAssigner . open ( ctx ) ;
|
public class Resolve { /** * Resolve an appropriate implicit this instance for t ' s container .
* JLS 8.8.5.1 and 15.9.2 */
Type resolveImplicitThis ( DiagnosticPosition pos , Env < AttrContext > env , Type t ) { } }
|
return resolveImplicitThis ( pos , env , t , false ) ;
|
public class AbstractPreferenceFragment { /** * Obtains the appearance of the dividers , which are shown above preference categories , from the
* activity ' s theme . */
private void obtainDividerDecoration ( ) { } }
|
int dividerColor ; try { dividerColor = ThemeUtil . getColor ( getActivity ( ) , R . attr . dividerColor ) ; } catch ( NotFoundException e ) { dividerColor = ContextCompat . getColor ( getActivity ( ) , R . color . preference_divider_color_light ) ; } this . dividerDecoration . setDividerColor ( dividerColor ) ; this . dividerDecoration . setDividerHeight ( DisplayUtil . dpToPixels ( getActivity ( ) , 1 ) ) ;
|
public class CommerceRegionPersistenceImpl { /** * Returns the last commerce region in the ordered set where commerceCountryId = & # 63 ; and active = & # 63 ; .
* @ param commerceCountryId the commerce country ID
* @ param active the active
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce region
* @ throws NoSuchRegionException if a matching commerce region could not be found */
@ Override public CommerceRegion findByC_A_Last ( long commerceCountryId , boolean active , OrderByComparator < CommerceRegion > orderByComparator ) throws NoSuchRegionException { } }
|
CommerceRegion commerceRegion = fetchByC_A_Last ( commerceCountryId , active , orderByComparator ) ; if ( commerceRegion != null ) { return commerceRegion ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "commerceCountryId=" ) ; msg . append ( commerceCountryId ) ; msg . append ( ", active=" ) ; msg . append ( active ) ; msg . append ( "}" ) ; throw new NoSuchRegionException ( msg . toString ( ) ) ;
|
public class CmsDefaultUserSettings { /** * Initializes the preference configuration . < p >
* Note that this method should only be called once the resource types have been initialized , but after addPreference has been called for all configured preferences .
* @ param wpManager the active workplace manager */
public void initPreferences ( CmsWorkplaceManager wpManager ) { } }
|
CURRENT_DEFAULT_SETTINGS = this ; Class < ? > accessorClass = CmsUserSettingsStringPropertyWrapper . class ; // first initialize all built - in preferences . these are :
// a ) Bean properties of CmsUserSettingsStringPropertyWrapper
// b ) Editor setting preferences
// c ) Gallery setting preferences
PropertyDescriptor [ ] propDescs = PropertyUtils . getPropertyDescriptors ( accessorClass ) ; for ( PropertyDescriptor descriptor : propDescs ) { String name = descriptor . getName ( ) ; Method getter = descriptor . getReadMethod ( ) ; Method setter = descriptor . getWriteMethod ( ) ; if ( ( getter == null ) || ( setter == null ) ) { continue ; } PrefMetadata metadata = getter . getAnnotation ( PrefMetadata . class ) ; if ( metadata == null ) { CmsBuiltinPreference preference = new CmsBuiltinPreference ( name ) ; m_preferences . put ( preference . getName ( ) , preference ) ; } else { try { Constructor < ? > constructor = metadata . type ( ) . getConstructor ( String . class ) ; I_CmsPreference pref = ( I_CmsPreference ) constructor . newInstance ( name ) ; m_preferences . put ( pref . getName ( ) , pref ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } } Map < String , String > editorValues = getEditorSettings ( ) ; if ( wpManager . getWorkplaceEditorManager ( ) != null ) { for ( String resType : wpManager . getWorkplaceEditorManager ( ) . getConfigurableEditors ( ) . keySet ( ) ) { if ( ! editorValues . containsKey ( resType ) ) { editorValues . put ( resType , null ) ; } } } for ( Map . Entry < String , String > editorSettingEntry : editorValues . entrySet ( ) ) { CmsEditorPreference pref = new CmsEditorPreference ( editorSettingEntry . getKey ( ) , editorSettingEntry . getValue ( ) ) ; m_preferences . put ( pref . getName ( ) , pref ) ; } Map < String , String > galleryValues = new HashMap < String , String > ( getStartGalleriesSettings ( ) ) ; for ( String key : wpManager . getGalleries ( ) . keySet ( ) ) { if ( ! galleryValues . containsKey ( key ) ) { galleryValues . put ( key , null ) ; } } for ( Map . Entry < String , String > galleryEntry : galleryValues . entrySet ( ) ) { CmsStartGallleryPreference pref = new CmsStartGallleryPreference ( galleryEntry . getKey ( ) , galleryEntry . getValue ( ) ) ; m_preferences . put ( pref . 
getName ( ) , pref ) ; } // Now process configured preferences . Each configuration entry is either
// for a built - in preference , in which case we create a wrapper around the existing preference ,
// or for a custom user - defined preference .
for ( CmsPreferenceData prefData : m_preferenceData ) { String name = prefData . getName ( ) ; I_CmsPreference pref = null ; if ( m_preferences . containsKey ( name ) ) { // we first remove the existing preference , because in a LinkedHashMap , put ( key , value ) will not
// update the position of the entry if the key already exists
pref = new CmsWrapperPreference ( prefData , m_preferences . remove ( name ) ) ; } else { pref = new CmsUserDefinedPreference ( prefData . getName ( ) , prefData . getDefaultValue ( ) , prefData . getPropertyDefinition ( ) , prefData . getTab ( ) ) ; } m_preferences . put ( pref . getName ( ) , pref ) ; pref . setValue ( this , prefData . getDefaultValue ( ) ) ; }
|
public class Calendar { /** * TODO : Check if calType can be passed via keyword on loc parameter instead . */
public static String getDateTimeFormatString ( ULocale loc , String calType , int dateStyle , int timeStyle ) { } }
|
if ( timeStyle < DateFormat . NONE || timeStyle > DateFormat . SHORT ) { throw new IllegalArgumentException ( "Illegal time style " + timeStyle ) ; } if ( dateStyle < DateFormat . NONE || dateStyle > DateFormat . SHORT ) { throw new IllegalArgumentException ( "Illegal date style " + dateStyle ) ; } PatternData patternData = PatternData . make ( loc , calType ) ; // Resolve a pattern for the date / time style
String pattern = null ; if ( ( timeStyle >= 0 ) && ( dateStyle >= 0 ) ) { pattern = SimpleFormatterImpl . formatRawPattern ( patternData . getDateTimePattern ( dateStyle ) , 2 , 2 , patternData . patterns [ timeStyle ] , patternData . patterns [ dateStyle + 4 ] ) ; } else if ( timeStyle >= 0 ) { pattern = patternData . patterns [ timeStyle ] ; } else if ( dateStyle >= 0 ) { pattern = patternData . patterns [ dateStyle + 4 ] ; } else { throw new IllegalArgumentException ( "No date or time style specified" ) ; } return pattern ;
|
public class HttpConnectionHelper { /** * 连接Http Server , 准备传送serialized - object
* @ param httpServerParam
* @ return
* @ throws java . lang . Exception */
public HttpURLConnection connectService ( HttpServerParam httpServerParam , String userPassword ) throws Exception { } }
|
HttpURLConnection httpURLConnection = null ; URL url = null ; try { url = new URL ( "http" , httpServerParam . getHost ( ) , httpServerParam . getPort ( ) , httpServerParam . getServletPath ( ) ) ; Debug . logVerbose ( "[JdonFramework]Service url=" + url , module ) ; httpURLConnection = ( HttpURLConnection ) url . openConnection ( ) ; httpURLConnection . setRequestMethod ( "POST" ) ; httpURLConnection . setDoOutput ( true ) ; httpURLConnection . setDoInput ( true ) ; httpURLConnection . setUseCaches ( false ) ; httpURLConnection . setRequestProperty ( "Content-Type" , "application/x-java-serialized-object" ) ; if ( ( userPassword != null ) && ( ! userPassword . equals ( "null" ) ) ) { String encoded = "Basic " + Base64 . encodeBytes ( userPassword . getBytes ( "UTF-8" ) ) ; httpURLConnection . setRequestProperty ( "Authorization" , encoded ) ; } } catch ( Exception ex ) { Debug . logError ( "[JdonFramework] connectServer " + url + " error: " + ex , module ) ; throw new Exception ( ex ) ; } return httpURLConnection ;
|
public class DeviceAttributeDAODefaultImpl { public void setAttributeValue ( final AttributeValue_3 attributeValue_3 ) { } }
|
deviceAttribute_3 = new DeviceAttribute_3 ( attributeValue_3 ) ; use_union = false ; attributeValue_5 . name = attributeValue_3 . name ; attributeValue_5 . quality = attributeValue_3 . quality ; attributeValue_5 . time = attributeValue_3 . time ; attributeValue_5 . r_dim = attributeValue_3 . r_dim ; attributeValue_5 . w_dim = attributeValue_3 . w_dim ; attributeValue_5 . err_list = attributeValue_3 . err_list ; attributeValue_5 . data_format = AttrDataFormat . FMT_UNKNOWN ;
|
public class SimpleDialogFragment { /** * Key method for extending { @ link com . avast . android . dialogs . fragment . SimpleDialogFragment } .
* Children can extend this to add more things to base builder . */
@ Override protected BaseDialogFragment . Builder build ( BaseDialogFragment . Builder builder ) { } }
|
final CharSequence title = getTitle ( ) ; if ( ! TextUtils . isEmpty ( title ) ) { builder . setTitle ( title ) ; } final CharSequence message = getMessage ( ) ; if ( ! TextUtils . isEmpty ( message ) ) { builder . setMessage ( message ) ; } final CharSequence positiveButtonText = getPositiveButtonText ( ) ; if ( ! TextUtils . isEmpty ( positiveButtonText ) ) { builder . setPositiveButton ( positiveButtonText , new View . OnClickListener ( ) { @ Override public void onClick ( View view ) { for ( IPositiveButtonDialogListener listener : getPositiveButtonDialogListeners ( ) ) { listener . onPositiveButtonClicked ( mRequestCode ) ; } dismiss ( ) ; } } ) ; } final CharSequence negativeButtonText = getNegativeButtonText ( ) ; if ( ! TextUtils . isEmpty ( negativeButtonText ) ) { builder . setNegativeButton ( negativeButtonText , new View . OnClickListener ( ) { @ Override public void onClick ( View view ) { for ( INegativeButtonDialogListener listener : getNegativeButtonDialogListeners ( ) ) { listener . onNegativeButtonClicked ( mRequestCode ) ; } dismiss ( ) ; } } ) ; } final CharSequence neutralButtonText = getNeutralButtonText ( ) ; if ( ! TextUtils . isEmpty ( neutralButtonText ) ) { builder . setNeutralButton ( neutralButtonText , new View . OnClickListener ( ) { @ Override public void onClick ( View view ) { for ( INeutralButtonDialogListener listener : getNeutralButtonDialogListeners ( ) ) { listener . onNeutralButtonClicked ( mRequestCode ) ; } dismiss ( ) ; } } ) ; } return builder ;
|
public class WriterInitializerFactory { /** * Provides WriterInitializer based on the writer . Mostly writer is decided by the Writer builder ( and destination ) that user passes .
* If there ' s more than one branch , it will instantiate same number of WriterInitializer instance as number of branches and combine it into MultiWriterInitializer .
* @ param state
* @ return WriterInitializer */
public static WriterInitializer newInstace ( State state , WorkUnitStream workUnits ) { } }
|
int branches = state . getPropAsInt ( ConfigurationKeys . FORK_BRANCHES_KEY , 1 ) ; if ( branches == 1 ) { return newSingleInstance ( state , workUnits , branches , 0 ) ; } List < WriterInitializer > wis = Lists . newArrayList ( ) ; for ( int branchId = 0 ; branchId < branches ; branchId ++ ) { wis . add ( newSingleInstance ( state , workUnits , branches , branchId ) ) ; } return new MultiWriterInitializer ( wis ) ;
|
public class NativeJavaPackage { /** * need to look for a class by that name */
NativeJavaPackage forcePackage ( String name , Scriptable scope ) { } }
|
Object cached = super . get ( name , this ) ; if ( cached != null && cached instanceof NativeJavaPackage ) { return ( NativeJavaPackage ) cached ; } String newPackage = packageName . length ( ) == 0 ? name : packageName + "." + name ; NativeJavaPackage pkg = new NativeJavaPackage ( true , newPackage , classLoader ) ; ScriptRuntime . setObjectProtoAndParent ( pkg , scope ) ; super . put ( name , this , pkg ) ; return pkg ;
|
public class JfifReader { /** * Performs the Jfif data extraction , adding found values to the specified
* instance of { @ link Metadata } . */
public void extract ( @ NotNull final RandomAccessReader reader , @ NotNull final Metadata metadata ) { } }
|
JfifDirectory directory = new JfifDirectory ( ) ; metadata . addDirectory ( directory ) ; try { // For JFIF , the tag number is also the offset into the segment
directory . setInt ( JfifDirectory . TAG_VERSION , reader . getUInt16 ( JfifDirectory . TAG_VERSION ) ) ; directory . setInt ( JfifDirectory . TAG_UNITS , reader . getUInt8 ( JfifDirectory . TAG_UNITS ) ) ; directory . setInt ( JfifDirectory . TAG_RESX , reader . getUInt16 ( JfifDirectory . TAG_RESX ) ) ; directory . setInt ( JfifDirectory . TAG_RESY , reader . getUInt16 ( JfifDirectory . TAG_RESY ) ) ; directory . setInt ( JfifDirectory . TAG_THUMB_WIDTH , reader . getUInt8 ( JfifDirectory . TAG_THUMB_WIDTH ) ) ; directory . setInt ( JfifDirectory . TAG_THUMB_HEIGHT , reader . getUInt8 ( JfifDirectory . TAG_THUMB_HEIGHT ) ) ; } catch ( IOException me ) { directory . addError ( me . getMessage ( ) ) ; }
|
public class InternalXbaseWithAnnotationsLexer { /** * $ ANTLR start " T _ _ 74" */
public final void mT__74 ( ) throws RecognitionException { } }
|
try { int _type = T__74 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // InternalXbaseWithAnnotations . g : 72:7 : ( ' typeof ' )
// InternalXbaseWithAnnotations . g : 72:9 : ' typeof '
{ match ( "typeof" ) ; } state . type = _type ; state . channel = _channel ; } finally { }
|
public class Distribution { /** * Creates an Laplace smoothed Distribution from the given counter , ie adds one count
* to every item , including unseen ones , and divides by the total count .
* @ return a new add - 1 smoothed Distribution */
public static < E > Distribution < E > laplaceSmoothedDistribution ( Counter < E > counter , int numberOfKeys ) { } }
|
return laplaceSmoothedDistribution ( counter , numberOfKeys , 1.0 ) ;
|
public class ObjIteratorEx { /** * Lazy evaluation .
* @ param iteratorSupplier
* @ return */
public static < T > ObjIteratorEx < T > of ( final Supplier < ? extends Iterator < ? extends T > > iteratorSupplier ) { } }
|
N . checkArgNotNull ( iteratorSupplier , "iteratorSupplier" ) ; return new ObjIteratorEx < T > ( ) { private Iterator < ? extends T > iter = null ; private ObjIteratorEx < ? extends T > iterEx = null ; private boolean isInitialized = false ; @ Override public boolean hasNext ( ) { if ( isInitialized == false ) { init ( ) ; } return iter . hasNext ( ) ; } @ Override public T next ( ) { if ( isInitialized == false ) { init ( ) ; } return iter . next ( ) ; } @ Override public void skip ( long n ) { N . checkArgNotNegative ( n , "n" ) ; if ( isInitialized == false ) { init ( ) ; } if ( iterEx != null ) { iterEx . skip ( n ) ; } else { super . skip ( n ) ; } } @ Override public long count ( ) { if ( isInitialized == false ) { init ( ) ; } if ( iterEx != null ) { return iterEx . count ( ) ; } else { return super . count ( ) ; } } @ Override public void close ( ) { if ( isInitialized == false ) { init ( ) ; } if ( iterEx != null ) { iterEx . close ( ) ; } } private void init ( ) { if ( isInitialized == false ) { isInitialized = true ; iter = iteratorSupplier . get ( ) ; iterEx = iter instanceof ObjIteratorEx ? ( ObjIteratorEx < T > ) iter : null ; } } } ;
|
public class Compiler { /** * Parses the given morphlineFile , then finds the morphline with the given morphlineId within ,
* then compiles the morphline and returns the corresponding morphline command . The returned
* command will feed records into finalChild . */
public Command compile ( File morphlineFile , String morphlineId , MorphlineContext morphlineContext , Command finalChild , Config ... overrides ) { } }
|
Config config ; try { config = parse ( morphlineFile , overrides ) ; } catch ( IOException e ) { throw new MorphlineCompilationException ( "Cannot parse morphline file: " + morphlineFile , null , e ) ; } Config morphlineConfig = find ( morphlineId , config , morphlineFile . getPath ( ) ) ; Command morphlineCommand = compile ( morphlineConfig , morphlineContext , finalChild ) ; return morphlineCommand ;
|
public class SamlSettingsApi { /** * Delete upload metadata .
* Delete metadata .
* @ param location The region where send metadata . ( optional )
* @ return DeleteMetadataResponse
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public DeleteMetadataResponse deleteMetadata ( String location ) throws ApiException { } }
|
ApiResponse < DeleteMetadataResponse > resp = deleteMetadataWithHttpInfo ( location ) ; return resp . getData ( ) ;
|
public class PathController { /** * Get information for a specific path name / profileId or pathId
* @ param model
* @ param pathIdentifier
* @ param profileIdentifier
* @ return */
@ RequestMapping ( value = "/api/path/{pathIdentifier}" , method = RequestMethod . GET ) @ ResponseBody public EndpointOverride getPath ( Model model , @ PathVariable String pathIdentifier , @ RequestParam ( required = false ) String profileIdentifier , @ RequestParam ( value = "typeFilter[]" , required = false ) String [ ] typeFilter , @ RequestParam ( value = "clientUUID" , defaultValue = Constants . PROFILE_CLIENT_DEFAULT_ID ) String clientUUID ) throws Exception { } }
|
Identifiers identifiers = ControllerUtils . convertProfileAndPathIdentifier ( profileIdentifier , pathIdentifier ) ; return PathOverrideService . getInstance ( ) . getPath ( identifiers . getPathId ( ) , clientUUID , typeFilter ) ;
|
public class CommonOps_ZDRM { /** * Creates an identity matrix of the specified size . < br >
* < br >
* a < sub > ij < / sub > = 0 + 0i if i & ne ; j < br >
* a < sub > ij < / sub > = 1 + 0i if i = j < br >
* @ param width The width and height of the identity matrix .
* @ return A new instance of an identity matrix . */
public static ZMatrixRMaj identity ( int width ) { } }
|
ZMatrixRMaj A = new ZMatrixRMaj ( width , width ) ; for ( int i = 0 ; i < width ; i ++ ) { A . set ( i , i , 1 , 0 ) ; } return A ;
|
public class ExtensionManager { /** * Notifies the extensions that the kernel is started */
public void started ( ) { } }
|
for ( KernelExtension kernelExtension : kernelExtensions . keySet ( ) ) { kernelExtension . started ( kernelExtensions . get ( kernelExtension ) ) ; }
|
public class WebSocketClientHandshaker08 { /** * Sends the opening request to the server :
* < pre >
* GET / chat HTTP / 1.1
* Host : server . example . com
* Upgrade : websocket
* Connection : Upgrade
* Sec - WebSocket - Key : dGhlIHNhbXBsZSBub25jZQ = =
* Sec - WebSocket - Origin : http : / / example . com
* Sec - WebSocket - Protocol : chat , superchat
* Sec - WebSocket - Version : 8
* < / pre > */
@ Override protected FullHttpRequest newHandshakeRequest ( ) { } }
|
// Get path
URI wsURL = uri ( ) ; String path = rawPath ( wsURL ) ; // Get 16 bit nonce and base 64 encode it
byte [ ] nonce = WebSocketUtil . randomBytes ( 16 ) ; String key = WebSocketUtil . base64 ( nonce ) ; String acceptSeed = key + MAGIC_GUID ; byte [ ] sha1 = WebSocketUtil . sha1 ( acceptSeed . getBytes ( CharsetUtil . US_ASCII ) ) ; expectedChallengeResponseString = WebSocketUtil . base64 ( sha1 ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "WebSocket version 08 client handshake key: {}, expected response: {}" , key , expectedChallengeResponseString ) ; } // Format request
FullHttpRequest request = new DefaultFullHttpRequest ( HttpVersion . HTTP_1_1 , HttpMethod . GET , path ) ; HttpHeaders headers = request . headers ( ) ; if ( customHeaders != null ) { headers . add ( customHeaders ) ; } headers . set ( HttpHeaderNames . UPGRADE , HttpHeaderValues . WEBSOCKET ) . set ( HttpHeaderNames . CONNECTION , HttpHeaderValues . UPGRADE ) . set ( HttpHeaderNames . SEC_WEBSOCKET_KEY , key ) . set ( HttpHeaderNames . HOST , websocketHostValue ( wsURL ) ) . set ( HttpHeaderNames . SEC_WEBSOCKET_ORIGIN , websocketOriginValue ( wsURL ) ) ; String expectedSubprotocol = expectedSubprotocol ( ) ; if ( expectedSubprotocol != null && ! expectedSubprotocol . isEmpty ( ) ) { headers . set ( HttpHeaderNames . SEC_WEBSOCKET_PROTOCOL , expectedSubprotocol ) ; } headers . set ( HttpHeaderNames . SEC_WEBSOCKET_VERSION , "8" ) ; return request ;
|
public class Sql2o { /** * Begins a transaction with isolation level { @ link java . sql . Connection # TRANSACTION _ READ _ COMMITTED } . Every statement executed on the return { @ link Connection }
* instance , will be executed in the transaction . It is very important to always call either the { @ link org . sql2o . Connection # commit ( ) }
* method or the { @ link org . sql2o . Connection # rollback ( ) } method to close the transaction . Use proper try - catch logic .
* @ param connectionSource the { @ link ConnectionSource } implementation substitution ,
* that will be used instead of one from { @ link Sql2o } instance .
* @ return the { @ link Connection } instance to use to run statements in the transaction . */
public Connection beginTransaction ( ConnectionSource connectionSource ) { } }
|
return this . beginTransaction ( connectionSource , java . sql . Connection . TRANSACTION_READ_COMMITTED ) ;
|
public class ReflectionScanStatic { /** * 通过扫描 , 获取反射对象 */
private Reflections getReflection ( List < String > packNameList ) { } }
|
// filter
FilterBuilder filterBuilder = new FilterBuilder ( ) . includePackage ( Constants . DISCONF_PACK_NAME ) ; for ( String packName : packNameList ) { filterBuilder = filterBuilder . includePackage ( packName ) ; } Predicate < String > filter = filterBuilder ; // urls
Collection < URL > urlTotals = new ArrayList < URL > ( ) ; for ( String packName : packNameList ) { Set < URL > urls = ClasspathHelper . forPackage ( packName ) ; urlTotals . addAll ( urls ) ; } Reflections reflections = new Reflections ( new ConfigurationBuilder ( ) . filterInputsBy ( filter ) . setScanners ( new SubTypesScanner ( ) . filterResultsBy ( filter ) , new TypeAnnotationsScanner ( ) . filterResultsBy ( filter ) , new FieldAnnotationsScanner ( ) . filterResultsBy ( filter ) , new MethodAnnotationsScanner ( ) . filterResultsBy ( filter ) , new MethodParameterScanner ( ) ) . setUrls ( urlTotals ) ) ; return reflections ;
|
public class FedoraEventImpl { /** * The JCR - based Event : : getPath contains some Modeshape artifacts that must be removed or modified in
* order to correspond to the public resource path . For example , JCR Events will contain a trailing
* / jcr : content for Binaries , a trailing / propName for properties , and / # / notation for URI fragments . */
private static String cleanPath ( final Event event ) throws RepositoryException { } }
|
// remove any trailing data for property changes
final String path = PROPERTY_TYPES . contains ( event . getType ( ) ) ? event . getPath ( ) . substring ( 0 , event . getPath ( ) . lastIndexOf ( "/" ) ) : event . getPath ( ) ; // reformat any hash URIs and remove any trailing / jcr : content
final HashConverter converter = new HashConverter ( ) ; return converter . reverse ( ) . convert ( path . replaceAll ( "/" + JCR_CONTENT , "" ) ) ;
|
public class TIFFField { /** * Compares this < code > TIFFField < / code > with another
* < code > TIFFField < / code > by comparing the tags .
* < p > < b > Note : this class has a natural ordering that is inconsistent
* with < code > equals ( ) < / code > . < / b >
* @ throws IllegalArgumentException if the parameter is < code > null < / code > .
* @ throws ClassCastException if the parameter is not a
* < code > TIFFField < / code > . */
public int compareTo ( Object o ) { } }
|
if ( o == null ) { throw new IllegalArgumentException ( ) ; } int oTag = ( ( TIFFField ) o ) . getTag ( ) ; if ( tag < oTag ) { return - 1 ; } else if ( tag > oTag ) { return 1 ; } else { return 0 ; }
|
public class CraftingHelper { /** * Take a list of ItemStacks and amalgamate where possible . < br >
* @ param inputStacks a list of ItemStacks
* @ return a list of ItemStacks , where all items of the same type are grouped into one stack . */
public static List < ItemStack > consolidateItemStacks ( List < ItemStack > inputStacks ) { } }
|
// Horrible n ^ 2 method - we should do something nicer if this ever becomes a bottleneck .
List < ItemStack > outputStacks = new ArrayList < ItemStack > ( ) ; for ( ItemStack sourceIS : inputStacks ) { boolean bFound = false ; for ( ItemStack destIS : outputStacks ) { if ( destIS != null && sourceIS != null && itemStackIngredientsMatch ( destIS , sourceIS ) ) { bFound = true ; destIS . setCount ( destIS . getCount ( ) + sourceIS . getCount ( ) ) ; } } if ( ! bFound ) outputStacks . add ( sourceIS . copy ( ) ) ; } return outputStacks ;
|
public class BigendianEncoding { /** * Appends the base16 encoding of the specified { @ code value } to the { @ code dest } .
* @ param value the value to be converted .
* @ param dest the destination char array .
* @ param destOffset the starting offset in the destination char array . */
static void longToBase16String ( long value , char [ ] dest , int destOffset ) { } }
|
byteToBase16 ( ( byte ) ( value >> 56 & 0xFFL ) , dest , destOffset ) ; byteToBase16 ( ( byte ) ( value >> 48 & 0xFFL ) , dest , destOffset + BYTE_BASE16 ) ; byteToBase16 ( ( byte ) ( value >> 40 & 0xFFL ) , dest , destOffset + 2 * BYTE_BASE16 ) ; byteToBase16 ( ( byte ) ( value >> 32 & 0xFFL ) , dest , destOffset + 3 * BYTE_BASE16 ) ; byteToBase16 ( ( byte ) ( value >> 24 & 0xFFL ) , dest , destOffset + 4 * BYTE_BASE16 ) ; byteToBase16 ( ( byte ) ( value >> 16 & 0xFFL ) , dest , destOffset + 5 * BYTE_BASE16 ) ; byteToBase16 ( ( byte ) ( value >> 8 & 0xFFL ) , dest , destOffset + 6 * BYTE_BASE16 ) ; byteToBase16 ( ( byte ) ( value & 0xFFL ) , dest , destOffset + 7 * BYTE_BASE16 ) ;
|
public class EnvLoader { /** * Add listener .
* @ param listener object to listen for environment create / destroy
* @ param loader the context class loader */
public static void addClassLoaderListener ( EnvLoaderListener listener , ClassLoader loader ) { } }
|
for ( ; loader != null ; loader = loader . getParent ( ) ) { if ( loader instanceof EnvironmentClassLoader ) { ( ( EnvironmentClassLoader ) loader ) . addListener ( listener ) ; return ; } } /* if ( _ envSystemClassLoader ! = null ) {
_ envSystemClassLoader . addNotificationListener ( listener ) ;
return ; */
_globalLoaderListeners . add ( listener ) ;
|
public class TldFernManager { /** * Looks up the fern with the specified value . If non exist a new one is created and returned .
* @ param value The fern ' s value
* @ return The fern associated with that value */
public TldFernFeature lookupFern ( int value ) { } }
|
// Lazily create and cache the fern for this value on first lookup.
TldFernFeature fern = table[value];
if (fern != null) {
    return fern;
}
fern = createFern();
fern.init(value);
table[value] = fern;
return fern;
|
public class HBaseReader { /** * ( non - Javadoc )
* @ see
* com . impetus . client . hbase . Reader # LoadData ( org . apache . hadoop . hbase . client
* . HTable , java . lang . String ) */
@ Override public List < HBaseData > LoadData ( HTableInterface hTable , Object rowKey , Filter filter , String ... columns ) throws IOException { } }
|
return LoadData ( hTable , Bytes . toString ( hTable . getTableName ( ) ) , rowKey , filter , columns ) ;
|
public class PropertyRendererRegistry { /** * Gets a renderer for the given property type . The lookup is as follow :
* < ul >
* < li > if a renderer was registered with
* { @ link # registerRenderer ( Class , TableCellRenderer ) } , it is returned ,
* else < / li >
* < li > if a renderer class was registered with
* { @ link # registerRenderer ( Class , Class ) } , it is returned , else
* < li >
* < li > it returns null . < / li >
* < / ul >
* @ param type
* @ return a renderer editor suitable for the Property type or null if none
* found */
public synchronized TableCellRenderer getRenderer ( Class < ? > type ) { } }
|
TableCellRenderer renderer = null;
Object value = typeToRenderer.get(type);
if (value instanceof TableCellRenderer) {
    // A renderer instance was registered directly: return it as-is.
    renderer = (TableCellRenderer) value;
} else if (value instanceof Class<?>) {
    // A renderer class was registered: instantiate it via its no-arg
    // constructor. Class.newInstance() is deprecated (it rethrows checked
    // constructor exceptions unchecked); use getDeclaredConstructor() instead.
    try {
        renderer = ((Class<? extends TableCellRenderer>) value)
                .getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException ex) {
        // Covers NoSuchMethod, Instantiation, IllegalAccess and
        // InvocationTarget exceptions.
        Logger.getLogger(PropertyRendererRegistry.class.getName()).log(Level.SEVERE, null, ex);
    }
}
return renderer;
|
public class CmsFocalPoint { /** * Positions the center of this widget over the given coordinates . < p >
* @ param x the x coordinate
* @ param y the y coordinate */
public void setCenterCoordsRelativeToParent ( int x , int y ) { } }
|
// The widget is 20x20 px; shift by half its size so the center lands on (x, y).
final int halfSize = 10;
Style style = getElement().getStyle();
style.setLeft(x - halfSize, Unit.PX);
style.setTop(y - halfSize, Unit.PX);
|
public class Parameter { /** * { @ inheritDoc }
* @ throws NullPointerException { @ inheritDoc } */
public < T extends Annotation > T getAnnotation ( Class < T > annotationClass ) { } }
|
Objects.requireNonNull(annotationClass);
// getAnnotationsByType resolves repeatable annotations; return the first
// occurrence, or null when the annotation is absent.
T[] found = getAnnotationsByType(annotationClass);
if (found.length == 0) {
    return null;
}
return found[0];
|
public class HandlerUtils { /** * Creates a copy of the passed in ILF node in the PLF if not already there as well as creating
* any ancestor nodes along the path from this node up to the layout root if they are not there . */
public static Element createPlfNodeAndPath ( Element compViewNode , boolean includeChildNodes , IPerson person ) throws PortalException { } }
|
// Ensure the parent of compViewNode exists in the PLF (creating the ancestor
// path if necessary), then copy or move the node itself into the PLF.
// NOTE(review): getPLFNode(..., true, false) presumably means "create if
// missing, without children" — confirm against its declaration.
// first attempt to get parent
Element compViewParent = ( Element ) compViewNode . getParentNode ( ) ; Element plfParent = getPLFNode ( compViewParent , person , true , false ) ; Document plf = ( Document ) person . getAttribute ( Constants . PLF ) ; // if ilf copy being created we can append to parent and use the
// position set to place it .
// Fragment-owned (ILF) nodes get a copy created in the PLF; user-owned nodes
// are created or moved directly.
if ( compViewNode . getAttribute ( Constants . ATT_ID ) . startsWith ( Constants . FRAGMENT_ID_USER_PREFIX ) ) return createILFCopy ( compViewNode , compViewParent , includeChildNodes , plf , plfParent , person ) ; return createOrMovePLFOwnedNode ( compViewNode , compViewParent , true , // create if not found
includeChildNodes , plf , plfParent , person ) ;
|
public class SquareEdge { /** * Returns the destination node . */
public < T extends SquareNode > T destination ( SquareNode src ) { } }
|
// Return the node at the opposite end of this edge from src.
if (src == a) {
    return (T) b;
}
if (src == b) {
    return (T) a;
}
throw new IllegalArgumentException("BUG! src is not a or b");
|
public class SpeechToText { /** * Delete a job .
* Deletes the specified job . You cannot delete a job that the service is actively processing . Once you delete a job ,
* its results are no longer available . The service automatically deletes a job and its results when the time to live
* for the results expires . You must use credentials for the instance of the service that owns a job to delete it .
* * * See also : * * [ Deleting a job ] ( https : / / cloud . ibm . com / docs / services / speech - to - text / async . html # delete - async ) .
* @ param deleteJobOptions the { @ link DeleteJobOptions } containing the options for the call
* @ return a { @ link ServiceCall } with a response type of Void */
public ServiceCall < Void > deleteJob ( DeleteJobOptions deleteJobOptions ) { } }
|
Validator.notNull(deleteJobOptions, "deleteJobOptions cannot be null");
// DELETE v1/recognitions/{id}
String[] pathSegments = { "v1/recognitions" };
String[] pathParameters = { deleteJobOptions.id() };
String url = RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments, pathParameters);
RequestBuilder builder = RequestBuilder.delete(url);
// Attach the standard SDK analytics headers.
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("speech_to_text", "v1", "deleteJob");
for (Entry<String, String> sdkHeader : sdkHeaders.entrySet()) {
    builder.header(sdkHeader.getKey(), sdkHeader.getValue());
}
return createServiceCall(builder.build(), ResponseConverterUtils.getVoid());
|
public class OUser { /** * Checks if the user has the permission to access to the requested resource for the requested operation .
* @ param iResource
* Requested resource
* @ param iOperation
* Requested operation
* @ return The role that has granted the permission if any , otherwise null */
public ORole checkIfAllowed ( final String iResource , final int iOperation ) { } }
|
// Return the first role that grants the requested operation on the resource,
// or null when no role allows it.
for (final ORole role : roles) {
    if (role.allow(iResource, iOperation)) {
        return role;
    }
}
return null;
|
public class StartOrderResolver { /** * an appropriate container which can be linked into the image */
private List < Resolvable > resolve ( List < Resolvable > images ) { } }
|
List<Resolvable> resolved = new ArrayList<>();
// First pass: images without dependencies (e.g. pure data images) can always
// be created first; everything with links/volumes is deferred.
for (Resolvable image : images) {
    List<String> dependencies = extractDependentImagesFor(image);
    if (dependencies == null) {
        updateProcessedImages(image);
        resolved.add(image);
    } else {
        secondPass.add(image);
    }
}
// Next passes: order the remaining images by walking their dependencies.
if (secondPass.isEmpty()) {
    return resolved;
}
return resolveRemaining(resolved);
|
public class JDBDT { /** * Delete data sets from the database .
* < p > The data sets should be associated to tables
* with defined key columns . The key column values
* of each entry a data set determine the rows to delete .
* @ param dataSets Data sets for deletion .
* @ see TableBuilder # key ( String . . . )
* @ see Table # getKeyColumns ( )
* @ see # update ( DataSet . . . )
* @ see # insert ( DataSet . . . )
* @ see # populate ( DataSet . . . )
* @ see # populateIfChanged ( DataSet . . . )
* @ since 1.2 */
@ SafeVarargs public static void delete ( DataSet ... dataSets ) { } }
|
// Applies the DELETE setup operation to every supplied data set; CallInfo
// captures the call site for JDBDT's error reporting.
// NOTE(review): presumably each data set's table must define key columns
// (see TableBuilder#key) — confirm in DBSetup.delete.
foreach ( dataSets , DBSetup :: delete , CallInfo . create ( ) ) ;
|
public class EclipseAjcMojo { /** * write document to the file
* @ param document
* @ param file
* @ throws TransformerException
* @ throws FileNotFoundException */
private void writeDocument ( Document document , File file ) throws TransformerException , FileNotFoundException { } }
|
// Serialize the DOM document to the given file with indentation enabled.
document.normalize();
DOMSource source = new DOMSource(document);
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
// The original leaked this stream; ensure the OS file handle is released.
FileOutputStream out = new FileOutputStream(file);
try {
    transformer.transform(source, new StreamResult(out));
} finally {
    try {
        out.close();
    } catch (java.io.IOException ignored) {
        // best effort: the transform has already completed or thrown,
        // and IOException is not in this method's throws clause
    }
}
|
public class CreateInputSecurityGroupRequest { /** * List of IPv4 CIDR addresses to whitelist
* @ param whitelistRules
* List of IPv4 CIDR addresses to whitelist */
public void setWhitelistRules ( java . util . Collection < InputWhitelistRuleCidr > whitelistRules ) { } }
|
// A null collection clears the property; otherwise store a defensive copy so
// later mutation of the caller's collection cannot affect this request.
if (whitelistRules == null) {
    this.whitelistRules = null;
} else {
    this.whitelistRules = new java.util.ArrayList<InputWhitelistRuleCidr>(whitelistRules);
}
|
public class DataTables { /** * Creates a table , i . e . a list of lists , with { @ code numberOfColumns } columns from the given { @ code data } including the header .
* Example :
* < pre > { @ code
* table ( 2,
* " name " , " age " , / / header
* " Peter " , 20 , / / first row of data
* " Susan " , 35 ) ; / / second row of data
* } < / pre >
* @ param numberOfColumns the number of columns the resulting table should have
* @ param data the data of the table represented as a one - dimensional list ,
* the number of entries must be a multiple of { @ code numberOfColumns } .
* The first { @ code numberOfColumns } elements are taken as the header of the table ,
* the remaining values as data
* @ return a list of list that contains the same data as { @ code data } , but in a two - dimensional form
* @ throws java . lang . IllegalArgumentException if { @ code data . length % numberOfColumns ! = 0} */
public static Iterable < ? extends Iterable < ? > > table ( int numberOfColumns , Object ... data ) { } }
|
// The flat data must form complete rows of numberOfColumns entries each.
if (data.length % numberOfColumns != 0) {
    throw new IllegalArgumentException("The provided number of data elements '" + data.length
            + "' is not a multiple of " + numberOfColumns);
}
// Chop the flat list into rows; the first row is the header.
List<Object> flat = Arrays.asList(data);
return Iterables.partition(flat, numberOfColumns);
|
public class CPDefinitionVirtualSettingLocalServiceBaseImpl { /** * Returns the number of rows matching the dynamic query .
* @ param dynamicQuery the dynamic query
* @ param projection the projection to apply to the query
* @ return the number of rows matching the dynamic query */
@ Override public long dynamicQueryCount ( DynamicQuery dynamicQuery , Projection projection ) { } }
|
// Thin delegation: count the rows matching the dynamic query via the
// persistence bean, applying the given projection.
return cpDefinitionVirtualSettingPersistence . countWithDynamicQuery ( dynamicQuery , projection ) ;
|
public class XHTMLParser { /** * In order to handle empty content we use a { @ link PushbackReader } to try to read one character from the stream
* and if we get - 1 it means that the stream is empty and in this case we return an empty XDOM . */
private Reader getPushBackReader ( Reader source ) throws ParseException { } }
|
// Peek a single character to detect an empty source. Null signals "empty"
// to the caller; otherwise the character is pushed back so the full content
// is still available to the parser.
PushbackReader reader = new PushbackReader(source);
try {
    int first = reader.read();
    if (first == -1) {
        return null;
    }
    reader.unread(first);
} catch (IOException e) {
    throw new ParseException("Failed to find out if the source to parse is empty or not", e);
}
return reader;
|
public class MybatisRepository { @ Transactional ( ) public E add ( E entity ) { } }
|
// Executes the mapped "add" insert statement and returns the same entity.
// NOTE(review): presumably MyBatis populates generated keys on 'entity'
// during the insert — confirm the mapper configuration.
getSqlSession ( ) . insert ( getMapId ( ADD_MAP_ID ) , entity ) ; return entity ;
|
public class TeaToolsUtils { /** * Formats the class name with trailing square brackets . */
public String getArrayClassName ( Class < ? > clazz ) { } }
|
if ( clazz . isArray ( ) ) { return getArrayClassName ( clazz . getComponentType ( ) ) + "[]" ; } return clazz . getName ( ) ;
|
public class UsersMultiSelectList { /** * Reloads / refreshes the list of { @ link User users } associated to the context .
* @ param contextId the ID of the context */
public void reloadUsers ( int contextId ) { } }
|
// Rebuild the list model from the users currently managed for this context.
List<User> users = usersExtension.getContextUserAuthManager(contextId).getUsers();
User[] userArray = users.toArray(new User[users.size()]);
this.setModel(new DefaultComboBoxModel<User>(userArray));
|
public class GetTransitGatewayAttachmentPropagationsResult { /** * Information about the propagation route tables .
* @ return Information about the propagation route tables . */
public java . util . List < TransitGatewayAttachmentPropagation > getTransitGatewayAttachmentPropagations ( ) { } }
|
// Lazy initialization: callers never observe null (AWS SDK convention), so
// the getter materializes an empty SdkInternalList on first access.
if ( transitGatewayAttachmentPropagations == null ) { transitGatewayAttachmentPropagations = new com . amazonaws . internal . SdkInternalList < TransitGatewayAttachmentPropagation > ( ) ; } return transitGatewayAttachmentPropagations ;
|
public class AbstractDataEditorWidget { /** * Creates the save command .
* @ see # doUpdate ( ) */
protected ActionCommand createUpdateCommand ( ) { } }
|
// Builds the "update" action command: executing it runs doUpdate(). The
// command gets a security controller id derived from this widget's id, is
// configured by the application's command configurer, and is guarded so it
// is only enabled when the detail form could commit.
ActionCommand command = new ActionCommand ( UPDATE_COMMAND_ID ) { @ Override protected void doExecuteCommand ( ) { doUpdate ( ) ; } } ; command . setSecurityControllerId ( getId ( ) + "." + UPDATE_COMMAND_ID ) ; getApplicationConfig ( ) . commandConfigurer ( ) . configure ( command ) ; getDetailForm ( ) . addGuarded ( command , FormGuard . LIKE_COMMITCOMMAND ) ; return command ;
|
public class HashMac { /** * Computes an HMAC for the given message , using the key passed to the constructor .
* @ param message The message .
* @ return The HMAC value for the message and key . */
public String digest ( String message ) { } }
|
try {
    // Compute the HMAC of the message bytes under the configured key and
    // return it hex-encoded.
    Mac mac = Mac.getInstance(algorithm);
    mac.init(new SecretKeySpec(key, algorithm));
    byte[] raw = mac.doFinal(ByteArray.fromString(message));
    return ByteArray.toHex(raw);
} catch (NoSuchAlgorithmException e) {
    // The algorithm name was fixed at construction; missing support is a
    // deployment problem, not a caller error.
    throw new IllegalStateException("Algorithm unavailable: " + algorithm, e);
} catch (InvalidKeyException e) {
    throw new IllegalArgumentException("Unable to construct key for " + algorithm
            + ". Please check the value passed in when this class was initialised.", e);
}
|
public class FindBuddyImpl { /** * / * ( non - Javadoc )
* @ see com . tvd12 . ezyfox . core . command . BaseCommand # execute ( ) */
@ SuppressWarnings ( "unchecked" ) @ Override public ApiBuddy execute ( ) { } }
|
// Navigates zone -> buddy-list manager -> owner's buddy list and returns the
// requested buddy, cast to the API wrapper type.
return ( ApiBuddy ) extension . getParentZone ( ) . getBuddyListManager ( ) . getBuddyList ( owner ) . getBuddy ( buddy ) ;
|
public class LatentRelationalAnalysis { /** * Returns an ArrayList of phrases with the greatest frequencies in the corpus .
* For each alternate pair , send a phrase query to the Lucene search engine
* containing the corpus . The phrase query will find the frequencies of phrases
* that begin with one member of the pair and end with the other . The phrases
* cannot have more than MAX _ PHRASE words .
* Select the top NUM _ FILTER ( current NUM _ FILTER = 3 ) most frequent phrases and
* return them along with the original pairs .
* NOTE : should be called before { @ link # findPatterns ( ) } .
* @ param A a { @ code String } containing the first member in the original pair
* @ param B a { @ code String } containing the second member in the original pair
* @ param A _ prime a { @ code Synset } array containing the alternates for A
* @ param B _ prime a { @ code Synset } array containing the alternates for B
* @ return an ArrayList of { @ code String } with the top NUM _ FILTER pairs along with the original pairs */
public static ArrayList < String > filterPhrases ( String INDEX_DIR , String A , String B , Synset [ ] A_prime , Synset [ ] B_prime ) { } }
|
// Overall flow:
//   1. For up to NUM_SIM alternates of A (A_prime), count the corpus frequency
//      of the phrase (A', B); likewise for alternates of B with (A, B').
//   2. Track the three largest frequency keys (filter1 >= filter2 >= filter3).
//   3. Emit up to three "X:Y" pairs drawn from those top keys, then always
//      append the original "A:B" pair.
// NOTE(review): if fewer than three distinct frequencies were recorded,
// filter2/filter3 stay 0.0f and phrase_frequencies.get(...) is still queried
// for them — confirm HashMultiMap.get returns an empty collection (not null).
HashMultiMap < Float , Pair < String > > phrase_frequencies = new HashMultiMap < Float , Pair < String > > ( ) ; // Search corpus . . . A : B
// phrase _ frequencies . put ( new Float ( countPhraseFrequencies ( INDEX _ DIR , A , B ) ) , new Pair < String > ( A , B ) ) ;
// System . err . println ( " Top 10 Similar words : " ) ;
int count = 0 ; for ( int i = 0 ; ( i < NUM_SIM && i < A_prime . length ) ; i ++ ) { String [ ] wordForms = A_prime [ i ] . getWordForms ( ) ; for ( int j = 0 ; j < wordForms . length ; j ++ ) { if ( wordForms [ j ] . compareTo ( A ) != 0 ) { // Search corpus . . . A ' : B
Float score = new Float ( countPhraseFrequencies ( INDEX_DIR , wordForms [ j ] , B ) ) ; phrase_frequencies . put ( score , new Pair < String > ( wordForms [ j ] , B ) ) ; count ++ ; } if ( count >= NUM_SIM ) break ; } if ( count >= NUM_SIM ) break ; } count = 0 ; for ( int i = 0 ; ( i < NUM_SIM && i < B_prime . length ) ; i ++ ) { String [ ] wordForms = B_prime [ i ] . getWordForms ( ) ; for ( int j = 0 ; j < wordForms . length ; j ++ ) { if ( wordForms [ j ] . compareTo ( B ) != 0 ) { // Search corpus . . . A : B '
Float score = new Float ( countPhraseFrequencies ( INDEX_DIR , A , wordForms [ j ] ) ) ; phrase_frequencies . put ( score , new Pair < String > ( A , wordForms [ j ] ) ) ; count ++ ; } if ( count >= NUM_SIM ) break ; } if ( count >= NUM_SIM ) break ; } // filter out the phrases and add the top 3 to the ArrayList , and return it
Iterator iter = phrase_frequencies . keySet ( ) . iterator ( ) ; // TODO : make number of filters dynamic
// create Array with size = num filters
ArrayList < String > filtered_phrases = new ArrayList < String > ( ) ; Float filter1 = new Float ( 0.0 ) ; Float filter2 = new Float ( 0.0 ) ; Float filter3 = new Float ( 0.0 ) ; while ( iter . hasNext ( ) ) { Float curr_key = ( Float ) iter . next ( ) ; // this will bump the filters up each time a greater value comes along
// so that filter1 will be the greatest key and filter3 the 3rd greatest
if ( curr_key > filter1 ) { filter3 = filter2 ; filter2 = filter1 ; filter1 = curr_key ; } else if ( curr_key > filter2 ) { filter3 = filter2 ; filter2 = curr_key ; } else if ( curr_key > filter3 ) { filter3 = curr_key ; } } int filter_count = 0 ; Iterator val_iter = phrase_frequencies . get ( filter1 ) . iterator ( ) ; while ( val_iter . hasNext ( ) && filter_count < 3 ) { String alternative_pair = val_iter . next ( ) . toString ( ) ; String pair_arr [ ] = parsePair ( alternative_pair ) ; filtered_phrases . add ( pair_arr [ 0 ] + ":" + pair_arr [ 1 ] ) ; filter_count ++ ; } val_iter = phrase_frequencies . get ( filter2 ) . iterator ( ) ; while ( val_iter . hasNext ( ) && filter_count < 3 ) { String alternative_pair = val_iter . next ( ) . toString ( ) ; String pair_arr [ ] = parsePair ( alternative_pair ) ; filtered_phrases . add ( pair_arr [ 0 ] + ":" + pair_arr [ 1 ] ) ; filter_count ++ ; } val_iter = phrase_frequencies . get ( filter3 ) . iterator ( ) ; while ( val_iter . hasNext ( ) && filter_count < 3 ) { String alternative_pair = val_iter . next ( ) . toString ( ) ; String pair_arr [ ] = parsePair ( alternative_pair ) ; filtered_phrases . add ( pair_arr [ 0 ] + ":" + pair_arr [ 1 ] ) ; filter_count ++ ; } // throw in the original pair also
filtered_phrases . add ( A + ":" + B ) ; return filtered_phrases ;
|
public class RecordingOutputStream { /** * Reset limits to effectively - unlimited defaults */
public void resetLimits ( ) { } }
|
// Effectively unlimited: no length cap, no timeout, no rate cap.
maxLength = Long . MAX_VALUE ; timeoutMs = Long . MAX_VALUE ; maxRateBytesPerMs = Long . MAX_VALUE ;
|
public class A_CmsWorkflowManager { /** * Gets the configuration parameter for a given key , and if it doesn ' t find one , returns a default value . < p >
* @ param key the configuration key
* @ param defaultValue the default value to use when the configuration entry isn ' t found
* @ return the configuration value */
protected String getParameter ( String key , String defaultValue ) { } }
|
// Fall back to the supplied default when the key is missing (or explicitly
// mapped to null).
String value = m_parameters.get(key);
return (value != null) ? value : defaultValue;
|
public class LolChat { /** * Get a list of all your FriendGroups .
* @ return A List of all your FriendGroups */
public List < FriendGroup > getFriendGroups ( ) { } }
|
// Wrap every roster group from the XMPP connection in a FriendGroup facade.
final List<FriendGroup> groups = new ArrayList<>();
for (final RosterGroup rosterGroup : connection.getRoster().getGroups()) {
    groups.add(new FriendGroup(this, connection, rosterGroup));
}
return groups;
|
public class Cob2Xsd { /** * Create an empty XML Schema .
* If no targetNamespace , make sure there is no default namespace otherwise
* our complex types would be considered part of that default namespace
* ( usually XML Schema namespace ) .
* @ param encoding the character set used to encode this XML Schema
* @ param targetNamespace the target namespace to use ( null for no namespace )
* @ return a new empty XML schema using the model */
protected XmlSchema createXmlSchema ( final String encoding , final String targetNamespace ) { } }
|
// Builds the schema shell. With a target namespace, elements are qualified.
// Without one, the namespace context is rebuilt so the XML Schema namespace
// keeps (or gains) the "xsd" prefix instead of being the default namespace —
// otherwise generated complex types would silently fall into the XML Schema
// namespace.
// NOTE(review): setAttributeFormDefault(null) relies on the library treating
// null as "unset" — confirm against the XmlSchema version in use.
XmlSchema xsd = new XmlSchema ( targetNamespace , new XmlSchemaCollection ( ) ) ; if ( targetNamespace != null ) { xsd . setElementFormDefault ( XmlSchemaForm . QUALIFIED ) ; } xsd . setAttributeFormDefault ( null ) ; xsd . setInputEncoding ( encoding ) ; if ( targetNamespace == null ) { NamespaceMap prefixmap = new NamespaceMap ( ) ; NamespacePrefixList npl = xsd . getNamespaceContext ( ) ; if ( npl == null ) { prefixmap . add ( "xsd" , XMLConstants . W3C_XML_SCHEMA_NS_URI ) ; } else { for ( int i = 0 ; i < npl . getDeclaredPrefixes ( ) . length ; i ++ ) { String prefix = npl . getDeclaredPrefixes ( ) [ i ] ; String namespace = npl . getNamespaceURI ( prefix ) ; if ( namespace . equals ( XMLConstants . W3C_XML_SCHEMA_NS_URI ) ) { if ( prefix . equals ( "" ) ) { prefix = "xsd" ; } } prefixmap . add ( prefix , namespace ) ; } } xsd . setNamespaceContext ( prefixmap ) ; } return xsd ;
|
public class MPXWriter { /** * This method is called to format an accrue type value .
* @ param type accrue type
* @ return formatted accrue type */
private String formatAccrueType ( AccrueType type ) { } }
|
// Accrue type values are 1-based; index into the locale-specific labels.
if (type == null) {
    return null;
}
return LocaleData.getStringArray(m_locale, LocaleData.ACCRUE_TYPES)[type.getValue() - 1];
|
public class JMElasticsearchBulk { /** * Delete bulk docs async .
* @ param index the index
* @ param type the type
* @ param bulkResponseActionListener the bulk response action listener */
public void deleteBulkDocsAsync ( String index , String type , ActionListener < BulkResponse > bulkResponseActionListener ) { } }
|
// Asynchronously deletes every document of the given index/type: builds
// delete requests for all matching docs, bundles them into one bulk request,
// and submits it with the caller's listener.
executeBulkRequestAsync ( buildDeleteBulkRequestBuilder ( buildAllDeleteRequestBuilderList ( index , type ) ) , bulkResponseActionListener ) ;
|
public class SessionUtilExternalBrowser { /** * Receives SAML token from Snowflake via web browser
* @ param socket socket
* @ throws IOException if any IO error occurs
* @ throws SFException if a HTTP request from browser is invalid */
private void processSamlToken ( String [ ] rets , Socket socket ) throws IOException , SFException { } }
|
// Parses the browser's HTTP request lines:
//   - GET:  the token arrives in the request line's query string.
//   - POST: the token arrives in the last line (the request body).
// First tries to read the token from a JSON POST body; on IOException falls
// back to URL-encoded "token=..." parameters. Always responds to the browser
// at the end so the user sees a confirmation page.
String targetLine = null ; String userAgent = null ; boolean isPost = false ; for ( String line : rets ) { if ( line . length ( ) > PREFIX_GET . length ( ) && line . substring ( 0 , PREFIX_GET . length ( ) ) . equalsIgnoreCase ( PREFIX_GET ) ) { targetLine = line ; } else if ( line . length ( ) > PREFIX_POST . length ( ) && line . substring ( 0 , PREFIX_POST . length ( ) ) . equalsIgnoreCase ( PREFIX_POST ) ) { targetLine = rets [ rets . length - 1 ] ; isPost = true ; } else if ( line . length ( ) > PREFIX_USER_AGENT . length ( ) && line . substring ( 0 , PREFIX_USER_AGENT . length ( ) ) . equalsIgnoreCase ( PREFIX_USER_AGENT ) ) { userAgent = line ; } } if ( targetLine == null ) { throw new SFException ( ErrorCode . NETWORK_ERROR , "Invalid HTTP request. No token is given from the browser." ) ; } if ( userAgent != null ) { logger . debug ( "{}" , userAgent ) ; } try { // attempt to get JSON response
extractJsonTokenFromPostRequest ( targetLine ) ; } catch ( IOException ex ) { String parameters = isPost ? extractTokenFromPostRequest ( targetLine ) : extractTokenFromGetRequest ( targetLine ) ; try { URI inputParameter = new URI ( parameters ) ; for ( NameValuePair urlParam : URLEncodedUtils . parse ( inputParameter , UTF8_CHARSET ) ) { if ( "token" . equals ( urlParam . getName ( ) ) ) { this . token = urlParam . getValue ( ) ; break ; } } } catch ( URISyntaxException ex0 ) { throw new SFException ( ErrorCode . NETWORK_ERROR , String . format ( "Invalid HTTP request. No token is given from the browser. %s, err: %s" , targetLine , ex0 ) ) ; } } if ( this . token == null ) { throw new SFException ( ErrorCode . NETWORK_ERROR , String . format ( "Invalid HTTP request. No token is given from the browser: %s" , targetLine ) ) ; } returnToBrowser ( socket ) ;
|
public class YoutubeSampleActivity { /** * Hook the DraggableListener to DraggablePanel to pause or resume the video when the
* DragglabePanel is maximized or closed . */
private void hookDraggablePanelListeners ( ) { } }
|
// Resume playback when the panel is maximized and pause it when the panel is
// dismissed to either side; minimizing intentionally leaves playback running.
draggablePanel . setDraggableListener ( new DraggableListener ( ) { @ Override public void onMaximized ( ) { playVideo ( ) ; } @ Override public void onMinimized ( ) { // Empty
} @ Override public void onClosedToLeft ( ) { pauseVideo ( ) ; } @ Override public void onClosedToRight ( ) { pauseVideo ( ) ; } } ) ;
|
public class AccountsInner { /** * Lists the Data Lake Store accounts within the subscription . The response includes a link to the next page of results , if any .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; DataLakeStoreAccountInner & gt ; object */
public Observable < Page < DataLakeStoreAccountInner > > listAsync ( ) { } }
|
// Async page fetch: unwrap the ServiceResponse envelope and surface only the
// page body to subscribers.
return listWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < Page < DataLakeStoreAccountInner > > , Page < DataLakeStoreAccountInner > > ( ) { @ Override public Page < DataLakeStoreAccountInner > call ( ServiceResponse < Page < DataLakeStoreAccountInner > > response ) { return response . body ( ) ; } } ) ;
|
public class NGSessionPool { /** * Shuts down the pool . The function waits for running nails to finish . */
void shutdown ( ) throws InterruptedException { } }
|
// Shutdown protocol:
//   1. Under the lock, mark the pool done and snapshot all sessions (working
//      and idle), clearing both pools so no new work is handed out.
//   2. Ask every session to shut down, outside the lock.
//   3. Join each session within the shared termination budget; a session
//      still alive after the budget indicates a stuck nail and is an error.
List < NGSession > allSessions ; synchronized ( lock ) { done = true ; allSessions = Stream . concat ( workingPool . stream ( ) , idlePool . stream ( ) ) . collect ( Collectors . toList ( ) ) ; idlePool . clear ( ) ; workingPool . clear ( ) ; } for ( NGSession session : allSessions ) { session . shutdown ( ) ; } // wait for all sessions to complete by either returning from waiting state or finishing their
// nails
long start = System . nanoTime ( ) ; for ( NGSession session : allSessions ) { long timeout = NGConstants . SESSION_TERMINATION_TIMEOUT_MILLIS - TimeUnit . MILLISECONDS . convert ( System . nanoTime ( ) - start , TimeUnit . NANOSECONDS ) ; if ( timeout < 1 ) { // Give all threads a chance to finish or pick up already finished threads
timeout = 1 ; } session . join ( timeout ) ; if ( session . isAlive ( ) ) { throw new IllegalStateException ( "NGSession has not completed in " + NGConstants . SESSION_TERMINATION_TIMEOUT_MILLIS + " ms" ) ; } }
|
public class GeoPackageCoreConnection { /** * Query the SQL for a single result typed object in the first column
* @ param < T >
* result value type
* @ param sql
* sql statement
* @ param args
* sql arguments
* @ return single result object
* @ since 3.1.0 */
public < T > T querySingleTypedResult ( String sql , String [ ] args ) { } }
|
// Single-value query; the unchecked cast narrows the untyped result to the
// caller's expected type (the scoped @SuppressWarnings keeps the warning
// local to this one assignment).
@ SuppressWarnings ( "unchecked" ) T result = ( T ) querySingleResult ( sql , args ) ; return result ;
|
public class Hessian2Output { /** * Starts an envelope .
* < code > < pre >
* E major minor
* m b16 b8 method - name
* < / pre > < / code >
* @ param method the method name to call . */
public void startEnvelope ( String method ) throws IOException { } }
|
// Make sure there is headroom in the output buffer, then emit the envelope
// marker byte followed by the method name.
if (SIZE < _offset + 32) {
    flushBuffer();
}
_buffer[_offset++] = (byte) 'E';
writeString(method);
|
public class AbstractKnownHostsKeyVerification { /** * Allows a host key , optionally recording the key to the known _ hosts file .
* @ param host
* the name of the host
* @ param pk
* the public key to allow
* @ param always
* true if the key should be written to the known _ hosts file
* @ throws InvalidHostFileException
* if the host file cannot be written
* @ since 0.2.0 */
public void allowHost ( String host , SshPublicKey pk , boolean always ) throws SshException { } }
|
// Record the host key, overriding any previous entry. When hashHosts is set,
// the stored name is HASH_MAGIC + base64(salt) + HASH_DELIM +
// base64(HMAC-SHA1(salt, host)) so the known_hosts file does not reveal
// host names (OpenSSH hashed-hosts format).
if ( hashHosts ) { SshHmac sha1 = ( SshHmac ) ComponentManager . getInstance ( ) . supportedHMacsCS ( ) . getInstance ( "hmac-sha1" ) ; byte [ ] hashSalt = new byte [ sha1 . getMacLength ( ) ] ; ComponentManager . getInstance ( ) . getRND ( ) . nextBytes ( hashSalt ) ; sha1 . init ( hashSalt ) ; sha1 . update ( host . getBytes ( ) ) ; byte [ ] theHash = sha1 . doFinal ( ) ; String names = HASH_MAGIC + Base64 . encodeBytes ( hashSalt , false ) + HASH_DELIM + Base64 . encodeBytes ( theHash , false ) ; putAllowedKey ( names , pk , always ) ; } else { putAllowedKey ( host , pk , always ) ; } // allowedHosts . put ( host , pk ) ;
// If the key should be permanent, persist the known_hosts file now; an I/O
// failure is surfaced as an SshException.
if ( always ) { try { saveHostFile ( ) ; } catch ( IOException ex ) { throw new SshException ( "knownhosts file could not be saved! " + ex . getMessage ( ) , SshException . INTERNAL_ERROR ) ; } }
|
public class ConstantsSummaryBuilder { /** * Build the list of packages .
* @ param node the XML element that specifies which components to document
* @ param contentTree the content tree to which the content list will be added */
public void buildContents ( XMLNode node , Content contentTree ) { } }
|
// Build the contents list: one link per documented package that declares
// constants, skipping packages whose header was already printed.
Content contentListTree = writer.getContentsHeader();
printedPackageHeaders.clear();
for (PackageElement pkg : configuration.packages) {
    boolean shouldLink = hasConstantField(pkg) && !hasPrintedPackageIndex(pkg);
    if (shouldLink) {
        writer.addLinkToPackageContent(pkg, printedPackageHeaders, contentListTree);
    }
}
writer.addContentsList(contentTree, contentListTree);
|
public class PropertiesLoaderUtils { /** * Fill the given properties from the specified class path resource ( in ISO - 8859-1 encoding ) .
* < p > Merges properties if more than one resource of the same name
* found in the class path . < / p >
* @ param props the Properties instance to load into
* @ param resourceName the name of the class path resource
* @ param classLoader the class loader
* @ throws IOException if loading failed */
public static void fillProperties ( Properties props , String resourceName , ClassLoader classLoader ) throws IOException { } }
|
// Loads and merges every classpath resource with the given name (XML vs
// plain properties format chosen by file extension). If the resource name
// carries the encrypted suffix, encrypted String values are decrypted in
// place afterwards.
// NOTE(review): later resources overwrite earlier keys during the merge —
// confirm that is the intended precedence order.
Enumeration < URL > urls = classLoader . getResources ( resourceName ) ; while ( urls . hasMoreElements ( ) ) { URL url = urls . nextElement ( ) ; URLConnection con = url . openConnection ( ) ; try ( InputStream is = con . getInputStream ( ) ) { if ( resourceName . endsWith ( XML_FILE_EXTENSION ) ) { props . loadFromXML ( is ) ; } else { props . load ( is ) ; } } } if ( resourceName . endsWith ( ENCRYPTED_RESOURCE_NAME_SUFFIX ) ) { for ( Map . Entry < ? , ? > entry : props . entrySet ( ) ) { Object key = entry . getKey ( ) ; Object val = entry . getValue ( ) ; if ( val instanceof String ) { String value = ( String ) val ; if ( PropertyValueEncryptionUtils . isEncryptedValue ( value ) ) { value = PropertyValueEncryptionUtils . decrypt ( value , PBEncryptionUtils . getEncryptor ( ) ) ; props . put ( key , value ) ; } } } }
|
public class DefaultGroovyMethods { /** * Iterates through this collection transforming each entry into a new value using Closure . IDENTITY
 * as a transformer , basically returning a list of items copied from the original collection .
 * < pre class = " groovyTestCase " > assert [ 1,2,3 ] = = [ 1,2,3 ] . collect ( ) < / pre >
 * @ param self an Iterable
 * @ return a List of the transformed values
 * @ see Closure # IDENTITY
 * @ since 2.5.0 */
@ SuppressWarnings ( "unchecked" ) public static < T > List < T > collect ( Iterable < T > self ) { } }
|
// Delegates to collect(Iterable, Closure) with the identity transform,
// effectively shallow-copying the iterable into a new List.
return collect ( self , ( Closure < T > ) Closure . IDENTITY ) ;
|
public class ThreadFactoryBuilder { /** * 设置线程优先级
* @ param priority 优先级
* @ return this
* @ see Thread # MIN _ PRIORITY
* @ see Thread # NORM _ PRIORITY
* @ see Thread # MAX _ PRIORITY */
public ThreadFactoryBuilder setPriority ( int priority ) { } }
|
if ( priority < Thread . MIN_PRIORITY ) { throw new IllegalArgumentException ( StrUtil . format ( "Thread priority ({}) must be >= {}" , priority , Thread . MIN_PRIORITY ) ) ; } if ( priority > Thread . MAX_PRIORITY ) { throw new IllegalArgumentException ( StrUtil . format ( "Thread priority ({}) must be <= {}" , priority , Thread . MAX_PRIORITY ) ) ; } this . priority = priority ; return this ;
|
public class DefaultFacebookClient { /** * Returns the base endpoint URL for the Graph API .
* @ return The base endpoint URL for the Graph API . */
protected String getFacebookGraphEndpointUrl ( ) { } }
|
if ( apiVersion . isUrlElementRequired ( ) ) { return getFacebookEndpointUrls ( ) . getGraphEndpoint ( ) + '/' + apiVersion . getUrlElement ( ) ; } else { return getFacebookEndpointUrls ( ) . getGraphEndpoint ( ) ; }
|
public class JmsConnectionImpl { /** * Sets the connection state .
 * < p > Only the CLOSED , STOPPED and STARTED states are accepted ; any other
 * value is silently ignored . Threads waiting on { @ code stateLock } are
 * notified after a successful transition . < / p >
 * @ param newState The state to set */
protected void setState ( int newState ) { } }
|
// Trace entry/exit only when diagnostics are enabled. The state change and
// notifyAll happen under stateLock so concurrent readers/waiters see a
// consistent value.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "setState" , newState ) ; synchronized ( stateLock ) { if ( ( newState == JmsInternalConstants . CLOSED ) || ( newState == JmsInternalConstants . STOPPED ) || ( newState == JmsInternalConstants . STARTED ) ) { state = newState ; stateLock . notifyAll ( ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "setState" ) ;
|
public class BitstampAdapters { /** * Adapts a Transaction [ ] to a Trades Object
* @ param transactions The Bitstamp transactions
* @ param currencyPair ( e . g . BTC / USD )
* @ return The XChange Trades */
public static Trades adaptTrades ( BitstampTransaction [ ] transactions , CurrencyPair currencyPair ) { } }
|
List < Trade > trades = new ArrayList < > ( ) ; long lastTradeId = 0 ; for ( BitstampTransaction tx : transactions ) { final long tradeId = tx . getTid ( ) ; if ( tradeId > lastTradeId ) { lastTradeId = tradeId ; } trades . add ( adaptTrade ( tx , currencyPair , 1000 ) ) ; } return new Trades ( trades , lastTradeId , TradeSortType . SortByID ) ;
|
public class scalarmult { /** * Scalar multiplication on Curve25519 ( Montgomery ladder , ref10 style ) .
 * Computes q = n * p over the Montgomery curve using the constant - time
 * conditional - swap ladder ; field elements use the 10 - limb representation
 * from fe . h .
 * < p > NOTE ( review ) : the standard X25519 scalar clamping
 * ( e [ 0 ] & = 248 ; e [ 31 ] & = 127 ; e [ 31 ] | = 64 ) is commented out below , so the
 * caller must supply an already - clamped ( or deliberately unclamped ) scalar
 * - confirm this is intentional .
 * @ param q 32 - byte output buffer for the resulting u - coordinate
 * @ param n 32 - byte scalar
 * @ param p 32 - byte input u - coordinate
 * @ return always 0 */
public static int crypto_scalarmult ( byte [ ] q , byte [ ] n , byte [ ] p ) { } }
|
// Working field elements; (x2:z2) and (x3:z3) are the two ladder points,
// tmp0/tmp1 are scratch.
byte [ ] e = new byte [ 32 ] ; int i ; int [ ] x1 = new int [ 10 ] ; int [ ] x2 = new int [ 10 ] ; int [ ] z2 = new int [ 10 ] ; int [ ] x3 = new int [ 10 ] ; int [ ] z3 = new int [ 10 ] ; int [ ] tmp0 = new int [ 10 ] ; int [ ] tmp1 = new int [ 10 ] ; int pos ; int swap ; int b ; for ( i = 0 ; i < 32 ; ++ i ) e [ i ] = n [ i ] ; // e [ 0 ] & = 248;
// e [ 31 ] & = 127;
// e [ 31 ] | = 64;
// Ladder over scalar bits 254..0. 'swap' accumulates XORs of consecutive
// bits so fe_cswap performs the conditional swap in constant time.
fe_frombytes . fe_frombytes ( x1 , p ) ; fe_1 . fe_1 ( x2 ) ; fe_0 . fe_0 ( z2 ) ; fe_copy . fe_copy ( x3 , x1 ) ; fe_1 . fe_1 ( z3 ) ; swap = 0 ; for ( pos = 254 ; pos >= 0 ; -- pos ) { b = e [ pos / 8 ] >>> ( pos & 7 ) ; b &= 1 ; swap ^= b ; fe_cswap . fe_cswap ( x2 , x3 , swap ) ; fe_cswap . fe_cswap ( z2 , z3 , swap ) ; swap = b ; // CONVERT # include " montgomery . h "
// One combined Montgomery double-and-add step (montgomery.h). Each trailing
// comment names the operation THIS line performs (the original qhasm
// comments were shifted by one statement).
fe_sub . fe_sub ( tmp0 , x3 , z3 ) ; /* D = X3 - Z3 */
fe_sub . fe_sub ( tmp1 , x2 , z2 ) ; /* B = X2 - Z2 */
fe_add . fe_add ( x2 , x2 , z2 ) ; /* A = X2 + Z2 */
fe_add . fe_add ( z2 , x3 , z3 ) ; /* C = X3 + Z3 */
fe_mul . fe_mul ( z3 , tmp0 , x2 ) ; /* DA = D * A */
fe_mul . fe_mul ( z2 , z2 , tmp1 ) ; /* CB = C * B */
fe_sq . fe_sq ( tmp0 , tmp1 ) ; /* BB = B ^ 2 */
fe_sq . fe_sq ( tmp1 , x2 ) ; /* AA = A ^ 2 */
fe_add . fe_add ( x3 , z3 , z2 ) ; /* t0 = DA + CB ( held in x3 ) */
fe_sub . fe_sub ( z2 , z3 , z2 ) ; /* t1 = DA - CB */
fe_mul . fe_mul ( x2 , tmp1 , tmp0 ) ; /* X4 = AA * BB */
fe_sub . fe_sub ( tmp1 , tmp1 , tmp0 ) ; /* E = AA - BB */
fe_sq . fe_sq ( z2 , z2 ) ; /* t2 = t1 ^ 2 */
fe_mul121666 . fe_mul121666 ( z3 , tmp1 ) ; /* t3 = a24 * E */
fe_sq . fe_sq ( x3 , x3 ) ; /* X5 = t0 ^ 2 */
fe_add . fe_add ( tmp0 , tmp0 , z3 ) ; /* t4 = BB + t3 */
fe_mul . fe_mul ( z3 , x1 , z2 ) ; /* Z5 = X1 * t2 */
fe_mul . fe_mul ( z2 , tmp1 , tmp0 ) ; /* Z4 = E * t4 */
// Undo the final pending swap, then convert (x2:z2) from projective to
// affine via z2^-1 and serialize the result into q.
} fe_cswap . fe_cswap ( x2 , x3 , swap ) ; fe_cswap . fe_cswap ( z2 , z3 , swap ) ; fe_invert . fe_invert ( z2 , z2 ) ; fe_mul . fe_mul ( x2 , x2 , z2 ) ; fe_tobytes . fe_tobytes ( q , x2 ) ; return 0 ;
|
public class AbstractBlock { /** * Get the position of the provided block in the provided list of blocks .
* Can ' t use { @ link List # indexOf ( Object ) } since it ' s using { @ link Object # equals ( Object ) } internally which is not
* what we want since two WordBlock with the same text or two spaces are equals for example but we want to be able
* to target one specific Block .
* @ param block the block for which to find the position
* @ param blocks the list of blocks in which to look for the passed block
* @ return the position of the block , - 1 if the block can ' t be found */
private static int indexOfBlock ( Block block , List < Block > blocks ) { } }
|
int position = 0 ; for ( Block child : blocks ) { if ( child == block ) { return position ; } ++ position ; } return - 1 ;
|
public class StringUtil {
    /**
     * Returns the index within this string of the last occurrence of the
     * specified character, ignoring case, searching backward starting at the
     * specified index.
     *
     * @param pString the string to search; may be {@code null}
     * @param pChar   the character to look for (matched case-insensitively)
     * @param pPos    the highest index to consider in the search
     * @return the largest index {@code <= pPos} at which the character occurs
     *         in either lower or upper case, or -1 if it does not occur
     *         (or if {@code pString} is {@code null})
     * @see String#lastIndexOf(int, int)
     */
    public static int lastIndexOfIgnoreCase(String pString, int pChar, int pPos) {
        if (pString == null) {
            return -1;
        }
        int indexLower = pString.lastIndexOf(Character.toLowerCase((char) pChar), pPos);
        int indexUpper = pString.lastIndexOf(Character.toUpperCase((char) pChar), pPos);
        // Each index is either a valid position or -1, so the later occurrence
        // (covering the only-lower, only-upper and both-found cases) is the max.
        return Math.max(indexLower, indexUpper);
    }
}
|
public class Parameters { /** * Convenience method to call { @ link # getExistingFile ( String ) } and then apply { @ link
* FileUtils # loadStringSet ( CharSource ) } on it , if the param is present . If the param is missing ,
* { @ link Optional # absent ( ) } is returned . */
public Optional < ImmutableSet < String > > getOptionalFileAsStringSet ( String param ) throws IOException { } }
|
if ( isPresent ( param ) ) { return Optional . of ( FileUtils . loadStringSet ( Files . asCharSource ( getExistingFile ( param ) , Charsets . UTF_8 ) ) ) ; } else { return Optional . absent ( ) ; }
|
public class StandardBullhornData { /** * Makes the call to the resume parser . If the parse fails with an HTTP
 * error , this method retries up to RESUME _ PARSE _ RETRY times , delegating
 * each failure to handleResumeParseError .
 * @ param url the resume - parse endpoint
 * @ param requestPayLoad the request body to post
 * @ param uriVariables variables substituted into the url
 * @ return the parsed resume , or null if every attempt failed */
protected ParsedResume parseResume ( String url , Object requestPayLoad , Map < String , String > uriVariables ) { } }
|
// Retry loop: a successful post breaks out immediately. HTTP status errors go
// through handleResumeParseError (which may return a fallback or rethrow -
// TODO confirm); any other exception is logged and the attempt is retried.
// NOTE(review): the catch-all Exception handler leaves 'response' null and
// retries - confirm that swallowing non-HTTP failures here is intentional.
ParsedResume response = null ; for ( int tryNumber = 1 ; tryNumber <= RESUME_PARSE_RETRY ; tryNumber ++ ) { try { response = this . performPostResumeRequest ( url , requestPayLoad , uriVariables ) ; break ; } catch ( HttpStatusCodeException error ) { response = handleResumeParseError ( tryNumber , error ) ; } catch ( Exception e ) { log . error ( "error" , e ) ; } } return response ;
|
public class Normalizer { /** * Concatenate normalized strings , making sure that the result is normalized
 * as well .
 * If both the left and the right strings are in the normalization form
 * according to " mode " , then the result will be
 * < code > dest = normalize ( left + right , mode ) < / code > .
 * With the input strings already being normalized , only the adjacent end
 * pieces of the inputs need to be re - normalized before concatenation .
 * It is allowed to have dest = = left to avoid copying the entire left string .
 * @ param left Left source array , may be same as dest .
 * @ param leftStart start in the left array .
 * @ param leftLimit limit in the left array ( = = length )
 * @ param right Right source array .
 * @ param rightStart start in the right array .
 * @ param rightLimit limit in the right array ( = = length )
 * @ param dest The output buffer .
 * @ param destStart start in the destination array
 * @ param destLimit limit in the destination array ( = = length )
 * @ param mode The normalization mode .
 * @ param options The normalization options , ORed together ( 0 for no options ) .
 * @ return Length of output ( number of chars ) when successful
 * @ exception IndexOutOfBoundsException if target capacity is less than the
 * required length ; the exception message is the string representation of
 * the required capacity .
 * @ see # normalize
 * @ see # next
 * @ see # previous
 * @ deprecated ICU 56 Use { @ link Normalizer2 } instead .
 * @ hide original deprecated declaration */
@ Deprecated public static int concatenate ( char [ ] left , int leftStart , int leftLimit , char [ ] right , int rightStart , int rightLimit , char [ ] dest , int destStart , int destLimit , Normalizer . Mode mode , int options ) { } }
|
// NOTE(review): javadoc says dest may be null for pure preflighting, but the
// implementation rejects null outright - confirm which contract is intended.
// Left may alias dest (appended first, before dest is written); right must not
// overlap the destination range. Normalizer2.append re-normalizes the boundary
// between the two already-normalized pieces.
if ( dest == null ) { throw new IllegalArgumentException ( ) ; } /* check for overlapping right and destination */
if ( right == dest && rightStart < destLimit && destStart < rightLimit ) { throw new IllegalArgumentException ( "overlapping right and dst ranges" ) ; } /* allow left = = dest */
StringBuilder destBuilder = new StringBuilder ( leftLimit - leftStart + rightLimit - rightStart + 16 ) ; destBuilder . append ( left , leftStart , leftLimit - leftStart ) ; CharBuffer rightBuffer = CharBuffer . wrap ( right , rightStart , rightLimit - rightStart ) ; mode . getNormalizer2 ( options ) . append ( destBuilder , rightBuffer ) ; int destLength = destBuilder . length ( ) ; if ( destLength <= ( destLimit - destStart ) ) { destBuilder . getChars ( 0 , destLength , dest , destStart ) ; return destLength ; } else { throw new IndexOutOfBoundsException ( Integer . toString ( destLength ) ) ; }
|
public class Cache { /** * Insert one or more values at the tail ( right end ) of the list stored at key .
 * < p > With multiple values , they are inserted left - to - right , so
 * { @ code RPUSH mylist a b c } on an empty list yields the list a b c ,
 * equivalent to executing RPUSH once per value in order . < / p >
 * < p > If the key does not exist , an empty list is created before the push .
 * If the key exists but does not hold a list , Redis returns an error . < / p >
 * @ param key the list key
 * @ param values the values to append
 * @ return the value returned by the underlying Jedis rpush ( per the Redis
 * RPUSH documentation , the length of the list after the push ) */
public Long rpush ( Object key , Object ... values ) { } }
|
// Borrow a Jedis connection and always return it to the pool, even on error.
Jedis jedis = getJedis ( ) ; try { return jedis . rpush ( keyToBytes ( key ) , valuesToBytesArray ( values ) ) ; } finally { close ( jedis ) ; }
|
public class WebUtils { /** * Creates and returns TextView objects based on WebElements .
 * @ param javaScriptWasExecuted whether the JavaScript that harvests web
 * elements actually ran ; when false an empty list is returned
 * @ return an ArrayList with TextViews , one per sufficiently - shown web element */
private ArrayList < TextView > createAndReturnTextViewsFromWebElements ( boolean javaScriptWasExecuted ) { } }
|
// Only elements that are sufficiently shown on screen are converted; each
// RobotiumTextView carries the element's text and on-screen location.
ArrayList < TextView > webElementsAsTextViews = new ArrayList < TextView > ( ) ; if ( javaScriptWasExecuted ) { for ( WebElement webElement : webElementCreator . getWebElementsFromWebViews ( ) ) { if ( isWebElementSufficientlyShown ( webElement ) ) { RobotiumTextView textView = new RobotiumTextView ( inst . getContext ( ) , webElement . getText ( ) , webElement . getLocationX ( ) , webElement . getLocationY ( ) ) ; webElementsAsTextViews . add ( textView ) ; } } } return webElementsAsTextViews ;
|
public class PropertyWrapperImpl { /** * { @ inheritDoc } */
public Class < ? > getType ( ) { } }
|
if ( setter != null ) { return setter . getParameterTypes ( ) [ 0 ] ; } if ( getter != null ) { return getter . getReturnType ( ) ; } if ( field != null ) { return field . getType ( ) ; } return null ;
|
public class DataStore { /** * Create a DataStore object from JSON .
 * < p > Expected shape : { @ code fields } is an array whose first element must
 * be " time " , followed by the column names ; { @ code data } is an array of
 * rows , each row being { @ code [ timestamp , v1 , v2 , ... ] } aligned with
 * { @ code fields } . < / p >
 * @ param json JSON of the DataStore
 * @ return DataStore object corresponding to the given JSON
 * @ throws ParseException If the JSON is invalid .
 * NOTE ( review ) : the body throws JSONException rather than the declared
 * ParseException - confirm JSONException is unchecked here or fix the contract . */
public static DataStore fromJson ( final JSONObject json ) throws ParseException { } }
|
// Column 0 is the timestamp (row key); the remaining columns become the
// per-row value map, keyed by the corresponding entry in 'fields'.
DataStore ret ; JSONArray jsonCols = json . getJSONArray ( "fields" ) ; if ( ! "time" . equals ( jsonCols . get ( 0 ) ) ) { throw new JSONException ( "time must be the first item in 'fields'" ) ; } Set < String > cols = new HashSet < String > ( ) ; for ( int i = 1 ; i < jsonCols . length ( ) ; i ++ ) { cols . add ( jsonCols . getString ( i ) ) ; } ret = new DataStore ( cols ) ; JSONArray jsonData = json . getJSONArray ( "data" ) ; for ( int i = 0 ; i < jsonData . length ( ) ; i ++ ) { JSONArray row = jsonData . getJSONArray ( i ) ; Long ts = row . getLong ( 0 ) ; Map < String , Object > vals = new HashMap < String , Object > ( ) ; for ( int j = 1 ; j < row . length ( ) ; j ++ ) { vals . put ( jsonCols . getString ( j ) , row . get ( j ) ) ; } ret . rows . put ( ts , vals ) ; } return ret ;
|
public class SolrIndexer { /** * Index an object and all of its payloads .
 * @ param oid
 * : The identifier of the object
 * @ throws IndexerException
 * if there were errors during indexing ( storage failures are wrapped ) */
@ Override public void index ( String oid ) throws IndexerException { } }
|
try { DigitalObject object = storage . getObject ( oid ) ; // Some workflow actions create payloads , so we can ' t iterate
// directly against the object . Snapshot the payload id list first so
// payloads added during indexing are not picked up mid-iteration.
String [ ] oldManifest = { } ; oldManifest = object . getPayloadIdList ( ) . toArray ( oldManifest ) ; for ( String payloadId : oldManifest ) { Payload payload = object . getPayload ( payloadId ) ; if ( ! payload . getLabel ( ) . matches ( "version_tfpackage_.*" ) ) { index ( object , payload ) ; } } } catch ( StorageException ex ) { throw new IndexerException ( ex ) ; }
|
public class GrailsWebUtil { /** * Retrieves the URI from the request from either the include attribute or the request . getRequestURI ( ) method .
* @ param request The HttpServletRequest instance
* @ return The String URI */
public static String getUriFromRequest ( HttpServletRequest request ) { } }
|
Object includeUri = request . getAttribute ( "javax.servlet.include.request_uri" ) ; return includeUri == null ? request . getRequestURI ( ) : ( String ) includeUri ;
|
public class JCRCacheHandler { /** * { @ inheritDoc } */
@ Override protected boolean matchKey ( Serializable cacheKey ) { } }
|
if ( cacheKey instanceof String ) { try { // check is prefix equals to " repository : "
String prefix = jcrOrganizationServiceImpl . getWorkingRepository ( ) . getConfiguration ( ) . getName ( ) + DELIMITER ; return ( ( String ) cacheKey ) . startsWith ( prefix ) ; } catch ( RepositoryException e ) { throw new IllegalStateException ( e . getMessage ( ) , e ) ; } catch ( RepositoryConfigurationException e ) { throw new IllegalStateException ( e . getMessage ( ) , e ) ; } } return false ;
|
public class BsonGenerator { /** * Escapes the given string according to { @ link # _ characterEscapes } . If
 * there are no character escapes , returns the original string unchanged
 * ( no copy is made ) .
 * @ param string the string to escape
 * @ return the escaped string or the original one if there is nothing to escape
 * @ throws IOException if an escape sequence could not be retrieved */
protected String escapeCharacters ( String string ) throws IOException { } }
|
// The StringBuilder is allocated lazily on the first escapable character, so
// strings with nothing to escape are returned as-is. Only ASCII characters
// (<= 0x7F) marked ESCAPE_CUSTOM in _outputEscapes are replaced; lastEscapePos
// tracks the start of the next un-copied plain segment.
if ( _characterEscapes == null ) { // escaping not necessary
return string ; } StringBuilder sb = null ; int lastEscapePos = 0 ; for ( int i = 0 ; i < string . length ( ) ; ++ i ) { int c = string . charAt ( i ) ; if ( c <= 0x7F && _outputEscapes [ c ] == CharacterEscapes . ESCAPE_CUSTOM ) { SerializableString escape = _characterEscapes . getEscapeSequence ( c ) ; if ( escape == null ) { _reportError ( "Invalid custom escape definitions; custom escape " + "not found for character code 0x" + Integer . toHexString ( c ) + ", although was supposed to have one" ) ; } if ( sb == null ) { sb = new StringBuilder ( ) ; } if ( i > lastEscapePos ) { sb . append ( string , lastEscapePos , i ) ; } lastEscapePos = i + 1 ; sb . append ( escape . getValue ( ) ) ; } } if ( sb != null && lastEscapePos < string . length ( ) ) { sb . append ( string , lastEscapePos , string . length ( ) ) ; } if ( sb == null ) { return string ; } return sb . toString ( ) ;
|
public class FTPFileSystem { /** * Convenience method , so that we don ' t open a new connection when using this
* method from within another method . Otherwise every API invocation incurs
* the overhead of opening / closing a TCP connection . */
private boolean exists ( FTPClient client , Path file ) { } }
|
try { return getFileStatus ( client , file ) != null ; } catch ( FileNotFoundException fnfe ) { return false ; } catch ( IOException ioe ) { throw new FTPException ( "Failed to get file status" , ioe ) ; }
|
public class CmsUserDriver { /** * Returns a sql query to select groups . < p >
* @ param mainQuery the main select sql query
* @ param includeSubOus if groups in sub - ous should be included in the selection
* @ param readRoles if groups or roles whould be selected
* @ return a sql query to select groups */
protected String createRoleQuery ( String mainQuery , boolean includeSubOus , boolean readRoles ) { } }
|
String sqlQuery = m_sqlManager . readQuery ( mainQuery ) ; sqlQuery += " " ; if ( includeSubOus ) { sqlQuery += m_sqlManager . readQuery ( "C_GROUPS_GROUP_OU_LIKE_1" ) ; } else { sqlQuery += m_sqlManager . readQuery ( "C_GROUPS_GROUP_OU_EQUALS_1" ) ; } sqlQuery += AND_CONDITION ; if ( readRoles ) { sqlQuery += m_sqlManager . readQuery ( "C_GROUPS_SELECT_ROLES_1" ) ; } else { sqlQuery += m_sqlManager . readQuery ( "C_GROUPS_SELECT_GROUPS_1" ) ; } sqlQuery += " " ; sqlQuery += m_sqlManager . readQuery ( "C_GROUPS_ORDER_0" ) ; return sqlQuery ;
|
public class GetUserAttributeVerificationCodeRequestMarshaller { /** * Marshall the given parameter object .
 * @ param getUserAttributeVerificationCodeRequest the request to marshall ; must not be null
 * @ param protocolMarshaller the marshaller receiving the bound fields
 * @ throws SdkClientException if the request is null or marshalling fails */
public void marshall ( GetUserAttributeVerificationCodeRequest getUserAttributeVerificationCodeRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
// Bind the access token and attribute name; any failure is wrapped in an
// SdkClientException with the original cause preserved.
if ( getUserAttributeVerificationCodeRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getUserAttributeVerificationCodeRequest . getAccessToken ( ) , ACCESSTOKEN_BINDING ) ; protocolMarshaller . marshall ( getUserAttributeVerificationCodeRequest . getAttributeName ( ) , ATTRIBUTENAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class SequenceLabelerME { /** * sets the probs for the spans
* @ param spans
* @ return */
private Span [ ] setProbs ( final Span [ ] spans ) { } }
|
final double [ ] probs = probs ( spans ) ; if ( probs != null ) { for ( int i = 0 ; i < probs . length ; i ++ ) { final double prob = probs [ i ] ; spans [ i ] = new Span ( spans [ i ] , prob ) ; } } return spans ;
|
public class CombinedRuleUserList { /** * Gets the leftOperand value for this CombinedRuleUserList .
 * @ return leftOperand * < span class = " constraint Required " > This field is required and
 * should not be { @ code null } when it is contained within { @ link Operator } s
 * : ADD . < / span > */
public com . google . api . ads . adwords . axis . v201809 . rm . Rule getLeftOperand ( ) { } }
|
// Plain accessor; no defensive copy is made.
return leftOperand ;
|
public class CommandLineParser { /** * unparseTokens .
* @ param args a { @ link java . util . List } object .
* @ param out a { @ link java . lang . StringBuilder } object . */
static public void unparseTokens ( final List < ICmdLineArg < ? > > args , final StringBuilder out ) { } }
|
final Iterator < ICmdLineArg < ? > > aIter = args . iterator ( ) ; boolean first = true ; while ( aIter . hasNext ( ) ) { final ICmdLineArg < ? > arg = aIter . next ( ) ; if ( arg . isParsed ( ) ) { if ( ! first ) out . append ( " " ) ; first = false ; arg . exportCommandLine ( out ) ; } }
|
public class BaseReportGenerator { /** * This method will be called when exceptions are thrown in
 * { @ link # createReportBody ( com . itextpdf . text . Document , com . vectorprint . report . data . ReportDataHolder ) } or
 * { @ link DebugHelper # appendDebugInfo ( com . itextpdf . text . pdf . PdfWriter , com . itextpdf . text . Document , com . vectorprint . configuration . EnhancedMap , com . vectorprint . report . itext . style . StylerFactory ) } .
 * < p > By default ( { @ link ReportConstants # STOPONERROR } true ) the exception is
 * rethrown , wrapped in a { @ link VectorPrintRuntimeException } when needed .
 * When STOPONERROR is false , the stacktrace plus debug info are appended to
 * the pdf on a new landscape A4 page , and the document and writer are
 * closed . < / p >
 * @ param ex the exception being handled
 * @ param output the pdf document output stream
 * @ return { @ code ERRORINREPORT } when the error was appended to the pdf */
protected int handleException ( Exception ex , OutputStream output ) throws VectorPrintRuntimeException { } }
|
// NOTE(review): the 'output' parameter is unused; the stacktrace is written
// through the 'document'/'writer' fields instead - confirm this is intentional.
if ( getSettings ( ) . getBooleanProperty ( Boolean . TRUE , ReportConstants . STOPONERROR ) ) { throw ( ex instanceof VectorPrintRuntimeException ) ? ( VectorPrintRuntimeException ) ex : new VectorPrintRuntimeException ( "failed to generate the report: " + ex . getMessage ( ) , ex ) ; } else { PrintStream out ; ByteArrayOutputStream bo = new ByteArrayOutputStream ( ) ; out = new PrintStream ( bo ) ; ex . printStackTrace ( out ) ; out . close ( ) ; try { Font f = FontFactory . getFont ( FontFactory . COURIER , 8 ) ; f . setColor ( itextHelper . fromColor ( getSettings ( ) . getColorProperty ( Color . MAGENTA , "debugcolor" ) ) ) ; String s = getSettings ( ) . getProperty ( bo . toString ( ) , "renderfault" ) ; eventHelper . setLastPage ( writer . getCurrentPageNumber ( ) ) ; document . setPageSize ( new Rectangle ( ItextHelper . mmToPts ( 297 ) , ItextHelper . mmToPts ( 210 ) ) ) ; document . setMargins ( 5 , 5 , 5 , 5 ) ; document . newPage ( ) ; eventHelper . setFailuresHereAfter ( true ) ; document . add ( new Chunk ( "Below you find information that help solving the problems in this report." , f ) . setLocalDestination ( FAILUREPAGE ) ) ; newLine ( ) ; document . add ( new Paragraph ( new Chunk ( s , f ) ) ) ; document . newPage ( ) ; DebugHelper . appendDebugInfo ( writer , document , settings , stylerFactory ) ; } catch ( VectorPrintException | DocumentException e ) { log . severe ( "Could not append to PDF:\n" + bo . toString ( ) ) ; log . log ( java . util . logging . Level . SEVERE , null , e ) ; } finally { document . close ( ) ; writer . close ( ) ; } } return ERRORINREPORT ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.