signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ExamplePickerTree { /** * Add a set of examples to the WTree . * @ param groupName The name of the example group . * @ param entries An array of examples in this group . */ public void addExamples ( final String groupName , final ExampleData [ ] entries ) { } }
data . add ( new ExampleMenuList ( groupName , entries ) ) ;
public class GameLoginSignUtil {

    /**
     * Verifies the signature of a game login result.
     *
     * @param appId application id, issued via the Huawei developer console
     * @param cpId developer (CP) id, issued via the developer console
     * @param privateKey private key from the developer console; highly sensitive, keep secret
     * @param publicKey public key used to verify the signature of the server response
     * @param userData the user's login result data
     * @param callback receives the verification outcome
     */
    private static void doCheck(String appId, String cpId, String privateKey, String publicKey,
            GameUserData userData, ICheckLoginSignHandler callback) {
        // Build the signed request body from the login data.
        byte[] body = getRequestBody(appId, cpId, privateKey, userData);
        // Send the verification request; the callback is invoked with the result.
        sendRequest(URL, body, publicKey, callback);
    }
}
public class WDataTableRenderer { /** * Paints the column headings for the given table . * @ param table the table to paint the headings for . * @ param renderContext the RenderContext to paint to . */ private void paintColumnHeadings ( final WDataTable table , final WebXmlRenderContext renderContext ) { } }
XmlStringBuilder xml = renderContext . getWriter ( ) ; int [ ] columnOrder = table . getColumnOrder ( ) ; TableDataModel model = table . getDataModel ( ) ; final int columnCount = table . getColumnCount ( ) ; xml . appendTagOpen ( "ui:thead" ) ; xml . appendOptionalAttribute ( "hidden" , ! table . isShowColumnHeaders ( ) , "true" ) ; xml . appendClose ( ) ; if ( table . isShowRowHeaders ( ) ) { paintColumnHeading ( table . getRowHeaderColumn ( ) , false , renderContext ) ; } for ( int i = 0 ; i < columnCount ; i ++ ) { int colIndex = columnOrder == null ? i : columnOrder [ i ] ; WTableColumn col = table . getColumn ( colIndex ) ; if ( col . isVisible ( ) ) { boolean sortable = model . isSortable ( colIndex ) ; paintColumnHeading ( col , sortable , renderContext ) ; } } xml . appendEndTag ( "ui:thead" ) ;
public class LocalSocketUtil { /** * Check whether we can connect to a local Unix socket */ public static boolean canConnectUnixSocket ( File path ) { } }
try ( UnixSocketChannel channel = UnixSocketChannel . open ( ) ) { return channel . connect ( new UnixSocketAddress ( path ) ) ; } catch ( IOException e ) { return false ; }
public class CudaZeroHandler { /** * This method gets called from Allocator , during Allocator / MemoryHandler initialization * @ param configuration * @ param allocator */ @ Override public void init ( @ NonNull Configuration configuration , @ NonNull Allocator allocator ) { } }
this . configuration = configuration ; this . deviceMemoryTracker = new DeviceAllocationsTracker ( this . configuration ) ; this . flowController . init ( allocator ) ;
public class HttpResponseMessageImpl {

    /**
     * Transfers the "ownership" of this message: either a new service context
     * (message moving from an inbound SC to an outbound SC), or null when a
     * channel wants to cache this message without an owner.
     *
     * @param hsc the new owning service context, or null to detach
     */
    public void setOwner(HttpServiceContext hsc) {
        // Release the current owner's claim on this response, if any.
        if (null != getServiceContext()) {
            getServiceContext().setResponseOwner(false);
        }
        // A non-null owner means re-initialise the message flags for the new SC.
        if (null != hsc) {
            super.init(hsc);
            getServiceContext().setResponseOwner(true);
            setIncoming(!getServiceContext().isInboundConnection());
            if (!isIncoming()) {
                // Outgoing responses need header validation checks, unless the
                // configuration has disabled them.
                setHeaderValidation(getServiceContext().getHttpConfig().isHeaderValidationEnabled());
            }
        }
    }
}
public class CommerceShipmentItemUtil { /** * Returns the first commerce shipment item in the ordered set where groupId = & # 63 ; . * @ param groupId the group ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce shipment item , or < code > null < / code > if a matching commerce shipment item could not be found */ public static CommerceShipmentItem fetchByGroupId_First ( long groupId , OrderByComparator < CommerceShipmentItem > orderByComparator ) { } }
return getPersistence ( ) . fetchByGroupId_First ( groupId , orderByComparator ) ;
public class CLI { /** * Connect to the server using a specified host and port . * @ param protocol The protocol * @ param controllerHost The host name . * @ param controllerPort The port . * @ param username The user name for logging in . * @ param password The password for logging in . */ public void connect ( String protocol , String controllerHost , int controllerPort , String username , char [ ] password , String clientBindAddress ) { } }
doConnect ( ( ) -> { return CommandContextFactory . getInstance ( ) . newCommandContext ( new CommandContextConfiguration . Builder ( ) . setController ( constructUri ( protocol , controllerHost , controllerPort ) ) . setUsername ( username ) . setPassword ( password ) . setErrorOnInteract ( false ) . setClientBindAddress ( clientBindAddress ) . build ( ) ) ; } ) ;
public class Iteration { /** * Return the current { @ link Iteration } payload variable name . * @ throws IllegalStateException if there is more than one variable in the { @ link Variables } stack , and the payload name cannot be determined . */ public static String getPayloadVariableName ( GraphRewrite event , EvaluationContext ctx ) throws IllegalStateException { } }
Variables variables = Variables . instance ( event ) ; Map < String , Iterable < ? extends WindupVertexFrame > > topLayer = variables . peek ( ) ; if ( topLayer . keySet ( ) . size ( ) != 1 ) { throw new IllegalStateException ( "Cannot determine Iteration payload variable name because the top " + "layer of " + Variables . class . getSimpleName ( ) + " stack contains " + topLayer . keySet ( ) . size ( ) + " variables: " + topLayer . keySet ( ) ) ; } String name = topLayer . keySet ( ) . iterator ( ) . next ( ) ; return name ;
public class JsonResponse { /** * Switch standard JSON engine to your new - ruled JSON engine . < br > * You can use this when the JSON response of different rule exists . < br > * The specified engine instance should be cached in your application . * @ param noArgLambda The callback for engine of JSON , which is used for JSON response writing . ( NotNull ) * @ return this . ( NotNull ) */ public JsonResponse < RESULT > switchJsonEngine ( Supplier < RealJsonEngine > noArgLambda ) { } }
assertArgumentNotNull ( "noArgLambda" , noArgLambda ) ; jsonEngineSwitcher = noArgLambda ; return this ;
public class Crawler { /** * Try to fire a given event on the Browser . * @ param eventable the eventable to fire * @ return true iff the event is fired */ private boolean fireEvent ( Eventable eventable ) { } }
Eventable eventToFire = eventable ; if ( eventable . getIdentification ( ) . getHow ( ) . toString ( ) . equals ( "xpath" ) && eventable . getRelatedFrame ( ) . equals ( "" ) ) { eventToFire = resolveByXpath ( eventable , eventToFire ) ; } boolean isFired = false ; try { isFired = browser . fireEventAndWait ( eventToFire ) ; } catch ( ElementNotVisibleException | NoSuchElementException e ) { if ( crawlRules . isCrawlHiddenAnchors ( ) && eventToFire . getElement ( ) != null && "A" . equals ( eventToFire . getElement ( ) . getTag ( ) ) ) { isFired = visitAnchorHrefIfPossible ( eventToFire ) ; } else { LOG . debug ( "Ignoring invisible element {}" , eventToFire . getElement ( ) ) ; } } catch ( InterruptedException e ) { LOG . debug ( "Interrupted during fire event" ) ; interruptThread ( ) ; return false ; } LOG . debug ( "Event fired={} for eventable {}" , isFired , eventable ) ; if ( isFired ) { // Let the controller execute its specified wait operation on the browser thread safe . waitConditionChecker . wait ( browser ) ; browser . closeOtherWindows ( ) ; return true ; } else { /* * Execute the OnFireEventFailedPlugins with the current crawlPath with the crawlPath * removed 1 state to represent the path TO here . */ plugins . runOnFireEventFailedPlugins ( context , eventable , crawlpath . immutableCopyWithoutLast ( ) ) ; return false ; // no event fired }
public class ResourceLoader { /** * Get the contents of a URL as an XML Document * @ param requestingClass the java . lang . Class object of the class that is attempting to load the * resource * @ param resource a String describing the full or partial URL of the resource whose contents to * load * @ param validate boolean . True if the document builder factory should validate , false * otherwise . * @ return the actual contents of the resource as an XML Document * @ throws ResourceMissingException * @ throws java . io . IOException * @ throws javax . xml . parsers . ParserConfigurationException * @ throws org . xml . sax . SAXException */ public static Document getResourceAsDocument ( Class < ? > requestingClass , String resource , boolean validate ) throws ResourceMissingException , IOException , ParserConfigurationException , SAXException { } }
Document document = null ; InputStream inputStream = null ; try { DocumentBuilderFactory factoryToUse = null ; if ( validate ) { factoryToUse = ResourceLoader . validatingDocumentBuilderFactory ; } else { factoryToUse = ResourceLoader . nonValidatingDocumentBuilderFactory ; } inputStream = getResourceAsStream ( requestingClass , resource ) ; DocumentBuilder db = factoryToUse . newDocumentBuilder ( ) ; db . setEntityResolver ( new DTDResolver ( ) ) ; db . setErrorHandler ( new SAXErrorHandler ( "ResourceLoader.getResourceAsDocument(" + resource + ")" ) ) ; document = db . parse ( inputStream ) ; } finally { if ( inputStream != null ) inputStream . close ( ) ; } return document ;
public class XsdAsmUtils { /** * Writes a given class to a . class file . * @ param className The class name , needed to name the file . * @ param classWriter The classWriter , which contains all the class information . */ static void writeClassToFile ( String className , ClassWriter classWriter , String apiName ) { } }
classWriter . visitEnd ( ) ; byte [ ] constructedClass = classWriter . toByteArray ( ) ; try ( FileOutputStream os = new FileOutputStream ( new File ( getFinalPathPart ( className , apiName ) ) ) ) { os . write ( constructedClass ) ; } catch ( IOException e ) { throw new AsmException ( "Exception while writing generated classes to the .class files." , e ) ; }
public class MapBasedXPathFunctionResolver { /** * Add a new function . * @ param aName * The qualified name of the function * @ param nArity * The number of parameters of the function * @ param aFunction * The function to be used . May not be < code > null < / code > . * @ return { @ link EChange } */ @ Nonnull public EChange addUniqueFunction ( @ Nonnull final QName aName , @ Nonnegative final int nArity , @ Nonnull final XPathFunction aFunction ) { } }
ValueEnforcer . notNull ( aFunction , "Function" ) ; final XPathFunctionKey aFunctionKey = new XPathFunctionKey ( aName , nArity ) ; if ( m_aMap . containsKey ( aFunctionKey ) ) return EChange . UNCHANGED ; m_aMap . put ( aFunctionKey , aFunction ) ; return EChange . CHANGED ;
public class ThriftCLIService { /** * If the proxy user name is provided then check privileges to substitute the user . * @ param realUser * @ param sessionConf * @ param ipAddress * @ return * @ throws HiveSQLException */ private String getProxyUser ( String realUser , Map < String , String > sessionConf , String ipAddress ) throws HiveSQLException { } }
String proxyUser = null ; // Http transport mode . // We set the thread local proxy username , in ThriftHttpServlet . if ( cliService . getHiveConf ( ) . getVar ( ConfVars . HIVE_SERVER2_TRANSPORT_MODE ) . equalsIgnoreCase ( "http" ) ) { proxyUser = SessionManager . getProxyUserName ( ) ; LOG . debug ( "Proxy user from query string: " + proxyUser ) ; } if ( proxyUser == null && sessionConf != null && sessionConf . containsKey ( HiveAuthFactory . HS2_PROXY_USER ) ) { String proxyUserFromThriftBody = sessionConf . get ( HiveAuthFactory . HS2_PROXY_USER ) ; LOG . debug ( "Proxy user from thrift body: " + proxyUserFromThriftBody ) ; proxyUser = proxyUserFromThriftBody ; } if ( proxyUser == null ) { return realUser ; } // check whether substitution is allowed if ( ! hiveConf . getBoolVar ( HiveConf . ConfVars . HIVE_SERVER2_ALLOW_USER_SUBSTITUTION ) ) { throw new HiveSQLException ( "Proxy user substitution is not allowed" ) ; } // If there ' s no authentication , then directly substitute the user if ( HiveAuthFactory . AuthTypes . NONE . toString ( ) . equalsIgnoreCase ( hiveConf . getVar ( ConfVars . HIVE_SERVER2_AUTHENTICATION ) ) ) { return proxyUser ; } // Verify proxy user privilege of the realUser for the proxyUser HiveAuthFactory . verifyProxyAccess ( realUser , proxyUser , ipAddress , hiveConf ) ; LOG . debug ( "Verified proxy user: " + proxyUser ) ; return proxyUser ;
public class Preconditions {

    /**
     * Ensures that a string reference passed as a parameter to the calling
     * method is neither null nor empty.
     *
     * @param reference a string reference
     * @param errorMessage the exception message to use if the check fails; will
     *            be converted to a string using {@link String#valueOf(Object)}
     * @return the non-null, non-empty reference that was validated
     * @throws NullPointerException if {@code reference} is null
     * @throws IllegalArgumentException if {@code reference} is empty
     */
    public static String checkNotNullNorEmpty(final String reference, final Object errorMessage) {
        // Null and empty are reported with distinct exception types.
        if (reference == null) {
            throw new NullPointerException(String.valueOf(errorMessage));
        }
        if (reference.isEmpty()) {
            throw new IllegalArgumentException(String.valueOf(errorMessage));
        }
        return reference;
    }
}
public class SHA1 { /** * 用SHA1算法生成安全签名 * @ param array 字符串 * @ return 安全签名 * @ throws AesException */ public static String getSHA1 ( String ... array ) throws AesException { } }
StringBuffer sb = new StringBuffer ( ) ; // 字符串排序 Arrays . sort ( array ) ; for ( String item : array ) { sb . append ( item ) ; } return getSHA1 ( sb . toString ( ) ) ;
public class ConnectionDAODefaultImpl { private long doPing ( final Connection connection ) throws DevFailed { } }
checkIfTango ( connection , "ping" ) ; build_connection ( connection ) ; final long t0 = System . currentTimeMillis ( ) ; try { connection . device . ping ( ) ; } catch ( final DevFailed e ) { final String reason = "TangoApi_CANNOT_PING_DEVICE" ; final String desc = "Cannot ping " + connection . devname ; final String origin = "Connection.ping()" ; Except . throw_connection_failed ( e , reason , desc , origin ) ; } catch ( final Exception e ) { ApiUtilDAODefaultImpl . removePendingRepliesOfDevice ( connection ) ; throw_dev_failed ( connection , e , "ping" , false ) ; } final long t1 = System . currentTimeMillis ( ) ; return ( int ) ( t1 - t0 ) * 1000 ; // Set as micro seconds
public class FTPClient { /** * Changes the default client timeout parameters . * In the beginning of the transfer , the critical moment is the wait * for the initial server reply . If it does not arrive after timeout , * client assumes that the transfer could not start for some reason and * aborts the operation . Default timeout in miliseconds * is Session . DEFAULT _ MAX _ WAIT . During the waiting period , * client polls the control channel once a certain period , which is by * default set to Session . DEFAULT _ WAIT _ DELAY . * < br > * Use this method to change these parameters . * @ param maxWait timeout in miliseconds * @ param waitDelay polling period */ public void setClientWaitParams ( int maxWait , int waitDelay ) { } }
if ( maxWait <= 0 || waitDelay <= 0 ) { throw new IllegalArgumentException ( "Parameter is less than 0" ) ; } this . session . maxWait = maxWait ; this . session . waitDelay = waitDelay ;
public class CommerceTierPriceEntryPersistenceImpl { /** * Returns the last commerce tier price entry in the ordered set where uuid = & # 63 ; . * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce tier price entry * @ throws NoSuchTierPriceEntryException if a matching commerce tier price entry could not be found */ @ Override public CommerceTierPriceEntry findByUuid_Last ( String uuid , OrderByComparator < CommerceTierPriceEntry > orderByComparator ) throws NoSuchTierPriceEntryException { } }
CommerceTierPriceEntry commerceTierPriceEntry = fetchByUuid_Last ( uuid , orderByComparator ) ; if ( commerceTierPriceEntry != null ) { return commerceTierPriceEntry ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchTierPriceEntryException ( msg . toString ( ) ) ;
public class ApiOvhIp { /** * Alter this object properties * REST : PUT / ip / { ip } / mitigation / { ipOnMitigation } * @ param body [ required ] New object properties * @ param ip [ required ] * @ param ipOnMitigation [ required ] */ public void ip_mitigation_ipOnMitigation_PUT ( String ip , String ipOnMitigation , OvhMitigationIp body ) throws IOException { } }
String qPath = "/ip/{ip}/mitigation/{ipOnMitigation}" ; StringBuilder sb = path ( qPath , ip , ipOnMitigation ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class CellConstraints { /** * Sets the column , row , width , and height ; uses a height ( row span ) of 1 and the horizontal and * vertical default alignments . < p > * < strong > Examples : < / strong > < pre > * cc . xyw ( 1 , 3 , 7 ) ; * cc . xyw ( 1 , 3 , 2 ) ; * < / pre > * @ param col the new column index * @ param row the new row index * @ param colSpan the column span or grid width * @ return this */ public CellConstraints xyw ( int col , int row , int colSpan ) { } }
return xywh ( col , row , colSpan , 1 , DEFAULT , DEFAULT ) ;
public class SwimMembershipProtocol {

    /**
     * Gossips this node's pending updates with a random subset of peers.
     *
     * @param updates a collection of updates to gossip
     */
    private void gossip(Collection<ImmutableMember> updates) {
        // Snapshot the available peers. If any exist, shuffle the list and
        // gossip with at most "gossip fanout" of them.
        final List<SwimMember> peers = Lists.newArrayList(randomMembers);
        if (peers.isEmpty()) {
            return;
        }
        Collections.shuffle(peers);
        final int fanout = Math.min(peers.size(), config.getGossipFanout());
        for (int i = 0; i < fanout; i++) {
            gossip(peers.get(i), updates);
        }
    }
}
public class CmsResourceUtil { /** * Returns the id of the project in which the given resource is locked . < p > * @ return the id of the project in which the given resource is locked */ public CmsUUID getLockedInProjectId ( ) { } }
CmsUUID lockedInProject = null ; if ( getLock ( ) . isNullLock ( ) && ! getResource ( ) . getState ( ) . isUnchanged ( ) ) { // resource is unlocked and modified lockedInProject = getResource ( ) . getProjectLastModified ( ) ; } else if ( ! getResource ( ) . getState ( ) . isUnchanged ( ) ) { // resource is locked and modified lockedInProject = getProjectId ( ) ; } else if ( ! getLock ( ) . isNullLock ( ) ) { // resource is locked and unchanged lockedInProject = getLock ( ) . getProjectId ( ) ; } return lockedInProject ;
public class ServerImpl {

    /**
     * Sends the id and the name to the client.
     *
     * @param client the client to send to
     * @param id the id to send
     * @param name the name to send
     * @throws IOException in case of error
     */
    private static void writeIdAndName(ClientSocket client, int id, String name) throws IOException {
        // New client id.
        client.getOut().writeByte(id);
        // The name travels length-prefixed, encoded with the protocol charset.
        final byte[] encodedName = name.getBytes(NetworkMessage.CHARSET);
        client.getOut().writeByte(encodedName.length);
        client.getOut().write(encodedName);
    }
}
public class Retryer { /** * Sets the wait strategy which sleeps for an exponential amount of time after the first failed attempt * @ see Retryer # exponentialWait ( long , long ) * @ param multiplier * @ param maximumTime * @ param maximumUnit * @ return */ public Retryer < R > exponentialWait ( long multiplier , long maximumTime , TimeUnit maximumUnit ) { } }
return exponentialWait ( multiplier , checkNotNull ( maximumUnit ) . toMillis ( maximumTime ) ) ;
public class CommerceRegionLocalServiceBaseImpl { /** * Returns a range of all the commerce regions . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . model . impl . CommerceRegionModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of commerce regions * @ param end the upper bound of the range of commerce regions ( not inclusive ) * @ return the range of commerce regions */ @ Override public List < CommerceRegion > getCommerceRegions ( int start , int end ) { } }
return commerceRegionPersistence . findAll ( start , end ) ;
public class Client { /** * Revokes an access token and refresh token pair . * @ throws OAuthSystemException - if there is a IOException reading parameters of the httpURLConnection * @ throws OAuthProblemException - if there are errors validating the OneloginOAuthJSONAccessTokenResponse and throwOAuthProblemException is enabled * @ see < a target = " _ blank " href = " https : / / developers . onelogin . com / api - docs / 1 / oauth20 - tokens / revoke - tokens " > Revoke Tokens documentation < / a > */ public void revokeToken ( ) throws OAuthSystemException , OAuthProblemException { } }
cleanError ( ) ; if ( accessToken == null ) { throw new OAuthRuntimeException ( "Access token not provided" ) ; } OneloginURLConnectionClient httpClient = new OneloginURLConnectionClient ( ) ; OAuthClientRequest request = OAuthClientRequest . tokenLocation ( settings . getURL ( Constants . TOKEN_REVOKE_URL ) ) . buildBodyMessage ( ) ; Map < String , String > headers = getAuthorizedHeader ( false ) ; Map < String , Object > params = new HashMap < String , Object > ( ) ; params . put ( "access_token" , accessToken ) ; String body = JSONUtils . buildJSON ( params ) ; request . setBody ( body ) ; OneloginOAuthJSONAccessTokenResponse oAuthResponse = ( OneloginOAuthJSONAccessTokenResponse ) httpClient . execute ( request , headers , OAuth . HttpMethod . POST , OneloginOAuthJSONAccessTokenResponse . class ) ; if ( oAuthResponse . getResponseCode ( ) == 200 ) { accessToken = null ; refreshToken = null ; expiration = null ; } else { error = oAuthResponse . getError ( ) ; errorDescription = oAuthResponse . getErrorDescription ( ) ; }
public class Config { /** * Determines if the given propery is in the map * @ param property The property to search for * @ return True if the property exists and has a value , not an empty string */ public final boolean hasProperty ( final String property ) { } }
final String val = properties . get ( property ) ; if ( val == null ) return false ; if ( val . isEmpty ( ) ) return false ; return true ;
public class BatchScheduleActionDeleteRequest { /** * A list of schedule actions to delete . * @ param actionNames * A list of schedule actions to delete . */ public void setActionNames ( java . util . Collection < String > actionNames ) { } }
if ( actionNames == null ) { this . actionNames = null ; return ; } this . actionNames = new java . util . ArrayList < String > ( actionNames ) ;
public class InternalSARLLexer { /** * $ ANTLR start " T _ _ 130" */ public final void mT__130 ( ) throws RecognitionException { } }
try { int _type = T__130 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // InternalSARL . g : 116:8 : ( ' while ' ) // InternalSARL . g : 116:10 : ' while ' { match ( "while" ) ; } state . type = _type ; state . channel = _channel ; } finally { }
public class StorableIndex { /** * Returns a StorableIndex instance which is unique or not . */ public StorableIndex < S > unique ( boolean unique ) { } }
if ( unique == mUnique ) { return this ; } return new StorableIndex < S > ( mProperties , mDirections , unique , mClustered , false ) ;
public class ZoneRules { /** * Gets the standard offset for the specified instant in this zone . * This provides access to historic information on how the standard offset * has changed over time . * The standard offset is the offset before any daylight saving time is applied . * This is typically the offset applicable during winter . * @ param instant the instant to find the offset information for , not null , but null * may be ignored if the rules have a single offset for all instants * @ return the standard offset , not null */ public ZoneOffset getStandardOffset ( Instant instant ) { } }
if ( savingsInstantTransitions . length == 0 ) { return standardOffsets [ 0 ] ; } long epochSec = instant . getEpochSecond ( ) ; int index = Arrays . binarySearch ( standardTransitions , epochSec ) ; if ( index < 0 ) { // switch negative insert position to start of matched range index = - index - 2 ; } return standardOffsets [ index + 1 ] ;
public class GameContainer { /** * Get the build number of slick * @ return The build number of slick */ public static int getBuildVersion ( ) { } }
try { Properties props = new Properties ( ) ; props . load ( ResourceLoader . getResourceAsStream ( "version" ) ) ; int build = Integer . parseInt ( props . getProperty ( "build" ) ) ; Log . info ( "Slick Build #" + build ) ; return build ; } catch ( Exception e ) { Log . error ( "Unable to determine Slick build number" ) ; return - 1 ; }
public class LangType { /** * Sets the value of the provided property to the provided value . * @ param propName * allowed object is { @ link String } * @ param value * allowed object is { @ link Object } */ public void set ( String propName , Object value ) { } }
if ( propName . equals ( "value" ) ) { setValue ( ( ( String ) value ) ) ; } if ( propName . equals ( "lang" ) ) { setLang ( ( ( String ) value ) ) ; }
public class SourceSnippets { /** * [ START get _ source ] */ static Source getSource ( SourceName sourceName ) { } }
try ( SecurityCenterClient client = SecurityCenterClient . create ( ) ) { // Start setting up a request to get a source . // SourceName sourceName = SourceName . of ( / * organization = * / " 123234324 " , / * source = * / // "423432321 " ) ; GetSourceRequest . Builder request = GetSourceRequest . newBuilder ( ) . setName ( sourceName . toString ( ) ) ; // Call the API . Source response = client . getSource ( request . build ( ) ) ; System . out . println ( "Source: " + response ) ; return response ; } catch ( IOException e ) { throw new RuntimeException ( "Couldn't create client." , e ) ; }
public class VLDockingBeanPostProcessor { /** * { @ inheritDoc } * Replaces those view descriptors not implementing { @ link VLDockingViewDescriptor } . */ @ Override public Object postProcessAfterInitialization ( Object bean , String beanName ) { } }
// throws BeansException { if ( bean instanceof VLDockingViewDescriptor ) { return bean ; } else if ( bean instanceof ViewDescriptor ) { final ViewDescriptor sourceViewDescriptor = ( ViewDescriptor ) bean ; final ViewDescriptor targetViewDescriptor = this . getTemplate ( sourceViewDescriptor ) ; // Copy source state ObjectUtils . shallowCopy ( sourceViewDescriptor , targetViewDescriptor ) ; return targetViewDescriptor ; } return bean ;
public class CmsWorkplaceAppManager { /** * Returns the quick launch apps set for the current user . < p > * @ param cms the cms context * @ return the quick launch app configurations */ protected List < I_CmsWorkplaceAppConfiguration > getUserQuickLauchConfigurations ( CmsObject cms ) { } }
String apps_info = ( String ) cms . getRequestContext ( ) . getCurrentUser ( ) . getAdditionalInfo ( QUICK_LAUCH_APPS_KEY ) ; String [ ] appIds = null ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( apps_info ) ) { try { JSONArray ids = new JSONArray ( apps_info ) ; appIds = new String [ ids . length ( ) ] ; for ( int i = 0 ; i < appIds . length ; i ++ ) { appIds [ i ] = ids . getString ( i ) ; } } catch ( JSONException e ) { LOG . error ( "Error parsing user quick launch apps setting." , e ) ; appIds = null ; } } return getAppConfigurations ( appIds != null ? appIds : DEFAULT_USER_APPS ) ;
public class OrderUtils { /** * Return the order on the specified { @ code type } , or the specified * default value if none can be found . * < p > Take care of { @ link Order @ Order } and { @ code @ javax . annotation . Priority } . * @ param type the type to handle * @ return the priority value , or the specified default order if none can be found */ public static Integer getOrder ( Class < ? > type , Integer defaultOrder ) { } }
Order order = AnnotationUtils . findAnnotation ( type , Order . class ) ; if ( order != null ) { return order . value ( ) ; } Integer priorityOrder = getPriority ( type ) ; if ( priorityOrder != null ) { return priorityOrder ; } return defaultOrder ;
public class TableReader { /** * Read the table from an input stream and populate the supplied Table instance . * @ param is input stream from table file * @ param table Table instance */ private void read ( InputStream is , Table table ) throws IOException { } }
byte [ ] buffer = new byte [ m_definition . getPageSize ( ) ] ; while ( true ) { int bytesRead = is . read ( buffer ) ; if ( bytesRead == - 1 ) { break ; } if ( bytesRead != buffer . length ) { throw new IOException ( "Unexpected end of file" ) ; } readPage ( buffer , table ) ; }
public class PropertyUtil { /** * Returns the requested getter method from an object instance . * @ param methodName Name of the getter method . * @ param instance Object instance to search . * @ param valueClass The return value type ( null if don ' t care ) . * @ return The getter method . * @ throws NoSuchMethodException If method was not found . */ public static Method findGetter ( String methodName , Object instance , Class < ? > valueClass ) throws NoSuchMethodException { } }
return findMethod ( methodName , instance , valueClass , false ) ;
public class JsonReader { /** * Returns the { @ link com . google . gson . stream . JsonToken # STRING string } value of the next token , * consuming it . If the next token is a number , this method will return its * string form . * @ throws IllegalStateException if the next token is not a string or if * this reader is closed . */ public String nextString ( ) throws IOException { } }
int p = peeked ; if ( p == PEEKED_NONE ) { p = doPeek ( ) ; } String result ; if ( p == PEEKED_UNQUOTED ) { result = nextUnquotedValue ( ) ; } else if ( p == PEEKED_SINGLE_QUOTED ) { result = nextQuotedValue ( '\'' ) ; } else if ( p == PEEKED_DOUBLE_QUOTED ) { result = nextQuotedValue ( '"' ) ; } else if ( p == PEEKED_BUFFERED ) { result = peekedString ; peekedString = null ; } else if ( p == PEEKED_LONG ) { result = Long . toString ( peekedLong ) ; } else if ( p == PEEKED_NUMBER ) { result = new String ( buffer , pos , peekedNumberLength ) ; pos += peekedNumberLength ; } else { throw new IllegalStateException ( "Expected a string but was " + peek ( ) + locationString ( ) ) ; } peeked = PEEKED_NONE ; pathIndices [ stackSize - 1 ] ++ ; return result ;
public class ArrayUtil { /** * 去除 { @ code null } 或者 " " 元素 * @ param array 数组 * @ return 处理后的数组 * @ since 3.2.2 */ public static < T extends CharSequence > T [ ] removeEmpty ( T [ ] array ) { } }
return filter ( array , new Filter < T > ( ) { @ Override public boolean accept ( T t ) { return false == StrUtil . isEmpty ( t ) ; } } ) ;
public class CreateHITWithHITTypeRequest { /** * If the HITLayoutId is provided , any placeholder values must be filled in with values using the HITLayoutParameter * structure . For more information , see HITLayout . * @ param hITLayoutParameters * If the HITLayoutId is provided , any placeholder values must be filled in with values using the * HITLayoutParameter structure . For more information , see HITLayout . */ public void setHITLayoutParameters ( java . util . Collection < HITLayoutParameter > hITLayoutParameters ) { } }
if ( hITLayoutParameters == null ) { this . hITLayoutParameters = null ; return ; } this . hITLayoutParameters = new java . util . ArrayList < HITLayoutParameter > ( hITLayoutParameters ) ;
public class OjbTagsHandler { /** * Generates a torque schema for the model . * @ param attributes The attributes of the tag * @ return The property value * @ exception XDocletException If an error occurs * @ doc . tag type = " content " */ public String createTorqueSchema ( Properties attributes ) throws XDocletException { } }
String dbName = ( String ) getDocletContext ( ) . getConfigParam ( CONFIG_PARAM_DATABASENAME ) ; _torqueModel = new TorqueModelDef ( dbName , _model ) ; return "" ;
public class CarrierRefresher {
    /**
     * Helper method to fetch a config from a specific node of the cluster.
     *
     * @param bucketName the name of the bucket.
     * @param hostname the hostname of the node to fetch from.
     * @return a raw configuration or an error.
     */
    private Observable<ProposedBucketConfigContext> refreshAgainstNode(final String bucketName, final NetworkAddress hostname) {
        // Defer so the request is only dispatched on subscription, and wrap the
        // stream so the response buffer is auto-released if nobody consumes it.
        return Buffers.wrapColdWithAutoRelease(Observable.defer(new Func0<Observable<GetBucketConfigResponse>>() {
            @Override
            public Observable<GetBucketConfigResponse> call() {
                return cluster().send(new GetBucketConfigRequest(bucketName, hostname));
            }
        })).doOnNext(new Action1<GetBucketConfigResponse>() {
            @Override
            public void call(GetBucketConfigResponse response) {
                if (!response.status().isSuccess()) {
                    // Release the reference-counted content before erroring out
                    // to avoid a buffer leak on the failure path.
                    if (response.content() != null && response.content().refCnt() > 0) {
                        response.content().release();
                    }
                    throw new ConfigurationException("Could not fetch config from node: " + response);
                }
            }
        }).map(new Func1<GetBucketConfigResponse, ProposedBucketConfigContext>() {
            @Override
            public ProposedBucketConfigContext call(GetBucketConfigResponse response) {
                String raw = response.content().toString(CharsetUtil.UTF_8).trim();
                // Content has been copied into the string; release the buffer.
                if (response.content().refCnt() > 0) {
                    response.content().release();
                }
                // The server may use $HOST as a placeholder for its own address.
                raw = raw.replace("$HOST", response.hostname().address());
                return new ProposedBucketConfigContext(bucketName, raw, hostname);
            }
        }).doOnError(new Action1<Throwable>() {
            @Override
            public void call(Throwable ex) {
                // Best-effort fetch: failures are only logged at debug level;
                // the caller decides how to react to the error.
                LOGGER.debug("Could not fetch config from bucket \"" + bucketName + "\" against \"" + hostname + "\".", ex);
            }
        });
    }
}
public class NetworkConfig {
    /**
     * Reconstructs an existing channel from its JSON description: creates the
     * channel on the client, then attaches every configured orderer and peer.
     * At least one peer entry is required.
     */
    private Channel reconstructChannel(HFClient client, String channelName, JsonObject jsonChannel, NetworkConfigAddPeerHandler networkConfigAddPeerHandler, NetworkConfigAddOrdererHandler networkConfigAddOrdererHandler) throws NetworkConfigurationException {
        Channel channel = null;
        try {
            channel = client.newChannel(channelName);
            // "orderers" is an array of orderer name strings; each name must
            // resolve against the orderers previously parsed from the config.
            JsonArray ordererNames = getJsonValueAsArray(jsonChannel.get("orderers"));
            if (ordererNames != null) {
                for (JsonValue jsonVal : ordererNames) {
                    String ordererName = getJsonValueAsString(jsonVal);
                    Node node = orderers.get(ordererName);
                    if (null == node) {
                        throw new NetworkConfigurationException(format("Error constructing channel %s. Orderer %s not defined in configuration", channelName, ordererName));
                    }
                    logger.debug(format("Channel %s, adding orderer %s, url: %s", channel.getName(), ordererName, node.url));
                    // Clone the properties so per-channel handlers cannot mutate
                    // the shared node configuration.
                    Properties nodeProps = node.properties;
                    if (null != nodeProps) {
                        nodeProps = (Properties) nodeProps.clone();
                    }
                    networkConfigAddOrdererHandler.addOrderer(this, client, channel, ordererName, node.url, nodeProps, node.jsonObject);
                }
            }
            // "peers" is an object containing a nested object for each peer.
            JsonObject jsonPeers = getJsonObject(jsonChannel, "peers");
            boolean foundPeer = false;
            if (jsonPeers != null) {
                for (Entry<String, JsonValue> entry : jsonPeers.entrySet()) {
                    String peerName = entry.getKey();
                    if (logger.isTraceEnabled()) {
                        logger.trace(format("NetworkConfig.reconstructChannel: Processing peer %s", peerName));
                    }
                    JsonObject jsonPeer = getJsonValueAsObject(entry.getValue());
                    if (jsonPeer == null) {
                        throw new NetworkConfigurationException(format("Error constructing channel %s. Invalid peer entry: %s", channelName, peerName));
                    }
                    Node node = peers.get(peerName);
                    if (node == null) {
                        throw new NetworkConfigurationException(format("Error constructing channel %s. Peer %s not defined in configuration", channelName, peerName));
                    }
                    // Apply every peer role (endorser, event source, ...) from
                    // the peer's JSON entry onto the options object.
                    PeerOptions peerOptions = PeerOptions.createPeerOptions();
                    for (PeerRole peerRole : PeerRole.values()) {
                        setPeerRole(channelName, peerOptions, jsonPeer, peerRole);
                    }
                    logger.debug(format("Channel %s, adding peer %s, url: %s", channel.getName(), peerName, node.url));
                    // Clone shared properties before handing them to the handler.
                    Properties nodeProps = node.properties;
                    if (null != nodeProps) {
                        nodeProps = (Properties) nodeProps.clone();
                    }
                    networkConfigAddPeerHandler.addPeer(this, client, channel, peerName, node.url, nodeProps, peerOptions, node.jsonObject);
                    foundPeer = true;
                }
            }
            if (!foundPeer) {
                // peers is a required field
                throw new NetworkConfigurationException(format("Error constructing channel %s. At least one peer must be specified", channelName));
            }
        } catch (InvalidArgumentException e) {
            // NOTE(review): an SDK InvalidArgumentException is rethrown unchecked
            // rather than as NetworkConfigurationException — confirm intentional.
            throw new IllegalArgumentException(e);
        }
        return channel;
    }
}
public class BlockdevRef { @ Nonnull public static BlockdevRef definition ( @ Nonnull BlockdevOptions definition ) { } }
BlockdevRef self = new BlockdevRef ( ) ; self . definition = definition ; return self ;
public class DataSet {
    /**
     * Output this data set as an AngelCode XML data file.
     *
     * @param out The output stream to write to
     * @param imageName The name of the image
     * @throws IOException Indicates a failure to build the XML
     */
    public void toAngelCodeXML(PrintStream out, String imageName) throws IOException {
        try {
            DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
            Document document = builder.newDocument();
            Element root = document.createElement("font");
            document.appendChild(root);
            // <info>: font metadata; bold/italic/smoothing are fixed defaults here.
            Element info = document.createElement("info");
            info.setAttribute("face", fontName);
            info.setAttribute("size", "" + size);
            info.setAttribute("bold", "0");
            info.setAttribute("italic", "0");
            info.setAttribute("charSet", setName);
            info.setAttribute("unicode", "" + 0);
            info.setAttribute("stretchH", "" + 100);
            info.setAttribute("smooth", "" + 0);
            info.setAttribute("aa", "" + 0);
            info.setAttribute("padding", "0,0,0,0");
            info.setAttribute("spacing", "0,0");
            root.appendChild(info);
            // <common>: shared layout values for the whole font.
            Element common = document.createElement("common");
            common.setAttribute("lineHeight", "" + lineHeight);
            common.setAttribute("base", "0");
            common.setAttribute("scaleW", "" + width);
            common.setAttribute("scaleH", "" + height);
            common.setAttribute("pages", "1");
            common.setAttribute("packed", "0");
            root.appendChild(common);
            // A single texture page referencing the supplied image file.
            Element pages = document.createElement("pages");
            Element page = document.createElement("page");
            page.setAttribute("id", "0");
            page.setAttribute("file", imageName);
            root.appendChild(pages);
            pages.appendChild(page);
            // <chars>: one element per glyph in the data set.
            Element charsElement = document.createElement("chars");
            charsElement.setAttribute("count", "" + chars.size());
            root.appendChild(charsElement);
            for (int i = 0; i < chars.size(); i++) {
                CharData c = (CharData) chars.get(i);
                Element charElement = document.createElement("char");
                charElement.setAttribute("id", "" + c.getID());
                charElement.setAttribute("x", "" + c.getX());
                charElement.setAttribute("y", "" + c.getY());
                charElement.setAttribute("width", "" + c.getWidth());
                charElement.setAttribute("height", "" + c.getHeight());
                charElement.setAttribute("xoffset", "0");
                charElement.setAttribute("yoffset", "" + c.getYOffset());
                charElement.setAttribute("xadvance", "" + c.getXAdvance());
                charElement.setAttribute("page", "0");
                charElement.setAttribute("chnl", "0");
                charsElement.appendChild(charElement);
            }
            // <kernings>: one element per kerning pair.
            Element kernsElement = document.createElement("kernings");
            kernsElement.setAttribute("count", "" + kerning.size());
            root.appendChild(kernsElement);
            for (int i = 0; i < kerning.size(); i++) {
                KerningData k = (KerningData) kerning.get(i);
                Element kernElement = document.createElement("kerning");
                kernElement.setAttribute("first", "" + k.first);
                kernElement.setAttribute("second", "" + k.second);
                kernElement.setAttribute("amount", "" + k.offset);
                kernsElement.appendChild(kernElement);
            }
            // Serialize the DOM to the stream as indented UTF-8 XML.
            // NOTE(review): "indent-number" is implementation-specific and may be
            // rejected by some TransformerFactory implementations — confirm.
            Result result = new StreamResult(new OutputStreamWriter(out, "utf-8"));
            DOMSource source = new DOMSource(document);
            TransformerFactory factory = TransformerFactory.newInstance();
            factory.setAttribute("indent-number", new Integer(2));
            Transformer xformer = factory.newTransformer();
            xformer.setOutputProperty(OutputKeys.INDENT, "yes");
            xformer.transform(source, result);
        } catch (Exception e) {
            // Wrap any XML-building failure in an IOException, preserving the cause.
            e.printStackTrace();
            IOException x = new IOException();
            x.initCause(e);
            throw x;
        }
    }
}
public class Record {
    /**
     * Accessor method used to retrieve a ProjectTimeFormat representing the
     * contents of an individual field.
     * NOTE(review): contrary to the original javadoc, this never returns null —
     * when the field is absent or empty the TWELVE_HOUR default is returned.
     *
     * @param field the index number of the field to be retrieved
     * @return the value of the required field, or ProjectTimeFormat.TWELVE_HOUR
     *         when the field is missing or empty
     */
    public ProjectTimeFormat getTimeFormat(int field) {
        ProjectTimeFormat result;
        if ((field < m_fields.length) && (m_fields[field].length() != 0)) {
            // Field values are stored as numeric strings mapped to format constants.
            result = ProjectTimeFormat.getInstance(Integer.parseInt(m_fields[field]));
        } else {
            result = ProjectTimeFormat.TWELVE_HOUR;
        }
        return (result);
    }
}
public class MemberFilterActionGroup { /** * Restores the state of the filter actions from a memento . * Note : This method does not refresh the viewer . * @ param memento the memento from which the state is restored */ public void restoreState ( IMemento memento ) { } }
setMemberFilters ( new int [ ] { FILTER_FIELDS , FILTER_STATIC , FILTER_NONPUBLIC , FILTER_LOCALTYPES } , new boolean [ ] { Boolean . valueOf ( memento . getString ( TAG_HIDEFIELDS ) ) . booleanValue ( ) , Boolean . valueOf ( memento . getString ( TAG_HIDESTATIC ) ) . booleanValue ( ) , Boolean . valueOf ( memento . getString ( TAG_HIDENONPUBLIC ) ) . booleanValue ( ) , Boolean . valueOf ( memento . getString ( TAG_HIDELOCALTYPES ) ) . booleanValue ( ) } , false ) ;
public class Utils {
    /**
     * Breaks the string into lines, wrapping once a line reaches "columns"
     * characters. Existing newlines are preserved as hard breaks, and a single
     * trailing punctuation character is kept on the line it follows.
     *
     * @param s the string to process
     * @param columns the width in columns
     * @return the processed string, one array element per output line
     */
    public static String[] breakUp(String s, int columns) {
        final String punctuation = " .,;:!?'\"";
        List<String> out = new ArrayList<String>();
        for (String paragraph : s.split("\n")) {
            BreakIterator words = BreakIterator.getWordInstance();
            words.setText(paragraph);
            int tokenStart = words.first();
            int tokenEnd = words.next();
            String current = "";
            while (tokenEnd != BreakIterator.DONE) {
                String token = paragraph.substring(tokenStart, tokenEnd);
                if (current.length() >= columns) {
                    // Let a lone punctuation character ride along on the full line
                    // instead of starting the next one.
                    if (token.length() == 1 && punctuation.indexOf(token.charAt(0)) > -1) {
                        current += token;
                        token = "";
                    }
                    out.add(current);
                    current = "";
                }
                current += token;
                tokenStart = tokenEnd;
                tokenEnd = words.next();
            }
            if (current.length() > 0) {
                out.add(current);
            }
        }
        return out.toArray(new String[out.size()]);
    }
}
public class MFVec2f { /** * Write out the current value of this field into the external valueDestination array . * @ param valueDestination - where all the SFVec2f object values are returned * in a single - dimentional array */ public void getValue ( float [ ] valueDestination ) { } }
valueDestination = new float [ size ( ) * 2 ] ; for ( int i = 0 ; i < size ( ) ; i ++ ) { SFVec2f sfVec2f = value . get ( i ) ; valueDestination [ i * 3 ] = sfVec2f . x ; valueDestination [ i * 3 + 1 ] = sfVec2f . y ; }
public class AESUtil { /** * Generate key . * @ return the byte [ ] * @ throws GeneralSecurityException the general security exception */ public static byte [ ] generateKey ( ) throws GeneralSecurityException { } }
KeyGenerator keyGenerator = KeyGenerator . getInstance ( KEY_ALGORITHM ) ; // java max 128 keyGenerator . init ( 128 ) ; SecretKey secretKey = keyGenerator . generateKey ( ) ; return secretKey . getEncoded ( ) ;
public class TextLoader { /** * Load a text from the specified file and put it in the provided StringBuffer . * @ param source source file . * @ param buffer buffer to load text into . * @ return the buffer . * @ throws IOException if there is a problem to deal with . */ public StringBuffer append ( File source , StringBuffer buffer ) throws IOException { } }
return append ( new FileInputStream ( source ) , buffer ) ;
public class LightWeightLinkedSet {
    /**
     * Remove all elements from the set and return them in order. Traverses the
     * linked list directly rather than going through the hashtable — a faster
     * version of the parent method.
     *
     * @param retList put elements into the given list
     */
    public void pollAllToList(List<T> retList) {
        // Walk the insertion-ordered chain, appending each element; head is
        // advanced as we go, emptying the chain.
        while (head != null) {
            retList.add(head.element);
            head = head.after;
        }
        // clear() resets the remaining internal bookkeeping in one shot.
        this.clear();
    }
}
public class AbstractModuleStarter {
    /**
     * Define a new component interface definition.
     *
     * @param interfaceClass the interface class implemented
     * @param exclusive the exclusive flag
     * @param reverse the reverse flag
     */
    protected void define(final Class<? extends Component<?>> interfaceClass, final boolean exclusive, final boolean reverse) {
        // Force the interface class to load before registering it — presumably
        // so static initialization happens up front; TODO confirm.
        preloadClass(interfaceClass);
        // Build the registration point via the fluent builder and hand it to
        // the component factory.
        getFacade().componentFactory().define(RegistrationPointItemBase.create().interfaceClass(interfaceClass).exclusive(exclusive).reverse(reverse));
    }
}
public class Base64 {
    /**
     * Encodes a raw byte array into a BASE64 <code>char[]</code> representation in accordance with RFC 2045.
     * When the <code>urlsafe</code> field is set, trailing '=' padding is omitted and
     * the returned array is shortened accordingly.
     *
     * @param sArr The bytes to convert. If <code>null</code> or length 0 an empty array will be returned.
     * @param lineSep Optional "\r\n" after 76 characters, unless end of file.<br>
     *        No line separator will be in breach of RFC 2045 which specifies max 76 per line but will be a
     *        little faster.
     * @return A BASE64 encoded array. Never <code>null</code>.
     */
    private char[] encodeToChar(byte[] sArr, boolean lineSep) {
        // Check special case
        int sLen = sArr != null ? sArr.length : 0;
        if (sLen == 0) {
            return new char[0];
        }
        int eLen = (sLen / 3) * 3; // # of bytes that can encode evenly into 24-bit chunks
        int left = sLen - eLen; // # of bytes that remain after 24-bit chunking. Always 0, 1 or 2
        int cCnt = (((sLen - 1) / 3 + 1) << 2); // # of base64-encoded characters including padding
        int dLen = cCnt + (lineSep ? (cCnt - 1) / 76 << 1 : 0); // Length of returned char array with padding and any line separators
        int padCount = 0;
        if (left == 2) {
            padCount = 1;
        } else if (left == 1) {
            padCount = 2;
        }
        // URL-safe output drops the '=' padding, so the array is shorter.
        char[] dArr = new char[urlsafe ? (dLen - padCount) : dLen];
        // Encode even 24-bits
        for (int s = 0, d = 0, cc = 0; s < eLen; ) {
            // Copy next three bytes into lower 24 bits of int, paying attention to sign.
            int i = (sArr[s++] & 0xff) << 16 | (sArr[s++] & 0xff) << 8 | (sArr[s++] & 0xff);
            // Encode the int into four chars
            dArr[d++] = ALPHABET[(i >>> 18) & 0x3f];
            dArr[d++] = ALPHABET[(i >>> 12) & 0x3f];
            dArr[d++] = ALPHABET[(i >>> 6) & 0x3f];
            dArr[d++] = ALPHABET[i & 0x3f];
            // Add optional line separator: 19 groups of 4 chars = 76 columns.
            if (lineSep && ++cc == 19 && d < dLen - 2) {
                dArr[d++] = '\r';
                dArr[d++] = '\n';
                cc = 0;
            }
        }
        // Pad and encode last bits if source isn't even 24 bits.
        if (left > 0) {
            // Pack the 1 or 2 leftover bytes into the top of a 12+ bit value.
            int i = ((sArr[eLen] & 0xff) << 10) | (left == 2 ? ((sArr[sLen - 1] & 0xff) << 2) : 0);
            // Set last four chars
            dArr[dLen - 4] = ALPHABET[i >> 12];
            dArr[dLen - 3] = ALPHABET[(i >>> 6) & 0x3f];
            if (left == 2) {
                dArr[dLen - 2] = ALPHABET[i & 0x3f];
            } else if (!urlsafe) {
                // if not urlsafe, we need to include the padding characters
                dArr[dLen - 2] = '=';
            }
            if (!urlsafe) {
                // include padding
                dArr[dLen - 1] = '=';
            }
        }
        return dArr;
    }
}
public class RealVoltDB {
    /**
     * Obtain the cluster topology. For elastic joins it comes from the join
     * coordinator; for rejoins it is read from ZK. Otherwise, try to do the
     * write/read race to ZK on startup, computing a fresh topology (with any
     * missing hosts stubbed in) if none exists yet.
     */
    private AbstractTopology getTopology(StartAction startAction, Map<Integer, HostInfo> hostInfos, JoinCoordinator joinCoordinator) {
        AbstractTopology topology = null;
        if (startAction == StartAction.JOIN) {
            // Elastic join: the coordinator has already computed the topology.
            assert (joinCoordinator != null);
            topology = joinCoordinator.getTopology();
        } else if (startAction.doesRejoin()) {
            // Rejoin: the topology must already exist in ZK.
            topology = TopologyZKUtils.readTopologyFromZK(m_messenger.getZK());
        } else {
            // Initial start or recover: first try to read an existing topology.
            try {
                return TopologyZKUtils.readTopologyFromZK(m_messenger.getZK(), null);
            } catch (KeeperException.NoNodeException e) {
                // Expected on a fresh start: fall through and create it below.
                hostLog.debug("Topology doesn't exist yet try to create it");
            } catch (KeeperException | InterruptedException | JSONException e) {
                VoltDB.crashLocalVoltDB("Unable to read topology from ZK, dying", true, e);
            }
            // initial start or recover
            int hostcount = m_clusterSettings.get().hostcount();
            if (hostInfos.size() != (hostcount - m_config.m_missingHostCount)) {
                VoltDB.crashLocalVoltDB("The total number of live and missing hosts must be the same as the cluster host count", false, null);
            }
            int kfactor = getKFactor();
            if (kfactor == 0 && m_config.m_missingHostCount > 0) {
                VoltDB.crashLocalVoltDB("A cluster with 0 kfactor can not be started with missing nodes ", false, null);
            }
            if (hostcount <= kfactor) {
                VoltDB.crashLocalVoltDB("Not enough nodes to ensure K-Safety.", false, null);
            }
            // Missing hosts can't be more than number of partition groups times k-factor.
            int partitionGroupCount = m_clusterSettings.get().hostcount() / (kfactor + 1);
            if (m_config.m_missingHostCount > (partitionGroupCount * kfactor)) {
                VoltDB.crashLocalVoltDB("Too many nodes are missing at startup. This cluster only allow up to " + (partitionGroupCount * kfactor) + " missing hosts.");
            }
            // Startup or recover a cluster with missing nodes: make up the missing
            // hosts to fool the topology. The topology will contain hosts which are
            // marked as missing; the missing hosts will not host any master
            // partitions. At least one partition replica must be on the live hosts
            // (not missing), otherwise the cluster will not be started up.
            // LeaderAppointer will ignore these hosts during startup.
            int sph = hostInfos.values().iterator().next().m_localSitesCount;
            int missingHostId = Integer.MAX_VALUE;
            Set<Integer> missingHosts = Sets.newHashSet();
            for (int i = 0; i < m_config.m_missingHostCount; i++) {
                // Synthetic host ids count down from Integer.MAX_VALUE so they
                // cannot collide with real host ids.
                hostInfos.put(missingHostId, new HostInfo("", AbstractTopology.PLACEMENT_GROUP_DEFAULT, sph, ""));
                missingHosts.add(missingHostId--);
            }
            int totalSites = sph * hostcount;
            if (totalSites % (kfactor + 1) != 0) {
                VoltDB.crashLocalVoltDB("Total number of sites is not divisible by the number of partitions.", false, null);
            }
            topology = AbstractTopology.getTopology(hostInfos, missingHosts, kfactor, (m_config.m_restorePlacement && m_config.m_startAction.doesRecover()));
            String err;
            if ((err = topology.validateLayout(m_messenger.getLiveHostIds())) != null) {
                // Sub-optimal placement is a warning, not fatal.
                hostLog.warn("Unable to find optimal placement layout. " + err);
                hostLog.warn("When using placement groups, follow two rules to get better cluster availability:\n" + " 1. Each placement group must have the same number of nodes, and\n" + " 2. The number of partition replicas (kfactor + 1) must be a multiple of the number of placement groups.");
            }
            if (topology.hasMissingPartitions()) {
                VoltDB.crashLocalVoltDB("Some partitions are missing in the topology", false, null);
            }
            if (m_config.m_restorePlacement && m_config.m_startAction.doesRecover() && topology.version > 1) {
                consoleLog.info("Partition placement has been restored.");
            }
            // Publish the computed topology to ZK (or adopt the winner of the race).
            topology = TopologyZKUtils.registerTopologyToZK(m_messenger.getZK(), topology);
        }
        return topology;
    }
}
public class LifecycleHooks { /** * Get the specified field of the supplied object . * @ param target target object * @ param name field name * @ return { @ link Field } object for the requested field * @ throws NoSuchFieldException if a field with the specified name is not found * @ throws SecurityException if the request is denied */ static Field getDeclaredField ( Object target , String name ) throws NoSuchFieldException { } }
Throwable thrown = null ; for ( Class < ? > current = target . getClass ( ) ; current != null ; current = current . getSuperclass ( ) ) { try { return current . getDeclaredField ( name ) ; } catch ( NoSuchFieldException e ) { thrown = e ; } catch ( SecurityException e ) { thrown = e ; break ; } } throw UncheckedThrow . throwUnchecked ( thrown ) ;
public class MultiPolygon { /** * Return the total length . */ public double getLength ( ) { } }
double len = 0 ; if ( ! isEmpty ( ) ) { for ( Polygon polygon : polygons ) { len += polygon . getLength ( ) ; } } return len ;
public class CPOptionPersistenceImpl { /** * Returns all the cp options where groupId = & # 63 ; . * @ param groupId the group ID * @ return the matching cp options */ @ Override public List < CPOption > findByGroupId ( long groupId ) { } }
return findByGroupId ( groupId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
public class ImageDescriptorRegistry { /** * Disposes all images managed by this registry . */ public void dispose ( ) { } }
for ( Iterator < Image > iter = fRegistry . values ( ) . iterator ( ) ; iter . hasNext ( ) ; ) { Image image = ( Image ) iter . next ( ) ; image . dispose ( ) ; } fRegistry . clear ( ) ;
public class ConfigUpdateController {
    /**
     * Update a single configuration item.
     *
     * @param configId id of the configuration item
     * @param value new value to store
     * @return success response carrying the e-mail notification text
     */
    @RequestMapping(value = "/item/{configId}", method = RequestMethod.PUT)
    @ResponseBody
    public JsonObjectBase updateItem(@PathVariable long configId, String value) {
        // Business validation first: reject bad ids/values before any write.
        configValidator.validateUpdateItem(configId, value);
        LOG.info("start to update config: " + configId);
        // Persist the new value; the manager returns the notification text.
        final String emailNotification = configMgr.updateItemValue(configId, value);
        // Propagate the change through ZooKeeper so watchers pick it up.
        configMgr.notifyZookeeper(configId);
        return buildSuccess(emailNotification);
    }
}
public class ClassDocImpl {
    /**
     * Return methods in class.
     * This method is overridden by AnnotationTypeDocImpl.
     *
     * @param filter include only the included methods if filter == true
     * @return an array of MethodDocImpl for representing the visible
     *         methods in this class. Does not include constructors.
     */
    public MethodDoc[] methods(boolean filter) {
        Names names = tsym.name.table.names;
        List<MethodDocImpl> methods = List.nil();
        for (Symbol sym : tsym.members().getSymbols(NON_RECURSIVE)) {
            // Skip constructors (<init>) and static initializers (<clinit>);
            // only plain methods are reported.
            if (sym != null && sym.kind == MTH && sym.name != names.init && sym.name != names.clinit) {
                MethodSymbol s = (MethodSymbol) sym;
                if (!filter || env.shouldDocument(s)) {
                    methods = methods.prepend(env.getMethodDoc(s));
                }
            }
        }
        // ### Cache methods here?
        return methods.toArray(new MethodDocImpl[methods.length()]);
    }
}
public class AmpSkin {
    /**
     * Handle gauge events, dispatched on the event type string after letting
     * the superclass process the event first.
     */
    protected void handleEvents(final String EVENT_TYPE) {
        super.handleEvents(EVENT_TYPE);
        if ("FINISHED".equals(EVENT_TYPE)) {
            // Needle animation finished: repaint only if sections are highlighted.
            if (gauge.isHighlightSections()) {
                redraw();
            }
        } else if ("ANGLE".equals(EVENT_TYPE)) {
            // Needle moved: derive the current value back from the needle angle
            // and refresh the LCD readout text and position.
            double currentValue = (needleRotate.getAngle() + START_ANGLE - 180) / angleStep + gauge.getMinValue();
            lcdText.setText((String.format(locale, formatString, currentValue)));
            if (gauge.isLcdVisible()) {
                lcdText.setAlignment(Pos.CENTER_RIGHT);
                lcdText.setTranslateX((width - lcdText.getPrefWidth()) * 0.5);
            } else {
                lcdText.setAlignment(Pos.CENTER);
                lcdText.setTranslateX((width - lcdText.getLayoutBounds().getWidth()) * 0.5);
            }
        } else if ("VISIBILITY".equals(EVENT_TYPE)) {
            // Toggle visibility of every optional node, then fully re-layout.
            enableNode(ledCanvas, gauge.isLedVisible());
            enableNode(titleText, !gauge.getTitle().isEmpty());
            enableNode(unitText, !gauge.getUnit().isEmpty());
            enableNode(lcd, gauge.isLcdVisible());
            enableNode(lcdText, gauge.isValueVisible());
            enableNode(threshold, gauge.isThresholdVisible());
            enableNode(average, gauge.isAverageVisible());
            boolean markersVisible = gauge.getMarkersVisible();
            for (Shape shape : markerMap.values()) {
                Helper.enableNode(shape, markersVisible);
            }
            resize();
            redraw();
        } else if ("LED".equals(EVENT_TYPE)) {
            if (gauge.isLedVisible()) {
                drawLed(led);
            }
        } else if ("LCD".equals(EVENT_TYPE)) {
            if (gauge.isLcdVisible()) redraw();
        } else if ("RECALC".equals(EVENT_TYPE)) {
            // Scale changed: refresh the angle step and clamp the cached old
            // value into the new [min, max] range before redrawing.
            angleStep = gauge.getAngleStep();
            if (gauge.getValue() < gauge.getMinValue()) {
                oldValue = gauge.getMinValue();
            }
            if (gauge.getValue() > gauge.getMaxValue()) {
                oldValue = gauge.getMaxValue();
            }
            redraw();
            rotateNeedle();
        }
    }
}
public class OboFileParser {
    /**
     * Parse the synonym string from a term stanza.
     * A value looks like:
     * <pre>"ca_bind" RELATED [uniprot:curation]</pre>
     *
     * @param key the tag name; determines the default synonym scope
     * @param value the raw tag value
     * @return the parsed synonym
     * @throws IOException if the value is not a well-formed synonym clause
     */
    private Synonym parseSynonym(String key, String value) throws IOException {
        // The synonym text is the first unescaped double-quoted section.
        int startIndex = findUnescaped(value, '"', 0, value.length());
        if (startIndex == -1)
            throw new IOException("Expected \"" + line + " " + linenum);
        SOPair p = unescape(value, '"', startIndex + 1, value.length(), true);
        // The dbxref list (between '[' and end of line) is mandatory.
        int defIndex = findUnescaped(value, '[', p.index, value.length());
        if (defIndex == -1) {
            throw new IOException("Badly formatted synonym. " + "No dbxref list found." + line + " " + linenum);
        }
        // Whatever sits between the closing quote and '[' is an optional scope
        // identifier and an optional category id, whitespace-separated.
        String leftovers = value.substring(p.index + 1, defIndex).trim();
        StringTokenizer tokenizer = new StringTokenizer(leftovers, " \t");
        // Default scope is derived from the tag name itself.
        int scope = Synonym.RELATED_SYNONYM;
        if (key.equals(OboFileHandler.EXACT_SYNONYM))
            scope = Synonym.EXACT_SYNONYM;
        else if (key.equals(OboFileHandler.BROAD_SYNONYM))
            scope = Synonym.BROAD_SYNONYM;
        else if (key.equals(OboFileHandler.NARROW_SYNONYM))
            scope = Synonym.NARROW_SYNONYM;
        String catID = null;
        for (int i = 0; tokenizer.hasMoreTokens(); i++) {
            String token = tokenizer.nextToken();
            if (i == 0) {
                // First token: an explicit scope identifier overrides the default.
                if (token.equals("RELATED"))
                    scope = Synonym.RELATED_SYNONYM;
                else if (token.equals("UNSPECIFIED"))
                    scope = Synonym.RELATED_SYNONYM;
                else if (token.equals("EXACT"))
                    scope = Synonym.EXACT_SYNONYM;
                else if (token.equals("BROAD"))
                    scope = Synonym.BROAD_SYNONYM;
                else if (token.equals("NARROW"))
                    scope = Synonym.NARROW_SYNONYM;
                else
                    throw new IOException("Found unexpected scope " + "identifier " + token + line);
            } else if (i == 1) {
                // Second token (optional): synonym category/type id.
                catID = token;
            } else
                throw new IOException("Expected dbxref list," + " instead found " + token + line);
        }
        Synonym synonym = new Synonym();
        synonym.setScope(scope);
        synonym.setCategory(catID);
        synonym.setName(p.str);
        Map<String, Object>[] refs = getDbxrefList(value, defIndex + 1, value.length());
        // Parse the dbxref entries. Currently the parsed parts are unused —
        // see the TODO below; only the list itself is validated.
        for (Map<String, Object> ref : refs) {
            @SuppressWarnings("unused")
            String xref = (String) ref.get("xref");
            @SuppressWarnings("unused")
            String desc = (String) ref.get("desc");
            @SuppressWarnings("unused")
            NestedValue nv = (NestedValue) ref.get("nv");
            // TODO: add implementation for this...
        }
        return synonym;
    }
}
public class BeatGridFinder { /** * We have obtained a beat grid for a device , so store it and alert any listeners . * @ param update the update which caused us to retrieve this beat grid * @ param beatGrid the beat grid which we retrieved */ private void updateBeatGrid ( TrackMetadataUpdate update , BeatGrid beatGrid ) { } }
hotCache . put ( DeckReference . getDeckReference ( update . player , 0 ) , beatGrid ) ; // Main deck if ( update . metadata . getCueList ( ) != null ) { // Update the cache with any hot cues in this track as well for ( CueList . Entry entry : update . metadata . getCueList ( ) . entries ) { if ( entry . hotCueNumber != 0 ) { hotCache . put ( DeckReference . getDeckReference ( update . player , entry . hotCueNumber ) , beatGrid ) ; } } } deliverBeatGridUpdate ( update . player , beatGrid ) ;
public class ReflectionUtils {
    /**
     * Make a copy of the writable object using serialization to a buffer.
     * NOTE(review): the original javadoc had the src/dst descriptions swapped —
     * {@code src} is serialized, {@code dst} is overwritten by deserialization.
     *
     * @param conf configuration used to obtain the serialization factory
     * @param src the object to copy from
     * @param dst the object to copy into, whose previous contents are destroyed;
     *        the deserializer may return a different instance
     * @return the copy returned by the deserializer
     * @throws IOException on serialization or deserialization failure
     */
    @SuppressWarnings("unchecked")
    public static <T> T copy(Configuration conf, T src, T dst) throws IOException {
        // Thread-local buffer pair avoids reallocating on every copy.
        CopyInCopyOutBuffer buffer = cloneBuffers.get();
        buffer.outBuffer.reset();
        SerializationFactory factory = getFactory(conf);
        Class<T> cls = (Class<T>) src.getClass();
        Serializer<T> serializer = factory.getSerializer(cls);
        serializer.open(buffer.outBuffer);
        serializer.serialize(src);
        // Move the serialized bytes from the out-buffer to the in-buffer.
        buffer.moveData();
        Deserializer<T> deserializer = factory.getDeserializer(cls);
        deserializer.open(buffer.inBuffer);
        dst = deserializer.deserialize(dst);
        return dst;
    }
}
public class SessionContext { /** * Makes a copy of the SessionContext . This is used for debugging . * @ return */ public SessionContext copy ( ) { } }
SessionContext copy = new SessionContext ( ) ; copy . brownoutMode = brownoutMode ; copy . cancelled = cancelled ; copy . shouldStopFilterProcessing = shouldStopFilterProcessing ; copy . shouldSendErrorResponse = shouldSendErrorResponse ; copy . errorResponseSent = errorResponseSent ; copy . debugRouting = debugRouting ; copy . debugRequest = debugRequest ; copy . debugRequestHeadersOnly = debugRequestHeadersOnly ; copy . timings = timings ; Iterator < String > it = keySet ( ) . iterator ( ) ; String key = it . next ( ) ; while ( key != null ) { Object orig = get ( key ) ; try { Object copyValue = DeepCopy . copy ( orig ) ; if ( copyValue != null ) { copy . set ( key , copyValue ) ; } else { copy . set ( key , orig ) ; } } catch ( NotSerializableException e ) { copy . set ( key , orig ) ; } if ( it . hasNext ( ) ) { key = it . next ( ) ; } else { key = null ; } } return copy ;
public class Parameters { /** * Throws a ParameterException if none of the supplied parameters are defined . */ public void assertAtLeastOneDefined ( final String param1 , final String ... moreParams ) { } }
if ( ! isPresent ( param1 ) ) { for ( final String moreParam : moreParams ) { if ( isPresent ( moreParam ) ) { return ; } } final List < String > paramsForError = Lists . newArrayList ( ) ; paramsForError . add ( param1 ) ; paramsForError . addAll ( Arrays . asList ( moreParams ) ) ; throw new ParameterException ( String . format ( "At least one of %s must be defined." , StringUtils . CommaSpaceJoiner . join ( paramsForError ) ) ) ; }
public class CoordinatorAdminUtils {
    /**
     * Adds the OPT_U | OPT_URL option to the given OptionParser. The option takes
     * a required, comma-separated list of coordinator bootstrap URLs.
     * (The previous javadoc documented a non-existent {@code required} parameter.)
     *
     * @param parser OptionParser to be modified
     */
    public static void acceptsUrlMultiple(OptionParser parser) {
        parser.acceptsAll(Arrays.asList(OPT_U, OPT_URL), "coordinator bootstrap urls")
              .withRequiredArg()
              .describedAs("url-list")
              .withValuesSeparatedBy(',')
              .ofType(String.class);
    }
}
public class AtomTetrahedralLigandPlacer3D { /** * Gets the angleKey attribute of the AtomPlacer3D object . * @ param id1 Description of the Parameter * @ param id2 Description of the Parameter * @ param id3 Description of the Parameter * @ return The angleKey value */ public double getAngleValue ( String id1 , String id2 , String id3 ) { } }
String akey = "" ; if ( pSet . containsKey ( ( "angle" + id1 + ";" + id2 + ";" + id3 ) ) ) { akey = "angle" + id1 + ";" + id2 + ";" + id3 ; } else if ( pSet . containsKey ( ( "angle" + id3 + ";" + id2 + ";" + id1 ) ) ) { akey = "angle" + id3 + ";" + id2 + ";" + id1 ; } else if ( pSet . containsKey ( ( "angle" + id2 + ";" + id1 + ";" + id3 ) ) ) { akey = "angle" + id2 + ";" + id1 + ";" + id3 ; } else if ( pSet . containsKey ( ( "angle" + id1 + ";" + id3 + ";" + id2 ) ) ) { akey = "angle" + id1 + ";" + id3 + ";" + id2 ; } else if ( pSet . containsKey ( ( "angle" + id3 + ";" + id1 + ";" + id2 ) ) ) { akey = "angle" + id3 + ";" + id1 + ";" + id2 ; } else if ( pSet . containsKey ( ( "angle" + id2 + ";" + id3 + ";" + id1 ) ) ) { akey = "angle" + id2 + ";" + id3 + ";" + id1 ; } else { System . out . println ( "AngleKEYError:Unknown angle " + id1 + " " + id2 + " " + id3 + " take default angle:" + TETRAHEDRAL_ANGLE ) ; return TETRAHEDRAL_ANGLE ; } return ( ( Double ) ( ( ( List ) pSet . get ( akey ) ) . get ( 0 ) ) ) . doubleValue ( ) ;
public class AuditActionContextCouchDbRepository { /** * Find audit records for authentication throttling . * @ param remoteAddress remote IP address * @ param username username * @ param failureCode failure code * @ param applicationCode application code * @ param cutoffTime cut off time * @ return records for authentication throttleing decision */ @ View ( name = "by_throttle_params" , map = "classpath:CouchDbAuditActionContext_by_throttle_params.js" ) public List < CouchDbAuditActionContext > findByThrottleParams ( final String remoteAddress , final String username , final String failureCode , final String applicationCode , final LocalDateTime cutoffTime ) { } }
val view = createQuery ( "by_throttle_params" ) . startKey ( ComplexKey . of ( remoteAddress , username , failureCode , applicationCode , cutoffTime ) ) . endKey ( ComplexKey . of ( remoteAddress , username , failureCode , applicationCode , "999999" ) ) ; return db . queryView ( view , CouchDbAuditActionContext . class ) ;
public class AWSBudgetsClient { /** * Lists the subscribers that are associated with a notification . * @ param describeSubscribersForNotificationRequest * Request of DescribeSubscribersForNotification * @ return Result of the DescribeSubscribersForNotification operation returned by the service . * @ throws InternalErrorException * An error on the server occurred during the processing of your request . Try again later . * @ throws NotFoundException * We can ’ t locate the resource that you specified . * @ throws InvalidParameterException * An error on the client occurred . Typically , the cause is an invalid input value . * @ throws InvalidNextTokenException * The pagination token is invalid . * @ throws ExpiredNextTokenException * The pagination token expired . * @ sample AWSBudgets . DescribeSubscribersForNotification */ @ Override public DescribeSubscribersForNotificationResult describeSubscribersForNotification ( DescribeSubscribersForNotificationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeSubscribersForNotification ( request ) ;
public class PasswordHashCreatorManager { /** * Register a new password hash creator . No other password hash creator with * the same algorithm name may be registered . * @ param aPasswordHashCreator * The password hash creator to be registered . May not be * < code > null < / code > . */ public void registerPasswordHashCreator ( @ Nonnull final IPasswordHashCreator aPasswordHashCreator ) { } }
ValueEnforcer . notNull ( aPasswordHashCreator , "PasswordHashCreator" ) ; final String sAlgorithmName = aPasswordHashCreator . getAlgorithmName ( ) ; if ( StringHelper . hasNoText ( sAlgorithmName ) ) throw new IllegalArgumentException ( "PasswordHashCreator algorithm '" + aPasswordHashCreator + "' is empty!" ) ; m_aRWLock . writeLocked ( ( ) -> { if ( m_aPasswordHashCreators . containsKey ( sAlgorithmName ) ) throw new IllegalArgumentException ( "Another PasswordHashCreator for algorithm '" + sAlgorithmName + "' is already registered!" ) ; m_aPasswordHashCreators . put ( sAlgorithmName , aPasswordHashCreator ) ; } ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "Registered password hash creator algorithm '" + sAlgorithmName + "' to " + aPasswordHashCreator ) ;
public class LBoolSupplierBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LBoolSupplier boolSupplierFrom ( Consumer < LBoolSupplierBuilder > buildingFunction ) { } }
LBoolSupplierBuilder builder = new LBoolSupplierBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class CoreUserApiKeyAuthProviderClient { /** * Fetches the user API keys associated with the current user . * @ return the user API keys associated with the current user . */ protected List < UserApiKey > fetchApiKeysInternal ( ) { } }
final StitchAuthRequest . Builder reqBuilder = new StitchAuthRequest . Builder ( ) ; reqBuilder . withMethod ( Method . GET ) . withPath ( this . getBaseRoute ( ) ) . withRefreshToken ( ) ; return ( List < UserApiKey > ) getRequestClient ( ) . doAuthenticatedRequest ( reqBuilder . build ( ) , new CollectionDecoder < > ( new UserApiKeyDecoder ( ) ) ) ;
public class YamlDataProviderImpl { /** * Gets yaml data for requested indexes . * @ param indexes * the input string represent the indexes to be parse * @ return Object [ ] [ ] Two dimensional object to be used with TestNG DataProvider * @ throws IOException */ @ Override public Object [ ] [ ] getDataByIndex ( String indexes ) throws IOException , DataProviderException { } }
logger . entering ( indexes ) ; int [ ] arrayIndex = DataProviderHelper . parseIndexString ( indexes ) ; Object [ ] [ ] yamlObjRequested = getDataByIndex ( arrayIndex ) ; logger . exiting ( ( Object [ ] ) yamlObjRequested ) ; return yamlObjRequested ;
public class AbstractNotificationBuilder { /** * Sets the notification batching interval , indicating when the notification * should be delivered to the device * @ param delivery batching interval * @ return this */ public A notificationClass ( DeliveryClass delivery ) { } }
this . headers . add ( Pair . of ( "X-NotificationClass" , String . valueOf ( deliveryValueOf ( delivery ) ) ) ) ; return ( A ) this ;
public class InsertAllRequest { /** * Returns a builder for an { @ code InsertAllRequest } object given the destination table . */ public static Builder newBuilder ( String datasetId , String tableId ) { } }
return new Builder ( ) . setTable ( TableId . of ( datasetId , tableId ) ) ;
public class PartitionImpl { /** * If not already created , a new < code > reducer < / code > element with the given value will be created . * Otherwise , the existing < code > reducer < / code > element will be returned . * @ return a new or existing instance of < code > PartitionReducer < Partition < T > > < / code > */ public PartitionReducer < Partition < T > > getOrCreateReducer ( ) { } }
Node node = childNode . getOrCreate ( "reducer" ) ; PartitionReducer < Partition < T > > reducer = new PartitionReducerImpl < Partition < T > > ( this , "reducer" , childNode , node ) ; return reducer ;
public class ImageMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Image image , ProtocolMarshaller protocolMarshaller ) { } }
if ( image == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( image . getBytes ( ) , BYTES_BINDING ) ; protocolMarshaller . marshall ( image . getS3Object ( ) , S3OBJECT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Accordion { /** * Icons to use for headers . * @ param header * @ param headerSelected * @ return instance of the current component */ public Accordion setIcons ( UiIcon header , UiIcon headerSelected ) { } }
setIcons ( new AccordionIcon ( header , headerSelected ) ) ; return this ;
public class FacebookEndpoint { /** * Asynchronously requests the Page accounts associated with the linked account . Requires an opened active { @ link Session } . * @ param callback a { @ link Callback } when the request completes . * @ return true if the request is made ; false if no opened { @ link Session } is active . */ boolean requestAccounts ( Callback callback ) { } }
boolean isSuccessful = false ; Session session = Session . getActiveSession ( ) ; if ( session != null && session . isOpened ( ) ) { // Construct fields to request . Bundle params = new Bundle ( ) ; params . putString ( ACCOUNTS_LISTING_FEILDS_KEY , ACCOUNTS_LISTING_FIELDS_VALUE ) ; // Construct and execute albums listing request . Request request = new Request ( session , ACCOUNTS_LISTING_GRAPH_PATH , params , HttpMethod . GET , callback ) ; request . executeAsync ( ) ; isSuccessful = true ; } return isSuccessful ;
public class UtilWavelet { /** * Adjusts the values inside a wavelet transform to make it easier to view . * @ param transform * @ param numLevels Number of levels in the transform */ public static void adjustForDisplay ( ImageGray transform , int numLevels , double valueRange ) { } }
if ( transform instanceof GrayF32 ) adjustForDisplay ( ( GrayF32 ) transform , numLevels , ( float ) valueRange ) ; else adjustForDisplay ( ( GrayI ) transform , numLevels , ( int ) valueRange ) ;
public class PoolsImpl { /** * Updates the properties of the specified pool . * This only replaces the pool properties specified in the request . For example , if the pool has a start task associated with it , and a request does not specify a start task element , then the pool keeps the existing start task . * @ param poolId The ID of the pool to update . * @ param poolPatchParameter The parameters for the request . * @ param poolPatchOptions Additional parameters for the operation * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponseWithHeaders } object if successful . */ public Observable < Void > patchAsync ( String poolId , PoolPatchParameter poolPatchParameter , PoolPatchOptions poolPatchOptions ) { } }
return patchWithServiceResponseAsync ( poolId , poolPatchParameter , poolPatchOptions ) . map ( new Func1 < ServiceResponseWithHeaders < Void , PoolPatchHeaders > , Void > ( ) { @ Override public Void call ( ServiceResponseWithHeaders < Void , PoolPatchHeaders > response ) { return response . body ( ) ; } } ) ;
public class MetadataToH2 { /** * - - TABLES - - - - - */ private void createTablesAndPkAndIndexes ( ) { } }
for ( Table t : metadata . getTables ( ) ) { comment ( " == TABLES [ " + t . getName ( ) + " ] =======================" ) ; createTable ( t ) ; createIndexes ( t ) ; if ( t . hasH2IdentityPk ( ) ) { line ( "ALTER TABLE " , t . getName ( ) , " ALTER COLUMN " , t . getPrimaryKey ( ) , " IDENTITY;" ) ; } else if ( t . hasPk ( ) ) { line ( "CREATE PRIMARY KEY ON " , t . getName ( ) , " (" , toCsvString ( t . getPrimaryKeys ( ) ) , ");" ) ; } line ( "" ) ; }
public class host_interface { /** * Use this operation to reset interface settings . */ public static host_interface reset ( nitro_service client , host_interface resource ) throws Exception { } }
return ( ( host_interface [ ] ) resource . perform_operation ( client , "reset" ) ) [ 0 ] ;
public class PaymentActivity { /** * The name of the function has to be * onPaymentError * Wrap your code in try catch , as shown , to ensure that this method runs correctly */ @ SuppressWarnings ( "unused" ) @ Override public void onPaymentError ( int code , String response ) { } }
try { Toast . makeText ( this , "Payment failed: " + code + " " + response , Toast . LENGTH_SHORT ) . show ( ) ; } catch ( Exception e ) { Log . e ( TAG , "Exception in onPaymentError" , e ) ; }
public class SharedTreeMojoModel { /** * Returns staged predictions of tree algorithms ( prediction probabilities of trees per iteration ) . * The output structure is for tree Tt and class Cc : * Binomial models : [ probability T1 . C1 , probability T2 . C1 , . . . , Tt . C1 ] where Tt . C1 correspond to the the probability p0 * Multinomial models : [ probability T1 . C1 , probability T1 . C2 , . . . , Tt . Cc ] * @ param row Input row . * @ param predsLength Length of prediction result . * @ return array of staged prediction probabilities */ public double [ ] scoreStagedPredictions ( double [ ] row , int predsLength ) { } }
int contribOffset = nclasses ( ) == 1 ? 0 : 1 ; double [ ] trees_result = new double [ _ntree_groups * _ntrees_per_group ] ; for ( int groupIndex = 0 ; groupIndex < _ntree_groups ; groupIndex ++ ) { double [ ] tmpPreds = new double [ predsLength ] ; scoreTreeRange ( row , 0 , groupIndex + 1 , tmpPreds ) ; unifyPreds ( row , 0 , tmpPreds ) ; for ( int classIndex = 0 ; classIndex < _ntrees_per_group ; classIndex ++ ) { int tree_index = groupIndex * _ntrees_per_group + classIndex ; trees_result [ tree_index ] = tmpPreds [ contribOffset + classIndex ] ; } } return trees_result ;
public class CallbacksDispatcher {
    /**
     * Registers a weak listener. Only weak references are stored for weak
     * listeners, so they are removed from the dispatcher automatically once the
     * listener objects are GCed. Note that weak listeners cannot be removed
     * explicitly.
     *
     * @param listener the listener to register weakly; must not be null
     */
    public synchronized void addWeakListener(L listener) {
        Preconditions.checkNotNull(listener);
        _log.info("Adding a weak listener " + listener);
        // _autoListeners is presumably a weak-keyed map (e.g. WeakHashMap),
        // which is what makes the reference weak — TODO confirm at declaration.
        _autoListeners.put(listener, null);
    }
}
public class IndexNode {
    /**
     * Called on the most bottom (leaf-level) node: produces a copy of this node
     * with the given key's leaf record inserted, overwritten, updated, or
     * removed, depending on the overwrite hook's decision and the record change.
     * The original node is immutable; a new IndexNode is returned (or
     * {@code this} when the hook rejects the change).
     *
     * @param key the entry key being indexed
     * @param file data file id holding the new record
     * @param offset offset of the new record within that file
     * @param size serialized size of the new record (freed if the write is dropped)
     * @param overwriteHook decides whether an existing record may be replaced,
     *        and is informed of the outcome via setOverwritten
     * @param recordChange how the per-key record count changes (INCREASE,
     *        INCREASE_FOR_OLD, MOVE, DECREASE)
     * @throws IOException if the existing leaf's header/key cannot be loaded
     */
    private IndexNode copyWith(byte[] key, int file, int offset, int size, OverwriteHook overwriteHook, RecordChange recordChange) throws IOException {
        if (leafNodes == null) throw new IllegalArgumentException();
        byte[] newPrefix;
        byte[][] newKeyParts;
        LeafNode[] newLeafNodes;
        // Empty node: either start a fresh single-leaf node or drop the write.
        if (leafNodes.length == 0) {
            overwriteHook.setOverwritten(false, -1, -1);
            if (overwriteHook.check(-1, -1)) {
                return new IndexNode(segment, prefix, keyParts, new LeafNode[]{new LeafNode(file, offset, (short) 1)});
            } else {
                // Write rejected: release the already-written record space.
                segment.getCompactor().free(file, size);
                return this;
            }
        }
        int insertPart = getInsertionPoint(key);
        LeafNode oldLeafNode = leafNodes[insertPart];
        short numRecords = oldLeafNode.numRecords;
        // Adjust the per-key record count according to the requested change.
        switch (recordChange) {
            case INCREASE:
            case INCREASE_FOR_OLD:
                if (numRecords == Short.MAX_VALUE) {
                    throw new IllegalStateException("Too many records for this key (short overflow)");
                }
                numRecords++;
                break;
            case MOVE:
                break;
            case DECREASE:
                numRecords--;
                break;
        }
        // Load the existing leaf's header and key to compare against the new key.
        EntryRecord hak;
        try {
            hak = oldLeafNode.loadHeaderAndKey(segment.getFileProvider());
        } catch (IndexNodeOutdatedException e) {
            throw new IllegalStateException("Index cannot be outdated for segment updater thread", e);
        }
        int keyComp = compare(hak.getKey(), key);
        if (keyComp == 0) {
            // The key already exists in this node.
            if (numRecords > 0) {
                if (overwriteHook.check(oldLeafNode.file, oldLeafNode.offset)) {
                    // Replace or update the existing leaf in a copied leaf array.
                    newPrefix = prefix;
                    newKeyParts = keyParts;
                    newLeafNodes = new LeafNode[leafNodes.length];
                    System.arraycopy(leafNodes, 0, newLeafNodes, 0, leafNodes.length);
                    // Do not update the file and offset for DROPPED IndexRequests
                    if (recordChange == RecordChange.INCREASE || recordChange == RecordChange.MOVE) {
                        if (trace) {
                            log.trace(String.format("Overwriting %d:%d with %d:%d (%d)", oldLeafNode.file, oldLeafNode.offset, file, offset, numRecords));
                        }
                        newLeafNodes[insertPart] = new LeafNode(file, offset, numRecords);
                        // The old record location is no longer referenced.
                        segment.getCompactor().free(oldLeafNode.file, hak.getHeader().totalLength());
                    } else {
                        if (trace) {
                            log.trace(String.format("Updating num records for %d:%d to %d", oldLeafNode.file, oldLeafNode.offset, numRecords));
                        }
                        // Keep the old location; only the record count changes.
                        newLeafNodes[insertPart] = new LeafNode(oldLeafNode.file, oldLeafNode.offset, numRecords);
                    }
                    overwriteHook.setOverwritten(true, oldLeafNode.file, oldLeafNode.offset);
                } else {
                    // Hook rejected the overwrite: free the new record, keep this node.
                    overwriteHook.setOverwritten(false, -1, -1);
                    segment.getCompactor().free(file, size);
                    return this;
                }
            } else {
                // Record count dropped to zero: remove the key from this node.
                overwriteHook.setOverwritten(true, oldLeafNode.file, oldLeafNode.offset);
                if (keyParts.length <= 1) {
                    // Removing the last separator empties the node's key structure.
                    newPrefix = Util.EMPTY_BYTE_ARRAY;
                    newKeyParts = Util.EMPTY_BYTE_ARRAY_ARRAY;
                } else {
                    newPrefix = prefix;
                    newKeyParts = new byte[keyParts.length - 1][];
                    if (insertPart == keyParts.length) {
                        System.arraycopy(keyParts, 0, newKeyParts, 0, newKeyParts.length);
                    } else {
                        System.arraycopy(keyParts, 0, newKeyParts, 0, insertPart);
                        System.arraycopy(keyParts, insertPart + 1, newKeyParts, insertPart, newKeyParts.length - insertPart);
                    }
                }
                if (leafNodes.length > 0) {
                    newLeafNodes = new LeafNode[leafNodes.length - 1];
                    System.arraycopy(leafNodes, 0, newLeafNodes, 0, insertPart);
                    System.arraycopy(leafNodes, insertPart + 1, newLeafNodes, insertPart, newLeafNodes.length - insertPart);
                } else {
                    newLeafNodes = leafNodes;
                }
                segment.getCompactor().free(oldLeafNode.file, hak.getHeader().totalLength());
            }
        } else {
            // The key is not present: insert a brand-new leaf.
            // IndexRequest cannot be MOVED or DROPPED when the key is not in the index
            assert recordChange == RecordChange.INCREASE;
            overwriteHook.setOverwritten(false, -1, -1);
            // We have to insert the record even if this is a delete request and the key was not found
            // because otherwise we would have incorrect numRecord count. Eventually, Compactor will
            // drop the tombstone and update index, removing this node
            if (keyParts.length == 0) {
                // TODO: we may use unnecessarily long keys here and the key is never shortened
                newPrefix = keyComp > 0 ? key : hak.getKey();
            } else {
                newPrefix = commonPrefix(prefix, key);
            }
            newKeyParts = new byte[keyParts.length + 1][];
            newLeafNodes = new LeafNode[leafNodes.length + 1];
            // Re-encode the existing separators relative to the (possibly shorter) new prefix.
            copyKeyParts(keyParts, 0, newKeyParts, 0, insertPart, prefix, newPrefix);
            copyKeyParts(keyParts, insertPart, newKeyParts, insertPart + 1, keyParts.length - insertPart, prefix, newPrefix);
            if (keyComp > 0) {
                // New key sorts after the existing one at this slot.
                newKeyParts[insertPart] = substring(key, newPrefix.length, keyComp);
                System.arraycopy(leafNodes, 0, newLeafNodes, 0, insertPart + 1);
                System.arraycopy(leafNodes, insertPart + 1, newLeafNodes, insertPart + 2, leafNodes.length - insertPart - 1);
                newLeafNodes[insertPart + 1] = new LeafNode(file, offset, (short) 1);
            } else {
                // New key sorts before the existing one at this slot.
                newKeyParts[insertPart] = substring(hak.getKey(), newPrefix.length, -keyComp);
                System.arraycopy(leafNodes, 0, newLeafNodes, 0, insertPart);
                System.arraycopy(leafNodes, insertPart, newLeafNodes, insertPart + 1, leafNodes.length - insertPart);
                newLeafNodes[insertPart] = new LeafNode(file, offset, (short) 1);
            }
        }
        return new IndexNode(segment, newPrefix, newKeyParts, newLeafNodes);
    }
}
public class DescribeEmergencyContactSettingsResult { /** * A list of email addresses that the DRT can use to contact you during a suspected attack . * @ param emergencyContactList * A list of email addresses that the DRT can use to contact you during a suspected attack . */ public void setEmergencyContactList ( java . util . Collection < EmergencyContact > emergencyContactList ) { } }
if ( emergencyContactList == null ) { this . emergencyContactList = null ; return ; } this . emergencyContactList = new java . util . ArrayList < EmergencyContact > ( emergencyContactList ) ;
public class OrientModule { /** * Binds registered custom types as guice singletons and register them with multibinder . */ private void configureCustomTypes ( ) { } }
// empty binding is required in any case final Multibinder < OObjectSerializer > typesBinder = Multibinder . newSetBinder ( binder ( ) , OObjectSerializer . class ) ; if ( ! customTypes . isEmpty ( ) ) { for ( Class < ? extends OObjectSerializer > type : customTypes ) { bind ( type ) . in ( Singleton . class ) ; typesBinder . addBinding ( ) . to ( type ) ; } }
public class ParticleEditor { /** * Save a complete particle system XML description */ public void saveSystem ( ) { } }
int resp = chooser . showSaveDialog ( this ) ; if ( resp == JFileChooser . APPROVE_OPTION ) { File file = chooser . getSelectedFile ( ) ; if ( ! file . getName ( ) . endsWith ( ".xml" ) ) { file = new File ( file . getAbsolutePath ( ) + ".xml" ) ; } try { ParticleIO . saveConfiguredSystem ( file , game . getSystem ( ) ) ; } catch ( IOException e ) { Log . error ( e ) ; JOptionPane . showMessageDialog ( this , e . getMessage ( ) ) ; } }
public class TraceInterceptor { /** * Logs an error to the span . * @ param span The span * @ param e The error */ public static void logError ( Span span , Throwable e ) { } }
HashMap < String , Object > fields = new HashMap < > ( ) ; fields . put ( Fields . ERROR_OBJECT , e ) ; String message = e . getMessage ( ) ; if ( message != null ) { fields . put ( Fields . MESSAGE , message ) ; } span . log ( fields ) ;
public class LogLogisticDistribution { /** * Probability density function . * @ param val Value * @ param shape Shape * @ param location Location * @ param scale Scale * @ return logPDF */ public static double logpdf ( double val , double shape , double location , double scale ) { } }
if ( val < location ) { return Double . NEGATIVE_INFINITY ; } val = ( val - location ) / scale ; final double lval = FastMath . log ( val ) ; if ( lval == Double . POSITIVE_INFINITY ) { return Double . NEGATIVE_INFINITY ; } return FastMath . log ( shape / scale ) + ( shape - 1. ) * lval - 2. * FastMath . log1p ( FastMath . exp ( lval * shape ) ) ;
public class HashUtil {
    /**
     * DJB (Bernstein) string hash: h = h * 33 + c per character, seeded with
     * 5381, with the sign bit masked off so the result is non-negative.
     *
     * @param str input string
     * @return non-negative 31-bit hash value
     */
    public static int djbHash(String str) {
        int h = 5381;
        // (h << 5) + h == h * 33, including identical int-overflow behavior.
        for (char c : str.toCharArray()) {
            h = h * 33 + c;
        }
        return h & 0x7FFFFFFF;
    }
}
public class Pages { /** * Adds a page at the end */ private void grow ( ) { } }
char [ ] [ ] newPages ; if ( lastFilled != pageSize ) { throw new IllegalStateException ( ) ; } lastNo ++ ; if ( lastNo >= pages . length ) { newPages = new char [ lastNo * 5 / 2 ] [ ] ; System . arraycopy ( pages , 0 , newPages , 0 , lastNo ) ; pages = newPages ; } if ( pages [ lastNo ] == null ) { pages [ lastNo ] = new char [ pageSize ] ; } lastFilled = 0 ;