repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
qianhk/KaiPractice
app/src/main/java/com/njnu/kai/practice/image/shadow/ShadowImageFragment.java
1150
package com.njnu.kai.practice.image.shadow; import android.graphics.Color; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import com.njnu.kai.practice.R; import com.njnu.kai.practice.image.DrawableLess; import com.njnu.kai.support.BaseTestFragment; /** * @author kai * @since 17/4/3 */ public class ShadowImageFragment extends BaseTestFragment { private View mRootView; @Override protected View onCreateContentView(LayoutInflater layoutInflater, ViewGroup viewGroup, Bundle bundle) { mRootView = layoutInflater.inflate(R.layout.fragment_shadow_image, viewGroup, false); ImageView imageView = (ImageView) mRootView.findViewById(R.id.iv_test); imageView.setImageResource(R.drawable.heart); ImageView imageView2 = (ImageView) mRootView.findViewById(R.id.iv_test2); Drawable drawable = getResources().getDrawable(R.drawable.heart); imageView2.setImageDrawable(DrawableLess.$tint(drawable, Color.GREEN)); return mRootView; } }
apache-2.0
cymcsg/UltimateRecyclerView
UltimateRecyclerView/ultimaterecyclerview/src/main/java/com/marshalchen/ultimaterecyclerview/ui/divideritemdecoration/VerticalDividerItemDecoration.java
4956
package com.marshalchen.ultimaterecyclerview.ui.divideritemdecoration; import android.content.Context; import android.graphics.Rect; import android.graphics.drawable.Drawable; import androidx.annotation.DimenRes; import androidx.core.view.ViewCompat; import androidx.recyclerview.widget.RecyclerView; import android.view.View; /** * Created by yqritc on 2015/01/15. */ public class VerticalDividerItemDecoration extends FlexibleDividerDecoration { private MarginProvider mMarginProvider; protected VerticalDividerItemDecoration(Builder builder) { super(builder); mMarginProvider = builder.mMarginProvider; } @Override protected Rect getDividerBound(int position, RecyclerView parent, View child) { Rect bounds = new Rect(0, 0, 0, 0); int transitionX = (int) ViewCompat.getTranslationX(child); int transitionY = (int) ViewCompat.getTranslationY(child); RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) child.getLayoutParams(); bounds.top = parent.getPaddingTop() + mMarginProvider.dividerTopMargin(position, parent) + transitionY; bounds.bottom = parent.getHeight() - parent.getPaddingBottom() - mMarginProvider.dividerBottomMargin(position, parent) + transitionY; int dividerSize = getDividerSize(position, parent); if (mDividerType == DividerType.DRAWABLE) { bounds.left = child.getRight() + params.leftMargin + transitionX; bounds.right = bounds.left + dividerSize; } else { bounds.left = child.getRight() + params.leftMargin + dividerSize / 2 + transitionX; bounds.right = bounds.left; } return bounds; } @Override protected void setItemOffsets(Rect outRect, int position, RecyclerView parent) { outRect.set(0, 0, getDividerSize(position, parent), 0); } private int getDividerSize(int position, RecyclerView parent) { if (mPaintProvider != null) { return (int) mPaintProvider.dividerPaint(position, parent).getStrokeWidth(); } else if (mSizeProvider != null) { return mSizeProvider.dividerSize(position, parent); } else if (mDrawableProvider != null) { Drawable drawable = 
mDrawableProvider.drawableProvider(position, parent); return drawable.getIntrinsicWidth(); } throw new RuntimeException("failed to get size"); } /** * Interface for controlling divider margin */ public interface MarginProvider { /** * Returns top margin of divider. * * @param position Divider position * @param parent RecyclerView * @return top margin */ int dividerTopMargin(int position, RecyclerView parent); /** * Returns bottom margin of divider. * * @param position Divider position * @param parent RecyclerView * @return bottom margin */ int dividerBottomMargin(int position, RecyclerView parent); } public static class Builder extends FlexibleDividerDecoration.Builder<Builder> { private MarginProvider mMarginProvider = new MarginProvider() { @Override public int dividerTopMargin(int position, RecyclerView parent) { return 0; } @Override public int dividerBottomMargin(int position, RecyclerView parent) { return 0; } }; public Builder(Context context) { super(context); } public Builder margin(final int topMargin, final int bottomMargin) { return marginProvider(new MarginProvider() { @Override public int dividerTopMargin(int position, RecyclerView parent) { return topMargin; } @Override public int dividerBottomMargin(int position, RecyclerView parent) { return bottomMargin; } }); } public Builder margin(int verticalMargin) { return margin(verticalMargin, verticalMargin); } public Builder marginResId(@DimenRes int topMarginId, @DimenRes int bottomMarginId) { return margin(mResources.getDimensionPixelSize(topMarginId), mResources.getDimensionPixelSize(bottomMarginId)); } public Builder marginResId(@DimenRes int verticalMarginId) { return marginResId(verticalMarginId, verticalMarginId); } public Builder marginProvider(MarginProvider provider) { mMarginProvider = provider; return this; } public VerticalDividerItemDecoration build() { checkBuilderParams(); return new VerticalDividerItemDecoration(this); } } }
apache-2.0
moreus/hadoop
hadoop-0.23.10/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
34861
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import static org.apache.hadoop.hdfs.protocol.HdfsConstants.MAX_PATH_DEPTH; import static org.apache.hadoop.hdfs.protocol.HdfsConstants.MAX_PATH_LENGTH; import java.io.FileNotFoundException; import java.io.IOException; import java.net.InetSocketAddress; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import org.apache.commons.logging.Log; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.fs.Options; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import static org.apache.hadoop.hdfs.DFSConfigKeys.*; import org.apache.hadoop.hdfs.HDFSPolicyProvider; import 
org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.BlockListAsLongs; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.QuotaExceededException; import org.apache.hadoop.hdfs.protocol.RecoveryInProgressException; import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException; import org.apache.hadoop.hdfs.protocol.UnresolvedPathException; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction; import org.apache.hadoop.hdfs.protocol.HdfsConstants.UpgradeAction; import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.common.IncorrectVersionException; import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics; import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; import 
org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations; import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand; import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.NodeRegistration; import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.ipc.ProtocolSignature; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.Node; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.RefreshUserMappingsProtocol; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.tools.GetUserMappingsProtocol; /** * This class is responsible for handling all of the RPC calls to the NameNode. * It is created, started, and stopped by {@link NameNode}. 
*/ class NameNodeRpcServer implements NamenodeProtocols { private static final Log LOG = NameNode.LOG; private static final Log stateChangeLog = NameNode.stateChangeLog; private static final Log blockStateChangeLog = NameNode.blockStateChangeLog; // Dependencies from other parts of NN. private final FSNamesystem namesystem; protected final NameNode nn; private final NameNodeMetrics metrics; private final boolean serviceAuthEnabled; /** The RPC server that listens to requests from DataNodes */ private final RPC.Server serviceRpcServer; private final InetSocketAddress serviceRPCAddress; /** The RPC server that listens to requests from clients */ protected final RPC.Server server; protected final InetSocketAddress rpcAddress; public NameNodeRpcServer(Configuration conf, NameNode nn) throws IOException { this.nn = nn; this.namesystem = nn.getNamesystem(); this.metrics = NameNode.getNameNodeMetrics(); int handlerCount = conf.getInt(DFS_NAMENODE_HANDLER_COUNT_KEY, DFS_NAMENODE_HANDLER_COUNT_DEFAULT); InetSocketAddress socAddr = nn.getRpcServerAddress(conf); InetSocketAddress dnSocketAddr = nn.getServiceRpcServerAddress(conf); if (dnSocketAddr != null) { int serviceHandlerCount = conf.getInt(DFS_NAMENODE_SERVICE_HANDLER_COUNT_KEY, DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT); this.serviceRpcServer = RPC.getServer(NamenodeProtocols.class, this, dnSocketAddr.getHostName(), dnSocketAddr.getPort(), serviceHandlerCount, false, conf, namesystem.getDelegationTokenSecretManager()); this.serviceRPCAddress = this.serviceRpcServer.getListenerAddress(); nn.setRpcServiceServerAddress(conf, serviceRPCAddress); } else { serviceRpcServer = null; serviceRPCAddress = null; } this.server = RPC.getServer(NamenodeProtocols.class, this, socAddr.getHostName(), socAddr.getPort(), handlerCount, false, conf, namesystem.getDelegationTokenSecretManager()); // set service-level authorization security policy if (serviceAuthEnabled = conf.getBoolean( 
CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) { this.server.refreshServiceAcl(conf, new HDFSPolicyProvider()); if (this.serviceRpcServer != null) { this.serviceRpcServer.refreshServiceAcl(conf, new HDFSPolicyProvider()); } } // The rpc-server port can be ephemeral... ensure we have the correct info this.rpcAddress = this.server.getListenerAddress(); nn.setRpcServerAddress(conf, rpcAddress); // Set terse exception whose stack trace won't be logged this.server.addTerseExceptions(SafeModeException.class, FileNotFoundException.class, HadoopIllegalArgumentException.class, FileAlreadyExistsException.class, InvalidPathException.class, ParentNotDirectoryException.class, UnresolvedLinkException.class, AlreadyBeingCreatedException.class, QuotaExceededException.class, RecoveryInProgressException.class, AccessControlException.class, InvalidToken.class, LeaseExpiredException.class, NSQuotaExceededException.class, DSQuotaExceededException.class); } /** * Actually start serving requests. */ void start() { server.start(); //start RPC server if (serviceRpcServer != null) { serviceRpcServer.start(); } } /** * Wait until the RPC server has shut down. 
*/ void join() throws InterruptedException { this.server.join(); } void stop() { if(server != null) server.stop(); if(serviceRpcServer != null) serviceRpcServer.stop(); } InetSocketAddress getServiceRpcAddress() { return serviceRPCAddress; } InetSocketAddress getRpcAddress() { return rpcAddress; } @Override // VersionedProtocol public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int clientMethodsHash) throws IOException { return ProtocolSignature.getProtocolSignature( this, protocol, clientVersion, clientMethodsHash); } @Override public long getProtocolVersion(String protocol, long clientVersion) throws IOException { if (protocol.equals(ClientProtocol.class.getName())) { return ClientProtocol.versionID; } else if (protocol.equals(DatanodeProtocol.class.getName())){ return DatanodeProtocol.versionID; } else if (protocol.equals(NamenodeProtocol.class.getName())){ return NamenodeProtocol.versionID; } else if (protocol.equals(RefreshAuthorizationPolicyProtocol.class.getName())){ return RefreshAuthorizationPolicyProtocol.versionID; } else if (protocol.equals(RefreshUserMappingsProtocol.class.getName())){ return RefreshUserMappingsProtocol.versionID; } else if (protocol.equals(GetUserMappingsProtocol.class.getName())){ return GetUserMappingsProtocol.versionID; } else { throw new IOException("Unknown protocol to name node: " + protocol); } } private static UserGroupInformation getRemoteUser() throws IOException { return NameNode.getRemoteUser(); } ///////////////////////////////////////////////////// // NamenodeProtocol ///////////////////////////////////////////////////// @Override // NamenodeProtocol public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size) throws IOException { if(size <= 0) { throw new IllegalArgumentException( "Unexpected not positive size: "+size); } return namesystem.getBlockManager().getBlocks(datanode, size); } @Override // NamenodeProtocol public ExportedBlockKeys getBlockKeys() throws IOException { 
return namesystem.getBlockManager().getBlockKeys(); } @Override // NamenodeProtocol public void errorReport(NamenodeRegistration registration, int errorCode, String msg) throws IOException { verifyRequest(registration); LOG.info("Error report from " + registration + ": " + msg); if(errorCode == FATAL) namesystem.releaseBackupNode(registration); } @Override // NamenodeProtocol public NamenodeRegistration register(NamenodeRegistration registration) throws IOException { verifyVersion(registration.getVersion()); NamenodeRegistration myRegistration = nn.setRegistration(); namesystem.registerBackupNode(registration, myRegistration); return myRegistration; } @Override // NamenodeProtocol public NamenodeCommand startCheckpoint(NamenodeRegistration registration) throws IOException { verifyRequest(registration); if(!nn.isRole(NamenodeRole.NAMENODE)) throw new IOException("Only an ACTIVE node can invoke startCheckpoint."); return namesystem.startCheckpoint(registration, nn.setRegistration()); } @Override // NamenodeProtocol public void endCheckpoint(NamenodeRegistration registration, CheckpointSignature sig) throws IOException { verifyRequest(registration); if(!nn.isRole(NamenodeRole.NAMENODE)) throw new IOException("Only an ACTIVE node can invoke endCheckpoint."); namesystem.endCheckpoint(registration, sig); } @Override // ClientProtocol public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer) throws IOException { return namesystem.getDelegationToken(renewer); } @Override // ClientProtocol public long renewDelegationToken(Token<DelegationTokenIdentifier> token) throws InvalidToken, IOException { return namesystem.renewDelegationToken(token); } @Override // ClientProtocol public void cancelDelegationToken(Token<DelegationTokenIdentifier> token) throws IOException { namesystem.cancelDelegationToken(token); } @Override // ClientProtocol public LocatedBlocks getBlockLocations(String src, long offset, long length) throws IOException { 
metrics.incrGetBlockLocations(); return namesystem.getBlockLocations(getClientMachine(), src, offset, length); } @Override // ClientProtocol public FsServerDefaults getServerDefaults() throws IOException { return namesystem.getServerDefaults(); } @Override // ClientProtocol public void create(String src, FsPermission masked, String clientName, EnumSetWritable<CreateFlag> flag, boolean createParent, short replication, long blockSize) throws IOException { String clientMachine = getClientMachine(); if (stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*DIR* NameNode.create: file " +src+" for "+clientName+" at "+clientMachine); } if (!checkPathLength(src)) { throw new IOException("create: Pathname too long. Limit " + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels."); } namesystem.startFile(src, new PermissionStatus(getRemoteUser().getShortUserName(), null, masked), clientName, clientMachine, flag.get(), createParent, replication, blockSize); metrics.incrFilesCreated(); metrics.incrCreateFileOps(); } @Override // ClientProtocol public LocatedBlock append(String src, String clientName) throws IOException { String clientMachine = getClientMachine(); if (stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*DIR* NameNode.append: file " +src+" for "+clientName+" at "+clientMachine); } LocatedBlock info = namesystem.appendFile(src, clientName, clientMachine); metrics.incrFilesAppended(); return info; } @Override // ClientProtocol public boolean recoverLease(String src, String clientName) throws IOException { String clientMachine = getClientMachine(); return namesystem.recoverLease(src, clientName, clientMachine); } @Override // ClientProtocol public boolean setReplication(String src, short replication) throws IOException { return namesystem.setReplication(src, replication); } @Override // ClientProtocol public void setPermission(String src, FsPermission permissions) throws IOException { namesystem.setPermission(src, permissions); } @Override // 
ClientProtocol public void setOwner(String src, String username, String groupname) throws IOException { namesystem.setOwner(src, username, groupname); } @Override // ClientProtocol public LocatedBlock addBlock(String src, String clientName, ExtendedBlock previous, DatanodeInfo[] excludedNodes) throws IOException { if(stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*BLOCK* NameNode.addBlock: file " +src+" for "+clientName); } HashMap<Node, Node> excludedNodesSet = null; if (excludedNodes != null) { excludedNodesSet = new HashMap<Node, Node>(excludedNodes.length); for (Node node:excludedNodes) { excludedNodesSet.put(node, node); } } LocatedBlock locatedBlock = namesystem.getAdditionalBlock(src, clientName, previous, excludedNodesSet); if (locatedBlock != null) metrics.incrAddBlockOps(); return locatedBlock; } @Override // ClientProtocol public LocatedBlock getAdditionalDatanode(final String src, final ExtendedBlock blk, final DatanodeInfo[] existings, final DatanodeInfo[] excludes, final int numAdditionalNodes, final String clientName ) throws IOException { if (LOG.isDebugEnabled()) { LOG.debug("getAdditionalDatanode: src=" + src + ", blk=" + blk + ", existings=" + Arrays.asList(existings) + ", excludes=" + Arrays.asList(excludes) + ", numAdditionalNodes=" + numAdditionalNodes + ", clientName=" + clientName); } metrics.incrGetAdditionalDatanodeOps(); HashMap<Node, Node> excludeSet = null; if (excludes != null) { excludeSet = new HashMap<Node, Node>(excludes.length); for (Node node : excludes) { excludeSet.put(node, node); } } return namesystem.getAdditionalDatanode(src, blk, existings, excludeSet, numAdditionalNodes, clientName); } /** * The client needs to give up on the block. 
*/ public void abandonBlock(ExtendedBlock b, String src, String holder) throws IOException { if(stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*BLOCK* NameNode.abandonBlock: " +b+" of file "+src); } if (!namesystem.abandonBlock(b, src, holder)) { throw new IOException("Cannot abandon block during write to " + src); } } @Override // ClientProtocol public boolean complete(String src, String clientName, ExtendedBlock last) throws IOException { if(stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*DIR* NameNode.complete: " + src + " for " + clientName); } return namesystem.completeFile(src, clientName, last); } /** * The client has detected an error on the specified located blocks * and is reporting them to the server. For now, the namenode will * mark the block as corrupt. In the future we might * check the blocks are actually corrupt. */ @Override public void reportBadBlocks(LocatedBlock[] blocks) throws IOException { stateChangeLog.info("*DIR* NameNode.reportBadBlocks"); for (int i = 0; i < blocks.length; i++) { ExtendedBlock blk = blocks[i].getBlock(); DatanodeInfo[] nodes = blocks[i].getLocations(); for (int j = 0; j < nodes.length; j++) { DatanodeInfo dn = nodes[j]; namesystem.getBlockManager().findAndMarkBlockAsCorrupt(blk, dn, "client machine reported it"); } } } @Override // ClientProtocol public LocatedBlock updateBlockForPipeline(ExtendedBlock block, String clientName) throws IOException { return namesystem.updateBlockForPipeline(block, clientName); } @Override // ClientProtocol public void updatePipeline(String clientName, ExtendedBlock oldBlock, ExtendedBlock newBlock, DatanodeID[] newNodes) throws IOException { namesystem.updatePipeline(clientName, oldBlock, newBlock, newNodes); } @Override // DatanodeProtocol public void commitBlockSynchronization(ExtendedBlock block, long newgenerationstamp, long newlength, boolean closeFile, boolean deleteblock, DatanodeID[] newtargets) throws IOException { namesystem.commitBlockSynchronization(block, 
newgenerationstamp, newlength, closeFile, deleteblock, newtargets); } @Override // ClientProtocol public long getPreferredBlockSize(String filename) throws IOException { return namesystem.getPreferredBlockSize(filename); } @Deprecated @Override // ClientProtocol public boolean rename(String src, String dst) throws IOException { if(stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*DIR* NameNode.rename: " + src + " to " + dst); } if (!checkPathLength(dst)) { throw new IOException("rename: Pathname too long. Limit " + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels."); } boolean ret = namesystem.renameTo(src, dst); if (ret) { metrics.incrFilesRenamed(); } return ret; } @Override // ClientProtocol public void concat(String trg, String[] src) throws IOException { namesystem.concat(trg, src); } @Override // ClientProtocol public void rename2(String src, String dst, Options.Rename... options) throws IOException { if(stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*DIR* NameNode.rename: " + src + " to " + dst); } if (!checkPathLength(dst)) { throw new IOException("rename: Pathname too long. Limit " + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels."); } namesystem.renameTo(src, dst, options); metrics.incrFilesRenamed(); } @Override // ClientProtocol public boolean delete(String src, boolean recursive) throws IOException { if (stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*DIR* Namenode.delete: src=" + src + ", recursive=" + recursive); } boolean ret = namesystem.delete(src, recursive); if (ret) metrics.incrDeleteFileOps(); return ret; } /** * Check path length does not exceed maximum. Returns true if * length and depth are okay. Returns false if length is too long * or depth is too great. 
*/ private boolean checkPathLength(String src) { Path srcPath = new Path(src); return (src.length() <= MAX_PATH_LENGTH && srcPath.depth() <= MAX_PATH_DEPTH); } @Override // ClientProtocol public boolean mkdirs(String src, FsPermission masked, boolean createParent) throws IOException { if(stateChangeLog.isDebugEnabled()) { stateChangeLog.debug("*DIR* NameNode.mkdirs: " + src); } if (!checkPathLength(src)) { throw new IOException("mkdirs: Pathname too long. Limit " + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels."); } return namesystem.mkdirs(src, new PermissionStatus(getRemoteUser().getShortUserName(), null, masked), createParent); } @Override // ClientProtocol public void renewLease(String clientName) throws IOException { namesystem.renewLease(clientName); } @Override // ClientProtocol public DirectoryListing getListing(String src, byte[] startAfter, boolean needLocation) throws IOException { DirectoryListing files = namesystem.getListing( src, startAfter, needLocation); if (files != null) { metrics.incrGetListingOps(); metrics.incrFilesInGetListingOps(files.getPartialListing().length); } return files; } @Override // ClientProtocol public HdfsFileStatus getFileInfo(String src) throws IOException { metrics.incrFileInfoOps(); return namesystem.getFileInfo(src, true); } @Override // ClientProtocol public HdfsFileStatus getFileLinkInfo(String src) throws IOException { metrics.incrFileInfoOps(); return namesystem.getFileInfo(src, false); } @Override public long[] getStats() { return namesystem.getStats(); } @Override // ClientProtocol public DatanodeInfo[] getDatanodeReport(DatanodeReportType type) throws IOException { DatanodeInfo results[] = namesystem.datanodeReport(type); if (results == null ) { throw new IOException("Cannot find datanode report"); } return results; } @Override // ClientProtocol public boolean setSafeMode(SafeModeAction action) throws IOException { return namesystem.setSafeMode(action); } @Override // ClientProtocol public boolean 
restoreFailedStorage(String arg) throws AccessControlException { return namesystem.restoreFailedStorage(arg); } @Override // ClientProtocol public void saveNamespace() throws IOException { namesystem.saveNamespace(); } @Override // ClientProtocol public void refreshNodes() throws IOException { namesystem.refreshNodes(); } @Override // NamenodeProtocol public long getTransactionID() { return namesystem.getEditLog().getSyncTxId(); } @Override // NamenodeProtocol public CheckpointSignature rollEditLog() throws IOException { return namesystem.rollEditLog(); } @Override public RemoteEditLogManifest getEditLogManifest(long sinceTxId) throws IOException { return namesystem.getEditLog().getEditLogManifest(sinceTxId); } @Override // ClientProtocol public void finalizeUpgrade() throws IOException { namesystem.finalizeUpgrade(); } @Override // ClientProtocol public UpgradeStatusReport distributedUpgradeProgress(UpgradeAction action) throws IOException { return namesystem.distributedUpgradeProgress(action); } @Override // ClientProtocol public void metaSave(String filename) throws IOException { namesystem.metaSave(filename); } @Override // ClientProtocol public CorruptFileBlocks listCorruptFileBlocks(String path, String cookie) throws IOException { String[] cookieTab = new String[] { cookie }; Collection<FSNamesystem.CorruptFileBlockInfo> fbs = namesystem.listCorruptFileBlocks(path, cookieTab); String[] files = new String[fbs.size()]; int i = 0; for(FSNamesystem.CorruptFileBlockInfo fb: fbs) { files[i++] = fb.path; } return new CorruptFileBlocks(files, cookieTab[0]); } /** * Tell all datanodes to use a new, non-persistent bandwidth value for * dfs.datanode.balance.bandwidthPerSec. * @param bandwidth Blanacer bandwidth in bytes per second for all datanodes. 
* @throws IOException */ @Override // ClientProtocol public void setBalancerBandwidth(long bandwidth) throws IOException { namesystem.setBalancerBandwidth(bandwidth); } @Override // ClientProtocol public ContentSummary getContentSummary(String path) throws IOException { return namesystem.getContentSummary(path); } @Override // ClientProtocol public void setQuota(String path, long namespaceQuota, long diskspaceQuota) throws IOException { namesystem.setQuota(path, namespaceQuota, diskspaceQuota); } @Override // ClientProtocol public void fsync(String src, String clientName) throws IOException { namesystem.fsync(src, clientName); } @Override // ClientProtocol public void setTimes(String src, long mtime, long atime) throws IOException { namesystem.setTimes(src, mtime, atime); } @Override // ClientProtocol public void createSymlink(String target, String link, FsPermission dirPerms, boolean createParent) throws IOException { metrics.incrCreateSymlinkOps(); /* We enforce the MAX_PATH_LENGTH limit even though a symlink target * URI may refer to a non-HDFS file system. 
*/ if (!checkPathLength(link)) { throw new IOException("Symlink path exceeds " + MAX_PATH_LENGTH + " character limit"); } if ("".equals(target)) { throw new IOException("Invalid symlink target"); } final UserGroupInformation ugi = getRemoteUser(); namesystem.createSymlink(target, link, new PermissionStatus(ugi.getShortUserName(), null, dirPerms), createParent); } @Override // ClientProtocol public String getLinkTarget(String path) throws IOException { metrics.incrGetLinkTargetOps(); try { HdfsFileStatus stat = namesystem.getFileInfo(path, false); if (stat != null) { // NB: getSymlink throws IOException if !stat.isSymlink() return stat.getSymlink(); } } catch (UnresolvedPathException e) { return e.getResolvedPath().toString(); } catch (UnresolvedLinkException e) { // The NameNode should only throw an UnresolvedPathException throw new AssertionError("UnresolvedLinkException thrown"); } return null; } @Override // DatanodeProtocol public DatanodeRegistration registerDatanode(DatanodeRegistration nodeReg) throws IOException { verifyVersion(nodeReg.getVersion()); namesystem.registerDatanode(nodeReg); return nodeReg; } @Override // DatanodeProtocol public DatanodeCommand[] sendHeartbeat(DatanodeRegistration nodeReg, long capacity, long dfsUsed, long remaining, long blockPoolUsed, int xmitsInProgress, int xceiverCount, int failedVolumes) throws IOException { verifyRequest(nodeReg); return namesystem.handleHeartbeat(nodeReg, capacity, dfsUsed, remaining, blockPoolUsed, xceiverCount, xmitsInProgress, failedVolumes); } @Override // DatanodeProtocol public DatanodeCommand blockReport(DatanodeRegistration nodeReg, String poolId, long[] blocks) throws IOException { verifyRequest(nodeReg); BlockListAsLongs blist = new BlockListAsLongs(blocks); if(blockStateChangeLog.isDebugEnabled()) { blockStateChangeLog.debug("*BLOCK* NameNode.blockReport: " + "from " + nodeReg.getName() + " " + blist.getNumberOfBlocks() + " blocks"); } namesystem.getBlockManager().processReport(nodeReg, 
poolId, blist); if (nn.getFSImage().isUpgradeFinalized()) return new DatanodeCommand.Finalize(poolId); return null; } @Override // DatanodeProtocol public void blockReceivedAndDeleted(DatanodeRegistration nodeReg, String poolId, ReceivedDeletedBlockInfo[] receivedAndDeletedBlocks) throws IOException { verifyRequest(nodeReg); if(stateChangeLog.isDebugEnabled()) { blockStateChangeLog.debug("*BLOCK* NameNode.blockReceivedAndDeleted: " +"from "+nodeReg.getName()+" "+receivedAndDeletedBlocks.length +" blocks."); } namesystem.getBlockManager().blockReceivedAndDeleted( nodeReg, poolId, receivedAndDeletedBlocks); } @Override // DatanodeProtocol public void errorReport(DatanodeRegistration nodeReg, int errorCode, String msg) throws IOException { String dnName = (nodeReg == null ? "unknown DataNode" : nodeReg.getName()); if (errorCode == DatanodeProtocol.NOTIFY) { LOG.info("Error report from " + dnName + ": " + msg); return; } verifyRequest(nodeReg); if (errorCode == DatanodeProtocol.DISK_ERROR) { LOG.warn("Disk error on " + dnName + ": " + msg); } else if (errorCode == DatanodeProtocol.FATAL_DISK_ERROR) { LOG.warn("Fatal disk error on " + dnName + ": " + msg); namesystem.getBlockManager().getDatanodeManager().removeDatanode(nodeReg); } else { LOG.info("Error report from " + dnName + ": " + msg); } } @Override // DatanodeProtocol, NamenodeProtocol public NamespaceInfo versionRequest() throws IOException { return namesystem.getNamespaceInfo(); } @Override // DatanodeProtocol public UpgradeCommand processUpgradeCommand(UpgradeCommand comm) throws IOException { return namesystem.processDistributedUpgradeCommand(comm); } /** * Verify request. * * Verifies correctness of the datanode version, registration ID, and * if the datanode does not need to be shutdown. 
* * @param nodeReg data node registration * @throws IOException */ void verifyRequest(NodeRegistration nodeReg) throws IOException { verifyVersion(nodeReg.getVersion()); if (!namesystem.getRegistrationID().equals(nodeReg.getRegistrationID())) { LOG.warn("Invalid registrationID - expected: " + namesystem.getRegistrationID() + " received: " + nodeReg.getRegistrationID()); throw new UnregisteredNodeException(nodeReg); } } @Override // RefreshAuthorizationPolicyProtocol public void refreshServiceAcl() throws IOException { if (!serviceAuthEnabled) { throw new AuthorizationException("Service Level Authorization not enabled!"); } this.server.refreshServiceAcl(new Configuration(), new HDFSPolicyProvider()); if (this.serviceRpcServer != null) { this.serviceRpcServer.refreshServiceAcl(new Configuration(), new HDFSPolicyProvider()); } } @Override // RefreshAuthorizationPolicyProtocol public void refreshUserToGroupsMappings() throws IOException { LOG.info("Refreshing all user-to-groups mappings. Requested by user: " + getRemoteUser().getShortUserName()); Groups.getUserToGroupsMappingService().refresh(); } @Override // RefreshAuthorizationPolicyProtocol public void refreshSuperUserGroupsConfiguration() { LOG.info("Refreshing SuperUser proxy group mapping list "); ProxyUsers.refreshSuperUserGroupsConfiguration(); } @Override // GetUserMappingsProtocol public String[] getGroupsForUser(String user) throws IOException { if (LOG.isDebugEnabled()) { LOG.debug("Getting groups for user " + user); } return UserGroupInformation.createRemoteUser(user).getGroupNames(); } /** * Verify version. 
* * @param version * @throws IOException */ void verifyVersion(int version) throws IOException { if (version != HdfsConstants.LAYOUT_VERSION) throw new IncorrectVersionException(version, "data node"); } private static String getClientMachine() { String clientMachine = NamenodeWebHdfsMethods.getRemoteAddress(); if (clientMachine == null) { //not a web client clientMachine = Server.getRemoteAddress(); } if (clientMachine == null) { //not a RPC client clientMachine = ""; } return clientMachine; } }
apache-2.0
pubudu91/docker-registry
src/gen/java/org/eclipse/packagedrone/dockerregistry/api/ApiOriginFilter.java
797
package org.eclipse.packagedrone.dockerregistry.api;

import java.io.IOException;

import javax.servlet.*;
import javax.servlet.http.HttpServletResponse;

/**
 * Servlet filter that adds CORS (Cross-Origin Resource Sharing) headers to
 * every response, allowing browser clients from any origin to call the API
 * with GET, POST, DELETE and PUT requests carrying a {@code Content-Type}
 * header.
 */
public class ApiOriginFilter implements javax.servlet.Filter {

    /**
     * Adds the CORS headers to the response, then passes the request down
     * the filter chain unchanged.
     *
     * @param request  the incoming request
     * @param response the outgoing response; assumed to be an
     *                 {@link HttpServletResponse} — a non-HTTP response would
     *                 throw {@link ClassCastException} here
     * @param chain    the remaining filter chain
     * @throws IOException      if the downstream chain fails with an I/O error
     * @throws ServletException if the downstream chain fails
     */
    @Override
    public void doFilter(ServletRequest request, ServletResponse response,
                         FilterChain chain) throws IOException, ServletException {
        HttpServletResponse res = (HttpServletResponse) response;
        res.addHeader("Access-Control-Allow-Origin", "*");
        res.addHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT");
        res.addHeader("Access-Control-Allow-Headers", "Content-Type");
        chain.doFilter(request, response);
    }

    /** No resources to release. */
    @Override
    public void destroy() {
    }

    /** No configuration required. */
    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
    }
}
apache-2.0
igorbolic/memcached-spring-boot
memcached-spring-boot-autoconfigure/src/main/java/io/sixhours/memcached/cache/SpyMemcachedClient.java
2060
/*
 * Copyright 2016-2021 Sixhours
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.sixhours.memcached.cache;

import java.util.Objects;

import net.spy.memcached.MemcachedClient;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * {@code SpyMemcached} memcached client implementation.
 * <p>
 * Thin adapter exposing the operations required by {@code IMemcachedClient},
 * delegating every call to the wrapped native SpyMemcached client.
 *
 * @author Sasa Bolic
 */
public class SpyMemcachedClient implements IMemcachedClient {

    private static final Log log = LogFactory.getLog(SpyMemcachedClient.class);

    private final MemcachedClient memcachedClient;

    /**
     * Creates the adapter around the given native client.
     *
     * @param memcachedClient the underlying SpyMemcached client; must not be {@code null}
     * @throws NullPointerException if {@code memcachedClient} is {@code null}
     */
    public SpyMemcachedClient(MemcachedClient memcachedClient) {
        // Fail fast here rather than with an NPE on the first cache operation.
        this.memcachedClient = Objects.requireNonNull(memcachedClient, "memcachedClient must not be null");
        log.info("SpyMemcached client initialized.");
    }

    /** Returns the wrapped native SpyMemcached client. */
    @Override
    public MemcachedClient nativeClient() {
        return this.memcachedClient;
    }

    /** Retrieves the value stored under {@code key}, or {@code null} if absent. */
    @Override
    public Object get(String key) {
        return this.memcachedClient.get(key);
    }

    /** Stores {@code value} under {@code key} with expiration {@code exp} (seconds). */
    @Override
    public void set(String key, int exp, Object value) {
        this.memcachedClient.set(key, exp, value);
    }

    /** Updates the expiration of an existing entry without changing its value. */
    @Override
    public void touch(String key, int exp) {
        this.memcachedClient.touch(key, exp);
    }

    /** Deletes the entry stored under {@code key}. */
    @Override
    public void delete(String key) {
        this.memcachedClient.delete(key);
    }

    /** Flushes all entries from the memcached server(s). */
    @Override
    public void flush() {
        this.memcachedClient.flush();
    }

    /** Atomically increments the counter under {@code key} by {@code by} and returns the new value. */
    @Override
    public long incr(String key, int by) {
        return this.memcachedClient.incr(key, by);
    }

    /** Shuts down the underlying client and releases its connections. */
    @Override
    public void shutdown() {
        this.memcachedClient.shutdown();
    }
}
apache-2.0
rwl/requestfactory-addon
addon/src/main/java/org/springframework/roo/addon/requestfactory/account/AccountOperations.java
840
package org.springframework.roo.addon.requestfactory.account;

import org.springframework.roo.model.JavaPackage;
import org.springframework.roo.model.JavaType;

/**
 * Interface of operations this add-on offers. Typically used by a command type
 * or an external add-on.
 *
 * @since 1.1
 */
public interface AccountOperations {

    // Name of the Spring Security servlet filter bean referenced during security setup.
    String SECURITY_FILTER_NAME = "springSecurityFilterChain";

    /**
     * Indicates whether the "annotate account type" command should be available.
     *
     * @return true if it should be available, otherwise false
     */
    boolean isAddCommandAvailable();

    /**
     * Indicates whether the security setup command should be available.
     *
     * @return true if it should be available, otherwise false
     */
    boolean isSetupCommandAvailable();

    /**
     * Annotate the provided Java type with the trigger annotation of this add-on.
     *
     * @param type the Java type to annotate
     * @param sharedPackage the shared package — presumably where shared
     *        request-factory artifacts live; confirm against the implementation
     */
    void annotateAccountType(JavaType type, final JavaPackage sharedPackage);

    /**
     * Sets up security for the given account type.
     *
     * @param type the account Java type
     * @param accountPackage the package holding account artifacts — exact
     *        semantics depend on the implementation; verify before relying on it
     */
    void setupSecurity(JavaType type, final JavaPackage accountPackage);
}
apache-2.0
XiaoMi/galaxy-sdk-java
galaxy-thrift-api/src/main/thrift-java/com/xiaomi/infra/galaxy/sds/thrift/CommittedOffset.java
23954
/** * Autogenerated by Thrift Compiler (0.9.2) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package com.xiaomi.infra.galaxy.sds.thrift; import libthrift091.scheme.IScheme; import libthrift091.scheme.SchemeFactory; import libthrift091.scheme.StandardScheme; import libthrift091.scheme.TupleScheme; import libthrift091.protocol.TTupleProtocol; import libthrift091.protocol.TProtocolException; import libthrift091.EncodingUtils; import libthrift091.TException; import libthrift091.async.AsyncMethodCallback; import libthrift091.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-3-25") public class CommittedOffset implements libthrift091.TBase<CommittedOffset, CommittedOffset._Fields>, java.io.Serializable, Cloneable, Comparable<CommittedOffset> { private static final libthrift091.protocol.TStruct STRUCT_DESC = new libthrift091.protocol.TStruct("CommittedOffset"); private static final libthrift091.protocol.TField COMMITTED_DATA_OFFSET_FIELD_DESC = new libthrift091.protocol.TField("committedDataOffset", libthrift091.protocol.TType.MAP, (short)1); private static final libthrift091.protocol.TField DATA_COMMIT_FINISHED_FIELD_DESC = new libthrift091.protocol.TField("dataCommitFinished", libthrift091.protocol.TType.BOOL, (short)2); private static final libthrift091.protocol.TField COMMITTED_EDIT_OFFSET_FIELD_DESC = new libthrift091.protocol.TField("committedEditOffset", libthrift091.protocol.TType.I64, (short)3); private static 
final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new CommittedOffsetStandardSchemeFactory()); schemes.put(TupleScheme.class, new CommittedOffsetTupleSchemeFactory()); } /** * 当前存量数据已确认的消费偏移 */ public Map<String,Datum> committedDataOffset; // optional /** * 存量数据是否确认消费完毕 */ public boolean dataCommitFinished; // optional /** * 当前增量数据已确认的消费偏移 */ public long committedEditOffset; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements libthrift091.TFieldIdEnum { /** * 当前存量数据已确认的消费偏移 */ COMMITTED_DATA_OFFSET((short)1, "committedDataOffset"), /** * 存量数据是否确认消费完毕 */ DATA_COMMIT_FINISHED((short)2, "dataCommitFinished"), /** * 当前增量数据已确认的消费偏移 */ COMMITTED_EDIT_OFFSET((short)3, "committedEditOffset"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // COMMITTED_DATA_OFFSET return COMMITTED_DATA_OFFSET; case 2: // DATA_COMMIT_FINISHED return DATA_COMMIT_FINISHED; case 3: // COMMITTED_EDIT_OFFSET return COMMITTED_EDIT_OFFSET; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __DATACOMMITFINISHED_ISSET_ID = 0; private static final int __COMMITTEDEDITOFFSET_ISSET_ID = 1; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.COMMITTED_DATA_OFFSET,_Fields.DATA_COMMIT_FINISHED,_Fields.COMMITTED_EDIT_OFFSET}; public static final Map<_Fields, libthrift091.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, libthrift091.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, libthrift091.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.COMMITTED_DATA_OFFSET, new libthrift091.meta_data.FieldMetaData("committedDataOffset", libthrift091.TFieldRequirementType.OPTIONAL, new libthrift091.meta_data.FieldValueMetaData(libthrift091.protocol.TType.MAP , "Dictionary"))); tmpMap.put(_Fields.DATA_COMMIT_FINISHED, new libthrift091.meta_data.FieldMetaData("dataCommitFinished", libthrift091.TFieldRequirementType.OPTIONAL, new libthrift091.meta_data.FieldValueMetaData(libthrift091.protocol.TType.BOOL))); tmpMap.put(_Fields.COMMITTED_EDIT_OFFSET, new libthrift091.meta_data.FieldMetaData("committedEditOffset", libthrift091.TFieldRequirementType.OPTIONAL, new libthrift091.meta_data.FieldValueMetaData(libthrift091.protocol.TType.I64))); metaDataMap = Collections.unmodifiableMap(tmpMap); libthrift091.meta_data.FieldMetaData.addStructMetaDataMap(CommittedOffset.class, metaDataMap); } public CommittedOffset() { } /** * Performs a deep copy on <i>other</i>. 
*/ public CommittedOffset(CommittedOffset other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetCommittedDataOffset()) { this.committedDataOffset = other.committedDataOffset; } this.dataCommitFinished = other.dataCommitFinished; this.committedEditOffset = other.committedEditOffset; } public CommittedOffset deepCopy() { return new CommittedOffset(this); } @Override public void clear() { this.committedDataOffset = null; setDataCommitFinishedIsSet(false); this.dataCommitFinished = false; setCommittedEditOffsetIsSet(false); this.committedEditOffset = 0; } public int getCommittedDataOffsetSize() { return (this.committedDataOffset == null) ? 0 : this.committedDataOffset.size(); } public void putToCommittedDataOffset(String key, Datum val) { if (this.committedDataOffset == null) { this.committedDataOffset = new HashMap<String,Datum>(); } this.committedDataOffset.put(key, val); } /** * 当前存量数据已确认的消费偏移 */ public Map<String,Datum> getCommittedDataOffset() { return this.committedDataOffset; } /** * 当前存量数据已确认的消费偏移 */ public CommittedOffset setCommittedDataOffset(Map<String,Datum> committedDataOffset) { this.committedDataOffset = committedDataOffset; return this; } public void unsetCommittedDataOffset() { this.committedDataOffset = null; } /** Returns true if field committedDataOffset is set (has been assigned a value) and false otherwise */ public boolean isSetCommittedDataOffset() { return this.committedDataOffset != null; } public void setCommittedDataOffsetIsSet(boolean value) { if (!value) { this.committedDataOffset = null; } } /** * 存量数据是否确认消费完毕 */ public boolean isDataCommitFinished() { return this.dataCommitFinished; } /** * 存量数据是否确认消费完毕 */ public CommittedOffset setDataCommitFinished(boolean dataCommitFinished) { this.dataCommitFinished = dataCommitFinished; setDataCommitFinishedIsSet(true); return this; } public void unsetDataCommitFinished() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __DATACOMMITFINISHED_ISSET_ID); } /** Returns true if 
field dataCommitFinished is set (has been assigned a value) and false otherwise */ public boolean isSetDataCommitFinished() { return EncodingUtils.testBit(__isset_bitfield, __DATACOMMITFINISHED_ISSET_ID); } public void setDataCommitFinishedIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DATACOMMITFINISHED_ISSET_ID, value); } /** * 当前增量数据已确认的消费偏移 */ public long getCommittedEditOffset() { return this.committedEditOffset; } /** * 当前增量数据已确认的消费偏移 */ public CommittedOffset setCommittedEditOffset(long committedEditOffset) { this.committedEditOffset = committedEditOffset; setCommittedEditOffsetIsSet(true); return this; } public void unsetCommittedEditOffset() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __COMMITTEDEDITOFFSET_ISSET_ID); } /** Returns true if field committedEditOffset is set (has been assigned a value) and false otherwise */ public boolean isSetCommittedEditOffset() { return EncodingUtils.testBit(__isset_bitfield, __COMMITTEDEDITOFFSET_ISSET_ID); } public void setCommittedEditOffsetIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __COMMITTEDEDITOFFSET_ISSET_ID, value); } public void setFieldValue(_Fields field, Object value) { switch (field) { case COMMITTED_DATA_OFFSET: if (value == null) { unsetCommittedDataOffset(); } else { setCommittedDataOffset((Map<String,Datum>)value); } break; case DATA_COMMIT_FINISHED: if (value == null) { unsetDataCommitFinished(); } else { setDataCommitFinished((Boolean)value); } break; case COMMITTED_EDIT_OFFSET: if (value == null) { unsetCommittedEditOffset(); } else { setCommittedEditOffset((Long)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case COMMITTED_DATA_OFFSET: return getCommittedDataOffset(); case DATA_COMMIT_FINISHED: return Boolean.valueOf(isDataCommitFinished()); case COMMITTED_EDIT_OFFSET: return Long.valueOf(getCommittedEditOffset()); } throw new IllegalStateException(); } /** Returns true if 
field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case COMMITTED_DATA_OFFSET: return isSetCommittedDataOffset(); case DATA_COMMIT_FINISHED: return isSetDataCommitFinished(); case COMMITTED_EDIT_OFFSET: return isSetCommittedEditOffset(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof CommittedOffset) return this.equals((CommittedOffset)that); return false; } public boolean equals(CommittedOffset that) { if (that == null) return false; boolean this_present_committedDataOffset = true && this.isSetCommittedDataOffset(); boolean that_present_committedDataOffset = true && that.isSetCommittedDataOffset(); if (this_present_committedDataOffset || that_present_committedDataOffset) { if (!(this_present_committedDataOffset && that_present_committedDataOffset)) return false; if (!this.committedDataOffset.equals(that.committedDataOffset)) return false; } boolean this_present_dataCommitFinished = true && this.isSetDataCommitFinished(); boolean that_present_dataCommitFinished = true && that.isSetDataCommitFinished(); if (this_present_dataCommitFinished || that_present_dataCommitFinished) { if (!(this_present_dataCommitFinished && that_present_dataCommitFinished)) return false; if (this.dataCommitFinished != that.dataCommitFinished) return false; } boolean this_present_committedEditOffset = true && this.isSetCommittedEditOffset(); boolean that_present_committedEditOffset = true && that.isSetCommittedEditOffset(); if (this_present_committedEditOffset || that_present_committedEditOffset) { if (!(this_present_committedEditOffset && that_present_committedEditOffset)) return false; if (this.committedEditOffset != that.committedEditOffset) return false; } return true; } @Override public int hashCode() { List<Object> list = new 
ArrayList<Object>(); boolean present_committedDataOffset = true && (isSetCommittedDataOffset()); list.add(present_committedDataOffset); if (present_committedDataOffset) list.add(committedDataOffset); boolean present_dataCommitFinished = true && (isSetDataCommitFinished()); list.add(present_dataCommitFinished); if (present_dataCommitFinished) list.add(dataCommitFinished); boolean present_committedEditOffset = true && (isSetCommittedEditOffset()); list.add(present_committedEditOffset); if (present_committedEditOffset) list.add(committedEditOffset); return list.hashCode(); } @Override public int compareTo(CommittedOffset other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(isSetCommittedDataOffset()).compareTo(other.isSetCommittedDataOffset()); if (lastComparison != 0) { return lastComparison; } if (isSetCommittedDataOffset()) { lastComparison = libthrift091.TBaseHelper.compareTo(this.committedDataOffset, other.committedDataOffset); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetDataCommitFinished()).compareTo(other.isSetDataCommitFinished()); if (lastComparison != 0) { return lastComparison; } if (isSetDataCommitFinished()) { lastComparison = libthrift091.TBaseHelper.compareTo(this.dataCommitFinished, other.dataCommitFinished); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetCommittedEditOffset()).compareTo(other.isSetCommittedEditOffset()); if (lastComparison != 0) { return lastComparison; } if (isSetCommittedEditOffset()) { lastComparison = libthrift091.TBaseHelper.compareTo(this.committedEditOffset, other.committedEditOffset); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(libthrift091.protocol.TProtocol iprot) throws 
libthrift091.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(libthrift091.protocol.TProtocol oprot) throws libthrift091.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("CommittedOffset("); boolean first = true; if (isSetCommittedDataOffset()) { sb.append("committedDataOffset:"); if (this.committedDataOffset == null) { sb.append("null"); } else { sb.append(this.committedDataOffset); } first = false; } if (isSetDataCommitFinished()) { if (!first) sb.append(", "); sb.append("dataCommitFinished:"); sb.append(this.dataCommitFinished); first = false; } if (isSetCommittedEditOffset()) { if (!first) sb.append(", "); sb.append("committedEditOffset:"); sb.append(this.committedEditOffset); first = false; } sb.append(")"); return sb.toString(); } public void validate() throws libthrift091.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new libthrift091.protocol.TCompactProtocol(new libthrift091.transport.TIOStreamTransport(out))); } catch (libthrift091.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new libthrift091.protocol.TCompactProtocol(new libthrift091.transport.TIOStreamTransport(in))); } catch (libthrift091.TException te) { throw new java.io.IOException(te); } } private static class CommittedOffsetStandardSchemeFactory implements SchemeFactory { public CommittedOffsetStandardScheme getScheme() { return new CommittedOffsetStandardScheme(); } } private static class CommittedOffsetStandardScheme extends StandardScheme<CommittedOffset> { public void read(libthrift091.protocol.TProtocol iprot, CommittedOffset struct) throws libthrift091.TException { libthrift091.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == libthrift091.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // COMMITTED_DATA_OFFSET if (schemeField.type == libthrift091.protocol.TType.MAP) { { libthrift091.protocol.TMap _map436 = iprot.readMapBegin(); struct.committedDataOffset = new HashMap<String,Datum>(2*_map436.size); String _key437; Datum _val438; for (int _i439 = 0; _i439 < _map436.size; ++_i439) { _key437 = iprot.readString(); _val438 = new Datum(); _val438.read(iprot); struct.committedDataOffset.put(_key437, _val438); } iprot.readMapEnd(); } struct.setCommittedDataOffsetIsSet(true); } else { libthrift091.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // DATA_COMMIT_FINISHED if (schemeField.type == libthrift091.protocol.TType.BOOL) { struct.dataCommitFinished = iprot.readBool(); struct.setDataCommitFinishedIsSet(true); } else { libthrift091.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // COMMITTED_EDIT_OFFSET if (schemeField.type == libthrift091.protocol.TType.I64) { struct.committedEditOffset = iprot.readI64(); struct.setCommittedEditOffsetIsSet(true); } else { libthrift091.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: libthrift091.protocol.TProtocolUtil.skip(iprot, schemeField.type); } 
iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } public void write(libthrift091.protocol.TProtocol oprot, CommittedOffset struct) throws libthrift091.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.committedDataOffset != null) { if (struct.isSetCommittedDataOffset()) { oprot.writeFieldBegin(COMMITTED_DATA_OFFSET_FIELD_DESC); { oprot.writeMapBegin(new libthrift091.protocol.TMap(libthrift091.protocol.TType.STRING, libthrift091.protocol.TType.STRUCT, struct.committedDataOffset.size())); for (Map.Entry<String, Datum> _iter440 : struct.committedDataOffset.entrySet()) { oprot.writeString(_iter440.getKey()); _iter440.getValue().write(oprot); } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } } if (struct.isSetDataCommitFinished()) { oprot.writeFieldBegin(DATA_COMMIT_FINISHED_FIELD_DESC); oprot.writeBool(struct.dataCommitFinished); oprot.writeFieldEnd(); } if (struct.isSetCommittedEditOffset()) { oprot.writeFieldBegin(COMMITTED_EDIT_OFFSET_FIELD_DESC); oprot.writeI64(struct.committedEditOffset); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class CommittedOffsetTupleSchemeFactory implements SchemeFactory { public CommittedOffsetTupleScheme getScheme() { return new CommittedOffsetTupleScheme(); } } private static class CommittedOffsetTupleScheme extends TupleScheme<CommittedOffset> { @Override public void write(libthrift091.protocol.TProtocol prot, CommittedOffset struct) throws libthrift091.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetCommittedDataOffset()) { optionals.set(0); } if (struct.isSetDataCommitFinished()) { optionals.set(1); } if (struct.isSetCommittedEditOffset()) { optionals.set(2); } oprot.writeBitSet(optionals, 3); if (struct.isSetCommittedDataOffset()) { { oprot.writeI32(struct.committedDataOffset.size()); 
for (Map.Entry<String, Datum> _iter441 : struct.committedDataOffset.entrySet()) { oprot.writeString(_iter441.getKey()); _iter441.getValue().write(oprot); } } } if (struct.isSetDataCommitFinished()) { oprot.writeBool(struct.dataCommitFinished); } if (struct.isSetCommittedEditOffset()) { oprot.writeI64(struct.committedEditOffset); } } @Override public void read(libthrift091.protocol.TProtocol prot, CommittedOffset struct) throws libthrift091.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(3); if (incoming.get(0)) { { libthrift091.protocol.TMap _map442 = new libthrift091.protocol.TMap(libthrift091.protocol.TType.STRING, libthrift091.protocol.TType.STRUCT, iprot.readI32()); struct.committedDataOffset = new HashMap<String,Datum>(2*_map442.size); String _key443; Datum _val444; for (int _i445 = 0; _i445 < _map442.size; ++_i445) { _key443 = iprot.readString(); _val444 = new Datum(); _val444.read(iprot); struct.committedDataOffset.put(_key443, _val444); } } struct.setCommittedDataOffsetIsSet(true); } if (incoming.get(1)) { struct.dataCommitFinished = iprot.readBool(); struct.setDataCommitFinishedIsSet(true); } if (incoming.get(2)) { struct.committedEditOffset = iprot.readI64(); struct.setCommittedEditOffsetIsSet(true); } } } }
apache-2.0
rwoodley/Team10182
archive/2015-16/ComputerVision/CVApp03/app/src/main/java/com/noblenetwork/drwcollegeprep/cvapp03/Utilities.java
3749
package com.noblenetwork.drwcollegeprep.cvapp03;

import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Environment;
import android.util.Log;

import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;

/**
 * Static helpers for persisting OpenCV images to external storage and
 * registering them with the Android media gallery.
 */
public class Utilities {
    private static final String TAG = "ORBDetector::Utilities";

    public static final int MEDIA_TYPE_IMAGE = 1;
    public static final int MEDIA_TYPE_VIDEO = 2;

    /**
     * Converts the given OpenCV matrix to a Bitmap, saves it as a PNG file in
     * the app's public picture directory and registers it with the gallery.
     *
     * @param outputImage the image to save; assumed to be in a format
     *                    {@code Utils.matToBitmap} accepts (e.g. RGBA) — TODO confirm
     * @param context     context used to broadcast the media-scanner intent
     */
    public static void saveImg(Mat outputImage, Context context) {
        File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
        if (pictureFile == null) {
            Log.e(TAG, "Error creating media file, check storage permissions: ");
            return;
        }
        // try-with-resources: the original leaked the stream if an exception
        // occurred between open and close.
        try (FileOutputStream fos = new FileOutputStream(pictureFile)) {
            Bitmap m_bmp = Bitmap.createBitmap(outputImage.width(), outputImage.height(),
                    Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(outputImage, m_bmp);
            m_bmp.compress(Bitmap.CompressFormat.PNG, 100, fos);
            fos.flush();
            Log.d(TAG, "Saved image as: " + pictureFile.getName());
            // Scan only after the file actually exists on disk; the original
            // broadcast the scan intent before the file was written.
            galleryAddPic(pictureFile.getPath(), context);
        } catch (FileNotFoundException e) {
            Log.d(TAG, "File not found: " + e.getMessage());
        } catch (IOException e) {
            Log.d(TAG, "Error accessing file: " + e.getMessage());
        }
    }

    /**
     * Builds a timestamped output file for the requested media type, or null
     * if the storage directory is unavailable or the type is unknown.
     *
     * @param type MEDIA_TYPE_IMAGE or MEDIA_TYPE_VIDEO
     */
    private static File getOutputMediaFile(int type) {
        File mediaStorageDir = getStorageDirectory();
        if (mediaStorageDir == null) {
            // The original dereferenced this unconditionally and crashed with
            // an NPE when the directory could not be created.
            return null;
        }

        // Locale.US keeps the timestamp ASCII regardless of device locale.
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(new Date());
        if (type == MEDIA_TYPE_IMAGE) {
            // .png to match the PNG data actually written by saveImg (the
            // original used a misleading .jpg extension).
            return new File(mediaStorageDir.getPath() + File.separator + "IMG_" + timeStamp + ".png");
        } else if (type == MEDIA_TYPE_VIDEO) {
            return new File(mediaStorageDir.getPath() + File.separator + "VID_" + timeStamp + ".mp4");
        } else {
            return null;
        }
    }

    /**
     * Returns the shared picture directory used by this app, creating it if
     * needed; null if it cannot be created.
     */
    public static File getStorageDirectory() {
        // To be safe, you should check that the SDCard is mounted
        // using Environment.getExternalStorageState() before doing this.
        File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
                Environment.DIRECTORY_PICTURES), "Team10181");
        // This location works best if you want the created images to be shared
        // between applications and persist after your app has been uninstalled.

        // Create the storage directory if it does not exist
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.d("Team10181", "failed to create directory");
                return null;
            }
        }
        return mediaStorageDir;
    }

    /**
     * Broadcasts a media-scanner intent so the saved file shows up in the
     * gallery immediately.
     */
    private static void galleryAddPic(String path, Context context) {
        Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
        File f = new File(path);
        Uri contentUri = Uri.fromFile(f);
        mediaScanIntent.setData(contentUri);
        context.sendBroadcast(mediaScanIntent);
    }
}
apache-2.0
michaelmosmann/wicket-in-3-days
src/main/java/de/mosmann/topics/basics/forms/TripleAValidator.java
585
package de.mosmann.topics.basics.forms; import org.apache.wicket.behavior.Behavior; import org.apache.wicket.validation.IValidatable; import org.apache.wicket.validation.IValidator; import org.apache.wicket.validation.ValidationError; public class TripleAValidator extends Behavior implements IValidator<String> { @Override public void validate(IValidatable<String> validatable) { String value = validatable.getValue(); if ((value == null) || (!value.startsWith("AAA"))) { validatable.error(new ValidationError().addKey("tripleAAA").setVariable("value", value)); } } }
apache-2.0
adligo/tests4j.adligo.org
src/org/adligo/tests4j/models/shared/reference_groups/jse/v1_6/JSE_1_6_Math.java
1850
package org.adligo.tests4j.models.shared.reference_groups.jse.v1_6;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.adligo.tests4j.models.shared.association.I_PackageConstantLookupModel;
import org.adligo.tests4j.shared.asserts.reference.NameOnlyReferenceGroup;
import org.adligo.tests4j.shared.asserts.reference.ReferenceGroupBaseDelegate;

/**
 * these are constants for the versions
 * of java, the plan is to support at least the
 * previous 3 minor versions (today on 8/26/2014 that would be 1.8, 1.7, 1.6)
 *
 * partially generated by org.adligo.tests4j_gen.console.JSEGroupGen
 * copy/pasting...
 * Also this class should eventually include the entire api
 * (public methods and fields), for assertion dependency.
 *
 * @author scott
 */
public class JSE_1_6_Math extends ReferenceGroupBaseDelegate
        implements I_JSE_1_6_Math, I_PackageConstantLookupModel {

    public static final String JAVA_MATH = "java.math";

    private static final Map<String, String> CONSTANT_LOOKUP = getConstantLookup();

    public static final JSE_1_6_Math INSTANCE = new JSE_1_6_Math();

    /** Builds the immutable java.math class-name → constant-name table. */
    private static Map<String, String> getConstantLookup() {
        final String[][] entries = {
                {"java.math.BigDecimal", "BIG_DECIMAL"},
                {"java.math.BigInteger", "BIG_INTEGER"},
                {"java.math.RoundingMode", "ROUNDING_MODE"},
                {"java.math.MathContext", "MATH_CONTEXT"},
        };
        Map<String, String> lookup = new HashMap<>();
        for (String[] entry : entries) {
            lookup.put(entry[0], entry[1]);
        }
        return Collections.unmodifiableMap(lookup);
    }

    private JSE_1_6_Math() {
        super.setDelegate(new NameOnlyReferenceGroup(CONSTANT_LOOKUP.keySet()));
    }

    @Override
    public String getPackageName() {
        return JAVA_MATH;
    }

    @Override
    public String getConstantName(String javaName) {
        return CONSTANT_LOOKUP.get(javaName);
    }

    @Override
    public Map<String, String> getModelMap() {
        return CONSTANT_LOOKUP;
    }
}
apache-2.0
BrentDouglas/chainlink
core/src/main/java/io/machinecode/chainlink/core/jsl/fluent/Jsl.java
5622
/* * Copyright 2015 Brent Douglas and other contributors * as indicated by the @author tags. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.machinecode.chainlink.core.jsl.fluent; import io.machinecode.chainlink.core.jsl.fluent.execution.FluentDecision; import io.machinecode.chainlink.core.jsl.fluent.execution.FluentFlow; import io.machinecode.chainlink.core.jsl.fluent.execution.FluentSplit; import io.machinecode.chainlink.core.jsl.fluent.execution.FluentStep; import io.machinecode.chainlink.core.jsl.fluent.partition.FluentAnalyser; import io.machinecode.chainlink.core.jsl.fluent.partition.FluentCollector; import io.machinecode.chainlink.core.jsl.fluent.partition.FluentMapper; import io.machinecode.chainlink.core.jsl.fluent.partition.FluentPartition; import io.machinecode.chainlink.core.jsl.fluent.partition.FluentPlan; import io.machinecode.chainlink.core.jsl.fluent.partition.FluentReducer; import io.machinecode.chainlink.core.jsl.fluent.task.FluentBatchlet; import io.machinecode.chainlink.core.jsl.fluent.task.FluentCheckpointAlgorithm; import io.machinecode.chainlink.core.jsl.fluent.task.FluentChunk; import io.machinecode.chainlink.core.jsl.fluent.task.FluentExceptionClass; import io.machinecode.chainlink.core.jsl.fluent.task.FluentExceptionClassFilter; import io.machinecode.chainlink.core.jsl.fluent.task.FluentItemProcessor; import io.machinecode.chainlink.core.jsl.fluent.task.FluentItemReader; import 
io.machinecode.chainlink.core.jsl.fluent.task.FluentItemWriter; import io.machinecode.chainlink.core.jsl.fluent.transition.FluentEnd; import io.machinecode.chainlink.core.jsl.fluent.transition.FluentFail; import io.machinecode.chainlink.core.jsl.fluent.transition.FluentNext; import io.machinecode.chainlink.core.jsl.fluent.transition.FluentStop; /** * @author <a href="mailto:brent.n.douglas@gmail.com">Brent Douglas</a> * @since 1.0 */ public class Jsl { public static FluentJob job(final String id) { return new FluentJob().setId(id); } public static FluentListener listener(final String ref) { return new FluentListener().setRef(ref); } public static FluentListeners listeners() { return new FluentListeners(); } public static FluentProperty property() { return new FluentProperty(); } public static FluentProperties properties() { return new FluentProperties(); } // Execution public static FluentDecision decision(final String id) { return new FluentDecision().setId(id); } public static FluentFlow flow(final String id) { return new FluentFlow().setId(id); } public static FluentSplit split(final String id) { return new FluentSplit().setId(id); } public static FluentStep step(final String id) { return new FluentStep().setId(id); } // Transition public static FluentEnd end() { return new FluentEnd(); } public static FluentFail fail() { return new FluentFail(); } public static FluentNext next() { return new FluentNext(); } public static FluentStop stop() { return new FluentStop(); } // Partition public static FluentAnalyser analyser(final String ref) { return new FluentAnalyser().setRef(ref); } public static FluentCollector collector(final String ref) { return new FluentCollector().setRef(ref); } public static FluentPartition partition() { return new FluentPartition(); } public static FluentMapper mapper(final String ref) { return new FluentMapper().setRef(ref); } public static FluentPlan plan() { return new FluentPlan(); } public static FluentReducer reducer(final String ref) 
{ return new FluentReducer().setRef(ref); } // Task public static FluentBatchlet batchlet(final String ref) { return new FluentBatchlet().setRef(ref); } public static FluentCheckpointAlgorithm checkpointAlgorithm(final String ref) { return new FluentCheckpointAlgorithm().setRef(ref); } public static FluentChunk chunk() { return new FluentChunk(); } public static FluentExceptionClass classes() { return new FluentExceptionClass(); } public static FluentExceptionClassFilter filter() { return new FluentExceptionClassFilter(); } public static FluentExceptionClassFilter skippableExceptionClasses() { return filter(); } public static FluentExceptionClassFilter retryableExceptionClasses() { return filter(); } public static FluentExceptionClassFilter noRollbackExceptionClasses() { return filter(); } public static FluentItemProcessor processor(final String ref) { return new FluentItemProcessor().setRef(ref); } public static FluentItemReader reader(final String ref) { return new FluentItemReader().setRef(ref); } public static FluentItemWriter writer(final String ref) { return new FluentItemWriter().setRef(ref); } }
apache-2.0
brandt/GridSphere
src/org/gridsphere/portlet/impl/PortletConfigImpl.java
8233
package org.gridsphere.portlet.impl;

import org.gridsphere.portletcontainer.impl.descriptor.InitParam;
import org.gridsphere.portletcontainer.impl.descriptor.PortletDefinition;
import org.gridsphere.portletcontainer.impl.descriptor.PortletInfo;
import org.gridsphere.portletcontainer.impl.descriptor.SupportedLocale;

import javax.portlet.Portlet;
import javax.portlet.PortletConfig;
import javax.portlet.PortletContext;
import javax.servlet.ServletConfig;
import java.util.*;

/**
 * The <CODE>PortletConfig</CODE> interface provides the portlet with
 * its configuration. The configuration holds information about the
 * portlet that is valid for all users. The configuration is retrieved
 * from the portlet definition in the deployment descriptor.
 * The portlet can only read the configuration data.
 * <p/>
 * The configuration information contains the portlet name, the portlet
 * initialization parameters, the portlet resource bundle and the portlet
 * application context.
 *
 * @see Portlet
 */
public class PortletConfigImpl implements PortletConfig {

    private PortletContext context = null;
    private ClassLoader classLoader = null;
    private String portletName = null;
    // Fallback bundle built from the inline <portlet-info> descriptor entries.
    private ResourceBundle infoBundle = null;
    // Base name of the external resource bundle, if one is declared.
    private String resources = null;
    // Generified: the original used a raw Hashtable and cast on lookup.
    private Hashtable<String, String> initParams = new Hashtable<String, String>();

    /**
     * Bundle exposing the descriptor's title/short-title/keywords under the
     * standard javax.portlet.* keys.
     */
    private static class DefaultResourceBundle extends ListResourceBundle {

        private Object[][] resources;

        public DefaultResourceBundle(PortletInfo portletInfo) {
            String title = ((portletInfo.getTitle() != null) ? portletInfo.getTitle().getContent() : "");
            String shortTitle = ((portletInfo.getShortTitle() != null) ? portletInfo.getShortTitle().getContent() : "");
            String keywords = ((portletInfo.getKeywords() != null) ? portletInfo.getKeywords().getContent() : "");
            resources = new Object[][]{
                    {"javax.portlet.title", title},
                    {"javax.portlet.short-title", shortTitle},
                    {"javax.portlet.keywords", keywords}
            };
        }

        public Object[][] getContents() {
            return resources;
        }
    }

    /**
     * Merges a locale-specific bundle over a defaults bundle; keys from the
     * specific bundle win because it is imported last.
     */
    private static class ResourceBundleImpl extends ResourceBundle {

        private Map<String, Object> data;

        public ResourceBundleImpl(ResourceBundle bundle, ResourceBundle defaults) {
            data = new HashMap<String, Object>();
            importData(defaults);
            importData(bundle);
        }

        private void importData(ResourceBundle bundle) {
            if (bundle != null) {
                for (Enumeration<String> e = bundle.getKeys(); e.hasMoreElements();) {
                    String key = e.nextElement();
                    Object value = bundle.getObject(key);
                    data.put(key, value);
                }
            }
        }

        protected Object handleGetObject(String key) {
            return data.get(key);
        }

        public Enumeration getKeys() {
            return new Enumerator(data.keySet());
        }
    }

    /**
     * Constructs an instance of PortletConfig from a servlet configuration
     * object and an application portlet descriptor
     *
     * @param servletConfig a <code>ServletConfig</code>
     * @param definition    a <code>PortletDefinition</code>
     */
    public PortletConfigImpl(ServletConfig servletConfig, PortletDefinition definition, ClassLoader classLoader) {
        this.classLoader = classLoader;

        // create init params
        InitParam[] params = definition.getInitParam();
        if (params != null) {
            for (int i = 0; i < params.length; i++) {
                InitParam iparam = params[i];
                String name = iparam.getName().getContent();
                String value = iparam.getValue().getContent();
                if ((name != null) && (value != null)) {
                    initParams.put(name, value);
                }
            }
        }

        // create portlet context
        context = new PortletContextImpl(servletConfig.getServletContext());

        // get portlet name
        portletName = definition.getPortletName().getContent();

        // NOTE: the original also built a Locale[] from getSupportedLocale()
        // but never used it; that dead computation has been removed.

        PortletInfo portletInfo = definition.getPortletInfo();
        if (portletInfo != null) {
            infoBundle = new DefaultResourceBundle(portletInfo);
        }
        if (definition.getResourceBundle() != null) {
            resources = definition.getResourceBundle().getContent();
        }
        //this.logConfig();
    }

    /**
     * Returns the name of the portlet.
     * <p/>
     * The name may be provided via server administration, assigned in the
     * portlet application deployment descriptor with the <code>portlet-name</code>
     * tag.
     *
     * @return the portlet name
     */
    public String getPortletName() {
        return portletName;
    }

    /**
     * Returns the <code>PortletContext</code> of the portlet application
     * the portlet is in.
     *
     * @return a <code>PortletContext</code> object, used by the
     *         caller to interact with its portlet container
     * @see PortletContext
     */
    public PortletContext getPortletContext() {
        return context;
    }

    /**
     * Returns the path name of this portlet context
     *
     * @return Returns the context path of the web application.
     */
    public String getContextPath() {
        // todo fix me to confirm to servlet spec 2.5
        return "";
    }

    /**
     * Gets the resource bundle for the given locale based on the
     * resource bundle defined in the deployment descriptor
     * with <code>resource-bundle</code> tag or the inlined resources
     * defined in the deployment descriptor.
     *
     * @param locale the locale for which to retrieve the resource bundle
     * @return the resource bundle for the given locale
     */
    public ResourceBundle getResourceBundle(java.util.Locale locale) {
        if (resources == null) {
            return infoBundle;
        }
        ResourceBundle resourceBundle = null;
        try {
            resourceBundle = ResourceBundle.getBundle(resources, locale, classLoader);
            if (infoBundle != null) {
                // External bundle entries override the inline descriptor values.
                return new ResourceBundleImpl(resourceBundle, infoBundle);
            }
        } catch (MissingResourceException e) {
            System.err.println("Unable to find resource bundle: " + resources + " for locale: " + locale);
            if (infoBundle != null) {
                return infoBundle;
            }
            // if everything goes wrong get the english locale (which needs to be there)
            resourceBundle = ResourceBundle.getBundle(resources, Locale.ENGLISH, classLoader);
        }
        return resourceBundle;
    }

    /**
     * Returns a String containing the value of the named initialization parameter,
     * or null if the parameter does not exist.
     *
     * @param name a <code>String</code> specifying the name
     *             of the initialization parameter
     * @return a <code>String</code> containing the value
     *         of the initialization parameter
     * @exception IllegalArgumentException if name is <code>null</code>.
     */
    public String getInitParameter(String name) {
        if (name == null) throw new IllegalArgumentException("name is NULL");
        return initParams.get(name);
    }

    /**
     * Returns the names of the portlet initialization parameters as an
     * <code>Enumeration</code> of String objects, or an empty <code>Enumeration</code> if the
     * portlet has no initialization parameters.
     *
     * @return an <code>Enumeration</code> of <code>String</code>
     *         objects containing the names of the portlet
     *         initialization parameters, or an empty <code>Enumeration</code> if the
     *         portlet has no initialization parameters.
     */
    public java.util.Enumeration getInitParameterNames() {
        return initParams.keys();
    }
}
apache-2.0
banq/jdonframework
src/main/java/com/jdon/container/interceptor/ProxyFactory.java
1535
/* * Copyright 2003-2009 the original author or authors. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain event copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.jdon.container.interceptor; import net.sf.cglib.proxy.Enhancer; import net.sf.cglib.proxy.MethodInterceptor; import com.jdon.util.Debug; public class ProxyFactory { private final static String module = ProxyFactory.class.getName(); public Object createProxy(MethodInterceptor methodInterceptor, Object target, Class[] interfaces) { Debug.logVerbose("[JdonFramework]enter Proxy.newProxyInstance ", module); Object dynamicProxy = null; try { Enhancer enhancer = new Enhancer(); enhancer.setCallback(methodInterceptor); enhancer.setInterfaces(interfaces); dynamicProxy = enhancer.create(); } catch (Exception ex) { Debug.logError("[JdonFramework] Proxy.newProxyInstance error:" + ex, module); } catch (Throwable ex) { Debug.logError("[JdonFramework] Proxy.newProxyInstance error:" + ex, module); } return dynamicProxy; } }
apache-2.0
viydaag/vaadin-fluent-api
src/main/java/com/vaadin/fluent/api/FluentSlider.java
2472
package com.vaadin.fluent.api;

import com.vaadin.shared.ui.slider.SliderOrientation;
import com.vaadin.ui.Slider;

/**
 * The base interface for fluent versions of {@link Slider}.
 * Each {@code withXxx} method delegates to the matching {@code setXxx}
 * on the underlying {@link Slider} and returns {@code this} for chaining.
 *
 * @see Slider
 *
 * @param <THIS> Self-referential generic THIS
 */
public interface FluentSlider<THIS extends FluentSlider<THIS>> extends FluentAbstractField<THIS, Double> {

    /**
     * Sets the minimum slider value; values below it are clamped up by the
     * underlying slider.
     *
     * @param min the new minimum slider value
     * @return this for method chaining
     * @see Slider#setMin(double)
     */
    @SuppressWarnings("unchecked")
    public default THIS withMin(double min) {
        Slider self = (Slider) this;
        self.setMin(min);
        return (THIS) this;
    }

    /**
     * Sets the maximum slider value; values above it are clamped down by the
     * underlying slider.
     *
     * @param max the new maximum slider value
     * @return this for method chaining
     * @see Slider#setMax(double)
     */
    @SuppressWarnings("unchecked")
    public default THIS withMax(double max) {
        Slider self = (Slider) this;
        self.setMax(max);
        return (THIS) this;
    }

    /**
     * Sets the slider orientation.
     *
     * @param orientation either {@link SliderOrientation#HORIZONTAL} or
     *                    {@link SliderOrientation#VERTICAL}
     * @return this for method chaining
     * @see Slider#setOrientation(SliderOrientation)
     */
    @SuppressWarnings("unchecked")
    public default THIS withOrientation(SliderOrientation orientation) {
        Slider self = (Slider) this;
        self.setOrientation(orientation);
        return (THIS) this;
    }

    /**
     * Sets the resolution (number of digits after the decimal point).
     *
     * @param resolution digits after the decimal point
     * @return this for method chaining
     * @throws IllegalArgumentException if resolution is negative
     * @see Slider#setResolution(int)
     */
    @SuppressWarnings("unchecked")
    public default THIS withResolution(int resolution) {
        Slider self = (Slider) this;
        self.setResolution(resolution);
        return (THIS) this;
    }
}
apache-2.0
Emerjoin/Hi-Framework
Web/src/main/java/org/emerjoin/hi/web/events/sse/WebEventsController.java
6891
package org.emerjoin.hi.web.events.sse;

import org.emerjoin.hi.web.ActiveUser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.enterprise.context.ApplicationScoped;
import javax.servlet.AsyncContext;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Dispatches server-sent {@link WebEvent}s to registered
 * {@link WebEventsListener}s, grouped into named channel pools and indexed
 * per user id.
 *
 * @author Mario Junior.
 */
@ApplicationScoped
public class WebEventsController {

    // NOTE(review): plain ArrayList mutated from request threads while
    // the maps are concurrent — confirm intended thread-safety model.
    private List<WebEventsListener> listeners = new ArrayList<>();
    private Map<String, ListenersPool> pools = new ConcurrentHashMap<>();
    private Map<String, List<WebEventsListener>> userListenersMap = new ConcurrentHashMap<>();

    private static final Logger LOGGER = LoggerFactory.getLogger(WebEventsController.class);

    /**
     * Publishes the request's event either to all pools (no channels given)
     * or to the listed channels only.
     *
     * @throws IllegalArgumentException if request is null
     */
    public void execute(WebEventPublishRequest request) {
        if (request == null)
            throw new IllegalArgumentException("request must not be null");
        if (request.getChannelsList().isEmpty()) {
            this.publish(request.getEvent());
            return;
        }
        this.publish(request.getEvent(), request.getChannelsArray());
    }

    /**
     * Detaches all of the user's listeners from the given channel pool.
     */
    public void quitChannel(String userId, String channel) {
        if (userId == null || userId.isEmpty())
            throw new IllegalArgumentException("userId reference must not be null nor empty");
        if (channel == null || channel.isEmpty())
            throw new IllegalArgumentException("channel reference must not be null nor empty");
        LOGGER.info(String.format("User [%s] quitting channel [%s]", userId, channel));
        ListenersPool consumersPool = pools.get(channel);
        if (consumersPool == null) {
            LOGGER.warn(String.format("There is no listeners pool named [%s]", channel));
            return;
        }
        List<WebEventsListener> listenerList = userListenersMap.get(userId);
        if (listenerList == null) {
            LOGGER.info("No Listeners found for user with Id=" + userId);
            return;
        }
        LOGGER.info(String.format("Detaching %d listener(s) from channel", listenerList.size()));
        for (WebEventsListener listener : listenerList) {
            consumersPool.remove(listener);
        }
    }

    /**
     * Attaches all of the user's listeners to the given channel pool,
     * creating the pool on first use.
     */
    public void joinChannel(String userId, String channel) {
        if (userId == null || userId.isEmpty())
            throw new IllegalArgumentException("userId reference must not be null nor empty");
        if (channel == null || channel.isEmpty())
            throw new IllegalArgumentException("channel reference must not be null nor empty");
        LOGGER.info(String.format("User [%s] joining channel [%s]", userId, channel));
        ListenersPool consumersPool = pools.get(channel);
        if (consumersPool == null) {
            consumersPool = new ListenersPool(channel);
            pools.put(channel, consumersPool);
        }
        List<WebEventsListener> listenerList = userListenersMap.get(userId);
        if (listenerList == null || listenerList.isEmpty()) {
            LOGGER.info("There are no listeners bound to user with Id=" + userId);
            return;
        } else
            LOGGER.info(String.format("Adding %d listener(s) to channel", listenerList.size()));
        for (WebEventsListener listener : listenerList) {
            consumersPool.addListener(listener);
        }
    }

    /**
     * Registers a new listener for the user's async context and joins it to
     * the user's own channel plus all subscribed channels.
     */
    WebEventsListener addListener(ActiveUser activeUser, AsyncContext context) {
        String userId = activeUser.getUniqueId();
        WebEventsListener listener = new WebEventsListener(context, userId);
        LOGGER.info(String.format("Listener [%s] joining...", listener));
        listeners.add(listener);
        List<WebEventsListener> eventsListeners = userListenersMap.get(userId);
        if (eventsListeners == null) {
            eventsListeners = Collections.synchronizedList(new ArrayList<>());
            userListenersMap.put(userId, eventsListeners);
        }
        eventsListeners.add(listener);
        joinListener(listener, activeUser.getWebEventChannel());
        for (String channel : activeUser.getWebEventSubscriptions()) {
            joinListener(listener, channel);
        }
        return listener;
    }

    /** Adds a single listener to a channel pool, creating the pool if absent. */
    private void joinListener(WebEventsListener listener, String channel) {
        ListenersPool pool = pools.get(channel);
        LOGGER.info(String.format("Adding Listener [%s] to channel [%s]", listener, channel));
        if (pool == null) {
            pool = new ListenersPool(channel);
            pools.put(channel, pool);
        }
        pool.addListener(listener);
    }

    private void publish(WebEvent event, String... channels) {
        Collection<ListenersPool> listenerPools = getConsumerPools(channels);
        Collection<WebEventsListener> listeners = getListeners(listenerPools);
        this.deliver(event, listeners);
    }

    private void publish(WebEvent event) {
        Collection<WebEventsListener> consumers = getListeners(pools.values());
        this.deliver(event, consumers);
    }

    private Collection<WebEventsListener> getListeners(String... channels) {
        Collection<ListenersPool> consumersPools = getConsumerPools(channels);
        return getListeners(consumersPools);
    }

    /** Collects the distinct listeners across the given pools. */
    private Collection<WebEventsListener> getListeners(Collection<ListenersPool> listenerPools) {
        Collection<WebEventsListener> eventListeners = new ArrayList<>();
        for (ListenersPool pool : listenerPools) {
            for (WebEventsListener listener : pool.getEventListeners()) {
                if (!eventListeners.contains(listener))
                    eventListeners.add(listener);
            }
        }
        return eventListeners;
    }

    private Collection<ListenersPool> getConsumerPools(String... channels) {
        List<ListenersPool> poolList = new ArrayList<>();
        for (String channel : channels) {
            ListenersPool pool = pools.get(channel);
            if (pool == null) {
                LOGGER.warn(String.format("There is no Listeners pool with name=[%s]", channel));
                continue;
            }
            poolList.add(pool);
        }
        return poolList;
    }

    /**
     * Delivers the event to each listener; a listener that throws is pruned
     * from every pool and from the per-user index, then disposed.
     */
    private void deliver(WebEvent event, Collection<WebEventsListener> listeners) {
        // Explicit iterator so a failed listener can be removed mid-iteration;
        // the original removed from the collection inside a for-each, which
        // throws ConcurrentModificationException on the next iteration.
        Iterator<WebEventsListener> iterator = listeners.iterator();
        while (iterator.hasNext()) {
            WebEventsListener listener = iterator.next();
            try {
                listener.deliver(event);
                LOGGER.info(String.format("[%s] delivered successfully", event.getClass().getSimpleName()));
            } catch (Exception ex) {
                LOGGER.error("error found while sending message to Listener", ex);
                Collection<ListenersPool> poolsList = pools.values();
                for (ListenersPool pool : poolsList)
                    pool.remove(listener);
                iterator.remove();
                List<WebEventsListener> eventsListeners = userListenersMap.get(listener.getUserId());
                if (eventsListeners != null)
                    eventsListeners.remove(listener);
                listener.dispose();
            }
        }
    }
}
apache-2.0
prashant003/interimage-2
interimage-datamining/src/main/java/br/puc_rio/ele/lvc/interimage/datamining/udf/DecisionTreeClassifier.java
1696
package br.puc_rio.ele.lvc.interimage.datamining.udf;

import java.io.IOException;
import java.lang.Exception;

import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.Schema;

import br.puc_rio.ele.lvc.interimage.datamining.DataParser;
import weka.classifiers.trees.J48;
import weka.core.Instance;
import weka.core.Instances;

/**
 * Pig UDF that classifies a feature vector with a Weka J48 decision tree
 * trained on-the-fly from the training data carried in the tuple's last field.
 * Tuple layout: field 0 = feature count, fields 1..n = feature values,
 * last field = raw training data understood by {@link DataParser}.
 */
public class DecisionTreeClassifier extends EvalFunc<String> {

    private final DataParser dataParser = new DataParser();

    @Override
    public String exec(Tuple input) throws IOException {
        if (input == null) {
            return null;
        }
        try {
            // Read the feature vector from fields 1..featureCount.
            final int featureCount = DataType.toInteger(input.get(0));
            final double[] features = new double[featureCount];
            for (int idx = 0; idx < featureCount; idx++) {
                features[idx] = DataType.toDouble(input.get(idx + 1));
            }

            // The last field carries the training data; build the tree from it.
            final Object rawTrainingData = input.get(input.size() - 1);
            final Instances trainingSet = dataParser.parseData(rawTrainingData);
            final J48 tree = new J48();
            tree.buildClassifier(trainingSet);

            // Reuse the first training instance as a template for the sample.
            final Instance sample = trainingSet.instance(0);
            for (int idx = 0; idx < featureCount; idx++) {
                sample.setValue(idx, features[idx]);
            }
            final double predicted = tree.classifyInstance(sample);
            return sample.attribute(trainingSet.classIndex()).value((int) predicted);
        } catch (Exception e) {
            throw new IOException("Caught exception processing input row ", e);
        }
    }

    @Override
    public Schema outputSchema(Schema input) {
        return new Schema(new Schema.FieldSchema(null, DataType.CHARARRAY));
    }
}
apache-2.0
hgani/androlib
ganilib/src/main/java/com/gani/lib/http/PostDelegate.java
1116
package com.gani.lib.http;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;

/**
 * HTTP delegate that sends form-encoded parameters in the request body,
 * tunnelling the real method name through a {@code _method} parameter.
 */
final class PostDelegate extends HttpDelegate {

    private static final long serialVersionUID = 1L;

    private HttpMethod method;
    private GImmutableParams params;

    PostDelegate(String nakedUrl, GImmutableParams params, HttpHook hook, HttpMethod method) {
        super(nakedUrl, hook);
        this.method = method;
        this.params = GImmutableParams.fromNullable(params);
    }

    @Override
    protected String getMethod() {
        return method.name();
    }

    /**
     * Opens the connection and writes the parameters (plus the {@code _method}
     * override) as a UTF-8 query string into the request body.
     */
    @Override
    protected HttpURLConnection makeConnection() throws IOException {
        HttpURLConnection connection = GHttp.instance().openConnection(getFullUrl(), params, method);
        connection.setDoOutput(true);

        // StandardCharsets avoids the checked UnsupportedEncodingException
        // of getBytes("UTF-8") while encoding identically.
        byte[] data = GHttp.instance().processParams(params, method)
                .toMutable().put("_method", getMethod()).toImmutable()
                .asQueryString().getBytes(java.nio.charset.StandardCharsets.UTF_8);

        // try-with-resources: the original never flushed/closed the output
        // stream, leaking it on the error path.
        try (java.io.OutputStream body = connection.getOutputStream()) {
            body.write(data);
        }
        return connection;
    }

    @Override
    protected String getFullUrl() {
        return getUrl();
    }
}
apache-2.0
nezihyigitbasi/presto
presto-spi/src/main/java/com/facebook/presto/spi/connector/ConnectorContext.java
1845
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.spi.connector;

import com.facebook.presto.spi.NodeManager;
import com.facebook.presto.spi.PageIndexerFactory;
import com.facebook.presto.spi.PageSorter;
import com.facebook.presto.spi.function.FunctionMetadataManager;
import com.facebook.presto.spi.function.StandardFunctionResolution;
import com.facebook.presto.spi.relation.RowExpressionService;
import com.facebook.presto.spi.type.TypeManager;

/**
 * Engine-provided services made available to connectors at creation time.
 * Every accessor defaults to {@link UnsupportedOperationException} so the
 * hosting engine opts in to each service by overriding the matching method;
 * connectors must not assume a given service is available.
 */
public interface ConnectorContext
{
    /** Access to cluster node information; unsupported unless overridden. */
    default NodeManager getNodeManager()
    {
        throw new UnsupportedOperationException();
    }

    /** Resolves engine type signatures; unsupported unless overridden. */
    default TypeManager getTypeManager()
    {
        throw new UnsupportedOperationException();
    }

    /** Metadata about registered functions; unsupported unless overridden. */
    default FunctionMetadataManager getFunctionMetadataManager()
    {
        throw new UnsupportedOperationException();
    }

    /** Resolution of standard (built-in) functions; unsupported unless overridden. */
    default StandardFunctionResolution getStandardFunctionResolution()
    {
        throw new UnsupportedOperationException();
    }

    /** Engine page-sorting facility; unsupported unless overridden. */
    default PageSorter getPageSorter()
    {
        throw new UnsupportedOperationException();
    }

    /** Factory for page indexers; unsupported unless overridden. */
    default PageIndexerFactory getPageIndexerFactory()
    {
        throw new UnsupportedOperationException();
    }

    /** Services for working with row expressions; unsupported unless overridden. */
    default RowExpressionService getRowExpressionService()
    {
        throw new UnsupportedOperationException();
    }
}
apache-2.0
makohill/VerticalMarquee
app/src/main/java/com/marquee/MainActivity.java
2071
package com.marquee;

import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Toast;

import java.util.ArrayList;
import java.util.List;

import com.marquee.R;

/**
 * Demo activity for the vertical marquee widget: fills a
 * {@code VerticalMarqueeView} with sample text items, attaches a click
 * listener that shows a toast, and starts the scrolling animation.
 */
public class MainActivity extends AppCompatActivity {

    // The marquee widget inflated from activity_main.
    private VerticalMarqueeView marqueeView;
    // Adapter bridging the string list to the marquee view.
    private VerticalMarqueeAdapter marqueeBaseAdapter;
    // Backing data: the text items scrolled by the marquee.
    private List<String> list;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        marqueeView = (VerticalMarqueeView) this.findViewById(R.id.marquee);
        // Populate the sample items (Chinese placeholder text of varying length).
        list = new ArrayList<>();
        list.add("定理描述素数的比较准确的分布情况。素数的出现规律一直困惑著数学家。");
        list.add("出现规律一直困惑著数学家。一个个地看,素数在正整数中的出");
        list.add("理,也可以称为素数正态分布定理猜想,有待数学家在数学上");
        list.add("理,学家哈达玛(JacquesHadamard)和比利时数学家普森(Char其是黎曼ζ函数。 因,有待数学家在数学上");
        list.add("定理描述素数的比较准确的分布情况。素数的出现规律一直困惑著数学家。一个个地看,素数在正整数中的出现没有什么规律。可是总体地看,素数的个数竟然有规可循");
        list.add("出现规律一直困惑著数学家。一个个地看,素数在正整数中的出");
        list.add("理,也可以称为素数正态分布定理猜想,有待数学家在数学上");
        list.add("理,学家哈达玛(JacquesHadamard)和比利时数学家普森(Char其是黎曼ζ函数。 因,有待数学家在数学上");
        // Wire the data into the view, then register the item-click callback.
        marqueeBaseAdapter = new VerticalMarqueeAdapter(list);
        marqueeView.setAdapter(marqueeBaseAdapter);
        marqueeBaseAdapter.setMarqueeOnClickListener(new VerticalMarqueeAdapter.MarqueeOnClickListener() {
            @Override
            public void onClick(int position) {
                Toast.makeText(MainActivity.this, "点击", Toast.LENGTH_SHORT).show();
            }
        });
        // Kick off the marquee animation.
        marqueeView.start();
    }
}
apache-2.0
anttribe/vigor-framework
vigor/vigor-defensor/vigor-defensor-auth/src/main/java/org/anttribe/vigor/defensor/auth/realm/DefensorAuthorizingRealm.java
10006
/* * 文 件 名: DefensorAuthorizingRealm.java * 版 本 : vigor-defensor-auth © 2016 Anttribe. All rights reserved. * 描 述 : <描述> * 修 改 人: zhaoyong * 修改时间: 2016年2月29日 */ package org.anttribe.vigor.defensor.auth.realm; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.PostConstruct; import org.anttribe.vigor.defensor.auth.UsernamePasswordToken; import org.anttribe.vigor.defensor.auth.constants.Keys; import org.anttribe.vigor.defensor.auth.credential.DefensorCredentialsMatcher; import org.anttribe.vigor.defensor.domain.Resource; import org.anttribe.vigor.defensor.domain.Role; import org.anttribe.vigor.defensor.domain.User; import org.anttribe.vigor.defensor.service.IResourceService; import org.anttribe.vigor.defensor.service.IRoleService; import org.anttribe.vigor.defensor.service.IUserService; import org.anttribe.vigor.defensor.type.ResourceType; import org.anttribe.vigor.infra.security.DefaultPasswordService; import org.anttribe.vigor.infra.security.PasswordService; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.shiro.SecurityUtils; import org.apache.shiro.authc.AuthenticationException; import org.apache.shiro.authc.AuthenticationInfo; import org.apache.shiro.authc.AuthenticationToken; import org.apache.shiro.authc.SimpleAuthenticationInfo; import org.apache.shiro.authc.UnknownAccountException; import org.apache.shiro.authz.AuthorizationInfo; import org.apache.shiro.authz.SimpleAuthorizationInfo; import org.apache.shiro.realm.AuthorizingRealm; import org.apache.shiro.session.Session; import org.apache.shiro.subject.PrincipalCollection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; /** * @author zhaoyong * @version 2016年2月29日 */ public class DefensorAuthorizingRealm extends AuthorizingRealm { private static final Logger logger = 
LoggerFactory.getLogger(DefensorAuthorizingRealm.class); @Autowired private IUserService userService; @Autowired private IRoleService roleService; @Autowired private IResourceService resourceService; private PasswordService passwordService = new DefaultPasswordService(); @PostConstruct public void initCredentialsMatcher() { DefensorCredentialsMatcher matcher = new DefensorCredentialsMatcher(); matcher.setPasswordService(passwordService); setCredentialsMatcher(matcher); } @SuppressWarnings("unchecked") @Override protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) { Session session = SecurityUtils.getSubject().getSession(); User user = (User)session.getAttribute(Keys.KEY_USER_SESSION); if (null != user && null != user.getId()) { this.clearCachedAuthorizationInfo(principals); SimpleAuthorizationInfo authorizationInfo = new SimpleAuthorizationInfo(); // 加载权限 List<Role> roles = (List<Role>)session.getAttribute(Keys.KEY_USER_ROLES); List<Resource> resources = (List<Resource>)session.getAttribute(Keys.KEY_USER_RESOURCES); if (CollectionUtils.isEmpty(roles) || CollectionUtils.isEmpty(resources)) { this.loadUserResources(user, user.getIdentity(), session); } // 用户角色 roles = (List<Role>)session.getAttribute(Keys.KEY_USER_ROLES); List<String> userRoles = this.processUserRoles(roles); authorizationInfo.addRoles(userRoles); // 用户资源权限 resources = (List<Resource>)session.getAttribute(Keys.KEY_USER_RESOURCES); List<String> permissions = this.processUserPermision(resources); authorizationInfo.addStringPermissions(permissions); return authorizationInfo; } return null; } /** * 处理用户角色 * * @param roles * @return List<String> */ private List<String> processUserRoles(List<Role> roles) { List<String> roleCodes = new ArrayList<String>(); if (!CollectionUtils.isEmpty(roles)) { for (Role role : roles) { if (StringUtils.isEmpty(role.getCode())) { continue; } roleCodes.add(role.getCode()); } } return roleCodes; } /** * 处理用户权限 * * @param resources */ private 
List<String> processUserPermision(List<Resource> resources) { List<String> permissions = new ArrayList<String>(); if (!CollectionUtils.isEmpty(resources)) { for (Resource resource : resources) { ResourceType resourceType = resource.getResourceType(); if (null == resourceType) { continue; } String permission = resourceType.assemblePermission(resource); if (StringUtils.isEmpty(permission)) { continue; } permissions.addAll(this.processUserPermision(resource.getChildren())); } } return permissions; } @SuppressWarnings("unchecked") @Override protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token) throws AuthenticationException { UsernamePasswordToken usernamePasswordToken = (UsernamePasswordToken)token; String username = usernamePasswordToken.getUsername(); if (!StringUtils.isEmpty(username)) { Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("username", username); User user = userService.findEntity(criteria); if (null == user) { // 未知帐号异常 throw new UnknownAccountException(); } // 验证用户密码 boolean passwordMatch = passwordService.passwordsMatch(new String(usernamePasswordToken.getPassword()), user.getPassword()); if (!passwordMatch) { logger.warn("Failed to login with the password not match."); throw new AuthenticationException("Failed to login with the password not match."); } // 设置用户当前身份 user.setIdentity(usernamePasswordToken.getIdentity()); // 获取当前会话 Session session = SecurityUtils.getSubject().getSession(); // 加载用户角色、资源权限 this.loadUserResources(user, usernamePasswordToken.getIdentity(), session); List<Role> roles = (List<Role>)session.getAttribute(Keys.KEY_USER_ROLES); if (CollectionUtils.isEmpty(roles)) { // 该身份下没有角色, 登录失败 logger.warn("Failed to login with identity, there is no role with this identity: {}.", usernamePasswordToken.getIdentity()); throw new AuthenticationException("Failed to login with identity, there is no role with this identity: " + usernamePasswordToken.getIdentity() + "."); } // 将用户信息放置session中 
session.setAttribute(Keys.KEY_USER_SESSION, user); // 构造当前用户菜单 List<Resource> resources = (List<Resource>)session.getAttribute(Keys.KEY_USER_RESOURCES); if (!CollectionUtils.isEmpty(resources)) { List<Resource> menuResources = this.processMenuResource(resources); session.setAttribute(Keys.KEY_MENUS, menuResources); } SimpleAuthenticationInfo authentication = new SimpleAuthenticationInfo(user.getUsername(), user.getPassword(), null, getName()); clearCachedAuthenticationInfo(authentication.getPrincipals()); return authentication; } return null; } /** * 加载用户角色、资源权限 * * @param user * @param identity * @param session */ private void loadUserResources(User user, String identity, Session session) { // 加载用户角色 List<Role> userRoles = roleService.listUserRoles(user, identity); session.setAttribute(Keys.KEY_USER_ROLES, userRoles); if (!CollectionUtils.isEmpty(userRoles)) { // 加载对应的资源权限 List<Resource> resources = resourceService.listResources(userRoles); session.setAttribute(Keys.KEY_USER_RESOURCES, resources); } } /** * 处理菜单资源 * * @param resources List<Resource> * @return List<Resource> */ private List<Resource> processMenuResource(List<Resource> resources) { List<Resource> menuResources = null; if (!CollectionUtils.isEmpty(resources)) { menuResources = new ArrayList<Resource>(); for (Resource resource : resources) { ResourceType resourceType = resource.getResourceType(); if (resourceType.isMenuResource(resource)) { resource.setChildren(this.processMenuResource(resource.getChildren())); menuResources.add(resource); } } } return menuResources; } public void setPasswordService(PasswordService passwordService) { this.passwordService = passwordService; } }
apache-2.0
nextreports/nextreports-designer
src/ro/nextreports/designer/datasource/TagsDialog.java
2002
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ro.nextreports.designer.datasource; import ro.nextreports.engine.util.StringUtil; import java.util.List; import ro.nextreports.designer.ui.BaseDialog; import ro.nextreports.designer.util.I18NSupport; import ro.nextreports.designer.util.Show; /** * Created by IntelliJ IDEA. * User: mihai.panaitescu * Date: Oct 17, 2008 * Time: 11:20:00 AM */ public class TagsDialog extends BaseDialog { public TagsDialog(TagsPanel tagsPanel) { super(tagsPanel, I18NSupport.getString("connection.dialog.tags.title")); } public List<String> getTagsValues() { return ((TagsPanel)basePanel).getTagsValues(); } public List<String> getTags() { return ((TagsPanel)basePanel).getTags(); } protected boolean ok() { List<String> tags = getTags(); List<String> tagsValues = getTagsValues(); for (int i=0, size=tagsValues.size(); i<size; i++) { if ("".equals(tagsValues.get(i).trim())) { Show.info(I18NSupport.getString("connection.dialog.tags.enter", StringUtil.capitalize(tags.get(i)))); return false; } } return true; } }
apache-2.0
kevinx701/SeLion
codegen/src/main/java/com/paypal/selion/elements/AndroidSeLionElementList.java
6149
/*-------------------------------------------------------------------------------------------------------------------*\ | Copyright (C) 2015-2016 PayPal | | | | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance | | with the License. | | | | You may obtain a copy of the License at | | | | http://www.apache.org/licenses/LICENSE-2.0 | | | | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed | | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for | | the specific language governing permissions and limitations under the License. | \*-------------------------------------------------------------------------------------------------------------------*/ package com.paypal.selion.elements; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import com.paypal.selion.plugins.GUIObjectDetails; /** * The class represents the elements that can be given in a page object data source and are recognized by Android * platform */ public class AndroidSeLionElementList extends AbstractSeLionElementList { public static final String UIAUTOMATION_ELEMENT_CLASS = "com.paypal.selion.platform.mobile.android"; public static final AndroidSeLionElementList UiButton = new AndroidSeLionElementList(UIAUTOMATION_ELEMENT_CLASS, "UiButton", true); public static final AndroidSeLionElementList UiList = new AndroidSeLionElementList(UIAUTOMATION_ELEMENT_CLASS, "UiList", true); public static final AndroidSeLionElementList UiSlider = new AndroidSeLionElementList(UIAUTOMATION_ELEMENT_CLASS, "UiSlider", true); public static final AndroidSeLionElementList UiSwitch = new AndroidSeLionElementList(UIAUTOMATION_ELEMENT_CLASS, "UiSwitch", true); public static final AndroidSeLionElementList UiTextView = new AndroidSeLionElementList(UIAUTOMATION_ELEMENT_CLASS, "UiTextView", true); public static final 
AndroidSeLionElementList UiObject = new AndroidSeLionElementList(UIAUTOMATION_ELEMENT_CLASS, "UiObject", true); public static final AndroidSeLionElementList BASE_CLASS = new AndroidSeLionElementList(null, "baseClass", false); private static AndroidSeLionElementList[] values = { UiButton, UiList, UiSlider, UiSwitch, UiTextView, UiObject, BASE_CLASS }; protected AndroidSeLionElementList(String elementPackage, String element, boolean uiElement) { super(elementPackage, element, uiElement); } /** * By providing the qualified name of a custom element we can register it to the element array. Custom elements are * inserted before SeLion elements, if you use the same name it will overwrite the existing element. * * @param element * string of the qualified class */ public static void registerElement(String element) { List<AndroidSeLionElementList> temp = new ArrayList<>(Arrays.asList(values)); temp.add(0, new AndroidSeLionElementList(HtmlElementUtils.getPackage(element), HtmlElementUtils.getClass(element), true)); values = temp.toArray(new AndroidSeLionElementList[temp.size()]); } /** * @param rawType * The String using which an attempt to find a matching {@link AndroidSeLionElementList} is to be * performed. * @return A {@link AndroidSeLionElementList} if the type ends with one of the values of * {@link AndroidSeLionElementList} that were passed as android elements (or) <code>null</code> if there * were no matches. */ public static AndroidSeLionElementList findMatch(String rawType) { return (AndroidSeLionElementList) findMatch(values, rawType); } /** * @param element * The element that needs to be searched. * @return <code>true</code> if the element was found in the set of elements provided. */ public static boolean isExactMatch(String element) { return isExactMatch(values, element); } /** * @param element * The element that needs to be tested for being a valid {@link AndroidSeLionElementList} and whose * {@link AndroidSeLionElementList#isUIElement()} returns true. 
* @return <code>true</code> if there was a match and <code>false</code> otherwise. */ public static boolean isValidUIElement(String element) { return isValidUIElement(values, element); } public static List<GUIObjectDetails> getGUIObjectList(List<String> keys) { List<GUIObjectDetails> mobileObjectDetailsList = new ArrayList<GUIObjectDetails>(); for (String key : keys) { AndroidSeLionElementList element = AndroidSeLionElementList.findMatch(key); if (element != null && element.isUIElement()) { GUIObjectDetails mobileObjectDetails = null; mobileObjectDetails = new GUIObjectDetails(element.stringify(), key, element.getElementPackage()); mobileObjectDetailsList.add(mobileObjectDetails); } } return mobileObjectDetailsList; } }
apache-2.0
googleads/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202111/TargetingValue.java
5195
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * TargetingValue.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */
// NOTE(review): generated code — do not hand-edit logic; regenerate from the WSDL instead.

package com.google.api.ads.admanager.axis.v202111;


/**
 * Contains a {@link Targeting} value.
 *
 * <p><b>This object is experimental! <code>TargetingValue</code>
 * is an experimental, innovative,
 * and rapidly changing new feature for Ad Manager. Unfortunately,
 * being on the bleeding edge means
 * that we may make backwards-incompatible changes to <code>TargetingValue</code>.
 * We will inform
 * the community when this feature is no longer experimental.</b>
 */
public class TargetingValue  extends com.google.api.ads.admanager.axis.v202111.ObjectValue  implements java.io.Serializable {
    /* The {@code Targeting} value. */
    private com.google.api.ads.admanager.axis.v202111.Targeting value;

    public TargetingValue() {
    }

    public TargetingValue(
           com.google.api.ads.admanager.axis.v202111.Targeting value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("value", getValue())
            .toString();
    }

    /**
     * Gets the value value for this TargetingValue.
     *
     * @return value   * The {@code Targeting} value.
     */
    public com.google.api.ads.admanager.axis.v202111.Targeting getValue() {
        return value;
    }

    /**
     * Sets the value value for this TargetingValue.
     *
     * @param value   * The {@code Targeting} value.
     */
    public void setValue(com.google.api.ads.admanager.axis.v202111.Targeting value) {
        this.value = value;
    }

    // Axis-generated cycle guard: remembers the object currently being compared
    // so equals() terminates on cyclic object graphs. Not thread-safe across
    // instances; the methods below are synchronized per instance only.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof TargetingValue)) return false;
        TargetingValue other = (TargetingValue) obj;
        // NOTE(review): unreachable — the instanceof check above already rejects null.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Re-entrant call on a cycle: treat "currently comparing obj" as equal.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) &&
            ((this.value==null && other.getValue()==null) ||
             (this.value!=null &&
              this.value.equals(other.getValue())));
        __equalsCalc = null;
        return _equals;
    }

    // Same cycle-guard pattern for hashCode: a re-entrant call contributes 0.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getValue() != null) {
            _hashCode += getValue().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata describing how this bean maps to the SOAP schema type.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(TargetingValue.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TargetingValue"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("value");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "value"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Targeting"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
michaelvandeweerd/sensor
src/net/michaelvandeweerd/sensor/data/Axis.java
1871
package net.michaelvandeweerd.sensor.data;

/**
 * A connection between two nodes, bounded by a lower and a higher extreme.
 *
 * @author Michaël van de Weerd
 */
public class Axis {
    /** The name of the current axis. */
    private String name;

    /** The lower node of the current axis. */
    private Extreme lower;

    /** The higher node of the current axis. */
    private Extreme higher;

    /**
     * Construct an unnamed axis with two freshly created extremes.
     */
    public Axis() {
        this(new Extreme(), new Extreme());
    }

    /**
     * Construct a named axis with two freshly created extremes.
     *
     * @param name
     *            The name of the axis to construct.
     */
    public Axis(String name) {
        this(name, new Extreme(), new Extreme());
    }

    /**
     * Construct an unnamed axis from the given extremes.
     *
     * @param lower
     *            The lower extreme of the axis to construct.
     * @param higher
     *            The higher extreme of the axis to construct.
     */
    public Axis(Extreme lower, Extreme higher) {
        this.lower = lower;
        this.higher = higher;
    }

    /**
     * Construct a named axis from the given extremes.
     *
     * @param name
     *            The name of the axis to construct.
     * @param lower
     *            The lower extreme of the axis to construct.
     * @param higher
     *            The higher extreme of the axis to construct.
     */
    public Axis(String name, Extreme lower, Extreme higher) {
        this(lower, higher);
        this.name = name;
    }

    /**
     * Return the name of the current axis.
     *
     * @return The name of the current axis.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Return the lower extreme of the current axis.
     *
     * @return The lower extreme of the current axis.
     */
    public Extreme getLower() {
        return this.lower;
    }

    /**
     * Return the higher extreme of the current axis.
     *
     * @return The higher extreme of the current axis.
     */
    public Extreme getHigher() {
        return this.higher;
    }
}
apache-2.0
yuananf/presto
presto-main/src/test/java/com/facebook/presto/operator/TestExchangeOperator.java
16654
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator;

import com.facebook.presto.execution.buffer.PagesSerde;
import com.facebook.presto.execution.buffer.PagesSerdeFactory;
import com.facebook.presto.execution.buffer.TestingPagesSerdeFactory;
import com.facebook.presto.metadata.RemoteTransactionHandle;
import com.facebook.presto.metadata.Split;
import com.facebook.presto.operator.ExchangeOperator.ExchangeOperatorFactory;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.split.RemoteSplit;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.google.common.base.Splitter;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableListMultimap.Builder;
import com.google.common.collect.Iterables;
import io.airlift.http.client.HttpClient;
import io.airlift.http.client.HttpStatus;
import io.airlift.http.client.Request;
import io.airlift.http.client.Response;
import io.airlift.http.client.testing.TestingHttpClient;
import io.airlift.http.client.testing.TestingResponse;
import io.airlift.slice.DynamicSliceOutput;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import static com.facebook.presto.PrestoMediaTypes.PRESTO_PAGES;
import static com.facebook.presto.SequencePageBuilder.createSequencePage;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_BUFFER_COMPLETE;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_PAGE_NEXT_TOKEN;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_PAGE_TOKEN;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_TASK_INSTANCE_ID;
import static com.facebook.presto.execution.buffer.PagesSerdeUtil.writePages;
import static com.facebook.presto.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde;
import static com.facebook.presto.operator.ExchangeOperator.REMOTE_CONNECTOR_ID;
import static com.facebook.presto.operator.PageAssertions.assertPageEquals;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.TestingTaskContext.createTaskContext;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static javax.ws.rs.core.HttpHeaders.CONTENT_TYPE;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

/**
 * End-to-end tests for {@link ExchangeOperator}: an in-process {@link TestingHttpClient} serves
 * pages out of per-task {@link TaskBuffer}s, and the tests drive the operator through the
 * split-add / page-read / finish lifecycle.
 *
 * <p>Single-threaded because the tests share mutable state ({@code taskBuffers}) across methods.
 */
@Test(singleThreaded = true)
public class TestExchangeOperator
{
    private static final List<Type> TYPES = ImmutableList.of(VARCHAR);
    // Every buffered "page" is this same sequence page; assertions compare against it.
    private static final Page PAGE = createSequencePage(TYPES, 10, 100);
    private static final PagesSerdeFactory SERDE_FACTORY = new TestingPagesSerdeFactory();
    private static final PagesSerde PAGES_SERDE = testingPagesSerde();

    private static final String TASK_1_ID = "task1";
    private static final String TASK_2_ID = "task2";
    private static final String TASK_3_ID = "task3";

    // Lazily creates one TaskBuffer per task id; cleared before each test method.
    private final LoadingCache<String, TaskBuffer> taskBuffers = CacheBuilder.newBuilder().build(CacheLoader.from(TaskBuffer::new));

    private ScheduledExecutorService scheduler;
    private ScheduledExecutorService scheduledExecutor;
    private HttpClient httpClient;
    private ExchangeClientSupplier exchangeClientSupplier;
    private ExecutorService pageBufferClientCallbackExecutor;

    @SuppressWarnings("resource")
    @BeforeClass
    public void setUp()
    {
        scheduler = newScheduledThreadPool(4, daemonThreadsNamed("test-%s"));
        scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s"));
        pageBufferClientCallbackExecutor = Executors.newSingleThreadExecutor();
        httpClient = new TestingHttpClient(new HttpClientHandler(taskBuffers), scheduler);

        exchangeClientSupplier = (systemMemoryUsageListener) -> new ExchangeClient(
                new DataSize(32, MEGABYTE),
                new DataSize(10, MEGABYTE),
                3,
                new Duration(1, TimeUnit.MINUTES),
                true,
                httpClient,
                scheduler,
                systemMemoryUsageListener,
                pageBufferClientCallbackExecutor);
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
    {
        // Null out everything so a leaked reference after teardown fails fast.
        httpClient.close();
        httpClient = null;

        scheduler.shutdownNow();
        scheduler = null;

        scheduledExecutor.shutdownNow();
        scheduledExecutor = null;

        pageBufferClientCallbackExecutor.shutdownNow();
        pageBufferClientCallbackExecutor = null;
    }

    @BeforeMethod
    public void setUpMethod()
    {
        // Start each test with empty task buffers.
        taskBuffers.invalidateAll();
    }

    /** Three closed buffers with 10 pages each: the operator reads all 30 pages and finishes. */
    @Test
    public void testSimple()
            throws Exception
    {
        SourceOperator operator = createExchangeOperator();

        operator.addSplit(newRemoteSplit(TASK_1_ID));
        operator.addSplit(newRemoteSplit(TASK_2_ID));
        operator.addSplit(newRemoteSplit(TASK_3_ID));
        operator.noMoreSplits();

        // add pages and close the buffers
        taskBuffers.getUnchecked(TASK_1_ID).addPages(10, true);
        taskBuffers.getUnchecked(TASK_2_ID).addPages(10, true);
        taskBuffers.getUnchecked(TASK_3_ID).addPages(10, true);

        // read the pages
        waitForPages(operator, 30);

        // wait for finished
        waitForFinished(operator);
    }

    // Builds a remote split whose location encodes the task id in the URI path.
    private static Split newRemoteSplit(String taskId)
    {
        return new Split(REMOTE_CONNECTOR_ID, new RemoteTransactionHandle(), new RemoteSplit(URI.create("http://localhost/" + taskId)));
    }

    /** The operator must not finish while any source buffer is still open. */
    @Test
    public void testWaitForClose()
            throws Exception
    {
        SourceOperator operator = createExchangeOperator();

        operator.addSplit(newRemoteSplit(TASK_1_ID));
        operator.addSplit(newRemoteSplit(TASK_2_ID));
        operator.addSplit(newRemoteSplit(TASK_3_ID));
        operator.noMoreSplits();

        // add pages and leave buffers open
        taskBuffers.getUnchecked(TASK_1_ID).addPages(1, false);
        taskBuffers.getUnchecked(TASK_2_ID).addPages(1, false);
        taskBuffers.getUnchecked(TASK_3_ID).addPages(1, false);

        // read 3 pages
        waitForPages(operator, 3);

        // verify state
        assertEquals(operator.isFinished(), false);
        assertEquals(operator.needsInput(), false);
        assertEquals(operator.getOutput(), null);

        // add more pages and close the buffers
        taskBuffers.getUnchecked(TASK_1_ID).addPages(2, true);
        taskBuffers.getUnchecked(TASK_2_ID).addPages(2, true);
        taskBuffers.getUnchecked(TASK_3_ID).addPages(2, true);

        // read all pages
        waitForPages(operator, 6);

        // wait for finished
        waitForFinished(operator);
    }

    /** The operator must not finish until noMoreSplits() is called, even if all known buffers are done. */
    @Test
    public void testWaitForNoMoreSplits()
            throws Exception
    {
        SourceOperator operator = createExchangeOperator();

        // add a buffer location containing one page and close the buffer
        operator.addSplit(newRemoteSplit(TASK_1_ID));
        // add pages and leave buffers open
        taskBuffers.getUnchecked(TASK_1_ID).addPages(1, true);

        // read page
        waitForPages(operator, 1);

        // verify state
        assertEquals(operator.isFinished(), false);
        assertEquals(operator.needsInput(), false);
        assertEquals(operator.getOutput(), null);

        // add a buffer location
        operator.addSplit(newRemoteSplit(TASK_2_ID));
        // set no more splits (buffer locations)
        operator.noMoreSplits();
        // add two pages and close the last buffer
        taskBuffers.getUnchecked(TASK_2_ID).addPages(2, true);

        // read all pages
        waitForPages(operator, 2);

        // wait for finished
        waitForFinished(operator);
    }

    /** finish() must complete the operator even while source buffers are still open. */
    @Test
    public void testFinish()
            throws Exception
    {
        SourceOperator operator = createExchangeOperator();

        operator.addSplit(newRemoteSplit(TASK_1_ID));
        operator.addSplit(newRemoteSplit(TASK_2_ID));
        operator.addSplit(newRemoteSplit(TASK_3_ID));
        operator.noMoreSplits();

        // add pages and leave buffers open
        taskBuffers.getUnchecked(TASK_1_ID).addPages(1, false);
        taskBuffers.getUnchecked(TASK_2_ID).addPages(1, false);
        taskBuffers.getUnchecked(TASK_3_ID).addPages(1, false);

        // read 3 pages
        waitForPages(operator, 3);

        // verify state
        assertEquals(operator.isFinished(), false);
        assertEquals(operator.needsInput(), false);
        assertEquals(operator.getOutput(), null);

        // finish without closing buffers
        operator.finish();

        // wait for finished
        waitForFinished(operator);
    }

    // Creates a fresh ExchangeOperator inside a new task/pipeline/driver context and checks that
    // it starts with zero system memory reserved.
    private SourceOperator createExchangeOperator()
    {
        ExchangeOperatorFactory operatorFactory = new ExchangeOperatorFactory(0, new PlanNodeId("test"), exchangeClientSupplier, SERDE_FACTORY, TYPES);

        DriverContext driverContext = createTaskContext(scheduler, scheduledExecutor, TEST_SESSION)
                .addPipelineContext(0, true, true)
                .addDriverContext();

        SourceOperator operator = operatorFactory.createOperator(driverContext);
        assertEquals(operator.getOperatorContext().getOperatorStats().getSystemMemoryReservation().toBytes(), 0);
        return operator;
    }

    /**
     * Polls the operator until {@code expectedPageCount} pages are produced (or a 10s deadline
     * passes), verifies each page equals {@link #PAGE}, and checks memory accounting went up
     * while pages were buffered and back to zero afterwards.
     */
    private static List<Page> waitForPages(Operator operator, int expectedPageCount)
            throws InterruptedException
    {
        // read expected pages or until 10 seconds has passed
        long endTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(10);
        List<Page> outputPages = new ArrayList<>();

        boolean greaterThanZero = false;
        while (System.nanoTime() - endTime < 0) {
            if (operator.isFinished()) {
                break;
            }
            if (operator.getOperatorContext().getDriverContext().getPipelineContext().getPipelineStats().getSystemMemoryReservation().toBytes() > 0) {
                greaterThanZero = true;
                break;
            }
            else {
                Thread.sleep(10);
            }
        }
        assertTrue(greaterThanZero);

        while (outputPages.size() < expectedPageCount && System.nanoTime() < endTime) {
            assertEquals(operator.needsInput(), false);
            if (operator.isFinished()) {
                break;
            }

            Page outputPage = operator.getOutput();
            if (outputPage != null) {
                outputPages.add(outputPage);
            }
            else {
                Thread.sleep(10);
            }
        }

        // sleep for a bit to make sure that there aren't extra pages on the way
        Thread.sleep(10);

        // verify state
        assertEquals(operator.needsInput(), false);
        assertNull(operator.getOutput());

        // verify pages
        assertEquals(outputPages.size(), expectedPageCount);
        for (Page page : outputPages) {
            assertPageEquals(operator.getTypes(), page, PAGE);
        }

        assertEquals(operator.getOperatorContext().getOperatorStats().getSystemMemoryReservation().toBytes(), 0);

        return outputPages;
    }

    /** Polls until the operator reports finished (10s deadline) and verifies the terminal state. */
    private static void waitForFinished(Operator operator)
            throws InterruptedException
    {
        // wait for finished or until 10 seconds has passed
        long endTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(10);
        while (System.nanoTime() - endTime < 0) {
            assertEquals(operator.needsInput(), false);
            assertNull(operator.getOutput());
            if (operator.isFinished()) {
                break;
            }
            Thread.sleep(10);
        }

        // verify final state
        assertEquals(operator.isFinished(), true);
        assertEquals(operator.needsInput(), false);
        assertNull(operator.getOutput());

        assertEquals(operator.getOperatorContext().getOperatorStats().getSystemMemoryReservation().toBytes(), 0);
    }

    /**
     * Fake HTTP endpoint implementing the exchange wire protocol: GET /{taskId}/{token} returns
     * the page at that token (if any) and the next-token / buffer-complete headers; DELETE
     * acknowledges buffer teardown.
     */
    private static class HttpClientHandler
            implements TestingHttpClient.Processor
    {
        private final LoadingCache<String, TaskBuffer> taskBuffers;

        public HttpClientHandler(LoadingCache<String, TaskBuffer> taskBuffers)
        {
            this.taskBuffers = taskBuffers;
        }

        @Override
        public Response handle(Request request)
        {
            ImmutableList<String> parts = ImmutableList.copyOf(Splitter.on("/").omitEmptyStrings().split(request.getUri().getPath()));
            if (request.getMethod().equals("DELETE")) {
                assertEquals(parts.size(), 1);
                return new TestingResponse(HttpStatus.OK, ImmutableListMultimap.of(), new byte[0]);
            }

            assertEquals(parts.size(), 2);
            String taskId = parts.get(0);
            int pageToken = Integer.parseInt(parts.get(1));

            Builder<String, String> headers = ImmutableListMultimap.builder();
            headers.put(PRESTO_TASK_INSTANCE_ID, "task-instance-id");
            headers.put(PRESTO_PAGE_TOKEN, String.valueOf(pageToken));

            TaskBuffer taskBuffer = taskBuffers.getUnchecked(taskId);
            Page page = taskBuffer.getPage(pageToken);
            headers.put(CONTENT_TYPE, PRESTO_PAGES);
            if (page != null) {
                // Page available: serialize it and advance the token.
                headers.put(PRESTO_PAGE_NEXT_TOKEN, String.valueOf(pageToken + 1));
                headers.put(PRESTO_BUFFER_COMPLETE, String.valueOf(false));
                DynamicSliceOutput output = new DynamicSliceOutput(256);
                writePages(PAGES_SERDE, output, page);
                return new TestingResponse(HttpStatus.OK, headers.build(), output.slice().getInput());
            }
            else if (taskBuffer.isFinished()) {
                // No page and the buffer is closed and fully acknowledged: report completion.
                headers.put(PRESTO_PAGE_NEXT_TOKEN, String.valueOf(pageToken));
                headers.put(PRESTO_BUFFER_COMPLETE, String.valueOf(true));
                return new TestingResponse(HttpStatus.OK, headers.build(), new byte[0]);
            }
            else {
                // No page yet but more may arrive: tell the client to retry the same token.
                headers.put(PRESTO_PAGE_NEXT_TOKEN, String.valueOf(pageToken));
                headers.put(PRESTO_BUFFER_COMPLETE, String.valueOf(false));
                return new TestingResponse(HttpStatus.NO_CONTENT, headers.build(), new byte[0]);
            }
        }
    }

    /**
     * Per-task page buffer. Tracks the highest token the client has requested so that
     * {@link #isFinished()} only reports true once the buffer is closed AND every page has been
     * acknowledged.
     */
    private static class TaskBuffer
    {
        private final List<Page> buffer = new ArrayList<>();
        private int acknowledgedPages;
        private boolean closed;

        private synchronized void addPages(int pages, boolean close)
        {
            addPages(Collections.nCopies(pages, PAGE));
            if (close) {
                closed = true;
            }
        }

        public synchronized void addPages(Iterable<Page> pages)
        {
            Iterables.addAll(buffer, pages);
        }

        public synchronized Page getPage(int pageSequenceId)
        {
            // Requesting token N implicitly acknowledges pages 0..N-1.
            acknowledgedPages = Math.max(acknowledgedPages, pageSequenceId);
            if (pageSequenceId >= buffer.size()) {
                return null;
            }
            return buffer.get(pageSequenceId);
        }

        private synchronized boolean isFinished()
        {
            return closed && acknowledgedPages == buffer.size();
        }
    }
}
apache-2.0
ayltai/Newspaper
app/src/main/java/com/github/ayltai/newspaper/view/ModelPresenter.java
659
package com.github.ayltai.newspaper.view;

import javax.annotation.Nonnull;

import androidx.annotation.CallSuper;
import androidx.annotation.NonNull;
import androidx.annotation.UiThread;

import lombok.Getter;
import lombok.Setter;

/**
 * A {@link BasePresenter} that additionally holds a model object and binds it to the attached
 * view whenever a view (re-)attaches.
 *
 * @param <M> the type of the model held by this presenter
 * @param <V> the type of the view controlled by this presenter
 */
public abstract class ModelPresenter<M, V extends Presenter.View> extends BasePresenter<V> {
    // Current model instance; getter/setter are generated by Lombok.
    @Getter
    @Setter
    private M model;

    /**
     * Pushes the current model state to the attached view. Implementations must run on the UI
     * thread.
     */
    @UiThread
    public abstract void bindModel();

    /**
     * Re-binds the model on every view attachment so a freshly attached view immediately reflects
     * the current model state.
     *
     * @param view              the view being attached; never {@code null}
     * @param isFirstAttachment {@code true} if this is the first time this view is attached
     */
    @CallSuper
    @Override
    public void onViewAttached(@Nonnull @NonNull @lombok.NonNull final V view, final boolean isFirstAttachment) {
        super.onViewAttached(view, isFirstAttachment);

        this.bindModel();
    }
}
apache-2.0
ontop/ontop
db/rdb/src/main/java/it/unibz/inf/ontop/generation/serializer/impl/HSQLDBSelectFromWhereSerializer.java
1125
package it.unibz.inf.ontop.generation.serializer.impl;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import it.unibz.inf.ontop.generation.algebra.SelectFromWhereWithModifiers;
import it.unibz.inf.ontop.generation.serializer.SelectFromWhereSerializer;
import it.unibz.inf.ontop.dbschema.DBParameters;
import it.unibz.inf.ontop.model.term.TermFactory;

/**
 * HSQLDB dialect variant of {@link DefaultSelectFromWhereSerializer}. The only dialect-specific
 * behavior visible here is how string constants are rendered into SQL.
 */
@Singleton
public class HSQLDBSelectFromWhereSerializer extends DefaultSelectFromWhereSerializer implements SelectFromWhereSerializer {

    @Inject
    private HSQLDBSelectFromWhereSerializer(TermFactory termFactory) {
        super(new DefaultSQLTermSerializer(termFactory) {
            // Renders a string constant as a single-quoted SQL literal.
            // NOTE(review): embedded single quotes are not doubled/escaped here — confirm that
            // constants are sanitized before reaching this serializer.
            @Override
            protected String serializeStringConstant(String constant) {
                return "'" + constant + "'";
            }
        });
    }

    /**
     * Serializes the query tree by delegating to the default visitor-based serializer, using the
     * dialect's quoted-identifier factory from the DB parameters.
     */
    @Override
    public SelectFromWhereSerializer.QuerySerialization serialize(SelectFromWhereWithModifiers selectFromWhere, DBParameters dbParameters) {
        return selectFromWhere.acceptVisitor(
                new DefaultRelationVisitingSerializer(dbParameters.getQuotedIDFactory()));
    }
}
apache-2.0
cgs1999/mybams
src/common/com/pinhuba/common/pages/PagerHelper.java
2632
package com.pinhuba.common.pages;

import java.util.List;

/**
 * Helper for building and navigating {@link Pager} objects from request parameters.
 *
 * <p>The construction and navigation logic was previously duplicated between
 * {@code getPager} and {@code getSessionPager}; it is now shared via the private
 * {@link #createPager} and {@link #navigate} helpers.
 */
public class PagerHelper {

	/**
	 * Builds a pager from raw request parameters and applies the requested navigation.
	 *
	 * @param pageSize    requested page size, or {@code null} for the default size
	 * @param currentPage current page number, or {@code null} on the first query
	 * @param totalRows   total number of result rows
	 * @param pageMethod  navigation action ("first"/"previous"/"next"/"last"), or anything else for a plain go()
	 * @return the configured {@link Pager}
	 */
	protected static Pager getPager(String pageSize, String currentPage, int totalRows, String pageMethod) {
		Pager pager = createPager(pageSize, totalRows);
		navigate(pager, currentPage, pageMethod);
		return pager;
	}

	/**
	 * Same as {@link #getPager(String, String, int, String)} but also attaches the session-held
	 * result list to the pager before navigating.
	 */
	protected static Pager getSessionPager(String pageSize, String currentPage, int totalRows, String pageMethod, List list) {
		Pager pager = createPager(pageSize, totalRows);
		// Attach the result list before navigating, matching the original ordering in case
		// navigation operates on the list.
		pager.setResultList(list);
		navigate(pager, currentPage, pageMethod);
		return pager;
	}

	// Creates a Pager with the given page size, falling back to the default size when absent.
	private static Pager createPager(String pageSize, int totalRows) {
		if (pageSize != null) {
			return new Pager(totalRows, Integer.parseInt(pageSize));
		}
		return new Pager(totalRows);
	}

	// Applies the current page number (when this is not the first query of the page) and then the
	// requested navigation action. Constant-first equals() makes a null pageMethod fall through to
	// the default go(), exactly as the original null-check did.
	private static void navigate(Pager pager, String currentPage, String pageMethod) {
		if (currentPage != null) {
			pager.refresh(Integer.parseInt(currentPage));
		}
		if ("first".equals(pageMethod)) {
			pager.first();
		} else if ("previous".equals(pageMethod)) {
			pager.previous();
		} else if ("next".equals(pageMethod)) {
			pager.next();
		} else if ("last".equals(pageMethod)) {
			pager.last();
		} else {
			pager.go();
		}
	}

	/**
	 * Rebuilds a pager from an existing one with an updated row count.
	 */
	public static Pager getPager(Pager oldPager, int rowCount) {
		Pager newPager = getPager(String.valueOf(oldPager.getPageSize()),
				String.valueOf(oldPager.getCurrentPage()),
				rowCount,
				oldPager.getPageMethod());
		return newPager;
	}

	/**
	 * Rebuilds a session pager from an existing one with an updated row count and result list.
	 */
	public static Pager getSessionPager(Pager oldPager, int rowCount, List resultList) {
		Pager newPager = getSessionPager(String.valueOf(oldPager.getPageSize()),
				String.valueOf(oldPager.getCurrentPage()),
				rowCount,
				oldPager.getPageMethod(),
				resultList);
		return newPager;
	}

	/**
	 * Sanitizes a page number (checkPageNo).
	 *
	 * @param pageNo requested page number, possibly {@code null}
	 * @return 1 when {@code pageNo} is {@code null} or less than 1, otherwise {@code pageNo}
	 */
	public static int cpn(Integer pageNo) {
		return (pageNo == null || pageNo < 1) ? 1 : pageNo;
	}
}
apache-2.0
k21/buck
src/com/facebook/buck/artifact_cache/ThriftArtifactCache.java
15258
/*
 * Copyright 2016-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.artifact_cache;

import com.facebook.buck.artifact_cache.thrift.ArtifactMetadata;
import com.facebook.buck.artifact_cache.thrift.BuckCacheFetchRequest;
import com.facebook.buck.artifact_cache.thrift.BuckCacheFetchResponse;
import com.facebook.buck.artifact_cache.thrift.BuckCacheRequest;
import com.facebook.buck.artifact_cache.thrift.BuckCacheRequestType;
import com.facebook.buck.artifact_cache.thrift.BuckCacheResponse;
import com.facebook.buck.artifact_cache.thrift.BuckCacheStoreRequest;
import com.facebook.buck.artifact_cache.thrift.PayloadInfo;
import com.facebook.buck.io.LazyPath;
import com.facebook.buck.log.Logger;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.slb.HttpResponse;
import com.facebook.buck.slb.ThriftProtocol;
import com.facebook.buck.slb.ThriftUtil;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.io.ByteSource;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.List;
import java.util.Optional;
import okhttp3.MediaType;
import okhttp3.Request;
import okhttp3.RequestBody;
import okio.BufferedSink;

/**
 * This is the Thrift protocol for the cache. The underlying channel is still HTTP but the payload
 * is Thrift. To learn a bit more about the protocol please look at ThriftArtifactCacheProtocol.
 */
public class ThriftArtifactCache extends AbstractNetworkCache {
  private static final Logger LOG = Logger.get(ThriftArtifactCache.class);

  // Content type of the hybrid (Thrift header + raw payload) stream.
  public static final MediaType HYBRID_THRIFT_STREAM_CONTENT_TYPE =
      MediaType.parse("application/x-hybrid-thrift-binary");
  public static final String PROTOCOL_HEADER = "X-Thrift-Protocol";
  public static final ThriftProtocol PROTOCOL = ThriftProtocol.COMPACT;

  private final String hybridThriftEndpoint;
  private final boolean distributedBuildModeEnabled;

  public ThriftArtifactCache(NetworkCacheArgs args) {
    super(args);
    Preconditions.checkArgument(
        args.getThriftEndpointPath().isPresent(),
        "Hybrid thrift endpoint path is mandatory for the ThriftArtifactCache.");
    this.hybridThriftEndpoint = args.getThriftEndpointPath().orElse("");
    this.distributedBuildModeEnabled = args.distributedBuildModeEnabled();
  }

  /**
   * Fetches the artifact for {@code ruleKey} over the hybrid Thrift/HTTP endpoint.
   *
   * <p>The payload is first written to a temp file and only moved into {@code output} after the
   * metadata and MD5 checks pass, so a failed or corrupted download never leaves a partial file
   * at the destination. Protocol-level failures are reported as {@link CacheResult} errors rather
   * than thrown.
   */
  @Override
  protected FetchResult fetchImpl(RuleKey ruleKey, LazyPath output) throws IOException {
    FetchResult.Builder resultBuilder = FetchResult.builder();
    BuckCacheFetchRequest fetchRequest = new BuckCacheFetchRequest();
    com.facebook.buck.artifact_cache.thrift.RuleKey thriftRuleKey =
        new com.facebook.buck.artifact_cache.thrift.RuleKey();
    thriftRuleKey.setHashString(ruleKey.getHashCode().toString());
    fetchRequest.setRuleKey(thriftRuleKey);
    fetchRequest.setRepository(getRepository());
    fetchRequest.setScheduleType(scheduleType);
    fetchRequest.setDistributedBuildModeEnabled(distributedBuildModeEnabled);
    BuckCacheRequest cacheRequest = new BuckCacheRequest();
    cacheRequest.setType(BuckCacheRequestType.FETCH);
    cacheRequest.setFetchRequest(fetchRequest);

    LOG.verbose("Will fetch key %s", thriftRuleKey);

    final ThriftArtifactCacheProtocol.Request request =
        ThriftArtifactCacheProtocol.createRequest(PROTOCOL, cacheRequest);
    Request.Builder builder = toOkHttpRequest(request);
    try (HttpResponse httpResponse = fetchClient.makeRequest(hybridThriftEndpoint, builder)) {
      if (httpResponse.statusCode() != 200) {
        String message =
            String.format(
                "Failed to fetch cache artifact with HTTP status code [%d:%s] "
                    + " to url [%s] for rule key [%s].",
                httpResponse.statusCode(),
                httpResponse.statusMessage(),
                httpResponse.requestUrl(),
                ruleKey.toString());
        LOG.error(message);
        return resultBuilder
            .setCacheResult(CacheResult.error(getName(), getMode(), message))
            .build();
      }

      try (ThriftArtifactCacheProtocol.Response response =
          ThriftArtifactCacheProtocol.parseResponse(PROTOCOL, httpResponse.getBody())) {
        resultBuilder.setResponseSizeBytes(httpResponse.contentLength());

        BuckCacheResponse cacheResponse = response.getThriftData();
        if (!cacheResponse.isWasSuccessful()) {
          LOG.warn("Request was unsuccessful: %s", cacheResponse.getErrorMessage());
          return resultBuilder
              .setCacheResult(
                  CacheResult.error(getName(), getMode(), cacheResponse.getErrorMessage()))
              .build();
        }

        BuckCacheFetchResponse fetchResponse = cacheResponse.getFetchResponse();

        if (LOG.isDebugEnabled()) {
          LOG.debug(
              "Debug info for cache fetch request: request=[%s] response=[%s]",
              ThriftUtil.thriftToDebugJson(cacheRequest),
              ThriftUtil.thriftToDebugJson(cacheResponse));
        }

        if (!fetchResponse.isArtifactExists()) {
          LOG.verbose("Artifact did not exist.");
          return resultBuilder.setCacheResult(CacheResult.miss()).build();
        }

        LOG.verbose("Got artifact. Attempting to read payload.");
        // Download into a scratch file; only moved to the real output after validation below.
        Path tmp = createTempFileForDownload();
        ThriftArtifactCacheProtocol.Response.ReadPayloadInfo readResult;
        try (OutputStream tmpFile = getProjectFilesystem().newFileOutputStream(tmp)) {
          try {
            readResult = response.readPayload(tmpFile);
          } catch (IOException e) {
            LOG.debug(e, "encountered an exception while receiving the payload for %s", ruleKey);
            throw e;
          }
          LOG.verbose("Successfully read payload: %d bytes.", readResult.getBytesRead());
        }

        if (!fetchResponse.isSetMetadata()) {
          String msg =
              String.format(
                  "ArtifactMetadata section is missing in the response. response=[%s]",
                  ThriftUtil.thriftToDebugJson(fetchResponse));
          return resultBuilder.setCacheResult(CacheResult.error(getName(), getMode(), msg)).build();
        }
        ArtifactMetadata metadata = fetchResponse.getMetadata();

        if (LOG.isVerboseEnabled()) {
          LOG.verbose(
              String.format(
                  "Fetched artifact with rule key [%s] contains the following metadata: [%s].",
                  ruleKey, ThriftUtil.thriftToDebugJson(metadata)));
        }

        if (!metadata.isSetRuleKeys()) {
          return resultBuilder
              .setCacheResult(
                  CacheResult.error(
                      getName(), getMode(), "Rule key section in the metadata is not set."))
              .build();
        }
        ImmutableSet<RuleKey> associatedRuleKeys = null;
        try {
          associatedRuleKeys = toImmutableSet(metadata.getRuleKeys());
        } catch (IllegalArgumentException e) {
          String msg =
              String.format(
                  "Exception parsing the rule keys in the metadata section [%s] with exception [%s].",
                  ThriftUtil.thriftToDebugJson(metadata), e.toString());
          return resultBuilder.setCacheResult(CacheResult.error(getName(), getMode(), msg)).build();
        }

        resultBuilder
            .setBuildTarget(Optional.ofNullable(metadata.getBuildTarget()))
            .setAssociatedRuleKeys(associatedRuleKeys)
            .setArtifactSizeBytes(readResult.getBytesRead());
        if (!metadata.isSetArtifactPayloadMd5()) {
          String msg = "Fetched artifact is missing the MD5 hash.";
          LOG.warn(msg);
        } else {
          resultBuilder.setArtifactContentHash(metadata.getArtifactPayloadMd5());
          // Reject the download if the payload's MD5 does not match the server-reported hash.
          if (!readResult
              .getMd5Hash()
              .equals(fetchResponse.getMetadata().getArtifactPayloadMd5())) {
            String msg =
                String.format(
                    "The artifact fetched from cache is corrupted. ExpectedMD5=[%s] ActualMD5=[%s]",
                    fetchResponse.getMetadata().getArtifactPayloadMd5(), readResult.getMd5Hash());
            LOG.error(msg);
            return resultBuilder
                .setCacheResult(CacheResult.error(getName(), getMode(), msg))
                .build();
          }
        }

        // This makes sure we don't have 'half downloaded files' in the dir cache.
        getProjectFilesystem().move(tmp, output.get(), StandardCopyOption.REPLACE_EXISTING);
        return resultBuilder
            .setCacheResult(
                CacheResult.hit(
                    getName(),
                    getMode(),
                    ImmutableMap.copyOf(fetchResponse.getMetadata().getMetadata()),
                    readResult.getBytesRead()))
            .build();
      }
    }
  }

  // Converts the Thrift rule-key wire representation into Buck RuleKeys; throws
  // IllegalArgumentException on malformed hash strings (handled by the caller).
  private static ImmutableSet<RuleKey> toImmutableSet(
      List<com.facebook.buck.artifact_cache.thrift.RuleKey> ruleKeys) {
    return ImmutableSet.copyOf(
        Iterables.transform(ruleKeys, input -> new RuleKey(input.getHashString())));
  }

  /**
   * Stores {@code file} and its metadata for {@code info} over the hybrid Thrift/HTTP endpoint.
   *
   * <p>Unlike {@link #fetchImpl}, a non-200 HTTP response here is thrown as an
   * {@link IOException}; a Thrift-level failure is reported via {@code reportFailureWithFormatKey}
   * and reflected in the returned {@link StoreResult}.
   */
  @Override
  protected StoreResult storeImpl(final ArtifactInfo info, final Path file) throws IOException {
    StoreResult.Builder resultBuilder = StoreResult.builder();
    // Lazily re-opens the file so the payload can be streamed (and re-read for hashing).
    final ByteSource artifact =
        new ByteSource() {
          @Override
          public InputStream openStream() throws IOException {
            return getProjectFilesystem().newFileInputStream(file);
          }
        };

    BuckCacheStoreRequest storeRequest = new BuckCacheStoreRequest();
    ArtifactMetadata artifactMetadata =
        infoToMetadata(info, artifact, getRepository(), scheduleType, distributedBuildModeEnabled);
    storeRequest.setMetadata(artifactMetadata);
    PayloadInfo payloadInfo = new PayloadInfo();
    long artifactSizeBytes = artifact.size();
    payloadInfo.setSizeBytes(artifactSizeBytes);
    BuckCacheRequest cacheRequest = new BuckCacheRequest();
    cacheRequest.addToPayloads(payloadInfo);
    cacheRequest.setType(BuckCacheRequestType.STORE);
    cacheRequest.setStoreRequest(storeRequest);

    if (LOG.isVerboseEnabled()) {
      LOG.verbose(
          String.format(
              "Storing artifact with metadata: [%s].",
              ThriftUtil.thriftToDebugJson(artifactMetadata)));
    }

    final ThriftArtifactCacheProtocol.Request request =
        ThriftArtifactCacheProtocol.createRequest(PROTOCOL, cacheRequest, artifact);
    Request.Builder builder = toOkHttpRequest(request);
    resultBuilder.setRequestSizeBytes(request.getRequestLengthBytes());
    try (HttpResponse httpResponse = storeClient.makeRequest(hybridThriftEndpoint, builder)) {
      if (httpResponse.statusCode() != 200) {
        throw new IOException(
            String.format(
                "Failed to store cache artifact with HTTP status code [%d:%s] "
                    + " to url [%s] for build target [%s] that has size [%d] bytes.",
                httpResponse.statusCode(),
                httpResponse.statusMessage(),
                httpResponse.requestUrl(),
                info.getBuildTarget().orElse(null),
                artifactSizeBytes));
      }

      try (ThriftArtifactCacheProtocol.Response response =
          ThriftArtifactCacheProtocol.parseResponse(PROTOCOL, httpResponse.getBody())) {
        BuckCacheResponse cacheResponse = response.getThriftData();
        if (!cacheResponse.isWasSuccessful()) {
          reportFailureWithFormatKey(
              "Failed to store artifact with thriftErrorMessage=[%s] "
                  + "url=[%s] artifactSizeBytes=[%d]",
              response.getThriftData().getErrorMessage(),
              httpResponse.requestUrl(),
              artifactSizeBytes);
        }

        resultBuilder.setArtifactContentHash(storeRequest.getMetadata().artifactPayloadMd5);
        resultBuilder.setWasStoreSuccessful(cacheResponse.isWasSuccessful());

        if (LOG.isDebugEnabled()) {
          LOG.debug(
              "Debug info for cache store request: artifactMetadata=[%s] response=[%s]",
              ThriftUtil.thriftToDebugJson(artifactMetadata),
              ThriftUtil.thriftToDebugJson(cacheResponse));
        }
      }
    }
    return resultBuilder.build();
  }

  // Creates a uniquely-named temp file in the project's scratch dir for in-flight downloads.
  private Path createTempFileForDownload() throws IOException {
    getProjectFilesystem().mkdirs(getProjectFilesystem().getBuckPaths().getScratchDir());
    return getProjectFilesystem()
        .createTempFile(
            getProjectFilesystem().getBuckPaths().getScratchDir(), "buckcache_artifact", ".tmp");
  }

  // Builds the Thrift ArtifactMetadata for a store request, including the payload MD5 computed
  // from the (re-readable) ByteSource.
  private static ArtifactMetadata infoToMetadata(
      ArtifactInfo info,
      ByteSource file,
      String repository,
      String scheduleType,
      boolean distributedBuildModeEnabled)
      throws IOException {
    ArtifactMetadata metadata = new ArtifactMetadata();
    if (info.getBuildTarget().isPresent()) {
      metadata.setBuildTarget(info.getBuildTarget().get().toString());
    }
    metadata.setRuleKeys(
        ImmutableList.copyOf(
            Iterables.transform(
                info.getRuleKeys(),
                input -> {
                  com.facebook.buck.artifact_cache.thrift.RuleKey ruleKey =
                      new com.facebook.buck.artifact_cache.thrift.RuleKey();
                  ruleKey.setHashString(input.getHashCode().toString());
                  return ruleKey;
                })));
    metadata.setMetadata(info.getMetadata());
    metadata.setArtifactPayloadMd5(ThriftArtifactCacheProtocol.computeMd5Hash(file));
    metadata.setRepository(repository);
    metadata.setScheduleType(scheduleType);
    metadata.setDistributedBuildModeEnabled(distributedBuildModeEnabled);

    return metadata;
  }

  // Wraps the hybrid Thrift request in an OkHttp POST with a streaming body so the payload is
  // written directly to the wire rather than buffered in memory.
  private static Request.Builder toOkHttpRequest(
      final ThriftArtifactCacheProtocol.Request request) {
    Request.Builder builder =
        new Request.Builder().addHeader(PROTOCOL_HEADER, PROTOCOL.toString().toLowerCase());
    builder.post(
        new RequestBody() {
          @Override
          public MediaType contentType() {
            return HYBRID_THRIFT_STREAM_CONTENT_TYPE;
          }

          @Override
          public long contentLength() throws IOException {
            return request.getRequestLengthBytes();
          }

          @Override
          public void writeTo(BufferedSink bufferedSink) throws IOException {
            request.writeAndClose(bufferedSink.outputStream());
          }
        });
    return builder;
  }
}
apache-2.0
reportportal/commons-dao
src/main/java/com/epam/ta/reportportal/dao/IntegrationTypeRepository.java
1809
/*
 * Copyright 2019 EPAM Systems
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.epam.ta.reportportal.dao;

import com.epam.ta.reportportal.entity.enums.IntegrationGroupEnum;
import com.epam.ta.reportportal.entity.integration.IntegrationType;

import java.util.List;
import java.util.Optional;

/**
 * Repository for {@link com.epam.ta.reportportal.entity.integration.IntegrationType} entity.
 * Query implementations are derived from the method names by Spring Data.
 *
 * @author Yauheni_Martynau
 */
public interface IntegrationTypeRepository extends ReportPortalRepository<IntegrationType, Long> {

	/**
	 * Retrieve all {@link IntegrationType} by {@link IntegrationType#integrationGroup}
	 *
	 * @param integrationGroup {@link IntegrationType#integrationGroup}
	 * @return The {@link List} of the {@link IntegrationType}
	 */
	List<IntegrationType> findAllByIntegrationGroup(IntegrationGroupEnum integrationGroup);

	/**
	 * Retrieve all {@link IntegrationType} ordered by {@link IntegrationType#creationDate} in ascending order
	 *
	 * @return The {@link List} of the {@link IntegrationType}
	 */
	List<IntegrationType> findAllByOrderByCreationDate();

	/**
	 * Find integration by name
	 *
	 * @param name Integration name
	 * @return The {@link Optional} of the {@link IntegrationType}
	 */
	Optional<IntegrationType> findByName(String name);
}
apache-2.0
state-hiu/geowave
geowave-raster/src/main/java/mil/nga/giat/geowave/raster/Resolution.java
2056
package mil.nga.giat.geowave.raster;

import java.nio.ByteBuffer;
import java.util.Arrays;

import mil.nga.giat.geowave.index.Persistable;

/**
 * Holds a resolution value for each dimension of a raster and supports
 * GeoWave's {@link Persistable} binary round-trip. Resolutions compare by the
 * sum of their per-dimension values.
 */
public class Resolution implements
		Comparable<Resolution>,
		Persistable
{
	private double[] resolutionPerDimension;

	/** No-arg constructor required by {@link Persistable} deserialization. */
	protected Resolution() {}

	public Resolution(
			final double[] resolutionPerDimension ) {
		this.resolutionPerDimension = resolutionPerDimension;
	}

	/** @return the number of dimensions this resolution describes */
	public int getDimensions() {
		return resolutionPerDimension.length;
	}

	/**
	 * @param dimension zero-based dimension index
	 * @return the resolution for that dimension
	 */
	public double getResolution(
			final int dimension ) {
		return resolutionPerDimension[dimension];
	}

	public double[] getResolutionPerDimension() {
		return resolutionPerDimension;
	}

	@Override
	public int hashCode() {
		// Equivalent to the canonical prime-accumulator form with a single field.
		return 31 + Arrays.hashCode(resolutionPerDimension);
	}

	@Override
	public boolean equals(
			final Object obj ) {
		if (this == obj) {
			return true;
		}
		if ((obj == null) || (getClass() != obj.getClass())) {
			return false;
		}
		return Arrays.equals(
				resolutionPerDimension,
				((Resolution) obj).resolutionPerDimension);
	}

	@Override
	public int compareTo(
			final Resolution o ) {
		// Order by total resolution across all dimensions.
		return Double.compare(
				sum(resolutionPerDimension),
				sum(o.resolutionPerDimension));
	}

	// Plain left-to-right summation (intentionally not a compensated/stream sum,
	// so ordering is bit-for-bit identical to the historical behavior).
	private static double sum(
			final double[] values ) {
		double total = 0;
		for (final double v : values) {
			total += v;
		}
		return total;
	}

	@Override
	public byte[] toBinary() {
		// 8 bytes (one IEEE-754 double) per dimension.
		final ByteBuffer buf = ByteBuffer.allocate(resolutionPerDimension.length * 8);
		for (int i = 0; i < resolutionPerDimension.length; i++) {
			buf.putDouble(resolutionPerDimension[i]);
		}
		return buf.array();
	}

	@Override
	public void fromBinary(
			final byte[] bytes ) {
		final ByteBuffer buf = ByteBuffer.wrap(bytes);
		final int length = bytes.length / 8;
		resolutionPerDimension = new double[length];
		for (int i = 0; i < length; i++) {
			resolutionPerDimension[i] = buf.getDouble();
		}
	}
}
apache-2.0
fossamagna/liquibase
liquibase-core/src/main/java/liquibase/ui/ConsoleUIService.java
1986
package liquibase.ui;

import liquibase.AbstractExtensibleObject;

import java.io.PrintStream;

/**
 * {@link UIService} implementation that sends messages to stdout and stderr.
 */
public class ConsoleUIService extends AbstractExtensibleObject implements UIService {

    private PrintStream outputStream = System.out;
    // Fix: the class contract (see javadoc above) is "stdout and stderr", but the
    // error stream previously defaulted to System.out. Default it to stderr so
    // error output is separable from normal output when streams are redirected.
    private PrintStream errorStream = System.err;
    private boolean outputStackTraces = false;

    /**
     * Returns {@link liquibase.plugin.Plugin#PRIORITY_NOT_APPLICABLE} because it must be manually configured as needed
     */
    @Override
    public int getPriority() {
        return PRIORITY_NOT_APPLICABLE;
    }

    /** Writes the message (plus newline) to the configured output stream. */
    @Override
    public void sendMessage(String message) {
        getOutputStream().println(message);
    }

    /** Writes the message (plus newline) to the configured error stream. */
    @Override
    public void sendErrorMessage(String message) {
        getErrorStream().println(message);
    }

    /**
     * Writes the message to the error stream; also prints the exception's stack
     * trace when {@link #getOutputStackTraces()} is enabled.
     */
    @Override
    public void sendErrorMessage(String message, Throwable exception) {
        sendErrorMessage(message);
        if (getOutputStackTraces()) {
            exception.printStackTrace(getErrorStream());
        }
    }

    @SuppressWarnings("WeakerAccess")
    public PrintStream getOutputStream() {
        return outputStream;
    }

    @SuppressWarnings("unused")
    public void setOutputStream(PrintStream outputStream) {
        this.outputStream = outputStream;
    }

    @SuppressWarnings("WeakerAccess")
    public PrintStream getErrorStream() {
        return errorStream;
    }

    @SuppressWarnings("unused")
    public void setErrorStream(PrintStream errorStream) {
        this.errorStream = errorStream;
    }

    @SuppressWarnings("WeakerAccess")
    public boolean getOutputStackTraces() {
        return outputStackTraces;
    }

    /**
     * Set to true to output stacktraces. Defaults to not outputing them.
     */
    @SuppressWarnings("unused")
    public void setOutputStackTraces(boolean outputStackTraces) {
        this.outputStackTraces = outputStackTraces;
    }
}
apache-2.0
wmm387/wmm
app/src/main/java/com/wangyuanwmm/wmm/entity/ZhihuDailyNews.java
1061
package com.wangyuanwmm.wmm.entity; import java.util.ArrayList; public class ZhihuDailyNews { private String date; private ArrayList<Question> stories; public String getDate() { return date; } public void setDate(String date) { this.date = date; } public ArrayList<Question> getStories() { return stories; } public class Question { private ArrayList<String> images; private int type; private int id; private String title; public ArrayList<String> getImages() { return images; } public int getType() { return type; } public void setType(int type) { this.type = type; } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } } }
apache-2.0
spring-cloud/spring-cloud-commons
spring-cloud-commons/src/main/java/org/springframework/cloud/client/circuitbreaker/AbstractCircuitBreakerFactory.java
2180
/*
 * Copyright 2013-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.client.circuitbreaker;

import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * Base class for factories which produce circuit breakers.
 *
 * @author Ryan Baxter
 */
public abstract class AbstractCircuitBreakerFactory<CONF, CONFB extends ConfigBuilder<CONF>> {

	private final ConcurrentHashMap<String, CONF> configurations = new ConcurrentHashMap<>();

	/**
	 * Registers a configuration for each given circuit breaker id. For every id a
	 * fresh builder is produced, handed to the customizing consumer, built, and the
	 * result stored (overwriting any previous configuration for that id).
	 * @param consumer a configuration builder consumer, allows callers to customize
	 * the builder before the configuration is built
	 * @param ids the ids of the circuit breakers to configure
	 */
	public void configure(Consumer<CONFB> consumer, String... ids) {
		for (String circuitBreakerId : ids) {
			CONFB builder = configBuilder(circuitBreakerId);
			consumer.accept(builder);
			getConfigurations().put(circuitBreakerId, builder.build());
		}
	}

	/**
	 * Provides access to the per-id configuration registry.
	 * @return the map of circuit breaker id to built configuration
	 */
	protected ConcurrentHashMap<String, CONF> getConfigurations() {
		return configurations;
	}

	/**
	 * Creates a configuration builder for the given id.
	 * @param id the id of the circuit breaker
	 * @return the configuration builder
	 */
	protected abstract CONFB configBuilder(String id);

	/**
	 * Sets the default configuration for circuit breakers.
	 * @param defaultConfiguration a function that returns the default configuration
	 */
	public abstract void configureDefault(Function<String, CONF> defaultConfiguration);

}
apache-2.0
fppt/jedis-mock
src/test/java/com/github/fppt/jedismock/comparisontests/connection/ConnectionOperationsTest.java
1122
package com.github.fppt.jedismock.comparisontests.connection; import com.github.fppt.jedismock.comparisontests.ComparisonBase; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; import redis.clients.jedis.Client; import redis.clients.jedis.Jedis; import static org.junit.jupiter.api.Assertions.assertEquals; @ExtendWith(ComparisonBase.class) public class ConnectionOperationsTest { @TestTemplate public void whenUsingQuit_EnsureTheResultIsOK(Jedis jedis) { //Create a new connection Client client = jedis.getClient(); Jedis newJedis = new Jedis(client.getHost(), client.getPort()); newJedis.set("A happy lucky key", "A sad value"); assertEquals("OK", newJedis.quit()); assertEquals("A sad value", jedis.get("A happy lucky key")); } @TestTemplate public void whenPinging_Pong(Jedis jedis) { assertEquals("PONG", jedis.ping()); } @TestTemplate public void whenSettingClientName_EnsureOkResponseIsReturned(Jedis jedis) { assertEquals("OK", jedis.clientSetname("P.Myo")); } }
apache-2.0
nature2014/nature
src/actions/backend/BackendVolunteerInterviewAction.java
2652
/**
 *
 */
package actions.backend;

import bl.beans.SourceCodeBean;
import vo.table.TableHeaderVo;
import vo.table.TableInitVo;
import vo.table.TableQueryVo;
import bl.beans.VolunteerBean;

import java.util.List;

/**
 * Backend action for the volunteer "interview" page: lists volunteers whose
 * status is {@link VolunteerBean#VIERFIED} and lets an admin record interviews.
 *
 * @author gudong
 * @since $Date:2014-02-10$
 */
public class BackendVolunteerInterviewAction extends BackendVolunteerAction {

    // Breadcrumb HTML shown above the table ("Staff management" / "Interview").
    @Override
    public String getTableTitle() {
        return "<li>员工管理</li><li class=\"active\">面试</li>";
    }

    // URL prefix for this action's endpoints, relative to the web-app context.
    @Override
    public String getActionPrex() {
        return getRequest().getContextPath() + "/backend/volunteerInterview";
    }

    // Page-specific JavaScript file for the interview table.
    @Override
    public String getCustomJs() {
        return getRequest().getContextPath() + "/js/volunteerInterview.js";
    }

    @Override
    public TableQueryVo getModel() {
        // Status codes: 0=registered, 1=verified, 2=interviewed, 3=in service, 4=deregistered.
        // This page only shows verified volunteers awaiting interview.
        TableQueryVo model = super.getModel();
        model.getFilter().put("status", VolunteerBean.VIERFIED);
        return model;
    }

    /**
     * Builds the table column definitions: searchable identity columns, a
     * occupation filter fed from the source-code dictionary, and fixed option
     * filters for registration origin and sex.
     */
    @Override
    public TableInitVo getTableInit() {
        TableInitVo init = new TableInitVo();
        // Searchable columns: name ("员工"=staff), code ("工号"=staff number), id card ("证件号").
        init.getAoColumns().add(new TableHeaderVo("name", "员工").enableSearch());
        init.getAoColumns().add(new TableHeaderVo("code", "工号").enableSearch());
        init.getAoColumns().add(new TableHeaderVo("identityCard", "证件号").enableSearch());
        // Build the occupation search options from all dictionary entries:
        // row 0 = option values (codes), row 1 = option labels (names).
        List<SourceCodeBean> sourceList = (List<SourceCodeBean>) SOURBUS.getAllLeaves().getResponseData();
        String[][] sources = new String[2][sourceList.size()];
        if (sourceList.size()>0) {
            for (int i = 0; i < sourceList.size(); i++) {
                sources[0][i] = sourceList.get(i).getCode();
                sources[1][i] = sourceList.get(i).getName();
            }
        } else {
            // No dictionary entries: pass null so the column renders without options.
            sources = null;
        }
        // "职称"=occupation/title, "注册来源"=registration origin (1=website, 2=WeChat), "性别"=sex (1=male, 2=female).
        init.getAoColumns().add(new TableHeaderVo("occupation", "职称").addSearchOptions(sources));
        init.getAoColumns().add(new TableHeaderVo("registerFrom", "注册来源").addSearchOptions(new String[][] { { "1", "2"}, { "网站", "微信"}}));
        init.getAoColumns().add(new TableHeaderVo("sex", "性别").addSearchOptions(new String[][]{{"1", "2"}, {"男", "女"}}));
        // Display-only columns: cell phone ("手机"), WeChat ("微信"), email ("邮箱").
        init.getAoColumns().add(new TableHeaderVo("cellPhone", "手机", false));
        init.getAoColumns().add(new TableHeaderVo("wechat", "微信", false));
        init.getAoColumns().add(new TableHeaderVo("email", "邮箱", false));
        init.setDisableTools(true);
        return init;
    }

    /**
     * Loads the volunteer selected by the request id and exposes it to the
     * interview view.
     *
     * @return the Struts {@code SUCCESS} result
     */
    public String interview() {
        VolunteerBean volunteer = (VolunteerBean) getBusiness().getLeaf(getId()).getResponseData();
        setVolunteer(volunteer);
        return SUCCESS;
    }
}
apache-2.0
cojen/Cojen
src/main/java/org/cojen/util/QuickConstructorGenerator.java
8592
/*
 *  Copyright 2007-2010 Brian S O'Neill
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

/*
 * Copyright 2006 Amazon Technologies, Inc. or its affiliates.
 * Amazon, Amazon.com and Carbonado are trademarks or registered trademarks
 * of Amazon Technologies, Inc. or its affiliates.  All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.cojen.util;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.UndeclaredThrowableException;
import java.util.Map;
import java.security.AccessController;
import java.security.PrivilegedAction;
import org.cojen.classfile.CodeBuilder;
import org.cojen.classfile.RuntimeClassFile;
import org.cojen.classfile.TypeDesc;

/**
 * Generates code to invoke constructors. This is a replacement for {@link
 * java.lang.reflect.Constructor} which is easier to use and performs
 * better. In one tested situation, overall performance was improved by about
 * 10%.
 *
 * <p>QuickConstructorGenerator is not general purpose however, as the
 * parameters to the constructor must be known, and the constructor must be
 * public. It is intended to be used for constructing instances of
 * auto-generated classes. The exact parameters may be known at compile time,
 * but the actual object type is not.
 *
 * @author Brian S O'Neill
 * @since 2.1
 */
public class QuickConstructorGenerator {
    // Two-level cache: factory interface -> (object type -> generated factory instance).
    // Outer keys are weakly held by identity; inner values are softly held so the
    // generated classes can be reclaimed under memory pressure.
    // Map<factory class, Map<object type, factory instance>>
    @SuppressWarnings("unchecked")
    private static Cache<Class<?>, Cache<Class<?>, Object>> cCache = new WeakIdentityCache(17);

    /**
     * Returns a factory instance for one type of object. Each method in the
     * interface defines a constructor via its parameters. Any checked
     * exceptions declared thrown by the constructor must also be declared by
     * the method. The method return types can be the same type as the
     * constructed object or a supertype.
     *
     * <p>Here is a contrived example for constructing strings. In practice,
     * such a string factory is useless, since the "new" operator can be
     * invoked directly.
     *
     * <pre>
     * public interface StringFactory {
     *     String newEmptyString();
     *
     *     String newStringFromChars(char[] chars);
     *
     *     String newStringFromBytes(byte[] bytes, String charsetName)
     *         throws UnsupportedEncodingException;
     * }
     * </pre>
     *
     * Here's an example of it being used:
     *
     * <pre>
     * StringFactory sf = QuickConstructorGenerator.getInstance(String.class, StringFactory.class);
     * ...
     * String str = sf.newStringFromChars(new char[] {'h', 'e', 'l', 'l', 'o'});
     * </pre>
     *
     * @param objectType type of object to construct
     * @param factory interface defining which objects can be constructed
     * @throws IllegalArgumentException if factory type is not an interface or
     * if it is malformed
     */
    @SuppressWarnings("unchecked")
    public static synchronized <F> F getInstance(final Class<?> objectType, final Class<F> factory) {
        // Fast path: return the previously generated factory if it is still cached.
        Cache<Class<?>, Object> innerCache = cCache.get(factory);
        if (innerCache == null) {
            innerCache = new SoftValueCache(5);
            cCache.put(factory, innerCache);
        }
        F instance = (F) innerCache.get(objectType);
        if (instance != null) {
            return instance;
        }
        // NOTE(review): these null checks run after the cache lookups above; a null
        // factory/objectType may already have failed inside cCache.get/innerCache.get —
        // verify the Cache implementation tolerates null keys.
        if (objectType == null) {
            throw new IllegalArgumentException("No object type");
        }
        if (factory == null) {
            throw new IllegalArgumentException("No factory type");
        }
        if (!factory.isInterface()) {
            throw new IllegalArgumentException("Factory must be an interface");
        }
        // Class generation needs defineClass permissions; run it privileged so
        // callers with restricted permissions can still use the generator.
        final Cache<Class<?>, Object> fInnerCache = innerCache;
        return AccessController.doPrivileged(new PrivilegedAction<F>() {
            public F run() {
                return getInstance(fInnerCache, objectType, factory);
            }
        });
    }

    // Slow path: validates the factory interface, generates a class implementing
    // it (one bytecode-built method per abstract factory method, each invoking
    // the matching public constructor of objectType), instantiates it, and caches
    // the instance.
    private static synchronized <F> F getInstance(Cache<Class<?>, Object> innerCache,
                                                  Class<?> objectType, Class<F> factory)
    {
        String prefix = objectType.getName();
        if (prefix.startsWith("java.")) {
            // Defining classes in java packages is restricted.
            int index = prefix.lastIndexOf('.');
            if (index > 0) {
                prefix = prefix.substring(index + 1);
            }
        }

        RuntimeClassFile cf = null;

        for (Method method : factory.getMethods()) {
            // Only abstract methods define constructors; default/static methods are skipped.
            if (!Modifier.isAbstract(method.getModifiers())) {
                continue;
            }

            // The factory method's parameter list must match a public constructor exactly.
            Constructor ctor;
            try {
                ctor = objectType.getConstructor((Class[]) method.getParameterTypes());
            } catch (NoSuchMethodException e) {
                throw new IllegalArgumentException(e);
            }

            if (!method.getReturnType().isAssignableFrom(objectType)) {
                throw new IllegalArgumentException
                    ("Method return type must be \"" + objectType.getName() +
                     "\" or supertype: " + method);
            }

            // Every checked exception the constructor can throw must be declared
            // (directly or via a supertype) by the factory method.
            Class<?>[] methodExTypes = method.getExceptionTypes();
            for (Class<?> ctorExType : ctor.getExceptionTypes()) {
                if (RuntimeException.class.isAssignableFrom(ctorExType) ||
                    Error.class.isAssignableFrom(ctorExType))
                {
                    continue;
                }
                exCheck: {
                    // Make sure method declares throwing it or a supertype.
                    for (Class<?> methodExType : methodExTypes) {
                        if (methodExType.isAssignableFrom(ctorExType)) {
                            break exCheck;
                        }
                    }
                    throw new IllegalArgumentException("Method must declare throwing \"" +
                                                      ctorExType.getName() + "\": " + method);
                }
            }

            // Lazily create the generated class file on the first valid method.
            if (cf == null) {
                cf = new RuntimeClassFile(prefix, null, objectType.getClassLoader());
                cf.setSourceFile(QuickConstructorGenerator.class.getName());
                cf.setTarget("1.5");
                cf.addInterface(factory);
                cf.markSynthetic();
                cf.addDefaultConstructor();
            }

            // Now define the method that constructs the object.
            // Emitted bytecode: new <objectType>; dup; load each parameter;
            // invokespecial <ctor>; areturn.
            CodeBuilder b = new CodeBuilder(cf.addMethod(method));
            b.newObject(TypeDesc.forClass(objectType));
            b.dup();
            int count = b.getParameterCount();
            for (int i=0; i<count; i++) {
                b.loadLocal(b.getParameter(i));
            }
            b.invoke(ctor);
            b.returnValue(TypeDesc.OBJECT);
        }

        if (cf == null) {
            // No methods found to implement.
            throw new IllegalArgumentException("No methods in factory to implement");
        }

        F instance;
        try {
            instance = (F) cf.defineClass().newInstance();
        } catch (IllegalAccessException e) {
            throw new UndeclaredThrowableException(e);
        } catch (InstantiationException e) {
            throw new UndeclaredThrowableException(e);
        }

        innerCache.put(objectType, instance);

        return instance;
    }
}
apache-2.0
drisoftie/EasyFragments
easyfrags/EasyFragments/src/main/java/com/drisoftie/frags/IFragManagedMenu.java
2119
/*
 * Copyright [2015] [Alexander Dridiger - drisoftie@gmail.com]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.drisoftie.frags;

import android.view.Menu;
import android.view.MenuItem;

/**
 * Contract for fragments that own and manage an options {@link Menu}:
 * controls inflation, visibility and per-item visibility changes.
 *
 * @author Alexander Dridiger
 */
public interface IFragManagedMenu {

    /**
     * Indicates if the {@link Menu} should be cleared before initiating it.
     *
     * @return {@code true} if the menu must be cleared before it is (re-)initialized
     */
    boolean shouldClearMenu();

    /**
     * Indicates if the {@link Menu} should be inflated.
     *
     * @param menu the menu being prepared for inflation
     * @return {@code true} if the menu should be inflated
     */
    boolean checkMenuInflation(Menu menu);

    /**
     * {@link Menu} was created and is ready to work with.
     */
    void onMenuReady();

    /**
     * Returns the resource id to the {@link Menu} resource to load.
     *
     * @return the menu XML resource id
     */
    int provideMenuRes();

    /**
     * Returns the inflated {@link Menu}.
     *
     * @return the menu
     */
    Menu getMenu();

    /**
     * Indicate that the {@link Menu} should be set to visible when created.
     *
     * @return {@code true} if the menu should start out visible
     */
    boolean isVisibleMenu();

    /**
     * Makes all the given {@link MenuItem}s by their id visible or invisible. If {@code itemIds} are {@code null}, all the {@link MenuItem}
     * s inside the {@link Menu} are targeted.
     *
     * @param visible {@code true} to show the items, {@code false} to hide them
     * @param itemIds the ids of the items to change; {@code null} targets every item
     */
    void changeMenuVisibilityById(boolean visible, int... itemIds);

    /**
     * Makes all the given {@link MenuItem}s visible or invisible. If {@code items} are {@code null}, all the {@link MenuItem}s inside the
     * {@link Menu} are targeted.
     *
     * @param visible {@code true} to show the items, {@code false} to hide them
     * @param items   the items to change; {@code null} targets every item
     */
    void changeMenuVisibility(boolean visible, MenuItem... items);
}
apache-2.0
UnquietCode/Flapi
src/main/java/unquietcode/tools/flapi/DescriptorBuilderException.java
1061
/********************************************************************* Copyright 2014 the Flapi authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ********************************************************************/ package unquietcode.tools.flapi; public class DescriptorBuilderException extends RuntimeException { public DescriptorBuilderException(String message) { super(message); } public DescriptorBuilderException(Throwable cause) { super(cause); } public DescriptorBuilderException(String message, Throwable cause) { super(message, cause); } }
apache-2.0
moparisthebest/beehive
beehive-netui-core/src/main/java/org/apache/beehive/netui/script/el/tokens/ExpressionToken.java
11520
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * $Header:$ */ package org.apache.beehive.netui.script.el.tokens; import java.lang.reflect.Array; import java.lang.reflect.Method; import java.util.List; import java.util.Map; import org.apache.beehive.netui.util.internal.cache.PropertyCache; import org.apache.beehive.netui.util.logging.Logger; import org.apache.beehive.netui.script.el.util.ParseUtils; /** * */ public abstract class ExpressionToken { private static final Logger LOGGER = Logger.getInstance(ArrayIndexToken.class); private static final PropertyCache PROPERTY_CACHE = new PropertyCache(); public abstract Object read(Object object); public abstract void write(Object object, Object value); public abstract String getTokenString(); /** * Lookup the <code>key</code> in the <code>map</code>. 
* @param map the map * @param key the key * @return the value found at <code>map.get(key)</code> or <code>null</code> if no value was found */ protected final Object mapLookup(Map map, Object key) { LOGGER.trace("get value from Map"); return map.get(key); } /** * Get a JavaBean property from the given <code>value</code> * @param value the JavaBean * @param propertyName the property name * @return the value of the property from the object <code>value</code> */ protected final Object beanLookup(Object value, Object propertyName) { LOGGER.trace("get JavaBean property : " + propertyName); return ParseUtils.getProperty(value, propertyName.toString(), PROPERTY_CACHE); } /** * Get the value in a {@link List} at <code>index</code>. * @param list the List * @param index the index * @return the value returned from <code>list.get(index)</code> */ protected final Object listLookup(List list, int index) { LOGGER.trace("get value in List index " + index); return list.get(index); } /** * Get the value from <code>array</code> at <code>index</code> * @param array the array * @param index the index * @return the value returned from <code>Array.get(array, index)</code> */ protected final Object arrayLookup(Object array, int index) { LOGGER.trace("get value from array index " + index); return Array.get(array, index); } /** * Update the value of <code>key</code> in <code>map</code> * @param map the map * @param key the key * @param value the value */ protected final void mapUpdate(Map map, Object key, Object value) { Object o = map.get(key); /* If a value exists in map.get(key), convert the "value" parameter into the type of map.get(key). It's a best guess as to what the type of the Map _should_ be without any further reflective information about the types contained in the map. 
*/ if(o != null) { Class type = o.getClass(); value = ParseUtils.convertType(value, type); } map.put(key, value); } protected final void arrayUpdate(Object array, int index, Object value) { Object converted = value; Class elementType = array.getClass().getComponentType(); if(!elementType.isAssignableFrom(value.getClass())) { converted = ParseUtils.convertType(value, elementType); } try { Array.set(array, index, converted); } catch(Exception e) { String msg = "An error occurred setting a value at index \"" + index + "\" on an array with component types \"" + elementType + "\". Cause: " + e.toString(); LOGGER.error(msg); throw new RuntimeException(msg); } } /** * Update a {@link List} with the Object <code>value</code> at <code>index</code>. * @param list the List * @param index the index * @param value the new value */ protected final void listUpdate(List list, int index, Object value) { Object converted = value; if(list.size() > index) { Object o = list.get(index); // can only convert types when there is an item in the currently requested place if(o != null) { Class itemType = o.getClass(); converted = ParseUtils.convertType(value, itemType); } list.set(index, value); } else { // @note: not sure that this is the right thing. Question is whether or not to insert nulls here to fill list up to "index" // @update: List doesn't guarantee that implementations will accept nulls. So, we can't rely on that as a solution. // @update: this is an unfortunate but necessary solution...unless the List has enough elements to // accomodate the new item at a particular index, this must be an error case. The reasons are this: // 1) can't fill the list with nulls, List implementations are allowed to disallow them // 2) can't just do an "add" to the list -- in processing [0] and [1] on an empty list, [1] may get processed first. // this will go into list slot [0]. 
then, [0] gets processed and simply overwrites the previous because it's // already in the list // 3) can't go to a mixed approach because there's no metadata about what has been done and no time to build // something that is apt to be complicated and exposed to the user // so... // the ultimate 8.1sp2 functionality is to simply disallow updating a value in a list that doesn't exist. that // being said, it is still possible to simply add to the list. if {actionForm.list[42]} inserts into the 42'nd // item, {actionForm.list} will just do an append on POST since there is no index specified. this fix does // not break backwards compatability because it will work on full lists and is completely broken now on empty // lists, so changing this just gives a better exception message that "ArrayIndexOutOfBounds". :) // // September 2, 2003 // ekoneil@apache.com // String msg = "An error occurred setting a value at index \"" + index + "\" because the list is " + (list != null ? (" of size " + list.size()) : "null") + ". " + "Be sure to allocate enough items in the List to accomodate any updates which may occur against the list."; LOGGER.error(msg); throw new RuntimeException(msg); } } /** * Update a JavaBean property named <code>identifier</code> with the given <code>value</code>. * @param bean the JavaBean * @param identifier the property name * @param value the new value */ protected final void beanUpdate(Object bean, Object identifier, Object value) { LOGGER.trace("Update bean \"" + bean + "\" property \"" + identifier + "\""); String propertyName = identifier.toString(); Class beanType = bean.getClass(); Class propType = PROPERTY_CACHE.getPropertyType(beanType, propertyName); // Get the type of the JavaBean property given reflected information from the JavaBean's type if(propType != null) { try { // The type of the JavaBean property is a List. To update it, get the List and // append the value to the end of the List. 
if(List.class.isAssignableFrom(propType)) { Method listGetter = PROPERTY_CACHE.getPropertyGetter(beanType, propertyName); if(listGetter != null) { List list = (List)listGetter.invoke(bean, (Object[])null); applyValuesToList(value, list); return; } } // The JavaBean is an Object, so set the Bean's property with the given value else { Method setter = PROPERTY_CACHE.getPropertySetter(beanType, propertyName); if(setter != null) { LOGGER.trace("Set property via setter method: [" + setter + "]"); Class targetType = setter.getParameterTypes()[0]; Object converted = ParseUtils.convertType(value, targetType); setter.invoke(bean, new Object[]{converted}); return; } } } catch(Exception e) { String msg = "Could not update proprety named \"" + propertyName + "\" on bean of type \"" + beanType + "\". Cause: " + e; LOGGER.error(msg, e); throw new RuntimeException(msg, e); } } String msg = "Could not update expression because a public JavaBean setter for the property \"" + identifier + "\" could not be found."; LOGGER.error(msg); throw new RuntimeException(msg); } /** * Attempt to convert a String indexString into an integer index. * @param indexString the index string * @return the converted integer */ protected final int parseIndex(String indexString) { try { return Integer.parseInt(indexString); } catch(Exception e) { String msg = "Error converting \"" + indexString + "\" into an integer. Cause: " + e; LOGGER.error(msg, e); throw new RuntimeException(msg, e); } } /** * Set a list of values on a {@link List}. 
The behavior of this method is different given * the type of <code>value</code>:<br/> * - value is java.lang.String[]: add each item in the String[] to the list * - value is a String: add the value to the list * - otherwise, add the value to the end of the list * @param value the value to apply to a list * @param list the {@link List} */ private static void applyValuesToList(Object value, List list) { if(list == null) { String msg = "Can not add a value to a null java.util.List"; LOGGER.error(msg); throw new RuntimeException(msg); } if(value instanceof String[]) { String[] ary = (String[])value; for(int i = 0; i < ary.length; i++) list.add(ary[i]); } else if(value instanceof String) list.add(value); // types that are not String[] or String are just set on the object else list.add(value); } }
apache-2.0
shufudong/bboss
bboss-util/src/org/frameworkset/util/SerialException.java
536
package org.frameworkset.util; public class SerialException extends RuntimeException { public SerialException() { // TODO Auto-generated constructor stub } public SerialException(String message) { super(message); // TODO Auto-generated constructor stub } public SerialException(Throwable cause) { super(cause); // TODO Auto-generated constructor stub } public SerialException(String message, Throwable cause) { super(message, cause); // TODO Auto-generated constructor stub } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-redshift/src/main/java/com/amazonaws/services/redshift/model/transform/SubscriptionCategoryNotFoundExceptionUnmarshaller.java
1666
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.redshift.model.transform; import org.w3c.dom.Node; import javax.annotation.Generated; import com.amazonaws.AmazonServiceException; import com.amazonaws.transform.StandardErrorUnmarshaller; import com.amazonaws.services.redshift.model.SubscriptionCategoryNotFoundException; @Generated("com.amazonaws:aws-java-sdk-code-generator") public class SubscriptionCategoryNotFoundExceptionUnmarshaller extends StandardErrorUnmarshaller { public SubscriptionCategoryNotFoundExceptionUnmarshaller() { super(SubscriptionCategoryNotFoundException.class); } @Override public AmazonServiceException unmarshall(Node node) throws Exception { // Bail out if this isn't the right error code that this // marshaller understands String errorCode = parseErrorCode(node); if (errorCode == null || !errorCode.equals("SubscriptionCategoryNotFound")) return null; SubscriptionCategoryNotFoundException e = (SubscriptionCategoryNotFoundException) super.unmarshall(node); return e; } }
apache-2.0
fortitudetec/hdfs-backup
s3-backup/s3-backup-store/src/test/java/backup/store/s3/S3BackupStoreUtil.java
2570
/* * Copyright 2016 Fortitude Technologies LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package backup.store.s3; import java.util.List; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; public class S3BackupStoreUtil { public static void removeBucket(String bucketName) throws Exception { removeAllObjects(bucketName); AmazonS3Client client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain()); client.deleteBucket(bucketName); } public static void removeAllObjects(String bucketName) throws Exception { AmazonS3Client client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain()); ObjectListing listObjects = client.listObjects(bucketName); List<S3ObjectSummary> objectSummaries = listObjects.getObjectSummaries(); for (S3ObjectSummary objectSummary : objectSummaries) { String key = objectSummary.getKey(); client.deleteObject(bucketName, key); } } public static void removeAllObjects(String bucketName, String prefix) throws Exception { AmazonS3Client client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain()); ObjectListing listObjects = client.listObjects(bucketName); List<S3ObjectSummary> objectSummaries = listObjects.getObjectSummaries(); for (S3ObjectSummary objectSummary : objectSummaries) { String key = objectSummary.getKey(); if (key.startsWith(prefix)) { 
client.deleteObject(bucketName, key); } } } public static boolean exists(String bucketName) throws Exception { AmazonS3Client client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain()); return client.doesBucketExist(bucketName); } public static void createBucket(String bucketName) throws Exception { AmazonS3Client client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain()); client.createBucket(bucketName); } }
apache-2.0
rkapsi/daap
src/main/java/org/ardverk/daap/chunks/impl/SongSampleRate.java
1399
/* * Digital Audio Access Protocol (DAAP) Library * Copyright (C) 2004-2010 Roger Kapsi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ardverk.daap.chunks.impl; import org.ardverk.daap.chunks.UIntChunk; /** * The sample rate of the Song in kHz. * * @author Roger Kapsi */ public class SongSampleRate extends UIntChunk { public static final int KHZ_44100 = 44100; /** * Creates a new SongSampleRate with 0 kHz You can change this value with * {@see #setValue(int)}. */ public SongSampleRate() { this(KHZ_44100); } /** * Creates a new SongSampleRate with the assigned sample rate. You can * change this value with {@see #setValue(int)}. * * @param <tt>rate</tt> the rate of this song in kHz. */ public SongSampleRate(long rate) { super("assr", "daap.songsamplerate", rate); } }
apache-2.0
Notronix/JaLAPI
src/main/java/com/notronix/lw/api/model/RefundError.java
955
package com.notronix.lw.api.model; import java.time.Instant; import java.util.UUID; public class RefundError { private UUID RefundRowId; private String ErrorMessage; private Instant DateStamp; private Boolean Acknowledged; public UUID getRefundRowId() { return RefundRowId; } public void setRefundRowId(UUID refundRowId) { RefundRowId = refundRowId; } public String getErrorMessage() { return ErrorMessage; } public void setErrorMessage(String errorMessage) { ErrorMessage = errorMessage; } public Instant getDateStamp() { return DateStamp; } public void setDateStamp(Instant dateStamp) { DateStamp = dateStamp; } public Boolean getAcknowledged() { return Acknowledged; } public void setAcknowledged(Boolean acknowledged) { Acknowledged = acknowledged; } }
apache-2.0
PATRIC3/p3_solr
solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
36795
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.util; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.regex.Pattern; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import 
org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.common.util.StrUtils; import org.apache.solr.core.RequestParams; import org.apache.solr.core.SolrCore; import org.apache.solr.handler.component.HighlightComponent; import org.apache.solr.handler.component.ResponseBuilder; import org.apache.solr.handler.component.ShardRequest; import org.apache.solr.highlight.SolrHighlighter; import org.apache.solr.parser.QueryParser; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.request.json.RequestUtil; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.CacheRegenerator; import org.apache.solr.search.DocIterator; import org.apache.solr.search.DocList; import org.apache.solr.search.DocSet; import org.apache.solr.search.FieldParams; import org.apache.solr.search.QParser; import org.apache.solr.search.QueryParsing; import org.apache.solr.search.ReturnFields; import org.apache.solr.search.SolrCache; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.SolrQueryParser; import org.apache.solr.search.SortSpecParsing; import org.apache.solr.search.SyntaxError; import com.google.common.collect.ImmutableMap; /** * <p>Utilities that may be of use to RequestHandlers.</p> * * <p> * Many of these functions have code that was stolen/mutated from * StandardRequestHandler. * </p> * * <p>:TODO: refactor StandardRequestHandler to use these utilities</p> * * <p>:TODO: Many "standard" functionality methods are not cognisant of * default parameter settings. 
*/ public class SolrPluginUtils { /** * Map containing all the possible purposes codes of a request as key and * the corresponding readable purpose as value */ private static final Map<Integer, String> purposes; static { Map<Integer, String> map = new TreeMap<>(); map.put(ShardRequest.PURPOSE_PRIVATE, "PRIVATE"); map.put(ShardRequest.PURPOSE_GET_TOP_IDS, "GET_TOP_IDS"); map.put(ShardRequest.PURPOSE_REFINE_TOP_IDS, "REFINE_TOP_IDS"); map.put(ShardRequest.PURPOSE_GET_FACETS, "GET_FACETS"); map.put(ShardRequest.PURPOSE_REFINE_FACETS, "REFINE_FACETS"); map.put(ShardRequest.PURPOSE_GET_FIELDS, "GET_FIELDS"); map.put(ShardRequest.PURPOSE_GET_HIGHLIGHTS, "GET_HIGHLIGHTS"); map.put(ShardRequest.PURPOSE_GET_DEBUG, "GET_DEBUG"); map.put(ShardRequest.PURPOSE_GET_STATS, "GET_STATS"); map.put(ShardRequest.PURPOSE_GET_TERMS, "GET_TERMS"); map.put(ShardRequest.PURPOSE_GET_TOP_GROUPS, "GET_TOP_GROUPS"); map.put(ShardRequest.PURPOSE_GET_MLT_RESULTS, "GET_MLT_RESULTS"); map.put(ShardRequest.PURPOSE_REFINE_PIVOT_FACETS, "REFINE_PIVOT_FACETS"); map.put(ShardRequest.PURPOSE_SET_TERM_STATS, "SET_TERM_STATS"); map.put(ShardRequest.PURPOSE_GET_TERM_STATS, "GET_TERM_STATS"); purposes = Collections.unmodifiableMap(map); } private static final MapSolrParams maskUseParams = new MapSolrParams(ImmutableMap.<String, String>builder() .put(RequestParams.USEPARAM, "") .build()); /** * Set default-ish params on a SolrQueryRequest. * * RequestHandlers can use this method to ensure their defaults and * overrides are visible to other components such as the response writer * * @param req The request whose params we are interested i * @param defaults values to be used if no values are specified in the request params * @param appends values to be appended to those from the request (or defaults) when dealing with multi-val params, or treated as another layer of defaults for singl-val params. * @param invariants values which will be used instead of any request, or default values, regardless of context. 
*/ public static void setDefaults(SolrQueryRequest req, SolrParams defaults, SolrParams appends, SolrParams invariants) { setDefaults(null, req, defaults, appends, invariants); } public static void setDefaults(SolrRequestHandler handler, SolrQueryRequest req, SolrParams defaults, SolrParams appends, SolrParams invariants) { List<String> paramNames = null; String useParams = req.getParams().get(RequestParams.USEPARAM); if (useParams != null && !useParams.isEmpty()) { // now that we have expanded the request macro useParams with the actual values // it makes no sense to keep it visible now on. // distrib request sends all params to the nodes down the line and // if it sends the useParams to other nodes , they will expand them as well. // which is not desirable. At the same time, because we send the useParams // value as an empty string to other nodes we get the desired benefit of // overriding the useParams specified in the requestHandler directly req.setParams(SolrParams.wrapDefaults(maskUseParams, req.getParams())); } if (useParams == null) useParams = (String) req.getContext().get(RequestParams.USEPARAM); if (useParams != null && !useParams.isEmpty()) paramNames = StrUtils.splitSmart(useParams, ','); if (paramNames != null) { for (String name : paramNames) { SolrParams requestParams = req.getCore().getSolrConfig().getRequestParams().getParams(name); if (requestParams != null) { defaults = SolrParams.wrapDefaults(requestParams, defaults); } } } RequestUtil.processParams(handler, req, defaults, appends, invariants); } /** * SolrIndexSearch.numDocs(Query,Query) freaks out if the filtering * query is null, so we use this workarround. */ public static int numDocs(SolrIndexSearcher s, Query q, Query f) throws IOException { return (null == f) ? s.getDocSet(q).size() : s.numDocs(q,f); } private final static Pattern splitList=Pattern.compile(",| "); /** Split a value that may contain a comma, space of bar separated list. 
*/ public static String[] split(String value){ return splitList.split(value.trim(), 0); } /** * Pre-fetch documents into the index searcher's document cache. * * This is an entirely optional step which you might want to perform for * the following reasons: * * <ul> * <li>Locates the document-retrieval costs in one spot, which helps * detailed performance measurement</li> * * <li>Determines a priori what fields will be needed to be fetched by * various subtasks, like response writing and highlighting. This * minimizes the chance that many needed fields will be loaded lazily. * (it is more efficient to load all the field we require normally).</li> * </ul> * * If lazy field loading is disabled, this method does nothing. */ public static void optimizePreFetchDocs(ResponseBuilder rb, DocList docs, Query query, SolrQueryRequest req, SolrQueryResponse res) throws IOException { SolrIndexSearcher searcher = req.getSearcher(); if(!searcher.enableLazyFieldLoading) { // nothing to do return; } ReturnFields returnFields = res.getReturnFields(); if(returnFields.getLuceneFieldNames() != null) { Set<String> fieldFilter = returnFields.getLuceneFieldNames(); if (rb.doHighlights) { // copy return fields list fieldFilter = new HashSet<>(fieldFilter); // add highlight fields SolrHighlighter highlighter = HighlightComponent.getHighlighter(req.getCore()); for (String field: highlighter.getHighlightFields(query, req, null)) fieldFilter.add(field); // fetch unique key if one exists. 
SchemaField keyField = searcher.getSchema().getUniqueKeyField(); if(null != keyField) fieldFilter.add(keyField.getName()); } // get documents DocIterator iter = docs.iterator(); for (int i=0; i<docs.size(); i++) { searcher.doc(iter.nextDoc(), fieldFilter); } } } public static Set<String> getDebugInterests(String[] params, ResponseBuilder rb){ Set<String> debugInterests = new HashSet<>(); if (params != null) { for (int i = 0; i < params.length; i++) { if (params[i].equalsIgnoreCase("all") || params[i].equalsIgnoreCase("true")){ rb.setDebug(true); break; //still might add others } else if (params[i].equals(CommonParams.TIMING)){ rb.setDebugTimings(true); } else if (params[i].equals(CommonParams.QUERY)){ rb.setDebugQuery(true); } else if (params[i].equals(CommonParams.RESULTS)){ rb.setDebugResults(true); } else if (params[i].equals(CommonParams.TRACK)){ rb.setDebugTrack(true); } } } return debugInterests; } /** * <p> * Returns a NamedList containing many "standard" pieces of debugging * information. * </p> * * <ul> * <li>rawquerystring - the 'q' param exactly as specified by the client * </li> * <li>querystring - the 'q' param after any preprocessing done by the plugin * </li> * <li>parsedquery - the main query executed formated by the Solr * QueryParsing utils class (which knows about field types) * </li> * <li>parsedquery_toString - the main query executed formatted by its * own toString method (in case it has internal state Solr * doesn't know about) * </li> * <li>explain - the list of score explanations for each document in * results against query. * </li> * <li>otherQuery - the query string specified in 'explainOther' query param. 
* </li> * <li>explainOther - the list of score explanations for each document in * results against 'otherQuery' * </li> * </ul> * * @param req the request we are dealing with * @param userQuery the users query as a string, after any basic * preprocessing has been done * @param query the query built from the userQuery * (and perhaps other clauses) that identifies the main * result set of the response. * @param results the main result set of the response * @return The debug info * @throws java.io.IOException if there was an IO error */ public static NamedList doStandardDebug( SolrQueryRequest req, String userQuery, Query query, DocList results, boolean dbgQuery, boolean dbgResults) throws IOException { NamedList dbg = new SimpleOrderedMap(); doStandardQueryDebug(req, userQuery, query, dbgQuery, dbg); doStandardResultsDebug(req, query, results, dbgResults, dbg); return dbg; } public static void doStandardQueryDebug( SolrQueryRequest req, String userQuery, Query query, boolean dbgQuery, NamedList dbg) { if (dbgQuery) { /* userQuery may have been pre-processed .. expose that */ dbg.add("rawquerystring", req.getParams().get(CommonParams.Q)); dbg.add("querystring", userQuery); /* QueryParsing.toString isn't perfect, use it to see converted * values, use regular toString to see any attributes of the * underlying Query it may have missed. */ dbg.add("parsedquery", QueryParsing.toString(query, req.getSchema())); dbg.add("parsedquery_toString", query.toString()); } } public static void doStandardResultsDebug( SolrQueryRequest req, Query query, DocList results, boolean dbgResults, NamedList dbg) throws IOException { if (dbgResults) { SolrIndexSearcher searcher = req.getSearcher(); IndexSchema schema = searcher.getSchema(); boolean explainStruct = req.getParams().getBool(CommonParams.EXPLAIN_STRUCT, false); if (results != null) { NamedList<Explanation> explain = getExplanations(query, results, searcher, schema); dbg.add("explain", explainStruct ? 
explanationsToNamedLists(explain) : explanationsToStrings(explain)); } String otherQueryS = req.getParams().get(CommonParams.EXPLAIN_OTHER); if (otherQueryS != null && otherQueryS.length() > 0) { DocList otherResults = doSimpleQuery(otherQueryS, req, 0, 10); dbg.add("otherQuery", otherQueryS); NamedList<Explanation> explainO = getExplanations(query, otherResults, searcher, schema); dbg.add("explainOther", explainStruct ? explanationsToNamedLists(explainO) : explanationsToStrings(explainO)); } } } public static NamedList<Object> explanationToNamedList(Explanation e) { NamedList<Object> out = new SimpleOrderedMap<>(); out.add("match", e.isMatch()); out.add("value", e.getValue()); out.add("description", e.getDescription()); Explanation[] details = e.getDetails(); // short circut out if (0 == details.length) return out; List<NamedList<Object>> kids = new ArrayList<>(details.length); for (Explanation d : details) { kids.add(explanationToNamedList(d)); } out.add("details", kids); return out; } public static NamedList<NamedList<Object>> explanationsToNamedLists (NamedList<Explanation> explanations) { NamedList<NamedList<Object>> out = new SimpleOrderedMap<>(); for (Map.Entry<String,Explanation> entry : explanations) { out.add(entry.getKey(), explanationToNamedList(entry.getValue())); } return out; } /** * Generates an NamedList of Explanations for each item in a list of docs. 
* * @param query The Query you want explanations in the context of * @param docs The Documents you want explained relative that query */ public static NamedList<Explanation> getExplanations (Query query, DocList docs, SolrIndexSearcher searcher, IndexSchema schema) throws IOException { NamedList<Explanation> explainList = new SimpleOrderedMap<>(); DocIterator iterator = docs.iterator(); for (int i=0; i<docs.size(); i++) { int id = iterator.nextDoc(); Document doc = searcher.doc(id); String strid = schema.printableUniqueKey(doc); explainList.add(strid, searcher.explain(query, id) ); } return explainList; } private static NamedList<String> explanationsToStrings (NamedList<Explanation> explanations) { NamedList<String> out = new SimpleOrderedMap<>(); for (Map.Entry<String,Explanation> entry : explanations) { out.add(entry.getKey(), "\n"+entry.getValue().toString()); } return out; } /** * Executes a basic query */ public static DocList doSimpleQuery(String sreq, SolrQueryRequest req, int start, int limit) throws IOException { List<String> commands = StrUtils.splitSmart(sreq,';'); String qs = commands.size() >= 1 ? commands.get(0) : ""; try { Query query = QParser.getParser(qs, null, req).getQuery(); // If the first non-query, non-filter command is a simple sort on an indexed field, then // we can use the Lucene sort ability. 
Sort sort = null; if (commands.size() >= 2) { sort = SortSpecParsing.parseSortSpec(commands.get(1), req).getSort(); } DocList results = req.getSearcher().getDocList(query,(DocSet)null, sort, start, limit); return results; } catch (SyntaxError e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing query: " + qs); } } private static final Pattern whitespacePattern = Pattern.compile("\\s+"); private static final Pattern caratPattern = Pattern.compile("\\^"); private static final Pattern tildePattern = Pattern.compile("[~]"); /** * Given a string containing fieldNames and boost info, * converts it to a Map from field name to boost info. * * <p> * Doesn't care if boost info is negative, you're on your own. * </p> * <p> * Doesn't care if boost info is missing, again: you're on your own. * </p> * * @param in a String like "fieldOne^2.3 fieldTwo fieldThree^-0.4" * @return Map of fieldOne =&gt; 2.3, fieldTwo =&gt; null, fieldThree =&gt; -0.4 */ public static Map<String,Float> parseFieldBoosts(String in) { return parseFieldBoosts(new String[]{in}); } /** * Like <code>parseFieldBoosts(String)</code>, but parses all the strings * in the provided array (which may be null). * * @param fieldLists an array of Strings eg. <code>{"fieldOne^2.3", "fieldTwo", fieldThree^-0.4}</code> * @return Map of fieldOne =&gt; 2.3, fieldTwo =&gt; null, fieldThree =&gt; -0.4 */ public static Map<String,Float> parseFieldBoosts(String[] fieldLists) { if (null == fieldLists || 0 == fieldLists.length) { return new HashMap<>(); } Map<String, Float> out = new HashMap<>(7); for (String in : fieldLists) { if (null == in) { continue; } in = in.trim(); if(in.length()==0) { continue; } String[] bb = whitespacePattern.split(in); for (String s : bb) { String[] bbb = caratPattern.split(s); out.put(bbb[0], 1 == bbb.length ? null : Float.valueOf(bbb[1])); } } return out; } /** /** * Like {@link #parseFieldBoosts}, but allows for an optional slop value prefixed by "~". 
* * @param fieldLists - an array of Strings eg. <code>{"fieldOne^2.3", "fieldTwo", fieldThree~5^-0.4}</code> * @param wordGrams - (0=all words, 2,3 = shingle size) * @param defaultSlop - the default slop for this param * @return - FieldParams containing the fieldname,boost,slop,and shingle size */ public static List<FieldParams> parseFieldBoostsAndSlop(String[] fieldLists,int wordGrams,int defaultSlop) { if (null == fieldLists || 0 == fieldLists.length) { return new ArrayList<>(); } List<FieldParams> out = new ArrayList<>(); for (String in : fieldLists) { if (null == in) { continue; } in = in.trim(); if(in.length()==0) { continue; } String[] fieldConfigs = whitespacePattern.split(in); for (String s : fieldConfigs) { String[] fieldAndSlopVsBoost = caratPattern.split(s); String[] fieldVsSlop = tildePattern.split(fieldAndSlopVsBoost[0]); String field = fieldVsSlop[0]; int slop = (2 == fieldVsSlop.length) ? Integer.valueOf(fieldVsSlop[1]) : defaultSlop; Float boost = (1 == fieldAndSlopVsBoost.length) ? 1 : Float.valueOf(fieldAndSlopVsBoost[1]); FieldParams fp = new FieldParams(field,wordGrams,slop,boost); out.add(fp); } } return out; } /** * Checks the number of optional clauses in the query, and compares it * with the specification string to determine the proper value to use. 
* * <p> * Details about the specification format can be found * <a href="doc-files/min-should-match.html">here</a> * </p> * * <p>A few important notes...</p> * <ul> * <li> * If the calculations based on the specification determine that no * optional clauses are needed, BooleanQuerysetMinMumberShouldMatch * will never be called, but the usual rules about BooleanQueries * still apply at search time (a BooleanQuery containing no required * clauses must still match at least one optional clause) * <li> * <li> * No matter what number the calculation arrives at, * BooleanQuery.setMinShouldMatch() will never be called with a * value greater then the number of optional clauses (or less then 1) * </li> * </ul> * * <p>:TODO: should optimize the case where number is same * as clauses to just make them all "required" * </p> */ public static void setMinShouldMatch(BooleanQuery.Builder q, String spec) { int optionalClauses = 0; for (BooleanClause c : q.build().clauses()) { if (c.getOccur() == Occur.SHOULD) { optionalClauses++; } } int msm = calculateMinShouldMatch(optionalClauses, spec); if (0 < msm) { q.setMinimumNumberShouldMatch(msm); } } public static BooleanQuery setMinShouldMatch(BooleanQuery q, String spec) { BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.setDisableCoord(q.isCoordDisabled()); for (BooleanClause clause : q) { builder.add(clause); } setMinShouldMatch(builder, spec); return builder.build(); } // private static Pattern spaceAroundLessThanPattern = Pattern.compile("\\s*<\\s*"); private static Pattern spaceAroundLessThanPattern = Pattern.compile("(\\s+<\\s*)|(\\s*<\\s+)"); private static Pattern spacePattern = Pattern.compile(" "); private static Pattern lessThanPattern = Pattern.compile("<"); /** * helper exposed for UnitTests * @see #setMinShouldMatch */ static int calculateMinShouldMatch(int optionalClauseCount, String spec) { int result = optionalClauseCount; spec = spec.trim(); if (-1 < spec.indexOf("<")) { /* we have conditional spec(s) 
*/ spec = spaceAroundLessThanPattern.matcher(spec).replaceAll("<"); for (String s : spacePattern.split(spec)) { String[] parts = lessThanPattern.split(s,0); int upperBound = Integer.parseInt(parts[0]); if (optionalClauseCount <= upperBound) { return result; } else { result = calculateMinShouldMatch (optionalClauseCount, parts[1]); } } return result; } /* otherwise, simple expresion */ if (-1 < spec.indexOf('%')) { /* percentage - assume the % was the last char. If not, let Integer.parseInt fail. */ spec = spec.substring(0,spec.length()-1); int percent = Integer.parseInt(spec); float calc = (result * percent) * (1/100f); result = calc < 0 ? result + (int)calc : (int)calc; } else { int calc = Integer.parseInt(spec); result = calc < 0 ? result + calc : calc; } return (optionalClauseCount < result ? optionalClauseCount : (result < 0 ? 0 : result)); } /** * Recursively walks the "from" query pulling out sub-queries and * adding them to the "to" query. * * <p> * Boosts are multiplied as needed. Sub-BooleanQueryies which are not * optional will not be flattened. From will be mangled durring the walk, * so do not attempt to reuse it. 
* </p> */ public static void flattenBooleanQuery(BooleanQuery.Builder to, BooleanQuery from) { flattenBooleanQuery(to, from, 1f); } private static void flattenBooleanQuery(BooleanQuery.Builder to, BooleanQuery from, float fromBoost) { for (BooleanClause clause : from.clauses()) { Query cq = clause.getQuery(); float boost = fromBoost; while (cq instanceof BoostQuery) { BoostQuery bq = (BoostQuery) cq; cq = bq.getQuery(); boost *= bq.getBoost(); } if (cq instanceof BooleanQuery && !clause.isRequired() && !clause.isProhibited()) { /* we can recurse */ flattenBooleanQuery(to, (BooleanQuery)cq, boost); } else { to.add(clause); } } } /** * Escapes all special characters except '"', '-', and '+' */ public static CharSequence partialEscape(CharSequence s) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); if (c == '\\' || c == '!' || c == '(' || c == ')' || c == ':' || c == '^' || c == '[' || c == ']' || c == '/' || c == '{' || c == '}' || c == '~' || c == '*' || c == '?' ) { sb.append('\\'); } sb.append(c); } return sb; } // Pattern to detect dangling operator(s) at end of query // \s+[-+\s]+$ private final static Pattern DANGLING_OP_PATTERN = Pattern.compile( "\\s+[-+\\s]+$" ); // Pattern to detect consecutive + and/or - operators // \s+[+-](?:\s*[+-]+)+ private final static Pattern CONSECUTIVE_OP_PATTERN = Pattern.compile( "\\s+[+-](?:\\s*[+-]+)+" ); protected static final String UNKNOWN_VALUE = "Unknown"; /** * Strips operators that are used illegally, otherwise returns its * input. Some examples of illegal user queries are: "chocolate +- * chip", "chocolate - - chip", and "chocolate chip -". 
*/ public static CharSequence stripIllegalOperators(CharSequence s) { String temp = CONSECUTIVE_OP_PATTERN.matcher( s ).replaceAll( " " ); return DANGLING_OP_PATTERN.matcher( temp ).replaceAll( "" ); } /** * Returns its input if there is an even (ie: balanced) number of * '"' characters -- otherwise returns a String in which all '"' * characters are striped out. */ public static CharSequence stripUnbalancedQuotes(CharSequence s) { int count = 0; for (int i = 0; i < s.length(); i++) { if (s.charAt(i) == '\"') { count++; } } if (0 == (count & 1)) { return s; } return s.toString().replace("\"",""); } /** * Adds to {@code dest} all the not-null elements of {@code entries} that have non-null names * * @param entries The array of entries to be added to the {@link NamedList} {@code dest} * @param dest The {@link NamedList} instance where the not-null elements of entries are added * @return Returns The {@code dest} input object */ public static <T> NamedList<T> removeNulls(Map.Entry<String, T>[] entries, NamedList<T> dest) { for (int i=0; i<entries.length; i++) { Map.Entry<String, T> entry = entries[i]; if (entry != null) { String key = entry.getKey(); if (key != null) { dest.add(key, entry.getValue()); } } } return dest; } /** * A subclass of SolrQueryParser that supports aliasing fields for * constructing DisjunctionMaxQueries. */ public static class DisjunctionMaxQueryParser extends SolrQueryParser { /** A simple container for storing alias info * @see #aliases */ protected static class Alias { public float tie; public Map<String,Float> fields; } /** * Where we store a map from field name we expect to see in our query * string, to Alias object containing the fields to use in our * DisjunctionMaxQuery and the tiebreaker to use. 
*/ protected Map<String,Alias> aliases = new HashMap<>(3); public DisjunctionMaxQueryParser(QParser qp, String defaultField) { super(qp,defaultField); // don't trust that our parent class won't ever change its default setDefaultOperator(QueryParser.Operator.OR); } /** * Add an alias to this query parser. * * @param field the field name that should trigger alias mapping * @param fieldBoosts the mapping from fieldname to boost value that * should be used to build up the clauses of the * DisjunctionMaxQuery. * @param tiebreaker to the tiebreaker to be used in the * DisjunctionMaxQuery * @see SolrPluginUtils#parseFieldBoosts */ public void addAlias(String field, float tiebreaker, Map<String,Float> fieldBoosts) { Alias a = new Alias(); a.tie = tiebreaker; a.fields = fieldBoosts; aliases.put(field, a); } /** * Delegates to the super class unless the field has been specified * as an alias -- in which case we recurse on each of * the aliased fields, and the results are composed into a * DisjunctionMaxQuery. (so yes: aliases which point at other * aliases should work) */ @Override protected Query getFieldQuery(String field, String queryText, boolean quoted) throws SyntaxError { if (aliases.containsKey(field)) { Alias a = aliases.get(field); List<Query> disjuncts = new ArrayList<>(); for (String f : a.fields.keySet()) { Query sub = getFieldQuery(f,queryText,quoted); if (null != sub) { if (null != a.fields.get(f)) { sub = new BoostQuery(sub, a.fields.get(f)); } disjuncts.add(sub); } } return disjuncts.isEmpty() ? null : new DisjunctionMaxQuery(disjuncts, a.tie); } else { try { return super.getFieldQuery(field, queryText, quoted); } catch (Exception e) { return null; } } } } /** * Determines the correct Sort based on the request parameter "sort" * * @return null if no sort is specified. 
*/ public static Sort getSort(SolrQueryRequest req) { String sort = req.getParams().get(CommonParams.SORT); if (null == sort || sort.equals("")) { return null; } SolrException sortE = null; Sort ss = null; try { ss = SortSpecParsing.parseSortSpec(sort, req).getSort(); } catch (SolrException e) { sortE = e; } if ((null == ss) || (null != sortE)) { /* we definitely had some sort of sort string from the user, * but no SortSpec came out of it */ SolrCore.log.warn("Invalid sort \""+sort+"\" was specified, ignoring", sortE); return null; } return ss; } /** Turns an array of query strings into a List of Query objects. * * @return null if no queries are generated */ public static List<Query> parseQueryStrings(SolrQueryRequest req, String[] queries) throws SyntaxError { if (null == queries || 0 == queries.length) return null; List<Query> out = new ArrayList<>(queries.length); for (String q : queries) { if (null != q && 0 != q.trim().length()) { out.add(QParser.getParser(q, null, req).getQuery()); } } return out; } /** * A CacheRegenerator that can be used whenever the items in the cache * are not dependant on the current searcher. * * <p> * Flat out copies the oldKey=&gt;oldVal pair into the newCache * </p> */ public static class IdentityRegenerator implements CacheRegenerator { @Override public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, Object oldKey, Object oldVal) throws IOException { newCache.put(oldKey,oldVal); return true; } } /** * Convert a DocList to a SolrDocumentList * * The optional param "ids" is populated with the lucene document id * for each SolrDocument. 
* * @param docs The {@link org.apache.solr.search.DocList} to convert * @param searcher The {@link org.apache.solr.search.SolrIndexSearcher} to use to load the docs from the Lucene index * @param fields The names of the Fields to load * @param ids A map to store the ids of the docs * @return The new {@link org.apache.solr.common.SolrDocumentList} containing all the loaded docs * @throws java.io.IOException if there was a problem loading the docs * @since solr 1.4 */ public static SolrDocumentList docListToSolrDocumentList( DocList docs, SolrIndexSearcher searcher, Set<String> fields, Map<SolrDocument, Integer> ids ) throws IOException { IndexSchema schema = searcher.getSchema(); SolrDocumentList list = new SolrDocumentList(); list.setNumFound(docs.matches()); list.setMaxScore(docs.maxScore()); list.setStart(docs.offset()); DocIterator dit = docs.iterator(); while (dit.hasNext()) { int docid = dit.nextDoc(); Document luceneDoc = searcher.doc(docid, fields); SolrDocument doc = new SolrDocument(); for( IndexableField field : luceneDoc) { if (null == fields || fields.contains(field.name())) { SchemaField sf = schema.getField( field.name() ); doc.addField( field.name(), sf.getType().toObject( field ) ); } } if (docs.hasScores() && (null == fields || fields.contains("score"))) { doc.addField("score", dit.score()); } list.add( doc ); if( ids != null ) { ids.put( doc, new Integer(docid) ); } } return list; } public static void invokeSetters(Object bean, NamedList initArgs) { if (initArgs == null) return; Class clazz = bean.getClass(); Method[] methods = clazz.getMethods(); Iterator<Map.Entry<String, Object>> iterator = initArgs.iterator(); while (iterator.hasNext()) { Map.Entry<String, Object> entry = iterator.next(); String key = entry.getKey(); String setterName = "set" + String.valueOf(Character.toUpperCase(key.charAt(0))) + key.substring(1); Method method = null; try { for (Method m : methods) { if (m.getName().equals(setterName) && m.getParameterTypes().length == 1) { 
method = m; break; } } if (method == null) { throw new RuntimeException("no setter corrresponding to '" + key + "' in " + clazz.getName()); } Class pClazz = method.getParameterTypes()[0]; Object val = entry.getValue(); method.invoke(bean, val); } catch (InvocationTargetException | IllegalAccessException e1) { throw new RuntimeException("Error invoking setter " + setterName + " on class : " + clazz.getName(), e1); } } } /** * Given the integer purpose of a request generates a readable value corresponding * the request purposes (there can be more than one on a single request). If * there is a purpose parameter present that's not known this method will * return {@value #UNKNOWN_VALUE} * @param reqPurpose Numeric request purpose * @return a comma separated list of purposes or {@value #UNKNOWN_VALUE} */ public static String getRequestPurpose(Integer reqPurpose) { if (reqPurpose != null) { StringBuilder builder = new StringBuilder(); for (Map.Entry<Integer, String>entry : purposes.entrySet()) { if ((reqPurpose & entry.getKey()) != 0) { builder.append(entry.getValue() + ","); } } if (builder.length() == 0) { return UNKNOWN_VALUE; } builder.setLength(builder.length() - 1); return builder.toString(); } return UNKNOWN_VALUE; } }
apache-2.0
openwide-java/owsi-core-parent
owsi-core/owsi-core-components/owsi-core-component-jpa-security/src/test/java/fr/openwide/core/test/jpa/security/config/spring/JpaSecurityTestApplicationPropertyConfig.java
616
package fr.openwide.core.test.jpa.security.config.spring;

import fr.openwide.core.jpa.more.business.parameter.dao.ParameterDaoImpl;
import fr.openwide.core.spring.config.spring.AbstractApplicationPropertyConfig;
import fr.openwide.core.spring.property.dao.IMutablePropertyDao;
import fr.openwide.core.spring.property.service.IPropertyRegistry;

/**
 * Application-property configuration used only by the JPA security test context.
 * <p>
 * Registers no properties of its own and backs mutable properties with the
 * parameter-table DAO implementation.
 */
public class JpaSecurityTestApplicationPropertyConfig extends AbstractApplicationPropertyConfig {

	@Override
	protected void register(IPropertyRegistry registry) {
		// Intentionally empty: the security tests do not declare any properties.
	}

	@Override
	public IMutablePropertyDao mutablePropertyDao() {
		// Mutable property storage is delegated to the parameter DAO.
		return new ParameterDaoImpl();
	}

}
apache-2.0
spring-projects/spring-ldap
core/src/test/java/org/springframework/ldap/filter/NotFilterTest.java
1683
/*
 * Copyright 2005-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.ldap.filter;

import com.gargoylesoftware.base.testing.EqualsTester;
import org.junit.Test;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for the NotFilter class.
 *
 * @author Mattias Hellborg Arthursson
 */
public class NotFilterTest {

	@Test
	public void testConstructor() {
		// Wrapping (a=b) in a NotFilter must yield the LDAP negation syntax (!(...)).
		EqualsFilter filter = new EqualsFilter("a", "b");
		NotFilter notFilter = new NotFilter(filter);
		assertThat(notFilter.encode()).isEqualTo("(!(a=b))");
	}

	@Test
	public void testEquals() {
		EqualsFilter filter = new EqualsFilter("a", "b");
		NotFilter originalObject = new NotFilter(filter);
		NotFilter identicalObject = new NotFilter(filter);
		NotFilter differentObject = new NotFilter(new EqualsFilter("a", "a"));
		// Anonymous subclass: lets EqualsTester verify that equals() distinguishes
		// different runtime types.
		NotFilter subclassObject = new NotFilter(filter) {
		};

		// NOTE(review): gsbase EqualsTester appears to run its equals/hashCode
		// contract checks inside the constructor, which is why the instance is
		// deliberately discarded here.
		new EqualsTester(originalObject, identicalObject, differentObject, subclassObject);
	}
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-pinpointemail/src/main/java/com/amazonaws/services/pinpointemail/model/transform/SendEmailRequestMarshaller.java
4054
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.pinpointemail.model.transform;

import java.util.List;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.pinpointemail.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * SendEmailRequestMarshaller
 * <p>
 * NOTE: generated by the AWS SDK code generator — regenerate rather than hand-edit.
 * Maps each {@code SendEmailRequest} member onto its JSON payload field.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class SendEmailRequestMarshaller {

    // One binding per request member: each pairs a marshalling type with the
    // JSON field name it is written to in the request payload.
    private static final MarshallingInfo<String> FROMEMAILADDRESS_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("FromEmailAddress").build();
    private static final MarshallingInfo<StructuredPojo> DESTINATION_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Destination").build();
    private static final MarshallingInfo<List> REPLYTOADDRESSES_BINDING = MarshallingInfo.builder(MarshallingType.LIST)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ReplyToAddresses").build();
    private static final MarshallingInfo<String> FEEDBACKFORWARDINGEMAILADDRESS_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("FeedbackForwardingEmailAddress").build();
    private static final MarshallingInfo<StructuredPojo> CONTENT_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Content").build();
    private static final MarshallingInfo<List> EMAILTAGS_BINDING = MarshallingInfo.builder(MarshallingType.LIST).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("EmailTags").build();
    private static final MarshallingInfo<String> CONFIGURATIONSETNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ConfigurationSetName").build();

    // Stateless, so a single shared instance suffices.
    private static final SendEmailRequestMarshaller instance = new SendEmailRequestMarshaller();

    public static SendEmailRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(SendEmailRequest sendEmailRequest, ProtocolMarshaller protocolMarshaller) {

        if (sendEmailRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Emit each member through its binding; null members are handled by the
            // protocol marshaller itself.
            protocolMarshaller.marshall(sendEmailRequest.getFromEmailAddress(), FROMEMAILADDRESS_BINDING);
            protocolMarshaller.marshall(sendEmailRequest.getDestination(), DESTINATION_BINDING);
            protocolMarshaller.marshall(sendEmailRequest.getReplyToAddresses(), REPLYTOADDRESSES_BINDING);
            protocolMarshaller.marshall(sendEmailRequest.getFeedbackForwardingEmailAddress(), FEEDBACKFORWARDINGEMAILADDRESS_BINDING);
            protocolMarshaller.marshall(sendEmailRequest.getContent(), CONTENT_BINDING);
            protocolMarshaller.marshall(sendEmailRequest.getEmailTags(), EMAILTAGS_BINDING);
            protocolMarshaller.marshall(sendEmailRequest.getConfigurationSetName(), CONFIGURATIONSETNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-lightsail/src/main/java/com/amazonaws/services/lightsail/model/transform/CreateLoadBalancerRequestProtocolMarshaller.java
2746
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.lightsail.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.lightsail.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * CreateLoadBalancerRequest Marshaller
 * <p>
 * NOTE: generated by the AWS SDK code generator — regenerate rather than hand-edit.
 * Produces the HTTP-level request (URI, method, operation id) and delegates the
 * payload members to {@link CreateLoadBalancerRequestMarshaller}.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class CreateLoadBalancerRequestProtocolMarshaller implements Marshaller<Request<CreateLoadBalancerRequest>, CreateLoadBalancerRequest> {

    // Operation-level wire metadata: JSON protocol, POST to "/", with the
    // Lightsail operation identifier carried in the payload header.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("Lightsail_20161128.CreateLoadBalancer").serviceName("AmazonLightsail").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public CreateLoadBalancerRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    public Request<CreateLoadBalancerRequest> marshall(CreateLoadBalancerRequest createLoadBalancerRequest) {

        if (createLoadBalancerRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Build a protocol marshaller for this operation, write the members,
            // and finish to obtain the complete HTTP request.
            final ProtocolRequestMarshaller<CreateLoadBalancerRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    createLoadBalancerRequest);

            protocolMarshaller.startMarshalling();
            CreateLoadBalancerRequestMarshaller.getInstance().marshall(createLoadBalancerRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-inspector2/src/main/java/com/amazonaws/services/inspector2/model/transform/ListCoverageStatisticsRequestMarshaller.java
2756
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.inspector2.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.inspector2.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * ListCoverageStatisticsRequestMarshaller
 * <p>
 * NOTE: generated by the AWS SDK code generator — regenerate rather than hand-edit.
 * Maps each {@code ListCoverageStatisticsRequest} member onto its JSON payload field.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListCoverageStatisticsRequestMarshaller {

    // One binding per request member: marshalling type plus the JSON field name
    // used in the request payload.
    private static final MarshallingInfo<StructuredPojo> FILTERCRITERIA_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("filterCriteria").build();
    private static final MarshallingInfo<String> GROUPBY_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("groupBy").build();
    private static final MarshallingInfo<String> NEXTTOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("nextToken").build();

    // Stateless, so a single shared instance suffices.
    private static final ListCoverageStatisticsRequestMarshaller instance = new ListCoverageStatisticsRequestMarshaller();

    public static ListCoverageStatisticsRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(ListCoverageStatisticsRequest listCoverageStatisticsRequest, ProtocolMarshaller protocolMarshaller) {

        if (listCoverageStatisticsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(listCoverageStatisticsRequest.getFilterCriteria(), FILTERCRITERIA_BINDING);
            protocolMarshaller.marshall(listCoverageStatisticsRequest.getGroupBy(), GROUPBY_BINDING);
            protocolMarshaller.marshall(listCoverageStatisticsRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
Intera/urlaubsverwaltung
src/main/java/org/synyx/urlaubsverwaltung/restapi/vacationoverview/VacationOverviewController.java
1744
package org.synyx.urlaubsverwaltung.restapi.vacationoverview;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.synyx.urlaubsverwaltung.core.holiday.VacationOverview;
import org.synyx.urlaubsverwaltung.restapi.ResponseWrapper;

import java.util.List;

/**
 * REST endpoint exposing vacation-overview metadata for every member of a
 * department in a given month and year.
 */
@Api("VacationOverview: Get Vacation-Overview Metadata")
@RestController("restApiVacationOverview")
@RequestMapping("/api")
public class VacationOverviewController {

    private final VacationOverviewService vacationOverviewService;

    @Autowired
    VacationOverviewController(VacationOverviewService vacationOverviewService) {
        this.vacationOverviewService = vacationOverviewService;
    }

    /**
     * Looks up the vacation overviews for the selected department/year/month
     * and wraps them in the API response envelope.
     */
    @ApiOperation(
            value = "Get Vacation-Overview Metadata",
            notes = "Get Vacation-Overview metadata for all members of a department")
    @GetMapping("/vacationoverview")
    public ResponseWrapper<VacationOverviewResponse> getHolydayOverview(
            @RequestParam("selectedDepartment") String selectedDepartment,
            @RequestParam("selectedYear") Integer selectedYear,
            @RequestParam("selectedMonth") Integer selectedMonth) {

        List<VacationOverview> overviews =
                vacationOverviewService.getVacationOverviews(selectedDepartment, selectedYear, selectedMonth);

        return new ResponseWrapper<>(new VacationOverviewResponse(overviews));
    }
}
apache-2.0
nocuous/genworld
src/main/java/com/nocuous/genworld/GenWorldCommand.java
8678
package com.nocuous.genworld;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import net.minecraft.command.CommandBase;
import net.minecraft.command.ICommandSender;
import net.minecraft.util.ChunkCoordinates;
import net.minecraft.world.WorldServer;
import net.minecraftforge.common.DimensionManager;

/**
 * The {@code /genworld} command: pre-generates chunks of a world, either as a
 * square/circle around the spawn point or as a user-specified box/circle in
 * block coordinates. Run {@code /genworld} with no arguments for usage help.
 * <p>
 * NOTE(review): {@code output} is shared instance state set per invocation and
 * nulled afterwards — presumably commands never run concurrently; verify
 * against the mod's command dispatch model.
 */
public class GenWorldCommand extends CommandBase {

	// Initialized eagerly so close() cannot throw NPE when no loader threads
	// were ever started (the original left this field null).
	private transient List<Thread> worldLoaders = new ArrayList<Thread>();

	// Message sink for the command invocation currently in progress; null when idle.
	SaveChunksProgressUpdate output;

	/** Waits for any outstanding loader threads, then releases the list. */
	public void close() {
		for (Iterator<Thread> thread = worldLoaders.iterator(); thread
				.hasNext();) {
			try {
				thread.next().join();
			} catch (Exception ignored) {
				// Best-effort shutdown: one failed join must not prevent joining the rest.
			}
		}
		worldLoaders.clear();
		worldLoaders = null;
	}

	@Override
	public String getCommandName() {
		return "genworld";
	}

	@Override
	public String getCommandUsage(ICommandSender var1) {
		return "/genworld - use without any parameters for help with the genworld mod";
	}

	/**
	 * Resolves a world by its dimension name or save-folder name
	 * (case-insensitive, surrounding double quotes stripped).
	 *
	 * @return the matching world, or null if no loaded dimension matches
	 */
	private WorldServer getWorld(String worldName) {
		String wanted = worldName.replace("\"", "");
		WorldServer[] worlds = DimensionManager.getWorlds();
		for (int i = 0; i < worlds.length; i++) {
			if (worlds[i].provider.getDimensionName().equalsIgnoreCase(wanted)
					|| worlds[i].provider.getSaveFolder().equalsIgnoreCase(wanted)) {
				return worlds[i];
			}
		}
		return null;
	}

	/** Lists every loaded world (dimension name + save folder) to the sender. */
	private void printWorlds() {
		WorldServer[] worlds_print = DimensionManager.getWorlds();
		for (int i = 0; i < worlds_print.length; i++) {
			printOutput("Found world: "
					+ worlds_print[i].provider.getDimensionName()
					+ " Save Folder: "
					+ worlds_print[i].provider.getSaveFolder());
		}
	}

	/** Pre-generates the axis-aligned block rectangle [xMin,yMin]..[xMax,yMax]. */
	private void box(String worldName, int xMin, int yMin, int xMax, int yMax) {
		WorldServer world = getWorld(worldName);
		if (world == null) {
			printOutput("World not found: " + worldName);
			return;
		}
		printOutput("Pregenerating world: " + world.provider.getDimensionName());
		// Block coordinates -> chunk coordinates (16 blocks per chunk).
		WorldSectorLoader wsl = new WorldSectorLoader(world, xMin / 16,
				yMin / 16, (xMax - xMin) / 16, (yMax - yMin) / 16, false, output);
		// WorldSectorLoader is a Runnable due to previous threading attempts;
		// it is executed inline here.
		wsl.run();
	}

	/** Pre-generates a size x size block square centered on the spawn point. */
	private void spawnSquare(String worldName, int size) {
		WorldServer world = getWorld(worldName);
		if (world == null) {
			printOutput("World not found: " + worldName);
			return;
		}
		printOutput("Pregenerating world: " + world.provider.getDimensionName());
		// Spawn point in block coordinates - DEFINITELY NOT CHUNK COORDINATES.
		ChunkCoordinates spawn = world.getSpawnPoint();
		// Bug fix: the original printed posX for the third (Z) slot.
		printOutput("Spawn coordinates: " + spawn.posX + "/" + spawn.posY + "/" + spawn.posZ);
		int chunkDimension = size / 16;
		int offsetX = (spawn.posX / 16) - (chunkDimension / 2);
		// MC uses Z like bitmap Y
		int offsetY = (spawn.posZ / 16) - (chunkDimension / 2);
		WorldSectorLoader wsl = new WorldSectorLoader(world, offsetX, offsetY,
				chunkDimension, chunkDimension, false, output);
		wsl.run();
	}

	/** Pre-generates a circle of the given block radius centered on the spawn point. */
	private void spawnCircle(String worldName, int radius) {
		WorldServer world = getWorld(worldName);
		if (world == null) {
			printOutput("World not found: " + worldName);
			return;
		}
		printOutput("Pregenerating world: " + world.provider.getDimensionName());
		// Spawn point in block coordinates - DEFINITELY NOT CHUNK COORDINATES.
		ChunkCoordinates spawn = world.getSpawnPoint();
		int size = radius * 2;
		int chunkDimension = size / 16;
		// Bug fix: the original printed posX for the third (Z) slot.
		printOutput("Spawn coordinates: " + spawn.posX + "/" + spawn.posY + "/" + spawn.posZ);
		int offsetX = (spawn.posX / 16) - (chunkDimension / 2);
		// MC uses Z like bitmap Y
		int offsetY = (spawn.posZ / 16) - (chunkDimension / 2);
		WorldSectorLoader wsl = new WorldSectorLoader(world, offsetX, offsetY,
				chunkDimension, chunkDimension, true, output);
		wsl.run();
	}

	/** Pre-generates a circle of the given block radius centered at block (x, y). */
	private void circle(String worldName, int x, int y, int radius) {
		WorldServer world = getWorld(worldName);
		if (world == null) {
			printOutput("World not found: " + worldName);
			return;
		}
		printOutput("Pregenerating world: " + world.provider.getDimensionName());
		int size = radius * 2;
		int chunkDimension = size / 16;
		int offsetX = x - (radius);
		int offsetY = y - (radius);
		WorldSectorLoader wsl = new WorldSectorLoader(world, offsetX / 16,
				offsetY / 16, chunkDimension, chunkDimension, true, output);
		wsl.run();
	}

	/** Sends a line to the current invocation's message sink, if one is active. */
	private void printOutput(String text) {
		if (output != null)
			output.displayProgressMessage(text);
	}

	@Override
	public void processCommand(ICommandSender var1, String[] var2) {
		output = new SaveChunksProgressUpdate(var1);
		if (var2.length < 1) {
			// No subcommand: print usage help and the list of available worlds.
			printOutput("Not Enough Arguments: /genworld {command arguments}");
			printOutput("Commands: ");
			printOutput("SpawnSquare - pregenerates a square around the spawn point.");
			printOutput(" SpawnSquare <world> <size in blocks> - Ex: /genworld SpawnSquare Overworld 1000");
			printOutput("box - pregenerates a user defined rectangle using block coordinates.");
			printOutput(" box <world> <x min> <y min> <x max> <y max> - Ex: /genworld box Overworld -500 -500 500 500");
			printOutput("SpawnCircle - pregenerates a circle around the spawn point.");
			printOutput(" SpawnCircle <world> <radius in blocks> - Ex: /genworld SpawnCircle Overworld 500");
			printOutput("circle - pregenerates a circle at the given coordinates.");
			// Bug fix: this usage line previously said "SpawnCircle" for the "circle" command.
			printOutput(" circle <world> <x> <y> <radius in blocks> - Ex: /genworld circle Overworld 0 0 500");
			printOutput("Available Worlds:");
			printWorlds();
			output = null;
			return;
		}
		if (var2[0].equalsIgnoreCase("spawnsquare")) {
			// Pregen world in a square shape centered around the player start.
			if (var2.length != 3) {
				printOutput("Not Enough Arguments: /genworld SpawnSquare <world> <size>");
				output = null;
				return;
			}
			printOutput("args: '" + var2[1] + "' '" + var2[2] + "'");
			String worldName = var2[1];
			int size = 0;
			try {
				size = Integer.parseInt(var2[2]);
			} catch (Exception e) {
				printOutput("Size was not a number");
				output = null;
				return;
			}
			spawnSquare(worldName, size);
		} else if (var2[0].equalsIgnoreCase("box")) {
			if (var2.length != 6) {
				printOutput("Not Enough Arguments: /genworld box <world> <x min> <y min> <x max> <y max>");
				output = null;
				return;
			}
			printOutput("args: '" + var2[1] + "' '" + var2[2] + "' '"
					+ var2[3] + "' '" + var2[4] + "' '" + var2[5] + "'");
			String worldName = var2[1];
			int xMin = 0;
			int xMax = 0;
			int yMin = 0;
			int yMax = 0;
			try {
				xMin = Integer.parseInt(var2[2]);
				yMin = Integer.parseInt(var2[3]);
				xMax = Integer.parseInt(var2[4]);
				yMax = Integer.parseInt(var2[5]);
			} catch (Exception e) {
				printOutput("Size was not a number");
				output = null;
				return;
			}
			box(worldName, xMin, yMin, xMax, yMax);
		} else if (var2[0].equalsIgnoreCase("spawncircle")) {
			// Pregen world in a circle centered around the player start.
			if (var2.length != 3) {
				// Bug fix: this message previously said "SpawnRadius".
				printOutput("Not Enough Arguments: /genworld SpawnCircle <world> <radius>");
				output = null;
				return;
			}
			printOutput("args: '" + var2[1] + "' '" + var2[2] + "'");
			String worldName = var2[1];
			int size = 0;
			try {
				size = Integer.parseInt(var2[2]);
			} catch (Exception e) {
				printOutput("Radius was not a number");
				output = null;
				return;
			}
			spawnCircle(worldName, size);
		} else if (var2[0].equalsIgnoreCase("circle")) {
			if (var2.length != 5) {
				printOutput("Not Enough Arguments: /genworld circle <world> <x> <y> <radius>");
				output = null;
				return;
			}
			printOutput("args: '" + var2[1] + "' '" + var2[2] + "' '"
					+ var2[3] + "' '" + var2[4] + "'");
			String worldName = var2[1];
			int x = 0;
			int y = 0;
			int radius = 0;
			try {
				x = Integer.parseInt(var2[2]);
				y = Integer.parseInt(var2[3]);
				radius = Integer.parseInt(var2[4]);
			} catch (Exception e) {
				printOutput("Size was not a number");
				output = null;
				return;
			}
			circle(worldName, x, y, radius);
		} else {
			printOutput("genworld - Invalid option: " + var2[0]);
		}
		printOutput("genworld done");
		output = null;
	}
}
apache-2.0
cbeams-archive/spring-framework-2.5.x
sandbox/src/org/springframework/osgi/context/ContextLoaderBundleActivator.java
12168
/* * Copyright 2002-2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Created on 23-Jan-2006 by Adrian Colyer */ package org.springframework.osgi.context; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.Dictionary; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.osgi.framework.Bundle; import org.osgi.framework.BundleActivator; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceReference; import org.osgi.util.tracker.ServiceTracker; import org.osgi.util.tracker.ServiceTrackerCustomizer; import org.springframework.context.ApplicationContext; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.osgi.context.support.DefaultOsgiBundleXmlApplicationContextFactory; import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContextFactory; import org.springframework.osgi.service.OsgiServiceUtils; import org.springframework.util.ClassUtils; import org.springframework.util.StringUtils; /** * OSGi bundle activator class to be used as the bundle activator for OSGi * bundles using Spring services. When the bundle is activated it creates an * application context and destroys it when the bundled is stopped. 
* By default the ContextLoaderBundleActivator will look for an application
* context file in the location
* /META-INF/&lt;bundle-symbolic-name&gt;-context.xml. You can override this
* default behaviour by adding a bundle manifest header entry of the form
*
* Spring-Context: &lt;comma-delimited list of context file locations&gt;
*
* The manifest entry may contain any number of resource paths, separated by any
* number of commas and spaces.
*
* TODO: support parent application context via additional header giving name of
* parent application context service (by default this will be
* &lt;bundle-symbolic-name&gt;-springApplicationContext).
*
* @author Adrian Colyer
* @since 2.0
*/
public class ContextLoaderBundleActivator implements BundleActivator {

    /** Manifest header listing the Spring context file locations for this bundle. */
    private static final String CONTEXT_LOCATION_HEADER = "Spring-Context";

    /** Manifest header naming the OSGi service that exposes the parent application context. */
    private static final String PARENT_CONTEXT_SERVICE_NAME_HEADER = "Spring-Parent-Context";

    /** Characters that may separate entries in the Spring-Context header value. */
    private static final String CONTEXT_LOCATION_DELIMITERS = ", ";

    /** Prefix of the default context location: /META-INF/&lt;symbolic-name&gt;-context.xml. */
    private static final String DEFAULT_CONTEXT_PREFIX = "/META-INF/";

    /** Suffix of the default context location. */
    private static final String DEFAULT_CONTEXT_POSTFIX = "-context.xml";

    // NOTE(review): declared but never read anywhere in this class — confirm whether
    // it is consumed elsewhere or is dead configuration.
    private static final String CONTEXT_OPTIONS = "Spring-Context-Options";

    /**
     * is this really required? - if no parent is found, an exception is thrown
     * anyway
     */
    // NOTE(review): like CONTEXT_OPTIONS, this constant is never read in this class.
    private static final String FAIL_FAST_OPTION = "honor-dependent-services";

    /** Factory used to build the bundle's application context; replaceable for testing. */
    private OsgiBundleXmlApplicationContextFactory contextFactory = new DefaultOsgiBundleXmlApplicationContextFactory();

    /** The context created in start(); closed again in stop(). */
    private ConfigurableApplicationContext applicationContext;

    /** Reference to the parent-context service, if one was looked up; unget in stop(). */
    private ServiceReference parentServiceReference;

    // NOTE(review): this field is passed to the lookup handler below but is never
    // assigned in this class, so the stop()-time close() guard is always skipped —
    // confirm whether the tracker created inside the handler was meant to be stored here.
    private ServiceTracker serviceTracker;

    private static final Log log = LogFactory.getLog(ContextLoaderBundleActivator.class);

    /**
     * BundleActivator.start: resolves the context locations from the bundle
     * manifest, optionally looks up a parent context service, and creates this
     * bundle's application context.
     */
    public void start(BundleContext bundleContext) throws Exception {
        Bundle myBundle = bundleContext.getBundle();
        log.info("starting bundle " + myBundle.getSymbolicName() + myBundle.getBundleId());
        String[] applicationContextLocations = getApplicationContextLocations(myBundle);
        ApplicationContext parent = getParentApplicationContext(bundleContext);
        this.applicationContext = this.contextFactory.createApplicationContext(parent, bundleContext,
                applicationContextLocations);
    }

    /*
     * (non-Javadoc)
     *
     * @see org.osgi.framework.BundleActivator#stop(org.osgi.framework.BundleContext)
     */
    // Releases, in order: the created context, the parent-context service
    // reference, and the service tracker (see field note above — currently null).
    public void stop(BundleContext bundleContext) throws Exception {
        if (this.applicationContext != null) {
            this.applicationContext.close();
        }
        if (this.parentServiceReference != null) {
            bundleContext.ungetService(this.parentServiceReference);
        }
        if (this.serviceTracker != null)
            serviceTracker.close();
    }

    /**
     * Search for the Spring-Parent-Context application context.
     *
     * Returns null when the manifest does not name a parent context service;
     * otherwise looks the service up by name and returns a dynamic proxy that
     * tracks the service (see createApplicationContextProxy).
     *
     * @param context the bundle context used for the service lookup
     * @return the (proxied) parent context, or null if none is configured
     */
    protected ApplicationContext getParentApplicationContext(final BundleContext context) {
        String parentContextServiceName = (String) context.getBundle().getHeaders().get(
                PARENT_CONTEXT_SERVICE_NAME_HEADER);
        if (parentContextServiceName == null) {
            if (log.isDebugEnabled())
                log.debug("no need to look for a parent context");
            return null;
        }
        else {
            if (log.isDebugEnabled())
                log.debug("looking for a parent context...");
            // try to find the service by the name given in the manifest header
            String filter = "(" + OsgiBundleXmlApplicationContext.APPLICATION_CONTEXT_SERVICE_NAME_HEADER + "="
                    + parentContextServiceName + ")";
            parentServiceReference = OsgiServiceUtils.getService(context, ApplicationContext.class, filter);
            // TODO: register as service listener..., probably in a proxy to the
            // app context that we create here and return instead.
            // Costin: done, should be verified though.
            return createApplicationContextProxy(context, parentServiceReference);
        }
    }

    /**
     * Create a proxy around the target application context so that callers keep
     * working (or fail fast) as the backing service comes and goes.
     *
     * @param context the bundle context
     * @param serviceReference reference to the parent context service
     * @return a dynamic proxy implementing the target's interfaces
     */
    protected ApplicationContext createApplicationContextProxy(BundleContext context,
            ServiceReference serviceReference) {
        LookupApplicationContextInvocationHandler handler = new LookupApplicationContextInvocationHandler(context,
                serviceReference, serviceTracker);
        // TODO: interfaces are detected dynamically - is this dangerous (for
        // example if the parent context changes)
        // As the child depends on it, recreating the parent context should
        // trigger the whole process again.
        ApplicationContext target = handler.getTarget();
        Class[] ifaces = (target == null ? new Class[] { ApplicationContext.class } : ClassUtils
                .getAllInterfaces(target));
        return (ApplicationContext) Proxy.newProxyInstance(getClass().getClassLoader(), ifaces, handler);
    }

    /**
     * Retrieves the Spring-Context manifest header attribute and parses it to
     * create a String[] of resource names for creating the application context.
     *
     * If the header is not present, the default
     * /META-INF/&lt;bundle-symbolic-name&gt;-context.xml location is returned.
     * All paths are prefixed so resolution is restricted to this bundle.
     */
    protected String[] getApplicationContextLocations(Bundle bundle) {
        Dictionary manifestHeaders = bundle.getHeaders();
        String contextLocationsHeader = (String) manifestHeaders.get(CONTEXT_LOCATION_HEADER);
        if (contextLocationsHeader != null) {
            // (Dictionary does not offer a "containsKey" operation)
            return addBundlePrefixTo(StringUtils.tokenizeToStringArray(contextLocationsHeader,
                    CONTEXT_LOCATION_DELIMITERS));
        }
        else {
            String defaultName = DEFAULT_CONTEXT_PREFIX + bundle.getSymbolicName() + DEFAULT_CONTEXT_POSTFIX;
            return addBundlePrefixTo(new String[] { defaultName });
        }
    }

    /**
     * add the "bundle:" prefix to the resource location paths in the given
     * argument. This ensures that only this bundle will be searched for
     * matching resources.
     *
     * Modifies the argument in place and returns it.
     */
    private String[] addBundlePrefixTo(String[] resourcePaths) {
        for (int i = 0; i < resourcePaths.length; i++) {
            resourcePaths[i] = OsgiBundleResource.BUNDLE_URL_PREFIX + resourcePaths[i];
        }
        return resourcePaths;
    }

    // for testing...
    protected void setApplicationContext(ConfigurableApplicationContext context) {
        this.applicationContext = context;
    }

    // for testing...
    protected void setParentServiceReference(ServiceReference ref) {
        this.parentServiceReference = ref;
    }

    // for testing...
    protected void setApplicationContextFactory(OsgiBundleXmlApplicationContextFactory factory) {
        this.contextFactory = factory;
    }

    /**
     * Simple lookup proxy using a ServiceTracker underneath: method calls are
     * forwarded to the currently tracked parent ApplicationContext, and fail
     * with UnsupportedOperationException while no parent is available.
     *
     * @author Costin Leau
     */
    private static class LookupApplicationContextInvocationHandler implements InvocationHandler {

        private final ServiceReference serviceReference;

        /** Currently tracked parent context; null when the service is gone. */
        private ApplicationContext target;

        public LookupApplicationContextInvocationHandler(final BundleContext context, ServiceReference serviceRef,
                ServiceTracker serviceTracker) {
            this.serviceReference = serviceRef;
            // NOTE(review): 'parent' is obtained but never used — it does not
            // initialise 'target', so getTarget() returns null until the tracker
            // customizer fires. Confirm this is intentional.
            ApplicationContext parent = (ApplicationContext) context.getService(serviceReference);
            // NOTE(review): this assigns the method PARAMETER, not a field — the
            // tracker created here is never stored by the caller and never
            // open()ed, so the customizer callbacks below may never run.
            serviceTracker = new ServiceTracker(context, serviceReference, new ServiceTrackerCustomizer() {

                public Object addingService(ServiceReference ref) {
                    if (log.isDebugEnabled())
                        log.debug("parentApplicationContext has been discovered");
                    // multiple parent contexts are already handled by
                    // OsgiServiceUtils.getService()
                    target = (ApplicationContext) context.getService(ref);
                    return target;
                }

                public void modifiedService(ServiceReference ref, Object service) {
                    if (log.isDebugEnabled())
                        log.debug("parentApplicationContext has been modified");
                    // TODO: should we refresh the child context (happens
                    // automatically if the parent is refreshed)
                }

                public void removedService(ServiceReference ref, Object service) {
                    if (log.isDebugEnabled())
                        log.debug("parentApplicationContext has been removed");
                    target = null;
                    // TODO: should we close the child context?
                }
            });
        }

        /**
         * Used to get the discovered target object (for example for detecting
         * the implemented interfaces).
         *
         * @return the tracked parent context, or null if none is available
         */
        protected ApplicationContext getTarget() {
            return target;
        }

        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            String methodName = method.getName();
            if (methodName.equals("equals")) {
                // Only consider equal when proxies are identical.
                return (proxy == args[0] ? Boolean.TRUE : Boolean.FALSE);
            }
            else if (methodName.equals("hashCode")) {
                // Use hashCode of the proxy handler itself.
                return new Integer(hashCode());
            }
            try {
                if (target != null)
                    return method.invoke(this.target, args);
            }
            catch (InvocationTargetException ex) {
                // unwrap so callers see the real cause
                throw ex.getTargetException();
            }
            throw new UnsupportedOperationException("no parentApplicationContext in place");
        }
    }

    /**
     * Close suppressing invocation handler - proxy used as a 'shield' against
     * forbidden close inside an OSGi environment.
     *
     * Not used at the moment.
     *
     * @author Costin Leau
     */
    private static class CloseSuppresingApplicationContextInvocationHandler implements InvocationHandler {

        private final ApplicationContext target;

        public CloseSuppresingApplicationContextInvocationHandler(ApplicationContext appContext) {
            this.target = appContext;
        }

        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            String methodName = method.getName();
            // suppress close calls:
            // applicationContext close
            // Lifecycle interface
            // TODO: what about refresh() ?
            if (methodName.equals("close") || methodName.equals("stop"))
                return null;
            if (methodName.equals("equals")) {
                // Only consider equal when proxies are identical.
                return (proxy == args[0] ? Boolean.TRUE : Boolean.FALSE);
            }
            else if (methodName.equals("hashCode")) {
                // Use hashCode of the proxy handler itself.
                return new Integer(hashCode());
            }
            try {
                return method.invoke(this.target, args);
            }
            catch (InvocationTargetException ex) {
                // unwrap so callers see the real cause
                throw ex.getTargetException();
            }
        }
    }
}
apache-2.0
dawutao/MyCoolWeather
app/src/main/java/com/mycoolweather/android/db/City.java
853
package com.mycoolweather.android.db;

import org.litepal.crud.DataSupport;

/**
 * LitePal-backed entity representing one city row.
 *
 * Each city stores its own database id, display name, a weather-service city
 * code, and the id of the province it belongs to.
 */
public class City extends DataSupport {

    // Primary key managed by LitePal.
    private int id;
    // Human-readable city name.
    private String cityName;
    // Weather-service code for this city.
    private int cityCode;
    // Foreign key to the owning province (name kept as-is for schema compatibility).
    private int provincecId;

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getCityName() {
        return this.cityName;
    }

    public void setCityName(String cityName) {
        this.cityName = cityName;
    }

    public int getCityCode() {
        return this.cityCode;
    }

    public void setCityCode(int cityCode) {
        this.cityCode = cityCode;
    }

    public int getProvincecId() {
        return this.provincecId;
    }

    public void setProvincecId(int provincecId) {
        this.provincecId = provincecId;
    }
}
apache-2.0
IvanBelyaev/ibelyaev
chapter_002/src/main/java/ru/job4j/chess/FigureNotFoundException.java
451
package ru.job4j.chess;

/**
 * FigureNotFoundException.
 * The exception is thrown when the specified cell has no figure.
 *
 * Extends the game's common {@code ChessException} hierarchy so callers can
 * catch chess-related failures with a single handler.
 *
 * @author Ivan Belyaev
 * @since 16.11.2017
 * @version 1.0
 */
public class FigureNotFoundException extends ChessException {
    /**
     * The constructor creates the object FigureNotFoundException.
     * @param message - detail message describing which cell was empty.
     */
    public FigureNotFoundException(String message) {
        super(message);
    }
}
apache-2.0
Tfancy/coolweather
app/src/main/java/com/jm/zuikuweather/gson/AQI.java
214
package com.jm.zuikuweather.gson;

/**
 * Gson model for the "aqi" section of the weather API response.
 *
 * Field names must match the JSON keys exactly, so they are public and
 * deliberately not renamed.
 *
 * Created by t_jm on 2017/9/15.
 */
public class AQI {

    // The nested "city" object carrying the air-quality readings.
    public AQICity city;

    // NOTE(review): non-static inner class — presumably instantiated only by
    // Gson during deserialization; confirm Gson's version handles inner
    // classes here, otherwise this should be a static nested class.
    public class AQICity{

        // Air Quality Index value (kept as String, as delivered by the API).
        public String aqi;

        // PM2.5 particulate reading (kept as String, as delivered by the API).
        public String pm25;
    }
}
apache-2.0
jiyilanzhou/coolweather
app/src/main/java/com/example/coolweather/db/CoolWeatherOpenHelper.java
1383
package com.example.coolweather.db;

import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;

/**
 * SQLiteOpenHelper that creates the three-level location schema
 * (Province -> City -> County) on first open.
 *
 * Created by Administrator on 2016/4/5.
 */
public class CoolWeatherOpenHelper extends SQLiteOpenHelper {

    // DDL for the Province table: id (PK), display name, weather-service code.
    public static final String CREATE_PROVINCE = "create table Province ("
            + "id integer primary key autoincrement, "
            + "province_name text, "
            + "province_code text)";

    // DDL for the City table; province_id links a city to its province.
    public static final String CREATE_CITY = "create table City ("
            + "id integer primary key autoincrement, "
            + "city_name text, "
            + "city_code text, "
            + "province_id integer)";

    // DDL for the County table; city_id links a county to its city.
    public static final String CREATE_COUNTY = "create table County ("
            + "id integer primary key autoincrement, "
            + "county_name text, "
            + "county_code text, "
            + "city_id integer)";

    public CoolWeatherOpenHelper(Context context, String name, SQLiteDatabase.CursorFactory factory, int version) {
        super(context, name, factory, version);
    }

    // Runs once, when the database file is first created.
    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(CREATE_PROVINCE);
        db.execSQL(CREATE_CITY);
        db.execSQL(CREATE_COUNTY);
    }

    // NOTE(review): intentionally empty — bumping the schema version will NOT
    // migrate existing installs. TODO confirm whether a drop-and-recreate (or
    // real migration) is wanted before the version is ever raised.
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    }
}
apache-2.0
EMResearch/EMB
jdk_11_gradle/cs/graphql/patio-api/src/main/java/patio/group/graphql/GetGroupInput.java
2728
/*
 * Copyright (C) 2019 Kaleidos Open Source SL
 *
 * This file is part of PATIO.
 * PATIO is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * PATIO is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with PATIO. If not, see <https://www.gnu.org/licenses/>
 */
package patio.group.graphql;

import java.util.UUID;

/**
 * Input to build a group
 *
 * Immutable pair of the target group's id and the id of the user making the
 * request; construct directly or via {@link #newBuilder()}.
 *
 * @since 0.1.0
 */
public class GetGroupInput {

  private final UUID groupId;
  private final UUID currentUserId;

  /**
   * Initializes the input
   *
   * @param groupId the group id
   * @param currentUserId the current user id
   */
  public GetGroupInput(UUID groupId, UUID currentUserId) {
    this.groupId = groupId;
    this.currentUserId = currentUserId;
  }

  /**
   * Gets group id.
   *
   * @return the group id
   */
  public UUID getGroupId() {
    return groupId;
  }

  /**
   * Gets current user id.
   *
   * @return the current user id
   */
  public UUID getCurrentUserId() {
    return currentUserId;
  }

  /**
   * Creates a new builder to create a new instance of type {@link GetGroupInput}
   *
   * @return an instance of {@link Builder}
   * @since 0.1.0
   */
  public static Builder newBuilder() {
    return new Builder();
  }

  /**
   * Builds an instance of type {@link GetGroupInput}
   *
   * Each with-er replaces the draft with a new immutable instance carrying
   * the updated field, so the builder itself stays side-effect free.
   *
   * @since 0.1.0
   */
  public static class Builder {

    private transient GetGroupInput draft = new GetGroupInput(null, null);

    private Builder() {
      /* empty */
    }

    /**
     * Sets the currentUserId
     *
     * @param currentUserId the current user's id
     * @return current builder instance
     * @since 0.1.0
     */
    public Builder withCurrentUserId(UUID currentUserId) {
      draft = new GetGroupInput(draft.getGroupId(), currentUserId);
      return this;
    }

    /**
     * Sets the groupId
     *
     * @param groupId the group id
     * @return the builder
     * @since 0.1.0
     */
    public Builder withGroupId(UUID groupId) {
      draft = new GetGroupInput(groupId, draft.getCurrentUserId());
      return this;
    }

    /**
     * Returns the instance built with this builder
     *
     * @return an instance of type {@link GetGroupInput}
     * @since 0.1.0
     */
    public GetGroupInput build() {
      return draft;
    }
  }
}
apache-2.0
retoomey/WG3
org.wg3.core/src/org/wg3/storage/Array2D.java
1493
package org.wg3.storage;

/**
 * Array2D hides the internals of storage of a 2D array.
 * This way we can store it sparse, full, off to disk, etc...
 *
 * FIXME: add iterators so that sparse data can be accessed
 * without scanning an entire grid...
 *
 * @author Robert Toomey
 */
public interface Array2D<T> {

    /** Get a value from given x and y. */
    T get(int x, int y);

    /** Set a value given an x and y. */
    void set(int x, int y, T value);

    /** Get the 'x' dimension of the array. */
    int getX();

    /** Get the 'y' dimension of the array. */
    int getY();

    /** Return the full size of the array (total element count). */
    int size();

    /**
     * Return an efficient 1D access to given col of the 2D array;
     * this method 'may' copy, but probably shouldn't. Return null
     * if you can't implement this.
     */
    Array1D<T> getCol(int i);

    /**
     * Return an efficient 1D access to given row of the 2D array;
     * this method 'may' copy, but probably shouldn't. Return null
     * if you can't implement this.
     */
    Array1D<T> getRow(int i);

    /**
     * Begin row-ordered mode for a mass get/set of data. Allows the array to
     * optimize sub-tile loading. Callers should then iterate X before Y,
     * and finish with {@link #endRowOrdered()}.
     */
    void beginRowOrdered();

    /** End row-ordered mode started by {@link #beginRowOrdered()}. */
    void endRowOrdered();
}
apache-2.0
objectos/way
way-code-testing/src/test/resources/code/MethodThrows.java
507
package br.com.objectos.way.code.fakes;

import java.io.IOException;
import java.util.concurrent.ExecutionException;

import br.com.objectos.way.code.Testing;

// Fixture class exercising method 'throws' clauses — presumably consumed by
// the way-code @Testing annotation processor/tests (TODO confirm); the exact
// set of declared exceptions on each method is the point of this class.
@Testing
public abstract class MethodThrows {

  // No throws clause at all.
  public void doNotThrow() {
  }

  // Declares (and throws) an unchecked exception.
  public void throwsUnchecked() throws NullPointerException {
    throw new NullPointerException();
  }

  // Declares a single checked exception.
  abstract void throwsChecked() throws IOException;

  // Declares multiple checked exceptions.
  abstract void throwsCheckedMany() throws ExecutionException, IOException, InterruptedException;

}
apache-2.0
torrances/swtk-commons
commons-dict-wordnet-indexbyname/src/main/java/org/swtk/commons/dict/wordnet/indexbyname/instance/m/e/f/WordnetNounIndexNameInstanceMEF.java
1430
package org.swtk.commons.dict.wordnet.indexbyname.instance.m.e.f;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;

import org.swtk.common.dict.dto.wordnet.IndexNoun;

import com.trimc.blogger.commons.utils.GsonUtils;

/**
 * Static, pre-populated lookup of WordNet noun index entries for terms in the
 * "mef" shard. Terms map to one or more {@link IndexNoun} records, kept in a
 * sorted map so {@link #terms()} iterates alphabetically.
 */
public final class WordnetNounIndexNameInstanceMEF {

    private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>();

    static {
        add("{\"term\":\"mefenamic acid\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03748022\"]}");
        add("{\"term\":\"mefloquine\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03748219\"]}");
        add("{\"term\":\"mefloquine hydrochloride\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03748219\"]}");
        add("{\"term\":\"mefoxin\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03000447\"]}");
    }

    /** Deserializes one JSON record and appends it to its term's bucket. */
    private static void add(final String JSON) {
        IndexNoun entry = GsonUtils.toObject(JSON, IndexNoun.class);
        String term = entry.getTerm();
        Collection<IndexNoun> bucket;
        if (map.containsKey(term)) {
            bucket = map.get(term);
        } else {
            bucket = new ArrayList<IndexNoun>();
        }
        bucket.add(entry);
        map.put(term, bucket);
    }

    /** Returns all index entries for the term, or null when the term is unknown. */
    public static Collection<IndexNoun> get(final String TERM) {
        return map.get(TERM);
    }

    /** True when at least one entry exists for the term. */
    public static boolean has(final String TERM) {
        return map.containsKey(TERM);
    }

    /** All known terms, in alphabetical order. */
    public static Collection<String> terms() {
        return map.keySet();
    }
}
apache-2.0
daisy/pipeline-issues
libs/dotify/dotify.formatter.impl/src/org/daisy/dotify/formatter/impl/row/AbstractBlockContentManager.java
4706
package org.daisy.dotify.formatter.impl.row;

import org.daisy.dotify.api.formatter.Marker;
import org.daisy.dotify.formatter.impl.common.FormatterCoreContext;
import org.daisy.dotify.formatter.impl.search.DefaultContext;

import java.util.List;
import java.util.Optional;

/**
 * Base class for producing the rows of a formatted block: pre/post content
 * spacing rows come from {@link BlockSpacing}, while subclasses supply the
 * actual content rows via {@link #getNext(LineProperties)}.
 */
public abstract class AbstractBlockContentManager implements BlockStatistics {
    //Immutable
    // Total width available to the block, including margins.
    protected final int flowWidth;
    protected final RowDataProperties rdp;
    protected final BlockMargin margins;
    protected final BlockSpacing spacing;
    // Width left after subtracting left/right margins (unless overridden).
    private final int minWidth;

    //Mutable
    protected final FormatterCoreContext fcontext;

    // Convenience constructor: minWidth is derived from flowWidth and margins.
    AbstractBlockContentManager(int flowWidth, RowDataProperties rdp, FormatterCoreContext fcontext) {
        this(flowWidth, rdp, fcontext, null);
    }

    /**
     * @param minWidth explicit minimum width, or null to compute it as
     *                 flowWidth minus the rendered left and right margins.
     */
    protected AbstractBlockContentManager(
        int flowWidth,
        RowDataProperties rdp,
        FormatterCoreContext fcontext,
        Integer minWidth
    ) {
        this.flowWidth = flowWidth;
        this.margins = rdp.getMargins();
        this.spacing = new BlockSpacing(margins, rdp, flowWidth, fcontext.getSpaceCharacter());
        this.fcontext = fcontext;
        this.rdp = rdp;
        this.minWidth = minWidth == null
                ? flowWidth - margins.getLeftMargin().getContent().length()
                        - margins.getRightMargin().getContent().length()
                : minWidth;
    }

    // Copy constructor used by subclass copy() implementations; shares the
    // immutable parts (and, knowingly, the mutable fcontext — see note below).
    protected AbstractBlockContentManager(AbstractBlockContentManager template) {
        this.flowWidth = template.flowWidth;
        this.rdp = template.rdp;
        this.margins = template.margins;
        this.spacing = template.spacing;
        this.minWidth = template.minWidth;
        // TODO: fcontext is mutable, but mutating is related to DOM creation,
        // and we assume for now that DOM creation is NOT going on when rendering has begun.
        this.fcontext = template.fcontext;
    }

    /** Returns a deep-enough copy suitable for speculative rendering. */
    public abstract AbstractBlockContentManager copy();

    /** Sets the search context used while producing rows. */
    public abstract void setContext(DefaultContext context);

    /**
     * Returns true if the manager has more rows.
     *
     * @return returns true if there are more rows, false otherwise
     */
    public abstract boolean hasNext();

    /**
     * Returns true if the manager has some "significant" content.
     *
     * @return returns true if there is significant content, false otherwise.
     */
    public abstract boolean hasSignificantContent();

    /**
     * Gets the next row from the manager with the default line properties.
     *
     * @return returns the next row
     */
    public Optional<RowImpl> getNext() {
        return getNext(LineProperties.DEFAULT);
    }

    /** Gets the next row using the given line properties, if any remains. */
    public abstract Optional<RowImpl> getNext(LineProperties lineProps);

    /**
     * Returns true if this manager supports rows with variable maximum
     * width, false otherwise.
     *
     * @return true if variable maximum width is supported, false otherwise
     */
    public abstract boolean supportsVariableWidth();

    /**
     * Resets the state of the content manager to the first row.
     */
    public abstract void reset();

    /** Margin inherited from the parent block, left side. */
    public MarginProperties getLeftMarginParent() {
        return margins.getLeftParent();
    }

    /** Margin inherited from the parent block, right side. */
    public MarginProperties getRightMarginParent() {
        return margins.getRightParent();
    }

    /** Leading spacing rows that may collapse with a preceding block's. */
    public List<RowImpl> getCollapsiblePreContentRows() {
        return spacing.getCollapsiblePreContentRows();
    }

    public boolean hasCollapsiblePreContentRows() {
        return !spacing.getCollapsiblePreContentRows().isEmpty();
    }

    /** Leading spacing rows that never collapse. */
    public List<RowImpl> getInnerPreContentRows() {
        return spacing.getInnerPreContentRows();
    }

    public boolean hasInnerPreContentRows() {
        return !spacing.getInnerPreContentRows().isEmpty();
    }

    /** Trailing spacing rows that must be kept with the block. */
    public List<RowImpl> getPostContentRows() {
        return spacing.getPostContentRows();
    }

    public boolean hasPostContentRows() {
        return !spacing.getPostContentRows().isEmpty();
    }

    /** Trailing spacing rows that may be dropped at a page boundary. */
    public List<RowImpl> getSkippablePostContentRows() {
        return spacing.getSkippablePostContentRows();
    }

    public boolean hasSkippablePostContentRows() {
        return !spacing.getSkippablePostContentRows().isEmpty();
    }

    @Override
    public int getMinimumAvailableWidth() {
        return minWidth;
    }

    /**
     * Get markers that are not attached to a row, i.e. markers that proceeds
     * any text contents.
     *
     * @return returns markers that proceeds this FlowGroups text contents
     */
    public abstract List<Marker> getGroupMarkers();

    /** Anchors that precede any text contents, analogous to group markers. */
    public abstract List<String> getGroupAnchors();

    /** Identifiers that precede any text contents, analogous to group markers. */
    public abstract List<String> getGroupIdentifiers();

}
TapCard/TapCard
tapcard/src/test/java/io/github/tapcard/emvnfccard/utils/AtrTest.java
1170
package io.github.tapcard.emvnfccard.utils;

import org.fest.assertions.Assertions;
import org.junit.Test;

import java.util.Arrays;

// Unit tests for AtrUtils lookups. The hex strings are real ATR/ATS values
// (some with '.' wildcards) and the expected descriptions come from the
// bundled smart-card ATR list, so the literals must not be "cleaned up".
@SuppressWarnings("ArraysAsListWithZeroOrOneArgument")
public class AtrTest {

  // ATR (contact) lookups: exact match, null input, wildcard match with a
  // multi-line description, and a pay-TV card.
  @Test
  public void testDescriptionFromATR() {
    Assertions.assertThat(AtrUtils.getDescription("3B 02 14 50")).isEqualTo(Arrays.asList("Schlumberger Multiflex 3k"));
    Assertions.assertThat(AtrUtils.getDescription(null)).isEqualTo(null);
    Assertions
        .assertThat(AtrUtils.getDescription("3B 07 64 11.........."))
        .isEqualTo(
            Arrays.asList("HID Corporate 1000 Format",
                "http://www.hidglobal.com/sites/hidglobal.com/files/resource_files/omnikey_contactless_developer_guide.pdf"));
    Assertions
        .assertThat(AtrUtils.getDescription("3F FD 11 25 02 50 00 03 33 B0 15 69 FF 4A 50 F0 80 03 4B 4C 03"))
        .isEqualTo(Arrays.asList("Kabel Deutschland G02 (Pay TV)"));
  }

  // ATS (contactless) lookup plus the null-input case.
  @Test
  public void testDescriptionFromATS() {
    Assertions.assertThat(AtrUtils.getDescriptionFromAts("20 63 CB A3 A0")).isEqualTo(
        Arrays.asList("VISA card from Banque Populaire"));
    Assertions.assertThat(AtrUtils.getDescription(null)).isEqualTo(null);
  }
}
apache-2.0
marcosoliveirasilva/web_application_mobile
src/br/edu/ifba/wmobile/huehue/produtos/sala/Televisao.java
466
package br.edu.ifba.wmobile.huehue.produtos.sala;

import java.util.ArrayList;
import java.util.List;

import br.edu.ifba.wmobile.huehue.produtos.IProdutos;

/**
 * Catalog entry for a television: belongs to the "SALA" (living room)
 * section and carries a single item name plus its price.
 */
public class Televisao implements IProdutos {

	@Override
	public String getCessao() {
		return "SALA";
	}

	@Override
	public List<String> getNome() {
		// Single-element, mutable list as required by the IProdutos contract.
		final List<String> nomes = new ArrayList<String>();
		nomes.add("TELEVISAO");
		return nomes;
	}

	@Override
	public double getPreco() {
		return 999.90;
	}

}
apache-2.0
adam-roughton/Concentus
Service/src/main/java/com/adamroughton/concentus/clienthandler/ActionCollectorAllocationStrategy.java
255
package com.adamroughton.concentus.clienthandler;

import org.javatuples.Pair;

import com.adamroughton.concentus.messaging.SocketIdentity;

/**
 * Strategy for assigning a client to an action collector.
 *
 * Implementations map a client id to the chosen collector, returned as a
 * (collector index, socket identity) pair.
 */
interface ActionCollectorAllocationStrategy {

	/**
	 * Allocates the given client to an action collector.
	 *
	 * @param clientId the id of the client to allocate
	 * @return the allocated collector's index paired with its socket identity
	 */
	Pair<Integer, SocketIdentity> allocateClient(long clientId);

}
apache-2.0
Notronix/JaLAPI
src/main/java/com/notronix/lw/impl/method/orders/ProcessOrdersInBatchMethod.java
1852
package com.notronix.lw.impl.method.orders;

import com.google.api.client.http.HttpContent;
import com.google.gson.Gson;
import com.notronix.lw.api.model.ProcessOrderResult;
import com.notronix.lw.impl.method.AbstractLinnworksAPIMethod;

import java.util.*;

import static com.notronix.lw.impl.method.MethodUtils.putIfProvided;
import static com.notronix.lw.impl.method.MethodUtils.urlEncode;
import static java.util.Objects.requireNonNull;

/**
 * Linnworks API method "Orders/ProcessOrdersInBatch": processes a batch of
 * orders, optionally at a specific location, and returns one
 * {@link ProcessOrderResult} per order.
 */
public class ProcessOrdersInBatchMethod extends AbstractLinnworksAPIMethod<List<ProcessOrderResult>>
{
    private List<UUID> ordersIds;
    private UUID locationId;

    @Override
    public String getURI() {
        return "Orders/ProcessOrdersInBatch";
    }

    /**
     * Builds the url-encoded request body. {@code ordersIds} is mandatory;
     * {@code locationId} is included only when one has been provided.
     */
    @Override
    public HttpContent getContent(Gson gson) {
        Map<String, Object> form = new HashMap<>();
        form.put("ordersIds", gson.toJson(requireNonNull(ordersIds)));
        putIfProvided(form, "locationId", locationId);
        return urlEncode(form);
    }

    /** Deserializes the JSON array payload into a list of results. */
    @Override
    public List<ProcessOrderResult> getResponse(Gson gson, String jsonPayload) {
        ProcessOrderResult[] results = gson.fromJson(jsonPayload, ProcessOrderResult[].class);
        return Arrays.asList(results);
    }

    public List<UUID> getOrdersIds() {
        return ordersIds;
    }

    public void setOrdersIds(List<UUID> ordersIds) {
        this.ordersIds = ordersIds;
    }

    /** Fluent setter for the ids of the orders to process. */
    public ProcessOrdersInBatchMethod withOrdersIds(List<UUID> ordersIds) {
        setOrdersIds(ordersIds);
        return this;
    }

    public UUID getLocationId() {
        return locationId;
    }

    public void setLocationId(UUID locationId) {
        this.locationId = locationId;
    }

    /** Fluent setter for the optional fulfilment location id. */
    public ProcessOrdersInBatchMethod withLocationId(UUID locationId) {
        setLocationId(locationId);
        return this;
    }
}
bounswe/bounswe2016group7
utopic/api-client/src/main/java/com/bounswe/group7/api/client/CommentServiceClient.java
1565
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.bounswe.group7.api.client;

import com.bounswe.group7.model.Comments;
import com.bounswe.group7.model.VotedComments;
import com.google.gson.reflect.TypeToken;
import java.util.List;

/**
 * REST client for the comment service. Each method POSTs to a path under the
 * base resource (inherited from BaseClient) and deserializes the response via
 * the given TypeToken. Paths under "public/" need no auth token.
 *
 * @author Batuhan
 */
public class CommentServiceClient extends BaseClient{

    // Unauthenticated client — only usable for the public endpoints.
    public CommentServiceClient(){
    }

    // Authenticated client; the token is handled by BaseClient.
    public CommentServiceClient(String token){
        super(token);
    }

    /** Creates a comment and returns the persisted entity (with its id). */
    public Comments createComment(Comments request) throws Exception {
        return post(getResource().path("createComment"), new TypeToken<Comments>() {
        }, request);
    }

    /** Lists all comments attached to the given topic (public endpoint). */
    public List<Comments> getTopicComments(Long topicId) throws Exception {
        return post(getResource().path("public/getTopicComments"), new TypeToken<List<Comments>>() {
        }, topicId);
    }

    /** Deletes the comment; returns true on success. */
    public boolean deleteComment(Long commentId) throws Exception {
        return post(getResource().path("deleteComment"), new TypeToken<Boolean>() {
        }, commentId);
    }

    /** Records an up/down vote for a comment; returns true on success. */
    public boolean voteComment(VotedComments votedComment) throws Exception {
        return post(getResource().path("voteComment"), new TypeToken<Boolean>() {
        }, votedComment);
    }

    /** Returns the aggregate vote score of the comment. */
    public int getCommentRate(Long commentId) throws Exception {
        return post(getResource().path("getCommentRate"), new TypeToken<Integer>() {
        }, commentId);
    }
}
apache-2.0
mpaladin/skeleton-flume-extra
src/test/java/ch/cern/skeleton/flume/sink/TestDummySink.java
1333
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package ch.cern.skeleton.flume.sink;

import org.apache.flume.EventDeliveryException;
import org.apache.flume.lifecycle.LifecycleException;
import org.junit.Before;
import org.junit.Test;

// Skeleton test for DummySink — a fresh sink instance per test, with a
// placeholder test case to be filled in.
public class TestDummySink {

  private DummySink sink;

  @Before
  public void setUp() {
    sink = new DummySink();
  }

  /**
   * Test something.
   * NOTE(review): currently empty — always passes; real assertions still to
   * be written against the sink created in setUp().
   */
  @Test
  public void testSomething() throws InterruptedException, LifecycleException,
      EventDeliveryException {
    // test something
  }
}
apache-2.0
google/paco
Paco-Server/src/com/google/sampling/experiential/cloudsql/columns/DataTypeColumns.java
457
package com.google.sampling.experiential.cloudsql.columns; public class DataTypeColumns { public static final String TABLE_NAME = "data_type"; public static final String DATA_TYPE_ID = "data_type_id"; public static final String NAME = "name"; public static final String IS_NUMERIC = "is_numeric"; public static final String MULTI_SELECT = "multi_select"; public static final String RESPONSE_MAPPING_REQUIRED = "response_mapping_required"; }
apache-2.0
Echo19890615/feign
benchmark/src/main/java/feign/benchmark/RealRequestBenchmarks.java
2723
package feign.benchmark;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;

import java.io.IOException;
import java.util.concurrent.TimeUnit;

import feign.Feign;
import feign.Response;
import io.netty.buffer.ByteBuf;
import io.reactivex.netty.RxNetty;
import io.reactivex.netty.protocol.http.server.HttpServer;
import io.reactivex.netty.protocol.http.server.HttpServerRequest;
import io.reactivex.netty.protocol.http.server.HttpServerResponse;
import io.reactivex.netty.protocol.http.server.RequestHandler;

// JMH benchmark comparing plain OkHttp against Feign-over-OkHttp for a
// simple GET against a local RxNetty server that replies with an empty body.
// NOTE(review): the imports use okhttp3.* while the benchmark below also
// references com.squareup.okhttp.Response and calls
// client.setRetryOnConnectionFailure(...) — these belong to different OkHttp
// major versions; confirm which OkHttp artifact this module actually builds
// against.
@Measurement(iterations = 5, time = 1)
@Warmup(iterations = 10, time = 1)
@Fork(3)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@State(Scope.Benchmark)
public class RealRequestBenchmarks {

  private static final int SERVER_PORT = 8765;
  private HttpServer<ByteBuf, ByteBuf> server;
  private OkHttpClient client;
  private FeignTestInterface okFeign;
  private Request queryRequest;

  // Starts the local server and builds both clients plus the raw request
  // used by the baseline benchmark.
  @Setup
  public void setup() {

    server = RxNetty.createHttpServer(SERVER_PORT, new RequestHandler<ByteBuf, ByteBuf>() {
      // Replies immediately with an empty (flushed) response.
      public rx.Observable handle(HttpServerRequest<ByteBuf> request,
                                  HttpServerResponse<ByteBuf> response) {
        return response.flush();
      }
    });
    server.start();

    client = new OkHttpClient();
    client.setRetryOnConnectionFailure(false);

    okFeign = Feign.builder()
        .client(new feign.okhttp.OkHttpClient(client))
        .target(FeignTestInterface.class, "http://localhost:" + SERVER_PORT);

    queryRequest = new Request.Builder()
        .url("http://localhost:" + SERVER_PORT + "/?Action=GetUser&Version=2010-05-08&limit=1")
        .build();
  }

  @TearDown
  public void tearDown() throws InterruptedException {
    server.shutdown();
  }

  /**
   * How fast can we execute get commands synchronously?
   */
  // Baseline: raw OkHttp call; the body is closed so connections are reused.
  @Benchmark
  public com.squareup.okhttp.Response query_baseCaseUsingOkHttp() throws IOException {
    com.squareup.okhttp.Response result = client.newCall(queryRequest).execute();
    result.body().close();
    return result;
  }

  /**
   * How fast can we execute get commands synchronously using Feign?
   */
  @Benchmark
  public Response query_feignUsingOkHttp() {
    return okFeign.query();
  }
}
apache-2.0
UweTrottmann/QuickDic-Dictionary
src/com/hughes/android/dictionary/engine/NormalizeComparator.java
1351
// Copyright 2011 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.hughes.android.dictionary.engine; import java.util.Comparator; import com.ibm.icu.text.Transliterator; public class NormalizeComparator implements Comparator<String> { final Transliterator normalizer; final Comparator<Object> comparator; public NormalizeComparator(final Transliterator normalizer, final Comparator<Object> comparator) { this.normalizer = normalizer; this.comparator = comparator; } @Override public int compare(final String s1, final String s2) { final String n1 = normalizer.transform(s1); final String n2 = normalizer.transform(s2); final int cn = comparator.compare(n1, n2); if (cn != 0) { return cn; } return comparator.compare(s1, s2); } }
apache-2.0
mrluo735/cas-5.1.0
core/cas-server-core-audit/src/main/java/org/apereo/cas/audit/spi/config/CasCoreAuditConfiguration.java
11120
package org.apereo.cas.audit.spi.config;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apereo.cas.audit.spi.CredentialsAsFirstParameterResourceResolver;
import org.apereo.cas.audit.spi.DefaultDelegatingAuditTrailManager;
import org.apereo.cas.audit.spi.DelegatingAuditTrailManager;
import org.apereo.cas.audit.spi.MessageBundleAwareResourceResolver;
import org.apereo.cas.audit.spi.PrincipalIdProvider;
import org.apereo.cas.audit.spi.ServiceResourceResolver;
import org.apereo.cas.audit.spi.ThreadLocalPrincipalResolver;
import org.apereo.cas.audit.spi.TicketAsFirstParameterResourceResolver;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.core.audit.AuditProperties;
import org.apereo.inspektr.audit.AuditTrailManagementAspect;
import org.apereo.inspektr.audit.AuditTrailManager;
import org.apereo.inspektr.audit.spi.AuditActionResolver;
import org.apereo.inspektr.audit.spi.AuditResourceResolver;
import org.apereo.inspektr.audit.spi.support.DefaultAuditActionResolver;
import org.apereo.inspektr.audit.spi.support.ReturnValueAsStringResourceResolver;
import org.apereo.inspektr.audit.support.Slf4jLoggingAuditTrailManager;
import org.apereo.inspektr.common.spi.PrincipalResolver;
import org.apereo.inspektr.common.web.ClientInfoThreadLocalFilter;
import org.aspectj.lang.JoinPoint;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.core.Ordered;
import org.springframework.webflow.execution.Event;

import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * This is {@link CasCoreAuditConfiguration}.
 *
 * <p>Wires the Inspektr audit machinery into CAS: the AOP aspect that
 * intercepts audited operations, the trail manager that persists audit
 * records, and the maps of action/resource resolvers keyed by the audit
 * annotation names used elsewhere in CAS. Most beans are guarded by
 * {@code @ConditionalOnMissingBean} so downstream modules can override
 * them by declaring a bean with the same name.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@Configuration("casCoreAuditConfiguration")
@EnableAspectJAutoProxy
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class CasCoreAuditConfiguration {

    /** Suffix appended to audit action names for failed outcomes. */
    private static final String AUDIT_ACTION_SUFFIX_FAILED = "_FAILED";

    @Autowired
    private CasConfigurationProperties casProperties;

    /**
     * The Inspektr aspect that performs the actual audit interception.
     * Note: the resolver maps and the principal resolver are obtained via
     * intra-class {@code @Bean} method calls, which Spring proxies so the
     * singleton beans are reused rather than re-created.
     */
    @Bean
    public AuditTrailManagementAspect auditTrailManagementAspect(@Qualifier("auditTrailManager")
                                                                 final AuditTrailManager auditTrailManager) {
        final AuditTrailManagementAspect aspect = new AuditTrailManagementAspect(
                casProperties.getAudit().getAppCode(),
                auditablePrincipalResolver(principalIdProvider()),
                Collections.singletonList(auditTrailManager),
                auditActionResolverMap(),
                auditResourceResolverMap());
        // Inverted flag: "ignore audit failures" in config means the aspect
        // must NOT fail the audited operation when auditing throws.
        aspect.setFailOnAuditFailures(!casProperties.getAudit().isIgnoreAuditFailures());
        return aspect;
    }

    /**
     * Default audit trail manager: logs audit records via SLF4J, wrapped in a
     * delegating manager so the destination can be swapped at runtime.
     */
    @ConditionalOnMissingBean(name = "auditTrailManager")
    @Bean
    public DelegatingAuditTrailManager auditTrailManager() {
        final Slf4jLoggingAuditTrailManager mgmr = new Slf4jLoggingAuditTrailManager();
        mgmr.setUseSingleLine(casProperties.getAudit().isUseSingleLine());
        mgmr.setEntrySeparator(casProperties.getAudit().getSinglelineSeparator());
        mgmr.setAuditFormat(casProperties.getAudit().getAuditFormat());
        return new DefaultDelegatingAuditTrailManager(mgmr);
    }

    /**
     * Registers the Inspektr servlet filter that captures client IP/server IP
     * per request into a thread-local, so audit records can include them.
     * Runs at highest precedence so the info is available to everything else.
     */
    @Bean
    public FilterRegistrationBean casClientInfoLoggingFilter() {
        final AuditProperties audit = casProperties.getAudit();

        final FilterRegistrationBean bean = new FilterRegistrationBean();
        bean.setFilter(new ClientInfoThreadLocalFilter());
        bean.setUrlPatterns(Collections.singleton("/*"));
        bean.setName("CAS Client Info Logging Filter");
        bean.setAsyncSupported(true);
        bean.setOrder(Ordered.HIGHEST_PRECEDENCE);

        final Map<String, String> initParams = new HashMap<>();
        // Optional overrides: read client/server address from an alternate
        // header (e.g. X-Forwarded-For behind a proxy) when configured.
        if (StringUtils.isNotBlank(audit.getAlternateClientAddrHeaderName())) {
            initParams.put(ClientInfoThreadLocalFilter.CONST_IP_ADDRESS_HEADER,
                    audit.getAlternateClientAddrHeaderName());
        }
        if (StringUtils.isNotBlank(audit.getAlternateServerAddrHeaderName())) {
            initParams.put(ClientInfoThreadLocalFilter.CONST_SERVER_IP_ADDRESS_HEADER,
                    audit.getAlternateServerAddrHeaderName());
        }
        initParams.put(ClientInfoThreadLocalFilter.CONST_USE_SERVER_HOST_ADDRESS,
                String.valueOf(audit.isUseServerHostAddress()));
        bean.setInitParameters(initParams);
        return bean;
    }

    /** Resolves authentication outcomes to "_SUCCESS" / "_FAILED" action suffixes. */
    @ConditionalOnMissingBean(name = "authenticationActionResolver")
    @Bean
    public AuditActionResolver authenticationActionResolver() {
        return new DefaultAuditActionResolver("_SUCCESS", AUDIT_ACTION_SUFFIX_FAILED);
    }

    /** Resolves ticket-creation outcomes to "_CREATED" / "_NOT_CREATED". */
    @ConditionalOnMissingBean(name = "ticketCreationActionResolver")
    @Bean
    public AuditActionResolver ticketCreationActionResolver() {
        return new DefaultAuditActionResolver("D", AUDIT_ACTION_SUFFIX_FAILED);
    }

    /**
     * Resolves ticket-validation outcomes. The success suffix "D" turns an
     * action like "...VALIDATE" into "...VALIDATED" — NOTE(review): looks
     * intentional but worth confirming against the audit record format.
     */
    @ConditionalOnMissingBean(name = "ticketValidationActionResolver")
    @Bean
    public AuditActionResolver ticketValidationActionResolver() {
        return new DefaultAuditActionResolver("D", AUDIT_ACTION_SUFFIX_FAILED);
    }

    /** Renders the audited method's return value as the audit resource string. */
    @ConditionalOnMissingBean(name = "returnValueResourceResolver")
    @Bean
    public AuditResourceResolver returnValueResourceResolver() {
        return new ReturnValueAsStringResourceResolver();
    }

    /**
     * Like {@link #returnValueResourceResolver()} but tolerates {@code null}
     * return values and renders Spring Webflow {@link Event} results as a
     * compact "event/timestamp/source" string.
     */
    @ConditionalOnMissingBean(name = "nullableReturnValueResourceResolver")
    @Bean
    public AuditResourceResolver nullableReturnValueResourceResolver() {
        return new AuditResourceResolver() {
            @Override
            public String[] resolveFrom(final JoinPoint joinPoint, final Object o) {
                if (o == null) {
                    // No return value: audit with an empty resource rather than failing.
                    return new String[0];
                }
                if (o instanceof Event) {
                    final Event event = Event.class.cast(o);
                    final String sourceName = event.getSource().getClass().getSimpleName();
                    final String result = new ToStringBuilder(event, ToStringStyle.NO_CLASS_NAME_STYLE)
                            .append("event", event.getId())
                            .append("timestamp", new Date(event.getTimestamp()))
                            .append("source", sourceName)
                            .toString();
                    return new String[]{result};
                }
                // Anything else: delegate to the standard return-value resolver.
                return returnValueResourceResolver().resolveFrom(joinPoint, o);
            }

            @Override
            public String[] resolveFrom(final JoinPoint joinPoint, final Exception e) {
                return returnValueResourceResolver().resolveFrom(joinPoint, e);
            }
        };
    }

    /**
     * Maps audit-annotation action-resolver names (used across CAS modules)
     * to the resolver implementations above. Keys must match the names used
     * in the {@code @Audit} annotations elsewhere in the codebase.
     */
    @ConditionalOnMissingBean(name = "auditActionResolverMap")
    @Bean
    public Map<String, AuditActionResolver> auditActionResolverMap() {
        final Map<String, AuditActionResolver> map = new HashMap<>();

        final AuditActionResolver resolver = authenticationActionResolver();
        map.put("AUTHENTICATION_RESOLVER", resolver);
        map.put("SAVE_SERVICE_ACTION_RESOLVER", resolver);
        map.put("CHANGE_PASSWORD_ACTION_RESOLVER", resolver);

        final AuditActionResolver defResolver = new DefaultAuditActionResolver();
        map.put("DESTROY_TICKET_GRANTING_TICKET_RESOLVER", defResolver);
        map.put("DESTROY_PROXY_GRANTING_TICKET_RESOLVER", defResolver);

        final AuditActionResolver cResolver = ticketCreationActionResolver();
        map.put("CREATE_PROXY_GRANTING_TICKET_RESOLVER", cResolver);
        map.put("GRANT_SERVICE_TICKET_RESOLVER", cResolver);
        map.put("GRANT_PROXY_TICKET_RESOLVER", cResolver);
        map.put("CREATE_TICKET_GRANTING_TICKET_RESOLVER", cResolver);
        map.put("TRUSTED_AUTHENTICATION_ACTION_RESOLVER", cResolver);

        map.put("AUTHENTICATION_EVENT_ACTION_RESOLVER",
                new DefaultAuditActionResolver("_TRIGGERED", StringUtils.EMPTY));

        final AuditActionResolver adResolver = new DefaultAuditActionResolver();
        map.put("ADAPTIVE_RISKY_AUTHENTICATION_ACTION_RESOLVER", adResolver);

        map.put("VALIDATE_SERVICE_TICKET_RESOLVER", ticketValidationActionResolver());
        return map;
    }

    /**
     * Maps audit-annotation resource-resolver names to implementations;
     * companion of {@link #auditActionResolverMap()}.
     */
    @ConditionalOnMissingBean(name = "auditResourceResolverMap")
    @Bean
    public Map<String, AuditResourceResolver> auditResourceResolverMap() {
        final Map<String, AuditResourceResolver> map = new HashMap<>();
        map.put("AUTHENTICATION_RESOURCE_RESOLVER", new CredentialsAsFirstParameterResourceResolver());
        map.put("CREATE_TICKET_GRANTING_TICKET_RESOURCE_RESOLVER", this.messageBundleAwareResourceResolver());
        map.put("CREATE_PROXY_GRANTING_TICKET_RESOURCE_RESOLVER", this.messageBundleAwareResourceResolver());
        map.put("DESTROY_TICKET_GRANTING_TICKET_RESOURCE_RESOLVER", this.ticketResourceResolver());
        map.put("DESTROY_PROXY_GRANTING_TICKET_RESOURCE_RESOLVER", this.ticketResourceResolver());
        map.put("GRANT_SERVICE_TICKET_RESOURCE_RESOLVER", new ServiceResourceResolver());
        map.put("GRANT_PROXY_TICKET_RESOURCE_RESOLVER", new ServiceResourceResolver());
        map.put("VALIDATE_SERVICE_TICKET_RESOURCE_RESOLVER", this.ticketResourceResolver());
        map.put("SAVE_SERVICE_RESOURCE_RESOLVER", returnValueResourceResolver());
        map.put("CHANGE_PASSWORD_RESOURCE_RESOLVER", returnValueResourceResolver());
        map.put("TRUSTED_AUTHENTICATION_RESOURCE_RESOLVER", returnValueResourceResolver());
        map.put("ADAPTIVE_RISKY_AUTHENTICATION_RESOURCE_RESOLVER", returnValueResourceResolver());
        map.put("AUTHENTICATION_EVENT_RESOURCE_RESOLVER", nullableReturnValueResourceResolver());
        return map;
    }

    /** Resolves the audited principal via a thread-local, using the id provider. */
    @ConditionalOnMissingBean(name = "auditablePrincipalResolver")
    @Bean
    public PrincipalResolver auditablePrincipalResolver(@Qualifier("principalIdProvider")
                                                        final PrincipalIdProvider principalIdProvider) {
        return new ThreadLocalPrincipalResolver(principalIdProvider);
    }

    /** Uses the first method argument (a ticket id) as the audit resource. */
    @ConditionalOnMissingBean(name = "ticketResourceResolver")
    @Bean
    public AuditResourceResolver ticketResourceResolver() {
        return new TicketAsFirstParameterResourceResolver();
    }

    /** Resource resolver that expands message-bundle codes in audit resources. */
    @ConditionalOnMissingBean(name = "messageBundleAwareResourceResolver")
    @Bean
    public AuditResourceResolver messageBundleAwareResourceResolver() {
        return new MessageBundleAwareResourceResolver();
    }

    /** Default (no-op) principal id provider; interface defaults apply. */
    @ConditionalOnMissingBean(name = "principalIdProvider")
    @Bean
    public PrincipalIdProvider principalIdProvider() {
        return new PrincipalIdProvider() {
        };
    }
}
apache-2.0
paulnguyen/data
nosql/mapdb/src/main/java/org/mapdb/UnsafeStuff.java
29307
package org.mapdb;

import java.io.DataInput;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;

import static java.lang.Long.rotateLeft;
import static org.mapdb.DataIO.PRIME64_1;
import static org.mapdb.DataIO.PRIME64_2;
import static org.mapdb.DataIO.PRIME64_3;
import static org.mapdb.DataIO.PRIME64_4;
import static org.mapdb.DataIO.PRIME64_5;

/**
 * Contains classes which use {@code sun.misc.Unsafe}.
 * This class will fail to compile on Android, to proceed just delete it and associated unit test.
 * It is not referenced directly, is only instantiated indirectly with reflection,
 * and MapDB will use other option.
 *
 * <p>All multi-byte reads/writes here assume a little-endian platform;
 * {@link #getUnsafe()} refuses to provide Unsafe on big-endian hardware,
 * in which case every entry point falls back to the portable DataIO path.
 */
class UnsafeStuff {

    static final Logger LOG = Logger.getLogger(UnsafeStuff.class.getName());

    // Null when Unsafe is unavailable (big-endian platform, Android, or a
    // restricted JVM); callers must check unsafeAvailable() or fall back.
    static final sun.misc.Unsafe UNSAFE = getUnsafe();

    /**
     * Obtains the {@code sun.misc.Unsafe} singleton via reflection on the
     * private {@code theUnsafe} field, or returns {@code null} when the
     * platform is big-endian or reflection is denied.
     */
    @SuppressWarnings("restriction")
    private static sun.misc.Unsafe getUnsafe() {
        if(ByteOrder.nativeOrder()!=ByteOrder.LITTLE_ENDIAN){
            LOG.log(Level.WARNING,"This is not Little Endian platform. Unsafe optimizations are disabled.");
            return null;
        }
        try {
            java.lang.reflect.Field singleoneInstanceField = sun.misc.Unsafe.class.getDeclaredField("theUnsafe");
            singleoneInstanceField.setAccessible(true);
            sun.misc.Unsafe ret = (sun.misc.Unsafe)singleoneInstanceField.get(null);
            return ret;
        } catch (Throwable e) {
            LOG.log(Level.WARNING,"Could not instantiate sun.misc.Unsafe. Fall back to DirectByteBuffer and other alternatives.",e);
            return null;
        }
    }

    // Array base offsets and element scales used for raw array access.
    // All are -1 when UNSAFE is null (and must then never be used).
    private static final long BYTE_ARRAY_OFFSET;
    private static final int BYTE_ARRAY_SCALE;
    private static final long INT_ARRAY_OFFSET;
    private static final int INT_ARRAY_SCALE;
    private static final long SHORT_ARRAY_OFFSET;
    private static final int SHORT_ARRAY_SCALE;
    private static final long CHAR_ARRAY_OFFSET;
    private static final int CHAR_ARRAY_SCALE;

    static {
        BYTE_ARRAY_OFFSET = UNSAFE==null?-1:UNSAFE.arrayBaseOffset(byte[].class);
        BYTE_ARRAY_SCALE = UNSAFE==null?-1:UNSAFE.arrayIndexScale(byte[].class);
        INT_ARRAY_OFFSET = UNSAFE==null?-1:UNSAFE.arrayBaseOffset(int[].class);
        INT_ARRAY_SCALE = UNSAFE==null?-1:UNSAFE.arrayIndexScale(int[].class);
        SHORT_ARRAY_OFFSET = UNSAFE==null?-1:UNSAFE.arrayBaseOffset(short[].class);
        SHORT_ARRAY_SCALE = UNSAFE==null?-1:UNSAFE.arrayIndexScale(short[].class);
        CHAR_ARRAY_OFFSET = UNSAFE==null?-1:UNSAFE.arrayBaseOffset(char[].class);
        CHAR_ARRAY_SCALE = UNSAFE==null?-1:UNSAFE.arrayIndexScale(char[].class);
    }

    /** @return true when {@code sun.misc.Unsafe} could be acquired on this platform. */
    public static boolean unsafeAvailable(){
        return UNSAFE !=null;
    }

    /**
     * Off-heap {@link Volume} backed by raw memory addresses taken from
     * {@code DirectByteBuffer}s. The buffers are kept in {@link #buffers}
     * solely so the memory stays reachable and can be freed; all reads and
     * writes go through the cached addresses.
     *
     * <p>Values are stored big-endian: put/get Long/Int reverse bytes around
     * the little-endian Unsafe access.
     */
    static final class UnsafeVolume extends Volume {

        // Cached array base offset
        private static final long ARRAY_BASE_OFFSET = UNSAFE ==null?-1 : UNSAFE.arrayBaseOffset(byte[].class);;

        public static final VolumeFactory FACTORY = new VolumeFactory() {
            @Override
            public Volume makeVolume(String file, boolean readOnly, boolean fileLockDisable,
                                     int sliceShift, long initSize, boolean fixedSize) {
                // file/readOnly/locking arguments are ignored: this volume is in-memory only.
                return new UnsafeVolume(0,sliceShift, initSize);
            }
        };

        public static boolean unsafeAvailable(){
            return UNSAFE !=null;
        }

        // This number limits the number of bytes to copy per call to Unsafe's
        // copyMemory method. A limit is imposed to allow for safepoint polling
        // during a large copy
        static final long UNSAFE_COPY_THRESHOLD = 1024L * 1024L;

        /** Bulk-copies {@code length} bytes from a heap byte[] to an absolute address, chunked for safepoints. */
        static void copyFromArray(byte[] src, long srcPos, long dstAddr, long length) {
            long offset = ARRAY_BASE_OFFSET + srcPos;
            while (length > 0) {
                long size = (length > UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : length;
                UNSAFE.copyMemory(src, offset, null, dstAddr, size);
                length -= size;
                offset += size;
                dstAddr += size;
            }
        }

        /** Bulk-copies {@code length} bytes from an absolute address into a heap byte[], chunked for safepoints. */
        static void copyToArray(long srcAddr, byte[] dst, long dstPos, long length) {
            long offset = ARRAY_BASE_OFFSET + dstPos;
            while (length > 0) {
                long size = (length > UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : length;
                UNSAFE.copyMemory(null, srcAddr, dst, offset, size);
                length -= size;
                srcAddr += size;
                offset += size;
            }
        }

        // Slice table: addresses[i] is the base address of slice i; buffers[i]
        // pins the backing DirectByteBuffer. Both arrays are volatile and are
        // only replaced (never mutated in place) under growLock.
        protected volatile long[] addresses= new long[0];
        protected volatile sun.nio.ch.DirectBuffer[] buffers = new sun.nio.ch.DirectBuffer[0];

        protected final long sizeLimit;       // max size in bytes; only enforced when hasLimit
        protected final boolean hasLimit;     // true when sizeLimit > 0
        protected final int sliceShift;       // log2 of slice size
        protected final int sliceSizeModMask; // sliceSize - 1, masks offset within a slice
        protected final int sliceSize;        // bytes per slice (1 << sliceShift)

        protected final ReentrantLock growLock = new ReentrantLock(CC.FAIR_LOCKS);

        public UnsafeVolume() {
            this(0, CC.VOLUME_PAGE_SHIFT,0L);
        }

        /**
         * @param sizeLimit maximum volume size in bytes, or 0 for unlimited
         * @param sliceShift log2 of the slice size
         * @param initSize initial size to pre-allocate, or 0 for none
         */
        public UnsafeVolume(long sizeLimit, int sliceShift, long initSize) {
            this.sizeLimit = sizeLimit;
            this.hasLimit = sizeLimit>0;
            this.sliceShift = sliceShift;
            this.sliceSize = 1<< sliceShift;
            this.sliceSizeModMask = sliceSize -1;
            if(initSize!=0)
                ensureAvailable(initSize);
        }

        /**
         * Grows the slice table so that {@code offset} bytes are addressable.
         * Fast path is a lock-free length check; growth happens under growLock
         * with a copy-and-swap of both arrays. Newly allocated slices are
         * zero-filled before publication.
         */
        @Override
        public void ensureAvailable(long offset) {
            offset=Fun.roundUp(offset,1L<<sliceShift);
            //*LOG*/ System.err.printf("tryAvailabl: offset:%d\n",offset);
            //*LOG*/ System.err.flush();

            if(hasLimit && offset>sizeLimit) {
                //return false;
                throw new IllegalAccessError("too big"); //TODO size limit here
            }

            int slicePos = (int) (offset >>> sliceShift);

            //check for most common case, this is already mapped
            if (slicePos < addresses.length){
                return;
            }

            growLock.lock();
            try{
                //check second time
                if(slicePos<= addresses.length)
                    return; //already enough space

                int oldSize = addresses.length;
                long[] addresses2 = addresses;
                sun.nio.ch.DirectBuffer[] buffers2 = buffers;

                int newSize = slicePos;
                addresses2 = Arrays.copyOf(addresses2, newSize);
                buffers2 = Arrays.copyOf(buffers2, newSize);

                for(int pos=oldSize;pos<addresses2.length;pos++) {
                    //take address from DirectByteBuffer so allocated memory can be released by GC
                    sun.nio.ch.DirectBuffer buf = (sun.nio.ch.DirectBuffer) ByteBuffer.allocateDirect(sliceSize);
                    long address = buf.address();
                    //TODO is cleanup necessary here?
                    //PERF speedup by copying an array
                    for(long i=0;i<sliceSize;i+=8) {
                        UNSAFE.putLong(address + i, 0L);
                    }
                    buffers2[pos]=buf;
                    addresses2[pos]=address;
                }

                addresses = addresses2;
                buffers = buffers2;
            }finally{
                growLock.unlock();
            }
        }

        @Override
        public void truncate(long size) {
            //TODO support truncate
        }

        /** Writes a big-endian long at {@code offset} (bytes reversed around the LE Unsafe write). */
        @Override
        public void putLong(long offset, long value) {
            //*LOG*/ System.err.printf("putLong: offset:%d, value:%d\n",offset,value);
            //*LOG*/ System.err.flush();
            value = Long.reverseBytes(value);
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            UNSAFE.putLong(address + offset, value);
        }

        /** Writes a big-endian int at {@code offset}. */
        @Override
        public void putInt(long offset, int value) {
            //*LOG*/ System.err.printf("putInt: offset:%d, value:%d\n",offset,value);
            //*LOG*/ System.err.flush();
            value = Integer.reverseBytes(value);
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            UNSAFE.putInt(address + offset, value);
        }

        @Override
        public void putByte(long offset, byte value) {
            //*LOG*/ System.err.printf("putByte: offset:%d, value:%d\n",offset,value);
            //*LOG*/ System.err.flush();
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            UNSAFE.putByte(address + offset, value);
        }

        /**
         * Copies {@code srcSize} bytes from {@code src} into the volume.
         * NOTE(review): assumes the write does not cross a slice boundary;
         * cross-slice writes must go through putDataOverlap.
         */
        @Override
        public void putData(long offset, byte[] src, int srcPos, int srcSize) {
//            for(int pos=srcPos;pos<srcPos+srcSize;pos++){
//                UNSAFE.putByte(address+offset+pos,src[pos]);
//            }
            //*LOG*/ System.err.printf("putData: offset:%d, srcLen:%d, srcPos:%d, srcSize:%d\n",offset, src.length, srcPos, srcSize);
            //*LOG*/ System.err.flush();
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            copyFromArray(src, srcPos, address+offset, srcSize);
        }

        /** Copies the remaining bytes of {@code buf} (position..limit) into the volume, byte by byte. */
        @Override
        public void putData(long offset, ByteBuffer buf) {
            //*LOG*/ System.err.printf("putData: offset:%d, bufPos:%d, bufLimit:%d:\n",offset,buf.position(), buf.limit());
            //*LOG*/ System.err.flush();
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            for(int pos=buf.position();pos<buf.limit();pos++){
                UNSAFE.putByte(address + offset + pos, buf.get(pos));
            }
        }

        /** Reads a big-endian long from {@code offset}. */
        @Override
        public long getLong(long offset) {
            //*LOG*/ System.err.printf("getLong: offset:%d \n",offset);
            //*LOG*/ System.err.flush();
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            long l = UNSAFE.getLong(address +offset);
            return Long.reverseBytes(l);
        }

        /** Reads a big-endian int from {@code offset}. */
        @Override
        public int getInt(long offset) {
            //*LOG*/ System.err.printf("getInt: offset:%d\n",offset);
            //*LOG*/ System.err.flush();
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            int i = UNSAFE.getInt(address +offset);
            return Integer.reverseBytes(i);
        }

        @Override
        public byte getByte(long offset) {
            //*LOG*/ System.err.printf("getByte: offset:%d\n",offset);
            //*LOG*/ System.err.flush();
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            return UNSAFE.getByte(address +offset);
        }

        /**
         * Returns a DataInput reading directly from the slice memory.
         * {@code size} is unused here; the caller is trusted not to read
         * past the requested range (and not across a slice boundary).
         */
        @Override
        public DataInput getDataInput(long offset, int size) {
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            return new DataInputUnsafe(address, (int) offset);
        }

        @Override
        public void getData(long offset, byte[] bytes, int bytesPos, int size) {
            final long address = addresses[((int) (offset >>> sliceShift))];
            offset = offset & sliceSizeModMask;
            copyToArray(address+offset,bytes, bytesPos,size);
        }

//        @Override
//        public DataInput2 getDataInput(long offset, int size) {
//            //*LOG*/ System.err.printf("getDataInput: offset:%d, size:%d\n",offset,size);
//            //*LOG*/ System.err.flush();
//            byte[] dst = new byte[size];
////            for(int pos=0;pos<size;pos++){
////                dst[pos] = UNSAFE.getByte(address +offset+pos);
////            }
//
//            final long address = addresses[((int) (offset >>> sliceShift))];
//            offset = offset & sliceSizeModMask;
//
//            copyToArray(address+offset, dst, ARRAY_BASE_OFFSET,
//                    0,
//                    size);
//
//            return new DataInput2(dst);
//        }

        /** Write that may span slice boundaries: split per slice, else delegate to putData. */
        @Override
        public void putDataOverlap(long offset, byte[] data, int pos, int len) {
            boolean overlap = (offset>>>sliceShift != (offset+len)>>>sliceShift);

            if(overlap){
                while(len>0){
                    long addr = addresses[((int) (offset >>> sliceShift))];
                    long pos2 = offset&sliceSizeModMask;

                    long toPut = Math.min(len,sliceSize - pos2);

                    //System.arraycopy(data, pos, b, pos2, toPut);
                    copyFromArray(data,pos,addr+pos2,toPut);

                    pos+=toPut;
                    len -=toPut;
                    offset+=toPut;
                }
            }else{
                putData(offset,data,pos,len);
            }
        }

        /** Read that may span slice boundaries: copy to a heap array, else read in place. */
        @Override
        public DataInput getDataInputOverlap(long offset, int size) {
            boolean overlap = (offset>>>sliceShift != (offset+size)>>>sliceShift);
            if(overlap){
                byte[] bb = new byte[size];
                final int origLen = size;
                while(size>0){
                    long addr = addresses[((int) (offset >>> sliceShift))];
                    long pos = offset&sliceSizeModMask;

                    long toPut = Math.min(size,sliceSize - pos);

                    //System.arraycopy(b, pos, bb, origLen - size, toPut);
                    copyToArray(addr+pos,bb,origLen-size,toPut);

                    size -=toPut;
                    offset+=toPut;
                }
                return new DataIO.DataInputByteArray(bb);
            }else{
                //return mapped buffer
                return getDataInput(offset,size);
            }
        }

        /**
         * Releases the off-heap memory by cleaning every DirectByteBuffer.
         * After close, any concurrent reader dereferencing the old addresses
         * is undefined behavior — callers must ensure quiescence first.
         */
        @Override
        public void close() {
            closed = true;
            sun.nio.ch.DirectBuffer[] buf2 = buffers;
            buffers=null;
            addresses = null;

            for(sun.nio.ch.DirectBuffer buf:buf2){
                buf.cleaner().clean();
            }
        }

        @Override
        public void sync() {
            // in-memory volume: nothing to flush
        }

        @Override
        public int sliceSize() {
            return sliceSize;
        }

        @Override
        public boolean isSliced() {
            return true;
        }

        @Override
        public long length() {
            return 1L*addresses.length*sliceSize;
        }

        @Override
        public File getFile() {
            return null;
        }

        @Override
        public boolean getFileLocked() {
            return false;
        }

        @Override
        public void clear(long startOffset, long endOffset) {
            while(startOffset<endOffset){
                putByte(startOffset++, (byte) 0);
                //PERF use batch copy
            }
        }

        /**
         * DataInput reading straight from an absolute memory address.
         * {@code pos2} is the absolute cursor; {@code baseAdress} is kept
         * only to convert back to a relative position in get/setPos.
         * Performs no bounds checking.
         */
        public static final class DataInputUnsafe implements DataIO.DataInputInternal{

            protected final long baseAdress;
            protected long pos2;

            public DataInputUnsafe(long baseAdress, int pos) {
                this.baseAdress = baseAdress;
                this.pos2 = baseAdress+pos;
            }

            @Override
            public int getPos() {
                return (int) (pos2-baseAdress);
            }

            @Override
            public void setPos(int pos) {
                this.pos2 = baseAdress+pos;
            }

            @Override
            public byte[] internalByteArray() {
                return null;
            }

            @Override
            public ByteBuffer internalByteBuffer() {
                return null;
            }

            @Override
            public void close() {
            }

            /** Varint decode: 7 data bits per byte, continuation while the sign bit is set. */
            @Override
            public long unpackLong() throws IOException {
                sun.misc.Unsafe UNSAFE = UnsafeStuff.UNSAFE;
                long pos = pos2;
                long ret = 0;
                byte v;
                do{
                    //$DELAY$
                    v = UNSAFE.getByte(pos++);
                    ret = (ret<<7 ) | (v & 0x7F);
                }while(v<0);
                pos2 = pos;
                return ret;
            }

            /** Varint decode into int; same wire format as {@link #unpackLong()}. */
            @Override
            public int unpackInt() throws IOException {
                sun.misc.Unsafe UNSAFE = UnsafeStuff.UNSAFE;
                long pos = pos2;
                int ret = 0;
                byte v;
                do{
                    //$DELAY$
                    v = UNSAFE.getByte(pos++);
                    ret = (ret<<7 ) | (v & 0x7F);
                }while(v<0);
                pos2 = pos;
                return ret;
            }

            /** Reads {@code size} varints, each a delta added to the previous value. */
            @Override
            public long[] unpackLongArrayDeltaCompression(final int size) throws IOException {
                sun.misc.Unsafe UNSAFE = UnsafeStuff.UNSAFE;
                long[] ret = new long[size];
                long pos2_ = pos2;
                long prev=0;
                byte v;
                for(int i=0;i<size;i++){
                    long r = 0;
                    do {
                        //$DELAY$
                        v = UNSAFE.getByte(pos2_++);
                        r = (r << 7) | (v & 0x7F);
                    } while (v < 0);
                    prev+=r;
                    ret[i]=prev;
                }
                pos2 = pos2_;
                return ret;
            }

            @Override
            public void unpackLongArray(long[] array, int start, int end) {
                sun.misc.Unsafe UNSAFE = UnsafeStuff.UNSAFE;
                long pos2_ = pos2;
                long ret;
                byte v;
                for(;start<end;start++) {
                    ret = 0;
                    do {
                        //$DELAY$
                        v = UNSAFE.getByte(pos2_++);
                        ret = (ret << 7) | (v & 0x7F);
                    } while (v < 0);
                    array[start] = ret;
                }
                pos2 = pos2_;
            }

            @Override
            public void unpackIntArray(int[] array, int start, int end) {
                sun.misc.Unsafe UNSAFE = UnsafeStuff.UNSAFE;
                long pos2_ = pos2;
                int ret;
                byte v;
                for(;start<end;start++) {
                    ret = 0;
                    do {
                        //$DELAY$
                        v = UNSAFE.getByte(pos2_++);
                        ret = (ret << 7) | (v & 0x7F);
                    } while (v < 0);
                    array[start]=ret;
                }
                pos2 = pos2_;
            }

            @Override
            public void readFully(byte[] b) throws IOException {
                copyToArray(pos2, b, 0, b.length);
                pos2+=b.length;
            }

            @Override
            public void readFully(byte[] b, int off, int len) throws IOException {
                copyToArray(pos2,b,off,len);
                pos2+=len;
            }

            @Override
            public int skipBytes(int n) throws IOException {
                pos2+=n;
                return n;
            }

            @Override
            public boolean readBoolean() throws IOException {
                return readByte()==1;
            }

            @Override
            public byte readByte() throws IOException {
                return UnsafeStuff.UNSAFE.getByte(pos2++);
            }

            @Override
            public int readUnsignedByte() throws IOException {
                return UnsafeStuff.UNSAFE.getByte(pos2++) & 0xFF;
            }

            /** Big-endian short, assembled byte by byte. */
            @Override
            public short readShort() throws IOException {
                //$DELAY$
                return (short)((readByte() << 8) | (readByte() & 0xff));
            }

            @Override
            public int readUnsignedShort() throws IOException {
                //$DELAY$
                return readChar();
            }

            @Override
            public char readChar() throws IOException {
                //$DELAY$
                return (char)( ((readByte() & 0xff) << 8) | ((readByte() & 0xff)));
            }

            /** Big-endian int: LE Unsafe read with bytes reversed. */
            @Override
            public int readInt() throws IOException {
                int ret = UnsafeStuff.UNSAFE.getInt(pos2);
                pos2+=4;
                return Integer.reverseBytes(ret);
            }

            /** Big-endian long: LE Unsafe read with bytes reversed. */
            @Override
            public long readLong() throws IOException {
                long ret = UnsafeStuff.UNSAFE.getLong(pos2);
                pos2+=8;
                return Long.reverseBytes(ret);
            }

            @Override
            public float readFloat() throws IOException {
                return Float.intBitsToFloat(readInt());
            }

            @Override
            public double readDouble() throws IOException {
                return Double.longBitsToDouble(readLong());
            }

            @Override
            public String readLine() throws IOException {
                return readUTF();
            }

            /** Custom string format: varint length followed by one varint per char (not modified UTF-8). */
            @Override
            public String readUTF() throws IOException {
                final int len = unpackInt();
                char[] b = new char[len];
                for (int i = 0; i < len; i++)
                    //$DELAY$
                    b[i] = (char) unpackInt();
                return new String(b);
            }
        }
    }

    /**
     * <p>
     * Calculates XXHash64 from given {@code byte[]} buffer.
     * </p><p>
     * This code comes from <a href="https://github.com/jpountz/lz4-java">LZ4-Java</a> created
     * by Adrien Grand.
     * </p><p>
     * Falls back to the portable {@code DataIO.hash} when Unsafe is unavailable.
     * </p>
     *
     * @param buf to calculate hash from
     * @param off offset to start calculation from
     * @param len length of data to calculate hash
     * @param seed hash seed
     * @return XXHash.
     */
    public static long hash(byte[] buf, int off, int len, long seed) {
        if (UNSAFE==null){
            return DataIO.hash(buf,off,len,seed);
        }

        if (len < 0) {
            throw new IllegalArgumentException("lengths must be >= 0");
        }
        if(off<0 || off>buf.length || off+len<0 || off+len>buf.length){
            throw new IndexOutOfBoundsException();
        }

        final int end = off + len;
        long h64;

        // Main loop: 4 parallel 64-bit accumulators over 32-byte stripes.
        if (len >= 32) {
            final int limit = end - 32;
            long v1 = seed + PRIME64_1 + PRIME64_2;
            long v2 = seed + PRIME64_2;
            long v3 = seed + 0;
            long v4 = seed - PRIME64_1;
            do {
                v1 += readLongLE(buf, off) * PRIME64_2;
                v1 = rotateLeft(v1, 31);
                v1 *= PRIME64_1;
                off += 8;

                v2 += readLongLE(buf, off) * PRIME64_2;
                v2 = rotateLeft(v2, 31);
                v2 *= PRIME64_1;
                off += 8;

                v3 += readLongLE(buf, off) * PRIME64_2;
                v3 = rotateLeft(v3, 31);
                v3 *= PRIME64_1;
                off += 8;

                v4 += readLongLE(buf, off) * PRIME64_2;
                v4 = rotateLeft(v4, 31);
                v4 *= PRIME64_1;
                off += 8;
            } while (off <= limit);

            // Merge the four accumulators.
            h64 = rotateLeft(v1, 1) + rotateLeft(v2, 7) + rotateLeft(v3, 12) + rotateLeft(v4, 18);

            v1 *= PRIME64_2; v1 = rotateLeft(v1, 31); v1 *= PRIME64_1; h64 ^= v1;
            h64 = h64 * PRIME64_1 + PRIME64_4;

            v2 *= PRIME64_2; v2 = rotateLeft(v2, 31); v2 *= PRIME64_1; h64 ^= v2;
            h64 = h64 * PRIME64_1 + PRIME64_4;

            v3 *= PRIME64_2; v3 = rotateLeft(v3, 31); v3 *= PRIME64_1; h64 ^= v3;
            h64 = h64 * PRIME64_1 + PRIME64_4;

            v4 *= PRIME64_2; v4 = rotateLeft(v4, 31); v4 *= PRIME64_1; h64 ^= v4;
            h64 = h64 * PRIME64_1 + PRIME64_4;
        } else {
            h64 = seed + PRIME64_5;
        }

        h64 += len;

        // Tail: remaining 8-byte, 4-byte, then single-byte chunks.
        while (off <= end - 8) {
            long k1 = readLongLE(buf, off);
            k1 *= PRIME64_2;
            k1 = rotateLeft(k1, 31);
            k1 *= PRIME64_1;
            h64 ^= k1;
            h64 = rotateLeft(h64, 27) * PRIME64_1 + PRIME64_4;
            off += 8;
        }

        if (off <= end - 4) {
            h64 ^= (readIntLE(buf, off) & 0xFFFFFFFFL) * PRIME64_1;
            h64 = rotateLeft(h64, 23) * PRIME64_2 + PRIME64_3;
            off += 4;
        }

        while (off < end) {
            h64 ^= (buf[off] & 0xFF) * PRIME64_5;
            h64 = rotateLeft(h64, 11) * PRIME64_1;
            ++off;
        }

        // Final avalanche.
        h64 ^= h64 >>> 33;
        h64 *= PRIME64_2;
        h64 ^= h64 >>> 29;
        h64 *= PRIME64_3;
        h64 ^= h64 >>> 32;

        return h64;
    }

    /** Unchecked little-endian long read from a byte[] (byte-indexed offset). */
    public static long readLongLE(byte[] src, int srcOff) {
        return UNSAFE.getLong(src, BYTE_ARRAY_OFFSET + srcOff);
    }

    /** Unchecked little-endian int read from a byte[] (byte-indexed offset). */
    public static int readIntLE(byte[] src, int srcOff) {
        return UNSAFE.getInt(src, BYTE_ARRAY_OFFSET + srcOff);
    }

    /**
     * <p>
     * Calculates XXHash64 from given {@code char[]} buffer.
     * </p><p>
     * This code comes from <a href="https://github.com/jpountz/lz4-java">LZ4-Java</a> created
     * by Adrien Grand.
     * </p><p>
     * Same scheme as the byte[] variant, but offsets/lengths count chars
     * (2 bytes each), so stripes are 16 chars and steps are 4/2/1 chars.
     * </p>
     *
     * @param buf to calculate hash from
     * @param off offset to start calculation from
     * @param len length of data to calculate hash
     * @param seed hash seed
     * @return XXHash.
     */
    public static long hash(char[] buf, int off, int len, long seed) {
        if (UNSAFE==null){
            return DataIO.hash(buf,off,len,seed);
        }

        if (len < 0) {
            throw new IllegalArgumentException("lengths must be >= 0");
        }
        if(off<0 || off>buf.length || off+len<0 || off+len>buf.length){
            throw new IndexOutOfBoundsException();
        }

        final int end = off + len;
        long h64;

        if (len >= 16) {
            final int limit = end - 16;
            long v1 = seed + PRIME64_1 + PRIME64_2;
            long v2 = seed + PRIME64_2;
            long v3 = seed + 0;
            long v4 = seed - PRIME64_1;
            do {
                v1 += readLongLE(buf, off) * PRIME64_2;
                v1 = rotateLeft(v1, 31);
                v1 *= PRIME64_1;
                off += 4;

                v2 += readLongLE(buf, off) * PRIME64_2;
                v2 = rotateLeft(v2, 31);
                v2 *= PRIME64_1;
                off += 4;

                v3 += readLongLE(buf, off) * PRIME64_2;
                v3 = rotateLeft(v3, 31);
                v3 *= PRIME64_1;
                off += 4;

                v4 += readLongLE(buf, off) * PRIME64_2;
                v4 = rotateLeft(v4, 31);
                v4 *= PRIME64_1;
                off += 4;
            } while (off <= limit);

            h64 = rotateLeft(v1, 1) + rotateLeft(v2, 7) + rotateLeft(v3, 12) + rotateLeft(v4, 18);

            v1 *= PRIME64_2; v1 = rotateLeft(v1, 31); v1 *= PRIME64_1; h64 ^= v1;
            h64 = h64 * PRIME64_1 + PRIME64_4;

            v2 *= PRIME64_2; v2 = rotateLeft(v2, 31); v2 *= PRIME64_1; h64 ^= v2;
            h64 = h64 * PRIME64_1 + PRIME64_4;

            v3 *= PRIME64_2; v3 = rotateLeft(v3, 31); v3 *= PRIME64_1; h64 ^= v3;
            h64 = h64 * PRIME64_1 + PRIME64_4;

            v4 *= PRIME64_2; v4 = rotateLeft(v4, 31); v4 *= PRIME64_1; h64 ^= v4;
            h64 = h64 * PRIME64_1 + PRIME64_4;
        } else {
            h64 = seed + PRIME64_5;
        }

        h64 += len;

        while (off <= end - 4) {
            long k1 = readLongLE(buf, off);
            k1 *= PRIME64_2;
            k1 = rotateLeft(k1, 31);
            k1 *= PRIME64_1;
            h64 ^= k1;
            h64 = rotateLeft(h64, 27) * PRIME64_1 + PRIME64_4;
            off += 4;
        }

        if (off <= end - 2) {
            h64 ^= (readIntLE(buf, off) & 0xFFFFFFFFL) * PRIME64_1;
            h64 = rotateLeft(h64, 23) * PRIME64_2 + PRIME64_3;
            off += 2;
        }

        while (off < end) {
            h64 ^= (readCharLE(buf,off) & 0xFFFF) * PRIME64_5;
            h64 = rotateLeft(h64, 11) * PRIME64_1;
            ++off;
        }

        h64 ^= h64 >>> 33;
        h64 *= PRIME64_2;
        h64 ^= h64 >>> 29;
        h64 *= PRIME64_3;
        h64 ^= h64 >>> 32;

        return h64;
    }

    /** Unchecked little-endian long read from a char[] (char-indexed offset). */
    public static long readLongLE(char[] src, int srcOff) {
        return UNSAFE.getLong(src, CHAR_ARRAY_OFFSET + srcOff * CHAR_ARRAY_SCALE);
    }

    /** Unchecked little-endian int read from a char[] (char-indexed offset). */
    public static int readIntLE(char[] src, int srcOff) {
        return UNSAFE.getInt(src, CHAR_ARRAY_OFFSET + srcOff * CHAR_ARRAY_SCALE);
    }

    /** Unchecked char read from a char[] (char-indexed offset). */
    public static char readCharLE(char[] src, int srcOff) {
        return UNSAFE.getChar(src, CHAR_ARRAY_OFFSET + srcOff*CHAR_ARRAY_SCALE);
    }
}
apache-2.0
fnkhan/second
src/main/java/org/openflow/protocol/statistics/OFVendorStatistics.java
2798
/******************************************************************************* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ /** * Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior * University * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. 
**/ package org.openflow.protocol.statistics; import org.jboss.netty.buffer.ChannelBuffer; /** * The base class for vendor implemented statistics * * @author David Erickson (daviderickson@cs.stanford.edu) */ public class OFVendorStatistics implements OFStatistics { protected int vendor; protected byte[] body; // non-message fields protected int length = 0; @Override public void readFrom(final ChannelBuffer data) { this.vendor = data.readInt(); if (this.body == null) { this.body = new byte[this.length - 4]; } data.readBytes(this.body); } @Override public void writeTo(final ChannelBuffer data) { data.writeInt(this.vendor); if (this.body != null) { data.writeBytes(this.body); } } @Override public int hashCode() { final int prime = 457; int result = 1; result = prime * result + this.vendor; return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof OFVendorStatistics)) { return false; } final OFVendorStatistics other = (OFVendorStatistics) obj; if (this.vendor != other.vendor) { return false; } return true; } @Override public int getLength() { return this.length; } public void setLength(final int length) { this.length = length; } }
apache-2.0
p473lr/i-urge-mafia-gear
UCF HSPT Team/src/y2007/roboto.java
3189
import java.io.*;   // File / IOException
import java.util.*; // Scanner

/**
 * UCF HSPT 2007 "roboto": reads circuit designs from roboto.in and reports,
 * for each design, whether a zero-resistance (short-circuit) path exists
 * between junction 0 and junction 1.
 */
class roboto {

    // Acts as infinity for this problem. The maximum junction resistance is
    // 500, so one billion can never be matched by a real path, and even
    // INFINITY + INFINITY still fits in an int without overflow.
    public static final int INFINITY = 1000000000;

    /**************************************************************************/

    public static void main(String[] args) throws IOException {
        // Create input file scanner
        Scanner inpt = new Scanner(new File("roboto.in"));

        // Number of circuit designs to be tested
        int numDesigns = inpt.nextInt();
        for (int i = 0; i < numDesigns; i++) {
            // Number of junctions in this design
            int numJunctions = inpt.nextInt();

            // Junction-to-junction resistance matrix
            int[][] junctions = new int[numJunctions][numJunctions];
            for (int j = 0; j < numJunctions; j++)
                for (int k = 0; k < numJunctions; k++) {
                    junctions[j][k] = inpt.nextInt();
                    // -1 denotes "no connection": model it as an effectively
                    // infinite resistance so it never wins a shortest path.
                    if (junctions[j][k] == -1)
                        junctions[j][k] = INFINITY;
                }

            // All-pairs shortest paths via Floyd's algorithm
            int[][] pathLengths = junctions.length == 0 ? junctions : floyd(junctions);

            // A zero-resistance path between the two terminals (junctions 0
            // and 1) means the circuit is shorted.
            if (pathLengths[0][1] == 0)
                System.out.println("Circuit Design #" + (i+1) + ": " + "Back to the drawing board");
            else
                System.out.println("Circuit Design #" + (i+1) + ": " + "No more hedgehog troubles");
        }

        // All done now, so we can close the input file scanner
        inpt.close();
    }

    /**************************************************************************/
    /* Floyd's Algorithm - finds the shortest path from each junction to each
       other junction by repeatedly trying to shorten the path from i to j
       through an intermediate junction k.
       See http://en.wikipedia.org/wiki/Floyd%27s_algorithm
       The returned matrix is such that adj[i][j] = length of the shortest
       path from i to j.

       BUG FIX: the original used junctions.clone(), which is a SHALLOW copy
       of an int[][] — the inner row arrays were shared, so the caller's
       matrix was destroyed despite the comment claiming a copy was made.
       Each row must be cloned individually. */
    public static int[][] floyd(int[][] junctions) {
        int n = junctions.length;

        // Deep copy: clone every row so the input matrix is left untouched.
        int[][] adj = new int[n][];
        for (int i = 0; i < n; i++)
            adj[i] = junctions[i].clone();

        // Start finding shortest paths
        for (int k = 0; k < n; k++)
            for (int i = 0; i < n; i++)
                for (int j = 0; j < n; j++)
                    if (adj[i][k] + adj[k][j] < adj[i][j])
                        adj[i][j] = adj[i][k] + adj[k][j];
        return adj;
    }
}
apache-2.0
nigelsmall/geoff
src/test/java/com/nigelsmall/geoff/reader/GeoffReaderTest.java
838
package com.nigelsmall.geoff.reader;

import org.junit.Test;

import java.io.StringReader;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Verifies that {@link GeoffReader} reports the correct line/column position
 * when it encounters malformed input.
 *
 * @author mh
 * @since 16.01.14
 */
public class GeoffReaderTest {

    @Test
    public void testReadInvalidData() throws Exception {
        try {
            new GeoffReader(new StringReader("(A {b})")).readSubgraph();
            // Fix: without this fail() the test silently passed when no
            // exception was thrown at all.
            fail("Expected a GeoffReaderException for malformed property map");
        } catch (GeoffReaderException gre) {
            assertEquals("Unexpected character at line 0 column 5", gre.getMessage());
        }
    }

    @Test
    public void testReadInvalidDataOnSecondLine() throws Exception {
        try {
            new GeoffReader(new StringReader("(A {\"b\":123})\n(A {")).readSubgraph();
            // Fix: same missing-fail() defect as above.
            fail("Expected a GeoffReaderException for truncated second line");
        } catch (GeoffReaderException gre) {
            assertEquals("Unexpected character at line 1 column 4", gre.getMessage());
        }
    }
}
apache-2.0
madjam/copycat-1
server/src/main/java/io/atomix/copycat/server/Commit.java
5010
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.atomix.copycat.server;

import io.atomix.copycat.client.Command;
import io.atomix.copycat.client.Operation;
import io.atomix.copycat.client.Query;
import io.atomix.copycat.client.session.Session;

import java.time.Instant;

/**
 * Wrapper around a committed Raft state machine operation and its metadata.
 * <p>
 * Every {@link Command} or {@link Query} applied to a {@link StateMachine} arrives wrapped in a
 * commit, which exposes the operation's position in the replicated log, the {@link #time()} at
 * which the leader logged it, and the client {@link Session} that submitted it.
 * <p>
 * When a state machine is finished with a commit it must call either {@link #clean()} or
 * {@link #close()}; failing to do so is a bug and Copycat will log a warning.
 *
 * @see Command
 * @see Query
 * @see Session
 * @see Instant
 *
 * @author <a href="http://github.com/kuujo">Jordan Halterman</a>
 */
public interface Commit<T extends Operation> extends AutoCloseable {

    /**
     * Returns the commit index.
     * <p>
     * For {@link Command} commits this is the index at which the operation was written to the Raft
     * log; it is unique and identical across all servers in the cluster.
     * <p>
     * For {@link Query} commits the index may simply reflect the last committed log index, since
     * queries are never written to disk — query indexes therefore cannot be assumed unique.
     *
     * @return The commit index.
     * @throws IllegalStateException If the commit is {@link #close() closed} or was {@link #clean() cleaned}
     */
    long index();

    /**
     * Returns the session that submitted the operation.
     * <p>
     * The returned {@link Session} represents the client session whose operation produced this
     * commit and may be used to {@link Session#publish(String, Object)} event messages back to
     * that client.
     *
     * @return The session that created the commit.
     * @throws IllegalStateException If the commit is {@link #close() closed} or was {@link #clean() cleaned}
     */
    Session session();

    /**
     * Returns the time at which the operation was committed.
     * <p>
     * This is the instant at which the leader wrote the operation to its log. Because the instant
     * is replicated through the Raft consensus algorithm it is consistent across all servers and
     * progresses monotonically, making it safe for time-dependent logic such as key expiry.
     * <p>
     * State machines should <em>never</em> consult {@code System} time; rely on {@link Commit}
     * times or the {@link StateMachineExecutor} for time-based behavior instead.
     *
     * @return The commit time.
     * @throws IllegalStateException If the commit is {@link #close() closed} or was {@link #clean() cleaned}
     */
    Instant time();

    /**
     * Returns the commit type.
     * <p>
     * This is the {@link java.lang.Class} that the committed operation's {@link Object#getClass()}
     * method returns.
     *
     * @return The commit type.
     * @throws IllegalStateException If the commit is {@link #close() closed} or was {@link #clean() cleaned}
     */
    Class<T> type();

    /**
     * Returns the operation submitted by the user.
     *
     * @return The operation submitted by the user.
     * @throws IllegalStateException If the commit is {@link #close() closed} or was {@link #clean() cleaned}
     */
    T operation();

    /**
     * Cleans the commit from the underlying log.
     * <p>
     * A cleaned commit is removed from the log and may be permanently erased from disk at some
     * arbitrary later point. Cleaning also closes the commit, so {@link #close()} need not be
     * called afterwards.
     */
    void clean();

    /**
     * Closes the commit.
     * <p>
     * Once closed, the commit may be recycled and must no longer be accessed by the closer.
     */
    @Override
    void close();
}
apache-2.0
graphium-project/graphium
api/src/main/java/at/srfg/graphium/api/controller/GlobalExceptionController.java
5859
/** * Copyright © 2017 Salzburg Research Forschungsgesellschaft (graphium@salzburgresearch.at) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package at.srfg.graphium.api.controller; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.expression.AccessException; import org.springframework.http.HttpStatus; import org.springframework.web.HttpMediaTypeNotAcceptableException; import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.view.json.MappingJackson2JsonView; import at.srfg.graphium.api.exceptions.InconsistentStateException; import at.srfg.graphium.api.exceptions.NotificationException; import at.srfg.graphium.api.exceptions.ResourceNotFoundException; import at.srfg.graphium.api.exceptions.ValidationException; import at.srfg.graphium.core.exception.GraphAlreadyExistException; import at.srfg.graphium.core.exception.GraphNotExistsException; import at.srfg.graphium.core.exception.SubscriptionFailedException; import at.srfg.graphium.io.adapter.exception.XInfoNotSupportedException; import at.srfg.graphium.io.exception.WaySegmentSerializationException; /** * Created by shennebe on 30.08.2016. 
*/ @ControllerAdvice public class GlobalExceptionController { private static Logger log = LoggerFactory .getLogger(GlobalExceptionController.class); @ResponseStatus(value = HttpStatus.NOT_IMPLEMENTED, reason = "XInfo Not Implemented") @ExceptionHandler(XInfoNotSupportedException.class) public void handleXInfoNotSupportedException(XInfoNotSupportedException ex) { log.warn("This external info is not supported",ex); } @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Resource not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleResourceNotException(ResourceNotFoundException ex) { log.warn("Requested resource could not be found",ex); } @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Graph does not exists") @ExceptionHandler(GraphNotExistsException.class) public void handleGraphNotExistsException(GraphNotExistsException ex) { log.warn("Graph does not exists"); } @ResponseStatus(value = HttpStatus.CONFLICT, reason = "Subscription failed") @ExceptionHandler(SubscriptionFailedException.class) public void handleSubscriptionFailedException(SubscriptionFailedException ex) { log.warn("Subscription failed"); } @ResponseStatus(value = HttpStatus.UNSUPPORTED_MEDIA_TYPE, reason = "Mediatype is not acceptable") @ExceptionHandler(HttpMediaTypeNotAcceptableException.class) public void handleMediaTypeNotAcceptableException(HttpMediaTypeNotAcceptableException ex) { log.warn("Requested media type is not acceptable"); } @ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR, reason = "Serialization of waysegment data failed") @ExceptionHandler(WaySegmentSerializationException.class) public void handleWaySegmentSerializationException(WaySegmentSerializationException e) { log.warn("Serialization of waysegment data failed"); } @ResponseStatus(value = HttpStatus.CONFLICT, reason = "Inconsistent state in Segment Ids") @ExceptionHandler(InconsistentStateException.class) public void handleInconsistentStateException(InconsistentStateException ex) { 
log.error("Error occured during request",ex); } @ResponseStatus(value = HttpStatus.CONFLICT, reason = "Graph already exists") @ExceptionHandler(GraphAlreadyExistException.class) public void handleGraphAlreadyExistsException(GraphAlreadyExistException ex) { log.error("Error occured during request",ex); } @ResponseStatus(value = HttpStatus.BAD_GATEWAY, reason = "Satellite Graphiums could not be notified") @ExceptionHandler(NotificationException.class) public void handleNotificationException(NotificationException ex) { log.error("Error occured during request",ex); } @ResponseStatus(value = HttpStatus.UNPROCESSABLE_ENTITY, reason = "Validation of changed parameter failed") @ExceptionHandler(ValidationException.class) public void handleValidationException(ValidationException ex) { log.warn("Invalid request"); } @ResponseStatus(value = HttpStatus.FORBIDDEN, reason = "Access to unknown or restricted field change requested") @ExceptionHandler(AccessException.class) public void handleAccessException(AccessException ex) { log.error("Access to unknown or restricted field requested"); } @ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR) @ExceptionHandler(Exception.class) public ModelAndView handleInconsistentStateException(Exception ex) { log.error("Error occured during request",ex); return this.adaptException2MAV(ex); } private ModelAndView adaptException2MAV(Exception ex) { ModelAndView mav = new ModelAndView(); mav.setView(new MappingJackson2JsonView()); mav.addObject("exception",ex.getClass().getSimpleName()); mav.addObject("message", ex.getMessage()); return mav; } }
apache-2.0
willemsrb/simple-jmx
src/test/java/nl/futureedge/simple/jmx/client/ClientProviderTest.java
774
package nl.futureedge.simple.jmx.client;

import java.net.MalformedURLException;

import javax.management.remote.JMXConnector;
import javax.management.remote.JMXServiceURL;

import nl.futureedge.simple.jmx.SimpleJmx;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for {@link ClientProvider}: a connector is produced for the SimpleJmx
 * protocol, and an unknown protocol is rejected with a MalformedURLException.
 */
public class ClientProviderTest {

    @Test
    public void test() throws MalformedURLException {
        final JMXServiceURL serviceUrl = new JMXServiceURL(SimpleJmx.PROTOCOL, "localhost", 0);
        final JMXConnector connector = new ClientProvider().newJMXConnector(serviceUrl, null);
        Assert.assertNotNull(connector);
    }

    @Test(expected = MalformedURLException.class)
    public void testInvalidProtocol() throws MalformedURLException {
        final JMXServiceURL serviceUrl = new JMXServiceURL("invalid", "localhost", 0);
        new ClientProvider().newJMXConnector(serviceUrl, null);
    }
}
apache-2.0
subchen/jetbrick-template-1x
src/main/java/jetbrick/template/JetEngine.java
11248
/**
 * jetbrick-template
 * http://subchen.github.io/jetbrick-template/
 *
 * Copyright 2010-2014 Guoqiang Chen. All rights reserved.
 * Email: subchen@gmail.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jetbrick.template;

import java.io.File;
import java.lang.annotation.Annotation;
import java.util.*;
import jetbrick.template.JetConfig.CompileStrategy;
import jetbrick.template.compiler.JavaCompiler;
import jetbrick.template.compiler.JetTemplateClassLoader;
import jetbrick.template.parser.VariableResolver;
import jetbrick.template.resource.Resource;
import jetbrick.template.resource.SourceCodeResource;
import jetbrick.template.resource.loader.CompiledClassResourceLoader;
import jetbrick.template.resource.loader.ResourceLoader;
import jetbrick.template.utils.*;
import jetbrick.template.utils.finder.AnnotationClassLookupUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Central entry point of the template engine: holds the configuration, the
 * resource/template caches, the class loader for compiled templates and the
 * lazily created java compiler.
 */
public class JetEngine {
    private static final Logger log = LoggerFactory.getLogger(JetEngine.class);

    public static final String VERSION = Version.getVersion(JetEngine.class);

    private JetConfig config;
    private ResourceLoader resourceLoader;
    private VariableResolver resolver;
    private JetTemplateClassLoader classLoader;
    private ConcurrentResourceCache resourceCache;
    private ConcurrentTemplateCache templateCache;
    // FIX: must be volatile for the double-checked locking in getJavaCompiler();
    // without it another thread can observe a partially constructed compiler.
    private volatile JavaCompiler javaCompiler;
    private JetSecurityManager securityManager;
    private JetGlobalVariables globalVariables;

    /** Creates an engine configured from the default classpath config file. */
    public static JetEngine create() {
        return new JetEngine(new JetConfig().loadClasspath(JetConfig.DEFAULT_CONFIG_FILE));
    }

    /** Creates an engine configured from the given config file. */
    public static JetEngine create(File configFile) {
        return new JetEngine(new JetConfig().loadFile(configFile));
    }

    /** Creates an engine configured from the given properties. */
    public static JetEngine create(Properties properties) {
        return new JetEngine(new JetConfig().load(properties));
    }

    // Provided for use by JetWebEngine.
    protected JetEngine() {
    }

    protected JetEngine(JetConfig config) {
        load(config);
    }

    /** Initializes all engine components from the given configuration. */
    protected void load(JetConfig config) {
        this.config = config.build();
        this.resolver = createVariableResolver();
        this.resourceLoader = createResourceLoader();
        this.classLoader = new JetTemplateClassLoader(config);
        this.resourceCache = new ConcurrentResourceCache();
        this.templateCache = new ConcurrentTemplateCache();
        this.securityManager = createSecurityManager();
        this.globalVariables = createGlobalVariables();

        if (config.getCompileStrategy() == CompileStrategy.precompile) {
            startPreCompileTask();
        }
    }

    /**
     * Checks whether a resource exists for the given absolute path.
     */
    public boolean lookupResource(String name) {
        name = PathUtils.getStandardizedName(name);
        return resourceCache.get(name) != null;
    }

    /**
     * Returns the resource for the given absolute path.
     *
     * @throws ResourceNotFoundException if no such resource exists
     */
    public Resource getResource(String name) throws ResourceNotFoundException {
        name = PathUtils.getStandardizedName(name);
        Resource resource = resourceCache.get(name);
        if (resource == null) {
            throw new ResourceNotFoundException(name);
        }
        return resource;
    }

    /**
     * Returns the template for the given absolute path, reloading it if the
     * underlying resource changed.
     *
     * @throws ResourceNotFoundException if no such resource exists
     */
    public JetTemplate getTemplate(String name) throws ResourceNotFoundException {
        name = PathUtils.getStandardizedName(name);
        JetTemplate template = templateCache.get(name);
        template.checkLastModified();
        return template;
    }

    /**
     * Creates a new template object directly from source code.
     *
     * <p>The returned object is not cached internally; each call re-parses and
     * re-compiles the source. Add external caching if needed.</p>
     *
     * @since 1.1.0
     */
    public JetTemplate createTemplate(String source) {
        Resource resource = new SourceCodeResource(source);
        return new JetTemplate(this, resource);
    }

    protected VariableResolver getVariableResolver() {
        return resolver;
    }

    protected JetTemplateClassLoader getClassLoader() {
        return classLoader;
    }

    protected JavaCompiler getJavaCompiler() {
        if (javaCompiler == null) {
            // Lazily created when compileStrategy == none, which avoids
            // requiring javax.tools.JavaCompiler to be present at all.
            // Double-checked locking — safe only because the field is volatile.
            synchronized (this) {
                if (javaCompiler == null) {
                    javaCompiler = JavaCompiler.create(this.classLoader, config);
                }
            }
        }
        return javaCompiler;
    }

    public JetSecurityManager getSecurityManager() {
        return securityManager;
    }

    public JetGlobalVariables getGlobalVariables() {
        return globalVariables;
    }

    /**
     * Returns the engine configuration.
     */
    public JetConfig getConfig() {
        return config;
    }

    /**
     * Returns the template engine version.
     */
    public String getVersion() {
        return VERSION;
    }

    /** Builds the variable resolver from the configured imports. */
    private VariableResolver createVariableResolver() {
        VariableResolver resolver = new VariableResolver();
        for (String pkg : config.getImportPackages()) {
            resolver.addImportPackage(pkg);
        }
        for (String klassName : config.getImportClasses()) {
            resolver.addImportClass(klassName);
        }
        for (String method : config.getImportMethods()) {
            resolver.addMethodClass(method);
        }
        for (String function : config.getImportFunctions()) {
            resolver.addFunctionClass(function);
        }
        for (String tag : config.getImportTags()) {
            resolver.addTagClass(tag);
        }
        for (String variable : config.getImportVariables()) {
            // Each entry is "<type definition> <identifier>"; split at the
            // last space so types containing spaces (e.g. generics) survive.
            int pos = variable.lastIndexOf(" ");
            String defination = variable.substring(0, pos);
            String id = variable.substring(pos + 1);
            resolver.addGlobalVariable(defination, id);
        }

        if (config.isImportAutoscan()) {
            log.info("Starting to autoscan the JetMethods, JetFunctions, JetTags implements...");
            autoScanClassImplements(resolver);
        }

        return resolver;
    }

    // Auto-scan the classpath for annotated method/function/tag classes.
    @SuppressWarnings({ "unchecked" })
    private void autoScanClassImplements(VariableResolver resolver) {
        List<String> scanPackages = config.getImportAutoscanPackages();
        //@formatter:off
        Class<? extends Annotation>[] annoClasses = (Class<? extends Annotation>[]) new Class<?>[] {
            JetAnnotations.Methods.class, JetAnnotations.Functions.class, JetAnnotations.Tags.class,
        };
        //@formatter:on

        long ts = System.currentTimeMillis();
        Set<Class<?>> klasses = AnnotationClassLookupUtils.getClasses(scanPackages, true, annoClasses, config.isImportAutoscanSkiperrors());
        ts = System.currentTimeMillis() - ts;
        log.info("Successfully to find {} classes, cost {} ms.", klasses.size(), ts);

        for (Class<?> klass : klasses) {
            for (Annotation anno : klass.getAnnotations()) {
                if (anno instanceof JetAnnotations.Methods) {
                    resolver.addMethodClass(klass);
                } else if (anno instanceof JetAnnotations.Functions) {
                    resolver.addFunctionClass(klass);
                } else if (anno instanceof JetAnnotations.Tags) {
                    resolver.addTagClass(klass);
                }
            }
        }
    }

    /** Instantiates the configured resource loader. */
    private ResourceLoader createResourceLoader() {
        try {
            ResourceLoader resourceLoader;
            if (config.getCompileStrategy() == CompileStrategy.none) {
                // In this case a dedicated loader serves precompiled classes.
                resourceLoader = new CompiledClassResourceLoader();
            } else {
                resourceLoader = (ResourceLoader) config.getTemplateLoader().newInstance();
            }
            resourceLoader.initialize(this, config.getTemplatePath(), config.getInputEncoding());
            return resourceLoader;
        } catch (Exception e) {
            throw ExceptionUtils.uncheck(e);
        }
    }

    /** Instantiates the configured security manager, or null if none. */
    private JetSecurityManager createSecurityManager() {
        Class<?> klass = config.getSecurityManager();
        if (klass == null) {
            return null;
        }
        try {
            JetSecurityManager manager = (JetSecurityManager) klass.newInstance();
            manager.initialize(this);
            return manager;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Instantiates the configured global variables provider, or null if none. */
    private JetGlobalVariables createGlobalVariables() {
        Class<?> klass = config.getGlobalVariables();
        if (klass == null) {
            return null;
        }
        try {
            return (JetGlobalVariables) klass.newInstance();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Starts the background precompile thread.
    private void startPreCompileTask() {
        Thread thread = new Thread() {
            @Override
            public void run() {
                List<String> resources = resourceLoader.loadAll();
                log.info("Find {} templates to precompile ...", resources.size());

                int succ = 0;
                int fail = 0;
                long ts = System.currentTimeMillis();
                for (String name : resources) {
                    try {
                        getTemplate(name);
                        succ++;
                    } catch (Exception e) {
                        fail++;
                        log.error("precompile error.", e);
                    }
                }
                ts = System.currentTimeMillis() - ts;
                log.info("Completed precompile templates in {} ms, success = {}, failure = {}.", ts, succ, fail);
            }
        };
        thread.setName("JetPreCompiler");
        thread.setDaemon(true);
        thread.start();
    }

    private class ConcurrentResourceCache extends ConcurrentCache<String, Resource> {
        @Override
        protected Resource doGetValue(String name) {
            return JetEngine.this.resourceLoader.load(name);
        }
    }

    private class ConcurrentTemplateCache extends ConcurrentCache<String, JetTemplate> {
        @Override
        protected JetTemplate doGetValue(String name) {
            Resource resource = JetEngine.this.getResource(name);
            return new JetTemplate(JetEngine.this, resource);
        }
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-codepipeline/src/main/java/com/amazonaws/services/codepipeline/model/transform/DisableStageTransitionResultJsonUnmarshaller.java
1710
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.codepipeline.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.codepipeline.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * DisableStageTransitionResult JSON Unmarshaller.
 *
 * The result has no payload fields, so unmarshalling simply produces an empty
 * result object regardless of the context content.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DisableStageTransitionResultJsonUnmarshaller implements Unmarshaller<DisableStageTransitionResult, JsonUnmarshallerContext> {

    public DisableStageTransitionResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        return new DisableStageTransitionResult();
    }

    private static DisableStageTransitionResultJsonUnmarshaller instance;

    // Lazily created shared instance; benign race — construction is cheap and
    // stateless, matching the original generated code's behavior.
    public static DisableStageTransitionResultJsonUnmarshaller getInstance() {
        if (instance == null) {
            instance = new DisableStageTransitionResultJsonUnmarshaller();
        }
        return instance;
    }
}
apache-2.0
wgcv/Programacion-Distribuida
Graficos Paradigma Cliente Servidor/KnockKnock/src/knock2/ClientViewer.java
382
package knock2;

import javax.swing.JFrame;

/**
 * Entry point that launches the knock-knock client window.
 */
public class ClientViewer {

    /**
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        final JFrame clientFrame = new ClientFrame();
        clientFrame.setTitle("Client Knock Knock");
        clientFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        clientFrame.setVisible(true);
    }
}
apache-2.0
tomzhang/clocker
docker/src/main/java/brooklyn/networking/sdn/SdnProviderImpl.java
13424
/* * Copyright 2014-2015 by Cloudsoft Corporation Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brooklyn.networking.sdn; import java.net.InetAddress; import java.util.Collection; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import brooklyn.config.render.RendererHints; import brooklyn.entity.Entity; import brooklyn.entity.Group; import brooklyn.entity.basic.BasicGroup; import brooklyn.entity.basic.BasicStartableImpl; import brooklyn.entity.basic.DelegateEntity; import brooklyn.entity.basic.DynamicGroup; import brooklyn.entity.basic.Entities; import brooklyn.entity.basic.EntityLocal; import brooklyn.entity.basic.EntityPredicates; import brooklyn.entity.container.docker.DockerContainer; import brooklyn.entity.container.docker.DockerHost; import brooklyn.entity.container.docker.DockerInfrastructure; import brooklyn.entity.group.AbstractMembershipTrackingPolicy; import brooklyn.entity.group.DynamicCluster; import brooklyn.entity.proxying.EntitySpec; import brooklyn.event.feed.ConfigToAttributes; import brooklyn.location.Location; import brooklyn.networking.VirtualNetwork; import brooklyn.networking.location.NetworkProvisioningExtension; import brooklyn.policy.PolicySpec; import brooklyn.util.collections.QuorumCheck.QuorumChecks; import brooklyn.util.net.Cidr; import com.google.common.base.Optional; import com.google.common.base.Predicates; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableMap; 
import com.google.common.collect.Iterables; import com.google.common.collect.Maps; public abstract class SdnProviderImpl extends BasicStartableImpl implements SdnProvider{ private static final Logger LOG = LoggerFactory.getLogger(SdnProvider.class); /** Held while obtaining new IP addresses for containers. */ protected transient final Object addressMutex = new Object[0]; /** Held while adding or removing new {@link SdnAgent} entities on hosts. */ protected transient final Object hostMutex = new Object[0]; /** Mutex for provisioning new networks */ protected transient final Object networkMutex = new Object[0]; @Override public void init() { LOG.info("Starting SDN provider id {}", getId()); super.init(); ConfigToAttributes.apply(this, DOCKER_INFRASTRUCTURE); BasicGroup agents = addChild(EntitySpec.create(BasicGroup.class) .configure(BasicGroup.RUNNING_QUORUM_CHECK, QuorumChecks.atLeastOneUnlessEmpty()) .configure(BasicGroup.UP_QUORUM_CHECK, QuorumChecks.atLeastOneUnlessEmpty()) .displayName("SDN Host Agents")); BasicGroup networks = addChild(EntitySpec.create(BasicGroup.class) .configure(BasicGroup.RUNNING_QUORUM_CHECK, QuorumChecks.atLeastOneUnlessEmpty()) .configure(BasicGroup.UP_QUORUM_CHECK, QuorumChecks.atLeastOneUnlessEmpty()) .configure(BasicGroup.MEMBER_DELEGATE_CHILDREN, true) .displayName("SDN Managed Networks")); BasicGroup applications = addChild(EntitySpec.create(BasicGroup.class) .configure(BasicGroup.RUNNING_QUORUM_CHECK, QuorumChecks.atLeastOneUnlessEmpty()) .configure(BasicGroup.UP_QUORUM_CHECK, QuorumChecks.atLeastOneUnlessEmpty()) .displayName("SDN Networked Applications")); if (Entities.isManaged(this)) { Entities.manage(agents); Entities.manage(networks); Entities.manage(applications); } setAttribute(SDN_AGENTS, agents); setAttribute(SDN_NETWORKS, networks); setAttribute(SDN_APPLICATIONS, applications); synchronized (addressMutex) { setAttribute(ALLOCATED_IPS, 0); setAttribute(ALLOCATED_ADDRESSES, Maps.<String, InetAddress>newConcurrentMap()); 
setAttribute(SUBNET_ADDRESS_ALLOCATIONS, Maps.<String, Integer>newConcurrentMap()); } synchronized (networkMutex) { setAttribute(ALLOCATED_NETWORKS, 0); setAttribute(SUBNETS, Maps.<String, Cidr>newConcurrentMap()); } setAttribute(SUBNET_ENTITIES, Maps.<String, VirtualNetwork>newConcurrentMap()); setAttribute(CONTAINER_ADDRESSES, HashMultimap.<String, InetAddress>create()); } @Override public InetAddress getNextAgentAddress(String agentId) { synchronized (addressMutex) { Cidr cidr = config().get(AGENT_CIDR); Integer allocated = getAttribute(ALLOCATED_IPS); InetAddress next = cidr.addressAtOffset(allocated + 1); setAttribute(ALLOCATED_IPS, allocated + 1); Map<String, InetAddress> addresses = getAttribute(ALLOCATED_ADDRESSES); addresses.put(agentId, next); setAttribute(ALLOCATED_ADDRESSES, addresses); return next; } } @Override public InetAddress getNextContainerAddress(String subnetId) { Cidr cidr = getSubnetCidr(subnetId); synchronized (addressMutex) { Map<String, Integer> allocations = getAttribute(SUBNET_ADDRESS_ALLOCATIONS); Integer allocated = allocations.get(subnetId); if (allocated == null) allocated = 1; InetAddress next = cidr.addressAtOffset(allocated + 1); allocations.put(subnetId, allocated + 1); setAttribute(SUBNET_ADDRESS_ALLOCATIONS, allocations); return next; } } @Override public Cidr getNextSubnetCidr(String networkId) { synchronized (networkMutex) { Cidr networkCidr = getNextSubnetCidr(); recordSubnetCidr(networkId, networkCidr); return networkCidr; } } @Override public Cidr getNextSubnetCidr() { synchronized (networkMutex) { Cidr networkCidr = config().get(CONTAINER_NETWORK_CIDR); Integer networkSize = config().get(CONTAINER_NETWORK_SIZE); Integer allocated = getAttribute(ALLOCATED_NETWORKS); InetAddress baseAddress = networkCidr.addressAtOffset(allocated * (1 << (32 - networkSize))); Cidr subnetCidr = new Cidr(baseAddress.getHostAddress() + "/" + networkSize); LOG.debug("Allocated {} from {} for subnet #{}", new Object[] { subnetCidr, networkCidr, 
allocated }); setAttribute(ALLOCATED_NETWORKS, allocated + 1); return subnetCidr; } } @Override public void recordSubnetCidr(String networkId, Cidr subnetCidr) { synchronized (networkMutex) { Map<String, Cidr> subnets = getAttribute(SdnProvider.SUBNETS); subnets.put(networkId, subnetCidr); setAttribute(SdnProvider.SUBNETS, subnets); } } @Override public void recordSubnetCidr(String networkId, Cidr subnetCidr, int allocated) { synchronized (networkMutex) { recordSubnetCidr(networkId, subnetCidr); Map<String, Integer> allocations = getAttribute(SUBNET_ADDRESS_ALLOCATIONS); allocations.put(networkId, allocated); setAttribute(SUBNET_ADDRESS_ALLOCATIONS, allocations); } } @Override public Cidr getSubnetCidr(String networkId) { synchronized (networkMutex) { Map<String, Cidr> subnets = getAttribute(SdnProvider.SUBNETS); return subnets.get(networkId); } } @Override public Object getNetworkMutex() { return networkMutex; } @Override public DynamicCluster getDockerHostCluster() { return config().get(DOCKER_INFRASTRUCTURE).getAttribute(DockerInfrastructure.DOCKER_HOST_CLUSTER); } @Override public Group getAgents() { return getAttribute(SDN_AGENTS); } public static class MemberTrackingPolicy extends AbstractMembershipTrackingPolicy { @Override protected void onEntityEvent(EventType type, Entity member) { ((SdnProviderImpl) super.entity).onHostChanged(member); } } @Override public void start(Collection<? 
extends Location> locations) { setAttribute(SERVICE_UP, Boolean.FALSE); // Add ouserlves as an extension to the Docker location DockerInfrastructure infrastructure = (DockerInfrastructure) config().get(DOCKER_INFRASTRUCTURE); infrastructure.getDynamicLocation().addExtension(NetworkProvisioningExtension.class, this); super.start(locations); addHostTrackerPolicy(); setAttribute(SERVICE_UP, Boolean.TRUE); } @Override public void stop() { setAttribute(SERVICE_UP, Boolean.FALSE); super.stop(); } @Override public void rebind() { super.rebind(); // TODO implement custom SDN provider rebind logic } protected void addHostTrackerPolicy() { Group hosts = getDockerHostCluster(); if (hosts != null) { MemberTrackingPolicy hostTrackerPolicy = addPolicy(PolicySpec.create(MemberTrackingPolicy.class) .displayName("Docker host tracker") .configure("group", hosts)); LOG.info("Added policy {} to {}, during start", hostTrackerPolicy, this); } } private void onHostAdded(Entity item) { synchronized (hostMutex) { if (item instanceof DockerHost) { addHost((DockerHost) item); } } } private void onHostRemoved(Entity item) { synchronized (hostMutex) { if (item instanceof DockerHost) { removeHost((DockerHost) item); } } } private void onHostChanged(Entity item) { synchronized (hostMutex) { boolean exists = getDockerHostCluster().hasMember(item); Boolean running = item.getAttribute(SERVICE_UP); if (exists && running && item.getAttribute(SdnAgent.SDN_AGENT) == null) { onHostAdded(item); } else if (!exists) { onHostRemoved(item); } } } @Override public Map<String, Cidr> listManagedNetworkAddressSpace() { return ImmutableMap.copyOf(getAttribute(SUBNETS)); } @Override public void provisionNetwork(VirtualNetwork network) { // Call provisionNetwork on one of the agents to create it SdnAgent agent = (SdnAgent) (getAgents().getMembers().iterator().next()); String networkId = agent.provisionNetwork(network); // Create a DynamicGroup with all attached entities EntitySpec<DynamicGroup> networkSpec = 
EntitySpec.create(DynamicGroup.class) .configure(DynamicGroup.ENTITY_FILTER, Predicates.and( Predicates.not(Predicates.or(Predicates.instanceOf(DockerContainer.class), Predicates.instanceOf(DelegateEntity.class))), EntityPredicates.attributeEqualTo(DockerContainer.DOCKER_INFRASTRUCTURE, getAttribute(DOCKER_INFRASTRUCTURE)), SdnAttributes.attachedToNetwork(networkId))) .configure(DynamicGroup.MEMBER_DELEGATE_CHILDREN, true) .displayName(network.getDisplayName()); DynamicGroup subnet = getAttribute(SDN_APPLICATIONS).addMemberChild(networkSpec); Entities.manage(subnet); ((EntityLocal) subnet).setAttribute(VirtualNetwork.NETWORK_ID, networkId); ((EntityLocal) network).setAttribute(VirtualNetwork.NETWORKED_APPLICATIONS, subnet); getAttribute(SDN_NETWORKS).addMember(network); } @Override public void deallocateNetwork(VirtualNetwork network) { String networkId = network.getAttribute(VirtualNetwork.NETWORK_ID); Optional<Entity> found = Iterables.tryFind(getAttribute(SDN_APPLICATIONS).getMembers(), EntityPredicates.attributeEqualTo(VirtualNetwork.NETWORK_ID, networkId)); if (found.isPresent()) { Entity group = found.get(); getAttribute(SDN_APPLICATIONS).removeMember(group); getAttribute(SDN_APPLICATIONS).removeChild(group); Entities.unmanage(group); } else { LOG.warn("Cannot find group containing {} network entities", networkId); } getAttribute(SDN_NETWORKS).removeMember(network); // TODO actually deprovision the network if possible? } static { RendererHints.register(SDN_AGENTS, new RendererHints.NamedActionWithUrl("Open", DelegateEntity.EntityUrl.entityUrl())); RendererHints.register(SDN_NETWORKS, new RendererHints.NamedActionWithUrl("Open", DelegateEntity.EntityUrl.entityUrl())); RendererHints.register(SDN_APPLICATIONS, new RendererHints.NamedActionWithUrl("Open", DelegateEntity.EntityUrl.entityUrl())); RendererHints.register(DOCKER_INFRASTRUCTURE, new RendererHints.NamedActionWithUrl("Open", DelegateEntity.EntityUrl.entityUrl())); } }
apache-2.0
ruebot/fcrepo4
fcrepo-http-commons/src/main/java/org/fcrepo/http/commons/exceptionhandlers/NodeNotFoundExceptionMapper.java
1544
/* * Copyright 2015 DuraSpace, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fcrepo.http.commons.exceptionhandlers; import static javax.ws.rs.core.Response.status; import static javax.ws.rs.core.Response.Status.NOT_FOUND; import static org.slf4j.LoggerFactory.getLogger; import javax.ws.rs.core.Response; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; import org.modeshape.jcr.cache.NodeNotFoundException; import org.slf4j.Logger; /** * Translate Modeshape jcr NodeNotFoundException to HTTP 404 Not Found * * @author lsitu */ @Provider public class NodeNotFoundExceptionMapper implements ExceptionMapper<NodeNotFoundException> { private static final Logger LOGGER = getLogger(NodeNotFoundExceptionMapper.class); @Override public Response toResponse(final NodeNotFoundException e) { LOGGER.debug( "NodeNotFoundException intercepted by NodeNotFoundExceptionMapper: \n", e); return status(NOT_FOUND).build(); } }
apache-2.0
Esri/arcgis-runtime-samples-java
map/display-map/src/main/java/com/esri/samples/display_map/DisplayMapSample.java
2471
/* * Copyright 2017 Esri. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.esri.samples.display_map; import javafx.application.Application; import javafx.scene.Scene; import javafx.scene.layout.StackPane; import javafx.stage.Stage; import com.esri.arcgisruntime.ArcGISRuntimeEnvironment; import com.esri.arcgisruntime.mapping.ArcGISMap; import com.esri.arcgisruntime.mapping.BasemapStyle; import com.esri.arcgisruntime.mapping.view.MapView; public class DisplayMapSample extends Application { private MapView mapView; @Override public void start(Stage stage) { try { // create stack pane and application scene StackPane stackPane = new StackPane(); Scene scene = new Scene(stackPane); // set title, size, and add scene to stage stage.setTitle("Display Map Sample"); stage.setWidth(800); stage.setHeight(700); stage.setScene(scene); stage.show(); // authentication with an API key or named user is required to access basemaps and other location services String yourAPIKey = System.getProperty("apiKey"); ArcGISRuntimeEnvironment.setApiKey(yourAPIKey); // create a map with the standard imagery basemap style ArcGISMap map = new ArcGISMap(BasemapStyle.ARCGIS_IMAGERY_STANDARD); // create a map view and set the map to it mapView = new MapView(); mapView.setMap(map); // add the map view to the stack pane stackPane.getChildren().addAll(mapView); } catch (Exception e) { // on any error, display the stack trace. 
e.printStackTrace(); } } /** * Stops and releases all resources used in application. */ @Override public void stop() { if (mapView != null) { mapView.dispose(); } } /** * Opens and runs application. * * @param args arguments passed to this application */ public static void main(String[] args) { Application.launch(args); } }
apache-2.0
jahlborn/sqlbuilder
src/main/java/com/healthmarketscience/sqlbuilder/dbspec/RejoinTable.java
3880
/*
Copyright (c) 2008 Health Market Science, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package com.healthmarketscience.sqlbuilder.dbspec;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * Utility class for using a table multiple times in the same query with
 * different aliases.  All Columns returned by this class will also use the
 * new alias.  All other methods return information from the original table.
 *
 * @author James Ahlborn
 */
public class RejoinTable implements Table {

  /** the original table */
  private Table _table;
  /** the new alias to use for this table */
  private String _alias;
  /** the wrapped columns from the original table */
  private List<RejoinColumn> _columns;

  public RejoinTable(Table table, String alias) {
    _table = table;
    _alias = alias;
    _columns = new ArrayList<RejoinColumn>(_table.getColumns().size());
    for(Column column : _table.getColumns()) {
      _columns.add(new RejoinColumn(column));
    }
  }

  public Table getOriginalTable() {
    return _table;
  }

  @Override
  public String getAlias() {
    return _alias;
  }

  @Override
  public String getTableNameSQL() {
    return _table.getTableNameSQL();
  }

  @Override
  public List<RejoinColumn> getColumns() {
    return _columns;
  }

  @Override
  public List<? extends Constraint> getConstraints() {
    return _table.getConstraints();
  }

  /**
   * Finds the RejoinColumn in this table with the given name.
   *
   * @param name the column name to match (may be {@code null}, which matches a
   *             column whose name is also {@code null})
   * @return the matching column, or {@code null} if none matches
   */
  public RejoinColumn findColumnByName(String name) {
    for(RejoinColumn col : getColumns()) {
      // Objects.equals is the null-safe equivalent of the original
      // "(a == b) || ((a != null) && a.equals(b))" comparison.
      if(Objects.equals(name, col.getColumnNameSQL())) {
        return col;
      }
    }
    return null;
  }

  /**
   * Finds the RejoinColumn in this table for the given original column.
   *
   * @param origCol the wrapped column instance (matched by identity)
   * @return the wrapping column, or {@code null} if this table does not wrap it
   */
  public RejoinColumn findColumn(Column origCol) {
    for(RejoinColumn col : getColumns()) {
      // Identity comparison is intentional: we look for the exact instance.
      if(origCol == col.getOriginalColumn()) {
        return col;
      }
    }
    return null;
  }

  @Override
  public String toString() {
    return "Rejoin: " + getOriginalTable().toString() + "(" + getAlias() + ")";
  }

  /**
   * Utility class which wraps a Column and returns a reference to the
   * RejoinTable instead of the original table.  All other methods return the
   * information from the original column.
   */
  @SuppressWarnings("deprecation")
  public class RejoinColumn implements Column {

    /** the original column object */
    private Column _column;

    private RejoinColumn(Column column) {
      _column = column;
    }

    public Column getOriginalColumn() {
      return _column;
    }

    @Override
    public RejoinTable getTable() {
      return RejoinTable.this;
    }

    @Override
    public String getColumnNameSQL() {
      return _column.getColumnNameSQL();
    }

    @Override
    public String getTypeNameSQL() {
      return _column.getTypeNameSQL();
    }

    @Override
    public Integer getTypeLength() {
      return _column.getTypeLength();
    }

    @Override
    public List<?> getTypeQualifiers() {
      return _column.getTypeQualifiers();
    }

    @Override
    public List<? extends Constraint> getConstraints() {
      return _column.getConstraints();
    }

    @Override
    public Object getDefaultValue() {
      return _column.getDefaultValue();
    }

    @Override
    public String toString() {
      return "Rejoin: " + getOriginalColumn().toString() +
        "(" + getTable() + ")";
    }
  }
}
apache-2.0
sundevin/PicturePicker
picturepicker/src/main/java/devin/com/picturepicker/view/photoview/PhotoView.java
5235
/** * Copyright (C) 2016 Hyphenate Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /******************************************************************************* * Copyright 2011, 2012 Chris Banes. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*******************************************************************************/ package devin.com.picturepicker.view.photoview; import android.content.Context; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.net.Uri; import android.util.AttributeSet; public class PhotoView extends android.support.v7.widget.AppCompatImageView implements IPhotoView { private final PhotoViewAttacher mAttacher; private ScaleType mPendingScaleType; public PhotoView(Context context) { this(context, null); } public PhotoView(Context context, AttributeSet attr) { this(context, attr, 0); } public PhotoView(Context context, AttributeSet attr, int defStyle) { super(context, attr, defStyle); super.setScaleType(ScaleType.MATRIX); mAttacher = new PhotoViewAttacher(this); if (null != mPendingScaleType) { setScaleType(mPendingScaleType); mPendingScaleType = null; } } @Override public boolean canZoom() { return mAttacher.canZoom(); } @Override public RectF getDisplayRect() { return mAttacher.getDisplayRect(); } @Override public float getMinScale() { return mAttacher.getMinScale(); } @Override public float getMidScale() { return mAttacher.getMidScale(); } @Override public float getMaxScale() { return mAttacher.getMaxScale(); } @Override public float getScale() { return mAttacher.getScale(); } @Override public ScaleType getScaleType() { return mAttacher.getScaleType(); } @Override public void setAllowParentInterceptOnEdge(boolean allow) { mAttacher.setAllowParentInterceptOnEdge(allow); } @Override public void setMinScale(float minScale) { mAttacher.setMinScale(minScale); } @Override public void setMidScale(float midScale) { mAttacher.setMidScale(midScale); } @Override public void setMaxScale(float maxScale) { mAttacher.setMaxScale(maxScale); } @Override // setImageBitmap calls through to this method public void setImageDrawable(Drawable drawable) { super.setImageDrawable(drawable); if (null != mAttacher) { mAttacher.update(); } } @Override public void 
setImageResource(int resId) { super.setImageResource(resId); if (null != mAttacher) { mAttacher.update(); } } @Override public void setImageURI(Uri uri) { super.setImageURI(uri); if (null != mAttacher) { mAttacher.update(); } } @Override public void setOnMatrixChangeListener(PhotoViewAttacher.OnMatrixChangedListener listener) { mAttacher.setOnMatrixChangeListener(listener); } @Override public void setOnLongClickListener(OnLongClickListener l) { mAttacher.setOnLongClickListener(l); } @Override public void setOnPhotoTapListener(PhotoViewAttacher.OnPhotoTapListener listener) { mAttacher.setOnPhotoTapListener(listener); } @Override public void setOnViewTapListener(PhotoViewAttacher.OnViewTapListener listener) { mAttacher.setOnViewTapListener(listener); } @Override public void setScaleType(ScaleType scaleType) { if (null != mAttacher) { mAttacher.setScaleType(scaleType); } else { mPendingScaleType = scaleType; } } @Override public void setZoomable(boolean zoomable) { mAttacher.setZoomable(zoomable); } @Override public void zoomTo(float scale, float focalX, float focalY) { mAttacher.zoomTo(scale, focalX, focalY); } @Override protected void onDetachedFromWindow() { mAttacher.cleanup(); super.onDetachedFromWindow(); } }
apache-2.0
consulo/consulo-tasks
tasks-core/src/main/java/com/intellij/tasks/jira/CachedIconLoader.java
2332
/**
 * Copyright (C) 2008 Atlassian
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.tasks.jira;

import consulo.ui.image.Image;
import consulo.ui.image.ImageEffects;

import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;

/**
 * Loads icons from URLs and memoizes them, keyed by the URL string.
 * A grayed-out ("disabled") variant is generated and cached alongside
 * each successfully loaded icon.
 */
public final class CachedIconLoader {

    // Caches keyed by the icon URL's string form.
    private static Map<String, Image> iconCache = new HashMap<>();
    private static Map<String, Image> disabledIconCache = new HashMap<>();

    /** Utility class; not instantiable. */
    private CachedIconLoader() {
    }

    /**
     * Returns the cached disabled variant for the given URL string,
     * or {@code null} if no icon was loaded for it yet.
     */
    public static Image getDisabledIcon(String urlString) {
        return disabledIconCache.get(urlString);
    }

    private static void addDisabledIcon(String urlString, Image icon) {
        disabledIconCache.put(urlString, icon);
    }

    private static Image generateDisabledIcon(Image icon) {
        return ImageEffects.grayed(icon);
    }

    /** Creates and caches the grayed variant unless one already exists. */
    private static void maybeGenerateDisabledIcon(String urlString, Image icon) {
        if (icon == null || disabledIconCache.containsKey(urlString)) {
            return;
        }
        addDisabledIcon(urlString, generateDisabledIcon(icon));
    }

    /**
     * Returns the icon at {@code url}, loading and caching it on first use.
     * Returns {@code null} for a {@code null} URL or when loading fails.
     */
    public static Image getIcon(URL url) {
        if (url == null) {
            return null;
        }
        String key = url.toString();
        if (!iconCache.containsKey(key)) {
            try {
                Image loaded = Image.fromUrl(url);
                iconCache.put(key, loaded);
                maybeGenerateDisabledIcon(key, loaded);
            } catch (IOException ignored) {
                // Load failure is non-fatal; fall through to the (empty) cache lookup.
            }
        }
        return iconCache.get(key);
    }

    /**
     * Returns the icon at {@code urlString}, loading and caching it on first use.
     * Returns {@code null} for a {@code null} input, a malformed URL, or a load failure.
     */
    public static Image getIcon(String urlString) {
        if (urlString == null) {
            return null;
        }
        if (!iconCache.containsKey(urlString)) {
            try {
                // MalformedURLException is an IOException, so the catch below
                // covers both URL parsing and image loading failures.
                Image loaded = Image.fromUrl(new URL(urlString));
                iconCache.put(urlString, loaded);
                maybeGenerateDisabledIcon(urlString, loaded);
            } catch (IOException ignored) {
                return null;
            }
        }
        return iconCache.get(urlString);
    }
}
apache-2.0
ceylon/ceylon
model/src/org/eclipse/ceylon/model/loader/model/FieldValue.java
1193
/******************************************************************************** * Copyright (c) 2011-2017 Red Hat Inc. and/or its affiliates and others * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * http://www.apache.org/licenses/LICENSE-2.0 * * SPDX-License-Identifier: Apache-2.0 ********************************************************************************/ package org.eclipse.ceylon.model.loader.model; import org.eclipse.ceylon.model.typechecker.model.Value; /** * Marker class to be able to mark class attributes that are not JavaBean properties * but simple fields. Used for Java interoperability only. * * @author Stéphane Épardaud <stef@epardaud.fr> */ public class FieldValue extends Value { private String fieldName; public FieldValue(String fieldName){ this.fieldName = fieldName; } public String getRealName(){ return fieldName; } @Override protected Class<?> getModelClass() { return getClass().getSuperclass(); } @Override public boolean isJava() { return true; } }
apache-2.0
agileowl/tapestry-5
tapestry-core/src/main/java/org/apache/tapestry5/internal/services/ApplicationGlobalsImpl.java
1311
// Copyright 2006, 2007, 2008 The Apache Software Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.internal.services; import org.apache.tapestry5.services.ApplicationGlobals; import org.apache.tapestry5.services.Context; import javax.servlet.ServletContext; public class ApplicationGlobalsImpl implements ApplicationGlobals { private ServletContext servletContext; private Context context; public void storeServletContext(ServletContext context) { servletContext = context; } public ServletContext getServletContext() { return servletContext; } public Context getContext() { return context; } public void storeContext(Context context) { this.context = context; } }
apache-2.0
riftsaw/riftsaw-ode
bpel-dao/src/main/java/org/apache/ode/dao/bpel/ActivityRecoveryDAO.java
1234
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ode.dao.bpel; import java.util.Date; import org.w3c.dom.Element; /** * Activity recovery object. Registered when activity enters recovery state. */ public interface ActivityRecoveryDAO { long getActivityId(); String getChannel(); String getReason(); Element getDetails(); Date getDateTime(); String getActions(); String[] getActionsList(); int getRetries(); }
apache-2.0
CloudBPM/SwinFlowCloud-CloudSide
core.runtime/src/main/java/com/cloudibpm/core/runtime/util/json/FormDataLoader.java
10187
/**
 *
 */
package com.cloudibpm.core.runtime.util.json;

import com.cloudibpm.core.TreeNode;
import com.cloudibpm.core.buildtime.wfprocess.WfProcess;
import com.cloudibpm.core.data.*;
import com.cloudibpm.core.data.variable.ArrayDataVariable;
import com.cloudibpm.core.data.variable.DataVariable;
import org.json.JSONArray;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Loads current data-variable values of a workflow process instance into the
 * JSON description of a form: each UI component that carries a {@code varId}
 * is bound to the variable with that id and receives its value (usually via
 * the component's {@code initValue} field).
 *
 * @author Dahai Cao created on 20180305
 */
public class FormDataLoader {

    /**
     * Datatype names whose array values are copied into a CheckBoxes
     * component. In the original branch-per-type code every one of these
     * branches performed the identical copy; an unknown datatype added no
     * values (leaving the list empty), which this set preserves.
     */
    private static final Set<String> ARRAY_DATATYPES = new HashSet<>(Arrays.asList(
            "Integer", "int",
            "Double", "double", "Float", "float",
            "Boolean", "String",
            "DateTime", "Date", "Time",
            "TimeDuration", "Currency", "JSONData", "File", "Handwriting"));

    /**
     * Parses a JSON form definition (given as a string) and loads variable
     * values into it.
     *
     * @param jsonForm the form definition as a JSON string
     * @param pi       the process instance supplying variable values
     * @return the form JSON, with component values populated, as a string
     * @throws Exception if the JSON cannot be parsed
     */
    public static String parseJSONtoLoadData(String jsonForm, WfProcess pi) throws Exception {
        // Delegate to the JSONObject overload instead of duplicating its body.
        return parseJSON2LoadData(new JSONObject(jsonForm), pi);
    }

    /**
     * Loads variable values into an already-parsed form definition.
     *
     * @param jsonForm the form definition
     * @param pi       the process instance supplying variable values
     * @return the form JSON, with component values populated, as a string
     * @throws Exception declared for interface compatibility with callers
     */
    public static String parseJSON2LoadData(JSONObject jsonForm, WfProcess pi) throws Exception {
        if (!jsonForm.isNull("children")) {
            parseChildrenFromJSON(jsonForm.getJSONArray("children"), pi);
        }
        // parse all propagation rules.
        // this.parseRules(this);
        return jsonForm.toString();
    }

    /**
     * Assigns values to UI components, i.e. realizes the binding between data
     * variables and UI components. Containers (Row/Column) are traversed
     * recursively; leaf components are dispatched by their class type name.
     *
     * @param jsonarr form's UI component list
     * @param pi      the process instance supplying variable values
     */
    private static void parseChildrenFromJSON(JSONArray jsonarr, WfProcess pi) {
        if (jsonarr == null || jsonarr.length() == 0) {
            return;
        }
        for (int i = 0; i < jsonarr.length(); i++) {
            JSONObject uicomponent = jsonarr.getJSONObject(i);
            String ctype = uicomponent.getString("classtypename");
            switch (ctype) {
                case "Row":
                case "Column":
                    // Containers: recurse into their children.
                    if (!uicomponent.isNull("children")) {
                        parseChildrenFromJSON(uicomponent.getJSONArray("children"), pi);
                    }
                    break;
                case "SingleLineText":
                case "SingleSelect":
                case "MultipleLineText":
                case "RichTextInput":
                case "Radios":
                    // ac: accessControl, 0: read only; 1: writable
                    //if (!uicomponent.isNull("ac")) {
                    //    String ac = content.getString("ac");
                    //}
                    putStringValue(uicomponent, pi);
                    break;
                case "IntegerInput":
                case "NaturalNumberInput":
                    putIntegerValue(uicomponent, pi);
                    break;
                case "CurrencyInput":
                case "DecimalsInput":
                    putDoubleValue(uicomponent, pi);
                    break;
                case "CheckBoxes":
                    putArrayValues(uicomponent, pi);
                    break;
                case "DateTimeInput":
                    putDateTimeStringValue(uicomponent, pi);
                    break;
                case "DateTimeRangeInput":
                    putTimeDurationStringValue(uicomponent, pi);
                    break;
                default:
                    // FileUpload, FileDisplayer, FilesDisplayer, Image:
                    // no variable binding implemented (matches the original
                    // empty branches).
                    break;
            }
        }
    }

    /**
     * Resolves the component's {@code varId} to a {@link DataVariable} and
     * returns its current value, or {@code null} when the component has no
     * {@code varId} or the id does not resolve to a DataVariable.
     */
    private static Object variableValue(JSONObject uicomponent, WfProcess pi) {
        if (uicomponent.isNull("varId")) {
            return null;
        }
        TreeNode v = pi.seekByID(uicomponent.getString("varId"));
        if (v instanceof DataVariable) {
            return ((DataVariable) v).getValue();
        }
        return null;
    }

    /**
     * Copies an array variable's values into the component's
     * {@code initValue} as a string array.
     */
    private static void putArrayValues(JSONObject uicomponent, WfProcess pi) {
        if (uicomponent.isNull("varId")) {
            return;
        }
        TreeNode v = pi.seekByID(uicomponent.getString("varId"));
        if (!(v instanceof ArrayDataVariable)) {
            return;
        }
        ArrayDataVariable dv = (ArrayDataVariable) v;
        if (dv.getValues() == null) {
            return;
        }
        Constant[] vals = (Constant[]) dv.getValues();
        List<String> values = new ArrayList<>();
        // All supported datatypes copied values identically in the original
        // branch-per-type code, so one loop replaces the duplication.
        if (vals.length > 0 && ARRAY_DATATYPES.contains(dv.getDatatype())) {
            for (Constant val : vals) {
                values.add(val.getValue());
            }
        }
        uicomponent.put("initValue", values.toArray(new String[values.size()]));
    }

    /** Binds a string-valued variable to the component's {@code initValue}. */
    private static void putStringValue(JSONObject uicomponent, WfProcess pi) {
        Object o = variableValue(uicomponent, pi);
        if (o instanceof StringConstant) {
            uicomponent.put("initValue", ((StringConstant) o).getValue());
        }
    }

    /** Binds an integer-valued variable to the component's {@code initValue}. */
    private static void putIntegerValue(JSONObject uicomponent, WfProcess pi) {
        Object o = variableValue(uicomponent, pi);
        if (o instanceof IntegerConstant) {
            uicomponent.put("initValue", ((IntegerConstant) o).getValue());
        }
    }

    /** Binds a double-valued variable to the component's {@code initValue}. */
    private static void putDoubleValue(JSONObject uicomponent, WfProcess pi) {
        Object o = variableValue(uicomponent, pi);
        if (o instanceof DoubleConstant) {
            uicomponent.put("initValue", ((DoubleConstant) o).getValue());
        }
    }

    /** Binds a date/time variable to the component's {@code initValue}. */
    private static void putDateTimeStringValue(JSONObject uicomponent, WfProcess pi) {
        Object o = variableValue(uicomponent, pi);
        if (o instanceof DateTimeConstant) {
            uicomponent.put("initValue", ((DateTimeConstant) o).getValue());
        }
    }

    /**
     * Binds a time-duration variable to the component: duration fields are
     * written individually rather than through a single {@code initValue}.
     */
    private static void putTimeDurationStringValue(JSONObject uicomponent, WfProcess pi) {
        Object o = variableValue(uicomponent, pi);
        if (o instanceof TimDurationConstant) {
            TimDurationConstant d = (TimDurationConstant) o;
            uicomponent.put("largeDuration", d.getLargeDuration());
            uicomponent.put("largeDurationUnit", d.getLargeDurationUnit());
            uicomponent.put("hours", d.getHours());
            uicomponent.put("minutes", d.getMinutes());
            uicomponent.put("seconds", d.getSeconds());
        }
    }
}
apache-2.0
McLeodMoores/starling
projects/financial/src/main/java/com/opengamma/financial/analytics/riskfactors/DefaultRiskFactorsConfigurationProviderBuilder.java
1730
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.riskfactors; import org.fudgemsg.FudgeField; import org.fudgemsg.FudgeMsg; import org.fudgemsg.MutableFudgeMsg; import org.fudgemsg.mapping.FudgeBuilder; import org.fudgemsg.mapping.FudgeBuilderFor; import org.fudgemsg.mapping.FudgeDeserializer; import org.fudgemsg.mapping.FudgeSerializer; import com.opengamma.util.money.Currency; /** * Fudge message builder for {@link DefaultRiskFactorsConfigurationProvider}. */ @FudgeBuilderFor(DefaultRiskFactorsConfigurationProvider.class) public class DefaultRiskFactorsConfigurationProviderBuilder implements FudgeBuilder<DefaultRiskFactorsConfigurationProvider> { private static final String CURRENCY_FIELD = "currency"; @Override public MutableFudgeMsg buildMessage(final FudgeSerializer serializer, final DefaultRiskFactorsConfigurationProvider object) { final MutableFudgeMsg msg = serializer.newMessage(); serializer.addToMessage(msg, CURRENCY_FIELD, null, object.getCurrencyOverride()); return msg; } @Override public DefaultRiskFactorsConfigurationProvider buildObject(final FudgeDeserializer deserializer, final FudgeMsg msg) { return new DefaultRiskFactorsConfigurationProvider(getCurrencyOverride(deserializer, msg)); } protected Currency getCurrencyOverride(final FudgeDeserializer deserializer, final FudgeMsg msg) { final FudgeField currencyField = msg.getByName(CURRENCY_FIELD); final Currency currencyOverride = currencyField != null ? deserializer.fieldValueToObject(Currency.class, currencyField) : null; return currencyOverride; } }
apache-2.0
dbarentine/totalconnect
totalconnect/src/main/java/com/barentine/totalconnect/ws/PanelMetadataAndStatusResultsExV1.java
1849
package com.barentine.totalconnect.ws;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code PanelMetadataAndStatusResultsEx_V1} complex type
 * of the TC2 (TotalConnect) web service; generated-style class, so the
 * structure mirrors the schema below.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="PanelMetadataAndStatusResultsEx_V1">
 *   &lt;complexContent>
 *     &lt;extension base="{https://services.alarmnet.com/TC2/}WebMethodResults">
 *       &lt;sequence>
 *         &lt;element name="PanelMetadataAndStatus" type="{https://services.alarmnet.com/TC2/}PanelMetadataAndStatusInfoEx_V1" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "PanelMetadataAndStatusResultsEx_V1", propOrder = {
    "panelMetadataAndStatus"
})
public class PanelMetadataAndStatusResultsExV1
    extends WebMethodResults {

    // Optional in the schema (minOccurs="0"), so this may legitimately be null.
    @XmlElement(name = "PanelMetadataAndStatus")
    protected PanelMetadataAndStatusInfoExV1 panelMetadataAndStatus;

    /**
     * Gets the value of the panelMetadataAndStatus property.
     *
     * @return
     *     possible object is
     *     {@link PanelMetadataAndStatusInfoExV1 }, or {@code null} when the
     *     element was absent from the response
     */
    public PanelMetadataAndStatusInfoExV1 getPanelMetadataAndStatus() {
        return panelMetadataAndStatus;
    }

    /**
     * Sets the value of the panelMetadataAndStatus property.
     *
     * @param value
     *     allowed object is
     *     {@link PanelMetadataAndStatusInfoExV1 }
     */
    public void setPanelMetadataAndStatus(PanelMetadataAndStatusInfoExV1 value) {
        this.panelMetadataAndStatus = value;
    }
}
apache-2.0
ThoughtsLive/jira-steps
src/main/java/org/thoughtslive/jenkins/plugins/jira/api/EmailTo.java
1116
package org.thoughtslive.jenkins.plugins.jira.api;

import java.io.Serializable;
import java.util.List;
import org.kohsuke.stapler.DataBoundConstructor;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;

/**
 * Recipient selection for a JIRA notification payload — presumably the "to"
 * section of JIRA's issue-notify REST request (TODO confirm against the JIRA
 * REST API docs). Lombok generates the accessors, constructors and builder;
 * Jackson maps the fields to their JSON names.
 */
@Data
@ToString
@NoArgsConstructor
@AllArgsConstructor(onConstructor = @__({@DataBoundConstructor}))
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
@JsonIgnoreProperties(ignoreUnknown = true)
@Builder
public class EmailTo implements Serializable {

  private static final long serialVersionUID = 1986209333454558972L;

  // Flag: notify the issue's reporter.
  @JsonProperty("reporter")
  private Boolean reporter;

  // Flag: notify the issue's assignee.
  @JsonProperty("assignee")
  private Boolean assignee;

  // Flag: notify the issue's watchers.
  @JsonProperty("watchers")
  private Boolean watchers;

  // Flag: notify users who voted on the issue.
  @JsonProperty("voters")
  private Boolean voters;

  // Explicit list of users to notify (null when unset; omitted by NON_DEFAULT).
  @JsonProperty("users")
  private List<User> users = null;

  // Explicit list of groups to notify (null when unset; omitted by NON_DEFAULT).
  @JsonProperty("groups")
  private List<Group> groups = null;
}
apache-2.0
Ariah-Group/Finance
af_webapp/src/main/java/org/kuali/kfs/gl/batch/service/BatchSortService.java
846
/* * Copyright 2008-2009 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.gl.batch.service; import java.util.Comparator; public interface BatchSortService { public void sortTextFileWithFields(String inputFileName, String outputFileName, Comparator comparator); }
apache-2.0
teatrove/teatrove
teaservlet/src/main/java/org/teatrove/teaservlet/assets/UrlAssetFactory.java
2794
package org.teatrove.teaservlet.assets;

import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;

import org.teatrove.trove.log.Log;
import org.teatrove.trove.util.PropertyMap;

/**
 * Asset factory that resolves asset paths relative to a base {@link URL}.
 * The base URL is guaranteed to end with a trailing slash, and resolved
 * assets are confined to descendants of the base.
 */
public class UrlAssetFactory extends AbstractAssetFactory {

    private URL baseUrl;

    public UrlAssetFactory() {
        super();
    }

    /**
     * @param baseUrl base URL, as a string, that assets are resolved against
     * @throws MalformedURLException if the string is not a valid URL
     */
    public UrlAssetFactory(String baseUrl) throws MalformedURLException {
        this(new URL(baseUrl));
    }

    /**
     * @param baseUrl base URL that assets are resolved against; a trailing
     *                slash is appended when missing
     */
    public UrlAssetFactory(URL baseUrl) {
        String basePath = baseUrl.toExternalForm();
        if (!basePath.endsWith("/")) {
            try {
                baseUrl = new URL(basePath.concat("/"));
            }
            catch (MalformedURLException exception) {
                // Appending "/" to an already-valid URL should never fail.
                throw new IllegalStateException(exception);
            }
        }

        this.baseUrl = baseUrl;
    }

    @Override
    public String toString() {
        return baseUrl.toExternalForm();
    }

    /**
     * Initializes the factory, optionally overriding the base URL from the
     * {@code baseUrl} property.
     *
     * @throws IllegalStateException if no base URL was configured
     */
    @Override
    public void init(Log log, PropertyMap properties) throws Exception {
        super.init(log, properties);

        // lookup base path, if provided
        String base = properties.getString("baseUrl");
        if (base != null) {
            if (!base.endsWith("/")) {
                base = base.concat("/");
            }

            this.baseUrl = new URL(base);
        }

        // validate base path
        if (this.baseUrl == null) {
            throw new IllegalStateException("missing base url");
        }
    }

    /**
     * Opens a stream for the asset at the given path relative to the base
     * URL, or returns {@code null} when the path is invalid, escapes the
     * base, or cannot be opened.
     */
    @Override
    public InputStream getAsset(String path) {
        // validate path (inherited helper; its exact guarantees are not
        // visible here, so traversal is defended against again below)
        path = validatePath(path);

        // append the path to create a new URL, then normalize so that ".."
        // segments cannot escape the base while still passing the textual
        // startsWith check below (e.g. base + "a/../../secret")
        URL resourceUrl = null;
        try {
            resourceUrl = new URL(baseUrl.toExternalForm().concat(path));
            resourceUrl = resourceUrl.toURI().normalize().toURL();
        }
        catch (MalformedURLException exception) {
            log.debug(
                "invalid asset path url: " + baseUrl.toExternalForm() + path
            );
            log.debug(exception);
            return null;
        }
        catch (java.net.URISyntaxException exception) {
            log.debug(
                "invalid asset path url: " + baseUrl.toExternalForm() + path
            );
            log.debug(exception);
            return null;
        }

        // ensure the resulting file is a child of the base
        String basePath = baseUrl.toExternalForm();
        String resourcePath = resourceUrl.toExternalForm();
        if (!resourcePath.startsWith(basePath)) {
            log.error(
                "url paths must be relative to base url: " +
                baseUrl.toExternalForm() + ":" + path
            );

            return null;
        }

        // open stream for associated url
        try {
            return resourceUrl.openStream();
        }
        catch (IOException ioe) {
            log.debug(
                "unable to open asset stream: " + resourceUrl.toExternalForm());
            log.debug(ioe);
            return null;
        }
    }
}
apache-2.0
bjorndm/prebake
code/third_party/bdb/src/com/sleepycat/je/txn/TxnAbort.java
1008
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002-2010 Oracle. All rights reserved.
 *
 * $Id: TxnAbort.java,v 1.30 2010/01/04 15:50:52 cwl Exp $
 */
package com.sleepycat.je.txn;

import com.sleepycat.je.log.Loggable;

/**
 * Log record written when a transaction aborts; carries the transaction end
 * state inherited from {@link TxnEnd}.
 */
public class TxnAbort extends TxnEnd {

    public TxnAbort(long id, long lastLsn, int masterId) {
        super(id, lastLsn, masterId);
    }

    /**
     * For constructing from the log.
     */
    public TxnAbort() {
    }

    /*
     * Log support
     */
    protected String getTagName() {
        return "TxnAbort";
    }

    /**
     * Two abort records are logically equal when both their transaction id
     * and replication master node id match.
     *
     * @see Loggable#logicalEquals
     */
    public boolean logicalEquals(Loggable other) {
        if (other instanceof TxnAbort) {
            final TxnAbort that = (TxnAbort) other;
            return id == that.id && repMasterNodeId == that.repMasterNodeId;
        }
        return false;
    }
}
apache-2.0
HuangLS/neo4j
community/function/src/main/java/org/neo4j/function/primitive/FunctionToPrimitiveLong.java
999
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.function.primitive; /** * @deprecated use {@link org.neo4j.function.ToLongFunction} instead */ @Deprecated public interface FunctionToPrimitiveLong<T> { long apply( T value ); }
apache-2.0