code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_BLOCK_WRITE_LOCATEFOLLOWINGBLOCK_RETRIES_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_BLOCK_WRITE_LOCATEFOLLOWINGBLOCK_RETRIES_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_BLOCK_WRITE_RETRIES_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_BLOCK_WRITE_RETRIES_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_CACHED_CONN_RETRY_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_CACHED_CONN_RETRY_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_MAX_ATTEMPTS_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_MAX_ATTEMPTS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_SLEEPTIME_BASE_DEFAULT; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_SLEEPTIME_BASE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_SLEEPTIME_MAX_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_SLEEPTIME_MAX_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_READ_PREFETCH_SIZE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_CACHE_DROP_BEHIND_READS; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_CACHE_DROP_BEHIND_WRITES; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_CACHE_READAHEAD; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.NetworkInterface; import java.net.Socket; import java.net.SocketException; import java.net.SocketAddress; import java.net.URI; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Random; import javax.net.SocketFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.BlockStorageLocation; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.FsStatus; import org.apache.hadoop.fs.HdfsBlockLocation; import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.fs.MD5MD5CRC32CastagnoliFileChecksum; import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum; import org.apache.hadoop.fs.MD5MD5CRC32GzipFileChecksum; import org.apache.hadoop.fs.Options; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.VolumeId; import 
org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.client.HdfsDataInputStream; import org.apache.hadoop.hdfs.client.HdfsDataOutputStream; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks; import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.HdfsBlocksMetadata; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException; import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport; import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus; import org.apache.hadoop.hdfs.protocol.UnresolvedPathException; import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferEncryptor; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import org.apache.hadoop.hdfs.protocol.datatransfer.Op; import org.apache.hadoop.hdfs.protocol.datatransfer.ReplaceDatanodeOnFailure; import org.apache.hadoop.hdfs.protocol.datatransfer.Sender; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpBlockChecksumResponseProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status; import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.protocolPB.PBHelper; import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.datanode.CachingStrategy; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.SafeModeException; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.MD5Hash; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.retry.LossyRetryInvocationHandler; import org.apache.hadoop.ipc.Client; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.DNS; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenRenewer; import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.DataChecksum.Type; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.Time; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.net.InetAddresses; /******************************************************** * DFSClient can connect to a Hadoop Filesystem and * perform basic file tasks. It uses the ClientProtocol * to communicate with a NameNode daemon, and connects * directly to DataNodes to read/write block data. * * Hadoop DFS users should obtain an instance of * DistributedFileSystem, which uses DFSClient to handle * filesystem tasks. 
* ********************************************************/ @InterfaceAudience.Private public class DFSClient implements java.io.Closeable { public static final Log LOG = LogFactory.getLog(DFSClient.class); public static final long SERVER_DEFAULTS_VALIDITY_PERIOD = 60 * 60 * 1000L; // 1 hour static final int TCP_WINDOW_SIZE = 128 * 1024; // 128 KB private final Configuration conf; private final Conf dfsClientConf; final ClientProtocol namenode; /* The service used for delegation tokens */ private Text dtService; final UserGroupInformation ugi; volatile boolean clientRunning = true; volatile long lastLeaseRenewal; private volatile FsServerDefaults serverDefaults; private volatile long serverDefaultsLastUpdate; final String clientName; SocketFactory socketFactory; final ReplaceDatanodeOnFailure dtpReplaceDatanodeOnFailure; final FileSystem.Statistics stats; private final String authority; final PeerCache peerCache; private Random r = new Random(); private SocketAddress[] localInterfaceAddrs; private DataEncryptionKey encryptionKey; private boolean shouldUseLegacyBlockReaderLocal; private final CachingStrategy defaultReadCachingStrategy; private final CachingStrategy defaultWriteCachingStrategy; /** * DFSClient configuration */ public static class Conf { final int hdfsTimeout; // timeout value for a DFS operation. 
final int maxFailoverAttempts; final int failoverSleepBaseMillis; final int failoverSleepMaxMillis; final int maxBlockAcquireFailures; final int confTime; final int ioBufferSize; final ChecksumOpt defaultChecksumOpt; final int writePacketSize; final int socketTimeout; final int socketCacheCapacity; final long socketCacheExpiry; final long excludedNodesCacheExpiry; /** Wait time window (in msec) if BlockMissingException is caught */ final int timeWindow; final int nCachedConnRetry; final int nBlockWriteRetry; final int nBlockWriteLocateFollowingRetry; final long defaultBlockSize; final long prefetchSize; final short defaultReplication; final String taskId; final FsPermission uMask; final boolean connectToDnViaHostname; final boolean getHdfsBlocksMetadataEnabled; final int getFileBlockStorageLocationsNumThreads; final int getFileBlockStorageLocationsTimeout; final boolean useLegacyBlockReader; final boolean useLegacyBlockReaderLocal; final String domainSocketPath; final boolean skipShortCircuitChecksums; final int shortCircuitBufferSize; final boolean shortCircuitLocalReads; final boolean domainSocketDataTraffic; final int shortCircuitStreamsCacheSize; final long shortCircuitStreamsCacheExpiryMs; public Conf(Configuration conf) { // The hdfsTimeout is currently the same as the ipc timeout hdfsTimeout = Client.getTimeout(conf); maxFailoverAttempts = conf.getInt( DFS_CLIENT_FAILOVER_MAX_ATTEMPTS_KEY, DFS_CLIENT_FAILOVER_MAX_ATTEMPTS_DEFAULT); failoverSleepBaseMillis = conf.getInt( DFS_CLIENT_FAILOVER_SLEEPTIME_BASE_KEY, DFS_CLIENT_FAILOVER_SLEEPTIME_BASE_DEFAULT); failoverSleepMaxMillis = conf.getInt( DFS_CLIENT_FAILOVER_SLEEPTIME_MAX_KEY, DFS_CLIENT_FAILOVER_SLEEPTIME_MAX_DEFAULT); maxBlockAcquireFailures = conf.getInt( DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY, DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT); confTime = conf.getInt(DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY, HdfsServerConstants.WRITE_TIMEOUT); ioBufferSize = conf.getInt( 
CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY, CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT); defaultChecksumOpt = getChecksumOptFromConf(conf); socketTimeout = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, HdfsServerConstants.READ_TIMEOUT); /** dfs.write.packet.size is an internal config variable */ writePacketSize = conf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY, DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT); defaultBlockSize = conf.getLongBytes(DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE_DEFAULT); defaultReplication = (short) conf.getInt( DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT); taskId = conf.get("mapreduce.task.attempt.id", "NONMAPREDUCE"); socketCacheCapacity = conf.getInt(DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY, DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT); socketCacheExpiry = conf.getLong(DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY, DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT); excludedNodesCacheExpiry = conf.getLong( DFS_CLIENT_WRITE_EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL, DFS_CLIENT_WRITE_EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_DEFAULT); prefetchSize = conf.getLong(DFS_CLIENT_READ_PREFETCH_SIZE_KEY, 10 * defaultBlockSize); timeWindow = conf.getInt(DFS_CLIENT_RETRY_WINDOW_BASE, 3000); nCachedConnRetry = conf.getInt(DFS_CLIENT_CACHED_CONN_RETRY_KEY, DFS_CLIENT_CACHED_CONN_RETRY_DEFAULT); nBlockWriteRetry = conf.getInt(DFS_CLIENT_BLOCK_WRITE_RETRIES_KEY, DFS_CLIENT_BLOCK_WRITE_RETRIES_DEFAULT); nBlockWriteLocateFollowingRetry = conf.getInt( DFS_CLIENT_BLOCK_WRITE_LOCATEFOLLOWINGBLOCK_RETRIES_KEY, DFS_CLIENT_BLOCK_WRITE_LOCATEFOLLOWINGBLOCK_RETRIES_DEFAULT); uMask = FsPermission.getUMask(conf); connectToDnViaHostname = conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT); getHdfsBlocksMetadataEnabled = conf.getBoolean( DFSConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED, DFSConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED_DEFAULT); getFileBlockStorageLocationsNumThreads = conf.getInt( DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_NUM_THREADS, 
DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_NUM_THREADS_DEFAULT); getFileBlockStorageLocationsTimeout = conf.getInt( DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_TIMEOUT, DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_TIMEOUT_DEFAULT); useLegacyBlockReader = conf.getBoolean( DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADER, DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADER_DEFAULT); useLegacyBlockReaderLocal = conf.getBoolean( DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL, DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL_DEFAULT); shortCircuitLocalReads = conf.getBoolean( DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_KEY, DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_DEFAULT); domainSocketDataTraffic = conf.getBoolean( DFSConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC, DFSConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC_DEFAULT); domainSocketPath = conf.getTrimmed( DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY, DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_DEFAULT); if (BlockReaderLocal.LOG.isDebugEnabled()) { BlockReaderLocal.LOG.debug( DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL + " = " + useLegacyBlockReaderLocal); BlockReaderLocal.LOG.debug( DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_KEY + " = " + shortCircuitLocalReads); BlockReaderLocal.LOG.debug( DFSConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC + " = " + domainSocketDataTraffic); BlockReaderLocal.LOG.debug( DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY + " = " + domainSocketPath); } skipShortCircuitChecksums = conf.getBoolean( DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_SKIP_CHECKSUM_KEY, DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_SKIP_CHECKSUM_DEFAULT); shortCircuitBufferSize = conf.getInt( DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_BUFFER_SIZE_KEY, DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_BUFFER_SIZE_DEFAULT); shortCircuitStreamsCacheSize = conf.getInt( DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_SIZE_KEY, 
DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_SIZE_DEFAULT); shortCircuitStreamsCacheExpiryMs = conf.getLong( DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_EXPIRY_MS_KEY, DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_EXPIRY_MS_DEFAULT); } private DataChecksum.Type getChecksumType(Configuration conf) { final String checksum = conf.get( DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); try { return DataChecksum.Type.valueOf(checksum); } catch(IllegalArgumentException iae) { LOG.warn("Bad checksum type: " + checksum + ". Using default " + DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); return DataChecksum.Type.valueOf( DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); } } // Construct a checksum option from conf private ChecksumOpt getChecksumOptFromConf(Configuration conf) { DataChecksum.Type type = getChecksumType(conf); int bytesPerChecksum = conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY, DFS_BYTES_PER_CHECKSUM_DEFAULT); return new ChecksumOpt(type, bytesPerChecksum); } // create a DataChecksum with the default option. private DataChecksum createChecksum() throws IOException { return createChecksum(null); } private DataChecksum createChecksum(ChecksumOpt userOpt) throws IOException { // Fill in any missing field with the default. ChecksumOpt myOpt = ChecksumOpt.processChecksumOpt( defaultChecksumOpt, userOpt); DataChecksum dataChecksum = DataChecksum.newDataChecksum( myOpt.getChecksumType(), myOpt.getBytesPerChecksum()); if (dataChecksum == null) { throw new IOException("Invalid checksum type specified: " + myOpt.getChecksumType().name()); } return dataChecksum; } } public Conf getConf() { return dfsClientConf; } Configuration getConfiguration() { return conf; } /** * A map from file names to {@link DFSOutputStream} objects * that are currently being written by this client. * Note that a file can only be written by a single client. 
*/ private final Map<String, DFSOutputStream> filesBeingWritten = new HashMap<String, DFSOutputStream>(); private final DomainSocketFactory domainSocketFactory; /** * Same as this(NameNode.getAddress(conf), conf); * @see #DFSClient(InetSocketAddress, Configuration) * @deprecated Deprecated at 0.21 */ @Deprecated public DFSClient(Configuration conf) throws IOException { this(NameNode.getAddress(conf), conf); } public DFSClient(InetSocketAddress address, Configuration conf) throws IOException { this(NameNode.getUri(address), conf); } /** * Same as this(nameNodeUri, conf, null); * @see #DFSClient(URI, Configuration, FileSystem.Statistics) */ public DFSClient(URI nameNodeUri, Configuration conf ) throws IOException { this(nameNodeUri, conf, null); } /** * Same as this(nameNodeUri, null, conf, stats); * @see #DFSClient(URI, ClientProtocol, Configuration, FileSystem.Statistics) */ public DFSClient(URI nameNodeUri, Configuration conf, FileSystem.Statistics stats) throws IOException { this(nameNodeUri, null, conf, stats); } /** * Create a new DFSClient connected to the given nameNodeUri or rpcNamenode. * If HA is enabled and a positive value is set for * {@link DFSConfigKeys#DFS_CLIENT_TEST_DROP_NAMENODE_RESPONSE_NUM_KEY} in the * configuration, the DFSClient will use {@link LossyRetryInvocationHandler} * as its RetryInvocationHandler. Otherwise one of nameNodeUri or rpcNamenode * must be null. 
*/ @VisibleForTesting public DFSClient(URI nameNodeUri, ClientProtocol rpcNamenode, Configuration conf, FileSystem.Statistics stats) throws IOException { // Copy only the required DFSClient configuration this.dfsClientConf = new Conf(conf); this.shouldUseLegacyBlockReaderLocal = this.dfsClientConf.useLegacyBlockReaderLocal; if (this.dfsClientConf.useLegacyBlockReaderLocal) { LOG.debug("Using legacy short-circuit local reads."); } this.conf = conf; this.stats = stats; this.socketFactory = NetUtils.getSocketFactory(conf, ClientProtocol.class); this.dtpReplaceDatanodeOnFailure = ReplaceDatanodeOnFailure.get(conf); this.ugi = UserGroupInformation.getCurrentUser(); this.authority = nameNodeUri == null? "null": nameNodeUri.getAuthority(); this.clientName = "DFSClient_" + dfsClientConf.taskId + "_" + DFSUtil.getRandom().nextInt() + "_" + Thread.currentThread().getId(); int numResponseToDrop = conf.getInt( DFSConfigKeys.DFS_CLIENT_TEST_DROP_NAMENODE_RESPONSE_NUM_KEY, DFSConfigKeys.DFS_CLIENT_TEST_DROP_NAMENODE_RESPONSE_NUM_DEFAULT); NameNodeProxies.ProxyAndInfo<ClientProtocol> proxyInfo = null; if (numResponseToDrop > 0) { // This case is used for testing. LOG.warn(DFSConfigKeys.DFS_CLIENT_TEST_DROP_NAMENODE_RESPONSE_NUM_KEY + " is set to " + numResponseToDrop + ", this hacked client will proactively drop responses"); proxyInfo = NameNodeProxies.createProxyWithLossyRetryHandler(conf, nameNodeUri, ClientProtocol.class, numResponseToDrop); } if (proxyInfo != null) { this.dtService = proxyInfo.getDelegationTokenService(); this.namenode = proxyInfo.getProxy(); } else if (rpcNamenode != null) { // This case is used for testing. 
Preconditions.checkArgument(nameNodeUri == null); this.namenode = rpcNamenode; dtService = null; } else { Preconditions.checkArgument(nameNodeUri != null, "null URI"); proxyInfo = NameNodeProxies.createProxy(conf, nameNodeUri, ClientProtocol.class); this.dtService = proxyInfo.getDelegationTokenService(); this.namenode = proxyInfo.getProxy(); } // read directly from the block file if configured. this.domainSocketFactory = new DomainSocketFactory(dfsClientConf); String localInterfaces[] = conf.getTrimmedStrings(DFSConfigKeys.DFS_CLIENT_LOCAL_INTERFACES); localInterfaceAddrs = getLocalInterfaceAddrs(localInterfaces); if (LOG.isDebugEnabled() && 0 != localInterfaces.length) { LOG.debug("Using local interfaces [" + Joiner.on(',').join(localInterfaces)+ "] with addresses [" + Joiner.on(',').join(localInterfaceAddrs) + "]"); } this.peerCache = PeerCache.getInstance(dfsClientConf.socketCacheCapacity, dfsClientConf.socketCacheExpiry); Boolean readDropBehind = (conf.get(DFS_CLIENT_CACHE_DROP_BEHIND_READS) == null) ? null : conf.getBoolean(DFS_CLIENT_CACHE_DROP_BEHIND_READS, false); Long readahead = (conf.get(DFS_CLIENT_CACHE_READAHEAD) == null) ? null : conf.getLong(DFS_CLIENT_CACHE_READAHEAD, 0); Boolean writeDropBehind = (conf.get(DFS_CLIENT_CACHE_DROP_BEHIND_WRITES) == null) ? null : conf.getBoolean(DFS_CLIENT_CACHE_DROP_BEHIND_WRITES, false); this.defaultReadCachingStrategy = new CachingStrategy(readDropBehind, readahead); this.defaultWriteCachingStrategy = new CachingStrategy(writeDropBehind, readahead); } /** * Return the socket addresses to use with each configured * local interface. Local interfaces may be specified by IP * address, IP address range using CIDR notation, interface * name (e.g. eth0) or sub-interface name (e.g. eth0:0). * The socket addresses consist of the IPs for the interfaces * and the ephemeral port (port 0). If an IP, IP range, or * interface name matches an interface with sub-interfaces * only the IP of the interface is used. 
Sub-interfaces can * be used by specifying them explicitly (by IP or name). * * @return SocketAddresses for the configured local interfaces, * or an empty array if none are configured * @throws UnknownHostException if a given interface name is invalid */ private static SocketAddress[] getLocalInterfaceAddrs( String interfaceNames[]) throws UnknownHostException { List<SocketAddress> localAddrs = new ArrayList<SocketAddress>(); for (String interfaceName : interfaceNames) { if (InetAddresses.isInetAddress(interfaceName)) { localAddrs.add(new InetSocketAddress(interfaceName, 0)); } else if (NetUtils.isValidSubnet(interfaceName)) { for (InetAddress addr : NetUtils.getIPs(interfaceName, false)) { localAddrs.add(new InetSocketAddress(addr, 0)); } } else { for (String ip : DNS.getIPs(interfaceName, false)) { localAddrs.add(new InetSocketAddress(ip, 0)); } } } return localAddrs.toArray(new SocketAddress[localAddrs.size()]); } /** * Select one of the configured local interfaces at random. We use a random * interface because other policies like round-robin are less effective * given that we cache connections to datanodes. * * @return one of the local interface addresses at random, or null if no * local interfaces are configured */ SocketAddress getRandomLocalInterfaceAddr() { if (localInterfaceAddrs.length == 0) { return null; } final int idx = r.nextInt(localInterfaceAddrs.length); final SocketAddress addr = localInterfaceAddrs[idx]; if (LOG.isDebugEnabled()) { LOG.debug("Using local interface " + addr); } return addr; } /** * Return the number of times the client should go back to the namenode * to retrieve block locations when reading. */ int getMaxBlockAcquireFailures() { return dfsClientConf.maxBlockAcquireFailures; } /** * Return the timeout that clients should use when writing to datanodes. * @param numNodes the number of nodes in the pipeline. */ int getDatanodeWriteTimeout(int numNodes) { return (dfsClientConf.confTime > 0) ? 
(dfsClientConf.confTime + HdfsServerConstants.WRITE_TIMEOUT_EXTENSION * numNodes) : 0; } int getDatanodeReadTimeout(int numNodes) { return dfsClientConf.socketTimeout > 0 ? (HdfsServerConstants.READ_TIMEOUT_EXTENSION * numNodes + dfsClientConf.socketTimeout) : 0; } int getHdfsTimeout() { return dfsClientConf.hdfsTimeout; } @VisibleForTesting public String getClientName() { return clientName; } void checkOpen() throws IOException { if (!clientRunning) { IOException result = new IOException("Filesystem closed"); throw result; } } /** Return the lease renewer instance. The renewer thread won't start * until the first output stream is created. The same instance will * be returned until all output streams are closed. */ public LeaseRenewer getLeaseRenewer() throws IOException { return LeaseRenewer.getInstance(authority, ugi, this); } /** Get a lease and start automatic renewal */ private void beginFileLease(final String src, final DFSOutputStream out) throws IOException { getLeaseRenewer().put(src, out, this); } /** Stop renewal of lease for the file. */ void endFileLease(final String src) throws IOException { getLeaseRenewer().closeFile(src, this); } /** Put a file. Only called from LeaseRenewer, where proper locking is * enforced to consistently update its local dfsclients array and * client's filesBeingWritten map. */ void putFileBeingWritten(final String src, final DFSOutputStream out) { synchronized(filesBeingWritten) { filesBeingWritten.put(src, out); // update the last lease renewal time only when there was no // writes. once there is one write stream open, the lease renewer // thread keeps it updated well with in anyone's expiration time. if (lastLeaseRenewal == 0) { updateLastLeaseRenewal(); } } } /** Remove a file. Only called from LeaseRenewer. */ void removeFileBeingWritten(final String src) { synchronized(filesBeingWritten) { filesBeingWritten.remove(src); if (filesBeingWritten.isEmpty()) { lastLeaseRenewal = 0; } } } /** Is file-being-written map empty? 
*/ boolean isFilesBeingWrittenEmpty() { synchronized(filesBeingWritten) { return filesBeingWritten.isEmpty(); } } /** @return true if the client is running */ boolean isClientRunning() { return clientRunning; } long getLastLeaseRenewal() { return lastLeaseRenewal; } void updateLastLeaseRenewal() { synchronized(filesBeingWritten) { if (filesBeingWritten.isEmpty()) { return; } lastLeaseRenewal = Time.now(); } } /** * Renew leases. * @return true if lease was renewed. May return false if this * client has been closed or has no files open. **/ boolean renewLease() throws IOException { if (clientRunning && !isFilesBeingWrittenEmpty()) { try { namenode.renewLease(clientName); updateLastLeaseRenewal(); return true; } catch (IOException e) { // Abort if the lease has already expired. final long elapsed = Time.now() - getLastLeaseRenewal(); if (elapsed > HdfsConstants.LEASE_HARDLIMIT_PERIOD) { LOG.warn("Failed to renew lease for " + clientName + " for " + (elapsed/1000) + " seconds (>= soft-limit =" + (HdfsConstants.LEASE_HARDLIMIT_PERIOD/1000) + " seconds.) " + "Closing all files being written ...", e); closeAllFilesBeingWritten(true); } else { // Let the lease renewer handle it and retry. throw e; } } } return false; } /** * Close connections the Namenode. */ void closeConnectionToNamenode() { RPC.stopProxy(namenode); } /** Abort and release resources held. Ignore all errors. */ void abort() { clientRunning = false; closeAllFilesBeingWritten(true); try { // remove reference to this client and stop the renewer, // if there is no more clients under the renewer. getLeaseRenewer().closeClient(this); } catch (IOException ioe) { LOG.info("Exception occurred while aborting the client " + ioe); } closeConnectionToNamenode(); } /** Close/abort all files being written. 
 */
/**
 * Close every output stream registered in {@code filesBeingWritten},
 * draining the map one entry at a time.
 *
 * @param abort if true, abort each stream (abandon the lease without
 *              flushing); otherwise close it cleanly
 */
private void closeAllFilesBeingWritten(final boolean abort) {
  for(;;) {
    final String src;
    final DFSOutputStream out;
    // Pop a single entry under the lock, but abort/close it outside the
    // lock so a slow stream shutdown cannot block other threads that
    // touch filesBeingWritten.
    synchronized(filesBeingWritten) {
      if (filesBeingWritten.isEmpty()) {
        return;
      }
      src = filesBeingWritten.keySet().iterator().next();
      out = filesBeingWritten.remove(src);
    }
    if (out != null) {
      try {
        if (abort) {
          out.abort();
        } else {
          out.close();
        }
      } catch(IOException ie) {
        // Best effort: log and keep going with the remaining streams.
        LOG.error("Failed to " + (abort? "abort": "close") + " file " + src, ie);
      }
    }
  }
}

/**
 * Close the file system, abandoning all of the leases and files being
 * created and close connections to the namenode.
 */
@Override
public synchronized void close() throws IOException {
  if(clientRunning) {
    closeAllFilesBeingWritten(false);
    clientRunning = false;
    getLeaseRenewer().closeClient(this);
    // close connections to the namenode
    closeConnectionToNamenode();
  }
}

/**
 * Get the default block size for this cluster
 * @return the default block size in bytes
 */
public long getDefaultBlockSize() {
  return dfsClientConf.defaultBlockSize;
}

/**
 * Get the preferred block size of the given file from the namenode.
 * @see ClientProtocol#getPreferredBlockSize(String)
 */
public long getBlockSize(String f) throws IOException {
  try {
    return namenode.getPreferredBlockSize(f);
  } catch (IOException ie) {
    LOG.warn("Problem getting block size", ie);
    throw ie;
  }
}

/**
 * Get server default values for a number of configuration params.
 * The result is cached and refreshed at most once per
 * SERVER_DEFAULTS_VALIDITY_PERIOD.
 * NOTE(review): serverDefaults/serverDefaultsLastUpdate are read and
 * written here without synchronization; concurrent callers may refetch
 * redundantly — confirm this is acceptable.
 * @see ClientProtocol#getServerDefaults()
 */
public FsServerDefaults getServerDefaults() throws IOException {
  long now = Time.now();
  if (now - serverDefaultsLastUpdate > SERVER_DEFAULTS_VALIDITY_PERIOD) {
    serverDefaults = namenode.getServerDefaults();
    serverDefaultsLastUpdate = now;
  }
  return serverDefaults;
}

/**
 * Get a canonical token service name for this client's tokens.  Null should
 * be returned if the client is not using tokens.
 * @return the token service for the client
 */
@InterfaceAudience.LimitedPrivate( { "HDFS" })
public String getCanonicalServiceName() {
  return (dtService != null) ? dtService.toString() : null;
}

/**
 * Obtain a delegation token from the namenode and stamp it with this
 * client's token service before returning it.
 * @see ClientProtocol#getDelegationToken(Text)
 */
public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
    throws IOException {
  assert dtService != null;
  Token<DelegationTokenIdentifier> token =
      namenode.getDelegationToken(renewer);
  if (token != null) {
    token.setService(this.dtService);
    LOG.info("Created " + DelegationTokenIdentifier.stringifyToken(token));
  } else {
    LOG.info("Cannot get delegation token from " + renewer);
  }
  return token;
}

/**
 * Renew a delegation token
 * @param token the token to renew
 * @return the new expiration time
 * @throws InvalidToken
 * @throws IOException
 * @deprecated Use Token.renew instead.
 */
@Deprecated
public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
    throws InvalidToken, IOException {
  LOG.info("Renewing " + DelegationTokenIdentifier.stringifyToken(token));
  try {
    return token.renew(conf);
  } catch (InterruptedException ie) {
    throw new RuntimeException("caught interrupted", ie);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(InvalidToken.class,
        AccessControlException.class);
  }
}

// Process-wide cache of "is this address local?" answers, keyed by the
// textual host address.  Never evicted.
private static Map<String, Boolean> localAddrMap = Collections
    .synchronizedMap(new HashMap<String, Boolean>());

/** @return true if the given socket address refers to a local interface. */
static boolean isLocalAddress(InetSocketAddress targetAddr) {
  InetAddress addr = targetAddr.getAddress();
  Boolean cached = localAddrMap.get(addr.getHostAddress());
  if (cached != null) {
    if (LOG.isTraceEnabled()) {
      LOG.trace("Address " + targetAddr +
          (cached ? " is local" : " is not local"));
    }
    return cached;
  }
  boolean local = NetUtils.isLocalAddress(addr);
  if (LOG.isTraceEnabled()) {
    LOG.trace("Address " + targetAddr +
        (local ? " is local" : " is not local"));
  }
  localAddrMap.put(addr.getHostAddress(), local);
  return local;
}

/**
 * Should the block access token be refetched on an exception
 *
 * @param ex Exception received
 * @param targetAddr Target datanode address from where exception was received
 * @return true if block access token has expired or invalid and it should be
 *         refetched
 */
private static boolean tokenRefetchNeeded(IOException ex,
    InetSocketAddress targetAddr) {
  /*
   * Get a new access token and retry. Retry is needed in 2 cases. 1) When
   * both NN and DN re-started while DFSClient holding a cached access token.
   * 2) In the case that NN fails to update its access key at pre-set interval
   * (by a wide margin) and subsequently restarts. In this case, DN
   * re-registers itself with NN and receives a new access key, but DN will
   * delete the old access key from its memory since it's considered expired
   * based on the estimated expiration date.
   */
  if (ex instanceof InvalidBlockTokenException || ex instanceof InvalidToken) {
    LOG.info("Access token was invalid when connecting to " + targetAddr
        + " : " + ex);
    return true;
  }
  return false;
}

/**
 * Cancel a delegation token
 * @param token the token to cancel
 * @throws InvalidToken
 * @throws IOException
 * @deprecated Use Token.cancel instead.
 */
@Deprecated
public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
    throws InvalidToken, IOException {
  LOG.info("Cancelling " + DelegationTokenIdentifier.stringifyToken(token));
  try {
    token.cancel(conf);
  } catch (InterruptedException ie) {
    throw new RuntimeException("caught interrupted", ie);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(InvalidToken.class,
        AccessControlException.class);
  }
}

/**
 * Token renewer for HDFS delegation tokens, registered with the token
 * framework via service discovery.
 */
@InterfaceAudience.Private
public static class Renewer extends TokenRenewer {

  static {
    //Ensure that HDFS Configuration files are loaded before trying to use
    // the renewer.
    HdfsConfiguration.init();
  }

  @Override
  public boolean handleKind(Text kind) {
    return DelegationTokenIdentifier.HDFS_DELEGATION_KIND.equals(kind);
  }

  @SuppressWarnings("unchecked")
  @Override
  public long renew(Token<?> token, Configuration conf) throws IOException {
    Token<DelegationTokenIdentifier> delToken =
        (Token<DelegationTokenIdentifier>) token;
    ClientProtocol nn = getNNProxy(delToken, conf);
    try {
      return nn.renewDelegationToken(delToken);
    } catch (RemoteException re) {
      throw re.unwrapRemoteException(InvalidToken.class,
          AccessControlException.class);
    }
  }

  @SuppressWarnings("unchecked")
  @Override
  public void cancel(Token<?> token, Configuration conf) throws IOException {
    Token<DelegationTokenIdentifier> delToken =
        (Token<DelegationTokenIdentifier>) token;
    LOG.info("Cancelling " +
        DelegationTokenIdentifier.stringifyToken(delToken));
    ClientProtocol nn = getNNProxy(delToken, conf);
    try {
      nn.cancelDelegationToken(delToken);
    } catch (RemoteException re) {
      throw re.unwrapRemoteException(InvalidToken.class,
          AccessControlException.class);
    }
  }

  /**
   * Build a namenode proxy from the service URI embedded in the token,
   * validating that a logical (HA) URI is resolvable with this conf.
   */
  private static ClientProtocol getNNProxy(
      Token<DelegationTokenIdentifier> token, Configuration conf)
      throws IOException {
    URI uri = HAUtil.getServiceUriFromToken(token);
    if (HAUtil.isTokenForLogicalUri(token) &&
        !HAUtil.isLogicalUri(conf, uri)) {
      // If the token is for a logical nameservice, but the configuration
      // we have disagrees about that, we can't actually renew it.
      // This can be the case in MR, for example, if the RM doesn't
      // have all of the HA clusters configured in its configuration.
      throw new IOException("Unable to map logical nameservice URI '" +
          uri + "' to a NameNode. Local configuration does not have " +
          "a failover proxy provider configured.");
    }
    NameNodeProxies.ProxyAndInfo<ClientProtocol> info =
        NameNodeProxies.createProxy(conf, uri, ClientProtocol.class);
    assert info.getDelegationTokenService().equals(token.getService()) :
        "Returned service '" + info.getDelegationTokenService().toString() +
        "' doesn't match expected service '" +
        token.getService().toString() + "'";
    return info.getProxy();
  }

  @Override
  public boolean isManaged(Token<?> token) throws IOException {
    return true;
  }
}

/**
 * Report corrupt blocks that were discovered by the client.
 * @see ClientProtocol#reportBadBlocks(LocatedBlock[])
 */
public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
  namenode.reportBadBlocks(blocks);
}

/** @return the default replication factor from the client configuration. */
public short getDefaultReplication() {
  return dfsClientConf.defaultReplication;
}

/** Fetch located blocks starting at offset, using the configured prefetch size. */
public LocatedBlocks getLocatedBlocks(String src, long start)
    throws IOException {
  return getLocatedBlocks(src, start, dfsClientConf.prefetchSize);
}

/*
 * This is just a wrapper around callGetBlockLocations, but non-static so that
 * we can stub it out for tests.
*/ @VisibleForTesting public LocatedBlocks getLocatedBlocks(String src, long start, long length) throws IOException { return callGetBlockLocations(namenode, src, start, length); } /** * @see ClientProtocol#getBlockLocations(String, long, long) */ static LocatedBlocks callGetBlockLocations(ClientProtocol namenode, String src, long start, long length) throws IOException { try { return namenode.getBlockLocations(src, start, length); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, UnresolvedPathException.class); } } /** * Recover a file's lease * @param src a file's path * @return true if the file is already closed * @throws IOException */ boolean recoverLease(String src) throws IOException { checkOpen(); try { return namenode.recoverLease(src, clientName); } catch (RemoteException re) { throw re.unwrapRemoteException(FileNotFoundException.class, AccessControlException.class, UnresolvedPathException.class); } } /** * Get block location info about file * * getBlockLocations() returns a list of hostnames that store * data for a specific file region. It returns a set of hostnames * for every block within the indicated region. * * This function is very useful when writing code that considers * data-placement when performing operations. For example, the * MapReduce system tries to schedule tasks on the same machines * as the data-block the task processes. */ public BlockLocation[] getBlockLocations(String src, long start, long length) throws IOException, UnresolvedLinkException { LocatedBlocks blocks = getLocatedBlocks(src, start, length); BlockLocation[] locations = DFSUtil.locatedBlocks2Locations(blocks); HdfsBlockLocation[] hdfsLocations = new HdfsBlockLocation[locations.length]; for (int i = 0; i < locations.length; i++) { hdfsLocations[i] = new HdfsBlockLocation(locations[i], blocks.get(i)); } return hdfsLocations; } /** * Get block location information about a list of {@link HdfsBlockLocation}. 
* Used by {@link DistributedFileSystem#getFileBlockStorageLocations(List)} to * get {@link BlockStorageLocation}s for blocks returned by * {@link DistributedFileSystem#getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long)} * . * * This is done by making a round of RPCs to the associated datanodes, asking * the volume of each block replica. The returned array of * {@link BlockStorageLocation} expose this information as a * {@link VolumeId}. * * @param blockLocations * target blocks on which to query volume location information * @return volumeBlockLocations original block array augmented with additional * volume location information for each replica. */ public BlockStorageLocation[] getBlockStorageLocations( List<BlockLocation> blockLocations) throws IOException, UnsupportedOperationException, InvalidBlockTokenException { if (!getConf().getHdfsBlocksMetadataEnabled) { throw new UnsupportedOperationException("Datanode-side support for " + "getVolumeBlockLocations() must also be enabled in the client " + "configuration."); } // Downcast blockLocations and fetch out required LocatedBlock(s) List<LocatedBlock> blocks = new ArrayList<LocatedBlock>(); for (BlockLocation loc : blockLocations) { if (!(loc instanceof HdfsBlockLocation)) { throw new ClassCastException("DFSClient#getVolumeBlockLocations " + "expected to be passed HdfsBlockLocations"); } HdfsBlockLocation hdfsLoc = (HdfsBlockLocation) loc; blocks.add(hdfsLoc.getLocatedBlock()); } // Re-group the LocatedBlocks to be grouped by datanodes, with the values // a list of the LocatedBlocks on the datanode. 
Map<DatanodeInfo, List<LocatedBlock>> datanodeBlocks = new LinkedHashMap<DatanodeInfo, List<LocatedBlock>>(); for (LocatedBlock b : blocks) { for (DatanodeInfo info : b.getLocations()) { if (!datanodeBlocks.containsKey(info)) { datanodeBlocks.put(info, new ArrayList<LocatedBlock>()); } List<LocatedBlock> l = datanodeBlocks.get(info); l.add(b); } } // Make RPCs to the datanodes to get volume locations for its replicas List<HdfsBlocksMetadata> metadatas = BlockStorageLocationUtil .queryDatanodesForHdfsBlocksMetadata(conf, datanodeBlocks, getConf().getFileBlockStorageLocationsNumThreads, getConf().getFileBlockStorageLocationsTimeout, getConf().connectToDnViaHostname); // Regroup the returned VolumeId metadata to again be grouped by // LocatedBlock rather than by datanode Map<LocatedBlock, List<VolumeId>> blockVolumeIds = BlockStorageLocationUtil .associateVolumeIdsWithBlocks(blocks, datanodeBlocks, metadatas); // Combine original BlockLocations with new VolumeId information BlockStorageLocation[] volumeBlockLocations = BlockStorageLocationUtil .convertToVolumeBlockLocations(blocks, blockVolumeIds); return volumeBlockLocations; } public DFSInputStream open(String src) throws IOException, UnresolvedLinkException { return open(src, dfsClientConf.ioBufferSize, true, null); } /** * Create an input stream that obtains a nodelist from the * namenode, and then reads from all the right places. Creates * inner subclass of InputStream that does the right out-of-band * work. * @deprecated Use {@link #open(String, int, boolean)} instead. */ @Deprecated public DFSInputStream open(String src, int buffersize, boolean verifyChecksum, FileSystem.Statistics stats) throws IOException, UnresolvedLinkException { return open(src, buffersize, verifyChecksum); } /** * Create an input stream that obtains a nodelist from the * namenode, and then reads from all the right places. Creates * inner subclass of InputStream that does the right out-of-band * work. 
 */
public DFSInputStream open(String src, int buffersize, boolean verifyChecksum)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  // Get block info from namenode
  return new DFSInputStream(this, src, buffersize, verifyChecksum);
}

/**
 * Get the namenode associated with this DFSClient object
 * @return the namenode associated with this DFSClient object
 */
public ClientProtocol getNamenode() {
  return namenode;
}

/**
 * Call {@link #create(String, boolean, short, long, Progressable)} with
 * default <code>replication</code> and <code>blockSize</code> and null
 * <code>progress</code>.
 */
public OutputStream create(String src, boolean overwrite) throws IOException {
  return create(src, overwrite, dfsClientConf.defaultReplication,
      dfsClientConf.defaultBlockSize, null);
}

/**
 * Call {@link #create(String, boolean, short, long, Progressable)} with
 * default <code>replication</code> and <code>blockSize</code>.
 */
public OutputStream create(String src, boolean overwrite, Progressable progress)
    throws IOException {
  return create(src, overwrite, dfsClientConf.defaultReplication,
      dfsClientConf.defaultBlockSize, progress);
}

/**
 * Call {@link #create(String, boolean, short, long, Progressable)} with
 * null <code>progress</code>.
 */
public OutputStream create(String src, boolean overwrite, short replication,
    long blockSize) throws IOException {
  return create(src, overwrite, replication, blockSize, null);
}

/**
 * Call {@link #create(String, boolean, short, long, Progressable, int)}
 * with default bufferSize.
 */
public OutputStream create(String src, boolean overwrite, short replication,
    long blockSize, Progressable progress) throws IOException {
  return create(src, overwrite, replication, blockSize, progress,
      dfsClientConf.ioBufferSize);
}

/**
 * Call {@link #create(String, FsPermission, EnumSet, short, long,
 * Progressable, int, ChecksumOpt)} with default <code>permission</code>
 * {@link FsPermission#getFileDefault()}.
 *
 * @param src File name
 * @param overwrite overwrite an existing file if true
 * @param replication replication factor for the file
 * @param blockSize maximum block size
 * @param progress interface for reporting client progress
 * @param buffersize underlying buffersize
 *
 * @return output stream
 */
public OutputStream create(String src, boolean overwrite, short replication,
    long blockSize, Progressable progress, int buffersize)
    throws IOException {
  // overwrite maps to the CREATE|OVERWRITE flag combination.
  return create(src, FsPermission.getFileDefault(),
      overwrite ? EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE)
          : EnumSet.of(CreateFlag.CREATE),
      replication, blockSize, progress, buffersize, null);
}

/**
 * Call {@link #create(String, FsPermission, EnumSet, boolean, short,
 * long, Progressable, int, ChecksumOpt)} with <code>createParent</code>
 * set to true.
 */
public DFSOutputStream create(String src, FsPermission permission,
    EnumSet<CreateFlag> flag, short replication, long blockSize,
    Progressable progress, int buffersize, ChecksumOpt checksumOpt)
    throws IOException {
  return create(src, permission, flag, true, replication, blockSize,
      progress, buffersize, checksumOpt, null);
}

/**
 * Create a new dfs file with the specified block replication
 * with write-progress reporting and return an output stream for writing
 * into the file.
 *
 * @param src File name
 * @param permission The permission of the directory being created.
 *          If null, use default permission {@link FsPermission#getFileDefault()}
 * @param flag indicates create a new file or create/overwrite an
 *          existing file or append to an existing file
 * @param createParent create missing parent directory if true
 * @param replication block replication
 * @param blockSize maximum block size
 * @param progress interface for reporting client progress
 * @param buffersize underlying buffer size
 * @param checksumOpt checksum options
 *
 * @return output stream
 *
 * @see ClientProtocol#create(String, FsPermission, String, EnumSetWritable,
 * boolean, short, long) for detailed description of exceptions thrown
 */
public DFSOutputStream create(String src, FsPermission permission,
    EnumSet<CreateFlag> flag, boolean createParent, short replication,
    long blockSize, Progressable progress, int buffersize,
    ChecksumOpt checksumOpt) throws IOException {
  return create(src, permission, flag, createParent, replication, blockSize,
      progress, buffersize, checksumOpt, null);
}

/**
 * Same as {@link #create(String, FsPermission, EnumSet, boolean, short, long,
 * Progressable, int, ChecksumOpt)} with the addition of favoredNodes that is
 * a hint to where the namenode should place the file blocks.
 * The favored nodes hint is not persisted in HDFS. Hence it may be honored
 * at the creation time only. HDFS could move the blocks during balancing or
 * replication, to move the blocks from favored nodes.
 * A value of null means
 * no favored nodes for this create
 */
public DFSOutputStream create(String src, FsPermission permission,
    EnumSet<CreateFlag> flag, boolean createParent, short replication,
    long blockSize, Progressable progress, int buffersize,
    ChecksumOpt checksumOpt, InetSocketAddress[] favoredNodes)
    throws IOException {
  checkOpen();
  if (permission == null) {
    permission = FsPermission.getFileDefault();
  }
  FsPermission masked = permission.applyUMask(dfsClientConf.uMask);
  if(LOG.isDebugEnabled()) {
    LOG.debug(src + ": masked=" + masked);
  }
  String[] favoredNodeStrs = null;
  if (favoredNodes != null) {
    // Convert each favored-node address into the "host:port" form the
    // namenode RPC expects.
    favoredNodeStrs = new String[favoredNodes.length];
    for (int i = 0; i < favoredNodes.length; i++) {
      favoredNodeStrs[i] = favoredNodes[i].getHostName() + ":"
          + favoredNodes[i].getPort();
    }
  }
  final DFSOutputStream result = DFSOutputStream.newStreamForCreate(this,
      src, masked, flag, createParent, replication, blockSize, progress,
      buffersize, dfsClientConf.createChecksum(checksumOpt),
      favoredNodeStrs);
  // Register the stream so the lease renewer keeps this file's lease alive.
  beginFileLease(src, result);
  return result;
}

/**
 * Append to an existing file if {@link CreateFlag#APPEND} is present
 */
private DFSOutputStream primitiveAppend(String src, EnumSet<CreateFlag> flag,
    int buffersize, Progressable progress) throws IOException {
  if (flag.contains(CreateFlag.APPEND)) {
    HdfsFileStatus stat = getFileInfo(src);
    if (stat == null) { // No file to append to
      // New file needs to be created if create option is present
      if (!flag.contains(CreateFlag.CREATE)) {
        throw new FileNotFoundException("failed to append to non-existent file "
            + src + " on client " + clientName);
      }
      return null;
    }
    return callAppend(stat, src, buffersize, progress);
  }
  return null;
}

/**
 * Same as {{@link #create(String, FsPermission, EnumSet, short, long,
 * Progressable, int, ChecksumOpt)} except that the permission
 * is absolute (ie has already been masked with umask.
 */
public DFSOutputStream primitiveCreate(String src, FsPermission absPermission,
    EnumSet<CreateFlag> flag, boolean createParent, short replication,
    long blockSize, Progressable progress, int buffersize,
    ChecksumOpt checksumOpt) throws IOException, UnresolvedLinkException {
  checkOpen();
  CreateFlag.validate(flag);
  // APPEND takes precedence: reuse the existing file's stream if present.
  DFSOutputStream result = primitiveAppend(src, flag, buffersize, progress);
  if (result == null) {
    DataChecksum checksum = dfsClientConf.createChecksum(checksumOpt);
    result = DFSOutputStream.newStreamForCreate(this, src, absPermission,
        flag, createParent, replication, blockSize, progress, buffersize,
        checksum);
  }
  beginFileLease(src, result);
  return result;
}

/**
 * Creates a symbolic link.
 *
 * @see ClientProtocol#createSymlink(String, String,FsPermission, boolean)
 */
public void createSymlink(String target, String link, boolean createParent)
    throws IOException {
  try {
    // Symlink permission is the default permission masked by the client umask.
    FsPermission dirPerm =
        FsPermission.getDefault().applyUMask(dfsClientConf.uMask);
    namenode.createSymlink(target, link, dirPerm, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileAlreadyExistsException.class,
        FileNotFoundException.class,
        ParentNotDirectoryException.class,
        NSQuotaExceededException.class,
        DSQuotaExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}

/**
 * Resolve the *first* symlink, if any, in the path.
 *
 * @see ClientProtocol#getLinkTarget(String)
 */
public String getLinkTarget(String path) throws IOException {
  checkOpen();
  try {
    return namenode.getLinkTarget(path);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class);
  }
}

/** Method to get stream returned by append call */
private DFSOutputStream callAppend(HdfsFileStatus stat, String src,
    int buffersize, Progressable progress) throws IOException {
  LocatedBlock lastBlock = null;
  try {
    // Ask the NN to reopen the file for append; it returns the last
    // (possibly partial) block so the stream can resume writing into it.
    lastBlock = namenode.append(src, clientName);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        SafeModeException.class,
        DSQuotaExceededException.class,
        UnsupportedOperationException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
  return DFSOutputStream.newStreamForAppend(this, src, buffersize, progress,
      lastBlock, stat, dfsClientConf.createChecksum());
}

/**
 * Append to an existing HDFS file.
 *
 * @param src file name
 * @param buffersize buffer size
 * @param progress for reporting write-progress; null is acceptable.
 * @param statistics file system statistics; null is acceptable.
 * @return an output stream for writing into the file
 *
 * @see ClientProtocol#append(String, String)
 */
public HdfsDataOutputStream append(final String src, final int buffersize,
    final Progressable progress, final FileSystem.Statistics statistics
    ) throws IOException {
  final DFSOutputStream out = append(src, buffersize, progress);
  return new HdfsDataOutputStream(out, statistics, out.getInitialLen());
}

/** Internal append: requires the file to already exist. */
private DFSOutputStream append(String src, int buffersize,
    Progressable progress) throws IOException {
  checkOpen();
  HdfsFileStatus stat = getFileInfo(src);
  if (stat == null) { // No file found
    throw new FileNotFoundException("failed to append to non-existent file "
        + src + " on client " + clientName);
  }
  final DFSOutputStream result = callAppend(stat, src, buffersize, progress);
  beginFileLease(src, result);
  return result;
}

/**
 * Set replication for an existing file.
 * @param src file name
 * @param replication
 *
 * @see ClientProtocol#setReplication(String, short)
 */
public boolean setReplication(String src, short replication)
    throws IOException {
  try {
    return namenode.setReplication(src, replication);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        SafeModeException.class,
        DSQuotaExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}

/**
 * Rename file or directory.
 * @see ClientProtocol#rename(String, String)
 * @deprecated Use {@link #rename(String, String, Options.Rename...)} instead.
 */
@Deprecated
public boolean rename(String src, String dst) throws IOException {
  checkOpen();
  try {
    return namenode.rename(src, dst);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        NSQuotaExceededException.class,
        DSQuotaExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}

/**
 * Move blocks from src to trg and delete src
 * See {@link ClientProtocol#concat(String, String [])}.
 */
public void concat(String trg, String [] srcs) throws IOException {
  checkOpen();
  try {
    namenode.concat(trg, srcs);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}

/**
 * Rename file or directory.
 * @see ClientProtocol#rename2(String, String, Options.Rename...)
 */
public void rename(String src, String dst, Options.Rename... options)
    throws IOException {
  checkOpen();
  try {
    namenode.rename2(src, dst, options);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        DSQuotaExceededException.class,
        FileAlreadyExistsException.class,
        FileNotFoundException.class,
        ParentNotDirectoryException.class,
        SafeModeException.class,
        NSQuotaExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}

/**
 * Delete file or directory.
 * See {@link ClientProtocol#delete(String, boolean)}.
 */
@Deprecated
public boolean delete(String src) throws IOException {
  checkOpen();
  // Always recursive.  Note: unlike delete(String, boolean), remote
  // exceptions are NOT unwrapped here.
  return namenode.delete(src, true);
}

/**
 * delete file or directory.
 * delete contents of the directory if non empty and recursive
 * set to true
 *
 * @see ClientProtocol#delete(String, boolean)
 */
public boolean delete(String src, boolean recursive) throws IOException {
  checkOpen();
  try {
    return namenode.delete(src, recursive);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        SafeModeException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}

/** Implemented using getFileInfo(src) */
public boolean exists(String src) throws IOException {
  checkOpen();
  return getFileInfo(src) != null;
}

/**
 * Get a partial listing of the indicated directory
 * No block locations need to be fetched
 */
public DirectoryListing listPaths(String src, byte[] startAfter)
    throws IOException {
  return listPaths(src, startAfter, false);
}

/**
 * Get a partial listing of the indicated directory
 *
 * Recommend to use HdfsFileStatus.EMPTY_NAME as startAfter
 * if the application wants to fetch a listing starting from
 * the first entry in the directory
 *
 * @see ClientProtocol#getListing(String, byte[], boolean)
 */
public DirectoryListing listPaths(String src, byte[] startAfter,
    boolean needLocation) throws IOException {
  checkOpen();
  try {
    return namenode.getListing(src, startAfter, needLocation);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        UnresolvedPathException.class);
  }
}

/**
 * Get the file info for a specific file or directory.
 * @param src The string representation of the path to the file
 * @return object containing information regarding the file
 *         or null if file not found
 *
 * @see ClientProtocol#getFileInfo(String) for description of exceptions
 */
public HdfsFileStatus getFileInfo(String src) throws IOException {
  checkOpen();
  try {
    return namenode.getFileInfo(src);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        UnresolvedPathException.class);
  }
}

/**
 * Close status of a file
 * @return true if file is already closed
 */
public boolean isFileClosed(String src) throws IOException{
  checkOpen();
  try {
    return namenode.isFileClosed(src);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        UnresolvedPathException.class);
  }
}

/**
 * Get the file info for a specific file or directory. If src
 * refers to a symlink then the FileStatus of the link is returned.
 * @param src path to a file or directory.
 *
 * For description of exceptions thrown
 * @see ClientProtocol#getFileLinkInfo(String)
 */
public HdfsFileStatus getFileLinkInfo(String src) throws IOException {
  checkOpen();
  try {
    return namenode.getFileLinkInfo(src);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        UnresolvedPathException.class);
  }
}

/**
 * Get the checksum of a file.
 * @param src The file path
 * @return The checksum
 * @see DistributedFileSystem#getFileChecksum(Path)
 */
public MD5MD5CRC32FileChecksum getFileChecksum(String src) throws IOException {
  checkOpen();
  return getFileChecksum(src, clientName, namenode, socketFactory,
      dfsClientConf.socketTimeout, getDataEncryptionKey(),
      dfsClientConf.connectToDnViaHostname);
}

/** Drop the cached data-transfer encryption key, forcing a refetch. */
@InterfaceAudience.Private
public void clearDataEncryptionKey() {
  LOG.debug("Clearing encryption key");
  synchronized (this) {
    encryptionKey = null;
  }
}

/**
 * @return true if data sent between this client and DNs should be encrypted,
 *         false otherwise.
 * @throws IOException in the event of error communicating with the NN
 */
boolean shouldEncryptData() throws IOException {
  FsServerDefaults d = getServerDefaults();
  return d == null ? false : d.getEncryptDataTransfer();
}

@InterfaceAudience.Private
public DataEncryptionKey getDataEncryptionKey()
    throws IOException {
  if (shouldEncryptData()) {
    synchronized (this) {
      // Fetch a fresh key from the NN if we have none or ours has expired.
      if (encryptionKey == null ||
          encryptionKey.expiryDate < Time.now()) {
        LOG.debug("Getting new encryption token from NN");
        encryptionKey = namenode.getDataEncryptionKey();
      }
      return encryptionKey;
    }
  } else {
    return null;
  }
}

/**
 * Get the checksum of a file.
 * @param src The file path
 * @param clientName the name of the client requesting the checksum.
 * @param namenode the RPC proxy for the namenode
 * @param socketFactory to create sockets to connect to DNs
 * @param socketTimeout timeout to use when connecting and waiting for a response
 * @param encryptionKey the key needed to communicate with DNs in this cluster
 * @param connectToDnViaHostname whether the client should use hostnames instead of IPs
 * @return The checksum
 */
private static MD5MD5CRC32FileChecksum getFileChecksum(String src,
    String clientName, ClientProtocol namenode, SocketFactory socketFactory,
    int socketTimeout, DataEncryptionKey encryptionKey,
    boolean connectToDnViaHostname) throws IOException {
  //get all block locations
  LocatedBlocks blockLocations = callGetBlockLocations(namenode, src, 0,
      Long.MAX_VALUE);
  if (null == blockLocations) {
    throw new FileNotFoundException("File does not exist: " + src);
  }
  List<LocatedBlock> locatedblocks = blockLocations.getLocatedBlocks();
  final DataOutputBuffer md5out = new DataOutputBuffer();
  int bytesPerCRC = -1;
  DataChecksum.Type crcType = DataChecksum.Type.DEFAULT;
  long crcPerBlock = 0;
  boolean refetchBlocks = false;
  int lastRetriedIndex = -1;

  //get block checksum for each block
  for(int i = 0; i < locatedblocks.size(); i++) {
    if (refetchBlocks) {  // refetch to get fresh tokens
      blockLocations = callGetBlockLocations(namenode, src, 0,
          Long.MAX_VALUE);
      if (null == blockLocations) {
        throw new FileNotFoundException("File does not exist: " + src);
      }
      locatedblocks = blockLocations.getLocatedBlocks();
      refetchBlocks = false;
    }
    LocatedBlock lb = locatedblocks.get(i);
    final ExtendedBlock block = lb.getBlock();
    final DatanodeInfo[] datanodes = lb.getLocations();

    //try each datanode location of the block
    final int timeout = 3000 * datanodes.length + socketTimeout;
    boolean done = false;
    for(int j = 0; !done && j < datanodes.length; j++) {
      DataOutputStream out = null;
      DataInputStream in = null;

      try {
        //connect to a datanode
        IOStreamPair pair = connectToDN(socketFactory, connectToDnViaHostname,
            encryptionKey, datanodes[j], timeout);
        out = new DataOutputStream(new BufferedOutputStream(pair.out,
            HdfsConstants.SMALL_BUFFER_SIZE));
        in = new DataInputStream(pair.in);

        if (LOG.isDebugEnabled()) {
          LOG.debug("write to " + datanodes[j] + ": "
              + Op.BLOCK_CHECKSUM + ", block=" + block);
        }
        // get block MD5
        new Sender(out).blockChecksum(block, lb.getBlockToken());

        final BlockOpResponseProto reply =
            BlockOpResponseProto.parseFrom(PBHelper.vintPrefixed(in));

        if (reply.getStatus() != Status.SUCCESS) {
          if (reply.getStatus() == Status.ERROR_ACCESS_TOKEN) {
            throw new InvalidBlockTokenException();
          } else {
            throw new IOException("Bad response " + reply + " for block "
                + block + " from datanode " + datanodes[j]);
          }
        }

        OpBlockChecksumResponseProto checksumData =
            reply.getChecksumResponse();

        //read byte-per-checksum
        final int bpc = checksumData.getBytesPerCrc();
        if (i == 0) { //first block
          bytesPerCRC = bpc;
        } else if (bpc != bytesPerCRC) {
          // All blocks of a file must share the same bytes-per-checksum.
          throw new IOException("Byte-per-checksum not matched: bpc=" + bpc
              + " but bytesPerCRC=" + bytesPerCRC);
        }

        //read crc-per-block
        // NOTE(review): crcPerBlock is only recorded for multi-block files;
        // single-block files report 0 — confirm this matches the
        // MD5MD5CRC32FileChecksum contract.
        final long cpb = checksumData.getCrcPerBlock();
        if (locatedblocks.size() > 1 && i == 0) {
          crcPerBlock = cpb;
        }

        //read md5
        final MD5Hash md5 = new MD5Hash(
            checksumData.getMd5().toByteArray());
        md5.write(md5out);

        // read crc-type
        final DataChecksum.Type ct;
        if (checksumData.hasCrcType()) {
          ct = PBHelper.convert(checksumData.getCrcType());
        } else {
          LOG.debug("Retrieving checksum from an earlier-version DataNode: " +
              "inferring checksum by reading first byte");
          ct = inferChecksumTypeByReading(
              clientName, socketFactory, socketTimeout, lb, datanodes[j],
              encryptionKey, connectToDnViaHostname);
        }

        if (i == 0) { // first block
          crcType = ct;
        } else if (crcType != DataChecksum.Type.MIXED
            && crcType != ct) {
          // if crc types are mixed in a file
          crcType = DataChecksum.Type.MIXED;
        }

        done = true;

        if (LOG.isDebugEnabled()) {
          if (i == 0) {
            LOG.debug("set bytesPerCRC=" + bytesPerCRC
                + ", crcPerBlock=" + crcPerBlock);
          }
          LOG.debug("got reply from " + datanodes[j] + ": md5=" + md5);
        }
      } catch (InvalidBlockTokenException ibte) {
        // Retry each block at most once with refreshed tokens.
        if (i > lastRetriedIndex) {
          if (LOG.isDebugEnabled()) {
            LOG.debug("Got access token error in response to OP_BLOCK_CHECKSUM "
                + "for file " + src + " for block " + block
                + " from datanode " + datanodes[j]
                + ". Will retry the block once.");
          }
          lastRetriedIndex = i;
          done = true; // actually it's not done; but we'll retry
          i--; // repeat at i-th block
          refetchBlocks = true;
          break;
        }
      } catch (IOException ie) {
        // Try the next replica of this block.
        LOG.warn("src=" + src + ", datanodes["+j+"]=" + datanodes[j], ie);
      } finally {
        IOUtils.closeStream(in);
        IOUtils.closeStream(out);
      }
    }

    if (!done) {
      throw new IOException("Fail to get block MD5 for " + block);
    }
  }

  //compute file MD5
  final MD5Hash fileMD5 = MD5Hash.digest(md5out.getData());
  switch (crcType) {
    case CRC32:
      return new MD5MD5CRC32GzipFileChecksum(bytesPerCRC,
          crcPerBlock, fileMD5);
    case CRC32C:
      return new MD5MD5CRC32CastagnoliFileChecksum(bytesPerCRC,
          crcPerBlock, fileMD5);
    default:
      // If there is no block allocated for the file,
      // return one with the magic entry that matches what previous
      // hdfs versions return.
      if (locatedblocks.size() == 0) {
        return new MD5MD5CRC32GzipFileChecksum(0, 0, fileMD5);
      }

      // we should never get here since the validity was checked
      // when getCrcType() was called above.
      return null;
  }
}

/**
 * Connect to the given datanode's datatransfer port, and return
 * the resulting IOStreamPair. This includes encryption wrapping, etc.
*/ private static IOStreamPair connectToDN( SocketFactory socketFactory, boolean connectToDnViaHostname, DataEncryptionKey encryptionKey, DatanodeInfo dn, int timeout) throws IOException { boolean success = false; Socket sock = null; try { sock = socketFactory.createSocket(); String dnAddr = dn.getXferAddr(connectToDnViaHostname); if (LOG.isDebugEnabled()) { LOG.debug("Connecting to datanode " + dnAddr); } NetUtils.connect(sock, NetUtils.createSocketAddr(dnAddr), timeout); sock.setSoTimeout(timeout); OutputStream unbufOut = NetUtils.getOutputStream(sock); InputStream unbufIn = NetUtils.getInputStream(sock); IOStreamPair ret; if (encryptionKey != null) { ret = DataTransferEncryptor.getEncryptedStreams( unbufOut, unbufIn, encryptionKey); } else { ret = new IOStreamPair(unbufIn, unbufOut); } success = true; return ret; } finally { if (!success) { IOUtils.closeSocket(sock); } } } /** * Infer the checksum type for a replica by sending an OP_READ_BLOCK * for the first byte of that replica. This is used for compatibility * with older HDFS versions which did not include the checksum type in * OpBlockChecksumResponseProto. 
* * @param in input stream from datanode * @param out output stream to datanode * @param lb the located block * @param clientName the name of the DFSClient requesting the checksum * @param dn the connected datanode * @return the inferred checksum type * @throws IOException if an error occurs */ private static Type inferChecksumTypeByReading( String clientName, SocketFactory socketFactory, int socketTimeout, LocatedBlock lb, DatanodeInfo dn, DataEncryptionKey encryptionKey, boolean connectToDnViaHostname) throws IOException { IOStreamPair pair = connectToDN(socketFactory, connectToDnViaHostname, encryptionKey, dn, socketTimeout); try { DataOutputStream out = new DataOutputStream(new BufferedOutputStream(pair.out, HdfsConstants.SMALL_BUFFER_SIZE)); DataInputStream in = new DataInputStream(pair.in); new Sender(out).readBlock(lb.getBlock(), lb.getBlockToken(), clientName, 0, 1, true, CachingStrategy.newDefaultStrategy()); final BlockOpResponseProto reply = BlockOpResponseProto.parseFrom(PBHelper.vintPrefixed(in)); if (reply.getStatus() != Status.SUCCESS) { if (reply.getStatus() == Status.ERROR_ACCESS_TOKEN) { throw new InvalidBlockTokenException(); } else { throw new IOException("Bad response " + reply + " trying to read " + lb.getBlock() + " from datanode " + dn); } } return PBHelper.convert(reply.getReadOpChecksumInfo().getChecksum().getType()); } finally { IOUtils.cleanup(null, pair.in, pair.out); } } /** * Set permissions to a file or directory. * @param src path name. * @param permission * * @see ClientProtocol#setPermission(String, FsPermission) */ public void setPermission(String src, FsPermission permission) throws IOException { checkOpen(); try { namenode.setPermission(src, permission); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, SafeModeException.class, UnresolvedPathException.class, SnapshotAccessControlException.class); } } /** * Set file or directory owner. * @param src path name. 
* @param username user id. * @param groupname user group. * * @see ClientProtocol#setOwner(String, String, String) */ public void setOwner(String src, String username, String groupname) throws IOException { checkOpen(); try { namenode.setOwner(src, username, groupname); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, SafeModeException.class, UnresolvedPathException.class, SnapshotAccessControlException.class); } } /** * @see ClientProtocol#getStats() */ public FsStatus getDiskStatus() throws IOException { long rawNums[] = namenode.getStats(); return new FsStatus(rawNums[0], rawNums[1], rawNums[2]); } /** * Returns count of blocks with no good replicas left. Normally should be * zero. * @throws IOException */ public long getMissingBlocksCount() throws IOException { return namenode.getStats()[ClientProtocol.GET_STATS_MISSING_BLOCKS_IDX]; } /** * Returns count of blocks with one of more replica missing. * @throws IOException */ public long getUnderReplicatedBlocksCount() throws IOException { return namenode.getStats()[ClientProtocol.GET_STATS_UNDER_REPLICATED_IDX]; } /** * Returns count of blocks with at least one replica marked corrupt. * @throws IOException */ public long getCorruptBlocksCount() throws IOException { return namenode.getStats()[ClientProtocol.GET_STATS_CORRUPT_BLOCKS_IDX]; } /** * @return a list in which each entry describes a corrupt file/block * @throws IOException */ public CorruptFileBlocks listCorruptFileBlocks(String path, String cookie) throws IOException { return namenode.listCorruptFileBlocks(path, cookie); } public DatanodeInfo[] datanodeReport(DatanodeReportType type) throws IOException { return namenode.getDatanodeReport(type); } /** * Enter, leave or get safe mode. 
* * @see ClientProtocol#setSafeMode(HdfsConstants.SafeModeAction,boolean) */ public boolean setSafeMode(SafeModeAction action) throws IOException { return setSafeMode(action, false); } /** * Enter, leave or get safe mode. * * @param action * One of SafeModeAction.GET, SafeModeAction.ENTER and * SafeModeActiob.LEAVE * @param isChecked * If true, then check only active namenode's safemode status, else * check first namenode's status. * @see ClientProtocol#setSafeMode(HdfsConstants.SafeModeAction, boolean) */ public boolean setSafeMode(SafeModeAction action, boolean isChecked) throws IOException{ return namenode.setSafeMode(action, isChecked); } /** * Create one snapshot. * * @param snapshotRoot The directory where the snapshot is to be taken * @param snapshotName Name of the snapshot * @return the snapshot path. * @see ClientProtocol#createSnapshot(String, String) */ public String createSnapshot(String snapshotRoot, String snapshotName) throws IOException { checkOpen(); try { return namenode.createSnapshot(snapshotRoot, snapshotName); } catch(RemoteException re) { throw re.unwrapRemoteException(); } } /** * Delete a snapshot of a snapshottable directory. * * @param snapshotRoot The snapshottable directory that the * to-be-deleted snapshot belongs to * @param snapshotName The name of the to-be-deleted snapshot * @throws IOException * @see ClientProtocol#deleteSnapshot(String, String) */ public void deleteSnapshot(String snapshotRoot, String snapshotName) throws IOException { try { namenode.deleteSnapshot(snapshotRoot, snapshotName); } catch(RemoteException re) { throw re.unwrapRemoteException(); } } /** * Rename a snapshot. 
* @param snapshotDir The directory path where the snapshot was taken * @param snapshotOldName Old name of the snapshot * @param snapshotNewName New name of the snapshot * @throws IOException * @see ClientProtocol#renameSnapshot(String, String, String) */ public void renameSnapshot(String snapshotDir, String snapshotOldName, String snapshotNewName) throws IOException { checkOpen(); try { namenode.renameSnapshot(snapshotDir, snapshotOldName, snapshotNewName); } catch(RemoteException re) { throw re.unwrapRemoteException(); } } /** * Get all the current snapshottable directories. * @return All the current snapshottable directories * @throws IOException * @see ClientProtocol#getSnapshottableDirListing() */ public SnapshottableDirectoryStatus[] getSnapshottableDirListing() throws IOException { checkOpen(); try { return namenode.getSnapshottableDirListing(); } catch(RemoteException re) { throw re.unwrapRemoteException(); } } /** * Allow snapshot on a directory. * * @see ClientProtocol#allowSnapshot(String snapshotRoot) */ public void allowSnapshot(String snapshotRoot) throws IOException { checkOpen(); try { namenode.allowSnapshot(snapshotRoot); } catch (RemoteException re) { throw re.unwrapRemoteException(); } } /** * Disallow snapshot on a directory. * * @see ClientProtocol#disallowSnapshot(String snapshotRoot) */ public void disallowSnapshot(String snapshotRoot) throws IOException { checkOpen(); try { namenode.disallowSnapshot(snapshotRoot); } catch (RemoteException re) { throw re.unwrapRemoteException(); } } /** * Get the difference between two snapshots, or between a snapshot and the * current tree of a directory. 
* @see ClientProtocol#getSnapshotDiffReport(String, String, String) */ public SnapshotDiffReport getSnapshotDiffReport(String snapshotDir, String fromSnapshot, String toSnapshot) throws IOException { checkOpen(); try { return namenode.getSnapshotDiffReport(snapshotDir, fromSnapshot, toSnapshot); } catch(RemoteException re) { throw re.unwrapRemoteException(); } } /** * Save namespace image. * * @see ClientProtocol#saveNamespace() */ void saveNamespace() throws AccessControlException, IOException { try { namenode.saveNamespace(); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class); } } /** * Rolls the edit log on the active NameNode. * @return the txid of the new log segment * * @see ClientProtocol#rollEdits() */ long rollEdits() throws AccessControlException, IOException { try { return namenode.rollEdits(); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class); } } /** * enable/disable restore failed storage. * * @see ClientProtocol#restoreFailedStorage(String arg) */ boolean restoreFailedStorage(String arg) throws AccessControlException, IOException{ return namenode.restoreFailedStorage(arg); } /** * Refresh the hosts and exclude files. (Rereads them.) * See {@link ClientProtocol#refreshNodes()} * for more details. * * @see ClientProtocol#refreshNodes() */ public void refreshNodes() throws IOException { namenode.refreshNodes(); } /** * Dumps DFS data structures into specified file. * * @see ClientProtocol#metaSave(String) */ public void metaSave(String pathname) throws IOException { namenode.metaSave(pathname); } /** * Requests the namenode to tell all datanodes to use a new, non-persistent * bandwidth value for dfs.balance.bandwidthPerSec. * See {@link ClientProtocol#setBalancerBandwidth(long)} * for more details. 
* * @see ClientProtocol#setBalancerBandwidth(long) */ public void setBalancerBandwidth(long bandwidth) throws IOException { namenode.setBalancerBandwidth(bandwidth); } /** * @see ClientProtocol#finalizeUpgrade() */ public void finalizeUpgrade() throws IOException { namenode.finalizeUpgrade(); } /** */ @Deprecated public boolean mkdirs(String src) throws IOException { return mkdirs(src, null, true); } /** * Create a directory (or hierarchy of directories) with the given * name and permission. * * @param src The path of the directory being created * @param permission The permission of the directory being created. * If permission == null, use {@link FsPermission#getDefault()}. * @param createParent create missing parent directory if true * * @return True if the operation success. * * @see ClientProtocol#mkdirs(String, FsPermission, boolean) */ public boolean mkdirs(String src, FsPermission permission, boolean createParent) throws IOException { if (permission == null) { permission = FsPermission.getDefault(); } FsPermission masked = permission.applyUMask(dfsClientConf.uMask); return primitiveMkdir(src, masked, createParent); } /** * Same {{@link #mkdirs(String, FsPermission, boolean)} except * that the permissions has already been masked against umask. */ public boolean primitiveMkdir(String src, FsPermission absPermission) throws IOException { return primitiveMkdir(src, absPermission, true); } /** * Same {{@link #mkdirs(String, FsPermission, boolean)} except * that the permissions has already been masked against umask. 
*/ public boolean primitiveMkdir(String src, FsPermission absPermission, boolean createParent) throws IOException { checkOpen(); if (absPermission == null) { absPermission = FsPermission.getDefault().applyUMask(dfsClientConf.uMask); } if(LOG.isDebugEnabled()) { LOG.debug(src + ": masked=" + absPermission); } try { return namenode.mkdirs(src, absPermission, createParent); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, InvalidPathException.class, FileAlreadyExistsException.class, FileNotFoundException.class, ParentNotDirectoryException.class, SafeModeException.class, NSQuotaExceededException.class, DSQuotaExceededException.class, UnresolvedPathException.class, SnapshotAccessControlException.class); } } /** * Get {@link ContentSummary} rooted at the specified directory. * @param path The string representation of the path * * @see ClientProtocol#getContentSummary(String) */ ContentSummary getContentSummary(String src) throws IOException { try { return namenode.getContentSummary(src); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, UnresolvedPathException.class); } } /** * Sets or resets quotas for a directory. 
* @see ClientProtocol#setQuota(String, long, long) */ void setQuota(String src, long namespaceQuota, long diskspaceQuota) throws IOException { // sanity check if ((namespaceQuota <= 0 && namespaceQuota != HdfsConstants.QUOTA_DONT_SET && namespaceQuota != HdfsConstants.QUOTA_RESET) || (diskspaceQuota <= 0 && diskspaceQuota != HdfsConstants.QUOTA_DONT_SET && diskspaceQuota != HdfsConstants.QUOTA_RESET)) { throw new IllegalArgumentException("Invalid values for quota : " + namespaceQuota + " and " + diskspaceQuota); } try { namenode.setQuota(src, namespaceQuota, diskspaceQuota); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, NSQuotaExceededException.class, DSQuotaExceededException.class, UnresolvedPathException.class, SnapshotAccessControlException.class); } } /** * set the modification and access time of a file * * @see ClientProtocol#setTimes(String, long, long) */ public void setTimes(String src, long mtime, long atime) throws IOException { checkOpen(); try { namenode.setTimes(src, mtime, atime); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, UnresolvedPathException.class, SnapshotAccessControlException.class); } } /** * @deprecated use {@link HdfsDataInputStream} instead. */ @Deprecated public static class DFSDataInputStream extends HdfsDataInputStream { public DFSDataInputStream(DFSInputStream in) throws IOException { super(in); } } void reportChecksumFailure(String file, ExtendedBlock blk, DatanodeInfo dn) { DatanodeInfo [] dnArr = { dn }; LocatedBlock [] lblocks = { new LocatedBlock(blk, dnArr) }; reportChecksumFailure(file, lblocks); } // just reports checksum failure and ignores any exception during the report. void reportChecksumFailure(String file, LocatedBlock lblocks[]) { try { reportBadBlocks(lblocks); } catch (IOException ie) { LOG.info("Found corruption while reading " + file + ". 
Error repairing corrupt blocks. Bad blocks remain.", ie); } } @Override public String toString() { return getClass().getSimpleName() + "[clientName=" + clientName + ", ugi=" + ugi + "]"; } public DomainSocketFactory getDomainSocketFactory() { return domainSocketFactory; } public void disableLegacyBlockReaderLocal() { shouldUseLegacyBlockReaderLocal = false; } public boolean useLegacyBlockReaderLocal() { return shouldUseLegacyBlockReaderLocal; } public CachingStrategy getDefaultReadCachingStrategy() { return defaultReadCachingStrategy; } public CachingStrategy getDefaultWriteCachingStrategy() { return defaultWriteCachingStrategy; } }
tomatoKiller/Hadoop_Source_Learn
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
Java
apache-2.0
95,752
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using Microsoft.Azure.Commands.Sql.Properties;
using Microsoft.Azure.Commands.Sql.Security.Model;
using Microsoft.Azure.Commands.Sql.Security.Services;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Sql.Security.Cmdlet.DataMasking
{
    /// <summary>
    /// Cmdlet that updates the properties of an existing data masking rule.
    /// </summary>
    [Cmdlet(VerbsCommon.Set, "AzureSqlDatabaseDataMaskingRule")]
    public class SetAzureSqlDatabaseDataMaskingRule : BuildAzureSqlDatabaseDataMaskingRule
    {
        /// <summary>
        /// Gets or sets the name of the column the rule masks.
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The column name.")]
        public override string ColumnName { get; set; }

        /// <summary>
        /// Gets or sets the name of the table the rule applies to.
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The table name.")]
        public override string TableName { get; set; }

        /// <summary>
        /// Gets or sets the masking function used by the rule.
        /// Intentionally re-declared here (overriding the base property) so that,
        /// unlike in the rule-creation cmdlet, this parameter is NOT mandatory.
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The type of the masking function")]
        [ValidateSet(SecurityConstants.NoMasking, SecurityConstants.Default, SecurityConstants.Text, SecurityConstants.Number, SecurityConstants.SSN, SecurityConstants.CCN, SecurityConstants.Email, IgnoreCase = false)]
        public override string MaskingFunction { get; set; }

        /// <summary>
        /// Verifies that a rule with the user-provided identifier already exists.
        /// </summary>
        /// <param name="rules">The data masking rules currently defined for the database</param>
        /// <returns>An error message, or null when the operation is valid</returns>
        protected override string ValidateOperation(IEnumerable<DatabaseDataMaskingRuleModel> rules)
        {
            if (rules.All(rule => rule.RuleId != RuleId))
            {
                return string.Format(CultureInfo.InvariantCulture, Resources.SetDataMaskingRuleIdDoesNotExistError, RuleId);
            }
            return null;
        }

        /// <summary>
        /// Fetches the existing rule matching the user-provided identifier.
        /// </summary>
        /// <param name="rules">The database's data masking rules</param>
        /// <returns>The data masking rule model this cmdlet operates on</returns>
        protected override DatabaseDataMaskingRuleModel GetRule(IEnumerable<DatabaseDataMaskingRuleModel> rules)
        {
            return rules.First(rule => rule.RuleId == RuleId);
        }

        /// <summary>
        /// Returns the rule list to be sent back to the service. The rule
        /// obtained from GetRule is an element of this same list (presumably
        /// modified in place by the caller), so the list is returned unchanged.
        /// </summary>
        /// <param name="rules">The data masking rules already defined for this database</param>
        /// <param name="rule">The rule that this cmdlet operated on</param>
        /// <returns>The updated list of data masking rules</returns>
        protected override IEnumerable<DatabaseDataMaskingRuleModel> UpdateRuleList(IEnumerable<DatabaseDataMaskingRuleModel> rules, DatabaseDataMaskingRuleModel rule)
        {
            return rules;
        }
    }
}
enavro/azure-powershell
src/ResourceManager/Sql/Commands.Sql/Security/Cmdlet/DataMasking/SetAzureSqlDatabaseDataMaskingRule.cs
C#
apache-2.0
4,202
module.exports = function(hljs) { var ELIXIR_IDENT_RE = '[a-zA-Z_][a-zA-Z0-9_.]*(\\!|\\?)?'; var ELIXIR_METHOD_RE = '[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?'; var ELIXIR_KEYWORDS = 'and false then defined module in return redo retry end for true self when ' + 'next until do begin unless nil break not case cond alias while ensure or ' + 'include use alias fn quote require import with|0'; var SUBST = { className: 'subst', begin: '#\\{', end: '}', lexemes: ELIXIR_IDENT_RE, keywords: ELIXIR_KEYWORDS }; var SIGIL_DELIMITERS = '[/|([{<"\']' var LOWERCASE_SIGIL = { className: 'string', begin: '~[a-z]' + '(?=' + SIGIL_DELIMITERS + ')', contains: [ { endsParent:true, contains: [{ contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ { begin: /"/, end: /"/ }, { begin: /'/, end: /'/ }, { begin: /\//, end: /\// }, { begin: /\|/, end: /\|/ }, { begin: /\(/, end: /\)/ }, { begin: /\[/, end: /\]/ }, { begin: /\{/, end: /\}/ }, { begin: /</, end: />/ } ] }] }, ], }; var UPCASE_SIGIL = { className: 'string', begin: '~[A-Z]' + '(?=' + SIGIL_DELIMITERS + ')', contains: [ { begin: /"/, end: /"/ }, { begin: /'/, end: /'/ }, { begin: /\//, end: /\// }, { begin: /\|/, end: /\|/ }, { begin: /\(/, end: /\)/ }, { begin: /\[/, end: /\]/ }, { begin: /\{/, end: /\}/ }, { begin: /\</, end: /\>/ } ] }; var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ { begin: /"""/, end: /"""/, }, { begin: /'''/, end: /'''/, }, { begin: /~S"""/, end: /"""/, contains: [] }, { begin: /~S"/, end: /"/, contains: [] }, { begin: /~S'''/, end: /'''/, contains: [] }, { begin: /~S'/, end: /'/, contains: [] }, { begin: /'/, end: /'/ }, { begin: /"/, end: /"/ }, ] }; var FUNCTION = { className: 'function', beginKeywords: 'def defp defmacro', end: /\B\b/, // the mode is ended by the title contains: [ hljs.inherit(hljs.TITLE_MODE, { begin: ELIXIR_IDENT_RE, endsParent: true }) ] }; var CLASS = hljs.inherit(FUNCTION, { className: 'class', 
beginKeywords: 'defimpl defmodule defprotocol defrecord', end: /\bdo\b|$|;/ }); var ELIXIR_DEFAULT_CONTAINS = [ STRING, UPCASE_SIGIL, LOWERCASE_SIGIL, hljs.HASH_COMMENT_MODE, CLASS, FUNCTION, { begin: '::' }, { className: 'symbol', begin: ':(?![\\s:])', contains: [STRING, {begin: ELIXIR_METHOD_RE}], relevance: 0 }, { className: 'symbol', begin: ELIXIR_IDENT_RE + ':(?!:)', relevance: 0 }, { className: 'number', begin: '(\\b0o[0-7_]+)|(\\b0b[01_]+)|(\\b0x[0-9a-fA-F_]+)|(-?\\b[1-9][0-9_]*(.[0-9_]+([eE][-+]?[0-9]+)?)?)', relevance: 0 }, { className: 'variable', begin: '(\\$\\W)|((\\$|\\@\\@?)(\\w+))' }, { begin: '->' }, { // regexp container begin: '(' + hljs.RE_STARTERS_RE + ')\\s*', contains: [ hljs.HASH_COMMENT_MODE, { className: 'regexp', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ { begin: '/', end: '/[a-z]*' }, { begin: '%r\\[', end: '\\][a-z]*' } ] } ], relevance: 0 } ]; SUBST.contains = ELIXIR_DEFAULT_CONTAINS; return { lexemes: ELIXIR_IDENT_RE, keywords: ELIXIR_KEYWORDS, contains: ELIXIR_DEFAULT_CONTAINS }; };
ZenekeZene/zenekezene.github.io
node_modules/highlight.js/lib/languages/elixir.js
JavaScript
apache-2.0
3,976
package com.google.api.ads.adwords.jaxws.v201506.cm;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;


/**
 * 
 *             Defines the elements within the header of a SOAP response.
 *           
 * 
 * <p>Java class for SoapResponseHeader complex type.
 * 
 * <p>The following schema fragment specifies the expected content contained within this class.
 * 
 * <pre>
 * &lt;complexType name="SoapResponseHeader">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="requestId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="serviceName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="methodName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="operations" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
 *         &lt;element name="responseTime" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 * 
 * 
 */
// NOTE(review): this appears to be a machine-generated JAXB binding class.
// XmlAccessType.FIELD means JAXB binds the XML elements to the fields below
// by name, so the field names must continue to match the schema fragment
// above -- do not rename them or hand-edit beyond comments.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "SoapResponseHeader", propOrder = {
    "requestId",
    "serviceName",
    "methodName",
    "operations",
    "responseTime"
})
public class SoapResponseHeader {

    // All fields are optional (minOccurs="0" in the schema) and therefore
    // may be null after unmarshalling.
    protected String requestId;
    protected String serviceName;
    protected String methodName;
    protected Long operations;
    protected Long responseTime;

    /**
     * Gets the value of the requestId property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getRequestId() {
        return requestId;
    }

    /**
     * Sets the value of the requestId property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setRequestId(String value) {
        this.requestId = value;
    }

    /**
     * Gets the value of the serviceName property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getServiceName() {
        return serviceName;
    }

    /**
     * Sets the value of the serviceName property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setServiceName(String value) {
        this.serviceName = value;
    }

    /**
     * Gets the value of the methodName property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getMethodName() {
        return methodName;
    }

    /**
     * Sets the value of the methodName property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setMethodName(String value) {
        this.methodName = value;
    }

    /**
     * Gets the value of the operations property.
     * 
     * @return
     *     possible object is
     *     {@link Long }
     *     
     */
    public Long getOperations() {
        return operations;
    }

    /**
     * Sets the value of the operations property.
     * 
     * @param value
     *     allowed object is
     *     {@link Long }
     *     
     */
    public void setOperations(Long value) {
        this.operations = value;
    }

    /**
     * Gets the value of the responseTime property.
     * 
     * @return
     *     possible object is
     *     {@link Long }
     *     
     */
    public Long getResponseTime() {
        return responseTime;
    }

    /**
     * Sets the value of the responseTime property.
     * 
     * @param value
     *     allowed object is
     *     {@link Long }
     *     
     */
    public void setResponseTime(Long value) {
        this.responseTime = value;
    }

}
stoksey69/googleads-java-lib
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201506/cm/SoapResponseHeader.java
Java
apache-2.0
4,033
/*
 * Copyright 2015 MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This package contains classes related to cluster and connection events.
 */
package com.mongodb.event;
kay-kim/mongo-java-driver
driver-core/src/main/com/mongodb/event/package-info.java
Java
apache-2.0
700
/*
 * Copyright (c) 2002, 2003, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */

package com.sun.corba.se.spi.protocol;

import org.omg.CORBA.portable.ServantObject;

/**
 * Dispatcher used by local stubs to decide whether a request against an
 * object reference can be serviced by a co-located servant instead of
 * going through the remote invocation path.
 *
 * @author Harold Carr
 */
public interface LocalClientRequestDispatcher
{
    /**
     * Returns whether the local (co-located) invocation path should be
     * attempted for the given object reference.
     *
     * @param self The object reference which delegated to this delegate.
     * @return true if local invocation should be used.
     */
    public boolean useLocalInvocation(org.omg.CORBA.Object self);

    /**
     * Returns whether the given object reference is local.
     * NOTE(review): exact semantics (e.g. same-ORB vs. same-process) are
     * defined by the implementations, which are not visible here — confirm
     * before relying on a particular interpretation.
     *
     * @param self The object reference which delegated to this delegate.
     * @return true if the object reference is local.
     */
    public boolean is_local(org.omg.CORBA.Object self);

    /**
     * Returns a Java reference to the servant which should be used for this
     * request. servant_preinvoke() is invoked by a local stub.
     * If a ServantObject object is returned, then its servant field
     * has been set to an object of the expected type (Note: the object may
     * or may not be the actual servant instance). The local stub may cast
     * the servant field to the expected type, and then invoke the operation
     * directly.
     *
     * @param self The object reference which delegated to this delegate.
     *
     * @param operation a string containing the operation name.
     * The operation name corresponds to the operation name as it would be
     * encoded in a GIOP request.
     *
     * @param expectedType a Class object representing the expected type of the servant.
     * The expected type is the Class object associated with the operations
     * class of the stub's interface (e.g. A stub for an interface Foo,
     * would pass the Class object for the FooOperations interface).
     *
     * @return a ServantObject object.
     * The method may return a null value if it does not wish to support
     * this optimization (e.g. due to security, transactions, etc).
     * The method must return null if the servant is not of the expected type.
     */
    public ServantObject servant_preinvoke(org.omg.CORBA.Object self,
                                           String operation,
                                           Class expectedType);

    /**
     * Counterpart of {@link #servant_preinvoke}, invoked by the local stub
     * after the direct invocation completes so the dispatcher can release
     * any per-request state.
     * NOTE(review): cleanup responsibilities are implementation-defined and
     * not visible from this interface — confirm against the ORB
     * implementation.
     *
     * @param self The object reference which delegated to this delegate.
     * @param servant the ServantObject returned by servant_preinvoke.
     */
    public void servant_postinvoke(org.omg.CORBA.Object self,
                                   ServantObject servant);
}

// End of file.
shun634501730/java_source_cn
src_en/com/sun/corba/se/spi/protocol/LocalClientRequestDispatcher.java
Java
apache-2.0
2,154
using System.ComponentModel.DataAnnotations;

namespace Mvc.Server.ViewModels.Account
{
    /// <summary>
    /// View model used when confirming an external login registration.
    /// </summary>
    public class ExternalLoginConfirmationViewModel
    {
        /// <summary>
        /// The user's e-mail address; required and validated as an e-mail.
        /// </summary>
        [Required, EmailAddress]
        public string Email { get; set; }
    }
}
ilmax/core
samples/Mvc.Server/ViewModels/Account/ExternalLoginConfirmationViewModel.cs
C#
apache-2.0
241
#!/usr/bin/env python
"""A filesystem-backed string cache plus a small tweet-parsing helper."""

import errno
import os
import re
import tempfile

from hashlib import md5


class _FileCacheError(Exception):
    """Base exception class for FileCache related errors"""


class _FileCache(object):
    """Stores string values on disk, keyed by the MD5 digest of the key.

    Entries live under a root directory and are sharded DEPTH directory
    levels deep using the leading characters of the hashed key.
    """

    # Number of directory levels used to shard cache files.
    DEPTH = 3

    def __init__(self, root_directory=None):
        # When no root is given, a per-user directory under the system
        # temp dir is created and used.
        self._InitializeRootDirectory(root_directory)

    def Get(self, key):
        """Return the cached data for `key`, or None when absent."""
        path = self._GetPath(key)
        if os.path.exists(path):
            with open(path) as f:
                return f.read()
        else:
            return None

    def Set(self, key, data):
        """Write `data` for `key`, replacing any existing entry.

        Data is written to a temporary file first and then renamed into
        place, so readers never observe a partially written entry.
        """
        path = self._GetPath(key)
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        if not os.path.isdir(directory):
            raise _FileCacheError('%s exists but is not a directory' % directory)
        # Create the temp file inside the destination directory:
        # os.rename() is not guaranteed to work across filesystems, and the
        # default tempfile location may be on a different device than the
        # cache root (the original code failed with EXDEV in that case).
        temp_fd, temp_path = tempfile.mkstemp(dir=directory)
        temp_fp = os.fdopen(temp_fd, 'w')
        temp_fp.write(data)
        temp_fp.close()
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory))
        if os.path.exists(path):
            os.remove(path)
        os.rename(temp_path, path)

    def Remove(self, key):
        """Delete the cache entry for `key`, if present."""
        path = self._GetPath(key)
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory))
        if os.path.exists(path):
            os.remove(path)

    def GetCachedTime(self, key):
        """Return the mtime of `key`'s cache file, or None when absent."""
        path = self._GetPath(key)
        if os.path.exists(path):
            return os.path.getmtime(path)
        else:
            return None

    def _GetUsername(self):
        """Attempt to find the username in a cross-platform fashion."""
        try:
            return os.getenv('USER') or \
                os.getenv('LOGNAME') or \
                os.getenv('USERNAME') or \
                os.getlogin() or \
                'nobody'
        except (AttributeError, IOError, OSError):
            return 'nobody'

    def _GetTmpCachePath(self):
        """Default cache root: a per-user directory under the temp dir."""
        username = self._GetUsername()
        cache_directory = 'python.cache_' + username
        return os.path.join(tempfile.gettempdir(), cache_directory)

    def _InitializeRootDirectory(self, root_directory):
        if not root_directory:
            root_directory = self._GetTmpCachePath()
        root_directory = os.path.abspath(root_directory)
        try:
            os.mkdir(root_directory)
        except OSError as e:
            if e.errno == errno.EEXIST and os.path.isdir(root_directory):
                # directory already exists
                pass
            else:
                # exists but is a file, or no permissions, or...
                raise
        self._root_directory = root_directory

    def _GetPath(self, key):
        """Map `key` (str or bytes) to its on-disk path via its MD5 digest."""
        # str keys are encoded to UTF-8 before hashing; bytes keys are
        # hashed directly.  (The original fallback called md5.new(), which
        # does not exist on hashlib's md5 function and always raised
        # AttributeError; bytes keys also raised AttributeError from
        # key.encode, which the old `except TypeError` did not catch.)
        try:
            hashed_key = md5(key.encode('utf-8')).hexdigest()
        except (AttributeError, TypeError):
            hashed_key = md5(key).hexdigest()
        return os.path.join(self._root_directory,
                            self._GetPrefix(hashed_key),
                            hashed_key)

    def _GetPrefix(self, hashed_key):
        # e.g. 'abcdef...' -> 'a/b/c' for DEPTH == 3.
        return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])


class ParseTweet(object):
    """Extracts handles, hashtags, URLs and RT/MT flags from a tweet."""

    # compile once on import
    # NOTE(review): the URL pattern is odd -- '[http://]?' is a character
    # class, not the literal scheme -- but it is kept unchanged because
    # callers may depend on its current matching behavior.
    regexp = {"RT": "^RT", "MT": r"^MT",
              "ALNUM": r"(@[a-zA-Z0-9_]+)",
              "HASHTAG": r"(#[\w\d]+)",
              "URL": r"([http://]?[a-zA-Z\d\/]+[\.]+[a-zA-Z\d\/\.]+)"}
    regexp = dict((key, re.compile(value)) for key, value in list(regexp.items()))

    def __init__(self, timeline_owner, tweet):
        """ timeline_owner : twitter handle of user account.
        tweet - 140 chars from feed; object does all computation on construction
        properties:
        RT, MT - boolean
        URLs - list of URL
        Hashtags - list of tags
        """
        self.Owner = timeline_owner
        self.tweet = tweet
        self.UserHandles = ParseTweet.getUserHandles(tweet)
        self.Hashtags = ParseTweet.getHashtags(tweet)
        self.URLs = ParseTweet.getURLs(tweet)
        self.RT = ParseTweet.getAttributeRT(tweet)
        self.MT = ParseTweet.getAttributeMT(tweet)

        # For retweets, attribute the tweet to the first mentioned handle.
        if (self.RT and len(self.UserHandles) > 0):
            self.Owner = self.UserHandles[0]
        return

    def __str__(self):
        """ for display method """
        return "owner %s, urls: %d, hashtags %d, user_handles %d, len_tweet %d, RT = %s, MT = %s" % (
            self.Owner, len(self.URLs), len(self.Hashtags), len(self.UserHandles),
            len(self.tweet), self.RT, self.MT)

    @staticmethod
    def getAttributeRT(tweet):
        """ see if tweet is a RT """
        return re.search(ParseTweet.regexp["RT"], tweet.strip()) is not None

    @staticmethod
    def getAttributeMT(tweet):
        """ see if tweet is a MT """
        return re.search(ParseTweet.regexp["MT"], tweet.strip()) is not None

    @staticmethod
    def getUserHandles(tweet):
        """ given a tweet we try and extract all user handles in order of occurrence"""
        return re.findall(ParseTweet.regexp["ALNUM"], tweet)

    @staticmethod
    def getHashtags(tweet):
        """ return all hashtags"""
        return re.findall(ParseTweet.regexp["HASHTAG"], tweet)

    @staticmethod
    def getURLs(tweet):
        """ URL : [http://]?[\w\.?/]+"""
        return re.findall(ParseTweet.regexp["URL"], tweet)
milmd90/TwitterBot
twitter/_file_cache.py
Python
apache-2.0
5,588
// Copyright 2014 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "src/crankshaft/s390/lithium-s390.h" #include <sstream> #include "src/crankshaft/hydrogen-osr.h" #include "src/crankshaft/lithium-inl.h" #include "src/crankshaft/s390/lithium-codegen-s390.h" namespace v8 { namespace internal { #define DEFINE_COMPILE(type) \ void L##type::CompileToNative(LCodeGen* generator) { \ generator->Do##type(this); \ } LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) #undef DEFINE_COMPILE #ifdef DEBUG void LInstruction::VerifyCall() { // Call instructions can use only fixed registers as temporaries and // outputs because all registers are blocked by the calling convention. // Inputs operands must use a fixed register or use-at-start policy or // a non-register policy. DCHECK(Output() == NULL || LUnallocated::cast(Output())->HasFixedPolicy() || !LUnallocated::cast(Output())->HasRegisterPolicy()); for (UseIterator it(this); !it.Done(); it.Advance()) { LUnallocated* operand = LUnallocated::cast(it.Current()); DCHECK(operand->HasFixedPolicy() || operand->IsUsedAtStart()); } for (TempIterator it(this); !it.Done(); it.Advance()) { LUnallocated* operand = LUnallocated::cast(it.Current()); DCHECK(operand->HasFixedPolicy() || !operand->HasRegisterPolicy()); } } #endif void LInstruction::PrintTo(StringStream* stream) { stream->Add("%s ", this->Mnemonic()); PrintOutputOperandTo(stream); PrintDataTo(stream); if (HasEnvironment()) { stream->Add(" "); environment()->PrintTo(stream); } if (HasPointerMap()) { stream->Add(" "); pointer_map()->PrintTo(stream); } } void LInstruction::PrintDataTo(StringStream* stream) { stream->Add("= "); for (int i = 0; i < InputCount(); i++) { if (i > 0) stream->Add(" "); if (InputAt(i) == NULL) { stream->Add("NULL"); } else { InputAt(i)->PrintTo(stream); } } } void LInstruction::PrintOutputOperandTo(StringStream* stream) { if (HasResult()) 
result()->PrintTo(stream); } void LLabel::PrintDataTo(StringStream* stream) { LGap::PrintDataTo(stream); LLabel* rep = replacement(); if (rep != NULL) { stream->Add(" Dead block replaced with B%d", rep->block_id()); } } bool LGap::IsRedundant() const { for (int i = 0; i < 4; i++) { if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) { return false; } } return true; } void LGap::PrintDataTo(StringStream* stream) { for (int i = 0; i < 4; i++) { stream->Add("("); if (parallel_moves_[i] != NULL) { parallel_moves_[i]->PrintDataTo(stream); } stream->Add(") "); } } const char* LArithmeticD::Mnemonic() const { switch (op()) { case Token::ADD: return "add-d"; case Token::SUB: return "sub-d"; case Token::MUL: return "mul-d"; case Token::DIV: return "div-d"; case Token::MOD: return "mod-d"; default: UNREACHABLE(); return NULL; } } const char* LArithmeticT::Mnemonic() const { switch (op()) { case Token::ADD: return "add-t"; case Token::SUB: return "sub-t"; case Token::MUL: return "mul-t"; case Token::MOD: return "mod-t"; case Token::DIV: return "div-t"; case Token::BIT_AND: return "bit-and-t"; case Token::BIT_OR: return "bit-or-t"; case Token::BIT_XOR: return "bit-xor-t"; case Token::ROR: return "ror-t"; case Token::SHL: return "shl-t"; case Token::SAR: return "sar-t"; case Token::SHR: return "shr-t"; default: UNREACHABLE(); return NULL; } } bool LGoto::HasInterestingComment(LCodeGen* gen) const { return !gen->IsNextEmittedBlock(block_id()); } void LGoto::PrintDataTo(StringStream* stream) { stream->Add("B%d", block_id()); } void LBranch::PrintDataTo(StringStream* stream) { stream->Add("B%d | B%d on ", true_block_id(), false_block_id()); value()->PrintTo(stream); } void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if "); left()->PrintTo(stream); stream->Add(" %s ", Token::String(op())); right()->PrintTo(stream); stream->Add(" then B%d else B%d", true_block_id(), false_block_id()); } void 
LIsStringAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if is_string("); value()->PrintTo(stream); stream->Add(") then B%d else B%d", true_block_id(), false_block_id()); } void LIsSmiAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if is_smi("); value()->PrintTo(stream); stream->Add(") then B%d else B%d", true_block_id(), false_block_id()); } void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if is_undetectable("); value()->PrintTo(stream); stream->Add(") then B%d else B%d", true_block_id(), false_block_id()); } void LStringCompareAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if string_compare("); left()->PrintTo(stream); right()->PrintTo(stream); stream->Add(") then B%d else B%d", true_block_id(), false_block_id()); } void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if has_instance_type("); value()->PrintTo(stream); stream->Add(") then B%d else B%d", true_block_id(), false_block_id()); } void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if has_cached_array_index("); value()->PrintTo(stream); stream->Add(") then B%d else B%d", true_block_id(), false_block_id()); } void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if class_of_test("); value()->PrintTo(stream); stream->Add(", \"%o\") then B%d else B%d", *hydrogen()->class_name(), true_block_id(), false_block_id()); } void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) { stream->Add("if typeof "); value()->PrintTo(stream); stream->Add(" == \"%s\" then B%d else B%d", hydrogen()->type_literal()->ToCString().get(), true_block_id(), false_block_id()); } void LStoreCodeEntry::PrintDataTo(StringStream* stream) { stream->Add(" = "); function()->PrintTo(stream); stream->Add(".code_entry = "); code_object()->PrintTo(stream); } void LInnerAllocatedObject::PrintDataTo(StringStream* stream) { stream->Add(" = "); base_object()->PrintTo(stream); stream->Add(" + "); 
offset()->PrintTo(stream); } void LCallWithDescriptor::PrintDataTo(StringStream* stream) { for (int i = 0; i < InputCount(); i++) { InputAt(i)->PrintTo(stream); stream->Add(" "); } stream->Add("#%d / ", arity()); } void LLoadContextSlot::PrintDataTo(StringStream* stream) { context()->PrintTo(stream); stream->Add("[%d]", slot_index()); } void LStoreContextSlot::PrintDataTo(StringStream* stream) { context()->PrintTo(stream); stream->Add("[%d] <- ", slot_index()); value()->PrintTo(stream); } void LInvokeFunction::PrintDataTo(StringStream* stream) { stream->Add("= "); function()->PrintTo(stream); stream->Add(" #%d / ", arity()); } void LCallNewArray::PrintDataTo(StringStream* stream) { stream->Add("= "); constructor()->PrintTo(stream); stream->Add(" #%d / ", arity()); ElementsKind kind = hydrogen()->elements_kind(); stream->Add(" (%s) ", ElementsKindToString(kind)); } void LAccessArgumentsAt::PrintDataTo(StringStream* stream) { arguments()->PrintTo(stream); stream->Add(" length "); length()->PrintTo(stream); stream->Add(" index "); index()->PrintTo(stream); } void LStoreNamedField::PrintDataTo(StringStream* stream) { object()->PrintTo(stream); std::ostringstream os; os << hydrogen()->access() << " <- "; stream->Add(os.str().c_str()); value()->PrintTo(stream); } void LStoreNamedGeneric::PrintDataTo(StringStream* stream) { object()->PrintTo(stream); stream->Add("."); stream->Add(String::cast(*name())->ToCString().get()); stream->Add(" <- "); value()->PrintTo(stream); } void LLoadKeyed::PrintDataTo(StringStream* stream) { elements()->PrintTo(stream); stream->Add("["); key()->PrintTo(stream); if (hydrogen()->IsDehoisted()) { stream->Add(" + %d]", base_offset()); } else { stream->Add("]"); } } void LStoreKeyed::PrintDataTo(StringStream* stream) { elements()->PrintTo(stream); stream->Add("["); key()->PrintTo(stream); if (hydrogen()->IsDehoisted()) { stream->Add(" + %d] <-", base_offset()); } else { stream->Add("] <- "); } if (value() == NULL) { 
DCHECK(hydrogen()->IsConstantHoleStore() && hydrogen()->value()->representation().IsDouble()); stream->Add("<the hole(nan)>"); } else { value()->PrintTo(stream); } } void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) { object()->PrintTo(stream); stream->Add("["); key()->PrintTo(stream); stream->Add("] <- "); value()->PrintTo(stream); } void LTransitionElementsKind::PrintDataTo(StringStream* stream) { object()->PrintTo(stream); stream->Add(" %p -> %p", *original_map(), *transitioned_map()); } int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) { // Skip a slot if for a double-width slot. if (kind == DOUBLE_REGISTERS) current_frame_slots_++; return current_frame_slots_++; } LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) { int index = GetNextSpillIndex(kind); if (kind == DOUBLE_REGISTERS) { return LDoubleStackSlot::Create(index, zone()); } else { DCHECK(kind == GENERAL_REGISTERS); return LStackSlot::Create(index, zone()); } } LPlatformChunk* LChunkBuilder::Build() { DCHECK(is_unused()); chunk_ = new (zone()) LPlatformChunk(info(), graph()); LPhase phase("L_Building chunk", chunk_); status_ = BUILDING; // If compiling for OSR, reserve space for the unoptimized frame, // which will be subsumed into this frame. 
if (graph()->has_osr()) { for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) { chunk_->GetNextSpillIndex(GENERAL_REGISTERS); } } const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); for (int i = 0; i < blocks->length(); i++) { HBasicBlock* next = NULL; if (i < blocks->length() - 1) next = blocks->at(i + 1); DoBasicBlock(blocks->at(i), next); if (is_aborted()) return NULL; } status_ = DONE; return chunk_; } LUnallocated* LChunkBuilder::ToUnallocated(Register reg) { return new (zone()) LUnallocated(LUnallocated::FIXED_REGISTER, reg.code()); } LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) { return new (zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER, reg.code()); } LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) { return Use(value, ToUnallocated(fixed_register)); } LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) { return Use(value, ToUnallocated(reg)); } LOperand* LChunkBuilder::UseRegister(HValue* value) { return Use(value, new (zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER)); } LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) { return Use(value, new (zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER, LUnallocated::USED_AT_START)); } LOperand* LChunkBuilder::UseTempRegister(HValue* value) { return Use(value, new (zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER)); } LOperand* LChunkBuilder::Use(HValue* value) { return Use(value, new (zone()) LUnallocated(LUnallocated::NONE)); } LOperand* LChunkBuilder::UseAtStart(HValue* value) { return Use(value, new (zone()) LUnallocated(LUnallocated::NONE, LUnallocated::USED_AT_START)); } LOperand* LChunkBuilder::UseOrConstant(HValue* value) { return value->IsConstant() ? chunk_->DefineConstantOperand(HConstant::cast(value)) : Use(value); } LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) { return value->IsConstant() ? 
chunk_->DefineConstantOperand(HConstant::cast(value)) : UseAtStart(value); } LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) { return value->IsConstant() ? chunk_->DefineConstantOperand(HConstant::cast(value)) : UseRegister(value); } LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) { return value->IsConstant() ? chunk_->DefineConstantOperand(HConstant::cast(value)) : UseRegisterAtStart(value); } LOperand* LChunkBuilder::UseConstant(HValue* value) { return chunk_->DefineConstantOperand(HConstant::cast(value)); } LOperand* LChunkBuilder::UseAny(HValue* value) { return value->IsConstant() ? chunk_->DefineConstantOperand(HConstant::cast(value)) : Use(value, new (zone()) LUnallocated(LUnallocated::ANY)); } LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) { if (value->EmitAtUses()) { HInstruction* instr = HInstruction::cast(value); VisitInstruction(instr); } operand->set_virtual_register(value->id()); return operand; } LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr, LUnallocated* result) { result->set_virtual_register(current_instruction_->id()); instr->set_result(result); return instr; } LInstruction* LChunkBuilder::DefineAsRegister( LTemplateResultInstruction<1>* instr) { return Define(instr, new (zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER)); } LInstruction* LChunkBuilder::DefineAsSpilled( LTemplateResultInstruction<1>* instr, int index) { return Define(instr, new (zone()) LUnallocated(LUnallocated::FIXED_SLOT, index)); } LInstruction* LChunkBuilder::DefineSameAsFirst( LTemplateResultInstruction<1>* instr) { return Define(instr, new (zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT)); } LInstruction* LChunkBuilder::DefineFixed(LTemplateResultInstruction<1>* instr, Register reg) { return Define(instr, ToUnallocated(reg)); } LInstruction* LChunkBuilder::DefineFixedDouble( LTemplateResultInstruction<1>* instr, DoubleRegister reg) { return Define(instr, ToUnallocated(reg)); } 
LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) { HEnvironment* hydrogen_env = current_block_->last_environment(); return LChunkBuilderBase::AssignEnvironment(instr, hydrogen_env); } LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, HInstruction* hinstr, CanDeoptimize can_deoptimize) { info()->MarkAsNonDeferredCalling(); #ifdef DEBUG instr->VerifyCall(); #endif instr->MarkAsCall(); instr = AssignPointerMap(instr); // If instruction does not have side-effects lazy deoptimization // after the call will try to deoptimize to the point before the call. // Thus we still need to attach environment to this call even if // call sequence can not deoptimize eagerly. bool needs_environment = (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) || !hinstr->HasObservableSideEffects(); if (needs_environment && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); // We can't really figure out if the environment is needed or not. instr->environment()->set_has_been_used(); } return instr; } LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) { DCHECK(!instr->HasPointerMap()); instr->set_pointer_map(new (zone()) LPointerMap(zone())); return instr; } LUnallocated* LChunkBuilder::TempRegister() { LUnallocated* operand = new (zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER); int vreg = allocator_->GetVirtualRegister(); if (!allocator_->AllocationOk()) { Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister); vreg = 0; } operand->set_virtual_register(vreg); return operand; } LUnallocated* LChunkBuilder::TempDoubleRegister() { LUnallocated* operand = new (zone()) LUnallocated(LUnallocated::MUST_HAVE_DOUBLE_REGISTER); int vreg = allocator_->GetVirtualRegister(); if (!allocator_->AllocationOk()) { Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister); vreg = 0; } operand->set_virtual_register(vreg); return operand; } LOperand* LChunkBuilder::FixedTemp(Register reg) { LUnallocated* operand = ToUnallocated(reg); 
DCHECK(operand->HasFixedPolicy()); return operand; } LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) { LUnallocated* operand = ToUnallocated(reg); DCHECK(operand->HasFixedPolicy()); return operand; } LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) { return new (zone()) LLabel(instr->block()); } LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) { return DefineAsRegister(new (zone()) LDummyUse(UseAny(instr->value()))); } LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) { UNREACHABLE(); return NULL; } LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) { return AssignEnvironment(new (zone()) LDeoptimize); } LInstruction* LChunkBuilder::DoShift(Token::Value op, HBitwiseBinaryOperation* instr) { if (instr->representation().IsSmiOrInteger32()) { DCHECK(instr->left()->representation().Equals(instr->representation())); DCHECK(instr->right()->representation().Equals(instr->representation())); LOperand* left = UseRegisterAtStart(instr->left()); HValue* right_value = instr->right(); LOperand* right = NULL; int constant_value = 0; bool does_deopt = false; if (right_value->IsConstant()) { HConstant* constant = HConstant::cast(right_value); right = chunk_->DefineConstantOperand(constant); constant_value = constant->Integer32Value() & 0x1f; // Left shifts can deoptimize if we shift by > 0 and the result cannot be // truncated to smi. if (instr->representation().IsSmi() && constant_value > 0) { does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi); } } else { right = UseRegisterAtStart(right_value); } // Shift operations can only deoptimize if we do a logical shift // by 0 and the result cannot be truncated to int32. if (op == Token::SHR && constant_value == 0) { does_deopt = !instr->CheckFlag(HInstruction::kUint32); } LInstruction* result = DefineAsRegister(new (zone()) LShiftI(op, left, right, does_deopt)); return does_deopt ? 
AssignEnvironment(result) : result; } else { return DoArithmeticT(op, instr); } } LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op, HArithmeticBinaryOperation* instr) { DCHECK(instr->representation().IsDouble()); DCHECK(instr->left()->representation().IsDouble()); DCHECK(instr->right()->representation().IsDouble()); if (op == Token::MOD) { LOperand* left = UseFixedDouble(instr->left(), d1); LOperand* right = UseFixedDouble(instr->right(), d2); LArithmeticD* result = new (zone()) LArithmeticD(op, left, right); // We call a C function for double modulo. It can't trigger a GC. We need // to use fixed result register for the call. // TODO(fschneider): Allow any register as input registers. return MarkAsCall(DefineFixedDouble(result, d1), instr); } else { LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); LOperand* right = UseRegisterAtStart(instr->BetterRightOperand()); LArithmeticD* result = new (zone()) LArithmeticD(op, left, right); return DefineSameAsFirst(result); } } LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op, HBinaryOperation* instr) { HValue* left = instr->left(); HValue* right = instr->right(); DCHECK(left->representation().IsTagged()); DCHECK(right->representation().IsTagged()); LOperand* context = UseFixed(instr->context(), cp); LOperand* left_operand = UseFixed(left, r3); LOperand* right_operand = UseFixed(right, r2); LArithmeticT* result = new (zone()) LArithmeticT(op, context, left_operand, right_operand); return MarkAsCall(DefineFixed(result, r2), instr); } void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) { DCHECK(is_building()); current_block_ = block; next_block_ = next_block; if (block->IsStartBlock()) { block->UpdateEnvironment(graph_->start_environment()); argument_count_ = 0; } else if (block->predecessors()->length() == 1) { // We have a single predecessor => copy environment and outgoing // argument count from the predecessor. 
DCHECK(block->phis()->length() == 0); HBasicBlock* pred = block->predecessors()->at(0); HEnvironment* last_environment = pred->last_environment(); DCHECK(last_environment != NULL); // Only copy the environment, if it is later used again. if (pred->end()->SecondSuccessor() == NULL) { DCHECK(pred->end()->FirstSuccessor() == block); } else { if (pred->end()->FirstSuccessor()->block_id() > block->block_id() || pred->end()->SecondSuccessor()->block_id() > block->block_id()) { last_environment = last_environment->Copy(); } } block->UpdateEnvironment(last_environment); DCHECK(pred->argument_count() >= 0); argument_count_ = pred->argument_count(); } else { // We are at a state join => process phis. HBasicBlock* pred = block->predecessors()->at(0); // No need to copy the environment, it cannot be used later. HEnvironment* last_environment = pred->last_environment(); for (int i = 0; i < block->phis()->length(); ++i) { HPhi* phi = block->phis()->at(i); if (phi->HasMergedIndex()) { last_environment->SetValueAt(phi->merged_index(), phi); } } for (int i = 0; i < block->deleted_phis()->length(); ++i) { if (block->deleted_phis()->at(i) < last_environment->length()) { last_environment->SetValueAt(block->deleted_phis()->at(i), graph_->GetConstantUndefined()); } } block->UpdateEnvironment(last_environment); // Pick up the outgoing argument count of one of the predecessors. argument_count_ = pred->argument_count(); } HInstruction* current = block->first(); int start = chunk_->instructions()->length(); while (current != NULL && !is_aborted()) { // Code for constants in registers is generated lazily. 
if (!current->EmitAtUses()) { VisitInstruction(current); } current = current->next(); } int end = chunk_->instructions()->length() - 1; if (end >= start) { block->set_first_instruction_index(start); block->set_last_instruction_index(end); } block->set_argument_count(argument_count_); next_block_ = NULL; current_block_ = NULL; } void LChunkBuilder::VisitInstruction(HInstruction* current) { HInstruction* old_current = current_instruction_; current_instruction_ = current; LInstruction* instr = NULL; if (current->CanReplaceWithDummyUses()) { if (current->OperandCount() == 0) { instr = DefineAsRegister(new (zone()) LDummy()); } else { DCHECK(!current->OperandAt(0)->IsControlInstruction()); instr = DefineAsRegister(new (zone()) LDummyUse(UseAny(current->OperandAt(0)))); } for (int i = 1; i < current->OperandCount(); ++i) { if (current->OperandAt(i)->IsControlInstruction()) continue; LInstruction* dummy = new (zone()) LDummyUse(UseAny(current->OperandAt(i))); dummy->set_hydrogen_value(current); chunk_->AddInstruction(dummy, current_block_); } } else { HBasicBlock* successor; if (current->IsControlInstruction() && HControlInstruction::cast(current)->KnownSuccessorBlock(&successor) && successor != NULL) { instr = new (zone()) LGoto(successor); } else { instr = current->CompileToLithium(this); } } argument_count_ += current->argument_delta(); DCHECK(argument_count_ >= 0); if (instr != NULL) { AddInstruction(instr, current); } current_instruction_ = old_current; } void LChunkBuilder::AddInstruction(LInstruction* instr, HInstruction* hydrogen_val) { // Associate the hydrogen instruction first, since we may need it for // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below. instr->set_hydrogen_value(hydrogen_val); #if DEBUG // Make sure that the lithium instruction has either no fixed register // constraints in temps or the result OR no uses that are only used at // start. 
If this invariant doesn't hold, the register allocator can decide // to insert a split of a range immediately before the instruction due to an // already allocated register needing to be used for the instruction's fixed // register constraint. In this case, The register allocator won't see an // interference between the split child and the use-at-start (it would if // the it was just a plain use), so it is free to move the split child into // the same register that is used for the use-at-start. // See https://code.google.com/p/chromium/issues/detail?id=201590 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters(isolate()))) { int fixed = 0; int used_at_start = 0; for (UseIterator it(instr); !it.Done(); it.Advance()) { LUnallocated* operand = LUnallocated::cast(it.Current()); if (operand->IsUsedAtStart()) ++used_at_start; } if (instr->Output() != NULL) { if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed; } for (TempIterator it(instr); !it.Done(); it.Advance()) { LUnallocated* operand = LUnallocated::cast(it.Current()); if (operand->HasFixedPolicy()) ++fixed; } DCHECK(fixed == 0 || used_at_start == 0); } #endif if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) { instr = AssignPointerMap(instr); } if (FLAG_stress_environments && !instr->HasEnvironment()) { instr = AssignEnvironment(instr); } chunk_->AddInstruction(instr, current_block_); CreateLazyBailoutForCall(current_block_, instr, hydrogen_val); } LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) { LInstruction* result = new (zone()) LPrologue(); if (info_->num_heap_slots() > 0) { result = MarkAsCall(result, instr); } return result; } LInstruction* LChunkBuilder::DoGoto(HGoto* instr) { return new (zone()) LGoto(instr->FirstSuccessor()); } LInstruction* LChunkBuilder::DoBranch(HBranch* instr) { HValue* value = instr->value(); Representation r = value->representation(); HType type = value->type(); ToBooleanICStub::Types expected = instr->expected_input_types(); if 
(expected.IsEmpty()) expected = ToBooleanICStub::Types::Generic(); bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() || type.IsJSArray() || type.IsHeapNumber() || type.IsString(); LInstruction* branch = new (zone()) LBranch(UseRegister(value)); if (!easy_case && ((!expected.Contains(ToBooleanICStub::SMI) && expected.NeedsMap()) || !expected.IsGeneric())) { branch = AssignEnvironment(branch); } return branch; } LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) { return new (zone()) LDebugBreak(); } LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) { DCHECK(instr->value()->representation().IsTagged()); LOperand* value = UseRegister(instr->value()); LOperand* temp = TempRegister(); return new (zone()) LCmpMapAndBranch(value, temp); } LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) { info()->MarkAsRequiresFrame(); LOperand* value = UseRegister(instr->value()); return DefineAsRegister(new (zone()) LArgumentsLength(value)); } LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) { info()->MarkAsRequiresFrame(); return DefineAsRegister(new (zone()) LArgumentsElements); } LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) { LOperand* left = UseFixed(instr->left(), InstanceOfDescriptor::LeftRegister()); LOperand* right = UseFixed(instr->right(), InstanceOfDescriptor::RightRegister()); LOperand* context = UseFixed(instr->context(), cp); LInstanceOf* result = new (zone()) LInstanceOf(context, left, right); return MarkAsCall(DefineFixed(result, r2), instr); } LInstruction* LChunkBuilder::DoHasInPrototypeChainAndBranch( HHasInPrototypeChainAndBranch* instr) { LOperand* object = UseRegister(instr->object()); LOperand* prototype = UseRegister(instr->prototype()); LHasInPrototypeChainAndBranch* result = new (zone()) LHasInPrototypeChainAndBranch(object, prototype); return AssignEnvironment(result); } LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) { LOperand* 
receiver = UseRegisterAtStart(instr->receiver()); LOperand* function = UseRegisterAtStart(instr->function()); LWrapReceiver* result = new (zone()) LWrapReceiver(receiver, function); return AssignEnvironment(DefineAsRegister(result)); } LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) { LOperand* function = UseFixed(instr->function(), r3); LOperand* receiver = UseFixed(instr->receiver(), r2); LOperand* length = UseFixed(instr->length(), r4); LOperand* elements = UseFixed(instr->elements(), r5); LApplyArguments* result = new (zone()) LApplyArguments(function, receiver, length, elements); return MarkAsCall(DefineFixed(result, r2), instr, CAN_DEOPTIMIZE_EAGERLY); } LInstruction* LChunkBuilder::DoPushArguments(HPushArguments* instr) { int argc = instr->OperandCount(); for (int i = 0; i < argc; ++i) { LOperand* argument = Use(instr->argument(i)); AddInstruction(new (zone()) LPushArgument(argument), instr); } return NULL; } LInstruction* LChunkBuilder::DoStoreCodeEntry( HStoreCodeEntry* store_code_entry) { LOperand* function = UseRegister(store_code_entry->function()); LOperand* code_object = UseTempRegister(store_code_entry->code_object()); return new (zone()) LStoreCodeEntry(function, code_object); } LInstruction* LChunkBuilder::DoInnerAllocatedObject( HInnerAllocatedObject* instr) { LOperand* base_object = UseRegisterAtStart(instr->base_object()); LOperand* offset = UseRegisterOrConstantAtStart(instr->offset()); return DefineAsRegister(new (zone()) LInnerAllocatedObject(base_object, offset)); } LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) { return instr->HasNoUses() ? 
NULL : DefineAsRegister(new (zone()) LThisFunction); } LInstruction* LChunkBuilder::DoContext(HContext* instr) { if (instr->HasNoUses()) return NULL; if (info()->IsStub()) { return DefineFixed(new (zone()) LContext, cp); } return DefineAsRegister(new (zone()) LContext); } LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) { LOperand* context = UseFixed(instr->context(), cp); return MarkAsCall(new (zone()) LDeclareGlobals(context), instr); } LInstruction* LChunkBuilder::DoCallWithDescriptor(HCallWithDescriptor* instr) { CallInterfaceDescriptor descriptor = instr->descriptor(); LOperand* target = UseRegisterOrConstantAtStart(instr->target()); ZoneList<LOperand*> ops(instr->OperandCount(), zone()); // Target ops.Add(target, zone()); // Context LOperand* op = UseFixed(instr->OperandAt(1), cp); ops.Add(op, zone()); // Other register parameters for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount; i < instr->OperandCount(); i++) { op = UseFixed(instr->OperandAt(i), descriptor.GetRegisterParameter( i - LCallWithDescriptor::kImplicitRegisterParameterCount)); ops.Add(op, zone()); } LCallWithDescriptor* result = new (zone()) LCallWithDescriptor(descriptor, ops, zone()); if (instr->syntactic_tail_call_mode() == TailCallMode::kAllow) { result->MarkAsSyntacticTailCall(); } return MarkAsCall(DefineFixed(result, r2), instr); } LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) { LOperand* context = UseFixed(instr->context(), cp); LOperand* function = UseFixed(instr->function(), r3); LInvokeFunction* result = new (zone()) LInvokeFunction(context, function); if (instr->syntactic_tail_call_mode() == TailCallMode::kAllow) { result->MarkAsSyntacticTailCall(); } return MarkAsCall(DefineFixed(result, r2), instr, CANNOT_DEOPTIMIZE_EAGERLY); } LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) { switch (instr->op()) { case kMathFloor: return DoMathFloor(instr); case kMathRound: return DoMathRound(instr); case 
// NOTE(review): this chunk begins mid-way through the unary-math dispatch
// switch; the enclosing function's opening (and the `case ` keyword belonging
// to the first label below) lie before this view and are not reproduced here.
kMathFround: return DoMathFround(instr);
    case kMathAbs: return DoMathAbs(instr);
    case kMathLog: return DoMathLog(instr);
    case kMathExp: return DoMathExp(instr);
    case kMathSqrt: return DoMathSqrt(instr);
    case kMathPowHalf: return DoMathPowHalf(instr);
    case kMathClz32: return DoMathClz32(instr);
    default:
      UNREACHABLE();
      return NULL;
  }
}

// Math.floor: needs both a deopt environment and a pointer map (the generated
// code can deoptimize and can call into the runtime).
LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
  LOperand* input = UseRegister(instr->value());
  LMathFloor* result = new (zone()) LMathFloor(input);
  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}

// Math.round: uses a scratch double register; may deoptimize.
LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
  LOperand* input = UseRegister(instr->value());
  LOperand* temp = TempDoubleRegister();
  LMathRound* result = new (zone()) LMathRound(input, temp);
  return AssignEnvironment(DefineAsRegister(result));
}

// Math.fround: pure register-to-register operation, no deopt needed.
LInstruction* LChunkBuilder::DoMathFround(HUnaryMathOperation* instr) {
  LOperand* input = UseRegister(instr->value());
  LMathFround* result = new (zone()) LMathFround(input);
  return DefineAsRegister(result);
}

// Math.abs: the tagged (generic) case needs a context, a pointer map (runtime
// call possible) and a deopt environment; the int/smi case can still deopt on
// overflow of the most-negative value, so only the double case skips the
// environment entirely.
LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
  Representation r = instr->value()->representation();
  LOperand* context = (r.IsDouble() || r.IsSmiOrInteger32())
                          ? NULL
                          : UseFixed(instr->context(), cp);
  LOperand* input = UseRegister(instr->value());
  LInstruction* result =
      DefineAsRegister(new (zone()) LMathAbs(context, input));
  if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result);
  if (!r.IsDouble()) result = AssignEnvironment(result);
  return result;
}

// Math.log: implemented as a call with the argument and result fixed in d1.
LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
  DCHECK(instr->representation().IsDouble());
  DCHECK(instr->value()->representation().IsDouble());
  LOperand* input = UseFixedDouble(instr->value(), d1);
  return MarkAsCall(DefineFixedDouble(new (zone()) LMathLog(input), d1), instr);
}

// Math.clz32: simple register operation.
LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
  LOperand* input = UseRegisterAtStart(instr->value());
  LMathClz32* result = new (zone()) LMathClz32(input);
  return DefineAsRegister(result);
}

// Math.exp: inline expansion using two integer temps and one double temp.
LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
  DCHECK(instr->representation().IsDouble());
  DCHECK(instr->value()->representation().IsDouble());
  LOperand* input = UseRegister(instr->value());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  LOperand* double_temp = TempDoubleRegister();
  LMathExp* result = new (zone()) LMathExp(input, double_temp, temp1, temp2);
  return DefineAsRegister(result);
}

// Math.sqrt: simple register operation.
LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
  LOperand* input = UseRegisterAtStart(instr->value());
  LMathSqrt* result = new (zone()) LMathSqrt(input);
  return DefineAsRegister(result);
}

// Math.pow(x, 0.5): simple register operation.
LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
  LOperand* input = UseRegisterAtStart(instr->value());
  LMathPowHalf* result = new (zone()) LMathPowHalf(input);
  return DefineAsRegister(result);
}

// Array constructor call: constructor fixed in r3, result fixed in r2.
LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* constructor = UseFixed(instr->constructor(), r3);
  LCallNewArray* result = new (zone()) LCallNewArray(context, constructor);
  return MarkAsCall(DefineFixed(result, r2), instr);
}

// Runtime call: result fixed in r2.
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(DefineFixed(new (zone()) LCallRuntime(context), r2), instr);
}

// Shift operations all funnel through the shared DoShift helper.
LInstruction* LChunkBuilder::DoRor(HRor* instr) {
  return DoShift(Token::ROR, instr);
}

LInstruction* LChunkBuilder::DoShr(HShr* instr) {
  return DoShift(Token::SHR, instr);
}

LInstruction* LChunkBuilder::DoSar(HSar* instr) {
  return DoShift(Token::SAR, instr);
}

LInstruction* LChunkBuilder::DoShl(HShl* instr) {
  return DoShift(Token::SHL, instr);
}

// Bitwise and/or/xor: int32 fast path, otherwise generic (tagged) arithmetic.
LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    DCHECK(instr->left()->representation().Equals(instr->representation()));
    DCHECK(instr->right()->representation().Equals(instr->representation()));
    DCHECK(instr->CheckFlag(HValue::kTruncatingToInt32));
    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
    return DefineAsRegister(new (zone()) LBitI(left, right));
  } else {
    return DoArithmeticT(instr->op(), instr);
  }
}

// Division by a power-of-two constant. Deopt is needed for -0 results,
// kMinInt / -1 overflow, or any inexact (remainder-producing) division when
// the uses do not truncate.
LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
  DCHECK(instr->representation().IsSmiOrInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result =
      DefineAsRegister(new (zone()) LDivByPowerOf2I(dividend, divisor));
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
      (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
       divisor != 1 && divisor != -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Division by a general non-power-of-two constant (magic-number division).
LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
  DCHECK(instr->representation().IsInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result =
      DefineAsRegister(new (zone()) LDivByConstI(dividend, divisor));
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Division with a non-constant divisor.
LInstruction* LChunkBuilder::DoDivI(HDiv* instr) {
  DCHECK(instr->representation().IsSmiOrInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(instr->right());
  LInstruction* result =
      DefineAsRegister(new (zone()) LDivI(dividend, divisor));
  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
      instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
      (instr->CheckFlag(HValue::kCanOverflow) &&
       !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) ||
      (!instr->IsMathFloorOfDiv() &&
       !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Dispatch for HDiv based on representation and divisor shape.
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    if (instr->RightIsPowerOf2()) {
      return DoDivByPowerOf2I(instr);
    } else if (instr->right()->IsConstant()) {
      return DoDivByConstI(instr);
    } else {
      return DoDivI(instr);
    }
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::DIV, instr);
  } else {
    return DoArithmeticT(Token::DIV, instr);
  }
}

// Flooring division by a power-of-two constant.
LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
  LOperand* dividend = UseRegisterAtStart(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(
      new (zone()) LFlooringDivByPowerOf2I(dividend, divisor));
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Flooring division by a general constant. The temp register is only needed
// when the sign of the dividend can disagree with the divisor (floor
// correction path); otherwise it is omitted.
LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
  DCHECK(instr->representation().IsInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LOperand* temp =
      ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
       (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive)))
          ? NULL
          : TempRegister();
  LInstruction* result = DefineAsRegister(
      new (zone()) LFlooringDivByConstI(dividend, divisor, temp));
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Flooring division with a non-constant divisor.
LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
  DCHECK(instr->representation().IsSmiOrInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(instr->right());
  LInstruction* result =
      DefineAsRegister(new (zone()) LFlooringDivI(dividend, divisor));
  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
      instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
      (instr->CheckFlag(HValue::kCanOverflow) &&
       !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Dispatch for Math.floor(a / b) based on divisor shape.
LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
  if (instr->RightIsPowerOf2()) {
    return DoFlooringDivByPowerOf2I(instr);
  } else if (instr->right()->IsConstant()) {
    return DoFlooringDivByConstI(instr);
  } else {
    return DoFlooringDivI(instr);
  }
}

// Modulus by a power-of-two constant; result reuses the dividend register.
LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
  DCHECK(instr->representation().IsSmiOrInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegisterAtStart(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result =
      DefineSameAsFirst(new (zone()) LModByPowerOf2I(dividend, divisor));
  if (instr->CheckFlag(HValue::kLeftCanBeNegative) &&
      instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Modulus by a general constant.
LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
  DCHECK(instr->representation().IsSmiOrInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result =
      DefineAsRegister(new (zone()) LModByConstI(dividend, divisor));
  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Modulus with a non-constant divisor.
LInstruction* LChunkBuilder::DoModI(HMod* instr) {
  DCHECK(instr->representation().IsSmiOrInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(instr->right());
  LInstruction* result =
      DefineAsRegister(new (zone()) LModI(dividend, divisor));
  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
      instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Dispatch for HMod based on representation and divisor shape.
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    if (instr->RightIsPowerOf2()) {
      return DoModByPowerOf2I(instr);
    } else if (instr->right()->IsConstant()) {
      return DoModByConstI(instr);
    } else {
      return DoModI(instr);
    }
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MOD, instr);
  } else {
    return DoArithmeticT(Token::MOD, instr);
  }
}

// Multiplication. Operand policies differ for constant vs. register RHS and
// for the -0 bailout case (which must not clobber the left input early).
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    DCHECK(instr->left()->representation().Equals(instr->representation()));
    DCHECK(instr->right()->representation().Equals(instr->representation()));
    HValue* left = instr->BetterLeftOperand();
    HValue* right = instr->BetterRightOperand();
    LOperand* left_op;
    LOperand* right_op;
    bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
    bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
    int32_t constant_value = 0;
    if (right->IsConstant()) {
      HConstant* constant = HConstant::cast(right);
      constant_value = constant->Integer32Value();
      // Constants -1, 0 and 1 can be optimized if the result can overflow.
      // For other constants, it can be optimized only without overflow.
      if (!can_overflow || ((constant_value >= -1) && (constant_value <= 1))) {
        left_op = UseRegisterAtStart(left);
        right_op = UseConstant(right);
      } else {
        if (bailout_on_minus_zero) {
          left_op = UseRegister(left);
        } else {
          left_op = UseRegisterAtStart(left);
        }
        right_op = UseRegister(right);
      }
    } else {
      if (bailout_on_minus_zero) {
        left_op = UseRegister(left);
      } else {
        left_op = UseRegisterAtStart(left);
      }
      right_op = UseRegister(right);
    }
    LMulI* mul = new (zone()) LMulI(left_op, right_op);
    if (right_op->IsConstantOperand()
            ? ((can_overflow && constant_value == -1) ||
               (bailout_on_minus_zero && constant_value <= 0))
            : (can_overflow || bailout_on_minus_zero)) {
      AssignEnvironment(mul);
    }
    return DefineAsRegister(mul);
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MUL, instr);
  } else {
    return DoArithmeticT(Token::MUL, instr);
  }
}

// Subtraction; a constant LHS without overflow risk is redirected to the
// reverse-subtract form below.
LInstruction* LChunkBuilder::DoSub(HSub* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    DCHECK(instr->left()->representation().Equals(instr->representation()));
    DCHECK(instr->right()->representation().Equals(instr->representation()));
    if (instr->left()->IsConstant() &&
        !instr->CheckFlag(HValue::kCanOverflow)) {
      // If lhs is constant, do reverse subtraction instead.
      return DoRSub(instr);
    }
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LSubI* sub = new (zone()) LSubI(left, right);
    LInstruction* result = DefineAsRegister(sub);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::SUB, instr);
  } else {
    return DoArithmeticT(Token::SUB, instr);
  }
}

// Reverse subtraction: only valid when overflow is impossible (asserted).
LInstruction* LChunkBuilder::DoRSub(HSub* instr) {
  DCHECK(instr->representation().IsSmiOrInteger32());
  DCHECK(instr->left()->representation().Equals(instr->representation()));
  DCHECK(instr->right()->representation().Equals(instr->representation()));
  DCHECK(!instr->CheckFlag(HValue::kCanOverflow));
  // Note: The lhs of the subtraction becomes the rhs of the
  // reverse-subtraction.
  LOperand* left = UseRegisterAtStart(instr->right());
  LOperand* right = UseOrConstantAtStart(instr->left());
  LRSubI* rsb = new (zone()) LRSubI(left, right);
  LInstruction* result = DefineAsRegister(rsb);
  return result;
}

// Fused multiply-add (a * b + c) on doubles.
LInstruction* LChunkBuilder::DoMultiplyAdd(HMul* mul, HValue* addend) {
  LOperand* multiplier_op = UseRegister(mul->left());
  LOperand* multiplicand_op = UseRegister(mul->right());
  LOperand* addend_op = UseRegister(addend);
  return DefineAsRegister(
      new (zone()) LMultiplyAddD(addend_op, multiplier_op, multiplicand_op));
}

// Fused multiply-subtract (m - a * b) on doubles.
LInstruction* LChunkBuilder::DoMultiplySub(HValue* minuend, HMul* mul) {
  LOperand* minuend_op = UseRegister(minuend);
  LOperand* multiplier_op = UseRegister(mul->left());
  LOperand* multiplicand_op = UseRegister(mul->right());
  return DefineAsRegister(
      new (zone()) LMultiplySubD(minuend_op, multiplier_op, multiplicand_op));
}

// Addition: int32 (with optional overflow deopt), external-pointer,
// double, and generic cases.
LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    DCHECK(instr->left()->representation().Equals(instr->representation()));
    DCHECK(instr->right()->representation().Equals(instr->representation()));
    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
    LAddI* add = new (zone()) LAddI(left, right);
    LInstruction* result = DefineAsRegister(add);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsExternal()) {
    DCHECK(instr->IsConsistentExternalRepresentation());
    DCHECK(!instr->CheckFlag(HValue::kCanOverflow));
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LAddI* add = new (zone()) LAddI(left, right);
    LInstruction* result = DefineAsRegister(add);
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::ADD, instr);
  } else {
    return DoArithmeticT(Token::ADD, instr);
  }
}

// Math.min / Math.max for int32 and double representations.
LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
  LOperand* left = NULL;
  LOperand* right = NULL;
  if (instr->representation().IsSmiOrInteger32()) {
    DCHECK(instr->left()->representation().Equals(instr->representation()));
    DCHECK(instr->right()->representation().Equals(instr->representation()));
    left = UseRegisterAtStart(instr->BetterLeftOperand());
    right = UseOrConstantAtStart(instr->BetterRightOperand());
  } else {
    DCHECK(instr->representation().IsDouble());
    DCHECK(instr->left()->representation().IsDouble());
    DCHECK(instr->right()->representation().IsDouble());
    left = UseRegister(instr->left());
    right = UseRegister(instr->right());
  }
  return DefineAsRegister(new (zone()) LMathMinMax(left, right));
}

// Math.pow: calls out with fixed registers (base in d1, double exponent in
// d2 or tagged/int exponent in r4, result in d3); may deoptimize eagerly.
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  DCHECK(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  DCHECK(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), d1);
  LOperand* right = exponent_type.IsDouble()
                        ? UseFixedDouble(instr->right(), d2)
                        : UseFixed(instr->right(), r4);
  LPower* result = new (zone()) LPower(left, right);
  return MarkAsCall(DefineFixedDouble(result, d3), instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}

// Generic (tagged) comparison via the CompareIC; operands and result in
// fixed registers.
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
  DCHECK(instr->left()->representation().IsTagged());
  DCHECK(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), r3);
  LOperand* right = UseFixed(instr->right(), r2);
  LCmpT* result = new (zone()) LCmpT(context, left, right);
  return MarkAsCall(DefineFixed(result, r2), instr);
}

// Numeric compare-and-branch; int32 allows constant operands, double does not.
LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
    HCompareNumericAndBranch* instr) {
  Representation r = instr->representation();
  if (r.IsSmiOrInteger32()) {
    DCHECK(instr->left()->representation().Equals(r));
    DCHECK(instr->right()->representation().Equals(r));
    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
    LOperand* right = UseRegisterOrConstantAtStart(instr->right());
    return new (zone()) LCompareNumericAndBranch(left, right);
  } else {
    DCHECK(r.IsDouble());
    DCHECK(instr->left()->representation().IsDouble());
    DCHECK(instr->right()->representation().IsDouble());
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterAtStart(instr->right());
    return new (zone()) LCompareNumericAndBranch(left, right);
  }
}

// Reference-equality compare-and-branch.
LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
    HCompareObjectEqAndBranch* instr) {
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  return new (zone()) LCmpObjectEqAndBranch(left, right);
}

// Branch on whether the value is the hole.
LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
    HCompareHoleAndBranch* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return new (zone()) LCmpHoleAndBranch(value);
}

// Branch on whether the tagged value is a string; needs a scratch register.
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
  DCHECK(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  return new (zone()) LIsStringAndBranch(value, temp);
}

// Branch on whether the tagged value is a Smi.
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
  DCHECK(instr->value()->representation().IsTagged());
  return new (zone()) LIsSmiAndBranch(Use(instr->value()));
}

// Branch on the undetectable bit of the value's map; needs a scratch.
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
    HIsUndetectableAndBranch* instr) {
  DCHECK(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new (zone()) LIsUndetectableAndBranch(value, TempRegister());
}

// String relational compare via a call; operands in fixed registers.
LInstruction* LChunkBuilder::DoStringCompareAndBranch(
    HStringCompareAndBranch* instr) {
  DCHECK(instr->left()->representation().IsTagged());
  DCHECK(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), r3);
  LOperand* right = UseFixed(instr->right(), r2);
  LStringCompareAndBranch* result =
      new (zone()) LStringCompareAndBranch(context, left, right);
  return MarkAsCall(result, instr);
}

// Branch on the value's instance type.
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
    HHasInstanceTypeAndBranch* instr) {
  DCHECK(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new (zone()) LHasInstanceTypeAndBranch(value);
}

// Extract the cached array index from a string's hash field.
LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
    HGetCachedArrayIndex* instr) {
  DCHECK(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new (zone()) LGetCachedArrayIndex(value));
}

// Branch on whether a cached array index is present.
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
    HHasCachedArrayIndexAndBranch* instr) {
  DCHECK(instr->value()->representation().IsTagged());
  return new (zone())
      LHasCachedArrayIndexAndBranch(UseRegisterAtStart(instr->value()));
}

// Branch on the object's class name; needs a scratch register.
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
    HClassOfTestAndBranch* instr) {
  DCHECK(instr->value()->representation().IsTagged());
  LOperand* value = UseRegister(instr->value());
  return new (zone()) LClassOfTestAndBranch(value, TempRegister());
}

// Read a character from a sequential string.
LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
  LOperand* string = UseRegisterAtStart(instr->string());
  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
  return DefineAsRegister(new (zone()) LSeqStringGetChar(string, index));
}

// Write a character into a sequential string. Under --debug-code the index
// must be in a register and a context is needed for the verification call.
LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
  LOperand* string = UseRegisterAtStart(instr->string());
  LOperand* index = FLAG_debug_code
                        ? UseRegisterAtStart(instr->index())
                        : UseRegisterOrConstantAtStart(instr->index());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
  return new (zone()) LSeqStringSetChar(context, string, index, value);
}

// Array bounds check; elided entirely when provably unnecessary (unless
// --debug-code keeps it for verification).
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
  if (!FLAG_debug_code && instr->skip_check()) return NULL;
  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
  LOperand* length = !index->IsConstantOperand()
                         ? UseRegisterOrConstantAtStart(instr->length())
                         : UseRegisterAtStart(instr->length());
  LInstruction* result = new (zone()) LBoundsCheck(index, length);
  if (!FLAG_debug_code || !instr->skip_check()) {
    result = AssignEnvironment(result);
  }
  return result;
}

LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception). There is nothing specific to do.
  return NULL;
}

// HUseConst generates no code.
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) { return NULL; }

LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen.
  UNREACHABLE();
  return NULL;
}

// Representation change (HChange): selects the tag/untag/convert instruction
// for every from/to representation pair, attaching deopt environments and
// pointer maps where the conversion can fail or allocate.
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();
  HValue* val = instr->value();
  if (from.IsSmi()) {
    if (to.IsTagged()) {
      LOperand* value = UseRegister(val);
      return DefineSameAsFirst(new (zone()) LDummyUse(value));
    }
    // A Smi is a special case of Tagged; fall through to the tagged paths.
    from = Representation::Tagged();
  }
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      LOperand* value = UseRegister(val);
      LInstruction* result =
          DefineAsRegister(new (zone()) LNumberUntagD(value));
      if (!val->representation().IsSmi()) result = AssignEnvironment(result);
      return result;
    } else if (to.IsSmi()) {
      LOperand* value = UseRegister(val);
      if (val->type().IsSmi()) {
        return DefineSameAsFirst(new (zone()) LDummyUse(value));
      }
      return AssignEnvironment(
          DefineSameAsFirst(new (zone()) LCheckSmi(value)));
    } else {
      DCHECK(to.IsInteger32());
      if (val->type().IsSmi() || val->representation().IsSmi()) {
        LOperand* value = UseRegisterAtStart(val);
        return DefineAsRegister(new (zone()) LSmiUntag(value, false));
      } else {
        LOperand* value = UseRegister(val);
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempDoubleRegister();
        LInstruction* result =
            DefineSameAsFirst(new (zone()) LTaggedToI(value, temp1, temp2));
        if (!val->representation().IsSmi()) result = AssignEnvironment(result);
        return result;
      }
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      // Boxing a double can allocate -> deferred call path + pointer map.
      info()->MarkAsDeferredCalling();
      LOperand* value = UseRegister(val);
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();
      LUnallocated* result_temp = TempRegister();
      LNumberTagD* result = new (zone()) LNumberTagD(value, temp1, temp2);
      return AssignPointerMap(Define(result, result_temp));
    } else if (to.IsSmi()) {
      LOperand* value = UseRegister(val);
      return AssignEnvironment(
          DefineAsRegister(new (zone()) LDoubleToSmi(value)));
    } else {
      DCHECK(to.IsInteger32());
      LOperand* value = UseRegister(val);
      LInstruction* result = DefineAsRegister(new (zone()) LDoubleToI(value));
      if (!instr->CanTruncateToInt32()) result = AssignEnvironment(result);
      return result;
    }
  } else if (from.IsInteger32()) {
    info()->MarkAsDeferredCalling();
    if (to.IsTagged()) {
      if (!instr->CheckFlag(HValue::kCanOverflow)) {
        LOperand* value = UseRegisterAtStart(val);
        return DefineAsRegister(new (zone()) LSmiTag(value));
      } else if (val->CheckFlag(HInstruction::kUint32)) {
        LOperand* value = UseRegisterAtStart(val);
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LNumberTagU* result = new (zone()) LNumberTagU(value, temp1, temp2);
        return AssignPointerMap(DefineAsRegister(result));
      } else {
        LOperand* value = UseRegisterAtStart(val);
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LNumberTagI* result = new (zone()) LNumberTagI(value, temp1, temp2);
        return AssignPointerMap(DefineAsRegister(result));
      }
    } else if (to.IsSmi()) {
      LOperand* value = UseRegister(val);
      LInstruction* result = DefineAsRegister(new (zone()) LSmiTag(value));
      if (instr->CheckFlag(HValue::kCanOverflow)) {
        result = AssignEnvironment(result);
      }
      return result;
    } else {
      DCHECK(to.IsDouble());
      if (val->CheckFlag(HInstruction::kUint32)) {
        return DefineAsRegister(new (zone()) LUint32ToDouble(UseRegister(val)));
      } else {
        return DefineAsRegister(new (zone()) LInteger32ToDouble(Use(val)));
      }
    }
  }
  UNREACHABLE();
  return NULL;
}

// Non-Smi check; the environment is skipped when the type is statically known
// to be a heap object.
LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LInstruction* result = new (zone()) LCheckNonSmi(value);
  if (!instr->value()->type().IsHeapObject()) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Smi check; always deoptimizable.
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new (zone()) LCheckSmi(value));
}

// Deopt if the backing ArrayBuffer has been neutered (detached).
LInstruction* LChunkBuilder::DoCheckArrayBufferNotNeutered(
    HCheckArrayBufferNotNeutered* instr) {
  LOperand* view = UseRegisterAtStart(instr->value());
  LCheckArrayBufferNotNeutered* result =
      new (zone()) LCheckArrayBufferNotNeutered(view);
  return AssignEnvironment(result);
}

// Instance-type check; always deoptimizable.
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LInstruction* result = new (zone()) LCheckInstanceType(value);
  return AssignEnvironment(result);
}

// Identity check against an expected value; always deoptimizable.
LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new (zone()) LCheckValue(value));
}

// Map check. Pure stability checks emit an operand-less instruction; real
// checks may trigger object migration, which calls out (pointer map needed).
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
  if (instr->IsStabilityCheck()) return new (zone()) LCheckMaps;
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  LInstruction* result =
      AssignEnvironment(new (zone()) LCheckMaps(value, temp));
  if (instr->HasMigrationTarget()) {
    info()->MarkAsDeferredCalling();
    result = AssignPointerMap(result);
  }
  return result;
}

// Clamp to uint8 for typed-array stores; tagged inputs may deoptimize.
LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
  HValue* value = instr->value();
  Representation input_rep = value->representation();
  LOperand* reg = UseRegister(value);
  if (input_rep.IsDouble()) {
    return DefineAsRegister(new (zone()) LClampDToUint8(reg));
  } else if (input_rep.IsInteger32()) {
    return DefineAsRegister(new (zone()) LClampIToUint8(reg));
  } else {
    DCHECK(input_rep.IsSmiOrTagged());
    LClampTToUint8* result =
        new (zone()) LClampTToUint8(reg, TempDoubleRegister());
    return AssignEnvironment(DefineAsRegister(result));
  }
}

// Extract the raw bit pattern of a double.
LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
  HValue* value = instr->value();
  DCHECK(value->representation().IsDouble());
  return DefineAsRegister(new (zone()) LDoubleBits(UseRegister(value)));
}

// Assemble a double from its hi/lo 32-bit halves.
LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
  LOperand* lo = UseRegister(instr->lo());
  LOperand* hi = UseRegister(instr->hi());
  return DefineAsRegister(new (zone()) LConstructDouble(hi, lo));
}

// Function return; stubs additionally carry the context, and the return
// value is fixed in r2.
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
  LOperand* context = info()->IsStub() ? UseFixed(instr->context(), cp) : NULL;
  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
  return new (zone())
      LReturn(UseFixed(instr->value(), r2), context, parameter_count);
}

// Materialize a constant in the variant matching its representation.
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
  Representation r = instr->representation();
  if (r.IsSmi()) {
    return DefineAsRegister(new (zone()) LConstantS);
  } else if (r.IsInteger32()) {
    return DefineAsRegister(new (zone()) LConstantI);
  } else if (r.IsDouble()) {
    return DefineAsRegister(new (zone()) LConstantD);
  } else if (r.IsExternal()) {
    return DefineAsRegister(new (zone()) LConstantE);
  } else if (r.IsTagged()) {
    return DefineAsRegister(new (zone()) LConstantT);
  } else {
    UNREACHABLE();
    return NULL;
  }
}

// Global load through the LoadIC; uses the IC's calling convention and an
// optional type-feedback vector register.
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* global_object =
      UseFixed(instr->global_object(), LoadDescriptor::ReceiverRegister());
  LOperand* vector = NULL;
  if (instr->HasVectorAndSlot()) {
    vector = FixedTemp(LoadWithVectorDescriptor::VectorRegister());
  }
  LLoadGlobalGeneric* result =
      new (zone()) LLoadGlobalGeneric(context, global_object, vector);
  return MarkAsCall(DefineFixed(result, r2), instr);
}

// Context-slot load; deoptimizes on the hole when required.
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  LInstruction* result =
      DefineAsRegister(new (zone()) LLoadContextSlot(context));
  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Context-slot store; write-barrier stores need clobberable temp registers.
LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
  LOperand* context;
  LOperand* value;
  if (instr->NeedsWriteBarrier()) {
    context = UseTempRegister(instr->context());
    value = UseTempRegister(instr->value());
  } else {
    context = UseRegister(instr->context());
    value = UseRegister(instr->value());
  }
  LInstruction* result = new (zone()) LStoreContextSlot(context, value);
  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
    result = AssignEnvironment(result);
  }
  return result;
}

// In-object / backing-store field load.
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
  LOperand* obj = UseRegisterAtStart(instr->object());
  return DefineAsRegister(new (zone()) LLoadNamedField(obj));
}

// Named load through the LoadIC calling convention; result fixed in r2.
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object =
      UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
  LOperand* vector = NULL;
  if (instr->HasVectorAndSlot()) {
    vector = FixedTemp(LoadWithVectorDescriptor::VectorRegister());
  }
  LInstruction* result =
      DefineFixed(new (zone()) LLoadNamedGeneric(context, object, vector), r2);
  return MarkAsCall(result, instr);
}

// Load a function's prototype; can deoptimize.
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
    HLoadFunctionPrototype* instr) {
  return AssignEnvironment(DefineAsRegister(
      new (zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}

// Load a value from the root list.
LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
  return DefineAsRegister(new (zone()) LLoadRoot);
}

// Keyed element load for both FixedArray-backed and typed-array-backed
// elements; attaches a deopt environment for hole checks and for uint32
// loads whose uses are not truncating.
LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
  DCHECK(instr->key()->representation().IsSmiOrInteger32());
  ElementsKind elements_kind = instr->elements_kind();
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LInstruction* result = NULL;
  if (!instr->is_fixed_typed_array()) {
    LOperand* obj = NULL;
    if (instr->representation().IsDouble()) {
      obj = UseRegister(instr->elements());
    } else {
      obj = UseRegisterAtStart(instr->elements());
    }
    result = DefineAsRegister(new (zone()) LLoadKeyed(obj, key, nullptr));
  } else {
    DCHECK((instr->representation().IsInteger32() &&
            !IsDoubleOrFloatElementsKind(elements_kind)) ||
           (instr->representation().IsDouble() &&
            IsDoubleOrFloatElementsKind(elements_kind)));
    LOperand* backing_store = UseRegister(instr->elements());
    LOperand* backing_store_owner = UseAny(instr->backing_store_owner());
    result = DefineAsRegister(
        new (zone()) LLoadKeyed(backing_store, key, backing_store_owner));
  }
  bool needs_environment;
  if (instr->is_fixed_typed_array()) {
    // see LCodeGen::DoLoadKeyedExternalArray
    needs_environment = elements_kind == UINT32_ELEMENTS &&
                        !instr->CheckFlag(HInstruction::kUint32);
  } else {
    // see LCodeGen::DoLoadKeyedFixedDoubleArray and
    // LCodeGen::DoLoadKeyedFixedArray
    needs_environment =
        instr->RequiresHoleCheck() ||
        (instr->hole_mode() == CONVERT_HOLE_TO_UNDEFINED && info()->IsStub());
  }
  if (needs_environment) {
    result = AssignEnvironment(result);
  }
  return result;
}

// Keyed load through the KeyedLoadIC calling convention; result fixed in r2.
LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object =
      UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
  LOperand* key = UseFixed(instr->key(), LoadDescriptor::NameRegister());
  LOperand* vector = NULL;
  if (instr->HasVectorAndSlot()) {
    vector = FixedTemp(LoadWithVectorDescriptor::VectorRegister());
  }
  LInstruction* result = DefineFixed(
      new (zone()) LLoadKeyedGeneric(context, object, key, vector), r2);
  return MarkAsCall(result, instr);
}

// Keyed element store. FixedArray stores that need a write barrier require
// clobberable temp registers; typed-array stores also track the owner of the
// external backing store to keep it alive.
LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
  if (!instr->is_fixed_typed_array()) {
    DCHECK(instr->elements()->representation().IsTagged());
    bool needs_write_barrier = instr->NeedsWriteBarrier();
    LOperand* object = NULL;
    LOperand* key = NULL;
    LOperand* val = NULL;
    if (instr->value()->representation().IsDouble()) {
      object = UseRegisterAtStart(instr->elements());
      val = UseRegister(instr->value());
      key = UseRegisterOrConstantAtStart(instr->key());
    } else {
      if (needs_write_barrier) {
        object = UseTempRegister(instr->elements());
        val = UseTempRegister(instr->value());
        key = UseTempRegister(instr->key());
      } else {
        object = UseRegisterAtStart(instr->elements());
        val = UseRegisterAtStart(instr->value());
        key = UseRegisterOrConstantAtStart(instr->key());
      }
    }
    return new (zone()) LStoreKeyed(object, key, val, nullptr);
  }
  DCHECK((instr->value()->representation().IsInteger32() &&
          !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
         (instr->value()->representation().IsDouble() &&
          IsDoubleOrFloatElementsKind(instr->elements_kind())));
  DCHECK(instr->elements()->representation().IsExternal());
  LOperand* val = UseRegister(instr->value());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LOperand* backing_store = UseRegister(instr->elements());
  LOperand* backing_store_owner = UseAny(instr->backing_store_owner());
  return new (zone()) LStoreKeyed(backing_store, key, val, backing_store_owner);
}

// Keyed store through the KeyedStoreIC calling convention, with optional
// vector/slot registers for type feedback.
LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* obj =
      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
  DCHECK(instr->object()->representation().IsTagged());
  DCHECK(instr->key()->representation().IsTagged());
  DCHECK(instr->value()->representation().IsTagged());
  LOperand* slot = NULL;
  LOperand* vector = NULL;
  if (instr->HasVectorAndSlot()) {
    slot = FixedTemp(VectorStoreICDescriptor::SlotRegister());
    vector = FixedTemp(VectorStoreICDescriptor::VectorRegister());
  }
  LStoreKeyedGeneric* result =
      new (zone()) LStoreKeyedGeneric(context, obj, key, val, slot, vector);
  return MarkAsCall(result, instr);
}

// Elements-kind transition: simple map swaps happen inline with a temp for
// the new map; anything else calls out with fixed registers.
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
    LOperand* object = UseRegister(instr->object());
    LOperand* new_map_reg = TempRegister();
    LTransitionElementsKind* result =
        new (zone()) LTransitionElementsKind(object, NULL, new_map_reg);
    return result;
  } else {
    LOperand* object = UseFixed(instr->object(), r2);
    LOperand* context = UseFixed(instr->context(), cp);
    LTransitionElementsKind* result =
        new (zone()) LTransitionElementsKind(object, context, NULL);
    return MarkAsCall(result, instr);
  }
}

// NOTE(review): the chunk ends mid-signature; the body of this function lies
// beyond this view.
LInstruction* LChunkBuilder::DoTrapAllocationMemento(
HTrapAllocationMemento* instr) { LOperand* object = UseRegister(instr->object()); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LTrapAllocationMemento* result = new (zone()) LTrapAllocationMemento(object, temp1, temp2); return AssignEnvironment(result); } LInstruction* LChunkBuilder::DoMaybeGrowElements(HMaybeGrowElements* instr) { info()->MarkAsDeferredCalling(); LOperand* context = UseFixed(instr->context(), cp); LOperand* object = Use(instr->object()); LOperand* elements = Use(instr->elements()); LOperand* key = UseRegisterOrConstant(instr->key()); LOperand* current_capacity = UseRegisterOrConstant(instr->current_capacity()); LMaybeGrowElements* result = new (zone()) LMaybeGrowElements(context, object, elements, key, current_capacity); DefineFixed(result, r2); return AssignPointerMap(AssignEnvironment(result)); } LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { bool is_in_object = instr->access().IsInobject(); bool needs_write_barrier = instr->NeedsWriteBarrier(); bool needs_write_barrier_for_map = instr->has_transition() && instr->NeedsWriteBarrierForMap(); LOperand* obj; if (needs_write_barrier) { obj = is_in_object ? UseRegister(instr->object()) : UseTempRegister(instr->object()); } else { obj = needs_write_barrier_for_map ? UseRegister(instr->object()) : UseRegisterAtStart(instr->object()); } LOperand* val; if (needs_write_barrier) { val = UseTempRegister(instr->value()); } else if (instr->field_representation().IsDouble()) { val = UseRegisterAtStart(instr->value()); } else { val = UseRegister(instr->value()); } // We need a temporary register for write barrier of the map field. LOperand* temp = needs_write_barrier_for_map ? 
TempRegister() : NULL; return new (zone()) LStoreNamedField(obj, val, temp); } LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) { LOperand* context = UseFixed(instr->context(), cp); LOperand* obj = UseFixed(instr->object(), StoreDescriptor::ReceiverRegister()); LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister()); LOperand* slot = NULL; LOperand* vector = NULL; if (instr->HasVectorAndSlot()) { slot = FixedTemp(VectorStoreICDescriptor::SlotRegister()); vector = FixedTemp(VectorStoreICDescriptor::VectorRegister()); } LStoreNamedGeneric* result = new (zone()) LStoreNamedGeneric(context, obj, val, slot, vector); return MarkAsCall(result, instr); } LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) { LOperand* context = UseFixed(instr->context(), cp); LOperand* left = UseFixed(instr->left(), r3); LOperand* right = UseFixed(instr->right(), r2); return MarkAsCall( DefineFixed(new (zone()) LStringAdd(context, left, right), r2), instr); } LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) { LOperand* string = UseTempRegister(instr->string()); LOperand* index = UseTempRegister(instr->index()); LOperand* context = UseAny(instr->context()); LStringCharCodeAt* result = new (zone()) LStringCharCodeAt(context, string, index); return AssignPointerMap(DefineAsRegister(result)); } LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) { LOperand* char_code = UseRegister(instr->value()); LOperand* context = UseAny(instr->context()); LStringCharFromCode* result = new (zone()) LStringCharFromCode(context, char_code); return AssignPointerMap(DefineAsRegister(result)); } LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) { info()->MarkAsDeferredCalling(); LOperand* context = UseAny(instr->context()); LOperand* size = UseRegisterOrConstant(instr->size()); LOperand* temp1 = TempRegister(); LOperand* temp2 = TempRegister(); LAllocate* result = new (zone()) LAllocate(context, size, 
temp1, temp2); return AssignPointerMap(DefineAsRegister(result)); } LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) { DCHECK(argument_count_ == 0); allocator_->MarkAsOsrEntry(); current_block_->last_environment()->set_ast_id(instr->ast_id()); return AssignEnvironment(new (zone()) LOsrEntry); } LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { LParameter* result = new (zone()) LParameter; if (instr->kind() == HParameter::STACK_PARAMETER) { int spill_index = chunk()->GetParameterStackSlot(instr->index()); return DefineAsSpilled(result, spill_index); } else { DCHECK(info()->IsStub()); CallInterfaceDescriptor descriptor = graph()->descriptor(); int index = static_cast<int>(instr->index()); Register reg = descriptor.GetRegisterParameter(index); return DefineFixed(result, reg); } } LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { // Use an index that corresponds to the location in the unoptimized frame, // which the optimized frame will subsume. int env_index = instr->index(); int spill_index = 0; if (instr->environment()->is_parameter_index(env_index)) { spill_index = chunk()->GetParameterStackSlot(env_index); } else { spill_index = env_index - instr->environment()->first_local_index(); if (spill_index > LUnallocated::kMaxFixedSlotIndex) { Retry(kTooManySpillSlotsNeededForOSR); spill_index = 0; } spill_index += StandardFrameConstants::kFixedSlotCount; } return DefineAsSpilled(new (zone()) LUnknownOSRValue, spill_index); } LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) { // There are no real uses of the arguments object. // arguments.length and element access are supported directly on // stack arguments, and any real arguments object use causes a bailout. // So this value is never used. return NULL; } LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) { instr->ReplayEnvironment(current_block_->last_environment()); // There are no real uses of a captured object. 
return NULL; } LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) { info()->MarkAsRequiresFrame(); LOperand* args = UseRegister(instr->arguments()); LOperand* length = UseRegisterOrConstantAtStart(instr->length()); LOperand* index = UseRegisterOrConstantAtStart(instr->index()); return DefineAsRegister(new (zone()) LAccessArgumentsAt(args, length, index)); } LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) { LOperand* context = UseFixed(instr->context(), cp); LOperand* value = UseFixed(instr->value(), r5); LTypeof* result = new (zone()) LTypeof(context, value); return MarkAsCall(DefineFixed(result, r2), instr); } LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) { return new (zone()) LTypeofIsAndBranch(UseRegister(instr->value())); } LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) { instr->ReplayEnvironment(current_block_->last_environment()); return NULL; } LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { if (instr->is_function_entry()) { LOperand* context = UseFixed(instr->context(), cp); return MarkAsCall(new (zone()) LStackCheck(context), instr); } else { DCHECK(instr->is_backwards_branch()); LOperand* context = UseAny(instr->context()); return AssignEnvironment( AssignPointerMap(new (zone()) LStackCheck(context))); } } LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { HEnvironment* outer = current_block_->last_environment(); outer->set_ast_id(instr->ReturnId()); HConstant* undefined = graph()->GetConstantUndefined(); HEnvironment* inner = outer->CopyForInlining( instr->closure(), instr->arguments_count(), instr->function(), undefined, instr->inlining_kind(), instr->syntactic_tail_call_mode()); // Only replay binding of arguments object if it wasn't removed from graph. 
if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedFunction(instr->shared()); return NULL; } LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) { LInstruction* pop = NULL; HEnvironment* env = current_block_->last_environment(); if (env->entry()->arguments_pushed()) { int argument_count = env->arguments_environment()->parameter_count(); pop = new (zone()) LDrop(argument_count); DCHECK(instr->argument_delta() == -argument_count); } HEnvironment* outer = current_block_->last_environment()->DiscardInlined(false); current_block_->UpdateEnvironment(outer); return pop; } LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) { LOperand* context = UseFixed(instr->context(), cp); LOperand* object = UseFixed(instr->enumerable(), r2); LForInPrepareMap* result = new (zone()) LForInPrepareMap(context, object); return MarkAsCall(DefineFixed(result, r2), instr, CAN_DEOPTIMIZE_EAGERLY); } LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) { LOperand* map = UseRegister(instr->map()); return AssignEnvironment( DefineAsRegister(new (zone()) LForInCacheArray(map))); } LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) { LOperand* value = UseRegisterAtStart(instr->value()); LOperand* map = UseRegisterAtStart(instr->map()); return AssignEnvironment(new (zone()) LCheckMapValue(value, map)); } LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { LOperand* object = UseRegister(instr->object()); LOperand* index = UseTempRegister(instr->index()); LLoadFieldByIndex* load = new (zone()) LLoadFieldByIndex(object, index); LInstruction* result = DefineSameAsFirst(load); return AssignPointerMap(result); } } // namespace internal } // namespace v8
weolar/miniblink49
v8_5_1/src/crankshaft/s390/lithium-s390.cc
C++
apache-2.0
83,254
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Threading;
using Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel;
using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem.Extensions;

namespace Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem.CPS
{
    internal sealed partial class CPSProject
    {
        /// <summary>
        /// Returns the root code model for this project, creating it on first use.
        /// </summary>
        public EnvDTE.CodeModel GetCodeModel(EnvDTE.Project parent)
            => _projectCodeModel.GetOrCreateRootCodeModel(parent);

        /// <summary>
        /// Returns the file-level code model for the given project item, or null when
        /// the item's full path cannot be resolved.
        /// </summary>
        public EnvDTE.FileCodeModel GetFileCodeModel(EnvDTE.ProjectItem item)
            => item.TryGetFullPath(out var filePath)
                ? _projectCodeModel.GetOrCreateFileCodeModel(filePath, item)
                : null;

        /// <summary>
        /// Creates file code models through the project system for CPS-based projects.
        /// </summary>
        private class CPSCodeModelInstanceFactory : ICodeModelInstanceFactory
        {
            private readonly CPSProject _project;

            public CPSCodeModelInstanceFactory(CPSProject project)
                => _project = project;

            EnvDTE.FileCodeModel ICodeModelInstanceFactory.TryCreateFileCodeModelThroughProjectSystem(string filePath)
            {
                var item = GetProjectItem(filePath);
                return item == null
                    ? null
                    : _project._projectCodeModel.GetOrCreateFileCodeModel(filePath, item);
            }

            private EnvDTE.ProjectItem GetProjectItem(string filePath)
            {
                // Resolve the DTE project first; without it no item lookup is possible.
                var dteProject = _project._visualStudioWorkspace.TryGetDTEProject(_project._visualStudioProject.Id);
                return dteProject?.FindItemByPath(filePath, StringComparer.OrdinalIgnoreCase);
            }
        }
    }
}
DustinCampbell/roslyn
src/VisualStudio/Core/Impl/ProjectSystem/CPS/CPSProject_IProjectCodeModelProvider.cs
C#
apache-2.0
2,069
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInspection; import com.intellij.analysis.JvmAnalysisBundle; import com.intellij.psi.CommonClassNames; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.PsiIdentifier; import com.siyeh.HardcodedMethodConstants; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.uast.UCallExpression; import org.jetbrains.uast.UCallableReferenceExpression; import org.jetbrains.uast.UElement; import org.jetbrains.uast.UastContextKt; public class StringToUpperWithoutLocale2Inspection extends AbstractBaseUastLocalInspectionTool { @Nls @NotNull @Override public String getDisplayName() { //TODO remove once inspection is registered in JvmAnalysisPlugin.xml return "StringToUpperWithoutLocale2Inspection"; } private static final UastCallMatcher MATCHER = UastCallMatcher.anyOf( UastCallMatcher.builder() .withMethodName(HardcodedMethodConstants.TO_UPPER_CASE) .withClassFqn(CommonClassNames.JAVA_LANG_STRING) .withArgumentsCount(0).build(), UastCallMatcher.builder() .withMethodName(HardcodedMethodConstants.TO_LOWER_CASE) .withClassFqn(CommonClassNames.JAVA_LANG_STRING) .withArgumentsCount(0).build() ); @NotNull @Override public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) { return new PsiElementVisitor() { @Override public void visitElement(PsiElement element) { UCallExpression callExpression = AnalysisUastUtil.getUCallExpression(element); if (callExpression != null) { handleCallExpression(callExpression, holder); return; } if (!(element instanceof PsiIdentifier)) return; PsiElement parent = element.getParent(); UElement parentUElement = UastContextKt.toUElement(parent); if (parentUElement instanceof UCallableReferenceExpression) { 
handleCallableReferenceExpression((UCallableReferenceExpression)parentUElement, element, holder); } } }; } private static void handleCallExpression(@NotNull UCallExpression callExpression, @NotNull ProblemsHolder holder) { if (!MATCHER.testCallExpression(callExpression)) return; if (NonNlsUastUtil.isCallExpressionWithNonNlsReceiver(callExpression)) return; PsiElement methodIdentifierPsi = AnalysisUastUtil.getMethodIdentifierSourcePsi(callExpression); if (methodIdentifierPsi == null) return; String methodName = callExpression.getMethodName(); if (methodName == null) return; // shouldn't happen holder.registerProblem(methodIdentifierPsi, getErrorDescription(methodName)); } private static void handleCallableReferenceExpression(@NotNull UCallableReferenceExpression expression, @NotNull PsiElement identifier, @NotNull ProblemsHolder holder) { if (!MATCHER.testCallableReferenceExpression(expression)) return; if (NonNlsUastUtil.isCallableReferenceExpressionWithNonNlsQualifier(expression)) return; holder.registerProblem(identifier, getErrorDescription(expression.getCallableName())); } @NotNull private static String getErrorDescription(@NotNull String methodName) { return JvmAnalysisBundle.message("jvm.inspections.string.touppercase.tolowercase.without.locale.description", methodName); } }
goodwinnk/intellij-community
jvm/jvm-analysis-impl/src/com/intellij/codeInspection/StringToUpperWithoutLocale2Inspection.java
Java
apache-2.0
3,686
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/ui/views/panels/taskbar_window_thumbnailer_win.h"

#include <dwmapi.h>

#include "base/logging.h"
#include "base/win/scoped_hdc.h"
#include "skia/ext/image_operations.h"
#include "ui/gfx/canvas.h"
#include "ui/gfx/gdi_util.h"

namespace {

// Converts |bitmap| into a GDI DIB section and copies the pixel data into it.
// Ownership of the returned HBITMAP passes to the caller, who must free it
// with ::DeleteObject.
HBITMAP GetNativeBitmapFromSkBitmap(const SkBitmap& bitmap) {
  int width = bitmap.width();
  int height = bitmap.height();

  BITMAPV4HEADER native_bitmap_header;
  gfx::CreateBitmapV4Header(width, height, &native_bitmap_header);

  HDC dc = ::GetDC(NULL);
  void* bits;
  HBITMAP native_bitmap = ::CreateDIBSection(dc,
      reinterpret_cast<BITMAPINFO*>(&native_bitmap_header),
      DIB_RGB_COLORS,
      &bits,
      NULL,
      0);
  DCHECK(native_bitmap);
  ::ReleaseDC(NULL, dc);
  // 32 bits per pixel: total size is width*height*4, row stride is width*4.
  bitmap.copyPixelsTo(bits, width * height * 4, width * 4);
  return native_bitmap;
}

// Toggles DWM's "iconic" (custom) thumbnail mode for |hwnd|. While enabled,
// the window is sent WM_DWMSENDICONICTHUMBNAIL and
// WM_DWMSENDICONICLIVEPREVIEWBITMAP instead of DWM snapshotting it itself.
void EnableCustomThumbnail(HWND hwnd, bool enable) {
  BOOL enable_value = enable;
  ::DwmSetWindowAttribute(hwnd,
                          DWMWA_FORCE_ICONIC_REPRESENTATION,
                          &enable_value,
                          sizeof(enable_value));
  ::DwmSetWindowAttribute(hwnd,
                          DWMWA_HAS_ICONIC_BITMAP,
                          &enable_value,
                          sizeof(enable_value));
}

}  // namespace

TaskbarWindowThumbnailerWin::TaskbarWindowThumbnailerWin(HWND hwnd)
    : hwnd_(hwnd) {
}

TaskbarWindowThumbnailerWin::~TaskbarWindowThumbnailerWin() {
}

// Captures an initial snapshot of |snapshot_hwnds| (defaulting to the owner
// window when the list is empty) and switches the taskbar thumbnail for
// hwnd_ into custom mode.
void TaskbarWindowThumbnailerWin::Start(
    const std::vector<HWND>& snapshot_hwnds) {
  snapshot_hwnds_ = snapshot_hwnds;
  if (snapshot_hwnds_.empty())
    snapshot_hwnds_.push_back(hwnd_);
  capture_bitmap_.reset(CaptureWindowImage());
  // Only request iconic callbacks from DWM when a snapshot actually exists.
  if (capture_bitmap_)
    EnableCustomThumbnail(hwnd_, true);
}

// Drops the cached snapshot and hands thumbnail rendering back to DWM.
void TaskbarWindowThumbnailerWin::Stop() {
  capture_bitmap_.reset();
  EnableCustomThumbnail(hwnd_, false);
}

// Message filter: answers the two DWM iconic-bitmap requests; any other
// message is left unhandled (returns false).
bool TaskbarWindowThumbnailerWin::FilterMessage(HWND hwnd,
                                               UINT message,
                                               WPARAM w_param,
                                               LPARAM l_param,
                                               LRESULT* l_result) {
  DCHECK_EQ(hwnd_, hwnd);
  switch (message) {
    case WM_DWMSENDICONICTHUMBNAIL:
      // The requested size is packed into l_param: HIWORD=width, LOWORD=height.
      return OnDwmSendIconicThumbnail(HIWORD(l_param),
                                      LOWORD(l_param),
                                      l_result);
    case WM_DWMSENDICONICLIVEPREVIEWBITMAP:
      return OnDwmSendIconicLivePreviewBitmap(l_result);
  }
  return false;
}

// Supplies DWM with the cached snapshot, scaled (aspect-preserving) to fit
// within the requested width/height.
bool TaskbarWindowThumbnailerWin::OnDwmSendIconicThumbnail(
    int width, int height, LRESULT* l_result) {
  DCHECK(capture_bitmap_.get());

  SkBitmap* thumbnail_bitmap = capture_bitmap_.get();

  // Scale the image if needed.
  SkBitmap scaled_bitmap;
  if (capture_bitmap_->width() != width ||
      capture_bitmap_->height() != height) {
    double x_scale = static_cast<double>(width) / capture_bitmap_->width();
    double y_scale = static_cast<double>(height) / capture_bitmap_->height();
    // Use the smaller factor so the scaled image fits in both dimensions.
    double scale = std::min(x_scale, y_scale);
    width = capture_bitmap_->width() * scale;
    height = capture_bitmap_->height() * scale;
    scaled_bitmap = skia::ImageOperations::Resize(
        *capture_bitmap_, skia::ImageOperations::RESIZE_GOOD, width, height);
    thumbnail_bitmap = &scaled_bitmap;
  }

  HBITMAP native_bitmap = GetNativeBitmapFromSkBitmap(*thumbnail_bitmap);
  ::DwmSetIconicThumbnail(hwnd_, native_bitmap, 0);
  ::DeleteObject(native_bitmap);

  *l_result = 0;
  return true;
}

// Supplies DWM with a freshly captured, full-size image for Aero Peek.
bool TaskbarWindowThumbnailerWin::OnDwmSendIconicLivePreviewBitmap(
    LRESULT* l_result) {
  scoped_ptr<SkBitmap> live_bitmap(CaptureWindowImage());
  HBITMAP native_bitmap = GetNativeBitmapFromSkBitmap(*live_bitmap);
  ::DwmSetIconicLivePreviewBitmap(hwnd_, native_bitmap, NULL, 0);
  ::DeleteObject(native_bitmap);
  *l_result = 0;
  return true;
}

// Paints every window in snapshot_hwnds_ into one canvas sized to the union
// of their screen rectangles. Returns NULL when that union is empty; the
// caller owns the returned SkBitmap.
SkBitmap* TaskbarWindowThumbnailerWin::CaptureWindowImage() const {
  // First pass: compute the enclosing rectangle over all snapshot windows.
  int enclosing_x = 0;
  int enclosing_y = 0;
  int enclosing_right = 0;
  int enclosing_bottom = 0;
  for (std::vector<HWND>::const_iterator iter = snapshot_hwnds_.begin();
       iter != snapshot_hwnds_.end(); ++iter) {
    RECT bounds;
    if (!::GetWindowRect(*iter, &bounds))
      continue;
    if (iter == snapshot_hwnds_.begin()) {
      enclosing_x = bounds.left;
      enclosing_y = bounds.top;
      enclosing_right = bounds.right;
      enclosing_bottom = bounds.bottom;
    } else {
      if (bounds.left < enclosing_x)
        enclosing_x = bounds.left;
      if (bounds.top < enclosing_y)
        enclosing_y = bounds.top;
      if (bounds.right > enclosing_right)
        enclosing_right = bounds.right;
      if (bounds.bottom > enclosing_bottom)
        enclosing_bottom = bounds.bottom;
    }
  }

  int width = enclosing_right - enclosing_x;
  int height = enclosing_bottom - enclosing_y;
  if (!width || !height)
    return NULL;

  gfx::Canvas canvas(gfx::Size(width, height), ui::SCALE_FACTOR_100P, false);
  {
    skia::ScopedPlatformPaint scoped_platform_paint(canvas.sk_canvas());
    HDC target_dc = scoped_platform_paint.GetPlatformSurface();
    // Second pass: blit each window at its offset inside the enclosing rect.
    for (std::vector<HWND>::const_iterator iter = snapshot_hwnds_.begin();
         iter != snapshot_hwnds_.end(); ++iter) {
      HWND current_hwnd = *iter;
      RECT current_bounds;
      if (!::GetWindowRect(current_hwnd, &current_bounds))
        continue;
      base::win::ScopedGetDC source_dc(current_hwnd);
      ::BitBlt(target_dc,
               current_bounds.left - enclosing_x,
               current_bounds.top - enclosing_y,
               current_bounds.right - current_bounds.left,
               current_bounds.bottom - current_bounds.top,
               source_dc,
               0,
               0,
               SRCCOPY);
      // NOTE(review): base::win::ScopedGetDC appears to release the DC in its
      // destructor, so this explicit ReleaseDC looks like a double release —
      // confirm against base/win/scoped_hdc.h.
      ::ReleaseDC(current_hwnd, source_dc);
    }
  }
  return new SkBitmap(canvas.ExtractImageRep().sk_bitmap());
}
plxaye/chromium
src/chrome/browser/ui/views/panels/taskbar_window_thumbnailer_win.cc
C++
apache-2.0
6,091
using System.IO;
using Moq;
using NuGet.Test.Mocks;
using Xunit;

namespace NuGet.Test
{
    // Tests for applying and reverting XDT (XML Document Transform) content
    // transforms when a package is installed into or removed from a project.
    public class XdtTransformTest
    {
        // Installing a package that carries .install.xdt/.uninstall.xdt content
        // files should apply the install transform to web.config and must not
        // copy the xdt files themselves into the project.
        [Fact]
        public void AddPackageWithXdtTransformFile()
        {
            // Arrange
            var mockProjectSystem = new MockProjectSystem();
            var mockRepository = new MockPackageRepository();
            mockProjectSystem.AddFile("web.config", @"<configuration>
    <system.web>
        <compilation debug=""true"" />
    </system.web>
</configuration>
".AsStream());

            var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());

            var package = new Mock<IPackage>();
            package.Setup(m => m.Id).Returns("A");
            package.Setup(m => m.Version).Returns(new SemanticVersion("1.0"));
            package.Setup(m => m.Listed).Returns(true);

            // Install transform: flips debug="true" to debug="false".
            var file = new Mock<IPackageFile>();
            file.Setup(m => m.Path).Returns(@"content\web.config.install.xdt");
            file.Setup(m => m.EffectivePath).Returns("web.config.install.xdt");
            file.Setup(m => m.GetStream()).Returns(() => @"<configuration xmlns:xdt=""http://schemas.microsoft.com/XML-Document-Transform"">
    <system.web>
        <compilation xdt:Locator=""Condition('@debug=true')"" debug=""false"" xdt:Transform=""Replace"" />
    </system.web>
</configuration>".AsStream());

            // Uninstall transform (not expected to run during install).
            var file2 = new Mock<IPackageFile>();
            file2.Setup(m => m.Path).Returns(@"content\web.config.uninstall.xdt");
            file2.Setup(m => m.EffectivePath).Returns("web.config.uninstall.xdt");
            file2.Setup(m => m.GetStream()).Returns(() => @"<configuration xmlns:xdt=""http://schemas.microsoft.com/XML-Document-Transform"">
    <system.web>
        <compilation xdt:Locator=""Match(debug)"" debug=""false"" xdt:Transform=""Remove"" />
    </system.web>
</configuration>".AsStream());

            package.Setup(m => m.GetFiles()).Returns(new[] { file.Object, file2.Object });
            mockRepository.AddPackage(package.Object);

            // Act
            projectManager.AddPackageReference("A");

            // Assert: xdt files consumed, web.config transformed in place.
            Assert.False(mockProjectSystem.FileExists("web.config.install.xdt"));
            Assert.False(mockProjectSystem.FileExists("web.config.uninstall.xdt"));
            Assert.True(mockProjectSystem.FileExists("web.config"));
            Assert.Equal(
                @"<configuration>
    <system.web>
        <compilation debug=""false""/>
    </system.web>
</configuration>
", mockProjectSystem.OpenFile("web.config").ReadToEnd());
        }

        // A malformed xdt file (undeclared 'xd' prefix) must surface as an
        // InvalidDataException with a message naming the file and project,
        // and must leave web.config unmodified.
        [Fact]
        public void ReThrowWithMeaningfulErrorMessageWhenXdtFileHasSyntaxError()
        {
            // Arrange
            var mockProjectSystem = new MockProjectSystem();
            var mockRepository = new MockPackageRepository();
            mockProjectSystem.AddFile("web.config", @"<configuration>
    <system.web>
        <compilation debug=""true"" />
    </system.web>
</configuration>
".AsStream());

            var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());

            var package = new Mock<IPackage>();
            package.Setup(m => m.Id).Returns("A");
            package.Setup(m => m.Version).Returns(new SemanticVersion("1.0"));
            package.Setup(m => m.Listed).Returns(true);

            // Note the deliberate typo: xd:Locator instead of xdt:Locator.
            var file = new Mock<IPackageFile>();
            file.Setup(m => m.Path).Returns(@"content\web.config.install.xdt");
            file.Setup(m => m.EffectivePath).Returns("web.config.install.xdt");
            file.Setup(m => m.GetStream()).Returns(() => @"<configuration xmlns:xdt=""http://schemas.microsoft.com/XML-Document-Transform"">
    <system.web>
        <compilation xd:Locator=""Condition('@debug=true')"" debug=""false"" xdt:Transform=""Replace"" />
    </system.web>
</configuration>".AsStream());

            var file2 = new Mock<IPackageFile>();
            file2.Setup(m => m.Path).Returns(@"content\web.config.uninstall.xdt");
            file2.Setup(m => m.EffectivePath).Returns("web.config.uninstall.xdt");
            file2.Setup(m => m.GetStream()).Returns(() => @"<configuration xmlns:xdt=""http://schemas.microsoft.com/XML-Document-Transform"">
    <system.web>
        <compilation xdt:Locator=""Match(debug)"" debug=""false"" xdt:Transform=""Remove"" />
    </system.web>
</configuration>".AsStream());

            package.Setup(m => m.GetFiles()).Returns(new[] { file.Object, file2.Object });
            mockRepository.AddPackage(package.Object);

            // Act
            ExceptionAssert.Throws<InvalidDataException>(
                () => projectManager.AddPackageReference("A"),
                @"An error occurred while applying transformation to 'web.config' in project 'x:\MockFileSystem': 'xd' is an undeclared prefix. Line 3, position 22.");

            // Assert: nothing copied, web.config untouched.
            Assert.False(mockProjectSystem.FileExists("web.config.install.xdt"));
            Assert.False(mockProjectSystem.FileExists("web.config.uninstall.xdt"));
            Assert.True(mockProjectSystem.FileExists("web.config"));
            Assert.Equal(
                @"<configuration>
    <system.web>
        <compilation debug=""true"" />
    </system.web>
</configuration>
", mockProjectSystem.OpenFile("web.config").ReadToEnd());
        }

        // Removing a package with an uninstall transform should apply that
        // transform (removing the matched element) on uninstall, while the
        // install step leaves the file unchanged.
        [Fact]
        public void RemovePackageWithXdtTransformFile()
        {
            // Arrange
            var mockProjectSystem = new MockProjectSystem();
            var mockRepository = new MockPackageRepository();
            mockProjectSystem.AddFile("web.config", @"<configuration>
    <system.web><compilation debug=""false"" /></system.web>
</configuration>
".AsStream());

            var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());

            var package = new Mock<IPackage>();
            package.Setup(m => m.Id).Returns("A");
            package.Setup(m => m.Version).Returns(new SemanticVersion("1.0"));
            package.Setup(m => m.Listed).Returns(true);

            var file = new Mock<IPackageFile>();
            file.Setup(m => m.Path).Returns(@"content\web.config.uninstall.xdt");
            file.Setup(m => m.EffectivePath).Returns("web.config.uninstall.xdt");
            file.Setup(m => m.GetStream()).Returns(() => @"<configuration xmlns:xdt=""http://schemas.microsoft.com/XML-Document-Transform"">
    <system.web>
        <compilation xdt:Locator=""Match(debug)"" debug=""false"" xdt:Transform=""Remove"" />
    </system.web>
</configuration>".AsStream());

            package.Setup(m => m.GetFiles()).Returns(new[] { file.Object });
            mockRepository.AddPackage(package.Object);

            // Act 1
            projectManager.AddPackageReference("A");

            // Assert 1: install leaves the config unchanged.
            Assert.False(mockProjectSystem.FileExists("web.config.uninstall.xdt"));
            Assert.Equal(
                @"<configuration>
    <system.web><compilation debug=""false"" /></system.web>
</configuration>
", mockProjectSystem.OpenFile("web.config").ReadToEnd());

            // Act 2
            projectManager.RemovePackageReference("A");

            // Assert 2: uninstall transform removed the <compilation> element.
            Assert.False(mockProjectSystem.FileExists("web.config.uninstall.xdt"));
            Assert.True(mockProjectSystem.FileExists("web.config"));
            Assert.Equal(
                @"<configuration>
    <system.web></system.web>
</configuration>
", mockProjectSystem.OpenFile("web.config").ReadToEnd());
        }
    }
}
dolkensp/node.net
test/Core.Test/XdtTransformTest.cs
C#
apache-2.0
7,709
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.

/**
 * The length property of the search method is 1
 *
 * @path ch15/15.5/15.5.4/15.5.4.12/S15.5.4.12_A11.js
 * @description Checking String.prototype.search.length
 */

var search = String.prototype.search;

//////////////////////////////////////////////////////////////////////////////
// CHECK#1: the function must expose its own "length" property.
var hasLength = search.hasOwnProperty("length");
if (!hasLength) {
  $ERROR('#1: String.prototype.search.hasOwnProperty("length") return true. Actual: ' + hasLength);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
// CHECK#2: the declared arity must be exactly one.
if (search.length !== 1) {
  $ERROR('#2: String.prototype.search.length === 1. Actual: ' + search.length);
}
//
//////////////////////////////////////////////////////////////////////////////
hippich/typescript
tests/Fidelity/test262/suite/ch15/15.5/15.5.4/15.5.4.12/S15.5.4.12_A11.js
JavaScript
apache-2.0
988
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.interpreter;

/**
 * Sink to which to send rows.
 *
 * <p>Corresponds to an output of a relational expression.
 */
public interface Sink {
  /**
   * Sends one row to this sink.
   *
   * @param row the row to send
   * @throws InterruptedException if the thread is interrupted while the row
   *     is being accepted
   */
  void send(Row row) throws InterruptedException;

  /**
   * Signals that no more rows will be sent to this sink.
   *
   * @throws InterruptedException if the thread is interrupted
   */
  void end() throws InterruptedException;
}

// End Sink.java
glimpseio/incubator-calcite
core/src/main/java/org/apache/calcite/interpreter/Sink.java
Java
apache-2.0
1,077
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.util;

import java.nio.ByteBuffer;
import java.util.zip.Checksum;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.ChecksumException;

/**
 * Wrapper around JNI support code to do checksum computation
 * natively.
 *
 * <p>Updates of {@code >= SMALL_CHECKSUM} bytes go through the native
 * implementation; smaller updates (and any update after a failed native
 * link) fall back to the pure-Java CRC implementations.
 */
public class NativeCrc32 implements Checksum {
  static {
    // Trigger loading of the native library (if present) at class-init time.
    NativeCodeLoader.isNativeCodeLoaded();
  }

  /** the current CRC value, bit-flipped */
  private int crc;
  // Pure-Java fallbacks, one per supported checksum polynomial.
  private final PureJavaCrc32 pureJavaCrc32 = new PureJavaCrc32();
  private final PureJavaCrc32C pureJavaCrc32C = new PureJavaCrc32C();
  private int checksumType = CHECKSUM_CRC32;
  // Set to false permanently the first time a native call fails to link.
  private boolean isAvailable = true;

  // Local benchmarks show that for >= 128 bytes, NativeCrc32 performs
  // better than PureJavaCrc32.
  private static final int SMALL_CHECKSUM = 128;
  private static final Log LOG = LogFactory.getLog(NativeCrc32.class);

  /**
   * Creates a checksum of the given type.
   *
   * @param checksumType {@link #CHECKSUM_CRC32} or {@link #CHECKSUM_CRC32C}
   * @throws IllegalArgumentException if the type is neither of the above
   */
  public NativeCrc32(int checksumType) {
    this();
    if (checksumType != CHECKSUM_CRC32 && checksumType != CHECKSUM_CRC32C) {
      throw new IllegalArgumentException("Invalid checksum type");
    }
    this.checksumType = checksumType;
  }

  /** Creates a CRC32 checksum, probing once for native availability. */
  public NativeCrc32() {
    isAvailable = isAvailable();
    reset();
  }

  /**
   * {@inheritDoc}
   *
   * <p>Returns the current CRC as an unsigned 32-bit value (the internal
   * state is kept bit-flipped, per the CRC convention).
   */
  public long getValue() {
    return (~crc) & 0xffffffffL;
  }

  /** Sets the current CRC value; stored internally bit-flipped. */
  public void setValue(int crc) {
    this.crc = ~crc;
  }

  /** {@inheritDoc} */
  public void reset() {
    // All-ones is the standard CRC initial state (bit-flipped zero).
    crc = 0xffffffff;
  }

  /**
   * Return true if the JNI-based native CRC extensions are available.
   */
  public static boolean isAvailable() {
    return NativeCodeLoader.isNativeCodeLoaded();
  }

  /**
   * Verify the given buffers of data and checksums, and throw an exception
   * if any checksum is invalid. The buffers given to this function should
   * have their position initially at the start of the data, and their limit
   * set at the end of the data. The position, limit, and mark are not
   * modified.
   *
   * @param bytesPerSum the chunk size (eg 512 bytes)
   * @param checksumType the DataChecksum type constant
   * @param sums the DirectByteBuffer pointing at the beginning of the
   *             stored checksums
   * @param data the DirectByteBuffer pointing at the beginning of the
   *             data to check
   * @param basePos the position in the file where the data buffer starts
   * @param fileName the name of the file being verified
   * @throws ChecksumException if there is an invalid checksum
   */
  public static void verifyChunkedSums(int bytesPerSum, int checksumType,
      ByteBuffer sums, ByteBuffer data, String fileName, long basePos)
      throws ChecksumException {
    nativeVerifyChunkedSums(bytesPerSum, checksumType,
        sums, sums.position(),
        data, data.position(), data.remaining(),
        fileName, basePos);
  }

  /** Updates the checksum with a single byte (low 8 bits of {@code b}). */
  public void update(int b) {
    byte[] buf = new byte[1];
    buf[0] = (byte)b;
    update(buf, 0, buf.length);
  }

  // Routes an update through the pure-Java implementation matching
  // checksumType, round-tripping the bit-flipped state through it.
  private void updatePureJava(byte[] buf, int offset, int len) {
    if (checksumType == CHECKSUM_CRC32) {
      pureJavaCrc32.setValueInternal(crc);
      pureJavaCrc32.update(buf, offset, len);
      crc = pureJavaCrc32.getCrcValue();
    } else {
      pureJavaCrc32C.setValueInternal(crc);
      pureJavaCrc32C.update(buf, offset, len);
      crc = pureJavaCrc32C.getCrcValue();
    }
  }

  public void update(byte[] buf, int offset, int len) {
    // To avoid JNI overhead, use native methods only for large checksum chunks.
    if (isAvailable && len >= SMALL_CHECKSUM) {
      try {
        crc = update(crc, buf, offset, len, checksumType);
      } catch (UnsatisfiedLinkError ule) {
        // Native library missing after all: disable native path for good
        // and finish this update in pure Java.
        isAvailable = false;
        LOG.warn("Could not find native crc32 libraries," +
            " falling back to pure java", ule);
        updatePureJava(buf, offset, len);
      }
    } else {
      updatePureJava(buf, offset, len);
    }
  }

  // NOTE(review): public so it is visible to the JNI binding — presumably
  // required by the generated native header; confirm before narrowing.
  public native int update(int crc, byte[] buf, int offset, int len, int checksumType);

  private static native void nativeVerifyChunkedSums(
    int bytesPerSum, int checksumType,
    ByteBuffer sums, int sumsOffset,
    ByteBuffer data, int dataOffset, int dataLength,
    String fileName, long basePos);

  // Copy the constants over from DataChecksum so that javah will pick them up
  // and make them available in the native code header.
  public static final int CHECKSUM_CRC32 = DataChecksum.CHECKSUM_CRC32;
  public static final int CHECKSUM_CRC32C = DataChecksum.CHECKSUM_CRC32C;
}
shakamunyi/hadoop-20
src/core/org/apache/hadoop/util/NativeCrc32.java
Java
apache-2.0
5,310
/*
 * Copyright 2017 WSO2, Inc. (http://wso2.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.editor.log.appender;

import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.Core;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.core.util.Booleans;
import org.owasp.encoder.Encode;
import org.wso2.carbon.editor.log.appender.internal.CircularBuffer;
import org.wso2.carbon.editor.log.appender.internal.ConsoleLogEvent;

import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringWriter;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

/**
 * This appender will be used to capture the logs and later send to clients, if requested via the
 * logging web service.
 * This maintains a circular buffer, of some fixed amount {@value #BUFFER_SIZE}.
 */
@Plugin(name = "EditorConsole", category = Core.CATEGORY_NAME, elementType = Appender.ELEMENT_TYPE,
        printObject = true)
public final class EditorConsoleAppender extends AbstractAppender {

    /**
     * Fixed size of the circular buffer {@value #BUFFER_SIZE}
     */
    private static final int BUFFER_SIZE = 10;

    /**
     * Formatter used to render event timestamps.
     *
     * <p>FIX: {@code append()} can be invoked concurrently by multiple logging threads, and the
     * previous per-instance {@code SimpleDateFormat} field is documented as not thread-safe, so
     * concurrent formatting could corrupt timestamps. {@link DateTimeFormatter} is immutable and
     * thread-safe, so a single shared instance is safe. The pattern and the rendered text are
     * unchanged.
     */
    private static final DateTimeFormatter DATE_FORMATTER =
            DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss_SSS").withZone(ZoneId.systemDefault());

    /**
     * CircularBuffer to hold the log events
     */
    private CircularBuffer<ConsoleLogEvent> circularBuffer;

    /**
     * Creates an instance of EditorConsoleAppender.
     *
     * @param name appender name
     * @param filter null if not specified
     * @param layout pattern of log messages
     * @param ignoreExceptions default is true
     *                         <p>
     *                         Called by {@link #createAppender(String, Filter, Layout, String, String)}
     */
    private EditorConsoleAppender(final String name, final Filter filter,
                                  final Layout<? extends Serializable> layout, final boolean ignoreExceptions) {
        super(name, filter, layout, ignoreExceptions);
        activateOptions();
    }

    /**
     * Taken from the previous EditorConsoleAppender; fetches the shared buffer.
     */
    public void activateOptions() {
        this.circularBuffer = DataHolder.getBuffer(BUFFER_SIZE);
    }

    /**
     * Creates a EditorConsoleAppender instance with
     * attributes configured in log4j2.properties.
     *
     * @param name appender name
     * @param filter null if not specified
     * @param layout pattern of log messages
     * @param ignore default is true
     * @param buffSize configured buffer size (currently unused, see note below)
     * @return instance of EditorConsoleAppender, or null if no name was given
     */
    @PluginFactory
    public static EditorConsoleAppender createAppender(@PluginAttribute("name") final String name,
                                                       @PluginElement("Filters") final Filter filter,
                                                       @PluginElement("Layout") Layout<? extends Serializable> layout,
                                                       @PluginAttribute("ignoreExceptions") final String ignore,
                                                       @PluginAttribute("buffSize") final String buffSize) {
        // NOTE(review): buffSize is accepted from configuration but never read — the buffer is
        // always created with the fixed BUFFER_SIZE. Kept in the signature for config
        // compatibility; confirm whether it should be honored.
        if (name == null) {
            LOGGER.error("No name provided for EditorConsoleAppender");
            return null;
        } else {
            if (layout == null) {
                layout = PatternLayout.createDefaultLayout();
            }
            final boolean ignoreExceptions = Booleans.parseBoolean(ignore, true);
            return new EditorConsoleAppender(name, filter, layout, ignoreExceptions);
        }
    }

    /**
     * This is the overridden method from the Appender interface. {@link Appender}
     * This allows to write log events to preferred destination.
     * <p>
     * Converts the default log events to tenant aware log events and writes to a CircularBuffer
     *
     * @param logEvent the LogEvent object
     */
    @Override
    public void append(LogEvent logEvent) {
        if (circularBuffer != null) {
            circularBuffer.append(populateConsoleLogEvent(logEvent));
        }
    }

    /**
     * Copies the fields the editor console needs out of a log4j event, HTML-encoding the
     * message and formatting the timestamp.
     */
    private ConsoleLogEvent populateConsoleLogEvent(LogEvent logEvent) {
        ConsoleLogEvent consoleLogEvent = new ConsoleLogEvent();
        consoleLogEvent.setFqcn(logEvent.getLoggerName());
        consoleLogEvent.setLevel(logEvent.getLevel().name());
        consoleLogEvent.setMessage(getEncodedString(logEvent.getMessage().getFormattedMessage()));
        consoleLogEvent.setTimeStamp(DATE_FORMATTER.format(Instant.ofEpochMilli(logEvent.getTimeMillis())));
        if (logEvent.getThrown() != null) {
            consoleLogEvent.setStacktrace(getStacktrace(logEvent.getThrown()));
        }
        return consoleLogEvent;
    }

    /** Renders a throwable's full stack trace as a trimmed string. */
    private String getStacktrace(Throwable e) {
        StringWriter stringWriter = new StringWriter();
        e.printStackTrace(new PrintWriter(stringWriter));
        return stringWriter.toString().trim();
    }

    /**
     * HTML-encodes the message to defuse markup in log content; tags encoded
     * output with " (Encoded)" so viewers can tell it was altered.
     */
    private static String getEncodedString(String str) {
        String cleanedString = Encode.forHtml(str);
        if (!cleanedString.equals(str)) {
            cleanedString += " (Encoded)";
        }
        return cleanedString;
    }
}
minudika/carbon-analytics
components/org.wso2.carbon.editor.log.appender/src/main/java/org/wso2/carbon/editor/log/appender/EditorConsoleAppender.java
Java
apache-2.0
6,241
/*
 * Copyright 2014-2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.metrics.core.service.transformers;

import static org.hawkular.metrics.core.service.transformers.NumericDataPointCollector.createPercentile;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.rank.Max;
import org.apache.commons.math3.stat.descriptive.rank.Min;
import org.apache.commons.math3.stat.descriptive.summary.Sum;
import org.hawkular.metrics.core.service.PercentileWrapper;
import org.hawkular.metrics.model.DataPoint;
import org.hawkular.metrics.model.Percentile;
import org.hawkular.metrics.model.TaggedBucketPoint;

/**
 * Accumulates numeric data points belonging to one tag-defined bucket and
 * summarizes them as a {@link TaggedBucketPoint} (min/avg/median/max/sum,
 * sample count, plus any requested percentiles).
 *
 * @author jsanda
 */
public class TaggedDataPointCollector {

    // These are the tags that define this bucket.
    private final Map<String, String> tags;

    // Number of data points folded into this bucket so far.
    private int samples = 0;

    private final Min min = new Min();
    private final Mean average = new Mean();
    private final Max max = new Max();
    private final Sum sum = new Sum();

    // Parallel lists: percentiles.get(i) estimates percentileList.get(i)'s
    // quantile. One extra estimator for the 50.0 quantile (the median used by
    // toBucketPoint) is appended as the last element.
    private final List<PercentileWrapper> percentiles;
    private final List<Percentile> percentileList;

    /**
     * @param tags tags identifying this bucket
     * @param percentilesList quantiles to estimate in addition to the median
     */
    public TaggedDataPointCollector(Map<String, String> tags, List<Percentile> percentilesList) {
        this.tags = tags;
        this.percentiles = new ArrayList<>(percentilesList.size() + 1);
        this.percentileList = percentilesList;
        percentilesList.forEach(d -> percentiles.add(createPercentile.apply(d.getQuantile())));
        percentiles.add(createPercentile.apply(50.0)); // Important to be the last one
    }

    /** Folds one data point into every running statistic. */
    public void increment(DataPoint<? extends Number> dataPoint) {
        // Hoist the conversion; the same double feeds every estimator.
        double value = dataPoint.getValue().doubleValue();
        min.increment(value);
        average.increment(value);
        max.increment(value);
        sum.increment(value);
        samples++;
        percentiles.forEach(p -> p.addValue(value));
    }

    /** Builds the summary bucket point from the accumulated statistics. */
    public TaggedBucketPoint toBucketPoint() {
        // Pair each requested percentile with its estimator's result. (The
        // previous `if (size() > 0)` guard was redundant — the loop body
        // simply never runs for an empty list.)
        List<Percentile> percentileReturns = new ArrayList<>(percentileList.size());
        for (int i = 0; i < percentileList.size(); i++) {
            Percentile p = percentileList.get(i);
            PercentileWrapper pw = percentiles.get(i);
            percentileReturns.add(new Percentile(p.getOriginalQuantile(), pw.getResult()));
        }
        // The last estimator is the 50.0 quantile added in the constructor.
        return new TaggedBucketPoint(tags, min.getResult(), average.getResult(),
                percentiles.get(percentiles.size() - 1).getResult(), max.getResult(),
                sum.getResult(), samples, percentileReturns);
    }
}
tsegismont/hawkular-metrics
core/metrics-core-service/src/main/java/org/hawkular/metrics/core/service/transformers/TaggedDataPointCollector.java
Java
apache-2.0
3,340
// Copyright © 2011-12 Qtrac Ltd.
//
// This program or package and any associated files are licensed under the
// Apache License, Version 2.0 (the "License"); you may not use these files
// except in compliance with the License. You can get a copy of the License
// at: http://www.apache.org/licenses/LICENSE-2.0.
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package safemap_test

import (
	"fmt"
	"safemap"
	"sync"
	"testing"
)

// TestSafeMap exercises a safemap concurrently: one goroutine inserts 100
// keys, one deletes a fixed set of keys, and one looks keys up, all at the
// same time. After the goroutines finish it applies an updater function to a
// few keys and finally closes the map, printing progress throughout. It is a
// smoke/race test — it relies on the race detector and absence of panics
// rather than asserting final contents (delete/insert interleaving is
// nondeterministic).
func TestSafeMap(t *testing.T) {
	store := safemap.New()
	fmt.Printf("Initially has %d items\n", store.Len())
	// Keys (by integer id) that the deleter goroutine will remove.
	deleted := []int{0, 2, 3, 5, 7, 20, 399, 25, 30, 1000, 91, 97, 98, 99}
	var waiter sync.WaitGroup
	waiter.Add(1)
	go func() { // Concurrent Inserter
		for i := 0; i < 100; i++ {
			// Keys are the ids formatted as "0xNNNN".
			store.Insert(fmt.Sprintf("0x%04X", i), i)
			if i > 0 && i%15 == 0 {
				fmt.Printf("Inserted %d items\n", store.Len())
			}
		}
		fmt.Printf("Inserted %d items\n", store.Len())
		waiter.Done()
	}()
	waiter.Add(1)
	go func() { // Concurrent Deleter
		for _, i := range deleted {
			key := fmt.Sprintf("0x%04X", i)
			before := store.Len()
			store.Delete(key)
			fmt.Printf("Deleted m[%s] (%d) before=%d after=%d\n",
				key, i, before, store.Len())
		}
		waiter.Done()
	}()
	waiter.Add(1)
	go func() { // Concurrent Finder
		// Probe each deleted id and its successor; either outcome is valid
		// while the inserter/deleter are still running.
		for _, i := range deleted {
			for _, j := range []int{i, i + 1} {
				key := fmt.Sprintf("0x%04X", j)
				value, found := store.Find(key)
				if found {
					fmt.Printf("Found m[%s] == %d\n", key, value)
				} else {
					fmt.Printf("Not found m[%s] (%d)\n", key, j)
				}
			}
		}
		waiter.Done()
	}()
	waiter.Wait()
	// Updater multiplies existing values by 1000; missing keys would get 1.
	updater := func(value interface{}, found bool) interface{} {
		if found {
			return value.(int) * 1000
		}
		return 1
	}
	for _, i := range []int{5, 10, 15, 20, 25, 30, 35} {
		key := fmt.Sprintf("0x%04X", i)
		if value, found := store.Find(key); found {
			fmt.Printf("Original m[%s] == %d\t", key, value)
			store.Update(key, updater)
			if value, found := store.Find(key); found {
				fmt.Printf("Updated m[%s] == %5d\n", key, value)
			}
		}
	}
	fmt.Printf("Finished with %d items\n", store.Len())
	// not needed here but useful if you want to free up the goroutine
	data := store.Close()
	fmt.Println("Closed")
	fmt.Printf("len == %d\n", len(data))
	//for k, v := range data { fmt.Printf("%s = %v\n", k, v) }
}
kare/gobook
src/safemap/safemap_test.go
GO
apache-2.0
3,008
/*
 * ConnectBot: simple, powerful, open-source SSH client for Android
 * Copyright 2007 Kenny Root, Jeffrey Sharkey
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.connectbot.util;

import android.content.Context;
import android.preference.DialogPreference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import org.connectbot.R;

/**
 * Dialog preference that lets the user choose the bell volume with a
 * {@link SeekBar}. The bar position (0..100) is persisted as a float in the
 * range 0.0..1.0; every progress change is persisted immediately.
 *
 * @author kenny
 */
public class VolumePreference extends DialogPreference implements OnSeekBarChangeListener {
	/**
	 * @param context context this preference runs in
	 * @param attrs XML attributes for this preference
	 */
	public VolumePreference(Context context, AttributeSet attrs) {
		super(context, attrs);
		setupLayout(context, attrs);
	}

	/**
	 * @param context context this preference runs in
	 * @param attrs XML attributes for this preference
	 * @param defStyle default style resource passed through to the superclass
	 */
	public VolumePreference(Context context, AttributeSet attrs, int defStyle) {
		super(context, attrs, defStyle);
		setupLayout(context, attrs);
	}

	// Shared constructor body: installs the dialog layout and enables
	// persistence. (attrs is currently unused here.)
	private void setupLayout(Context context, AttributeSet attrs) {
		setDialogLayoutResource(R.layout.volume_preference_dialog_layout);
		setPersistent(true);
	}

	@Override
	protected void onBindDialogView(View view) {
		super.onBindDialogView(view);

		// Restore the persisted volume (0.0..1.0) into the 0..100 bar.
		SeekBar volumeBar = (SeekBar) view.findViewById(R.id.volume_bar);
		volumeBar.setProgress((int) (getPersistedFloat(
				PreferenceConstants.DEFAULT_BELL_VOLUME) * 100));
		volumeBar.setOnSeekBarChangeListener(this);
	}

	// Persist the new volume immediately, scaled back to 0.0..1.0.
	public void onProgressChanged(SeekBar seekBar, int progress,
			boolean fromUser) {
		persistFloat(progress / 100f);
	}

	// No-op; required by OnSeekBarChangeListener.
	public void onStartTrackingTouch(SeekBar seekBar) {
	}

	// No-op; required by OnSeekBarChangeListener.
	public void onStopTrackingTouch(SeekBar seekBar) {
	}
}
Potass/ConnectBot
app/src/main/java/org/connectbot/util/VolumePreference.java
Java
apache-2.0
2,105
#!/usr/bin/env ruby
# Encoding: utf-8
#
# Author:: api.dklimkin@gmail.com (Danial Klimkin)
#
# Copyright:: Copyright 2012, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
#           you may not use this file except in compliance with the License.
#           You may obtain a copy of the License at
#
#           http://www.apache.org/licenses/LICENSE-2.0
#
#           Unless required by applicable law or agreed to in writing, software
#           distributed under the License is distributed on an "AS IS" BASIS,
#           WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#           implied.
#           See the License for the specific language governing permissions and
#           limitations under the License.
#
# This example removes the user from all its teams. To determine which users
# exist, run get_all_users.rb.
#
# Tags: UserTeamAssociationService.getUserTeamAssociationsByStatement
# Tags: UserTeamAssociationService.performUserTeamAssociationAction

require 'dfp_api'
require 'dfp_api_statement'

API_VERSION = :v201411

# Pages through all team associations of a hard-coded user, prints each one,
# then performs a DeleteUserTeamAssociations action over the same filter.
def delete_user_team_associations()
  # Get DfpApi instance and load configuration from ~/dfp_api.yml.
  dfp = DfpApi::Api.new

  # To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
  # the configuration file or provide your own logger:
  # dfp.logger = Logger.new('dfp_xml.log')

  # Get the UserTeamAssociationService.
  uta_service = dfp.service(:UserTeamAssociationService, API_VERSION)

  # Set the user to remove from its teams.
  user_id = 'INSERT_USER_ID_HERE'.to_i

  # Create filter text to remove association by user ID.
  statement = DfpApiStatement::FilterStatement.new(
      'WHERE userId = :user_id',
      [
          {:key => 'user_id',
           :value => {:value => user_id, :xsi_type => 'NumberValue'}}
      ]
  )

  # Page through results, advancing statement.offset until all associations
  # have been listed. Note this is a post-condition (begin..end while) loop,
  # so the first page is always fetched.
  begin
    # Get user team associations by statement.
    page = uta_service.get_user_team_associations_by_statement(
        statement.toStatement())

    if page[:results]
      page[:results].each do |association|
        puts ("User team association of user ID %d with team ID %d will be " +
            "deleted.") % [association[:user_id], association[:team_id]]
      end
    end
    statement.offset += DfpApiStatement::SUGGESTED_PAGE_LIMIT
  end while statement.offset < page[:total_result_set_size]

  # Reset offset back to 0 to perform action.
  # NOTE(review): the return value is discarded — presumably the reset happens
  # as a side effect on `statement`; confirm against DfpApiStatement docs.
  statement.toStatementForAction()

  # Perform the action.
  result = uta_service.perform_user_team_association_action(
      {:xsi_type => 'DeleteUserTeamAssociations'}, statement.toStatement())

  # Display results.
  if result and result[:num_changes] > 0
    puts "Number of user team associations deleted: %d" % result[:num_changes]
  else
    puts 'No user team associations were deleted.'
  end
end

if __FILE__ == $0
  begin
    delete_user_team_associations()

  # HTTP errors.
  rescue AdsCommon::Errors::HttpError => e
    puts "HTTP Error: %s" % e

  # API errors.
  rescue DfpApi::Errors::ApiException => e
    puts "Message: %s" % e.message
    puts 'Errors:'
    e.errors.each_with_index do |error, index|
      puts "\tError [%d]:" % (index + 1)
      error.each do |field, value|
        puts "\t\t%s: %s" % [field, value]
      end
    end
  end
end
claimsmall/google-api-ads-ruby
dfp_api/examples/v201411/user_team_association_service/delete_user_team_associations.rb
Ruby
apache-2.0
3,310
// Generated by xsd compiler for android/java // DO NOT CHANGE! package com.amazon.webservices.awsecommerceservice._2011_08_01; import java.io.Serializable; import com.leansoft.nano.annotation.*; @RootElement(name = "EditorialReview", namespace = "http://webservices.amazon.com/AWSECommerceService/2011-08-01") public class EditorialReview implements Serializable { private static final long serialVersionUID = -1L; @Element(name = "Source") @Order(value=0) public String source; @Element(name = "Content") @Order(value=1) public String content; @Element(name = "IsLinkSuppressed") @Order(value=2) public Boolean isLinkSuppressed; }
uaraven/nano
sample/webservice/HelloAmazonProductAdvertising/src/com/amazon/webservices/awsecommerceservice/_2011_08_01/EditorialReview.java
Java
apache-2.0
663
#!/usr/bin/env ruby # Encoding: utf-8 # # Author:: api.dklimkin@gmail.com (Danial Klimkin) # # Copyright:: Copyright 2011, Google Inc. All Rights Reserved. # # License:: Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # This example gets first 500 children below the effective root ad unit. To # create ad units, run create_ad_units.rb. # # Tags: InventoryService.getAdUnitsByStatement, NetworkService.getCurrentNetwork require 'dfp_api' API_VERSION = :v201408 def get_ad_units_by_statement() # Get DfpApi instance and load configuration from ~/dfp_api.yml. dfp = DfpApi::Api.new # To enable logging of SOAP requests, set the log_level value to 'DEBUG' in # the configuration file or provide your own logger: # dfp.logger = Logger.new('dfp_xml.log') # Get the InventoryService. inventory_service = dfp.service(:InventoryService, API_VERSION) # Get the NetworkService. network_service = dfp.service(:NetworkService, API_VERSION) # Get the effective root ad unit ID of the network. effective_root_ad_unit_id = network_service.get_current_network[:effective_root_ad_unit_id] puts "Using effective root ad unit: %d" % effective_root_ad_unit_id # Create a statement to select the children of the effective root ad unit. statement = { :query => 'WHERE parentId = :id ORDER BY id ASC LIMIT 500', :values => [ {:key => 'id', :value => {:value => effective_root_ad_unit_id, :xsi_type => 'NumberValue'}} ] } # Get ad units by statement. 
page = inventory_service.get_ad_units_by_statement(statement) if page[:results] # Print details about each ad unit in results. page[:results].each_with_index do |ad_unit, index| puts "%d) Ad unit ID: %d, name: %s, status: %s." % [index, ad_unit[:id], ad_unit[:name], ad_unit[:status]] end end # Print a footer. if page.include?(:total_result_set_size) puts "Total number of ad units: %d" % page[:total_result_set_size] end end if __FILE__ == $0 begin get_ad_units_by_statement() # HTTP errors. rescue AdsCommon::Errors::HttpError => e puts "HTTP Error: %s" % e # API errors. rescue DfpApi::Errors::ApiException => e puts "Message: %s" % e.message puts 'Errors:' e.errors.each_with_index do |error, index| puts "\tError [%d]:" % (index + 1) error.each do |field, value| puts "\t\t%s: %s" % [field, value] end end end end
claimsmall/google-api-ads-ruby
dfp_api/examples/v201408/inventory_service/get_ad_units_by_statement.rb
Ruby
apache-2.0
3,045
// This file was procedurally generated from the following sources:
// - src/dstr-binding-for-await/ary-ptrn-rest-id-iter-step-err.case
// - src/dstr-binding-for-await/error/for-await-of-async-gen-var.template
/*---
description: Error forwarding when IteratorStep returns an abrupt completion (for-await-of statement)
esid: sec-for-in-and-for-of-statements-runtime-semantics-labelledevaluation
features: [generators, destructuring-binding, async-iteration]
flags: [generated, async]
info: |
    IterationStatement :
        for await ( ForDeclaration of AssignmentExpression ) Statement

    [...]
    2. Return ? ForIn/OfBodyEvaluation(ForDeclaration, Statement, keyResult,
       lexicalBinding, labelSet, async).

    13.7.5.13 Runtime Semantics: ForIn/OfBodyEvaluation

    [...]
    4. Let destructuring be IsDestructuring of lhs.
    [...]
    6. Repeat
       [...]
       j. If destructuring is false, then
          [...]
       k. Else
          i. If lhsKind is assignment, then
             [...]
          ii. Else if lhsKind is varBinding, then
              [...]
          iii. Else,
               1. Assert: lhsKind is lexicalBinding.
               2. Assert: lhs is a ForDeclaration.
               3. Let status be the result of performing BindingInitialization
                  for lhs passing nextValue and iterationEnv as arguments.
    [...]

    13.3.3.6 Runtime Semantics: IteratorBindingInitialization

    BindingRestElement : ... BindingIdentifier
    1. Let lhs be ResolveBinding(StringValue of BindingIdentifier,
       environment).
    2. ReturnIfAbrupt(lhs).
    3. Let A be ArrayCreate(0).
    4. Let n=0.
    5. Repeat,
       a. If iteratorRecord.[[done]] is false,
          i. Let next be IteratorStep(iteratorRecord.[[iterator]]).
          ii. If next is an abrupt completion, set iteratorRecord.[[done]] to
              true.
          iii. ReturnIfAbrupt(next).
---*/

// Counters tracking how far the generator advances: the first step throws,
// so `second` must never be incremented.
var first = 0;
var second = 0;
var iter = function*() {
  first += 1;
  throw new Test262Error();
  second += 1; // unreachable by construction
}();

async function * gen() {
  for await (var [...x] of [iter]) {
    return;
  }
}

// The abrupt completion from IteratorStep must reject the async generator's
// first next() promise with the original Test262Error.
gen().next()
  .then(_ => {
    throw new Test262Error("Expected async function to reject, but resolved.");
  }, ({ constructor }) => {
    assert.sameValue(constructor, Test262Error);
  })
  .then($DONE, $DONE);
sebastienros/jint
Jint.Tests.Test262/test/language/statements/for-await-of/async-gen-dstr-var-ary-ptrn-rest-id-iter-step-err.js
JavaScript
bsd-2-clause
2,326
// This file was procedurally generated from the following sources: // - src/dstr-binding-for-await/obj-ptrn-id-init-fn-name-cover.case // - src/dstr-binding-for-await/default/for-await-of-async-gen-const.template /*--- description: SingleNameBinding assigns `name` to "anonymous" functions "through" cover grammar (for-await-of statement) esid: sec-for-in-and-for-of-statements-runtime-semantics-labelledevaluation features: [destructuring-binding, async-iteration] flags: [generated, async] info: | IterationStatement : for await ( ForDeclaration of AssignmentExpression ) Statement [...] 2. Return ? ForIn/OfBodyEvaluation(ForDeclaration, Statement, keyResult, lexicalBinding, labelSet, async). 13.7.5.13 Runtime Semantics: ForIn/OfBodyEvaluation [...] 4. Let destructuring be IsDestructuring of lhs. [...] 6. Repeat [...] j. If destructuring is false, then [...] k. Else i. If lhsKind is assignment, then [...] ii. Else if lhsKind is varBinding, then [...] iii. Else, 1. Assert: lhsKind is lexicalBinding. 2. Assert: lhs is a ForDeclaration. 3. Let status be the result of performing BindingInitialization for lhs passing nextValue and iterationEnv as arguments. [...] 13.3.3.7 Runtime Semantics: KeyedBindingInitialization SingleNameBinding : BindingIdentifier Initializer_opt [...] 6. If Initializer is present and v is undefined, then [...] d. If IsAnonymousFunctionDefinition(Initializer) is true, then i. Let hasNameProperty be HasOwnProperty(v, "name"). ii. ReturnIfAbrupt(hasNameProperty). iii. If hasNameProperty is false, perform SetFunctionName(v, bindingId). ---*/ var iterCount = 0; async function *fn() { for await (const { cover = (function () {}), xCover = (0, function() {}) } of [{}]) { assert.sameValue(cover.name, 'cover'); assert.notSameValue(xCover.name, 'xCover'); iterCount += 1; } } fn().next() .then(() => assert.sameValue(iterCount, 1, 'iteration occurred as expected'), $DONE) .then($DONE, $DONE);
sebastienros/jint
Jint.Tests.Test262/test/language/statements/for-await-of/async-gen-dstr-const-obj-ptrn-id-init-fn-name-cover.js
JavaScript
bsd-2-clause
2,244
# Homebrew-cask definition for HistoryHound (St. Clair Software); installs the
# app bundle from the versioned DMG, verified by SHA-256.
class Historyhound < Cask
  url 'http://www.stclairsoft.com/download/HistoryHound-1.9.9.dmg'
  homepage 'http://www.stclairsoft.com/HistoryHound/'
  version '1.9.9'
  sha256 '3b0873d2425115680c347d5e4be607e9232781e1711aec58ef76e07a07847ad3'

  link 'HistoryHound.app'
end
okonomi/homebrew-cask
Casks/historyhound.rb
Ruby
bsd-2-clause
271
<?php
/**
 * Used for sending editable mails; subject, from etc. are stored in the model.
 */
class Kwc_Mail_Component extends Kwc_Mail_Abstract_Component
{
    /**
     * Component settings: a static Paragraphs child generator for the body,
     * default sender taken from the application config, and editable
     * from/reply-to fields.
     */
    public static function getSettings()
    {
        $ret = parent::getSettings();
        $ret['generators']['content'] = array(
            'class' => 'Kwf_Component_Generator_Static',
            'component' => 'Kwc_Paragraphs_Component'
        );
        $sender = Kwf_Mail::getSenderFromConfig();
        $ret['default'] = array(
            'from_email' => $sender['address'],
            'from_name' => $sender['name']
        );
        $ret['editFrom'] = true;
        $ret['editReplyTo'] = true;
        $ret['assetsAdmin']['files'][] = 'kwf/Kwc/Mail/PreviewWindow.js';
        $ret['assetsAdmin']['dep'][] = 'ExtWindow';
        $ret['ownModel'] = 'Kwc_Mail_Model';
        $ret['componentName'] = 'Mail';
        return $ret;
    }

    /**
     * Exposes the '-content' child component (if present) to the template.
     */
    public function getTemplateVars()
    {
        $ret = parent::getTemplateVars();
        $c = $this->getData()->getChildComponent('-content');
        if ($c) {
            $ret['content'] = $c;
        }
        return $ret;
    }

    /**
     * Collects inline styles (tag/class/style triples) for the HTML mail body
     * from the text styles model when the content is a Paragraphs component.
     */
    public function getHtmlStyles()
    {
        $ret = parent::getHtmlStyles();
        // Hack for tests, because the static getStylesArray call does not work there
        $contentComponent = $this->getData()->getChildComponent('-content');
        if ($contentComponent
            && is_instance_of($contentComponent->componentClass, 'Kwc_Paragraphs_Component')
        ) {
            foreach (Kwc_Basic_Text_StylesModel::getStylesArray() as $tag => $classes) {
                foreach ($classes as $class => $style) {
                    $ret[] = array(
                        'tag' => $tag,
                        'class' => $class,
                        'styles' => $style['styles']
                    );
                }
            }
            foreach (Kwf_Model_Abstract::getInstance('Kwc_Basic_Text_StylesModel')->getMasterStyles() as $style) {
                // Parse "prop: value;" pairs out of the master style's CSS string.
                $styles = array();
                if (preg_match_all('/([a-z-]+): +([^;]+);/', $style['styles'], $m)) {
                    foreach (array_keys($m[0]) as $i) {
                        $styles[$m[1][$i]] = $m[2][$i];
                    }
                }
                $ret[] = array(
                    'tag' => $style['tagName'],
                    'class' => $style['className'],
                    'styles' => $styles
                );
            }
        }
        return $ret;
    }

    /**
     * Builds the mail, applying the row's from/reply-to overrides when the
     * corresponding edit settings are enabled.
     */
    public function createMail(Kwc_Mail_Recipient_Interface $recipient, $data = null, $toAddress = null, $format = null, $addViewTracker = true)
    {
        $mail = parent::createMail($recipient, $data, $toAddress, $format, $addViewTracker);
        if ($this->getRow()->from_email && $this->_getSetting('editFrom')) {
            $mail->setFrom($this->getRow()->from_email, $this->getRow()->from_name);
        }
        if ($this->getRow()->reply_email && $this->_getSetting('editReplyTo')) {
            $mail->setReplyTo($this->getRow()->reply_email);
        }
        return $mail;
    }

    /**
     * Subject comes from the component's own row.
     */
    protected function _getSubject()
    {
        return $this->getRow()->subject;
    }
}
yacon/koala-framework
Kwc/Mail/Component.php
PHP
bsd-2-clause
3,218
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/renderer/dom_automation_controller.h" #include "base/bind.h" #include "base/bind_helpers.h" #include "base/json/json_string_value_serializer.h" #include "base/string_util.h" #include "content/common/view_messages.h" DomAutomationController::DomAutomationController() : sender_(NULL), routing_id_(MSG_ROUTING_NONE), automation_id_(MSG_ROUTING_NONE) { BindCallback("send", base::Bind(&DomAutomationController::Send, base::Unretained(this))); BindCallback("setAutomationId", base::Bind(&DomAutomationController::SetAutomationId, base::Unretained(this))); BindCallback("sendJSON", base::Bind(&DomAutomationController::SendJSON, base::Unretained(this))); BindCallback("sendWithId", base::Bind(&DomAutomationController::SendWithId, base::Unretained(this))); } void DomAutomationController::Send(const CppArgumentList& args, CppVariant* result) { if (args.size() != 1) { result->SetNull(); return; } if (automation_id_ == MSG_ROUTING_NONE) { result->SetNull(); return; } if (!sender_) { NOTREACHED(); result->SetNull(); return; } std::string json; JSONStringValueSerializer serializer(&json); scoped_ptr<Value> value; // Warning: note that JSON officially requires the root-level object to be // an object (e.g. {foo:3}) or an array, while here we're serializing // strings, bools, etc. to "JSON". This only works because (a) the JSON // writer is lenient, and (b) on the receiving side we wrap the JSON string // in square brackets, converting it to an array, then parsing it and // grabbing the 0th element to get the value out. 
switch (args[0].type) { case NPVariantType_String: { value.reset(Value::CreateStringValue(args[0].ToString())); break; } case NPVariantType_Bool: { value.reset(Value::CreateBooleanValue(args[0].ToBoolean())); break; } case NPVariantType_Int32: { value.reset(Value::CreateIntegerValue(args[0].ToInt32())); break; } case NPVariantType_Double: { // The value that is sent back is an integer while it is treated // as a double in this binding. The reason being that KJS treats // any number value as a double. Refer for more details, // chrome/third_party/webkit/src/JavaScriptCore/bindings/c/c_utility.cpp value.reset(Value::CreateIntegerValue(args[0].ToInt32())); break; } default: { result->SetNull(); return; } } if (!serializer.Serialize(*value)) { result->SetNull(); return; } bool succeeded = sender_->Send( new ViewHostMsg_DomOperationResponse(routing_id_, json, automation_id_)); result->Set(succeeded); automation_id_ = MSG_ROUTING_NONE; } void DomAutomationController::SendJSON(const CppArgumentList& args, CppVariant* result) { if (args.size() != 1) { result->SetNull(); return; } if (automation_id_ == MSG_ROUTING_NONE) { result->SetNull(); return; } if (!sender_) { NOTREACHED(); result->SetNull(); return; } if (args[0].type != NPVariantType_String) { result->SetNull(); return; } std::string json = args[0].ToString(); result->Set(sender_->Send( new ViewHostMsg_DomOperationResponse(routing_id_, json, automation_id_))); automation_id_ = MSG_ROUTING_NONE; } void DomAutomationController::SendWithId(const CppArgumentList& args, CppVariant* result) { if (args.size() != 2) { result->SetNull(); return; } if (!sender_) { NOTREACHED(); result->SetNull(); return; } if (!args[0].isNumber() || args[1].type != NPVariantType_String) { result->SetNull(); return; } result->Set(sender_->Send( new ViewHostMsg_DomOperationResponse(routing_id_, args[1].ToString(), args[0].ToInt32()))); } void DomAutomationController::SetAutomationId( const CppArgumentList& args, CppVariant* result) { if 
(args.size() != 1) { result->SetNull(); return; } // The check here is for NumberType and not Int32 as // KJS::JSType only defines a NumberType (no Int32) if (!args[0].isNumber()) { result->SetNull(); return; } automation_id_ = args[0].ToInt32(); result->Set(true); }
ropik/chromium
content/renderer/dom_automation_controller.cc
C++
bsd-3-clause
4,690
# # This file is part of pyasn1-modules software. # # Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com> # License: http://pyasn1.sf.net/license.html # # SNMPv3 message syntax # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3414.txt # from pyasn1.type import constraint from pyasn1.type import namedtype from pyasn1.type import univ class UsmSecurityParameters(univ.Sequence): componentType = namedtype.NamedTypes( namedtype.NamedType('msgAuthoritativeEngineID', univ.OctetString()), namedtype.NamedType('msgAuthoritativeEngineBoots', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))), namedtype.NamedType('msgAuthoritativeEngineTime', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))), namedtype.NamedType('msgUserName', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, 32))), namedtype.NamedType('msgAuthenticationParameters', univ.OctetString()), namedtype.NamedType('msgPrivacyParameters', univ.OctetString()) )
catapult-project/catapult
third_party/gsutil/third_party/pyasn1-modules/pyasn1_modules/rfc3414.py
Python
bsd-3-clause
1,161
/* * Copyright (c) 2008-2015 The Open Source Geospatial Foundation * * Published under the BSD license. * See https://github.com/geoext/geoext2/blob/master/license.txt for the full * text of the license. */ /* * @include OpenLayers/Util.js * @requires GeoExt/Version.js */ /** * The permalink provider. * * Sample code displaying a new permalink each time the map is moved: * * // create permalink provider * var permalinkProvider = Ext.create('GeoExt.state.PermalinkProvider', {}); * // set it in the state manager * Ext.state.Manager.setProvider(permalinkProvider); * // create a map panel, and make it stateful * var mapPanel = Ext.create('GeoExt.panel.Map', { * renderTo: "map", * layers: [ * new OpenLayers.Layer.WMS( * "Global Imagery", * "http://maps.opengeo.org/geowebcache/service/wms", * {layers: "bluemarble"} * ) * ], * stateId: "map", * prettyStateKeys: true // for pretty permalinks * }); * // display permalink each time state is changed * permalinkProvider.on({ * statechange: function(provider, name, value) { * alert(provider.getLink()); * } * }); * * @class GeoExt.state.PermalinkProvider */ Ext.define('GeoExt.state.PermalinkProvider', { extend : 'Ext.state.Provider', requires : [ 'GeoExt.Version' ], alias : 'widget.gx_permalinkprovider', /** * */ constructor: function(config){ this.callParent(arguments); config = config || {}; var url = config.url; delete config.url; Ext.apply(this, config); this.state = this.readURL(url); }, /** * Specifies whether type of state values should be encoded and decoded. * Set it to `false` if you work with components that don't require * encoding types, and want pretty permalinks. * * @property{Boolean} * @private */ encodeType: true, /** * Create a state object from a URL. * * @param url {String} The URL to get the state from. * @return {Object} The state object. 
* @private */ readURL: function(url) { var state = {}; var params = OpenLayers.Util.getParameters(url); var k, split, stateId; for(k in params) { if(params.hasOwnProperty(k)) { split = k.split("_"); if(split.length > 1) { stateId = split[0]; state[stateId] = state[stateId] || {}; state[stateId][split.slice(1).join("_")] = this.encodeType ? this.decodeValue(params[k]) : params[k]; } } } return state; }, /** * Returns the permalink corresponding to the current state. * * @param base {String} The base URL, optional. * @return {String} The permalink. */ getLink: function(base) { base = base || document.location.href; var params = {}; var id, k, state = this.state; for(id in state) { if(state.hasOwnProperty(id)) { for(k in state[id]) { params[id + "_" + k] = this.encodeType ? unescape(this.encodeValue(state[id][k])) : state[id][k]; } } } // merge params in the URL into the state params OpenLayers.Util.applyDefaults( params, OpenLayers.Util.getParameters(base)); var paramsStr = OpenLayers.Util.getParameterString(params); var qMark = base.indexOf("?"); if(qMark > 0) { base = base.substring(0, qMark); } return Ext.urlAppend(base, paramsStr); } });
m-click/geoext2
src/GeoExt/state/PermalinkProvider.js
JavaScript
bsd-3-clause
3,856
class Admin::ZonesController < Admin::BaseController resource_controller before_filter :load_data create.response do |wants| wants.html { redirect_to collection_url } end update.response do |wants| wants.html { redirect_to collection_url } end private def build_object @object ||= end_of_association_chain.send parent? ? :build : :new, object_params @object.zone_members.build() if @object.zone_members.empty? @object end def collection @search = end_of_association_chain.new_search(params[:search]) @search.order_by ||= :name @search.per_page = Spree::Config[:orders_per_page] @collection, @collection_count = @search.all, @search.count end def load_data @countries = Country.all.sort @states = State.all.sort @zones = Zone.all.sort end end
Ideas2IT/augur
vendor/spree/app/controllers/admin/zones_controller.rb
Ruby
bsd-3-clause
832
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/service_worker/service_worker_database.h" #include <string> #include "base/files/file_util.h" #include "base/files/scoped_temp_dir.h" #include "base/stl_util.h" #include "base/strings/string_number_conversions.h" #include "content/browser/service_worker/service_worker_database.pb.h" #include "content/common/service_worker/service_worker_types.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/leveldatabase/src/include/leveldb/write_batch.h" namespace content { namespace { typedef ServiceWorkerDatabase::RegistrationData RegistrationData; typedef ServiceWorkerDatabase::ResourceRecord Resource; struct AvailableIds { int64 reg_id; int64 res_id; int64 ver_id; AvailableIds() : reg_id(-1), res_id(-1), ver_id(-1) {} ~AvailableIds() {} }; GURL URL(const GURL& origin, const std::string& path) { EXPECT_TRUE(origin.is_valid()); EXPECT_EQ(origin, origin.GetOrigin()); GURL out(origin.spec() + path); EXPECT_TRUE(out.is_valid()); return out; } Resource CreateResource(int64 resource_id, const GURL& url, uint64 size_bytes) { EXPECT_TRUE(url.is_valid()); return Resource(resource_id, url, size_bytes); } ServiceWorkerDatabase* CreateDatabase(const base::FilePath& path) { return new ServiceWorkerDatabase(path); } ServiceWorkerDatabase* CreateDatabaseInMemory() { return new ServiceWorkerDatabase(base::FilePath()); } void VerifyRegistrationData(const RegistrationData& expected, const RegistrationData& actual) { EXPECT_EQ(expected.registration_id, actual.registration_id); EXPECT_EQ(expected.scope, actual.scope); EXPECT_EQ(expected.script, actual.script); EXPECT_EQ(expected.version_id, actual.version_id); EXPECT_EQ(expected.is_active, actual.is_active); EXPECT_EQ(expected.has_fetch_handler, actual.has_fetch_handler); EXPECT_EQ(expected.last_update_check, actual.last_update_check); 
EXPECT_EQ(expected.resources_total_size_bytes, actual.resources_total_size_bytes); } void VerifyResourceRecords(const std::vector<Resource>& expected, const std::vector<Resource>& actual) { ASSERT_EQ(expected.size(), actual.size()); for (size_t i = 0; i < expected.size(); ++i) { EXPECT_EQ(expected[i].resource_id, actual[i].resource_id); EXPECT_EQ(expected[i].url, actual[i].url); EXPECT_EQ(expected[i].size_bytes, actual[i].size_bytes); } } } // namespace TEST(ServiceWorkerDatabaseTest, OpenDatabase) { base::ScopedTempDir database_dir; ASSERT_TRUE(database_dir.CreateUniqueTempDir()); scoped_ptr<ServiceWorkerDatabase> database( CreateDatabase(database_dir.path())); // Should be false because the database does not exist at the path. EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->LazyOpen(false)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true)); database.reset(CreateDatabase(database_dir.path())); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(false)); } TEST(ServiceWorkerDatabaseTest, OpenDatabase_InMemory) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); // Should be false because the database does not exist in memory. EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->LazyOpen(false)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true)); database.reset(CreateDatabaseInMemory()); // Should be false because the database is not persistent. EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->LazyOpen(false)); } TEST(ServiceWorkerDatabaseTest, DatabaseVersion) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true)); // Opening a new database does not write anything, so its schema version // should be 0. 
int64 db_version = -1; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadDatabaseVersion(&db_version)); EXPECT_EQ(0u, db_version); // First writing triggers database initialization and bumps the schema // version. std::vector<ServiceWorkerDatabase::ResourceRecord> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; ServiceWorkerDatabase::RegistrationData data; ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data, resources, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadDatabaseVersion(&db_version)); EXPECT_LT(0, db_version); } TEST(ServiceWorkerDatabaseTest, UpgradeSchemaToVersion2) { base::ScopedTempDir database_dir; ASSERT_TRUE(database_dir.CreateUniqueTempDir()); scoped_ptr<ServiceWorkerDatabase> database( CreateDatabase(database_dir.path())); GURL origin("http://example.com"); // Add a registration to the database. std::vector<ServiceWorkerDatabase::ResourceRecord> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; ServiceWorkerDatabase::RegistrationData data; data.registration_id = 100; data.scope = URL(origin, "/foo"); data.script = URL(origin, "/script1.js"); data.version_id = 200; ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration(data, resources, &deleted_version, &newly_purgeable_resources)); // Sanity check on current version. int64 db_version = -1; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadDatabaseVersion(&db_version)); EXPECT_LE(2, db_version); // Now delete the data that will be created in an upgrade to schema version 2, // and reset the schema version to 1. 
leveldb::WriteBatch batch; batch.Delete("REGID_TO_ORIGIN:" + base::Int64ToString(data.registration_id)); batch.Put("INITDATA_DB_VERSION", base::Int64ToString(1)); ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteBatch(&batch)); // Make sure correct data got deleted. GURL origin_out; EXPECT_EQ( ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistrationOrigin(data.registration_id, &origin_out)); // Close and reopen the database to verify the schema got updated. database.reset(CreateDatabase(database_dir.path())); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true)); // Verify version number. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadDatabaseVersion(&db_version)); EXPECT_LE(2, db_version); // And check that looking up origin for registration works. EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->ReadRegistrationOrigin(data.registration_id, &origin_out)); EXPECT_EQ(origin, origin_out); } TEST(ServiceWorkerDatabaseTest, GetNextAvailableIds) { base::ScopedTempDir database_dir; ASSERT_TRUE(database_dir.CreateUniqueTempDir()); scoped_ptr<ServiceWorkerDatabase> database( CreateDatabase(database_dir.path())); GURL origin("http://example.com"); // The database has never been used, so returns initial values. AvailableIds ids; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds( &ids.reg_id, &ids.ver_id, &ids.res_id)); EXPECT_EQ(0, ids.reg_id); EXPECT_EQ(0, ids.ver_id); EXPECT_EQ(0, ids.res_id); ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds( &ids.reg_id, &ids.ver_id, &ids.res_id)); EXPECT_EQ(0, ids.reg_id); EXPECT_EQ(0, ids.ver_id); EXPECT_EQ(0, ids.res_id); // Writing a registration bumps the next available ids. 
std::vector<Resource> resources; RegistrationData data1; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; data1.registration_id = 100; data1.scope = URL(origin, "/foo"); data1.script = URL(origin, "/script1.js"); data1.version_id = 200; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds( &ids.reg_id, &ids.ver_id, &ids.res_id)); EXPECT_EQ(101, ids.reg_id); EXPECT_EQ(201, ids.ver_id); EXPECT_EQ(0, ids.res_id); // Writing uncommitted resources bumps the next available id. const int64 kUncommittedIds[] = {0, 1, 3, 5, 6, 10}; EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteUncommittedResourceIds(std::set<int64>( kUncommittedIds, kUncommittedIds + arraysize(kUncommittedIds)))); EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds(&ids.reg_id, &ids.ver_id, &ids.res_id)); EXPECT_EQ(101, ids.reg_id); EXPECT_EQ(201, ids.ver_id); EXPECT_EQ(11, ids.res_id); // Writing purgeable resources bumps the next available id. const int64 kPurgeableIds[] = {4, 12, 16, 17, 20}; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUncommittedResourceIds(std::set<int64>( kPurgeableIds, kPurgeableIds + arraysize(kPurgeableIds)))); EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds(&ids.reg_id, &ids.ver_id, &ids.res_id)); EXPECT_EQ(101, ids.reg_id); EXPECT_EQ(201, ids.ver_id); EXPECT_EQ(21, ids.res_id); // Writing a registration whose ids are lower than the stored ones should not // bump the next available ids. 
RegistrationData data2; data2.registration_id = 10; data2.scope = URL(origin, "/bar"); data2.script = URL(origin, "/script2.js"); data2.version_id = 20; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources, &deleted_version, &newly_purgeable_resources)); // Same with resources. int64 kLowResourceId = 15; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUncommittedResourceIds( std::set<int64>(&kLowResourceId, &kLowResourceId + 1))); // Close and reopen the database to verify the stored values. database.reset(CreateDatabase(database_dir.path())); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds( &ids.reg_id, &ids.ver_id, &ids.res_id)); EXPECT_EQ(101, ids.reg_id); EXPECT_EQ(201, ids.ver_id); EXPECT_EQ(21, ids.res_id); } TEST(ServiceWorkerDatabaseTest, GetOriginsWithRegistrations) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); std::set<GURL> origins; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetOriginsWithRegistrations(&origins)); EXPECT_TRUE(origins.empty()); std::vector<Resource> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; GURL origin1("http://example.com"); RegistrationData data1; data1.registration_id = 123; data1.scope = URL(origin1, "/foo"); data1.script = URL(origin1, "/script1.js"); data1.version_id = 456; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources, &deleted_version, &newly_purgeable_resources)); GURL origin2("https://www.example.com"); RegistrationData data2; data2.registration_id = 234; data2.scope = URL(origin2, "/bar"); data2.script = URL(origin2, "/script2.js"); data2.version_id = 567; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources, &deleted_version, &newly_purgeable_resources)); GURL origin3("https://example.org"); RegistrationData data3; data3.registration_id = 345; data3.scope = 
URL(origin3, "/hoge"); data3.script = URL(origin3, "/script3.js"); data3.version_id = 678; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data3, resources, &deleted_version, &newly_purgeable_resources)); // |origin3| has two registrations. RegistrationData data4; data4.registration_id = 456; data4.scope = URL(origin3, "/fuga"); data4.script = URL(origin3, "/script4.js"); data4.version_id = 789; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data4, resources, &deleted_version, &newly_purgeable_resources)); origins.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetOriginsWithRegistrations(&origins)); EXPECT_EQ(3U, origins.size()); EXPECT_TRUE(ContainsKey(origins, origin1)); EXPECT_TRUE(ContainsKey(origins, origin2)); EXPECT_TRUE(ContainsKey(origins, origin3)); // |origin3| has another registration, so should not remove it from the // unique origin list. ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration(data4.registration_id, origin3, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(data4.registration_id, deleted_version.registration_id); origins.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetOriginsWithRegistrations(&origins)); EXPECT_EQ(3U, origins.size()); EXPECT_TRUE(ContainsKey(origins, origin1)); EXPECT_TRUE(ContainsKey(origins, origin2)); EXPECT_TRUE(ContainsKey(origins, origin3)); // |origin3| should be removed from the unique origin list. 
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration(data3.registration_id, origin3, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(data3.registration_id, deleted_version.registration_id); origins.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetOriginsWithRegistrations(&origins)); EXPECT_EQ(2U, origins.size()); EXPECT_TRUE(ContainsKey(origins, origin1)); EXPECT_TRUE(ContainsKey(origins, origin2)); } TEST(ServiceWorkerDatabaseTest, GetRegistrationsForOrigin) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); GURL origin1("http://example.com"); GURL origin2("https://www.example.com"); GURL origin3("https://example.org"); std::vector<RegistrationData> registrations; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetRegistrationsForOrigin(origin1, &registrations)); EXPECT_TRUE(registrations.empty()); std::vector<Resource> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; RegistrationData data1; data1.registration_id = 100; data1.scope = URL(origin1, "/foo"); data1.script = URL(origin1, "/script1.js"); data1.version_id = 1000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources, &deleted_version, &newly_purgeable_resources)); RegistrationData data2; data2.registration_id = 200; data2.scope = URL(origin2, "/bar"); data2.script = URL(origin2, "/script2.js"); data2.version_id = 2000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources, &deleted_version, &newly_purgeable_resources)); RegistrationData data3; data3.registration_id = 300; data3.scope = URL(origin3, "/hoge"); data3.script = URL(origin3, "/script3.js"); data3.version_id = 3000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data3, resources, &deleted_version, &newly_purgeable_resources)); // |origin3| has two registrations. 
RegistrationData data4; data4.registration_id = 400; data4.scope = URL(origin3, "/fuga"); data4.script = URL(origin3, "/script4.js"); data4.version_id = 4000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data4, resources, &deleted_version, &newly_purgeable_resources)); registrations.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetRegistrationsForOrigin(origin3, &registrations)); EXPECT_EQ(2U, registrations.size()); VerifyRegistrationData(data3, registrations[0]); VerifyRegistrationData(data4, registrations[1]); } TEST(ServiceWorkerDatabaseTest, GetAllRegistrations) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); std::vector<RegistrationData> registrations; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetAllRegistrations(&registrations)); EXPECT_TRUE(registrations.empty()); std::vector<Resource> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; GURL origin1("http://www1.example.com"); RegistrationData data1; data1.registration_id = 100; data1.scope = URL(origin1, "/foo"); data1.script = URL(origin1, "/script1.js"); data1.version_id = 1000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources, &deleted_version, &newly_purgeable_resources)); GURL origin2("http://www2.example.com"); RegistrationData data2; data2.registration_id = 200; data2.scope = URL(origin2, "/bar"); data2.script = URL(origin2, "/script2.js"); data2.version_id = 2000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources, &deleted_version, &newly_purgeable_resources)); GURL origin3("http://www3.example.com"); RegistrationData data3; data3.registration_id = 300; data3.scope = URL(origin3, "/hoge"); data3.script = URL(origin3, "/script3.js"); data3.version_id = 3000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data3, resources, &deleted_version, 
&newly_purgeable_resources)); // |origin3| has two registrations. RegistrationData data4; data4.registration_id = 400; data4.scope = URL(origin3, "/fuga"); data4.script = URL(origin3, "/script4.js"); data4.version_id = 4000; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data4, resources, &deleted_version, &newly_purgeable_resources)); registrations.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetAllRegistrations(&registrations)); EXPECT_EQ(4U, registrations.size()); VerifyRegistrationData(data1, registrations[0]); VerifyRegistrationData(data2, registrations[1]); VerifyRegistrationData(data3, registrations[2]); VerifyRegistrationData(data4, registrations[3]); } TEST(ServiceWorkerDatabaseTest, Registration_Basic) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); GURL origin("http://example.com"); RegistrationData data; data.registration_id = 100; data.scope = URL(origin, "/foo"); data.script = URL(origin, "/script.js"); data.version_id = 200; data.resources_total_size_bytes = 10939 + 200; std::vector<Resource> resources; resources.push_back(CreateResource(1, URL(origin, "/resource1"), 10939)); resources.push_back(CreateResource(2, URL(origin, "/resource2"), 200)); // Write a resource to the uncommitted list to make sure that writing // registration removes resource ids associated with the registration from // the uncommitted list. 
std::set<int64> uncommitted_ids; uncommitted_ids.insert(resources[0].resource_id); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUncommittedResourceIds(uncommitted_ids)); std::set<int64> uncommitted_ids_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetUncommittedResourceIds(&uncommitted_ids_out)); EXPECT_EQ(uncommitted_ids, uncommitted_ids_out); ServiceWorkerDatabase::RegistrationData deleted_version; deleted_version.version_id = 222; // Dummy initial value std::vector<int64> newly_purgeable_resources; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data, resources, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id); EXPECT_TRUE(newly_purgeable_resources.empty()); // Make sure that the registration and resource records are stored. RegistrationData data_out; std::vector<Resource> resources_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(data, data_out); VerifyResourceRecords(resources, resources_out); GURL origin_out; EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->ReadRegistrationOrigin(data.registration_id, &origin_out)); EXPECT_EQ(origin, origin_out); // Make sure that the resource is removed from the uncommitted list. uncommitted_ids_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetUncommittedResourceIds(&uncommitted_ids_out)); EXPECT_TRUE(uncommitted_ids_out.empty()); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration(data.registration_id, origin, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(data.version_id, deleted_version.version_id); ASSERT_EQ(resources.size(), newly_purgeable_resources.size()); for (size_t i = 0; i < resources.size(); ++i) EXPECT_EQ(newly_purgeable_resources[i], resources[i].resource_id); // Make sure that the registration and resource records are gone. 
resources_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistration( data.registration_id, origin, &data_out, &resources_out)); EXPECT_TRUE(resources_out.empty()); EXPECT_EQ( ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistrationOrigin(data.registration_id, &origin_out)); // Resources should be purgeable because these are no longer referred. std::set<int64> purgeable_ids_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&purgeable_ids_out)); EXPECT_EQ(2u, purgeable_ids_out.size()); EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources[0].resource_id)); EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources[1].resource_id)); } TEST(ServiceWorkerDatabaseTest, DeleteNonExistentRegistration) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); GURL origin("http://example.com"); RegistrationData data; data.registration_id = 100; data.scope = URL(origin, "/foo"); data.script = URL(origin, "/script.js"); data.version_id = 200; data.resources_total_size_bytes = 19 + 29129; std::vector<Resource> resources; resources.push_back(CreateResource(1, URL(origin, "/resource1"), 19)); resources.push_back(CreateResource(2, URL(origin, "/resource2"), 29129)); const int64 kNonExistentRegistrationId = 999; const int64 kArbitraryVersionId = 222; // Used as a dummy initial value ServiceWorkerDatabase::RegistrationData deleted_version; deleted_version.version_id = kArbitraryVersionId; std::vector<int64> newly_purgeable_resources; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data, resources, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id); EXPECT_TRUE(newly_purgeable_resources.empty()); // Delete from an origin that has a registration. 
deleted_version.version_id = kArbitraryVersionId; newly_purgeable_resources.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration(kNonExistentRegistrationId, origin, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id); EXPECT_TRUE(newly_purgeable_resources.empty()); // Delete from an origin that has no registration. deleted_version.version_id = kArbitraryVersionId; newly_purgeable_resources.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration(kNonExistentRegistrationId, GURL("http://example.net"), &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id); EXPECT_TRUE(newly_purgeable_resources.empty()); } TEST(ServiceWorkerDatabaseTest, Registration_Overwrite) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); GURL origin("http://example.com"); RegistrationData data; data.registration_id = 100; data.scope = URL(origin, "/foo"); data.script = URL(origin, "/script.js"); data.version_id = 200; data.resources_total_size_bytes = 10 + 11; std::vector<Resource> resources1; resources1.push_back(CreateResource(1, URL(origin, "/resource1"), 10)); resources1.push_back(CreateResource(2, URL(origin, "/resource2"), 11)); ServiceWorkerDatabase::RegistrationData deleted_version; deleted_version.version_id = 222; // Dummy inital value std::vector<int64> newly_purgeable_resources; EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data, resources1, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id); EXPECT_TRUE(newly_purgeable_resources.empty()); // Make sure that the registration and resource records are stored. 
RegistrationData data_out; std::vector<Resource> resources_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(data, data_out); VerifyResourceRecords(resources1, resources_out); // Update the registration. RegistrationData updated_data = data; updated_data.version_id = data.version_id + 1; updated_data.resources_total_size_bytes = 12 + 13; std::vector<Resource> resources2; resources2.push_back(CreateResource(3, URL(origin, "/resource3"), 12)); resources2.push_back(CreateResource(4, URL(origin, "/resource4"), 13)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration(updated_data, resources2, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(data.version_id, deleted_version.version_id); ASSERT_EQ(resources1.size(), newly_purgeable_resources.size()); for (size_t i = 0; i < resources1.size(); ++i) EXPECT_EQ(newly_purgeable_resources[i], resources1[i].resource_id); // Make sure that |updated_data| is stored and resources referred from |data| // is moved to the purgeable list. resources_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( updated_data.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(updated_data, data_out); VerifyResourceRecords(resources2, resources_out); std::set<int64> purgeable_ids_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&purgeable_ids_out)); EXPECT_EQ(2u, purgeable_ids_out.size()); EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[0].resource_id)); EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[1].resource_id)); } TEST(ServiceWorkerDatabaseTest, Registration_Multiple) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); GURL origin("http://example.com"); ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; // Add registration1. 
RegistrationData data1; data1.registration_id = 100; data1.scope = URL(origin, "/foo"); data1.script = URL(origin, "/script1.js"); data1.version_id = 200; data1.resources_total_size_bytes = 1451 + 15234; std::vector<Resource> resources1; resources1.push_back(CreateResource(1, URL(origin, "/resource1"), 1451)); resources1.push_back(CreateResource(2, URL(origin, "/resource2"), 15234)); EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources1, &deleted_version, &newly_purgeable_resources)); // Add registration2. RegistrationData data2; data2.registration_id = 101; data2.scope = URL(origin, "/bar"); data2.script = URL(origin, "/script2.js"); data2.version_id = 201; data2.resources_total_size_bytes = 5 + 6; std::vector<Resource> resources2; resources2.push_back(CreateResource(3, URL(origin, "/resource3"), 5)); resources2.push_back(CreateResource(4, URL(origin, "/resource4"), 6)); EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources2, &deleted_version, &newly_purgeable_resources)); // Make sure that registration1 is stored. RegistrationData data_out; std::vector<Resource> resources_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data1.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(data1, data_out); VerifyResourceRecords(resources1, resources_out); GURL origin_out; EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->ReadRegistrationOrigin(data1.registration_id, &origin_out)); EXPECT_EQ(origin, origin_out); // Make sure that registration2 is also stored. 
resources_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data2.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(data2, data_out); VerifyResourceRecords(resources2, resources_out); EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->ReadRegistrationOrigin(data2.registration_id, &origin_out)); EXPECT_EQ(origin, origin_out); std::set<int64> purgeable_ids_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&purgeable_ids_out)); EXPECT_TRUE(purgeable_ids_out.empty()); // Delete registration1. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration(data1.registration_id, origin, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(data1.registration_id, deleted_version.registration_id); // Make sure that registration1 is gone. resources_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistration( data1.registration_id, origin, &data_out, &resources_out)); EXPECT_TRUE(resources_out.empty()); EXPECT_EQ( ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistrationOrigin(data1.registration_id, &origin_out)); purgeable_ids_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&purgeable_ids_out)); EXPECT_EQ(2u, purgeable_ids_out.size()); EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[0].resource_id)); EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[1].resource_id)); // Make sure that registration2 is still alive. 
resources_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data2.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(data2, data_out); VerifyResourceRecords(resources2, resources_out); EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->ReadRegistrationOrigin(data2.registration_id, &origin_out)); EXPECT_EQ(origin, origin_out); } TEST(ServiceWorkerDatabaseTest, Registration_UninitializedDatabase) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); const GURL origin("http://example.com"); // Should be failed because the database does not exist. RegistrationData data_out; std::vector<Resource> resources_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistration( 100, origin, &data_out, &resources_out)); EXPECT_EQ(kInvalidServiceWorkerRegistrationId, data_out.registration_id); EXPECT_TRUE(resources_out.empty()); GURL origin_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistrationOrigin(100, &origin_out)); // Deleting non-existent registration should succeed. RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration( 100, origin, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id); EXPECT_TRUE(newly_purgeable_resources.empty()); // Actually create a new database, but not initialized yet. database->LazyOpen(true); // Should be failed because the database is not initialized. 
ASSERT_EQ(ServiceWorkerDatabase::UNINITIALIZED, database->state_); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistration( 100, origin, &data_out, &resources_out)); EXPECT_EQ(kInvalidServiceWorkerRegistrationId, data_out.registration_id); EXPECT_TRUE(resources_out.empty()); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistrationOrigin(100, &origin_out)); // Deleting non-existent registration should succeed. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration( 100, origin, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id); EXPECT_TRUE(newly_purgeable_resources.empty()); } TEST(ServiceWorkerDatabaseTest, UserData_Basic) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); const GURL kOrigin("http://example.com"); // Add a registration. RegistrationData data; data.registration_id = 100; data.scope = URL(kOrigin, "/foo"); data.script = URL(kOrigin, "/script.js"); data.version_id = 200; std::vector<Resource> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data, resources, &deleted_version, &newly_purgeable_resources)); // Write user data associated with the stored registration. std::string user_data_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data.registration_id, kOrigin, "key1", "data")); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data.registration_id, "key1", &user_data_out)); EXPECT_EQ("data", user_data_out); // Writing user data not associated with the stored registration should be // failed. EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->WriteUserData(300, kOrigin, "key1", "data")); // Write empty user data for a different key. 
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data.registration_id, kOrigin, "key2", std::string())); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data.registration_id, "key2", &user_data_out)); EXPECT_EQ(std::string(), user_data_out); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data.registration_id, "key1", &user_data_out)); EXPECT_EQ("data", user_data_out); // Overwrite the existing user data. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data.registration_id, kOrigin, "key1", "overwrite")); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data.registration_id, "key1", &user_data_out)); EXPECT_EQ("overwrite", user_data_out); // Delete the user data. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteUserData(data.registration_id, "key1")); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data.registration_id, "key1", &user_data_out)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data.registration_id, "key2", &user_data_out)); EXPECT_EQ(std::string(), user_data_out); } TEST(ServiceWorkerDatabaseTest, UserData_DataIsolation) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); const GURL kOrigin("http://example.com"); // Add registration 1. RegistrationData data1; data1.registration_id = 100; data1.scope = URL(kOrigin, "/foo"); data1.script = URL(kOrigin, "/script1.js"); data1.version_id = 200; // Add registration 2. 
RegistrationData data2; data2.registration_id = 101; data2.scope = URL(kOrigin, "/bar"); data2.script = URL(kOrigin, "/script2.js"); data2.version_id = 201; std::vector<Resource> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources, &deleted_version, &newly_purgeable_resources)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources, &deleted_version, &newly_purgeable_resources)); // Write user data associated with the registration1. std::string user_data_out; ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data1.registration_id, kOrigin, "key", "data1")); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data1.registration_id, "key", &user_data_out)); EXPECT_EQ("data1", user_data_out); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data2.registration_id, "key", &user_data_out)); // Write user data associated with the registration2. This shouldn't overwrite // the data associated with registration1. ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data2.registration_id, kOrigin, "key", "data2")); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data1.registration_id, "key", &user_data_out)); EXPECT_EQ("data1", user_data_out); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data2.registration_id, "key", &user_data_out)); EXPECT_EQ("data2", user_data_out); // Get all registrations with user data. 
std::vector<std::pair<int64, std::string>> user_data_list; ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserDataForAllRegistrations("key", &user_data_list)); EXPECT_EQ(2u, user_data_list.size()); EXPECT_EQ(data1.registration_id, user_data_list[0].first); EXPECT_EQ("data1", user_data_list[0].second); EXPECT_EQ(data2.registration_id, user_data_list[1].first); EXPECT_EQ("data2", user_data_list[1].second); // Delete the data associated with the registration2. This shouldn't delete // the data associated with registration1. ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteUserData(data2.registration_id, "key")); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data1.registration_id, "key", &user_data_out)); EXPECT_EQ("data1", user_data_out); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data2.registration_id, "key", &user_data_out)); // And again get all registrations with user data. user_data_list.clear(); ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserDataForAllRegistrations("key", &user_data_list)); EXPECT_EQ(1u, user_data_list.size()); EXPECT_EQ(data1.registration_id, user_data_list[0].first); EXPECT_EQ("data1", user_data_list[0].second); } TEST(ServiceWorkerDatabaseTest, UserData_DeleteRegistration) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); const GURL kOrigin("http://example.com"); // Add registration 1. RegistrationData data1; data1.registration_id = 100; data1.scope = URL(kOrigin, "/foo"); data1.script = URL(kOrigin, "/script1.js"); data1.version_id = 200; // Add registration 2. 
RegistrationData data2; data2.registration_id = 101; data2.scope = URL(kOrigin, "/bar"); data2.script = URL(kOrigin, "/script2.js"); data2.version_id = 201; std::vector<Resource> resources; ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources, &deleted_version, &newly_purgeable_resources)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources, &deleted_version, &newly_purgeable_resources)); // Write user data associated with the registration1. std::string user_data_out; ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data1.registration_id, kOrigin, "key1", "data1")); ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data1.registration_id, kOrigin, "key2", "data2")); ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data1.registration_id, "key1", &user_data_out)); ASSERT_EQ("data1", user_data_out); ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data1.registration_id, "key2", &user_data_out)); ASSERT_EQ("data2", user_data_out); // Write user data associated with the registration2. ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data2.registration_id, kOrigin, "key3", "data3")); ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data2.registration_id, "key3", &user_data_out)); ASSERT_EQ("data3", user_data_out); // Delete all data associated with the registration1. This shouldn't delete // the data associated with registration2. 
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration( data1.registration_id, kOrigin, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data1.registration_id, "key1", &user_data_out)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data1.registration_id, "key2", &user_data_out)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadUserData( data2.registration_id, "key3", &user_data_out)); EXPECT_EQ("data3", user_data_out); } TEST(ServiceWorkerDatabaseTest, UserData_UninitializedDatabase) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); const GURL kOrigin("http://example.com"); // Should be failed because the database does not exist. std::string user_data_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData(100, "key", &user_data_out)); // Should be failed because the associated registration does not exist. EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->WriteUserData(100, kOrigin, "key", "data")); // Deleting non-existent entry should succeed. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteUserData(100, "key")); // Actually create a new database, but not initialized yet. database->LazyOpen(true); // Should be failed because the database is not initialized. ASSERT_EQ(ServiceWorkerDatabase::UNINITIALIZED, database->state_); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData(100, "key", &user_data_out)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->WriteUserData(100, kOrigin, "key", "data")); // Deleting non-existent entry should succeed. 
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteUserData(100, "key")); } TEST(ServiceWorkerDatabaseTest, UpdateVersionToActive) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); GURL origin("http://example.com"); ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; // Should be false because a registration does not exist. EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->UpdateVersionToActive(0, origin)); // Add a registration. RegistrationData data; data.registration_id = 100; data.scope = URL(origin, "/foo"); data.script = URL(origin, "/script.js"); data.version_id = 200; data.is_active = false; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration(data, std::vector<Resource>(), &deleted_version, &newly_purgeable_resources)); // Make sure that the registration is stored. RegistrationData data_out; std::vector<Resource> resources_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(data, data_out); EXPECT_TRUE(resources_out.empty()); // Activate the registration. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->UpdateVersionToActive(data.registration_id, origin)); // Make sure that the registration is activated. resources_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data.registration_id, origin, &data_out, &resources_out)); RegistrationData expected_data = data; expected_data.is_active = true; VerifyRegistrationData(expected_data, data_out); EXPECT_TRUE(resources_out.empty()); // Delete the registration. EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteRegistration(data.registration_id, origin, &deleted_version, &newly_purgeable_resources)); EXPECT_EQ(data.registration_id, deleted_version.registration_id); // Should be false because the registration is gone. 
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->UpdateVersionToActive(data.registration_id, origin)); } TEST(ServiceWorkerDatabaseTest, UpdateLastCheckTime) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); GURL origin("http://example.com"); ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; // Should be false because a registration does not exist. EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->UpdateLastCheckTime(0, origin, base::Time::Now())); // Add a registration. RegistrationData data; data.registration_id = 100; data.scope = URL(origin, "/foo"); data.script = URL(origin, "/script.js"); data.version_id = 200; data.last_update_check = base::Time::Now(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration(data, std::vector<Resource>(), &deleted_version, &newly_purgeable_resources)); // Make sure that the registration is stored. RegistrationData data_out; std::vector<Resource> resources_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data.registration_id, origin, &data_out, &resources_out)); VerifyRegistrationData(data, data_out); EXPECT_TRUE(resources_out.empty()); // Update the last check time. base::Time updated_time = base::Time::Now(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->UpdateLastCheckTime( data.registration_id, origin, updated_time)); // Make sure that the registration is updated. resources_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data.registration_id, origin, &data_out, &resources_out)); RegistrationData expected_data = data; expected_data.last_update_check = updated_time; VerifyRegistrationData(expected_data, data_out); EXPECT_TRUE(resources_out.empty()); // Delete the registration. 
// (Tail of UpdateLastCheckTime.) Delete the registration; the delete must
// report which registration/version went away.
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->DeleteRegistration(data.registration_id, origin,
                                         &deleted_version,
                                         &newly_purgeable_resources));
  EXPECT_EQ(data.registration_id, deleted_version.registration_id);

  // Should be false because the registration is gone.
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
            database->UpdateLastCheckTime(
                data.registration_id, origin, base::Time::Now()));
}

// Exercises the uncommitted resource id list: successive writes accumulate
// (set union) and clears remove a subset (set difference).
TEST(ServiceWorkerDatabaseTest, UncommittedResourceIds) {
  scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());

  // Write {1, 2, 3}.
  std::set<int64> ids1;
  ids1.insert(1);
  ids1.insert(2);
  ids1.insert(3);
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->WriteUncommittedResourceIds(ids1));

  std::set<int64> ids_out;
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->GetUncommittedResourceIds(&ids_out));
  EXPECT_EQ(ids1, ids_out);

  // Write {2, 4}.
  std::set<int64> ids2;
  ids2.insert(2);
  ids2.insert(4);
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->WriteUncommittedResourceIds(ids2));

  ids_out.clear();
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->GetUncommittedResourceIds(&ids_out));
  // Ids accumulate across writes: expect {1, 2, 3, 4}.
  std::set<int64> expected = base::STLSetUnion<std::set<int64> >(ids1, ids2);
  EXPECT_EQ(expected, ids_out);

  // Delete {2, 3}.
  std::set<int64> ids3;
  ids3.insert(2);
  ids3.insert(3);
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->ClearUncommittedResourceIds(ids3));

  ids_out.clear();
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->GetUncommittedResourceIds(&ids_out));
  // Remaining ids: {1, 4}.
  expected = base::STLSetDifference<std::set<int64> >(expected, ids3);
  EXPECT_EQ(expected, ids_out);
}

// Same union/difference contract as UncommittedResourceIds, but for the
// purgeable resource id list.
TEST(ServiceWorkerDatabaseTest, PurgeableResourceIds) {
  scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());

  // Write {1, 2, 3}.
std::set<int64> ids1; ids1.insert(1); ids1.insert(2); ids1.insert(3); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WritePurgeableResourceIds(ids1)); std::set<int64> ids_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&ids_out)); EXPECT_EQ(ids1, ids_out); // Write {2, 4}. std::set<int64> ids2; ids2.insert(2); ids2.insert(4); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WritePurgeableResourceIds(ids2)); ids_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&ids_out)); std::set<int64> expected = base::STLSetUnion<std::set<int64> >(ids1, ids2); EXPECT_EQ(expected, ids_out); // Delete {2, 3}. std::set<int64> ids3; ids3.insert(2); ids3.insert(3); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ClearPurgeableResourceIds(ids3)); ids_out.clear(); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&ids_out)); expected = base::STLSetDifference<std::set<int64> >(expected, ids3); EXPECT_EQ(expected, ids_out); } TEST(ServiceWorkerDatabaseTest, DeleteAllDataForOrigin) { scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory()); ServiceWorkerDatabase::RegistrationData deleted_version; std::vector<int64> newly_purgeable_resources; // Data associated with |origin1| will be removed. GURL origin1("http://example.com"); GURL origin2("http://example.org"); // |origin1| has two registrations (registration1 and registration2). 
RegistrationData data1; data1.registration_id = 10; data1.scope = URL(origin1, "/foo"); data1.script = URL(origin1, "/script1.js"); data1.version_id = 100; data1.resources_total_size_bytes = 2013 + 512; std::vector<Resource> resources1; resources1.push_back(CreateResource(1, URL(origin1, "/resource1"), 2013)); resources1.push_back(CreateResource(2, URL(origin1, "/resource2"), 512)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data1, resources1, &deleted_version, &newly_purgeable_resources)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data1.registration_id, origin1, "key1", "data1")); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data1.registration_id, origin1, "key2", "data2")); RegistrationData data2; data2.registration_id = 11; data2.scope = URL(origin1, "/bar"); data2.script = URL(origin1, "/script2.js"); data2.version_id = 101; data2.resources_total_size_bytes = 4 + 5; std::vector<Resource> resources2; resources2.push_back(CreateResource(3, URL(origin1, "/resource3"), 4)); resources2.push_back(CreateResource(4, URL(origin1, "/resource4"), 5)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data2, resources2, &deleted_version, &newly_purgeable_resources)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data2.registration_id, origin1, "key3", "data3")); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data2.registration_id, origin1, "key4", "data4")); // |origin2| has one registration (registration3). 
RegistrationData data3; data3.registration_id = 12; data3.scope = URL(origin2, "/hoge"); data3.script = URL(origin2, "/script3.js"); data3.version_id = 102; data3.resources_total_size_bytes = 6 + 7; std::vector<Resource> resources3; resources3.push_back(CreateResource(5, URL(origin2, "/resource5"), 6)); resources3.push_back(CreateResource(6, URL(origin2, "/resource6"), 7)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteRegistration( data3, resources3, &deleted_version, &newly_purgeable_resources)); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data3.registration_id, origin2, "key5", "data5")); ASSERT_EQ( ServiceWorkerDatabase::STATUS_OK, database->WriteUserData( data3.registration_id, origin2, "key6", "data6")); std::set<GURL> origins_to_delete; origins_to_delete.insert(origin1); EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DeleteAllDataForOrigins(origins_to_delete, &newly_purgeable_resources)); // |origin1| should be removed from the unique origin list. std::set<GURL> unique_origins; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetOriginsWithRegistrations(&unique_origins)); EXPECT_EQ(1u, unique_origins.size()); EXPECT_TRUE(ContainsKey(unique_origins, origin2)); // The registrations for |origin1| should be removed. std::vector<RegistrationData> registrations; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetRegistrationsForOrigin(origin1, &registrations)); EXPECT_TRUE(registrations.empty()); GURL origin_out; EXPECT_EQ( ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadRegistrationOrigin(data1.registration_id, &origin_out)); // The registration for |origin2| should not be removed. 
RegistrationData data_out; std::vector<Resource> resources_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration( data3.registration_id, origin2, &data_out, &resources_out)); VerifyRegistrationData(data3, data_out); VerifyResourceRecords(resources3, resources_out); EXPECT_EQ( ServiceWorkerDatabase::STATUS_OK, database->ReadRegistrationOrigin(data3.registration_id, &origin_out)); EXPECT_EQ(origin2, origin_out); // The resources associated with |origin1| should be purgeable. std::set<int64> purgeable_ids_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetPurgeableResourceIds(&purgeable_ids_out)); EXPECT_EQ(4u, purgeable_ids_out.size()); EXPECT_TRUE(ContainsKey(purgeable_ids_out, 1)); EXPECT_TRUE(ContainsKey(purgeable_ids_out, 2)); EXPECT_TRUE(ContainsKey(purgeable_ids_out, 3)); EXPECT_TRUE(ContainsKey(purgeable_ids_out, 4)); // The user data associated with |origin1| should be removed. std::string user_data_out; EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data1.registration_id, "key1", &user_data_out)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data1.registration_id, "key2", &user_data_out)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data2.registration_id, "key3", &user_data_out)); EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND, database->ReadUserData( data2.registration_id, "key4", &user_data_out)); // The user data associated with |origin2| should not be removed. 
// (Tail of DeleteAllDataForOrigin.) User data for the untouched origin
// (|origin2| / registration3) must survive the per-origin wipe.
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->ReadUserData(
                data3.registration_id, "key5", &user_data_out));
  EXPECT_EQ("data5", user_data_out);
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
            database->ReadUserData(
                data3.registration_id, "key6", &user_data_out));
  EXPECT_EQ("data6", user_data_out);
}

// Destroying the database must remove its on-disk directory entirely.
TEST(ServiceWorkerDatabaseTest, DestroyDatabase) {
  base::ScopedTempDir database_dir;
  ASSERT_TRUE(database_dir.CreateUniqueTempDir());
  scoped_ptr<ServiceWorkerDatabase> database(
      CreateDatabase(database_dir.path()));

  // LazyOpen(true) actually creates the database directory on disk.
  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true));
  ASSERT_TRUE(base::DirectoryExists(database_dir.path()));

  EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DestroyDatabase());
  ASSERT_FALSE(base::DirectoryExists(database_dir.path()));
}

}  // namespace content
CTSRD-SOAAP/chromium-42.0.2311.135
content/browser/service_worker/service_worker_database_unittest.cc
C++
bsd-3-clause
59,746
package org.hisp.dhis.system.collection; /* * Copyright (c) 2004-2017, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import java.util.HashMap; import java.util.Map; import org.hisp.dhis.scheduling.TaskId; /** * @author Lars Helge Overland */ public class TaskLocalMap<T, V> { private final Map<TaskId, Map<T, V>> internalMap; public TaskLocalMap() { this.internalMap = new HashMap<>(); } public Map<T, V> get( TaskId id ) { Map<T, V> map = internalMap.get( id ); if ( map == null ) { map = new HashMap<>(); internalMap.put( id, map ); } return map; } public boolean clear( TaskId id ) { return internalMap.remove( id ) != null; } }
troyel/dhis2-core
dhis-2/dhis-support/dhis-support-system/src/main/java/org/hisp/dhis/system/collection/TaskLocalMap.java
Java
bsd-3-clause
2,264
/******************************************************* * Copyright (c) 2014, ArrayFire * All rights reserved. * * This file is distributed under 3-clause BSD license. * The complete license agreement can be obtained at: * http://arrayfire.com/licenses/BSD-3-Clause ********************************************************/ #pragma once #include <kernel_headers/hsv_rgb.hpp> #include <program.hpp> #include <traits.hpp> #include <string> #include <mutex> #include <map> #include <dispatch.hpp> #include <Param.hpp> #include <debug_opencl.hpp> using cl::Buffer; using cl::Program; using cl::Kernel; using cl::KernelFunctor; using cl::EnqueueArgs; using cl::NDRange; using std::string; namespace opencl { namespace kernel { static const int THREADS_X = 16; static const int THREADS_Y = 16; template<typename T, bool isHSV2RGB> void hsv2rgb_convert(Param out, const Param in) { try { static std::once_flag compileFlags[DeviceManager::MAX_DEVICES]; static std::map<int, Program*> hrProgs; static std::map<int, Kernel*> hrKernels; int device = getActiveDeviceId(); std::call_once( compileFlags[device], [device] () { std::ostringstream options; options << " -D T=" << dtype_traits<T>::getName(); if(isHSV2RGB) options << " -D isHSV2RGB"; if (std::is_same<T, double>::value) { options << " -D USE_DOUBLE"; } Program prog; buildProgram(prog, hsv_rgb_cl, hsv_rgb_cl_len, options.str()); hrProgs[device] = new Program(prog); hrKernels[device] = new Kernel(*hrProgs[device], "convert"); }); NDRange local(THREADS_X, THREADS_Y); int blk_x = divup(in.info.dims[0], THREADS_X); int blk_y = divup(in.info.dims[1], THREADS_Y); // all images are three channels, so batch // parameter would be along 4th dimension NDRange global(blk_x * in.info.dims[3] * THREADS_X, blk_y * THREADS_Y); auto hsvrgbOp = KernelFunctor<Buffer, KParam, Buffer, KParam, int> (*hrKernels[device]); hsvrgbOp(EnqueueArgs(getQueue(), global, local), *out.data, out.info, *in.data, in.info, blk_x); CL_DEBUG_FINISH(getQueue()); } catch 
(cl::Error err) { CL_TO_AF_ERROR(err); throw; } } } }
ghisvail/arrayfire
src/backend/opencl/kernel/hsv_rgb.hpp
C++
bsd-3-clause
2,405
tinyMCE.init({ theme: "advanced", mode: "specific_textareas", editor_selector: "tinymce", plugins: "fullscreen,autoresize,searchreplace,mediapicker,inlinepopups", theme_advanced_toolbar_location: "top", theme_advanced_toolbar_align: "left", theme_advanced_buttons1: "search,replace,|,cut,copy,paste,|,undo,redo,|,mediapicker,|,link,unlink,charmap,emoticon,codeblock,|,bold,italic,|,numlist,bullist,formatselect,|,code,fullscreen", theme_advanced_buttons2: "", theme_advanced_buttons3: "", convert_urls: false, valid_elements: "*[*]", // shouldn't be needed due to the valid_elements setting, but TinyMCE would strip script.src without it. extended_valid_elements: "script[type|defer|src|language]" });
BankNotes/Web
src/Orchard.Web/Modules/TinyMce/Scripts/orchard-tinymce.js
JavaScript
bsd-3-clause
772
#region License // // Copyright (c) 2013, Kooboo team // // Licensed under the BSD License // See the file LICENSE.txt for details. // #endregion using Kooboo.CMS.Sites.Services; using System; using System.Collections.Generic; using System.Linq; using System.Web; namespace Kooboo.CMS.Web.Areas.Sites.Menu { public class CustomFileFolderMenuItems : FileFolderMenuItems { public override FileManager FileManager { get { return Kooboo.CMS.Common.Runtime.EngineContext.Current.Resolve<CustomFileManager>(); } } } }
lingxyd/CMS
Kooboo.CMS/Kooboo.CMS.Web/Areas/Sites/Menu/CustomFileFolderMenuItems.cs
C#
bsd-3-clause
566
// PhantomJS is missing Function.prototype.bind: // http://code.google.com/p/phantomjs/issues/detail?id=522 if (!Function.prototype.bind) { Function.prototype.bind = function (oThis) { if (typeof this !== "function") { // closest thing possible to the ECMAScript 5 internal IsCallable function throw new TypeError("Function.prototype.bind - what is trying to be bound is not callable"); } var aArgs = Array.prototype.slice.call(arguments, 1), fToBind = this, fNOP = function () {}, fBound = function () { return fToBind.apply(this instanceof fNOP && oThis ? this : oThis, aArgs.concat(Array.prototype.slice.call(arguments))); }; fNOP.prototype = this.prototype; fBound.prototype = new fNOP(); return fBound; }; }
vince0508/wegoto_iD
test/lib/bind-shim.js
JavaScript
isc
897
/** * Contains bank-related classes. */ package org.apollo.game.model.inter.bank;
DealerNextDoor/ApolloDev
src/org/apollo/game/model/inter/bank/package-info.java
Java
isc
83
export default function identity(x) { return x; }
d3/d3-arrays
src/identity.js
JavaScript
isc
52
// Copyright Louis Dionne 2013-2017 // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt) #include <boost/hana/basic_tuple.hpp> namespace hana = boost::hana; template <int i> struct x { }; int main() { constexpr hana::basic_tuple<> empty{}; (void)empty; constexpr hana::basic_tuple<int, float> xs{1, 2.3f}; constexpr auto ys = hana::basic_tuple<int, float>{1, 2.3f}; constexpr auto copy = ys; (void)copy; }
nawawi/poedit
deps/boost/libs/hana/test/basic_tuple/construct.cpp
C++
mit
519
# Copyright (C) 2003 Python Software Foundation import unittest import shutil import tempfile import sys import stat import os import os.path from os.path import splitdrive from distutils.spawn import find_executable, spawn from shutil import (_make_tarball, _make_zipfile, make_archive, register_archive_format, unregister_archive_format, get_archive_formats) import tarfile import warnings from test import test_support from test.test_support import TESTFN, check_warnings, captured_stdout TESTFN2 = TESTFN + "2" try: import grp import pwd UID_GID_SUPPORT = True except ImportError: UID_GID_SUPPORT = False try: import zlib except ImportError: zlib = None try: import zipfile ZIP_SUPPORT = True except ImportError: ZIP_SUPPORT = find_executable('zip') class TestShutil(unittest.TestCase): def setUp(self): super(TestShutil, self).setUp() self.tempdirs = [] def tearDown(self): super(TestShutil, self).tearDown() while self.tempdirs: d = self.tempdirs.pop() shutil.rmtree(d, os.name in ('nt', 'cygwin')) def write_file(self, path, content='xxx'): """Writes a file in the given path. path can be a string or a sequence. """ if isinstance(path, (list, tuple)): path = os.path.join(*path) f = open(path, 'w') try: f.write(content) finally: f.close() def mkdtemp(self): """Create a temporary directory that will be cleaned up. Returns the path of the directory. """ d = tempfile.mkdtemp() self.tempdirs.append(d) return d def test_rmtree_errors(self): # filename is guaranteed not to exist filename = tempfile.mktemp() self.assertRaises(OSError, shutil.rmtree, filename) # See bug #1071513 for why we don't run this on cygwin # and bug #1076467 for why we don't run this as root. 
if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin' and not (hasattr(os, 'geteuid') and os.geteuid() == 0)): def test_on_error(self): self.errorState = 0 os.mkdir(TESTFN) self.childpath = os.path.join(TESTFN, 'a') f = open(self.childpath, 'w') f.close() old_dir_mode = os.stat(TESTFN).st_mode old_child_mode = os.stat(self.childpath).st_mode # Make unwritable. os.chmod(self.childpath, stat.S_IREAD) os.chmod(TESTFN, stat.S_IREAD) shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror) # Test whether onerror has actually been called. self.assertEqual(self.errorState, 2, "Expected call to onerror function did not happen.") # Make writable again. os.chmod(TESTFN, old_dir_mode) os.chmod(self.childpath, old_child_mode) # Clean up. shutil.rmtree(TESTFN) def check_args_to_onerror(self, func, arg, exc): # test_rmtree_errors deliberately runs rmtree # on a directory that is chmod 400, which will fail. # This function is run when shutil.rmtree fails. # 99.9% of the time it initially fails to remove # a file in the directory, so the first time through # func is os.remove. # However, some Linux machines running ZFS on # FUSE experienced a failure earlier in the process # at os.listdir. The first failure may legally # be either. if self.errorState == 0: if func is os.remove: self.assertEqual(arg, self.childpath) else: self.assertIs(func, os.listdir, "func must be either os.remove or os.listdir") self.assertEqual(arg, TESTFN) self.assertTrue(issubclass(exc[0], OSError)) self.errorState = 1 else: self.assertEqual(func, os.rmdir) self.assertEqual(arg, TESTFN) self.assertTrue(issubclass(exc[0], OSError)) self.errorState = 2 def test_rmtree_dont_delete_file(self): # When called on a file instead of a directory, don't delete it. 
handle, path = tempfile.mkstemp() os.fdopen(handle).close() self.assertRaises(OSError, shutil.rmtree, path) os.remove(path) def test_copytree_simple(self): def write_data(path, data): f = open(path, "w") f.write(data) f.close() def read_data(path): f = open(path) data = f.read() f.close() return data src_dir = tempfile.mkdtemp() dst_dir = os.path.join(tempfile.mkdtemp(), 'destination') write_data(os.path.join(src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') try: shutil.copytree(src_dir, dst_dir) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir'))) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir', 'test.txt'))) actual = read_data(os.path.join(dst_dir, 'test.txt')) self.assertEqual(actual, '123') actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt')) self.assertEqual(actual, '456') finally: for path in ( os.path.join(src_dir, 'test.txt'), os.path.join(dst_dir, 'test.txt'), os.path.join(src_dir, 'test_dir', 'test.txt'), os.path.join(dst_dir, 'test_dir', 'test.txt'), ): if os.path.exists(path): os.remove(path) for path in (src_dir, os.path.dirname(dst_dir) ): if os.path.exists(path): shutil.rmtree(path) def test_copytree_with_exclude(self): def write_data(path, data): f = open(path, "w") f.write(data) f.close() def read_data(path): f = open(path) data = f.read() f.close() return data # creating data join = os.path.join exists = os.path.exists src_dir = tempfile.mkdtemp() try: dst_dir = join(tempfile.mkdtemp(), 'destination') write_data(join(src_dir, 'test.txt'), '123') write_data(join(src_dir, 'test.tmp'), '123') os.mkdir(join(src_dir, 'test_dir')) write_data(join(src_dir, 'test_dir', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2')) write_data(join(src_dir, 'test_dir2', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2', 'subdir')) os.mkdir(join(src_dir, 
'test_dir2', 'subdir2')) write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), '456') write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), '456') # testing glob-like patterns try: patterns = shutil.ignore_patterns('*.tmp', 'test_dir2') shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(exists(join(dst_dir, 'test.txt'))) self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) try: patterns = shutil.ignore_patterns('*.tmp', 'subdir*') shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) # testing callable-style try: def _filter(src, names): res = [] for name in names: path = os.path.join(src, name) if (os.path.isdir(path) and path.split()[-1] == 'subdir'): res.append(name) elif os.path.splitext(path)[-1] in ('.py'): res.append(name) return res shutil.copytree(src_dir, dst_dir, ignore=_filter) # checking the result: some elements should not be copied self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2', 'test.py'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) finally: shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) if hasattr(os, "symlink"): def test_dont_copy_file_onto_link_to_itself(self): # bug 851123. 
os.mkdir(TESTFN) src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') try: f = open(src, 'w') f.write('cheddar') f.close() os.link(src, dst) self.assertRaises(shutil.Error, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) # Using `src` here would mean we end up with a symlink pointing # to TESTFN/TESTFN/cheese, while it should point at # TESTFN/cheese. os.symlink('cheese', dst) self.assertRaises(shutil.Error, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) finally: try: shutil.rmtree(TESTFN) except OSError: pass def test_rmtree_on_symlink(self): # bug 1669. os.mkdir(TESTFN) try: src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') os.mkdir(src) os.symlink(src, dst) self.assertRaises(OSError, shutil.rmtree, dst) finally: shutil.rmtree(TESTFN, ignore_errors=True) if hasattr(os, "mkfifo"): # Issue #3002: copyfile and copytree block indefinitely on named pipes def test_copyfile_named_pipe(self): os.mkfifo(TESTFN) try: self.assertRaises(shutil.SpecialFileError, shutil.copyfile, TESTFN, TESTFN2) self.assertRaises(shutil.SpecialFileError, shutil.copyfile, __file__, TESTFN) finally: os.remove(TESTFN) def test_copytree_named_pipe(self): os.mkdir(TESTFN) try: subdir = os.path.join(TESTFN, "subdir") os.mkdir(subdir) pipe = os.path.join(subdir, "mypipe") os.mkfifo(pipe) try: shutil.copytree(TESTFN, TESTFN2) except shutil.Error as e: errors = e.args[0] self.assertEqual(len(errors), 1) src, dst, error_msg = errors[0] self.assertEqual("`%s` is a named pipe" % pipe, error_msg) else: self.fail("shutil.Error should have been raised") finally: shutil.rmtree(TESTFN, ignore_errors=True) shutil.rmtree(TESTFN2, ignore_errors=True) @unittest.skipUnless(zlib, "requires zlib") def test_make_tarball(self): # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') 
os.mkdir(os.path.join(tmpdir, 'sub')) self.write_file([tmpdir, 'sub', 'file3'], 'xxx') tmpdir2 = self.mkdtemp() unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], "source and target should be on same drive") base_name = os.path.join(tmpdir2, 'archive') # working with relative paths to avoid tar warnings old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(splitdrive(base_name)[1], '.') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(splitdrive(base_name)[1], '.', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) def _tarinfo(self, path): tar = tarfile.open(path) try: names = tar.getnames() names.sort() return tuple(names) finally: tar.close() def _create_files(self): # creating something to tar tmpdir = self.mkdtemp() dist = os.path.join(tmpdir, 'dist') os.mkdir(dist) self.write_file([dist, 'file1'], 'xxx') self.write_file([dist, 'file2'], 'xxx') os.mkdir(os.path.join(dist, 'sub')) self.write_file([dist, 'sub', 'file3'], 'xxx') os.mkdir(os.path.join(dist, 'sub2')) tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') return tmpdir, tmpdir2, base_name @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(find_executable('tar') and find_executable('gzip'), 'Need the tar command to run') def test_tarfile_vs_tar(self): tmpdir, tmpdir2, base_name = self._create_files() old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # now create another tarball using `tar` tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] gzip_cmd 
= ['gzip', '-f9', 'archive2.tar'] old_dir = os.getcwd() os.chdir(tmpdir) try: with captured_stdout() as s: spawn(tar_cmd) spawn(gzip_cmd) finally: os.chdir(old_dir) self.assertTrue(os.path.exists(tarball2)) # let's compare both tarballs self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) # now for a dry_run base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist', compress=None, dry_run=True) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') def test_make_zipfile(self): # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') _make_zipfile(base_name, tmpdir) # check if the compressed tarball was created tarball = base_name + '.zip' self.assertTrue(os.path.exists(tarball)) def test_make_archive(self): tmpdir = self.mkdtemp() base_name = os.path.join(tmpdir, 'archive') self.assertRaises(ValueError, make_archive, base_name, 'xxx') @unittest.skipUnless(zlib, "Requires zlib") def test_make_archive_owner_group(self): # testing make_archive with owner and group, with various combinations # this works even if there's not gid/uid support if UID_GID_SUPPORT: group = grp.getgrgid(0)[0] owner = pwd.getpwuid(0)[0] else: group = owner = 'root' base_dir, root_dir, base_name = self._create_files() base_name = os.path.join(self.mkdtemp() , 'archive') res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, group=group) 
self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'zip', root_dir, base_dir) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'tar', root_dir, base_dir, owner=owner, group=group) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh') self.assertTrue(os.path.exists(res)) @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") def test_tarfile_root_owner(self): tmpdir, tmpdir2, base_name = self._create_files() old_dir = os.getcwd() os.chdir(tmpdir) group = grp.getgrgid(0)[0] owner = pwd.getpwuid(0)[0] try: archive_name = _make_tarball(base_name, 'dist', compress=None, owner=owner, group=group) finally: os.chdir(old_dir) # check if the compressed tarball was created self.assertTrue(os.path.exists(archive_name)) # now checks the rights archive = tarfile.open(archive_name) try: for member in archive.getmembers(): self.assertEqual(member.uid, 0) self.assertEqual(member.gid, 0) finally: archive.close() def test_make_archive_cwd(self): current_dir = os.getcwd() def _breaks(*args, **kw): raise RuntimeError() register_archive_format('xxx', _breaks, [], 'xxx file') try: try: make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) except Exception: pass self.assertEqual(os.getcwd(), current_dir) finally: unregister_archive_format('xxx') def test_register_archive_format(self): self.assertRaises(TypeError, register_archive_format, 'xxx', 1) self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x, 1) self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x, [(1, 2), (1, 2, 3)]) register_archive_format('xxx', lambda: x, [(1, 2)], 'xxx file') formats = [name for name, params in get_archive_formats()] self.assertIn('xxx', formats) unregister_archive_format('xxx') formats = [name for name, params in get_archive_formats()] self.assertNotIn('xxx', formats) class TestMove(unittest.TestCase): def 
setUp(self): filename = "foo" self.src_dir = tempfile.mkdtemp() self.dst_dir = tempfile.mkdtemp() self.src_file = os.path.join(self.src_dir, filename) self.dst_file = os.path.join(self.dst_dir, filename) # Try to create a dir in the current directory, hoping that it is # not located on the same filesystem as the system tmp dir. try: self.dir_other_fs = tempfile.mkdtemp( dir=os.path.dirname(__file__)) self.file_other_fs = os.path.join(self.dir_other_fs, filename) except OSError: self.dir_other_fs = None with open(self.src_file, "wb") as f: f.write("spam") def tearDown(self): for d in (self.src_dir, self.dst_dir, self.dir_other_fs): try: if d: shutil.rmtree(d) except: pass def _check_move_file(self, src, dst, real_dst): with open(src, "rb") as f: contents = f.read() shutil.move(src, dst) with open(real_dst, "rb") as f: self.assertEqual(contents, f.read()) self.assertFalse(os.path.exists(src)) def _check_move_dir(self, src, dst, real_dst): contents = sorted(os.listdir(src)) shutil.move(src, dst) self.assertEqual(contents, sorted(os.listdir(real_dst))) self.assertFalse(os.path.exists(src)) def test_move_file(self): # Move a file to another location on the same filesystem. self._check_move_file(self.src_file, self.dst_file, self.dst_file) def test_move_file_to_dir(self): # Move a file inside an existing dir on the same filesystem. self._check_move_file(self.src_file, self.dst_dir, self.dst_file) def test_move_file_other_fs(self): # Move a file to an existing dir on another filesystem. if not self.dir_other_fs: # skip return self._check_move_file(self.src_file, self.file_other_fs, self.file_other_fs) def test_move_file_to_dir_other_fs(self): # Move a file to another location on another filesystem. if not self.dir_other_fs: # skip return self._check_move_file(self.src_file, self.dir_other_fs, self.file_other_fs) def test_move_dir(self): # Move a dir to another location on the same filesystem. 
dst_dir = tempfile.mktemp() try: self._check_move_dir(self.src_dir, dst_dir, dst_dir) finally: try: shutil.rmtree(dst_dir) except: pass def test_move_dir_other_fs(self): # Move a dir to another location on another filesystem. if not self.dir_other_fs: # skip return dst_dir = tempfile.mktemp(dir=self.dir_other_fs) try: self._check_move_dir(self.src_dir, dst_dir, dst_dir) finally: try: shutil.rmtree(dst_dir) except: pass def test_move_dir_to_dir(self): # Move a dir inside an existing dir on the same filesystem. self._check_move_dir(self.src_dir, self.dst_dir, os.path.join(self.dst_dir, os.path.basename(self.src_dir))) def test_move_dir_to_dir_other_fs(self): # Move a dir inside an existing dir on another filesystem. if not self.dir_other_fs: # skip return self._check_move_dir(self.src_dir, self.dir_other_fs, os.path.join(self.dir_other_fs, os.path.basename(self.src_dir))) def test_existing_file_inside_dest_dir(self): # A file with the same name inside the destination dir already exists. with open(self.dst_file, "wb"): pass self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir) def test_dont_move_dir_in_itself(self): # Moving a dir inside itself raises an Error. 
dst = os.path.join(self.src_dir, "bar") self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst) def test_destinsrc_false_negative(self): os.mkdir(TESTFN) try: for src, dst in [('srcdir', 'srcdir/dest')]: src = os.path.join(TESTFN, src) dst = os.path.join(TESTFN, dst) self.assertTrue(shutil._destinsrc(src, dst), msg='_destinsrc() wrongly concluded that ' 'dst (%s) is not in src (%s)' % (dst, src)) finally: shutil.rmtree(TESTFN, ignore_errors=True) def test_destinsrc_false_positive(self): os.mkdir(TESTFN) try: for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]: src = os.path.join(TESTFN, src) dst = os.path.join(TESTFN, dst) self.assertFalse(shutil._destinsrc(src, dst), msg='_destinsrc() wrongly concluded that ' 'dst (%s) is in src (%s)' % (dst, src)) finally: shutil.rmtree(TESTFN, ignore_errors=True) class TestCopyFile(unittest.TestCase): _delete = False class Faux(object): _entered = False _exited_with = None _raised = False def __init__(self, raise_in_exit=False, suppress_at_exit=True): self._raise_in_exit = raise_in_exit self._suppress_at_exit = suppress_at_exit def read(self, *args): return '' def __enter__(self): self._entered = True def __exit__(self, exc_type, exc_val, exc_tb): self._exited_with = exc_type, exc_val, exc_tb if self._raise_in_exit: self._raised = True raise IOError("Cannot close") return self._suppress_at_exit def tearDown(self): if self._delete: del shutil.open def _set_shutil_open(self, func): shutil.open = func self._delete = True def test_w_source_open_fails(self): def _open(filename, mode='r'): if filename == 'srcfile': raise IOError('Cannot open "srcfile"') assert 0 # shouldn't reach here. self._set_shutil_open(_open) self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile') def test_w_dest_open_fails(self): srcfile = self.Faux() def _open(filename, mode='r'): if filename == 'srcfile': return srcfile if filename == 'destfile': raise IOError('Cannot open "destfile"') assert 0 # shouldn't reach here. 
self._set_shutil_open(_open) shutil.copyfile('srcfile', 'destfile') self.assertTrue(srcfile._entered) self.assertTrue(srcfile._exited_with[0] is IOError) self.assertEqual(srcfile._exited_with[1].args, ('Cannot open "destfile"',)) def test_w_dest_close_fails(self): srcfile = self.Faux() destfile = self.Faux(True) def _open(filename, mode='r'): if filename == 'srcfile': return srcfile if filename == 'destfile': return destfile assert 0 # shouldn't reach here. self._set_shutil_open(_open) shutil.copyfile('srcfile', 'destfile') self.assertTrue(srcfile._entered) self.assertTrue(destfile._entered) self.assertTrue(destfile._raised) self.assertTrue(srcfile._exited_with[0] is IOError) self.assertEqual(srcfile._exited_with[1].args, ('Cannot close',)) def test_w_source_close_fails(self): srcfile = self.Faux(True) destfile = self.Faux() def _open(filename, mode='r'): if filename == 'srcfile': return srcfile if filename == 'destfile': return destfile assert 0 # shouldn't reach here. self._set_shutil_open(_open) self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile') self.assertTrue(srcfile._entered) self.assertTrue(destfile._entered) self.assertFalse(destfile._raised) self.assertTrue(srcfile._exited_with[0] is None) self.assertTrue(srcfile._raised) def test_move_dir_caseinsensitive(self): # Renames a folder to the same name # but a different case. self.src_dir = tempfile.mkdtemp() dst_dir = os.path.join( os.path.dirname(self.src_dir), os.path.basename(self.src_dir).upper()) self.assertNotEqual(self.src_dir, dst_dir) try: shutil.move(self.src_dir, dst_dir) self.assertTrue(os.path.isdir(dst_dir)) finally: if os.path.exists(dst_dir): os.rmdir(dst_dir) def test_main(): test_support.run_unittest(TestShutil, TestMove, TestCopyFile) if __name__ == '__main__': test_main()
ktan2020/legacy-automation
win/Lib/test/test_shutil.py
Python
mit
30,473
package org.jsondoc.core.scanner.builder; import org.jsondoc.core.annotation.Api; import org.jsondoc.core.pojo.ApiDoc; public class JSONDocApiDocBuilder { public static ApiDoc build(Class<?> controller) { Api api = controller.getAnnotation(Api.class); ApiDoc apiDoc = new ApiDoc(); apiDoc.setDescription(api.description()); apiDoc.setName(api.name()); apiDoc.setGroup(api.group()); apiDoc.setVisibility(api.visibility()); apiDoc.setStage(api.stage()); return apiDoc; } }
OrangGeeGee/jsondoc
jsondoc-core/src/main/java/org/jsondoc/core/scanner/builder/JSONDocApiDocBuilder.java
Java
mit
494
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#nullable enable

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using Microsoft.CodeAnalysis.CodeGen;
using Microsoft.CodeAnalysis.PooledObjects;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
    /// <summary>
    /// Language-agnostic base class for expression-evaluator compilation contexts.
    /// Derived classes supply the actual compilation of expressions, assignments
    /// and local-variable getters; this base provides shared assembly identities
    /// and diagnostic/error-message plumbing.
    /// </summary>
    internal abstract class EvaluationContextBase
    {
        // Well-known framework assembly identities, used when resolving
        // missing-assembly diagnostics.
        internal static readonly AssemblyIdentity SystemIdentity = new AssemblyIdentity("System");
        internal static readonly AssemblyIdentity SystemCoreIdentity = new AssemblyIdentity("System.Core");
        internal static readonly AssemblyIdentity SystemLinqIdentity = new AssemblyIdentity("System.Linq");
        internal static readonly AssemblyIdentity SystemXmlIdentity = new AssemblyIdentity("System.Xml");
        internal static readonly AssemblyIdentity SystemXmlLinqIdentity = new AssemblyIdentity("System.Xml.Linq");
        internal static readonly AssemblyIdentity MicrosoftVisualBasicIdentity = new AssemblyIdentity("Microsoft.VisualBasic");

        internal abstract CompileResult? CompileExpression(
            string expr,
            DkmEvaluationFlags compilationFlags,
            ImmutableArray<Alias> aliases,
            DiagnosticBag diagnostics,
            out ResultProperties resultProperties,
            CompilationTestData? testData);

        internal abstract CompileResult? CompileAssignment(
            string target,
            string expr,
            ImmutableArray<Alias> aliases,
            DiagnosticBag diagnostics,
            out ResultProperties resultProperties,
            CompilationTestData? testData);

        internal abstract ReadOnlyCollection<byte> CompileGetLocals(
            ArrayBuilder<LocalAndMethod> locals,
            bool argumentsOnly,
            ImmutableArray<Alias> aliases,
            DiagnosticBag diagnostics,
            out string typeName,
            CompilationTestData? testData);

        /// <summary>
        /// Formats the first error in <paramref name="diagnostics"/> and reports,
        /// via out-parameters, the missing assembly identities (if any) found on
        /// the errors and whether retrying with referenced modules only is warranted.
        /// Assumes the bag contains at least one error; <c>First()</c> throws otherwise.
        /// </summary>
        internal string GetErrorMessageAndMissingAssemblyIdentities(
            DiagnosticBag diagnostics,
            DiagnosticFormatter formatter,
            CultureInfo? preferredUICulture,
            AssemblyIdentity linqLibrary,
            out bool useReferencedModulesOnly,
            out ImmutableArray<AssemblyIdentity> missingAssemblyIdentities)
        {
            // Materialize once: the deferred query was previously enumerated up to
            // three times (foreach, All, First), re-filtering the bag each time.
            // A list makes this single-pass and guarantees every use below sees
            // the same snapshot of the diagnostics.
            var errors = diagnostics.AsEnumerable()
                .Where(d => d.Severity == DiagnosticSeverity.Error)
                .ToList();

            missingAssemblyIdentities = default;
            foreach (var error in errors)
            {
                // Stop at the first error that yields missing-assembly identities.
                missingAssemblyIdentities = this.GetMissingAssemblyIdentities(error, linqLibrary);
                if (!missingAssemblyIdentities.IsDefault)
                {
                    break;
                }
            }

            if (missingAssemblyIdentities.IsDefault)
            {
                missingAssemblyIdentities = ImmutableArray<AssemblyIdentity>.Empty;
            }

            // Only retry against referenced modules when every error is a
            // duplicate-type/assembly error.
            useReferencedModulesOnly = errors.All(HasDuplicateTypesOrAssemblies);

            return GetErrorMessage(errors.First(), formatter, preferredUICulture);
        }

        /// <summary>
        /// Formats a diagnostic for display, short-circuiting for our internal
        /// <see cref="SimpleMessageDiagnostic"/> which carries a pre-built message.
        /// </summary>
        internal static string GetErrorMessage(
            Diagnostic error,
            DiagnosticFormatter formatter,
            CultureInfo? preferredUICulture)
        {
            return (error is SimpleMessageDiagnostic simpleMessage)
                ? simpleMessage.GetMessage()
                : formatter.Format(error, preferredUICulture ?? CultureInfo.CurrentUICulture);
        }

        internal abstract bool HasDuplicateTypesOrAssemblies(Diagnostic diagnostic);

        internal abstract ImmutableArray<AssemblyIdentity> GetMissingAssemblyIdentities(Diagnostic diagnostic, AssemblyIdentity linqLibrary);

        // ILOffset == 0xffffffff indicates an instruction outside of IL.
        // Treat such values as the beginning of the IL.
        internal static int NormalizeILOffset(uint ilOffset)
        {
            return (ilOffset == uint.MaxValue) ? 0 : (int)ilOffset;
        }

        /// <summary>
        /// A minimal error <see cref="Diagnostic"/> that wraps a fixed message.
        /// Only the members needed by the error-message path are implemented;
        /// everything else intentionally throws.
        /// </summary>
        protected sealed class SimpleMessageDiagnostic : Diagnostic
        {
            private readonly string _message;

            internal SimpleMessageDiagnostic(string message)
            {
                _message = message;
            }

            public override IReadOnlyList<Location> AdditionalLocations
            {
                get { throw new NotImplementedException(); }
            }

            public override DiagnosticDescriptor Descriptor
            {
                get { throw new NotImplementedException(); }
            }

            public override string Id
            {
                get { throw new NotImplementedException(); }
            }

            public override Location Location
            {
                get { throw new NotImplementedException(); }
            }

            public override DiagnosticSeverity Severity
            {
                get { return DiagnosticSeverity.Error; }
            }

            public override DiagnosticSeverity DefaultSeverity
            {
                get { return DiagnosticSeverity.Error; }
            }

            public override bool IsSuppressed
            {
                get { return false; }
            }

            public override int WarningLevel
            {
                get { throw new NotImplementedException(); }
            }

            public override bool Equals(Diagnostic? obj)
            {
                throw new NotImplementedException();
            }

            public override bool Equals(object? obj)
            {
                throw new NotImplementedException();
            }

            public override int GetHashCode()
            {
                throw new NotImplementedException();
            }

            public override string GetMessage(IFormatProvider? formatProvider = null)
            {
                return _message;
            }

            internal override Diagnostic WithLocation(Location location)
            {
                throw new NotImplementedException();
            }

            internal override Diagnostic WithSeverity(DiagnosticSeverity severity)
            {
                throw new NotImplementedException();
            }

            internal override Diagnostic WithIsSuppressed(bool isSuppressed)
            {
                throw new NotImplementedException();
            }
        }
    }
}
abock/roslyn
src/ExpressionEvaluator/Core/Source/ExpressionCompiler/EvaluationContextBase.cs
C#
mit
6,740
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/zf2 for the canonical source repository * @copyright Copyright (c) 2005-2013 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ namespace Zend\File\Transfer\Adapter; use ErrorException; use Zend\File\Transfer; use Zend\File\Transfer\Exception; use Zend\Filter; use Zend\Filter\Exception as FilterException; use Zend\I18n\Translator\Translator; use Zend\I18n\Translator\TranslatorAwareInterface; use Zend\Stdlib\ErrorHandler; use Zend\Validator; /** * Abstract class for file transfers (Downloads and Uploads) * * This class needs a full rewrite. It re-implements functionality present in * Zend\Filter\Input and/or Zend\Form\Element, and in a way that's inconsistent * with either one. Additionally, plugin loader usage is now deprecated -- but * modifying that should be done in tandem with a rewrite to utilize validator * and filter chains instead. * * @todo Rewrite */ abstract class AbstractAdapter implements TranslatorAwareInterface { /**@+ * Plugin loader Constants */ const FILTER = 'FILTER'; const VALIDATOR = 'VALIDATOR'; /**@-*/ /** * Internal list of breaks * * @var array */ protected $break = array(); /** * @var FilterPluginManager */ protected $filterManager; /** * Internal list of filters * * @var array */ protected $filters = array(); /** * Plugin loaders for filter and validation chains * * @var array */ protected $loaders = array(); /** * Internal list of messages * * @var array */ protected $messages = array(); /** * @var Translator */ protected $translator; /** * Is translation enabled? 
* * @var bool */ protected $translatorEnabled = true; /** * Translator text domain (optional) * * @var string */ protected $translatorTextDomain = 'default'; /** * @var ValidatorPluginManager */ protected $validatorManager; /** * Internal list of validators * @var array */ protected $validators = array(); /** * Internal list of files * This array looks like this: * array(form => array( - Form is the name within the form or, if not set the filename * name, - Original name of this file * type, - Mime type of this file * size, - Filesize in bytes * tmp_name, - Internally temporary filename for uploaded files * error, - Error which has occurred * destination, - New destination for this file * validators, - Set validator names for this file * files - Set file names for this file * )) * * @var array */ protected $files = array(); /** * TMP directory * @var string */ protected $tmpDir; /** * Available options for file transfers */ protected $options = array( 'ignoreNoFile' => false, 'useByteString' => true, 'magicFile' => null, 'detectInfos' => true, ); /** * Send file * * @param mixed $options * @return bool */ abstract public function send($options = null); /** * Receive file * * @param mixed $options * @return bool */ abstract public function receive($options = null); /** * Is file sent? * * @param array|string|null $files * @return bool */ abstract public function isSent($files = null); /** * Is file received? * * @param array|string|null $files * @return bool */ abstract public function isReceived($files = null); /** * Has a file been uploaded ? * * @param array|string|null $files * @return bool */ abstract public function isUploaded($files = null); /** * Has the file been filtered ? 
* * @param array|string|null $files * @return bool */ abstract public function isFiltered($files = null); /** * Adds one or more files * * @param string|array $file File to add * @param string|array $validator Validators to use for this file, must be set before * @param string|array $filter Filters to use for this file, must be set before * @return AbstractAdapter * @throws Exception Not implemented */ //abstract public function addFile($file, $validator = null, $filter = null); /** * Returns all set types * * @return array List of set types * @throws Exception Not implemented */ //abstract public function getType(); /** * Adds one or more type of files * * @param string|array $type Type of files to add * @param string|array $validator Validators to use for this file, must be set before * @param string|array $filter Filters to use for this file, must be set before * @return AbstractAdapter * @throws Exception Not implemented */ //abstract public function addType($type, $validator = null, $filter = null); /** * Returns all set files * * @return array List of set files */ //abstract public function getFile(); /** * Set the filter plugin manager instance * * @param FilterPluginManager $filterManager * @return AbstractAdapter */ public function setFilterManager(FilterPluginManager $filterManager) { $this->filterManager = $filterManager; return $this; } /** * Get the filter plugin manager instance * * @return FilterPluginManager */ public function getFilterManager() { if (!$this->filterManager instanceof FilterPluginManager) { $this->setFilterManager(new FilterPluginManager()); } return $this->filterManager; } /** * Set the validator plugin manager instance * * @param ValidatorPluginManager $validatorManager * @return AbstractAdapter */ public function setValidatorManager(ValidatorPluginManager $validatorManager) { $this->validatorManager = $validatorManager; return $this; } /** * Get the validator plugin manager instance * * @return ValidatorPluginManager */ public 
function getValidatorManager() { if (!$this->validatorManager instanceof ValidatorPluginManager) { $this->setValidatorManager(new ValidatorPluginManager()); } return $this->validatorManager; } /** * Adds a new validator for this class * * @param string|Validator\ValidatorInterface $validator Type of validator to add * @param bool $breakChainOnFailure If the validation chain should stop an failure * @param string|array $options Options to set for the validator * @param string|array $files Files to limit this validator to * @return AbstractAdapter * @throws Exception\InvalidArgumentException for invalid type */ public function addValidator($validator, $breakChainOnFailure = false, $options = null, $files = null) { if (is_string($validator)) { $validator = $this->getValidatorManager()->get($validator, $options); if (is_array($options) && isset($options['messages'])) { if (is_array($options['messages'])) { $validator->setMessages($options['messages']); } elseif (is_string($options['messages'])) { $validator->setMessage($options['messages']); } unset($options['messages']); } } if (!$validator instanceof Validator\ValidatorInterface) { throw new Exception\InvalidArgumentException( 'Invalid validator provided to addValidator; ' . 
'must be string or Zend\Validator\ValidatorInterface' ); } $name = get_class($validator); $this->validators[$name] = $validator; $this->break[$name] = $breakChainOnFailure; $files = $this->getFiles($files, true, true); foreach ($files as $file) { if ($name == 'NotEmpty') { $temp = $this->files[$file]['validators']; $this->files[$file]['validators'] = array($name); $this->files[$file]['validators'] += $temp; } else { $this->files[$file]['validators'][] = $name; } $this->files[$file]['validated'] = false; } return $this; } /** * Add Multiple validators at once * * @param array $validators * @param string|array $files * @return AbstractAdapter * @throws Exception\InvalidArgumentException for invalid type */ public function addValidators(array $validators, $files = null) { foreach ($validators as $name => $validatorInfo) { if ($validatorInfo instanceof Validator\ValidatorInterface) { $this->addValidator($validatorInfo, null, null, $files); } elseif (is_string($validatorInfo)) { if (!is_int($name)) { $this->addValidator($name, null, $validatorInfo, $files); } else { $this->addValidator($validatorInfo, null, null, $files); } } elseif (is_array($validatorInfo)) { $argc = count($validatorInfo); $breakChainOnFailure = false; $options = array(); if (isset($validatorInfo['validator'])) { $validator = $validatorInfo['validator']; if (isset($validatorInfo['breakChainOnFailure'])) { $breakChainOnFailure = $validatorInfo['breakChainOnFailure']; } if (isset($validatorInfo['options'])) { $options = $validatorInfo['options']; } $this->addValidator($validator, $breakChainOnFailure, $options, $files); } else { if (is_string($name)) { $validator = $name; $options = $validatorInfo; $this->addValidator($validator, $breakChainOnFailure, $options, $files); } else { $file = $files; switch (true) { case (0 == $argc): break; case (1 <= $argc): $validator = array_shift($validatorInfo); case (2 <= $argc): $breakChainOnFailure = array_shift($validatorInfo); case (3 <= $argc): $options = 
array_shift($validatorInfo); case (4 <= $argc): if (!empty($validatorInfo)) { $file = array_shift($validatorInfo); } default: $this->addValidator($validator, $breakChainOnFailure, $options, $file); break; } } } } else { throw new Exception\InvalidArgumentException('Invalid validator passed to addValidators()'); } } return $this; } /** * Sets a validator for the class, erasing all previous set * * @param array $validators Validators to set * @param string|array $files Files to limit this validator to * @return AbstractAdapter */ public function setValidators(array $validators, $files = null) { $this->clearValidators(); return $this->addValidators($validators, $files); } /** * Determine if a given validator has already been registered * * @param string $name * @return bool */ public function hasValidator($name) { return (false !== $this->getValidatorIdentifier($name)); } /** * Retrieve individual validator * * @param string $name * @return Validator\ValidatorInterface|null */ public function getValidator($name) { if (false === ($identifier = $this->getValidatorIdentifier($name))) { return null; } return $this->validators[$identifier]; } /** * Returns all set validators * * @param string|array $files (Optional) Returns the validator for this files * @return null|array List of set validators */ public function getValidators($files = null) { if ($files == null) { return $this->validators; } $files = $this->getFiles($files, true, true); $validators = array(); foreach ($files as $file) { if (!empty($this->files[$file]['validators'])) { $validators += $this->files[$file]['validators']; } } $validators = array_unique($validators); $result = array(); foreach ($validators as $validator) { $result[$validator] = $this->validators[$validator]; } return $result; } /** * Remove an individual validator * * @param string $name * @return AbstractAdapter */ public function removeValidator($name) { if (false === ($key = $this->getValidatorIdentifier($name))) { return $this; } 
unset($this->validators[$key]); foreach (array_keys($this->files) as $file) { if (empty($this->files[$file]['validators'])) { continue; } $index = array_search($key, $this->files[$file]['validators']); if ($index === false) { continue; } unset($this->files[$file]['validators'][$index]); $this->files[$file]['validated'] = false; } return $this; } /** * Remove all validators * * @return AbstractAdapter */ public function clearValidators() { $this->validators = array(); foreach (array_keys($this->files) as $file) { $this->files[$file]['validators'] = array(); $this->files[$file]['validated'] = false; } return $this; } /** * Sets Options for adapters * * @param array $options Options to set * @param array $files (Optional) Files to set the options for * @return AbstractAdapter */ public function setOptions($options = array(), $files = null) { $file = $this->getFiles($files, false, true); if (is_array($options)) { if (empty($file)) { $this->options = array_merge($this->options, $options); } foreach ($options as $name => $value) { foreach ($file as $key => $content) { switch ($name) { case 'magicFile' : $this->files[$key]['options'][$name] = (string) $value; break; case 'ignoreNoFile' : case 'useByteString' : case 'detectInfos' : $this->files[$key]['options'][$name] = (bool) $value; break; default: continue; } } } } return $this; } /** * Returns set options for adapters or files * * @param array $files (Optional) Files to return the options for * @return array Options for given files */ public function getOptions($files = null) { $file = $this->getFiles($files, false, true); foreach ($file as $key => $content) { if (isset($this->files[$key]['options'])) { $options[$key] = $this->files[$key]['options']; } else { $options[$key] = array(); } } return $options; } /** * Checks if the files are valid * * @param string|array $files (Optional) Files to check * @return bool True if all checks are valid */ public function isValid($files = null) { $check = $this->getFiles($files, 
false, true); if (empty($check)) { return false; } $translator = $this->getTranslator(); $this->messages = array(); $break = false; foreach ($check as $content) { if (array_key_exists('validators', $content) && in_array('Zend\Validator\File\Count', $content['validators'])) { $validator = $this->validators['Zend\Validator\File\Count']; $count = $content; if (empty($content['tmp_name'])) { continue; } if (array_key_exists('destination', $content)) { $checkit = $content['destination']; } else { $checkit = dirname($content['tmp_name']); } $checkit .= DIRECTORY_SEPARATOR . $content['name']; $validator->addFile($checkit); } } if (isset($count)) { if (!$validator->isValid($count['tmp_name'], $count)) { $this->messages += $validator->getMessages(); } } foreach ($check as $key => $content) { $fileerrors = array(); if (array_key_exists('validators', $content) && $content['validated']) { continue; } if (array_key_exists('validators', $content)) { foreach ($content['validators'] as $class) { $validator = $this->validators[$class]; if (method_exists($validator, 'setTranslator')) { $validator->setTranslator($translator); } if (($class === 'Zend\Validator\File\Upload') && (empty($content['tmp_name']))) { $tocheck = $key; } else { $tocheck = $content['tmp_name']; } if (!$validator->isValid($tocheck, $content)) { $fileerrors += $validator->getMessages(); } if (!empty($content['options']['ignoreNoFile']) && (isset($fileerrors['fileUploadErrorNoFile']))) { unset($fileerrors['fileUploadErrorNoFile']); break; } if (($class === 'Zend\Validator\File\Upload') && (count($fileerrors) > 0)) { break; } if (($this->break[$class]) && (count($fileerrors) > 0)) { $break = true; break; } } } if (count($fileerrors) > 0) { $this->files[$key]['validated'] = false; } else { $this->files[$key]['validated'] = true; } $this->messages += $fileerrors; if ($break) { break; } } if (count($this->messages) > 0) { return false; } return true; } /** * Returns found validation messages * * @return array */ public 
function getMessages() { return $this->messages; } /** * Retrieve error codes * * @return array */ public function getErrors() { return array_keys($this->messages); } /** * Are there errors registered? * * @return bool */ public function hasErrors() { return (!empty($this->messages)); } /** * Adds a new filter for this class * * @param string|Filter\FilterInterface $filter Type of filter to add * @param string|array $options Options to set for the filter * @param string|array $files Files to limit this filter to * @return AbstractAdapter * @throws Exception\InvalidArgumentException for invalid type */ public function addFilter($filter, $options = null, $files = null) { if (is_string($filter)) { $filter = $this->getFilterManager()->get($filter, $options); } if (!$filter instanceof Filter\FilterInterface) { throw new Exception\InvalidArgumentException('Invalid filter specified'); } $class = get_class($filter); $this->filters[$class] = $filter; $files = $this->getFiles($files, true, true); foreach ($files as $file) { $this->files[$file]['filters'][] = $class; } return $this; } /** * Add Multiple filters at once * * @param array $filters * @param string|array $files * @return AbstractAdapter */ public function addFilters(array $filters, $files = null) { foreach ($filters as $key => $spec) { if ($spec instanceof Filter\FilterInterface) { $this->addFilter($spec, null, $files); continue; } if (is_string($key)) { $this->addFilter($key, $spec, $files); continue; } if (is_int($key)) { if (is_string($spec)) { $this->addFilter($spec, null, $files); continue; } if (is_array($spec)) { if (!array_key_exists('filter', $spec)) { continue; } $filter = $spec['filter']; unset($spec['filter']); $this->addFilter($filter, $spec, $files); continue; } continue; } } return $this; } /** * Sets a filter for the class, erasing all previous set * * @param array $filters Filter to set * @param string|array $files Files to limit this filter to * @return Filter\AbstractFilter */ public function 
setFilters(array $filters, $files = null) { $this->clearFilters(); return $this->addFilters($filters, $files); } /** * Determine if a given filter has already been registered * * @param string $name * @return bool */ public function hasFilter($name) { return (false !== $this->getFilterIdentifier($name)); } /** * Retrieve individual filter * * @param string $name * @return Filter\FilterInterface|null */ public function getFilter($name) { if (false === ($identifier = $this->getFilterIdentifier($name))) { return null; } return $this->filters[$identifier]; } /** * Returns all set filters * * @param string|array $files (Optional) Returns the filter for this files * @return array List of set filters * @throws Exception\RuntimeException When file not found */ public function getFilters($files = null) { if ($files === null) { return $this->filters; } $files = $this->getFiles($files, true, true); $filters = array(); foreach ($files as $file) { if (!empty($this->files[$file]['filters'])) { $filters += $this->files[$file]['filters']; } } $filters = array_unique($filters); $result = array(); foreach ($filters as $filter) { $result[] = $this->filters[$filter]; } return $result; } /** * Remove an individual filter * * @param string $name * @return AbstractAdapter */ public function removeFilter($name) { if (false === ($key = $this->getFilterIdentifier($name))) { return $this; } unset($this->filters[$key]); foreach (array_keys($this->files) as $file) { if (empty($this->files[$file]['filters'])) { continue; } $index = array_search($key, $this->files[$file]['filters']); if ($index === false) { continue; } unset($this->files[$file]['filters'][$index]); } return $this; } /** * Remove all filters * * @return AbstractAdapter */ public function clearFilters() { $this->filters = array(); foreach (array_keys($this->files) as $file) { $this->files[$file]['filters'] = array(); } return $this; } /** * Retrieves the filename of transferred files. 
* * @param string $file (Optional) Element to return the filename for * @param bool $path (Optional) Should the path also be returned ? * @return string|array */ public function getFileName($file = null, $path = true) { $files = $this->getFiles($file, true, true); $result = array(); $directory = ""; foreach ($files as $file) { if (empty($this->files[$file]['name'])) { continue; } if ($path === true) { $directory = $this->getDestination($file) . DIRECTORY_SEPARATOR; } $result[$file] = $directory . $this->files[$file]['name']; } if (count($result) == 1) { return current($result); } return $result; } /** * Retrieve additional internal file informations for files * * @param string $file (Optional) File to get informations for * @return array */ public function getFileInfo($file = null) { return $this->getFiles($file); } /** * Sets a new destination for the given files * * @deprecated Will be changed to be a filter!!! * @param string $destination New destination directory * @param string|array $files Files to set the new destination for * @return AbstractAdapter * @throws Exception\InvalidArgumentException when the given destination is not a directory or does not exist */ public function setDestination($destination, $files = null) { $orig = $files; $destination = rtrim($destination, "/\\"); if (!is_dir($destination)) { throw new Exception\InvalidArgumentException('The given destination is not a directory or does not exist'); } if (!is_writable($destination)) { throw new Exception\InvalidArgumentException('The given destination is not writeable'); } if ($files === null) { foreach ($this->files as $file => $content) { $this->files[$file]['destination'] = $destination; } } else { $files = $this->getFiles($files, true, true); if (empty($files) and is_string($orig)) { $this->files[$orig]['destination'] = $destination; } foreach ($files as $file) { $this->files[$file]['destination'] = $destination; } } return $this; } /** * Retrieve destination directory value * * @param 
null|string|array $files * @throws Exception\InvalidArgumentException * @return null|string|array */ public function getDestination($files = null) { $orig = $files; $files = $this->getFiles($files, false, true); $destinations = array(); if (empty($files) and is_string($orig)) { if (isset($this->files[$orig]['destination'])) { $destinations[$orig] = $this->files[$orig]['destination']; } else { throw new Exception\InvalidArgumentException( sprintf('The file transfer adapter can not find "%s"', $orig) ); } } foreach ($files as $key => $content) { if (isset($this->files[$key]['destination'])) { $destinations[$key] = $this->files[$key]['destination']; } else { $tmpdir = $this->getTmpDir(); $this->setDestination($tmpdir, $key); $destinations[$key] = $tmpdir; } } if (empty($destinations)) { $destinations = $this->getTmpDir(); } elseif (count($destinations) == 1) { $destinations = current($destinations); } return $destinations; } /** * Sets translator to use in helper * * @param Translator $translator [optional] translator. * Default is null, which sets no translator. * @param string $textDomain [optional] text domain * Default is null, which skips setTranslatorTextDomain * @return AbstractAdapter */ public function setTranslator(Translator $translator = null, $textDomain = null) { $this->translator = $translator; if (null !== $textDomain) { $this->setTranslatorTextDomain($textDomain); } return $this; } /** * Retrieve localization translator object * * @return Translator|null */ public function getTranslator() { if ($this->isTranslatorEnabled()) { return null; } return $this->translator; } /** * Checks if the helper has a translator * * @return bool */ public function hasTranslator() { return (bool) $this->getTranslator(); } /** * Indicate whether or not translation should be enabled * * @param bool $flag * @return AbstractAdapter */ public function setTranslatorEnabled($flag = true) { $this->translatorEnabled = (bool) $flag; return $this; } /** * Is translation enabled? 
* * @return bool */ public function isTranslatorEnabled() { return $this->translatorEnabled; } /** * Set translation text domain * * @param string $textDomain * @return AbstractAdapter */ public function setTranslatorTextDomain($textDomain = 'default') { $this->translatorTextDomain = $textDomain; return $this; } /** * Return the translation text domain * * @return string */ public function getTranslatorTextDomain() { return $this->translatorTextDomain; } /** * Returns the hash for a given file * * @param string $hash Hash algorithm to use * @param string|array $files Files to return the hash for * @return string|array Hashstring * @throws Exception\InvalidArgumentException On unknown hash algorithm */ public function getHash($hash = 'crc32', $files = null) { if (!in_array($hash, hash_algos())) { throw new Exception\InvalidArgumentException('Unknown hash algorithm'); } $files = $this->getFiles($files); $result = array(); foreach ($files as $key => $value) { if (file_exists($value['name'])) { $result[$key] = hash_file($hash, $value['name']); } elseif (file_exists($value['tmp_name'])) { $result[$key] = hash_file($hash, $value['tmp_name']); } elseif (empty($value['options']['ignoreNoFile'])) { throw new Exception\InvalidArgumentException("The file '{$value['name']}' does not exist"); } } if (count($result) == 1) { return current($result); } return $result; } /** * Returns the real filesize of the file * * @param string|array $files Files to get the filesize from * @return string|array Filesize * @throws Exception\InvalidArgumentException When the file does not exist */ public function getFileSize($files = null) { $files = $this->getFiles($files); $result = array(); foreach ($files as $key => $value) { if (file_exists($value['name']) || file_exists($value['tmp_name'])) { if ($value['options']['useByteString']) { $result[$key] = static::toByteString($value['size']); } else { $result[$key] = $value['size']; } } elseif (empty($value['options']['ignoreNoFile'])) { throw new 
Exception\InvalidArgumentException("The file '{$value['name']}' does not exist"); } else { continue; } } if (count($result) == 1) { return current($result); } return $result; } /** * Internal method to detect the size of a file * * @param array $value File infos * @return string Filesize of given file */ protected function detectFileSize($value) { if (file_exists($value['name'])) { $filename = $value['name']; } elseif (file_exists($value['tmp_name'])) { $filename = $value['tmp_name']; } else { return null; } ErrorHandler::start(); $filesize = filesize($filename); $return = ErrorHandler::stop(); if ($return instanceof ErrorException) { $filesize = 0; } return sprintf("%u", $filesize); } /** * Returns the real mimetype of the file * Uses fileinfo, when not available mime_magic and as last fallback a manual given mimetype * * @param string|array $files Files to get the mimetype from * @return string|array MimeType * @throws Exception\InvalidArgumentException When the file does not exist */ public function getMimeType($files = null) { $files = $this->getFiles($files); $result = array(); foreach ($files as $key => $value) { if (file_exists($value['name']) || file_exists($value['tmp_name'])) { $result[$key] = $value['type']; } elseif (empty($value['options']['ignoreNoFile'])) { throw new Exception\InvalidArgumentException("the file '{$value['name']}' does not exist"); } else { continue; } } if (count($result) == 1) { return current($result); } return $result; } /** * Internal method to detect the mime type of a file * * @param array $value File infos * @return string Mimetype of given file */ protected function detectMimeType($value) { if (file_exists($value['name'])) { $file = $value['name']; } elseif (file_exists($value['tmp_name'])) { $file = $value['tmp_name']; } else { return null; } if (class_exists('finfo', false)) { if (!empty($value['options']['magicFile'])) { ErrorHandler::start(); $mime = finfo_open(FILEINFO_MIME_TYPE, $value['options']['magicFile']); 
ErrorHandler::stop(); } if (empty($mime)) { ErrorHandler::start(); $mime = finfo_open(FILEINFO_MIME_TYPE); ErrorHandler::stop(); } if (!empty($mime)) { $result = finfo_file($mime, $file); } unset($mime); } if (empty($result) && (function_exists('mime_content_type') && ini_get('mime_magic.magicfile'))) { $result = mime_content_type($file); } if (empty($result)) { $result = 'application/octet-stream'; } return $result; } /** * Returns the formatted size * * @param integer $size * @return string */ protected static function toByteString($size) { $sizes = array('B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'); for ($i=0; $size >= 1024 && $i < 9; $i++) { $size /= 1024; } return round($size, 2) . $sizes[$i]; } /** * Internal function to filter all given files * * @param string|array $files (Optional) Files to check * @return bool False on error */ protected function filter($files = null) { $check = $this->getFiles($files); foreach ($check as $name => $content) { if (array_key_exists('filters', $content)) { foreach ($content['filters'] as $class) { $filter = $this->filters[$class]; try { $result = $filter->filter($this->getFileName($name)); $this->files[$name]['destination'] = dirname($result); $this->files[$name]['name'] = basename($result); } catch (FilterException\ExceptionInterface $e) { $this->messages += array($e->getMessage()); } } } } if (count($this->messages) > 0) { return false; } return true; } /** * Determine system TMP directory and detect if we have read access * * @return string * @throws Exception\RuntimeException if unable to determine directory */ protected function getTmpDir() { if (null === $this->tmpDir) { $tmpdir = array(); if (function_exists('sys_get_temp_dir')) { $tmpdir[] = sys_get_temp_dir(); } if (!empty($_ENV['TMP'])) { $tmpdir[] = realpath($_ENV['TMP']); } if (!empty($_ENV['TMPDIR'])) { $tmpdir[] = realpath($_ENV['TMPDIR']); } if (!empty($_ENV['TEMP'])) { $tmpdir[] = realpath($_ENV['TEMP']); } $upload = ini_get('upload_tmp_dir'); if 
($upload) { $tmpdir[] = realpath($upload); } foreach ($tmpdir as $directory) { if ($this->isPathWriteable($directory)) { $this->tmpDir = $directory; } } if (empty($this->tmpDir)) { // Attemp to detect by creating a temporary file $tempFile = tempnam(md5(uniqid(rand(), true)), ''); if ($tempFile) { $this->tmpDir = realpath(dirname($tempFile)); unlink($tempFile); } else { throw new Exception\RuntimeException('Could not determine a temporary directory'); } } $this->tmpDir = rtrim($this->tmpDir, "/\\"); } return $this->tmpDir; } /** * Tries to detect if we can read and write to the given path * * @param string $path * @return bool */ protected function isPathWriteable($path) { $tempFile = rtrim($path, "/\\"); $tempFile .= '/' . 'test.1'; ErrorHandler::start(); $result = file_put_contents($tempFile, 'TEST'); ErrorHandler::stop(); if ($result == false) { return false; } ErrorHandler::start(); $result = unlink($tempFile); ErrorHandler::stop(); if ($result == false) { return false; } return true; } /** * Returns found files based on internal file array and given files * * @param string|array $files (Optional) Files to return * @param bool $names (Optional) Returns only names on true, else complete info * @param bool $noexception (Optional) Allows throwing an exception, otherwise returns an empty array * @return array Found files * @throws Exception\RuntimeException On false filename */ protected function getFiles($files, $names = false, $noexception = false) { $check = array(); if (is_string($files)) { $files = array($files); } if (is_array($files)) { foreach ($files as $find) { $found = array(); foreach ($this->files as $file => $content) { if (!isset($content['name'])) { continue; } if (($content['name'] === $find) && isset($content['multifiles'])) { foreach ($content['multifiles'] as $multifile) { $found[] = $multifile; } break; } if ($file === $find) { $found[] = $file; break; } if ($content['name'] === $find) { $found[] = $file; break; } } if (empty($found)) { if 
($noexception !== false) { return array(); } throw new Exception\RuntimeException(sprintf('The file transfer adapter can not find "%s"', $find)); } foreach ($found as $checked) { $check[$checked] = $this->files[$checked]; } } } if ($files === null) { $check = $this->files; $keys = array_keys($check); foreach ($keys as $key) { if (isset($check[$key]['multifiles'])) { unset($check[$key]); } } } if ($names) { $check = array_keys($check); } return $check; } /** * Retrieve internal identifier for a named validator * * @param string $name * @return string */ protected function getValidatorIdentifier($name) { if (array_key_exists($name, $this->validators)) { return $name; } foreach (array_keys($this->validators) as $test) { if (preg_match('/' . preg_quote($name) . '$/i', $test)) { return $test; } } return false; } /** * Retrieve internal identifier for a named filter * * @param string $name * @return string */ protected function getFilterIdentifier($name) { if (array_key_exists($name, $this->filters)) { return $name; } foreach (array_keys($this->filters) as $test) { if (preg_match('/' . preg_quote($name) . '$/i', $test)) { return $test; } } return false; } }
allengaller/a4c-nin-api
web/zend/library/Zend/File/Transfer/Adapter/AbstractAdapter.php
PHP
mit
44,097
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using CocosSharp;

namespace tests
{
    // Visual regression test: three animated sprites rendered through a
    // CCSpriteBatchNode, each with a different anchor point, while rotating.
    // A small marker sprite is placed at each sprite's Position so the
    // anchor-point offset is visible on screen.
    public class SpriteBatchNodeOffsetAnchorRotation : SpriteTestDemo
    {
        const int numOfSprites = 3;
        // Animated grossini sprites (one per anchor-point case).
        CCSprite[] sprites;
        // Reference markers drawn at each sprite's Position.
        CCSprite[] pointSprites;
        CCAnimation animation;

        #region Properties

        public override string Title { get { return "Testing SpriteBatchNode"; } }
        public override string Subtitle { get { return "offset + anchor + rot"; } }

        #endregion Properties

        #region Constructors

        public SpriteBatchNodeOffsetAnchorRotation()
        {
            CCSpriteFrameCache cache = CCSpriteFrameCache.SharedSpriteFrameCache;
            cache.AddSpriteFrames("animations/grossini.plist");
            cache.AddSpriteFrames("animations/grossini_gray.plist", "animations/grossini_gray");

            // Create animations and actions.
            // Frames are named grossini_dance_01.png .. grossini_dance_14.png;
            // single-digit indices are zero-padded.
            var animFrames = new List<CCSpriteFrame>();
            string tmp = "";
            for (int j = 0; j < 14; j++)
            {
                string temp = "";
                if (j+1<10)
                {
                    temp = "0" + (j + 1);
                }
                else
                {
                    temp = (j + 1).ToString();
                }

                tmp = string.Format("grossini_dance_{0}.png", temp);
                CCSpriteFrame frame = cache[tmp];
                animFrames.Add(frame);
            }

            animation = new CCAnimation(animFrames, 0.3f);

            sprites = new CCSprite[numOfSprites];
            pointSprites = new CCSprite[numOfSprites];

            for (int i = 0; i < numOfSprites; i++)
            {
                // Animation using Sprite batch: each sprite gets its own
                // batch node; the marker is added to the scene directly.
                sprites[i] = new CCSprite("grossini_dance_01.png");
                pointSprites[i] = new CCSprite("Images/r1");

                CCSpriteBatchNode spritebatch = new CCSpriteBatchNode("animations/grossini");
                AddChild(spritebatch);
                AddChild(pointSprites[i], 200);
                spritebatch.AddChild(sprites[i], i);
            }
        }

        #endregion Constructors

        #region Setup content

        public override void OnEnter()
        {
            base.OnEnter();

            CCSize windowSize = Layer.VisibleBoundsWorldspace.Size;

            for (int i = 0; i < numOfSprites; i++)
            {
                // Spread sprites across the window at quarter-width steps.
                sprites[i].Position = new CCPoint(windowSize.Width / 4 * (i + 1), windowSize.Height / 2);
                pointSprites[i].Scale = 0.25f;
                pointSprites[i].Position = sprites[i].Position;

                // One anchor-point case per sprite:
                // bottom-left, center, top-right.
                switch(i)
                {
                    case 0:
                        sprites[i].AnchorPoint = new CCPoint(0, 0);
                        break;
                    case 1:
                        sprites[i].AnchorPoint = new CCPoint(0.5f, 0.5f);
                        break;
                    case 2:
                        sprites[i].AnchorPoint = new CCPoint(1, 1);
                        break;
                }

                sprites[i].RunAction(new CCRepeatForever(new CCAnimate(animation)));
                sprites[i].RunAction(new CCRepeatForever(new CCRotateBy (10, 360)));
            }
        }

        #endregion Setup content

        public override void OnExit()
        {
            base.OnExit();
            // Release the sprite frames loaded in the constructor.
            CCSpriteFrameCache cache = CCSpriteFrameCache.SharedSpriteFrameCache;
            cache.RemoveSpriteFrames("animations/grossini.plist");
            cache.RemoveSpriteFrames("animations/grossini_gray.plist");
        }
    }
}
haithemaraissia/CocosSharp
tests/tests/classes/tests/SpriteTest/SpriteBatchNodeOffsetAnchorRotation.cs
C#
mit
3,692
<?php
/**
 * Verifies that the Spyc YAML loader interprets indentation-based structure
 * correctly: nested maps, sequences of maps, multiline scalars, comments in
 * lists, and anchors/aliases.
 */
class IndentTest extends PHPUnit_Framework_TestCase
{
    /** @var array Parsed contents of the indent_1.yaml fixture. */
    protected $Y;

    protected function setUp()
    {
        $this->Y = Spyc::YAMLLoad(__DIR__ . "/indent_1.yaml");
    }

    public function testIndent_1()
    {
        $expected = array('child_1' => 2, 'child_2' => 0, 'child_3' => 1);
        $this->assertEquals($expected, $this->Y['root']);
    }

    public function testIndent_2()
    {
        $expected = array('child_1' => 1, 'child_2' => 2);
        $this->assertEquals($expected, $this->Y['root2']);
    }

    public function testIndent_3()
    {
        $expected = array(
            array(
                'resolutions' => array(1024 => 768, 1920 => 1200),
                'producer' => 'Nec',
            ),
        );
        $this->assertEquals($expected, $this->Y['display']);
    }

    public function testIndent_4()
    {
        $expected = array(
            array('resolutions' => array(1024 => 768)),
            array('resolutions' => array(1920 => 1200)),
        );
        $this->assertEquals($expected, $this->Y['displays']);
    }

    public function testIndent_5()
    {
        // Deeply nested mix of sequences and hashes.
        $expected = array(
            array(
                'row' => 0,
                'col' => 0,
                'headsets_affected' => array(
                    array(
                        'ports' => array(0),
                        'side' => 'left',
                    ),
                ),
                'switch_function' => array(
                    'ics_ptt' => true,
                ),
            ),
        );
        $this->assertEquals($expected, $this->Y['nested_hashes_and_seqs']);
    }

    public function testIndent_6()
    {
        $expected = array(
            'h' => array(
                array('a' => 'b', 'a1' => 'b1'),
                array('c' => 'd'),
            ),
        );
        $this->assertEquals($expected, $this->Y['easier_nest']);
    }

    public function testIndent_space()
    {
        // A scalar continued on a following, more-indented line.
        $this->assertEquals("By four\n spaces", $this->Y['one_space']);
    }

    public function testListAndComment()
    {
        $expected = array('one', 'two', 'three');
        $this->assertEquals($expected, $this->Y['list_and_comment']);
    }

    public function testAnchorAndAlias()
    {
        // The aliased mapping must resolve to the anchored content.
        $development = array(
            'database' => 'rails_dev',
            'adapter' => 'mysql',
            'host' => 'localhost',
        );
        $this->assertEquals($development, $this->Y['development']);
        $this->assertEquals(array(1 => 'abc'), $this->Y['zzz']);
    }
}
mustangostang/spyc
tests/IndentTest.php
PHP
mit
2,094
"""scons.Node.Alias Alias nodes. This creates a hash of global Aliases (dummy targets). """ # # Copyright (c) 2001 - 2014 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
# __revision__ = "src/engine/SCons/Node/Alias.py 2014/07/05 09:42:21 garyo"

import collections

import SCons.Errors
import SCons.Node
import SCons.Util


class AliasNameSpace(collections.UserDict):
    """Global registry mapping alias names to Alias nodes."""

    def Alias(self, name, **kw):
        # Return the Alias node for `name`, creating and caching a new one
        # if it does not exist yet.  A pre-built Alias node passed in as
        # `name` is returned unchanged.
        if isinstance(name, SCons.Node.Alias.Alias):
            return name
        try:
            a = self[name]
        except KeyError:
            a = SCons.Node.Alias.Alias(name, **kw)
            self[name] = a
        return a

    def lookup(self, name, **kw):
        # Non-creating lookup: the Alias node for `name`, or None.
        try:
            return self[name]
        except KeyError:
            return None

class AliasNodeInfo(SCons.Node.NodeInfoBase):
    # Per-node signature info persisted for Alias nodes; only the content
    # signature (csig) is stored.
    current_version_id = 1
    field_list = ['csig']

    def str_to_node(self, s):
        # Resolve a serialized name back into an Alias node via the global
        # namespace.
        return default_ans.Alias(s)

class AliasBuildInfo(SCons.Node.BuildInfoBase):
    # Build-record info for Alias nodes; no fields beyond the base class.
    current_version_id = 1

class Alias(SCons.Node.Node):
    """A dummy target node identified only by its name.

    Alias nodes live outside the filesystem; their "contents" are derived
    from the content signatures of their source nodes.
    """

    NodeInfo = AliasNodeInfo
    BuildInfo = AliasBuildInfo

    def __init__(self, name):
        SCons.Node.Node.__init__(self)
        self.name = name

    def str_for_display(self):
        return '"' + self.__str__() + '"'

    def __str__(self):
        return self.name

    def make_ready(self):
        # Compute (and cache) the content signature before building.
        self.get_csig()

    # Kept so convert() can restore real building after the default
    # no-op build() below.
    really_build = SCons.Node.Node.build
    is_up_to_date = SCons.Node.Node.children_are_up_to_date

    def is_under(self, dir):
        # Make Alias nodes get built regardless of
        # what directory scons was run from. Alias nodes
        # are outside the filesystem:
        return 1

    def get_contents(self):
        """The contents of an alias is the concatenation of the content
        signatures of all its sources."""
        childsigs = [n.get_csig() for n in self.children()]
        return ''.join(childsigs)

    def sconsign(self):
        """An Alias is not recorded in .sconsign files"""
        pass

    def changed_since_last_build(self, target, prev_ni):
        # Changed when the current content signature differs from the one
        # recorded on the previous build (or when none was recorded).
        cur_csig = self.get_csig()
        try:
            return cur_csig != prev_ni.csig
        except AttributeError:
            return 1

    def build(self):
        """A "builder" for aliases."""
        pass

    def convert(self):
        # Switch this node from the no-op build() above to real building
        # (used when an alias acquires an actual builder/action).
        try:
            del self.builder
        except AttributeError:
            pass
        self.reset_executor()
        self.build = self.really_build

    def get_csig(self):
        """
        Generate a node's content signature, the digested signature
        of its content.

        node - the node
        cache - alternate node to use for the signature cache
        returns - the content signature
        """
        # Return the cached signature when one exists.
        try:
            return self.ninfo.csig
        except AttributeError:
            pass

        contents = self.get_contents()
        csig = SCons.Util.MD5signature(contents)
        self.get_ninfo().csig = csig
        return csig

# Module-level singleton namespace; registered as a node-lookup source so
# strings naming aliases resolve to Alias nodes.
default_ans = AliasNameSpace()

SCons.Node.arg2nodes_lookups.append(default_ans.lookup)

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
dezelin/scons
scons-local/SCons/Node/Alias.py
Python
mit
4,197
'use strict'; var _ = require('lodash'); var webpack = require('webpack'); var mergeWebpackConfig = function (config) { // Load webpackConfig only when using `grunt:webpack` // load of grunt tasks is faster var webpackConfig = require('./webpack.config'); return _.merge({}, webpackConfig, config, function (a, b) { if (_.isArray(a)) { return a.concat(b); } }); }; module.exports = function(grunt) { grunt.initConfig({ pkg: grunt.file.readJSON('package.json'), sass: { min: { files: { 'dist/react-datepicker.css': 'src/stylesheets/datepicker.scss' }, options: { sourcemap: 'none', style: 'expanded' } }, unmin: { files: { 'dist/react-datepicker.min.css': 'src/stylesheets/datepicker.scss' }, options: { sourcemap: 'none', style: 'compressed' } } }, watch: { jshint: { files: ['src/**/*.js', 'src/**/*.jsx'], tasks: ['jshint'] }, jest: { files: ['src/**/*.jsx', 'src/**/*.js', 'test/**/*.js'], tasks: ['jest'] }, css: { files: '**/*.scss', tasks: ['sass'] }, webpack: { files: ['src/**/*.js', 'src/**/*.jsx'], tasks: ['webpack'] } }, scsslint: { files: 'src/stylesheets/*.scss', options: { config: '.scss-lint.yml', colorizeOutput: true } }, jshint: { all: ['src/**/*.jsx', 'src/**/*.js'], options: { eqnull: true } }, webpack: { example: { entry: './example/boot', output: { filename: 'example.js', library: 'ExampleApp', path: './example/' }, resolve: { extensions: ['', '.js', '.jsx'] }, module: { loaders: [ {test: /\.js/, loaders: ['babel-loader'], exclude: /node_modules/} ] }, node: {Buffer: false}, plugins: [ new webpack.optimize.DedupePlugin(), new webpack.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV) }) ] }, unmin: mergeWebpackConfig({ output: { filename: 'react-datepicker.js' } }), min: mergeWebpackConfig({ output: { filename: 'react-datepicker.min.js' }, plugins: [ new webpack.optimize.UglifyJsPlugin({ compressor: { warnings: false } }) ] }) } }); grunt.loadNpmTasks('grunt-contrib-sass'); grunt.loadNpmTasks('grunt-scss-lint'); 
grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-jsxhint'); grunt.loadNpmTasks('grunt-webpack'); grunt.registerTask('default', ['watch', 'scsslint']); grunt.registerTask('travis', ['jshint', 'jest', 'scsslint']); grunt.registerTask('build', ['jshint', 'scsslint', 'webpack', 'sass']); grunt.registerTask('jest', require('./grunt/tasks/jest')); };
lCharlie123l/react-datepicker
gruntfile.js
JavaScript
mit
3,123
module Gws::Qna
  # Wires the Q&A module into the Gws role/permission system.
  #
  # Evaluated once at load time: registers every role permission the module
  # exposes and installs the hook deciding whether a user may use it.
  class Initializer
    # The module itself can be enabled per role.
    Gws::Role.permission :use_gws_qna, module_name: 'gws/qna'

    # Posts support the full action set, including trashing. Scope `other`
    # covers items owned by other users; `private` covers the user's own.
    %w(read edit delete trash).each do |action|
      %w(other private).each do |scope|
        Gws::Role.permission :"#{action}_#{scope}_gws_qna_posts", module_name: 'gws/qna'
      end
    end

    # Categories support the same scopes but cannot be trashed.
    %w(read edit delete).each do |action|
      %w(other private).each do |scope|
        Gws::Role.permission :"#{action}_#{scope}_gws_qna_categories", module_name: 'gws/qna'
      end
    end

    # The module is usable by a user on a site only when :use is permitted.
    Gws.module_usable :qna do |site, user|
      Gws::Qna.allowed?(:use, user, site: site)
    end
  end
end
itowtips/shirasagi
app/models/gws/qna/initializer.rb
Ruby
mit
1,314
var ArrayProto = Array.prototype; var ObjProto = Object.prototype; var escapeMap = { '&': '&amp;', '"': '&quot;', "'": '&#39;', "<": '&lt;', ">": '&gt;' }; var escapeRegex = /[&"'<>]/g; var lookupEscape = function(ch) { return escapeMap[ch]; }; var exports = module.exports = {}; exports.withPrettyErrors = function(path, withInternals, func) { try { return func(); } catch (e) { if (!e.Update) { // not one of ours, cast it e = new exports.TemplateError(e); } e.Update(path); // Unless they marked the dev flag, show them a trace from here if (!withInternals) { var old = e; e = new Error(old.message); e.name = old.name; } throw e; } }; exports.TemplateError = function(message, lineno, colno) { var err = this; if (message instanceof Error) { // for casting regular js errors err = message; message = message.name + ": " + message.message; } else { if(Error.captureStackTrace) { Error.captureStackTrace(err); } } err.name = 'Template render error'; err.message = message; err.lineno = lineno; err.colno = colno; err.firstUpdate = true; err.Update = function(path) { var message = "(" + (path || "unknown path") + ")"; // only show lineno + colno next to path of template // where error occurred if (this.firstUpdate) { if(this.lineno && this.colno) { message += ' [Line ' + this.lineno + ', Column ' + this.colno + ']'; } else if(this.lineno) { message += ' [Line ' + this.lineno + ']'; } } message += '\n '; if (this.firstUpdate) { message += ' '; } this.message = message + (this.message || ''); this.firstUpdate = false; return this; }; return err; }; exports.TemplateError.prototype = Error.prototype; exports.escape = function(val) { return val.replace(escapeRegex, lookupEscape); }; exports.isFunction = function(obj) { return ObjProto.toString.call(obj) == '[object Function]'; }; exports.isArray = Array.isArray || function(obj) { return ObjProto.toString.call(obj) == '[object Array]'; }; exports.isString = function(obj) { return ObjProto.toString.call(obj) == '[object String]'; }; 
exports.isObject = function(obj) { return ObjProto.toString.call(obj) == '[object Object]'; }; exports.groupBy = function(obj, val) { var result = {}; var iterator = exports.isFunction(val) ? val : function(obj) { return obj[val]; }; for(var i=0; i<obj.length; i++) { var value = obj[i]; var key = iterator(value, i); (result[key] || (result[key] = [])).push(value); } return result; }; exports.toArray = function(obj) { return Array.prototype.slice.call(obj); }; exports.without = function(array) { var result = []; if (!array) { return result; } var index = -1, length = array.length, contains = exports.toArray(arguments).slice(1); while(++index < length) { if(exports.indexOf(contains, array[index]) === -1) { result.push(array[index]); } } return result; }; exports.extend = function(obj, obj2) { for(var k in obj2) { obj[k] = obj2[k]; } return obj; }; exports.repeat = function(char_, n) { var str = ''; for(var i=0; i<n; i++) { str += char_; } return str; }; exports.each = function(obj, func, context) { if(obj == null) { return; } if(ArrayProto.each && obj.each == ArrayProto.each) { obj.forEach(func, context); } else if(obj.length === +obj.length) { for(var i=0, l=obj.length; i<l; i++) { func.call(context, obj[i], i, obj); } } }; exports.map = function(obj, func) { var results = []; if(obj == null) { return results; } if(ArrayProto.map && obj.map === ArrayProto.map) { return obj.map(func); } for(var i=0; i<obj.length; i++) { results[results.length] = func(obj[i], i); } if(obj.length === +obj.length) { results.length = obj.length; } return results; }; exports.asyncIter = function(arr, iter, cb) { var i = -1; function next() { i++; if(i < arr.length) { iter(arr[i], i, next, cb); } else { cb(); } } next(); }; exports.asyncFor = function(obj, iter, cb) { var keys = exports.keys(obj); var len = keys.length; var i = -1; function next() { i++; var k = keys[i]; if(i < len) { iter(k, obj[k], i, len, next); } else { cb(); } } next(); }; // 
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/indexOf#Polyfill exports.indexOf = Array.prototype.indexOf ? function (arr, searchElement, fromIndex) { return Array.prototype.indexOf.call(arr, searchElement, fromIndex); } : function (arr, searchElement, fromIndex) { var length = this.length >>> 0; // Hack to convert object.length to a UInt32 fromIndex = +fromIndex || 0; if(Math.abs(fromIndex) === Infinity) { fromIndex = 0; } if(fromIndex < 0) { fromIndex += length; if (fromIndex < 0) { fromIndex = 0; } } for(;fromIndex < length; fromIndex++) { if (arr[fromIndex] === searchElement) { return fromIndex; } } return -1; }; if(!Array.prototype.map) { Array.prototype.map = function() { throw new Error("map is unimplemented for this js engine"); }; } exports.keys = function(obj) { if(Object.prototype.keys) { return obj.keys(); } else { var keys = []; for(var k in obj) { if(obj.hasOwnProperty(k)) { keys.push(k); } } return keys; } }
angyukai/boulderactive2016-landing-page
node_modules/nunjucks/src/lib.js
JavaScript
mit
6,222
/** * * Apache License * Version 2.0, January 2004 * http://www.apache.org/licenses/ * * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION * * 1. Definitions. * * "License" shall mean the terms and conditions for use, reproduction, * and distribution as defined by Sections 1 through 9 of this document. * * "Licensor" shall mean the copyright owner or entity authorized by * the copyright owner that is granting the License. * * "Legal Entity" shall mean the union of the acting entity and all * other entities that control, are controlled by, or are under common * control with that entity. For the purposes of this definition, * "control" means (i) the power, direct or indirect, to cause the * direction or management of such entity, whether by contract or * otherwise, or (ii) ownership of fifty percent (50%) or more of the * outstanding shares, or (iii) beneficial ownership of such entity. * * "You" (or "Your") shall mean an individual or Legal Entity * exercising permissions granted by this License. * * "Source" form shall mean the preferred form for making modifications, * including but not limited to software source code, documentation * source, and configuration files. * * "Object" form shall mean any form resulting from mechanical * transformation or translation of a Source form, including but * not limited to compiled object code, generated documentation, * and conversions to other media types. * * "Work" shall mean the work of authorship, whether in Source or * Object form, made available under the License, as indicated by a * copyright notice that is included in or attached to the work * (an example is provided in the Appendix below). * * "Derivative Works" shall mean any work, whether in Source or Object * form, that is based on (or derived from) the Work and for which the * editorial revisions, annotations, elaborations, or other modifications * represent, as a whole, an original work of authorship. 
For the purposes * of this License, Derivative Works shall not include works that remain * separable from, or merely link (or bind by name) to the interfaces of, * the Work and Derivative Works thereof. * * "Contribution" shall mean any work of authorship, including * the original version of the Work and any modifications or additions * to that Work or Derivative Works thereof, that is intentionally * submitted to Licensor for inclusion in the Work by the copyright owner * or by an individual or Legal Entity authorized to submit on behalf of * the copyright owner. For the purposes of this definition, "submitted" * means any form of electronic, verbal, or written communication sent * to the Licensor or its representatives, including but not limited to * communication on electronic mailing lists, source code control systems, * and issue tracking systems that are managed by, or on behalf of, the * Licensor for the purpose of discussing and improving the Work, but * excluding communication that is conspicuously marked or otherwise * designated in writing by the copyright owner as "Not a Contribution." * * "Contributor" shall mean Licensor and any individual or Legal Entity * on behalf of whom a Contribution has been received by Licensor and * subsequently incorporated within the Work. * * 2. Grant of Copyright License. Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * copyright license to reproduce, prepare Derivative Works of, * publicly display, publicly perform, sublicense, and distribute the * Work and such Derivative Works in Source or Object form. * * 3. Grant of Patent License. 
Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * (except as stated in this section) patent license to make, have made, * use, offer to sell, sell, import, and otherwise transfer the Work, * where such license applies only to those patent claims licensable * by such Contributor that are necessarily infringed by their * Contribution(s) alone or by combination of their Contribution(s) * with the Work to which such Contribution(s) was submitted. If You * institute patent litigation against any entity (including a * cross-claim or counterclaim in a lawsuit) alleging that the Work * or a Contribution incorporated within the Work constitutes direct * or contributory patent infringement, then any patent licenses * granted to You under this License for that Work shall terminate * as of the date such litigation is filed. * * 4. Redistribution. You may reproduce and distribute copies of the * Work or Derivative Works thereof in any medium, with or without * modifications, and in Source or Object form, provided that You * meet the following conditions: * * (a) You must give any other recipients of the Work or * Derivative Works a copy of this License; and * * (b) You must cause any modified files to carry prominent notices * stating that You changed the files; and * * (c) You must retain, in the Source form of any Derivative Works * that You distribute, all copyright, patent, trademark, and * attribution notices from the Source form of the Work, * excluding those notices that do not pertain to any part of * the Derivative Works; and * * (d) If the Work includes a "NOTICE" text file as part of its * distribution, then any Derivative Works that You distribute must * include a readable copy of the attribution notices contained * within such NOTICE file, excluding those notices that do not * pertain to any part of the Derivative Works, in at least one * of 
the following places: within a NOTICE text file distributed * as part of the Derivative Works; within the Source form or * documentation, if provided along with the Derivative Works; or, * within a display generated by the Derivative Works, if and * wherever such third-party notices normally appear. The contents * of the NOTICE file are for informational purposes only and * do not modify the License. You may add Your own attribution * notices within Derivative Works that You distribute, alongside * or as an addendum to the NOTICE text from the Work, provided * that such additional attribution notices cannot be construed * as modifying the License. * * You may add Your own copyright statement to Your modifications and * may provide additional or different license terms and conditions * for use, reproduction, or distribution of Your modifications, or * for any such Derivative Works as a whole, provided Your use, * reproduction, and distribution of the Work otherwise complies with * the conditions stated in this License. * * 5. Submission of Contributions. Unless You explicitly state otherwise, * any Contribution intentionally submitted for inclusion in the Work * by You to the Licensor shall be under the terms and conditions of * this License, without any additional terms or conditions. * Notwithstanding the above, nothing herein shall supersede or modify * the terms of any separate license agreement you may have executed * with Licensor regarding such Contributions. * * 6. Trademarks. This License does not grant permission to use the trade * names, trademarks, service marks, or product names of the Licensor, * except as required for reasonable and customary use in describing the * origin of the Work and reproducing the content of the NOTICE file. * * 7. Disclaimer of Warranty. 
Unless required by applicable law or * agreed to in writing, Licensor provides the Work (and each * Contributor provides its Contributions) on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied, including, without limitation, any warranties or conditions * of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A * PARTICULAR PURPOSE. You are solely responsible for determining the * appropriateness of using or redistributing the Work and assume any * risks associated with Your exercise of permissions under this License. * * 8. Limitation of Liability. In no event and under no legal theory, * whether in tort (including negligence), contract, or otherwise, * unless required by applicable law (such as deliberate and grossly * negligent acts) or agreed to in writing, shall any Contributor be * liable to You for damages, including any direct, indirect, special, * incidental, or consequential damages of any character arising as a * result of this License or out of the use or inability to use the * Work (including but not limited to damages for loss of goodwill, * work stoppage, computer failure or malfunction, or any and all * other commercial damages or losses), even if such Contributor * has been advised of the possibility of such damages. * * 9. Accepting Warranty or Additional Liability. While redistributing * the Work or Derivative Works thereof, You may choose to offer, * and charge a fee for, acceptance of support, warranty, indemnity, * or other liability obligations and/or rights consistent with this * License. However, in accepting such obligations, You may act only * on Your own behalf and on Your sole responsibility, not on behalf * of any other Contributor, and only if You agree to indemnify, * defend, and hold each Contributor harmless for any liability * incurred by, or claims asserted against, such Contributor by reason * of your accepting any such warranty or additional liability. 
* * END OF TERMS AND CONDITIONS * * APPENDIX: How to apply the Apache License to your work. * * To apply the Apache License to your work, attach the following * boilerplate notice, with the fields enclosed by brackets "[]" * replaced with your own identifying information. (Don't include * the brackets!) The text should be enclosed in the appropriate * comment syntax for the file format. We also recommend that a * file or class name and description of purpose be included on the * same "printed page" as the copyright notice for easier * identification within third-party archives. * * Copyright 2016 Alibaba Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */
package com.taobao.weex.ui.view.listview;

import android.content.Context;
import android.support.annotation.Nullable;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.OrientationHelper;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.view.MotionEvent;

import com.taobao.weex.common.WXThread;
import com.taobao.weex.ui.view.gesture.WXGesture;
import com.taobao.weex.ui.view.gesture.WXGestureObservable;

/**
 * {@link RecyclerView} variant used by Weex list components. It supports three
 * layout styles (linear, grid, staggered grid), forwards touch events to an
 * optionally registered {@link WXGesture}, and allows scrolling to be disabled
 * entirely via {@link #setScrollable(boolean)}.
 */
public class WXRecyclerView extends RecyclerView implements WXGestureObservable {

  // Layout type constants accepted by initView().
  public static final int TYPE_LINEAR_LAYOUT = 1;
  public static final int TYPE_GRID_LAYOUT = 2;
  public static final int TYPE_STAGGERED_GRID_LAYOUT = 3;

  // Gesture handler that receives touch events after RecyclerView; may be null.
  private WXGesture mGesture;
  // When false, onTouchEvent() consumes every event without scrolling.
  private boolean scrollable = true;

  public WXRecyclerView(Context context) {
    super(context);
  }

  public boolean isScrollable() {
    return scrollable;
  }

  public void setScrollable(boolean scrollable) {
    this.scrollable = scrollable;
  }

  /**
   * Wraps the runnable via {@link WXThread#secure} before scheduling it.
   * NOTE(review): WXThread.secure's exact behavior is not visible here —
   * presumably it guards the runnable against uncaught exceptions; confirm.
   */
  @Override
  public boolean postDelayed(Runnable action, long delayMillis) {
    return super.postDelayed(WXThread.secure(action), delayMillis);
  }

  /**
   * Installs a layout manager matching {@code type}. Grid and staggered-grid
   * layouts are fixed at 2 spans. Unknown types leave the view without a
   * layout manager.
   *
   * @param context
   * @param type one of TYPE_LINEAR_LAYOUT, TYPE_GRID_LAYOUT, TYPE_STAGGERED_GRID_LAYOUT
   * @param orientation should be {@link OrientationHelper#HORIZONTAL} or {@link OrientationHelper#VERTICAL}
   */
  public void initView(Context context, int type,int orientation) {
    if (type == TYPE_GRID_LAYOUT) {
      setLayoutManager(new GridLayoutManager(context, 2,orientation,false));
    } else if (type == TYPE_STAGGERED_GRID_LAYOUT) {
      setLayoutManager(new StaggeredGridLayoutManager(2, orientation));
    } else if (type == TYPE_LINEAR_LAYOUT) {
      // Anonymous LinearLayoutManager that disables predictive animations and
      // swallows layout/scroll exceptions rather than crashing the host app.
      setLayoutManager(new LinearLayoutManager(context,orientation,false){
        @Override
        public boolean supportsPredictiveItemAnimations() {
          return false;
        }

        // NOTE(review): missing @Override annotation; this shadows/overrides
        // LayoutManager.onLayoutChildren. The IndexOutOfBoundsException is
        // deliberately swallowed — presumably a workaround for RecyclerView
        // "Inconsistency detected" crashes; confirm before changing.
        public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) {
          try {
            super.onLayoutChildren(recycler, state);
          } catch (IndexOutOfBoundsException e) {
            e.printStackTrace();
          }
        }

        // Same defensive pattern for vertical scrolling: any exception is
        // logged and treated as "scrolled by 0 px".
        @Override
        public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerView.State state) {
          try {
            return super.scrollVerticallyBy(dy, recycler, state);
          } catch (Exception e) {
            e.printStackTrace();
          }
          return 0;
        }
      });
    }
  }

  @Override
  public void registerGestureListener(@Nullable WXGesture wxGesture) {
    mGesture = wxGesture;
  }

  /**
   * Consumes all touch events when scrolling is disabled; otherwise lets
   * RecyclerView handle the event first and then offers it to the registered
   * gesture handler.
   */
  @Override
  public boolean onTouchEvent(MotionEvent event) {
    if(!scrollable) {
      // Swallow the event so the list neither scrolls nor flings.
      return true;
    }
    boolean result = super.onTouchEvent(event);
    if (mGesture != null) {
      result |= mGesture.onTouch(this, event);
    }
    return result;
  }
}
erguotou520/weex-uikit
platforms/android/WeexSDK/src/main/java/com/taobao/weex/ui/view/listview/WXRecyclerView.java
Java
mit
14,865
require 'helper' require 'faraday_middleware/response/parse_dates' require 'json' describe FaradayMiddleware::ParseDates, :type => :response do let(:parsed){ if RUBY_VERSION > "1.9" "2012-02-01 13:14:15 UTC" else "Wed Feb 01 13:14:15 UTC 2012" end } it "parses dates" do expect(process({"x" => "2012-02-01T13:14:15Z"}).body["x"].to_s).to eq(parsed) end it "parses dates with milliseconds" do date_str = "2012-02-01T13:14:15.123Z" expect(process({"x" => date_str}).body["x"]).to eq(Time.parse(date_str)) end it "parses nested dates in hash" do expect(process({"x" => {"y" => "2012-02-01T13:14:15Z"}}).body["x"]["y"].to_s).to eq(parsed) end it "parses nested dates in arrays" do expect(process({"x" => [{"y" =>"2012-02-01T13:14:15Z"}]}).body["x"][0]["y"].to_s).to eq(parsed) end it "returns nil when body is empty" do expect(process(nil).body).to eq(nil) end it "leaves arrays with ids alone" do expect(process({"x" => [1,2,3]}).body).to eq({"x" => [1,2,3]}) end it "does not parse date-like things" do expect(process({"x" => "2012-02-01T13:14:15Z bla"}).body["x"].to_s).to eq "2012-02-01T13:14:15Z bla" expect(process({"x" => "12012-02-01T13:14:15Z"}).body["x"].to_s).to eq "12012-02-01T13:14:15Z" expect(process({"x" => "2012-02-01T13:14:15Z\nfoo"}).body["x"].to_s).to eq "2012-02-01T13:14:15Z\nfoo" end end
BigAppleSoftball/ratingsManager
vendor/bundle/ruby/2.0.0/gems/faraday_middleware-0.9.1/spec/parse_dates_spec.rb
Ruby
mit
1,412
// Compile-time exercise of the react-d3-graph typings: instantiates Graph,
// Link and Node with (nearly) the full prop/config surface. This file is
// type-checked only; it is not meant to be executed.
import * as React from 'react';
import { Graph, Link, Node } from 'react-d3-graph';

// Sample nodes covering the optional per-node fields (labelProperty, name, size).
const nodes = [
  { id: 'node1', labelProperty: 'id' },
  { id: 'node2', name: 'node2Name' },
  {
    id: 'node3',
    size: {
      width: 100,
      height: 200,
    },
  },
];

// Node/link element types are derived from the sample data above.
type INode = typeof nodes[number];

const links = [{ source: 'node1', target: 'node2' }];
type ILink = typeof links[number];

export class Example extends React.Component {
  // Ref to the underlying Graph instance, parameterised with the data types.
  ref: React.MutableRefObject<Graph<INode, ILink> | null> = React.createRef();

  render(): React.ReactElement {
    return (
      <div>
        <Graph
          ref={this.ref}
          id="test"
          data={{
            nodes,
            links,
            focusedNodeId: 'node1',
          }}
          config={{
            node: {
              color: 'green',
              fontColor: 'blue',
              opacity: 0.5,
              renderLabel: true,
              size: 100,
              strokeColor: 'white',
              strokeWidth: 100,
              svg: '<line />',
              symbolType: 'circle',
              // Function-valued options must receive the typed node.
              viewGenerator: node => <div>{node.name}</div>,
              labelProperty: node => node.name || 'No name',
            },
            link: {
              fontSize: 10,
              fontWeight: 'bold',
              highlightColor: '#fff',
              highlightFontWeight: '100',
              labelProperty: () => 'Label',
              renderLabel: true,
              semanticStrokeWidth: true,
              markerHeight: 100,
              type: 'circle',
              mouseCursor: 'pointer',
            },
            automaticRearrangeAfterDropNode: true,
            collapsible: true,
            directed: true,
            focusZoom: 5,
            focusAnimationDuration: 10,
            height: 10,
            nodeHighlightBehavior: true,
            linkHighlightBehavior: true,
            highlightDegree: 2,
            highlightOpacity: 0.5,
            maxZoom: 3.0,
            minZoom: 0.5,
            initialZoom: 2.0,
            panAndZoom: false,
            staticGraph: true,
            staticGraphWithDragAndDrop: true,
            width: 100,
            d3: {
              alphaTarget: 1.0,
              gravity: 9.8,
              linkLength: 10,
              linkStrength: 10,
              disableLinkForce: true,
            },
          }}
          // Event callbacks: signatures checked against the typings.
          onClickGraph={(event: MouseEvent) => {}}
          onClickNode={(nodeId: string) => {}}
          onDoubleClickNode={(nodeId: string) => {}}
          onRightClickNode={(event: MouseEvent, nodeId: string) => {}}
          onMouseOverNode={(nodeId: string) => {}}
          onMouseOutNode={(nodeId: string) => {}}
          onClickLink={(source: string, target: string) => {}}
          onRightClickLink={(event: MouseEvent, source: string, target: string) => {}}
          onMouseOverLink={(source: string, target: string) => {}}
          onMouseOutLink={(source: string, target: string) => {}}
          onNodePositionChange={(nodeId: string, x: number, y: number) => {}}
          onZoomChange={(previousZoom: number, newZoom: number) => {}}
        />
        {/* Link and Node are exported components; render them bare to
            type-check their (optional) props. */}
        <Link />
        <Node />
      </div>
    );
  }
}
markogresak/DefinitelyTyped
types/react-d3-graph/react-d3-graph-tests.tsx
TypeScript
mit
4,139
// jQuery UI widget: a horizontal/vertical slider with an optional value hint,
// segment colors, and a configurable onChange callback. Reads its options
// from data-* attributes on the element. Relies on a global isTouchDevice()
// helper defined elsewhere in Metro UI.
$.widget("metro.slider", {
    version: "3.0.0",

    options: {
        position: 0,          // marker position in internal percent units [min..max]
        accuracy: 0,          // snap step for positions; 0 = no snapping
        color: 'default',     // track color: CSS color or class name
        completeColor: 'default', // filled-part color: CSS color or class name
        markerColor: 'default',   // marker color: CSS color or class name
        colors: false,        // comma-separated list of segment colors, or false
        showHint: false,      // show a floating value hint near the marker
        permanentHint: false, // keep the hint always visible
        hintPosition: 'top',  // 'top'/'bottom' (horizontal) or 'left' (vertical)
        vertical: false,
        min: 0,               // lower bound of the internal percent scale (0..100)
        max: 100,             // upper bound of the internal percent scale (0..100)
        animate: true,        // animate marker movement
        minValue: 0,          // real value mapped to position 0
        maxValue: 100,        // real value mapped to position 100
        currValue: 0,         // last computed real value
        returnType: 'value',  // 'value' -> real value; anything else -> raw position
        target: false,        // optional input selector kept in sync with the value
        onChange: function(value, slider){},
        // Internal geometry cache, refreshed by _initPoints().
        _slider : {
            vertical: false,
            offset: 0,  // page offset of the track
            length: 0,  // track length in px
            marker: 0,  // marker size in px
            ppp: 0,     // "percent per pixel" conversion factor
            start: 0,   // first usable cursor coordinate
            stop: 0     // last usable cursor coordinate
        }
    },

    // Read data-* options, clamp min/max/position, build the DOM and bind
    // the press handlers that start a drag.
    _create: function(){
        var that = this, element = this.element;
        var o = this.options, s = o._slider;

        // Copy matching data-* attributes into options, JSON-parsing when possible.
        $.each(element.data(), function(key, value){
            if (key in o) {
                try {
                    o[key] = $.parseJSON(value);
                } catch (e) {
                    o[key] = value;
                }
            }
        });

        // Clamp configuration into a consistent 0..100 range with min <= max.
        o.accuracy = o.accuracy < 0 ? 0 : o.accuracy;
        o.min = o.min < 0 ? 0 : o.min;
        o.min = o.min > o.max ? o.max : o.min;
        o.max = o.max > 100 ? 100 : o.max;
        o.max = o.max < o.min ? o.min : o.max;
        o.position = this._correctValue(element.data('position') > o.min ? (element.data('position') > o.max ? o.max : element.data('position')) : o.min);
        o.colors = o.colors ? o.colors.split(",") : false;
        s.vertical = o.vertical;

        // Mirror option flags as CSS classes.
        if (o.vertical && !element.hasClass('vertical')) {
            element.addClass('vertical');
        }
        if (o.permanentHint && !element.hasClass('permanent-hint')) {
            element.addClass('permanent-hint');
        }
        if (!o.vertical && o.hintPosition === 'bottom') {
            element.addClass('hint-bottom');
        }
        if (o.vertical && o.hintPosition === 'left') {
            element.addClass('hint-left');
        }

        this._createSlider();
        this._initPoints();
        this._placeMarker(o.position);

        // A press on the marker or anywhere on the track starts a drag.
        var event_down = isTouchDevice() ? 'touchstart' : 'mousedown';
        element.children('.marker').on(event_down, function (e) {
            e.preventDefault();
            that._startMoveMarker(e);
        });
        element.on(event_down, function (e) {
            e.preventDefault();
            that._startMoveMarker(e);
        });

        element.data('slider', this);
    },

    // Begin a drag: bind move/up handlers, refresh geometry and process the
    // initial press position immediately.
    // NOTE(review): the "up" handler unbinds 'mousemove'/'mouseup' literally,
    // while the bound names may be touch events — on touch devices the move
    // handler appears to stay bound; confirm whether this is intentional.
    _startMoveMarker: function(e){
        var element = this.element, o = this.options, that = this, hint = element.children('.slider-hint');
        var returnedValue;
        var event_move = isTouchDevice() ? 'touchmove' : 'mousemove';
        var event_up = isTouchDevice() ? 'touchend' : 'mouseup mouseleave';

        $(element).on(event_move, function (event) {
            that._movingMarker(event);
            if (!element.hasClass('permanent-hint')) {
                hint.css('display', 'block');
            }
        });
        $(element).on(event_up, function () {
            $(element).off('mousemove');
            $(element).off('mouseup');
            element.data('value', o.position);
            element.trigger('changed', o.position);
            returnedValue = o.returnType === 'value' ? that._valueToRealValue(o.position) : o.position;
            if (!element.hasClass('permanent-hint')) {
                hint.css('display', 'none');
            }
        });

        this._initPoints();
        this._movingMarker(e);
    },

    // Translate a pointer event into a clamped position, move the marker and
    // fire the onChange callback (function, global function name, or code
    // string).
    _movingMarker: function (ev) {
        var element = this.element, o = this.options;
        var cursorPos, percents, valuePix,
            vertical = o._slider.vertical,
            sliderOffset = o._slider.offset,
            sliderStart = o._slider.start,
            sliderEnd = o._slider.stop,
            sliderLength = o._slider.length,
            markerSize = o._slider.marker;

        // For touch events the coordinates live on the first touch point.
        var event = !isTouchDevice() ? ev.originalEvent : ev.originalEvent.touches[0];

        if (vertical) {
            cursorPos = event.pageY - sliderOffset;
        } else {
            cursorPos = event.pageX - sliderOffset;
        }

        // Clamp the cursor to the usable part of the track.
        if (cursorPos < sliderStart) {
            cursorPos = sliderStart;
        } else if (cursorPos > sliderEnd) {
            cursorPos = sliderEnd;
        }

        // Vertical sliders grow from the bottom, so invert the coordinate.
        if (vertical) {
            valuePix = sliderLength - cursorPos - markerSize / 2;
        } else {
            valuePix = cursorPos - markerSize / 2;
        }

        percents = this._pixToPerc(valuePix);
        this._placeMarker(percents);
        o.currValue = this._valueToRealValue(percents);
        o.position = percents;

        var returnedValue = o.returnType === 'value' ? this._valueToRealValue(o.position) : o.position;

        if (o.target) {
            $(o.target).val(returnedValue);
        }

        // onChange may be a function, the name of a global function, or —
        // NOTE(review) — arbitrary code evaluated via eval(); the eval branch
        // is a code-injection risk if option values can come from untrusted
        // markup.
        if (typeof o.onChange === 'function') {
            o.onChange(returnedValue, element);
        } else {
            if (typeof window[o.onChange] === 'function') {
                window[o.onChange](returnedValue, element);
            } else {
                var result = eval("(function(){"+o.onChange+"})");
                result.call(returnedValue, element);
            }
        }
    },

    // Move the marker and the filled track to `value` (percent units),
    // recolor the filled part when segment colors are configured, and update
    // the hint text/position.
    _placeMarker: function (value) {
        var size, size2, o = this.options, colorParts, colorIndex = 0, colorDelta,
            element = this.element,
            marker = this.element.children('.marker'),
            complete = this.element.children('.complete'),
            hint = this.element.children('.slider-hint'),
            hintValue,
            oldPos = this._percToPix(o.position);

        // NOTE(review): when o.colors is false, `false.length` evaluates to
        // undefined, so colorParts is falsy and the recolor branch is skipped.
        colorParts = o.colors.length;
        colorDelta = o._slider.length / colorParts;

        if (o._slider.vertical) {
            var oldSize = this._percToPix(o.position) + o._slider.marker,
                oldSize2 = o._slider.length - oldSize;
            size = this._percToPix(value) + o._slider.marker;
            size2 = o._slider.length - size;
            this._animate(marker.css('top', oldSize2),{top: size2});
            this._animate(complete.css('height', oldSize),{height: size});
            if (colorParts) {
                colorIndex = Math.round(size / colorDelta)-1;
                complete.css('background-color', o.colors[colorIndex<0?0:colorIndex]);
            }
            if (o.showHint) {
                hintValue = this._valueToRealValue(value);
                hint.html(hintValue).css('top', size2 - hint.height()/2 + (element.hasClass('large') ? 8 : 0));
            }
        } else {
            size = this._percToPix(value);
            this._animate(marker.css('left', oldPos),{left: size});
            this._animate(complete.css('width', oldPos),{width: size});
            if (colorParts) {
                colorIndex = Math.round(size / colorDelta)-1;
                complete.css('background-color', o.colors[colorIndex<0?0:colorIndex]);
            }
            if (o.showHint) {
                hintValue = this._valueToRealValue(value);
                hint.html(hintValue).css({left: size - hint.width() / 2 + (element.hasClass('large') ? 6 : 0)});
            }
        }
    },

    // Map an internal percent position onto the [minValue, maxValue] range,
    // rounded to an integer.
    _valueToRealValue: function(value){
        var o = this.options;
        var real_value;
        var percent_value = (o.maxValue - o.minValue) / 100;
        real_value = value * percent_value + o.minValue;
        return Math.round(real_value);
    },

    // Apply CSS `val` to `obj`, animated when the animate option is on.
    _animate: function (obj, val) {
        var o = this.options;
        if(o.animate) {
            obj.stop(true).animate(val);
        } else {
            obj.css(val);
        }
    },

    // Pixel offset -> percent position, snapped via _correctValue.
    _pixToPerc: function (valuePix) {
        var valuePerc;
        valuePerc = valuePix * this.options._slider.ppp;
        return Math.round(this._correctValue(valuePerc));
    },

    // Percent position -> pixel offset (0 when geometry is not initialized).
    _percToPix: function (value) {
        if (this.options._slider.ppp === 0) {
            return 0;
        }
        return Math.round(value / this.options._slider.ppp);
    },

    // Snap `value` to the accuracy grid and clamp to [min, max].
    _correctValue: function (value) {
        var o = this.options;
        var accuracy = o.accuracy;
        var max = o.max;
        var min = o.min;
        if (accuracy === 0) {
            return value;
        }
        if (value === max) {
            return max;
        }
        if (value === min) {
            return min;
        }
        value = Math.floor(value / accuracy) * accuracy + Math.round(value % accuracy / accuracy) * accuracy;
        if (value > max) {
            return max;
        }
        if (value < min) {
            return min;
        }
        return value;
    },

    // Refresh the geometry cache from the element's current size/offset.
    _initPoints: function(){
        var o = this.options, s = o._slider, element = this.element;
        if (s.vertical) {
            s.offset = element.offset().top;
            s.length = element.height();
            s.marker = element.children('.marker').height();
        } else {
            s.offset = element.offset().left;
            s.length = element.width();
            s.marker = element.children('.marker').width();
        }
        s.ppp = o.max / (s.length - s.marker);
        s.start = s.marker / 2;
        s.stop = s.length - s.marker / 2;
    },

    // Build the slider DOM (filled track, marker, optional hint) and apply
    // the configured colors. String#isColor is a Metro UI extension.
    _createSlider: function(){
        var element = this.element, o = this.options, complete, marker, hint;

        element.html('');
        complete = $("<div/>").addClass("complete").appendTo(element);
        marker = $("<a/>").addClass("marker").appendTo(element);
        if (o.showHint) {
            hint = $("<span/>").addClass("slider-hint").appendTo(element);
        }

        if (o.color !== 'default') {
            if (o.color.isColor()) {
                element.css('background-color', o.color);
            } else {
                element.addClass(o.color);
            }
        }
        if (o.completeColor !== 'default') {
            if (o.completeColor.isColor()) {
                complete.css('background-color', o.completeColor);
            } else {
                complete.addClass(o.completeColor);
            }
        }
        if (o.markerColor !== 'default') {
            if (o.markerColor.isColor()) {
                marker.css('background-color', o.markerColor);
            } else {
                marker.addClass(o.markerColor);
            }
        }
    },

    // Public getter/setter. With an argument: clamp, move the marker and fire
    // onChange, returning `this` for chaining. Without: return the current
    // value (real value or raw position, per returnType).
    value: function (value) {
        var element = this.element, o = this.options, returnedValue;
        if (typeof value !== 'undefined') {
            value = value > o.max ? o.max : value;
            value = value < o.min ? o.min : value;
            this._placeMarker(parseInt(value));
            o.position = parseInt(value);
            returnedValue = o.returnType === 'value' ? this._valueToRealValue(o.position) : o.position;
            // Same onChange dispatch as in _movingMarker (see the eval note there).
            if (typeof o.onChange === 'function') {
                o.onChange(returnedValue, element);
            } else {
                if (typeof window[o.onChange] === 'function') {
                    window[o.onChange](returnedValue, element);
                } else {
                    var result = eval("(function(){"+o.onChange+"})");
                    result.call(returnedValue, element);
                }
            }
            return this;
        } else {
            returnedValue = o.returnType === 'value' ? this._valueToRealValue(o.position) : o.position;
            return returnedValue;
        }
    },

    _destroy: function(){},

    _setOption: function(key, value){
        this._super('_setOption', key, value);
    }
});
ckc/Metro-UI-CSS
js/widgets/slider.js
JavaScript
mit
11,712
/******************************************************************************* * Copyright (c) 2012-2017 Codenvy, S.A. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Codenvy, S.A. - initial API and implementation *******************************************************************************/ package org.eclipse.che.ide.editor.macro; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.runners.MockitoJUnitRunner; import static org.junit.Assert.assertSame; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.verify; /** * Unit tests for the {@link EditorCurrentProjectNameMacro} * * @author Vlad Zhukovskyi */ @RunWith(MockitoJUnitRunner.class) public class EditorCurrentProjectNameMacroTest extends AbstractEditorMacroTest { private EditorCurrentProjectNameMacro provider; @Override protected AbstractEditorMacro getProvider() { return provider; } @Before public void init() throws Exception { provider = new EditorCurrentProjectNameMacro(editorAgent, promiseProvider, localizationConstants); } @Test public void testGetKey() throws Exception { assertSame(provider.getName(), EditorCurrentProjectNameMacro.KEY); } @Test public void getValue() throws Exception { initEditorWithTestFile(); provider.expand(); verify(editorAgent).getActiveEditor(); verify(promiseProvider).resolve(eq(PROJECT_NAME)); } @Test public void getEmptyValue() throws Exception { provider.expand(); verify(editorAgent).getActiveEditor(); verify(promiseProvider).resolve(eq("")); } }
gazarenkov/che-sketch
ide/che-core-ide-app/src/test/java/org/eclipse/che/ide/editor/macro/EditorCurrentProjectNameMacroTest.java
Java
epl-1.0
1,916
/******************************************************************************* * Copyright (c) 2012-2017 Codenvy, S.A. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Codenvy, S.A. - initial API and implementation *******************************************************************************/ package org.eclipse.che.plugin.svn.shared; import org.eclipse.che.dto.shared.DTO; import javax.validation.constraints.NotNull; import java.util.List; @DTO public interface InfoResponse { /************************************************************************** * * Subversion command * **************************************************************************/ String getCommand(); void setCommand(@NotNull final String command); InfoResponse withCommand(@NotNull final String command); /************************************************************************** * * Execution output * **************************************************************************/ List<String> getOutput(); void setOutput(@NotNull final List<String> output); InfoResponse withOutput(@NotNull final List<String> output); /************************************************************************** * * Error output * **************************************************************************/ List<String> getErrorOutput(); void setErrorOutput(List<String> errorOutput); InfoResponse withErrorOutput(List<String> errorOutput); /************************************************************************** * * Item list * **************************************************************************/ List<SubversionItem> getItems(); void setItems(List<SubversionItem> items); InfoResponse withItems(List<SubversionItem> items); }
gazarenkov/che-sketch
plugins/plugin-svn/che-plugin-svn-ext-shared/src/main/java/org/eclipse/che/plugin/svn/shared/InfoResponse.java
Java
epl-1.0
2,096
/* * Copyright (C) 2013-2018 Team Kodi * This file is part of Kodi - https://kodi.tv * * SPDX-License-Identifier: GPL-2.0-or-later * See LICENSES/README.md for more information. */ #include "SettingUpdate.h" #include "ServiceBroker.h" #include "SettingDefinitions.h" #include "utils/StringUtils.h" #include "utils/XBMCTinyXML.h" #include "utils/log.h" CSettingUpdate::CSettingUpdate() : CStaticLoggerBase("CSettingUpdate") { } bool CSettingUpdate::Deserialize(const TiXmlNode *node) { if (node == nullptr) return false; auto elem = node->ToElement(); if (elem == nullptr) return false; auto strType = elem->Attribute(SETTING_XML_ATTR_TYPE); if (strType == nullptr || strlen(strType) <= 0 || !setType(strType)) { s_logger->warn("missing or unknown update type definition"); return false; } if (m_type == SettingUpdateType::Rename) { if (node->FirstChild() == nullptr || node->FirstChild()->Type() != TiXmlNode::TINYXML_TEXT) { s_logger->warn("missing or invalid setting id for rename update definition"); return false; } m_value = node->FirstChild()->ValueStr(); } return true; } bool CSettingUpdate::setType(const std::string &type) { if (StringUtils::EqualsNoCase(type, "change")) m_type = SettingUpdateType::Change; else if (StringUtils::EqualsNoCase(type, "rename")) m_type = SettingUpdateType::Rename; else return false; return true; }
xbmcin/XBMCinTC
xbmc/settings/lib/SettingUpdate.cpp
C++
gpl-2.0
1,447
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu) * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.classfile.util; import proguard.classfile.*; import proguard.classfile.attribute.CodeAttribute; import proguard.classfile.attribute.visitor.AttributeVisitor; import proguard.classfile.constant.*; import proguard.classfile.constant.visitor.ConstantVisitor; import proguard.classfile.instruction.*; import proguard.classfile.instruction.visitor.InstructionVisitor; import proguard.classfile.visitor.*; import proguard.util.StringMatcher; /** * This InstructionVisitor initializes any constant * <code>Class.get[Declared]{Field,Method}</code> references of all instructions * it visits. More specifically, it fills out the references of string constant * pool entries that refer to a class member in the program class pool or in the * library class pool. * <p> * It optionally prints notes if on usage of * <code>(SomeClass)Class.forName(variable).newInstance()</code>. * <p> * The class hierarchy and references must be initialized before using this * visitor. 
* * @see ClassSuperHierarchyInitializer * @see ClassReferenceInitializer * * @author Eric Lafortune */ public class DynamicMemberReferenceInitializer extends SimplifiedVisitor implements InstructionVisitor, ConstantVisitor, AttributeVisitor, MemberVisitor { public static final int X = InstructionSequenceMatcher.X; public static final int Y = InstructionSequenceMatcher.Y; public static final int Z = InstructionSequenceMatcher.Z; public static final int A = InstructionSequenceMatcher.A; public static final int B = InstructionSequenceMatcher.B; public static final int C = InstructionSequenceMatcher.C; public static final int D = InstructionSequenceMatcher.D; private final Constant[] GET_FIELD_CONSTANTS = new Constant[] { new MethodrefConstant(1, 2, null, null), new ClassConstant(3, null), new NameAndTypeConstant(4, 5), new Utf8Constant(ClassConstants.INTERNAL_NAME_JAVA_LANG_CLASS), new Utf8Constant(ClassConstants.INTERNAL_METHOD_NAME_CLASS_GET_FIELD), new Utf8Constant(ClassConstants.INTERNAL_METHOD_TYPE_CLASS_GET_FIELD), }; private final Constant[] GET_DECLARED_FIELD_CONSTANTS = new Constant[] { new MethodrefConstant(1, 2, null, null), new ClassConstant(3, null), new NameAndTypeConstant(4, 5), new Utf8Constant(ClassConstants.INTERNAL_NAME_JAVA_LANG_CLASS), new Utf8Constant(ClassConstants.INTERNAL_METHOD_NAME_CLASS_GET_DECLARED_FIELD), new Utf8Constant(ClassConstants.INTERNAL_METHOD_TYPE_CLASS_GET_DECLARED_FIELD), }; private final Constant[] GET_METHOD_CONSTANTS = new Constant[] { new MethodrefConstant(1, 2, null, null), new ClassConstant(3, null), new NameAndTypeConstant(4, 5), new Utf8Constant(ClassConstants.INTERNAL_NAME_JAVA_LANG_CLASS), new Utf8Constant(ClassConstants.INTERNAL_METHOD_NAME_CLASS_GET_METHOD), new Utf8Constant(ClassConstants.INTERNAL_METHOD_TYPE_CLASS_GET_METHOD), }; private final Constant[] GET_DECLARED_METHOD_CONSTANTS = new Constant[] { new MethodrefConstant(1, 2, null, null), new ClassConstant(3, null), new NameAndTypeConstant(4, 5), new 
Utf8Constant(ClassConstants.INTERNAL_NAME_JAVA_LANG_CLASS), new Utf8Constant(ClassConstants.INTERNAL_METHOD_NAME_CLASS_GET_DECLARED_METHOD), new Utf8Constant(ClassConstants.INTERNAL_METHOD_TYPE_CLASS_GET_DECLARED_METHOD), }; // SomeClass.class.get[Declared]Field("someField"). private final Instruction[] CONSTANT_GET_FIELD_INSTRUCTIONS = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, X), new ConstantInstruction(InstructionConstants.OP_LDC, Y), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; // SomeClass.class.get[Declared]Method("someMethod", new Class[] {}). private final Instruction[] CONSTANT_GET_METHOD_INSTRUCTIONS0 = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, X), new ConstantInstruction(InstructionConstants.OP_LDC, Y), new SimpleInstruction(InstructionConstants.OP_ICONST_0), new ConstantInstruction(InstructionConstants.OP_ANEWARRAY, 1), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; // SomeClass.class.get[Declared]Method("someMethod", new Class[] { A.class }). private final Instruction[] CONSTANT_GET_METHOD_INSTRUCTIONS1 = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, X), new ConstantInstruction(InstructionConstants.OP_LDC, Y), new SimpleInstruction(InstructionConstants.OP_ICONST_1), new ConstantInstruction(InstructionConstants.OP_ANEWARRAY, 1), new SimpleInstruction(InstructionConstants.OP_DUP), new SimpleInstruction(InstructionConstants.OP_ICONST_0), new ConstantInstruction(InstructionConstants.OP_LDC, A), new SimpleInstruction(InstructionConstants.OP_AASTORE), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; // SomeClass.class.get[Declared]Method("someMethod", new Class[] { A.class, B.class }). 
private final Instruction[] CONSTANT_GET_METHOD_INSTRUCTIONS2 = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, X), new ConstantInstruction(InstructionConstants.OP_LDC, Y), new SimpleInstruction(InstructionConstants.OP_ICONST_2), new ConstantInstruction(InstructionConstants.OP_ANEWARRAY, 1), new SimpleInstruction(InstructionConstants.OP_DUP), new SimpleInstruction(InstructionConstants.OP_ICONST_0), new ConstantInstruction(InstructionConstants.OP_LDC, A), new SimpleInstruction(InstructionConstants.OP_AASTORE), new SimpleInstruction(InstructionConstants.OP_DUP), new SimpleInstruction(InstructionConstants.OP_ICONST_1), new ConstantInstruction(InstructionConstants.OP_LDC, B), new SimpleInstruction(InstructionConstants.OP_AASTORE), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; // get[Declared]Field("someField"). private final Instruction[] GET_FIELD_INSTRUCTIONS = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, Y), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; // get[Declared]Method("someMethod", new Class[] {}). private final Instruction[] GET_METHOD_INSTRUCTIONS0 = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, Y), new SimpleInstruction(InstructionConstants.OP_ICONST_0), new ConstantInstruction(InstructionConstants.OP_ANEWARRAY, 1), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; // get[Declared]Method("someMethod", new Class[] { A.class }). 
private final Instruction[] GET_METHOD_INSTRUCTIONS1 = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, Y), new SimpleInstruction(InstructionConstants.OP_ICONST_1), new ConstantInstruction(InstructionConstants.OP_ANEWARRAY, 1), new SimpleInstruction(InstructionConstants.OP_DUP), new SimpleInstruction(InstructionConstants.OP_ICONST_0), new ConstantInstruction(InstructionConstants.OP_LDC, A), new SimpleInstruction(InstructionConstants.OP_AASTORE), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; // get[Declared]Method("someMethod", new Class[] { A.class, B.class }). private final Instruction[] GET_METHOD_INSTRUCTIONS2 = new Instruction[] { new ConstantInstruction(InstructionConstants.OP_LDC, Y), new SimpleInstruction(InstructionConstants.OP_ICONST_2), new ConstantInstruction(InstructionConstants.OP_ANEWARRAY, 1), new SimpleInstruction(InstructionConstants.OP_DUP), new SimpleInstruction(InstructionConstants.OP_ICONST_0), new ConstantInstruction(InstructionConstants.OP_LDC, A), new SimpleInstruction(InstructionConstants.OP_AASTORE), new SimpleInstruction(InstructionConstants.OP_DUP), new SimpleInstruction(InstructionConstants.OP_ICONST_1), new ConstantInstruction(InstructionConstants.OP_LDC, B), new SimpleInstruction(InstructionConstants.OP_AASTORE), new ConstantInstruction(InstructionConstants.OP_INVOKEVIRTUAL, 0), }; private final ClassPool programClassPool; private final ClassPool libraryClassPool; private final WarningPrinter notePrinter; private final StringMatcher noteFieldExceptionMatcher; private final StringMatcher noteMethodExceptionMatcher; private final InstructionSequenceMatcher constantGetFieldMatcher = new InstructionSequenceMatcher(GET_FIELD_CONSTANTS, CONSTANT_GET_FIELD_INSTRUCTIONS); private final InstructionSequenceMatcher constantGetDeclaredFieldMatcher = new InstructionSequenceMatcher(GET_DECLARED_FIELD_CONSTANTS, CONSTANT_GET_FIELD_INSTRUCTIONS); private final InstructionSequenceMatcher 
constantGetMethodMatcher0 = new InstructionSequenceMatcher(GET_METHOD_CONSTANTS, CONSTANT_GET_METHOD_INSTRUCTIONS0); private final InstructionSequenceMatcher constantGetDeclaredMethodMatcher0 = new InstructionSequenceMatcher(GET_DECLARED_METHOD_CONSTANTS, CONSTANT_GET_METHOD_INSTRUCTIONS0); private final InstructionSequenceMatcher constantGetMethodMatcher1 = new InstructionSequenceMatcher(GET_METHOD_CONSTANTS, CONSTANT_GET_METHOD_INSTRUCTIONS1); private final InstructionSequenceMatcher constantGetDeclaredMethodMatcher1 = new InstructionSequenceMatcher(GET_DECLARED_METHOD_CONSTANTS, CONSTANT_GET_METHOD_INSTRUCTIONS1); private final InstructionSequenceMatcher constantGetMethodMatcher2 = new InstructionSequenceMatcher(GET_METHOD_CONSTANTS, CONSTANT_GET_METHOD_INSTRUCTIONS2); private final InstructionSequenceMatcher constantGetDeclaredMethodMatcher2 = new InstructionSequenceMatcher(GET_DECLARED_METHOD_CONSTANTS, CONSTANT_GET_METHOD_INSTRUCTIONS2); private final InstructionSequenceMatcher getFieldMatcher = new InstructionSequenceMatcher(GET_FIELD_CONSTANTS, GET_FIELD_INSTRUCTIONS); private final InstructionSequenceMatcher getDeclaredFieldMatcher = new InstructionSequenceMatcher(GET_DECLARED_FIELD_CONSTANTS, GET_FIELD_INSTRUCTIONS); private final InstructionSequenceMatcher getMethodMatcher0 = new InstructionSequenceMatcher(GET_METHOD_CONSTANTS, GET_METHOD_INSTRUCTIONS0); private final InstructionSequenceMatcher getDeclaredMethodMatcher0 = new InstructionSequenceMatcher(GET_DECLARED_METHOD_CONSTANTS, GET_METHOD_INSTRUCTIONS0); private final InstructionSequenceMatcher getMethodMatcher1 = new InstructionSequenceMatcher(GET_METHOD_CONSTANTS, GET_METHOD_INSTRUCTIONS1); private final InstructionSequenceMatcher getDeclaredMethodMatcher1 = new InstructionSequenceMatcher(GET_DECLARED_METHOD_CONSTANTS, GET_METHOD_INSTRUCTIONS1); private final InstructionSequenceMatcher getMethodMatcher2 = new InstructionSequenceMatcher(GET_METHOD_CONSTANTS, GET_METHOD_INSTRUCTIONS2); private final 
InstructionSequenceMatcher getDeclaredMethodMatcher2 = new InstructionSequenceMatcher(GET_DECLARED_METHOD_CONSTANTS, GET_METHOD_INSTRUCTIONS2); private final MemberFinder memberFinder = new MemberFinder(); // Fields acting as parameters for the visitors. private Clazz referencedClass; private boolean isDeclared; private boolean isField; /** * Creates a new DynamicMemberReferenceInitializer. */ public DynamicMemberReferenceInitializer(ClassPool programClassPool, ClassPool libraryClassPool, WarningPrinter notePrinter, StringMatcher noteFieldExceptionMatcher, StringMatcher noteMethodExceptionMatcher) { this.programClassPool = programClassPool; this.libraryClassPool = libraryClassPool; this.notePrinter = notePrinter; this.noteFieldExceptionMatcher = noteFieldExceptionMatcher; this.noteMethodExceptionMatcher = noteMethodExceptionMatcher; } // Implementations for InstructionVisitor. public void visitAnyInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, Instruction instruction) { // Try to match the SomeClass.class.getField("someField") construct. matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetFieldMatcher, getFieldMatcher, true, false); // Try to match the SomeClass.class.getDeclaredField("someField") construct. matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetDeclaredFieldMatcher, getDeclaredFieldMatcher, true, true); // Try to match the SomeClass.class.getMethod("someMethod", new Class[] // {}) construct. matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetMethodMatcher0, getMethodMatcher0, false, false); // Try to match the SomeClass.class.getDeclaredMethod("someMethod", // new Class[] {}) construct. matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetDeclaredMethodMatcher0, getDeclaredMethodMatcher0, false, true); // Try to match the SomeClass.class.getMethod("someMethod", new Class[] // { A.class }) construct. 
matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetMethodMatcher1, getMethodMatcher1, false, false); // Try to match the SomeClass.class.getDeclaredMethod("someMethod", // new Class[] { A.class }) construct. matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetDeclaredMethodMatcher1, getDeclaredMethodMatcher1, false, true); // Try to match the SomeClass.class.getMethod("someMethod", new Class[] // { A.class, B.class }) construct. matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetMethodMatcher2, getMethodMatcher2, false, false); // Try to match the SomeClass.class.getDeclaredMethod("someMethod", // new Class[] { A.class, B.class }) construct. matchGetMember(clazz, method, codeAttribute, offset, instruction, constantGetDeclaredMethodMatcher2, getDeclaredMethodMatcher2, false, true); } /** * Tries to match the next instruction and fills out the string constant * or prints out a note accordingly. */ private void matchGetMember(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, Instruction instruction, InstructionSequenceMatcher constantSequenceMatcher, InstructionSequenceMatcher variableSequenceMatcher, boolean isField, boolean isDeclared) { // Try to match the next instruction in the constant sequence. instruction.accept(clazz, method, codeAttribute, offset, constantSequenceMatcher); // Did we find a match to fill out the string constant? if (constantSequenceMatcher.isMatching()) { this.isField = isField; this.isDeclared = isDeclared; // Get the member's class. clazz.constantPoolEntryAccept(constantSequenceMatcher.matchedConstantIndex(X), this); // Fill out the matched string constant. clazz.constantPoolEntryAccept(constantSequenceMatcher.matchedConstantIndex(Y), this); // Don't look for the dynamic construct. variableSequenceMatcher.reset(); } // Try to match the next instruction in the variable sequence. 
instruction.accept(clazz, method, codeAttribute, offset, variableSequenceMatcher); // Did we find a match to print out a note? if (variableSequenceMatcher.isMatching()) { // Print out a note about the dynamic invocation. printDynamicInvocationNote(clazz, variableSequenceMatcher, isField, isDeclared); } } // Implementations for ConstantVisitor. /** * Remembers the referenced class. */ public void visitClassConstant(Clazz clazz, ClassConstant classConstant) { // Remember the referenced class. referencedClass = ClassUtil.isInternalArrayType(classConstant.getName(clazz)) ? null : classConstant.referencedClass; } /** * Fills out the link to the referenced class member. */ public void visitStringConstant(Clazz clazz, StringConstant stringConstant) { if (referencedClass != null) { String name = stringConstant.getString(clazz); // See if we can find the referenced class member locally, or // somewhere in the hierarchy. Member referencedMember = isDeclared ? isField ? (Member)referencedClass.findField(name, null) : (Member)referencedClass.findMethod(name, null) : (Member)memberFinder.findMember(clazz, referencedClass, name, null, isField); if (referencedMember != null) { stringConstant.referencedMember = referencedMember; stringConstant.referencedClass = isDeclared ? referencedClass : memberFinder.correspondingClass(); } } } // Small utility methods. /** * Prints out a note on the matched dynamic invocation, if necessary. */ private void printDynamicInvocationNote(Clazz clazz, InstructionSequenceMatcher noteSequenceMatcher, boolean isField, boolean isDeclared) { // Print out a note about the dynamic invocation. if (notePrinter != null && notePrinter.accepts(clazz.getName())) { // Is the class member name in the list of exceptions? StringMatcher noteExceptionMatcher = isField ? 
noteFieldExceptionMatcher : noteMethodExceptionMatcher; int memberNameIndex = noteSequenceMatcher.matchedConstantIndex(Y); String memberName = clazz.getStringString(memberNameIndex); if (noteExceptionMatcher == null || !noteExceptionMatcher.matches(memberName)) { // Compose the external member name and partial descriptor. String externalMemberDescription = memberName; if (!isField) { externalMemberDescription += '('; for (int count = 0; count < 2; count++) { int memberArgumentIndex = noteSequenceMatcher.matchedConstantIndex(A + count); if (memberArgumentIndex > 0) { if (count > 0) { externalMemberDescription += ','; } String className = clazz.getClassName(memberArgumentIndex); externalMemberDescription += ClassUtil.isInternalArrayType(className) ? ClassUtil.externalType(className) : ClassUtil.externalClassName(className); } } externalMemberDescription += ')'; } // Print out the actual note. notePrinter.print(clazz.getName(), "Note: " + ClassUtil.externalClassName(clazz.getName()) + " accesses a " + (isDeclared ? "declared " : "") + (isField ? "field" : "method") + " '" + externalMemberDescription + "' dynamically"); // Print out notes about potential candidates. ClassVisitor classVisitor; if (isField) { classVisitor = new AllFieldVisitor( new MemberNameFilter(memberName, this)); } else { // Compose the partial method descriptor. String methodDescriptor = "("; for (int count = 0; count < 2; count++) { int memberArgumentIndex = noteSequenceMatcher.matchedConstantIndex(A + count); if (memberArgumentIndex > 0) { if (count > 0) { methodDescriptor += ','; } String className = clazz.getClassName(memberArgumentIndex); methodDescriptor += ClassUtil.isInternalArrayType(className) ? 
className : ClassUtil.internalTypeFromClassName(className); } } methodDescriptor += ")L///;"; classVisitor = new AllMethodVisitor( new MemberNameFilter(memberName, new MemberDescriptorFilter(methodDescriptor, this))); } programClassPool.classesAcceptAlphabetically(classVisitor); libraryClassPool.classesAcceptAlphabetically(classVisitor); } } } // Implementations for MemberVisitor. public void visitProgramField(ProgramClass programClass, ProgramField programField) { if (notePrinter.accepts(programClass.getName())) { System.out.println(" Maybe this is program field '" + ClassUtil.externalFullClassDescription(0, programClass.getName()) + " { " + ClassUtil.externalFullFieldDescription(0, programField.getName(programClass), programField.getDescriptor(programClass)) + "; }'"); } } public void visitProgramMethod(ProgramClass programClass, ProgramMethod programMethod) { if (notePrinter.accepts(programClass.getName())) { System.out.println(" Maybe this is program method '" + ClassUtil.externalFullClassDescription(0, programClass.getName()) + " { " + ClassUtil.externalFullMethodDescription(null, 0, programMethod.getName(programClass), programMethod.getDescriptor(programClass)) + "; }'"); } } public void visitLibraryField(LibraryClass libraryClass, LibraryField libraryField) { if (notePrinter.accepts(libraryClass.getName())) { System.out.println(" Maybe this is library field '" + ClassUtil.externalFullClassDescription(0, libraryClass.getName()) + " { " + ClassUtil.externalFullFieldDescription(0, libraryField.getName(libraryClass), libraryField.getDescriptor(libraryClass)) + "; }'"); } } public void visitLibraryMethod(LibraryClass libraryClass, LibraryMethod libraryMethod) { if (notePrinter.accepts(libraryClass.getName())) { System.out.println(" Maybe this is library method '" + ClassUtil.externalFullClassDescription(0, libraryClass.getName()) + " { " + ClassUtil.externalFullMethodDescription(null, 0, libraryMethod.getName(libraryClass), 
libraryMethod.getDescriptor(libraryClass)) + "; }'"); } } }
shakalaca/ASUS_ZenFone_A450CG
external/proguard/src/proguard/classfile/util/DynamicMemberReferenceInitializer.java
Java
gpl-2.0
27,311
// clang-format off /* Voro++ Copyright (c) 2008, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. (3) Neither the name of the University of California, Lawrence Berkeley National Laboratory, U.S. Dept. of Energy nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
You are under no obligation whatsoever to provide any bug fixes, patches,
or upgrades to the features, functionality or performance of the source
code ("Enhancements") to anyone; however, if you choose to make your
Enhancements available either publicly, or directly to Lawrence Berkeley
National Laboratory, without imposing a separate written license agreement
for such Enhancements, then you hereby grant the following license: a
non-exclusive, royalty-free perpetual license to install, use, modify,
prepare derivative works, incorporate into other computer software,
distribute, and sublicense such enhancements or derivative works thereof,
in binary and source code form. */

// Voro++, a 3D cell-based Voronoi library
//
// Author   : Chris H. Rycroft (LBL / UC Berkeley)
// Email    : chr@alum.mit.edu
// Date     : August 30th 2011
//
// Modified by PM Larsen for use in Polyhedral Template Matching

/** \file cell.cc
 * \brief Function implementations for the voronoicell and related classes. */

#include <cmath>
#include <cstdio>
#include <cstdlib>

#include "ptm_voronoi_config.h"
#include "ptm_voronoi_cell.h"

namespace ptm_voro {

/** Reports an unrecoverable internal error to stderr and terminates the
 * whole process with the given exit status.
 * \param[in] p a message describing the error.
 * \param[in] status the process exit code (one of the VOROPP_*_ERROR codes). */
inline void voro_fatal_error(const char *p,int status) {
    fprintf(stderr,"voro++: %s\n",p);
    // NOTE(review): this aborts the entire process; the commented-out return
    // suggests an error-code propagation path was once considered.
    exit(status);
    //return -1;//status;
}

/** Constructs a Voronoi cell and sets up the initial memory.
*/ voronoicell_base::voronoicell_base() : current_vertices(init_vertices), current_vertex_order(init_vertex_order), current_delete_size(init_delete_size), current_delete2_size(init_delete2_size), ed(new int*[current_vertices]), nu(new int[current_vertices]), pts(new double[3*current_vertices]), mem(new int[current_vertex_order]), mec(new int[current_vertex_order]), mep(new int*[current_vertex_order]), ds(new int[current_delete_size]), stacke(ds+current_delete_size), ds2(new int[current_delete2_size]), stacke2(ds2+current_delete_size), current_marginal(init_marginal), marg(new int[current_marginal]) { int i; for (i=0;i<3;i++) { mem[i]=init_n_vertices;mec[i]=0; mep[i]=new int[init_n_vertices*((i<<1)+1)]; } mem[3]=init_3_vertices;mec[3]=0; mep[3]=new int[init_3_vertices*7]; for (i=4;i<current_vertex_order;i++) { mem[i]=init_n_vertices;mec[i]=0; mep[i]=new int[init_n_vertices*((i<<1)+1)]; } } /** The voronoicell destructor deallocates all the dynamic memory. */ voronoicell_base::~voronoicell_base() { for (int i=current_vertex_order-1;i>=0;i--) if (mem[i]>0) delete [] mep[i]; delete [] marg; delete [] ds2;delete [] ds; delete [] mep;delete [] mec; delete [] mem;delete [] pts; delete [] nu;delete [] ed; } /** Ensures that enough memory is allocated prior to carrying out a copy. * \param[in] vc a reference to the specialized version of the calling class. * \param[in] vb a pointered to the class to be copied. */ template<class vc_class> void voronoicell_base::check_memory_for_copy(vc_class &vc,voronoicell_base* vb) { while (current_vertex_order<vb->current_vertex_order) add_memory_vorder(vc); for (int i=0;i<current_vertex_order;i++) while (mem[i]<vb->mec[i]) add_memory(vc,i,ds2); while (current_vertices<vb->p) add_memory_vertices(vc); } /** Increases the memory storage for a particular vertex order, by increasing * the size of the of the corresponding mep array. 
If the arrays already exist,
 * their size is doubled; if they don't exist, then new ones of size
 * init_n_vertices are allocated. The routine also ensures that the pointers in
 * the ed array are updated, by making use of the back pointers. For the cases
 * where the back pointer has been temporarily overwritten in the marginal
 * vertex code, the auxiliary delete stack is scanned to find out how to update
 * the ed value. If the template has been instantiated with the neighbor
 * tracking turned on, then the routine also reallocates the corresponding mne
 * array.
 * \param[in] i the order of the vertex memory to be increased. */
template<class vc_class>
void voronoicell_base::add_memory(vc_class &vc,int i,int *stackp2) {
    // Each order-i edge record holds 2i+1 ints: i edge targets, i relational
    // slots, and one back pointer.
    int s=(i<<1)+1;
    if (mem[i]==0) {
        // First allocation for this order.
        vc.n_allocate(i,init_n_vertices);
        mep[i]=new int[init_n_vertices*s];
        mem[i]=init_n_vertices;
#if VOROPP_VERBOSE >=2
        fprintf(stderr,"Order %d vertex memory created\n",i);
#endif
    } else {
        // Double the existing pool and migrate every record across.
        int j=0,k,*l;
        mem[i]<<=1;
        if (mem[i]>max_n_vertices) voro_fatal_error("Point memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
        fprintf(stderr,"Order %d vertex memory scaled up to %d\n",i,mem[i]);
#endif
        l=new int[s*mem[i]];
        int m=0;
        vc.n_allocate_aux1(i);
        while (j<s*mec[i]) {
            // The back pointer (last slot of the record) normally names the
            // owning vertex, letting us repoint ed directly.
            k=mep[i][j+(i<<1)];
            if (k>=0) {
                ed[k]=l+j;
                vc.n_set_to_aux1_offset(k,m);
            } else {
                // Back pointer was temporarily negated by the marginal-vertex
                // code; find the owner via the auxiliary delete stack instead.
                int *dsp;
                for (dsp=ds2;dsp<stackp2;dsp++) {
                    if (ed[*dsp]==mep[i]+j) {
                        ed[*dsp]=l+j;
                        vc.n_set_to_aux1_offset(*dsp,m);
                        break;
                    }
                }
                if (dsp==stackp2) voro_fatal_error("Couldn't relocate dangling pointer",VOROPP_INTERNAL_ERROR);
#if VOROPP_VERBOSE >=3
                fputs("Relocated dangling pointer",stderr);
#endif
            }
            // Copy the record itself, then the matching neighbor entries.
            for (k=0;k<s;k++,j++) l[j]=mep[i][j];
            for (k=0;k<i;k++,m++) vc.n_copy_to_aux1(i,m);
        }
        delete [] mep[i];
        mep[i]=l;
        vc.n_switch_to_aux1(i);
    }
}

/** Doubles the maximum number of vertices allowed, by reallocating the ed, nu,
 * and pts arrays. If the allocation exceeds the absolute maximum set in
 * max_vertices, then the routine exits with a fatal error.
If the template has
 * been instantiated with the neighbor tracking turned on, then the routine
 * also reallocates the ne array. */
template<class vc_class>
void voronoicell_base::add_memory_vertices(vc_class &vc) {
    // NOTE(review): this PTM-modified build deliberately treats vertex-table
    // growth as a fatal condition — the process exits here and the original
    // Voro++ reallocation code below is unreachable. Confirm this is the
    // intended PTM behavior before removing.
    printf("nope: %d\n", current_vertices);
    exit(3);

    int i=(current_vertices<<1),j,**pp,*pnu;
    if (i>max_vertices) voro_fatal_error("Vertex memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
    fprintf(stderr,"Vertex memory scaled up to %d\n",i);
#endif
    double *ppts;
    // Double ed, nu and pts, copying the live prefix of each.
    pp=new int*[i];
    for (j=0;j<current_vertices;j++) pp[j]=ed[j];
    delete [] ed;ed=pp;
    vc.n_add_memory_vertices(i);
    pnu=new int[i];
    for (j=0;j<current_vertices;j++) pnu[j]=nu[j];
    delete [] nu;nu=pnu;
    ppts=new double[3*i];
    for (j=0;j<3*current_vertices;j++) ppts[j]=pts[j];
    delete [] pts;pts=ppts;
    current_vertices=i;
}

/** Doubles the maximum allowed vertex order, by reallocating mem, mep, and mec
 * arrays. If the allocation exceeds the absolute maximum set in
 * max_vertex_order, then the routine causes a fatal error. If the template has
 * been instantiated with the neighbor tracking turned on, then the routine
 * also reallocates the mne array. */
template<class vc_class>
void voronoicell_base::add_memory_vorder(vc_class &vc) {
    int i=(current_vertex_order<<1),j,*p1,**p2;
    if (i>max_vertex_order) voro_fatal_error("Vertex order memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
    fprintf(stderr,"Vertex order memory scaled up to %d\n",i);
#endif
    // Double mem, zero-filling the new upper half (orders not yet used).
    p1=new int[i];
    for (j=0;j<current_vertex_order;j++) p1[j]=mem[j];
    while (j<i) p1[j++]=0;
    delete [] mem;mem=p1;
    // Double mep; new slots are left unset until the order is first allocated.
    p2=new int*[i];
    for (j=0;j<current_vertex_order;j++) p2[j]=mep[j];
    delete [] mep;mep=p2;
    // Double mec, zero-filling the counts for the new orders.
    p1=new int[i];
    for (j=0;j<current_vertex_order;j++) p1[j]=mec[j];
    while (j<i) p1[j++]=0;
    delete [] mec;mec=p1;
    vc.n_add_memory_vorder(i);
    current_vertex_order=i;
}

/** Doubles the size allocation of the main delete stack.
If the allocation
 * exceeds the absolute maximum set in max_delete_size, then the routine causes
 * a fatal error.
 * \param[in,out] stackp a pointer to the end of the stack entries; updated to
 *                point into the reallocated stack. */
void voronoicell_base::add_memory_ds(int *&stackp) {
    current_delete_size<<=1;
    if (current_delete_size>max_delete_size) voro_fatal_error("Delete stack 1 memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
    fprintf(stderr,"Delete stack 1 memory scaled up to %d\n",current_delete_size);
#endif
    // Copy the live entries into the doubled stack and repoint the caller's
    // cursor at the new end-of-entries position.
    int *dsn=new int[current_delete_size],*dsnp=dsn,*dsp=ds;
    while (dsp<stackp) *(dsnp++)=*(dsp++);
    delete [] ds;ds=dsn;stackp=dsnp;
    stacke=ds+current_delete_size;
}

/** Doubles the size allocation of the auxiliary delete stack. If the
 * allocation exceeds the absolute maximum set in max_delete2_size, then the
 * routine causes a fatal error.
 * \param[in,out] stackp2 a pointer to the end of the stack entries; updated to
 *                point into the reallocated stack. */
void voronoicell_base::add_memory_ds2(int *&stackp2) {
    current_delete2_size<<=1;
    if (current_delete2_size>max_delete2_size) voro_fatal_error("Delete stack 2 memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
    fprintf(stderr,"Delete stack 2 memory scaled up to %d\n",current_delete2_size);
#endif
    // Same migration as add_memory_ds, for the auxiliary stack.
    int *dsn=new int[current_delete2_size],*dsnp=dsn,*dsp=ds2;
    while (dsp<stackp2) *(dsnp++)=*(dsp++);
    delete [] ds2;ds2=dsn;stackp2=dsnp;
    stacke2=ds2+current_delete2_size;
}

/** Initializes a Voronoi cell as a rectangular box with the given dimensions.
 * \param[in] (xmin,xmax) the minimum and maximum x coordinates.
 * \param[in] (ymin,ymax) the minimum and maximum y coordinates.
 * \param[in] (zmin,zmax) the minimum and maximum z coordinates.
 */
void voronoicell_base::init_base(double xmin,double xmax,double ymin,double ymax,double zmin,double zmax) {
    // Reset all per-order vertex counts; the box uses only order 3.
    for (int i=0;i<current_vertex_order;i++) mec[i]=0;
    // Coordinates are stored doubled throughout this class.
    up=0;mec[3]=p=8;xmin*=2;xmax*=2;ymin*=2;ymax*=2;zmin*=2;zmax*=2;
    // The eight corners of the box, vertices 0..7.
    *pts=xmin;pts[1]=ymin;pts[2]=zmin;
    pts[3]=xmax;pts[4]=ymin;pts[5]=zmin;
    pts[6]=xmin;pts[7]=ymax;pts[8]=zmin;
    pts[9]=xmax;pts[10]=ymax;pts[11]=zmin;
    pts[12]=xmin;pts[13]=ymin;pts[14]=zmax;
    pts[15]=xmax;pts[16]=ymin;pts[17]=zmax;
    pts[18]=xmin;pts[19]=ymax;pts[20]=zmax;
    pts[21]=xmax;pts[22]=ymax;pts[23]=zmax;
    // Hand-built order-3 edge table: each 7-int record holds three edge
    // targets, three relational slots, and a back pointer to the vertex.
    int *q=mep[3];
    *q=1;q[1]=4;q[2]=2;q[3]=2;q[4]=1;q[5]=0;q[6]=0;
    q[7]=3;q[8]=5;q[9]=0;q[10]=2;q[11]=1;q[12]=0;q[13]=1;
    q[14]=0;q[15]=6;q[16]=3;q[17]=2;q[18]=1;q[19]=0;q[20]=2;
    q[21]=2;q[22]=7;q[23]=1;q[24]=2;q[25]=1;q[26]=0;q[27]=3;
    q[28]=6;q[29]=0;q[30]=5;q[31]=2;q[32]=1;q[33]=0;q[34]=4;
    q[35]=4;q[36]=1;q[37]=7;q[38]=2;q[39]=1;q[40]=0;q[41]=5;
    q[42]=7;q[43]=2;q[44]=4;q[45]=2;q[46]=1;q[47]=0;q[48]=6;
    q[49]=5;q[50]=3;q[51]=6;q[52]=2;q[53]=1;q[54]=0;q[55]=7;
    *ed=q;ed[1]=q+7;ed[2]=q+14;ed[3]=q+21;
    ed[4]=q+28;ed[5]=q+35;ed[6]=q+42;ed[7]=q+49;
    // Every corner of a box has exactly three edges.
    *nu=nu[1]=nu[2]=nu[3]=nu[4]=nu[5]=nu[6]=nu[7]=3;
}

/** Starting from a point within the current cutting plane, this routine attempts
 * to find an edge to a point outside the cutting plane. This prevents the plane
 * routine from wrongly concluding that the entire cell lies inside the cutting
 * space. (The original sentence here was left unfinished.)
 * \param[in] vc a reference to the specialized version of the calling class.
 * \param[in,out] up the vertex to start the search from; on success, the last
 *                vertex visited. */
template<class vc_class>
inline bool voronoicell_base::search_for_outside_edge(vc_class &vc,int &up) {
    // Breadth-first walk over in-plane vertices using the auxiliary stack.
    int i,lp,lw,*j(ds2),*stackp2(ds2);
    double l;
    *(stackp2++)=up;
    while (j<stackp2) {
        up=*(j++);
        for (i=0;i<nu[up];i++) {
            lp=ed[up][i];
            lw=m_test(lp,l);
            // Found a neighbor strictly outside the cutting space.
            if (lw==-1) return true;
            // Neighbor is also in the plane: enqueue it for later scanning.
            else if (lw==0) add_to_stack(vc,lp,stackp2);
        }
    }
    return false;
}

/** Adds a point to the auxiliary delete stack if it is not already there.
 * \param[in] vc a reference to the specialized version of the calling class.
 * \param[in] lp the index of the point to add.
 * \param[in,out] stackp2 a pointer to the end of the stack entries. */
template<class vc_class>
inline void voronoicell_base::add_to_stack(vc_class &vc,int lp,int *&stackp2) {
    (void)vc;
    // Skip if lp is already on the auxiliary stack.
    for (int *k(ds2);k<stackp2;k++) if (*k==lp) return;
    if (stackp2==stacke2) add_memory_ds2(stackp2);
    *(stackp2++)=lp;
}

/** Cuts the Voronoi cell by a particle whose center is at a separation of
 * (x,y,z) from the cell center. The value of rsq should be initially set to
 * \f$x^2+y^2+z^2\f$.
 * \param[in] vc a reference to the specialized version of the calling class.
 * \param[in] (x,y,z) the normal vector to the plane.
 * \param[in] rsq the distance along this vector of the plane.
 * \param[in] p_id the plane ID (for neighbor tracking only).
 * \return False if the plane cut deleted the cell entirely, true otherwise. */
template<class vc_class>
bool voronoicell_base::nplane(vc_class &vc,double x,double y,double z,double rsq,int p_id) {
    int count=0,i,j,k,lp=up,cp,qp,rp,*stackp(ds),*stackp2(ds2),*dsp;
    int us=0,ls=0,qs,iqs,cs,uw,qw,lw;
    int *edp,*edd;
    double u,l,r,q;bool complicated_setup=false,new_double_edge=false,double_edge=false;

    // Initialize the safe testing routine
    n_marg=0;px=x;py=y;pz=z;prsq=rsq;

    // Test approximately sqrt(n)/4 points for their proximity to the plane
    // and keep the one which is closest
    uw=m_test(up,u);

    // Starting from an initial guess, we now move from vertex to vertex,
    // to try and find an edge which intersects the cutting plane,
    // or a vertex which is on the plane
    try {
        if (uw==1) {

            // The test point is inside the cutting plane.
            us=0;
            do {
                lp=ed[up][us];
                lw=m_test(lp,l);
                if (l<u) break;
                us++;
            } while (us<nu[up]);

            if (us==nu[up]) {
                return false;
            }

            ls=ed[up][nu[up]+us];
            // Descend toward the plane along edges of decreasing scalar
            // product; count>=p guards against floating-point cycling.
            while (lw==1) {
                if (++count>=p) throw true;
                u=l;up=lp;
                for (us=0;us<ls;us++) {
                    lp=ed[up][us];
                    lw=m_test(lp,l);
                    if (l<u) break;
                }
                if (us==ls) {
                    us++;
                    while (us<nu[up]) {
                        lp=ed[up][us];
                        lw=m_test(lp,l);
                        if (l<u) break;
                        us++;
                    }
                    if (us==nu[up]) {
                        return false;
                    }
                }
                ls=ed[up][nu[up]+us];
            }

            // If the last point in the iteration is within the
            // plane, we need to do the complicated setup
            // routine. Otherwise, we use the regular iteration.
            if (lw==0) {
                up=lp;
                complicated_setup=true;
            } else complicated_setup=false;
        } else if (uw==-1) {
            // The test point is outside the cutting space; climb toward it.
            us=0;
            do {
                qp=ed[up][us];
                qw=m_test(qp,q);
                if (u<q) break;
                us++;
            } while (us<nu[up]);
            if (us==nu[up]) return true;

            while (qw==-1) {
                qs=ed[up][nu[up]+us];
                if (++count>=p) throw true;
                u=q;up=qp;
                for (us=0;us<qs;us++) {
                    qp=ed[up][us];
                    qw=m_test(qp,q);
                    if (u<q) break;
                }
                if (us==qs) {
                    us++;
                    while (us<nu[up]) {
                        qp=ed[up][us];
                        qw=m_test(qp,q);
                        if (u<q) break;
                        us++;
                    }
                    if (us==nu[up]) return true;
                }
            }
            if (qw==1) {
                lp=up;ls=us;l=u;
                up=qp;us=ed[lp][nu[lp]+ls];u=q;
                complicated_setup=false;
            } else {
                up=qp;
                complicated_setup=true;
            }
        } else {

            // Our original test point was on the plane, so we
            // automatically head for the complicated setup
            // routine
            complicated_setup=true;
        }
    }
    catch(bool except) {

        // This routine is a fall-back, in case floating point errors
        // cause the usual search routine to fail. In the fall-back
        // routine, we just test every edge to find one straddling
        // the plane.
        (void)except;
#if VOROPP_VERBOSE >=1
        fputs("Bailed out of convex calculation\n",stderr);
#endif
        qw=1;lw=0;
        for (qp=0;qp<p;qp++) {
            qw=m_test(qp,q);
            if (qw==1) {

                // The point is inside the cutting space. Now
                // see if we can find a neighbor which isn't.
                for (us=0;us<nu[qp];us++) {
                    lp=ed[qp][us];
                    if (lp<qp) {
                        lw=m_test(lp,l);
                        if (lw!=1) break;
                    }
                }
                if (us<nu[qp]) {
                    up=qp;
                    if (lw==0) {
                        complicated_setup=true;
                    } else {
                        complicated_setup=false;
                        u=q;
                        ls=ed[up][nu[up]+us];
                    }
                    break;
                }
            } else if (qw==-1) {

                // The point is outside the cutting space. See
                // if we can find a neighbor which isn't.
                for (ls=0;ls<nu[qp];ls++) {
                    up=ed[qp][ls];
                    if (up<qp) {
                        uw=m_test(up,u);
                        if (uw!=-1) break;
                    }
                }
                if (ls<nu[qp]) {
                    if (uw==0) {
                        up=qp;
                        complicated_setup=true;
                    } else {
                        complicated_setup=false;
                        lp=qp;l=q;
                        us=ed[lp][nu[lp]+ls];
                    }
                    break;
                }
            } else {

                // The point is in the plane, so we just
                // proceed with the complicated setup routine
                up=qp;
                complicated_setup=true;
                break;
            }
        }
        if (qp==p) return qw==-1?true:false;
    }

    // We're about to add the first point of the new facet. In either
    // routine, we have to add a point, so first check there's space for
    // it.
    if (p==current_vertices) add_memory_vertices(vc);

    if (complicated_setup) {

        // We want to be strict about reaching the conclusion that the
        // cell is entirely within the cutting plane. It's not enough
        // to find a vertex that has edges which are all inside or on
        // the plane. If the vertex has neighbors that are also on the
        // plane, we should check those too.
        if (!search_for_outside_edge(vc,up)) return false;

        // The search algorithm found a point which is on the cutting
        // plane. We leave that point in place, and create a new one at
        // the same location.
        pts[3*p]=pts[3*up];
        pts[3*p+1]=pts[3*up+1];
        pts[3*p+2]=pts[3*up+2];

        // Search for a collection of edges of the test vertex which
        // are outside of the cutting space. Begin by testing the
        // zeroth edge.
        i=0;
        lp=*ed[up];
        lw=m_test(lp,l);
        if (lw!=-1) {

            // The first edge is either inside the cutting space,
            // or lies within the cutting plane. Test the edges
            // sequentially until we find one that is outside.
            rp=lw;
            do {
                i++;

                // If we reached the last edge with no luck
                // then all of the vertices are inside
                // or on the plane, so the cell is completely
                // deleted
                if (i==nu[up]) return false;
                lp=ed[up][i];
                lw=m_test(lp,l);
            } while (lw!=-1);
            j=i+1;

            // We found an edge outside the cutting space. Keep
            // moving through these edges until we find one that's
            // inside or on the plane.
            while (j<nu[up]) {
                lp=ed[up][j];
                lw=m_test(lp,l);
                if (lw!=-1) break;
                j++;
            }

            // Compute the number of edges for the new vertex. In
            // general it will be the number of outside edges
            // found, plus two. But we need to recognize the
            // special case when all but one edge is outside, and
            // the remaining one is on the plane. For that case we
            // have to reduce the edge count by one to prevent
            // doubling up.
            if (j==nu[up]&&i==1&&rp==0) {
                nu[p]=nu[up];
                double_edge=true;
            } else nu[p]=j-i+2;
            k=1;

            // Add memory for the new vertex if needed, and
            // initialize
            while (nu[p]>=current_vertex_order) add_memory_vorder(vc);
            if (mec[nu[p]]==mem[nu[p]]) add_memory(vc,nu[p],stackp2);
            vc.n_set_pointer(p,nu[p]);
            ed[p]=mep[nu[p]]+((nu[p]<<1)+1)*mec[nu[p]]++;
            ed[p][nu[p]<<1]=p;

            // Copy the edges of the original vertex into the new
            // one. Delete the edges of the original vertex, and
            // update the relational table.
            us=cycle_down(i,up);
            while (i<j) {
                qp=ed[up][i];
                qs=ed[up][nu[up]+i];
                vc.n_copy(p,k,up,i);
                ed[p][k]=qp;
                ed[p][nu[p]+k]=qs;
                ed[qp][qs]=p;
                ed[qp][nu[qp]+qs]=k;
                ed[up][i]=-1;
                i++;k++;
            }
            qs=i==nu[up]?0:i;
        } else {

            // In this case, the zeroth edge is outside the cutting
            // plane. Begin by searching backwards from the last
            // edge until we find an edge which isn't outside.
            i=nu[up]-1;
            lp=ed[up][i];
            lw=m_test(lp,l);
            while (lw==-1) {
                i--;

                // If i reaches zero, then we have a point in
                // the plane all of whose edges are outside
                // the cutting space, so we just exit
                if (i==0) return true;
                lp=ed[up][i];
                lw=m_test(lp,l);
            }

            // Now search forwards from zero
            j=1;
            qp=ed[up][j];
            qw=m_test(qp,q);
            while (qw==-1) {
                j++;
                qp=ed[up][j];
                // NOTE(review): this call writes the scalar product into l
                // rather than q, unlike the identical search just above —
                // only qw is consumed below, but confirm against upstream
                // Voro++ before relying on q here.
                qw=m_test(qp,l);
            }

            // Compute the number of edges for the new vertex. In
            // general it will be the number of outside edges
            // found, plus two. But we need to recognize the
            // special case when all but one edge is outside, and
            // the remaining one is on the plane. For that case we
            // have to reduce the edge count by one to prevent
            // doubling up.
            if (i==j&&qw==0) {
                double_edge=true;
                nu[p]=nu[up];
            } else {
                nu[p]=nu[up]-i+j+1;
            }

            // Add memory to store the vertex if it doesn't exist
            // already
            k=1;
            while (nu[p]>=current_vertex_order) add_memory_vorder(vc);
            if (mec[nu[p]]==mem[nu[p]]) add_memory(vc,nu[p],stackp2);

            // Copy the edges of the original vertex into the new
            // one. Delete the edges of the original vertex, and
            // update the relational table.
            vc.n_set_pointer(p,nu[p]);
            ed[p]=mep[nu[p]]+((nu[p]<<1)+1)*mec[nu[p]]++;
            ed[p][nu[p]<<1]=p;
            us=i++;
            while (i<nu[up]) {
                qp=ed[up][i];
                qs=ed[up][nu[up]+i];
                vc.n_copy(p,k,up,i);
                ed[p][k]=qp;
                ed[p][nu[p]+k]=qs;
                ed[qp][qs]=p;
                ed[qp][nu[qp]+qs]=k;
                ed[up][i]=-1;
                i++;k++;
            }
            i=0;
            while (i<j) {
                qp=ed[up][i];
                qs=ed[up][nu[up]+i];
                vc.n_copy(p,k,up,i);
                ed[p][k]=qp;
                ed[p][nu[p]+k]=qs;
                ed[qp][qs]=p;
                ed[qp][nu[qp]+qs]=k;
                ed[up][i]=-1;
                i++;k++;
            }
            qs=j;
        }
        if (!double_edge) {
            vc.n_copy(p,k,up,qs);
            vc.n_set(p,0,p_id);
        } else vc.n_copy(p,0,up,qs);

        // Add this point to the auxiliary delete stack
        if (stackp2==stacke2) add_memory_ds2(stackp2);
        *(stackp2++)=up;

        // Look at the edges on either side of the group that was
        // detected. We're going to commence facet computation by
        // moving along one of them. We are going to end up coming back
        // along the other one.
        cs=k;
        qp=up;q=u;
        i=ed[up][us];
        us=ed[up][nu[up]+us];
        up=i;
        ed[qp][nu[qp]<<1]=-p;
    } else {

        // The search algorithm found an intersected edge between the
        // points lp and up. Create a new vertex between them which
        // lies on the cutting plane. Since u and l differ by at least
        // the tolerance, this division should never screw up.
        if (stackp==stacke) add_memory_ds(stackp);
        *(stackp++)=up;
        r=u/(u-l);l=1-r;
        pts[3*p]=pts[3*lp]*r+pts[3*up]*l;
        pts[3*p+1]=pts[3*lp+1]*r+pts[3*up+1]*l;
        pts[3*p+2]=pts[3*lp+2]*r+pts[3*up+2]*l;

        // This point will always have three edges. Connect one of them
        // to lp.
        nu[p]=3;
        if (mec[3]==mem[3]) add_memory(vc,3,stackp2);
        vc.n_set_pointer(p,3);
        vc.n_set(p,0,p_id);
        vc.n_copy(p,1,up,us);
        vc.n_copy(p,2,lp,ls);
        ed[p]=mep[3]+7*mec[3]++;
        ed[p][6]=p;
        ed[up][us]=-1;
        ed[lp][ls]=p;
        ed[lp][nu[lp]+ls]=1;
        ed[p][1]=lp;
        ed[p][nu[p]+1]=ls;
        cs=2;

        // Set the direction to move in
        qs=cycle_up(us,up);
        qp=up;q=u;
    }

    // When the code reaches here, we have initialized the first point, and
    // we have a direction for moving it to construct the rest of the facet
    cp=p;rp=p;p++;
    while (qp!=up||qs!=us) {

        // We're currently tracing round an intersected facet. Keep
        // moving around it until we find a point or edge which
        // intersects the plane.
        lp=ed[qp][qs];
        lw=m_test(lp,l);
        if (lw==1) {

            // The point is still in the cutting space. Just add it
            // to the delete stack and keep moving.
            qs=cycle_up(ed[qp][nu[qp]+qs],lp);
            qp=lp;
            q=l;
            if (stackp==stacke) add_memory_ds(stackp);
            *(stackp++)=qp;
        } else if (lw==-1) {

            // The point is outside of the cutting space, so we've
            // found an intersected edge. Introduce a regular point
            // at the point of intersection. Connect it to the
            // point we just tested. Also connect it to the previous
            // new point in the facet we're constructing.
            if (p==current_vertices) add_memory_vertices(vc);
            r=q/(q-l);l=1-r;
            pts[3*p]=pts[3*lp]*r+pts[3*qp]*l;
            pts[3*p+1]=pts[3*lp+1]*r+pts[3*qp+1]*l;
            pts[3*p+2]=pts[3*lp+2]*r+pts[3*qp+2]*l;
            nu[p]=3;
            if (mec[3]==mem[3]) add_memory(vc,3,stackp2);
            ls=ed[qp][qs+nu[qp]];
            vc.n_set_pointer(p,3);
            vc.n_set(p,0,p_id);
            vc.n_copy(p,1,qp,qs);
            vc.n_copy(p,2,lp,ls);
            ed[p]=mep[3]+7*mec[3]++;
            *ed[p]=cp;
            ed[p][1]=lp;
            ed[p][3]=cs;
            ed[p][4]=ls;
            ed[p][6]=p;
            ed[lp][ls]=p;
            ed[lp][nu[lp]+ls]=1;
            ed[cp][cs]=p;
            ed[cp][nu[cp]+cs]=0;
            ed[qp][qs]=-1;
            qs=cycle_up(qs,qp);
            cp=p++;
            cs=2;
        } else {

            // We've found a point which is on the cutting plane.
            // We're going to introduce a new point right here, but
            // first we need to figure out the number of edges it
            // has.
            if (p==current_vertices) add_memory_vertices(vc);

            // If the previous vertex detected a double edge, our
            // new vertex will have one less edge.
            k=double_edge?0:1;
            qs=ed[qp][nu[qp]+qs];
            qp=lp;
            iqs=qs;

            // Start testing the edges of the current point until
            // we find one which isn't outside the cutting space
            do {
                k++;
                qs=cycle_up(qs,qp);
                lp=ed[qp][qs];
                lw=m_test(lp,l);
            } while (lw==-1);

            // Now we need to find out whether this marginal vertex
            // we are on has been visited before, because if that's
            // the case, we need to add vertices to the existing
            // new vertex, rather than creating a fresh one. We also
            // need to figure out whether we're in a case where we
            // might be creating a duplicate edge.
            j=-ed[qp][nu[qp]<<1];
            if (qp==up&&qs==us) {

                // If we're heading into the final part of the
                // new facet, then we never worry about the
                // duplicate edge calculation.
                new_double_edge=false;
                if (j>0) k+=nu[j];
            } else {
                if (j>0) {

                    // This vertex was visited before, so
                    // count those vertices to the ones we
                    // already have.
                    k+=nu[j];

                    // The only time when we might make a
                    // duplicate edge is if the point we're
                    // going to move to next is also a
                    // marginal point, so test for that
                    // first.
                    if (lw==0) {

                        // Now see whether this marginal point
                        // has been visited before.
                        i=-ed[lp][nu[lp]<<1];
                        if (i>0) {

                            // Now see if the last edge of that other
                            // marginal point actually ends up here.
                            if (ed[i][nu[i]-1]==j) {
                                new_double_edge=true;
                                k-=1;
                            } else new_double_edge=false;
                        } else {

                            // That marginal point hasn't been visited
                            // before, so we probably don't have to worry
                            // about duplicate edges, except in the
                            // case when that's the way into the end
                            // of the facet, because that way always creates
                            // an edge.
                            if (j==rp&&lp==up&&ed[qp][nu[qp]+qs]==us) {
                                new_double_edge=true;
                                k-=1;
                            } else new_double_edge=false;
                        }
                    } else new_double_edge=false;
                } else {

                    // The vertex hasn't been visited
                    // before, but let's see if it's
                    // marginal
                    if (lw==0) {

                        // If it is, we need to check
                        // for the case that it's a
                        // small branch, and that we're
                        // heading right back to where
                        // we came from
                        i=-ed[lp][nu[lp]<<1];
                        if (i==cp) {
                            new_double_edge=true;
                            k-=1;
                        } else new_double_edge=false;
                    } else new_double_edge=false;
                }
            }

            // k now holds the number of edges of the new vertex
            // we are forming. Add memory for it if it doesn't exist
            // already.
            while (k>=current_vertex_order) add_memory_vorder(vc);
            if (mec[k]==mem[k]) add_memory(vc,k,stackp2);

            // Now create a new vertex with order k, or augment
            // the existing one
            if (j>0) {

                // If we're augmenting a vertex but we don't
                // actually need any more edges, just skip this
                // routine to avoid memory confusion
                if (nu[j]!=k) {

                    // Allocate memory and copy the edges
                    // of the previous instance into it
                    vc.n_set_aux1(k);
                    edp=mep[k]+((k<<1)+1)*mec[k]++;
                    i=0;
                    while (i<nu[j]) {
                        vc.n_copy_aux1(j,i);
                        edp[i]=ed[j][i];
                        edp[k+i]=ed[j][nu[j]+i];
                        i++;
                    }
                    edp[k<<1]=j;

                    // Remove the previous instance with
                    // fewer vertices from the memory
                    // structure
                    edd=mep[nu[j]]+((nu[j]<<1)+1)*--mec[nu[j]];
                    if (edd!=ed[j]) {
                        for (lw=0;lw<=(nu[j]<<1);lw++) ed[j][lw]=edd[lw];
                        vc.n_set_aux2_copy(j,nu[j]);
                        vc.n_copy_pointer(edd[nu[j]<<1],j);
                        ed[edd[nu[j]<<1]]=ed[j];
                    }
                    vc.n_set_to_aux1(j);
                    ed[j]=edp;
                } else i=nu[j];
            } else {

                // Allocate a new vertex of order k
                vc.n_set_pointer(p,k);
                ed[p]=mep[k]+((k<<1)+1)*mec[k]++;
                ed[p][k<<1]=p;
                if (stackp2==stacke2) add_memory_ds2(stackp2);
                *(stackp2++)=qp;
                pts[3*p]=pts[3*qp];
                pts[3*p+1]=pts[3*qp+1];
                pts[3*p+2]=pts[3*qp+2];
                ed[qp][nu[qp]<<1]=-p;
                j=p++;
                i=0;
            }
            nu[j]=k;

            // Unless the previous case was a double edge, connect
            // the first available edge of the new vertex to the
            // last one in the facet
            if (!double_edge) {
                ed[j][i]=cp;
                ed[j][nu[j]+i]=cs;
                vc.n_set(j,i,p_id);
                ed[cp][cs]=j;
                ed[cp][nu[cp]+cs]=i;
                i++;
            }

            // Copy in the edges of the underlying vertex,
            // and do one less if this was a double edge
            qs=iqs;
            while (i<(new_double_edge?k:k-1)) {
                qs=cycle_up(qs,qp);
                lp=ed[qp][qs];ls=ed[qp][nu[qp]+qs];
                vc.n_copy(j,i,qp,qs);
                ed[j][i]=lp;
                ed[j][nu[j]+i]=ls;
                ed[lp][ls]=j;
                ed[lp][nu[lp]+ls]=i;
                ed[qp][qs]=-1;
                i++;
            }
            qs=cycle_up(qs,qp);
            cs=i;
            cp=j;
            vc.n_copy(j,new_double_edge?0:cs,qp,qs);

            // Update the double_edge flag, to pass it
            // to the next instance of this routine
            double_edge=new_double_edge;
        }
    }

    // Connect the final created vertex to the initial one
    ed[cp][cs]=rp;
    *ed[rp]=cp;
    ed[cp][nu[cp]+cs]=0;
    ed[rp][nu[rp]]=cs;

    // Delete points: first, remove any duplicates
    dsp=ds;
    while (dsp<stackp) {
        j=*dsp;
        if (ed[j][nu[j]]!=-1) {
            ed[j][nu[j]]=-1;
            dsp++;
        } else *dsp=*(--stackp);
    }

    // Add the points in the auxiliary delete stack,
    // and reset their back pointers
    for (dsp=ds2;dsp<stackp2;dsp++) {
        j=*dsp;
        ed[j][nu[j]<<1]=j;
        if (ed[j][nu[j]]!=-1) {
            ed[j][nu[j]]=-1;
            if (stackp==stacke) add_memory_ds(stackp);
            *(stackp++)=j;
        }
    }

    // Scan connections and add in extras
    for (dsp=ds;dsp<stackp;dsp++) {
        cp=*dsp;
        for (edp=ed[cp];edp<ed[cp]+nu[cp];edp++) {
            qp=*edp;
            if (qp!=-1&&ed[qp][nu[qp]]!=-1) {
                if (stackp==stacke) {
                    // add_memory_ds may move ds, so preserve dsp's offset.
                    int dis=stackp-dsp;
                    add_memory_ds(stackp);
                    dsp=ds+dis;
                }
                *(stackp++)=qp;
                ed[qp][nu[qp]]=-1;
            }
        }
    }
    up=0;

    // Delete them from the array structure
    while (stackp>ds) {

        // Compact any marked vertices off the top of the vertex table.
        --p;
        while (ed[p][nu[p]]==-1) {
            j=nu[p];
            edp=ed[p];edd=(mep[j]+((j<<1)+1)*--mec[j]);
            while (edp<ed[p]+(j<<1)+1) *(edp++)=*(edd++);
            vc.n_set_aux2_copy(p,j);
            vc.n_copy_pointer(ed[p][(j<<1)],p);
            ed[ed[p][(j<<1)]]=ed[p];
            --p;
        }
        up=*(--stackp);
        if (up<p) {

            // Vertex management
            pts[3*up]=pts[3*p];
            pts[3*up+1]=pts[3*p+1];
            pts[3*up+2]=pts[3*p+2];

            // Memory management
            j=nu[up];
            edp=ed[up];edd=(mep[j]+((j<<1)+1)*--mec[j]);
            while (edp<ed[up]+(j<<1)+1) *(edp++)=*(edd++);
            vc.n_set_aux2_copy(up,j);
            vc.n_copy_pointer(ed[up][j<<1],up);
            vc.n_copy_pointer(up,p);
            ed[ed[up][j<<1]]=ed[up];

            // Edge management
            ed[up]=ed[p];
            nu[up]=nu[p];
            for (i=0;i<nu[up];i++) ed[ed[up][i]][ed[up][nu[up]+i]]=up;
            ed[up][nu[up]<<1]=up;
        } else up=p++;
    }

    // Check for any vertices of zero order
    if (*mec>0) voro_fatal_error("Zero order vertex formed",VOROPP_INTERNAL_ERROR);

    // Collapse any order 2 vertices and exit
    return collapse_order2(vc);
}

/** During the creation of a new facet in the plane routine, it is possible
 * that some order two vertices may arise. This routine removes them.
 * Suppose an order two vertex joins c and d.
If there's an edge between
 * c and d already, then the order two vertex is just removed; otherwise,
 * the order two vertex is removed and c and d are joined together directly.
 * It is possible this process will create order two or order one vertices,
 * and the routine is continually run until all of them are removed.
 * \return False if the vertex removal was unsuccessful, indicative of the cell
 *         reducing to zero volume and disappearing; true if the vertex removal
 *         was successful. */
template<class vc_class>
inline bool voronoicell_base::collapse_order2(vc_class &vc) {
    if (!collapse_order1(vc)) return false;
    int a,b,i,j,k,l;
    while (mec[2]>0) {

        // Pick an order 2 vertex and read in its edges
        i=--mec[2];
        j=mep[2][5*i];k=mep[2][5*i+1];
        if (j==k) {
#if VOROPP_VERBOSE >=1
            fputs("Order two vertex joins itself",stderr);
#endif
            return false;
        }

        // Scan the edges of j to see if j joins k
        for (l=0;l<nu[j];l++) {
            if (ed[j][l]==k) break;
        }

        // If j doesn't already join k, join them together.
        // Otherwise delete the connection to the current
        // vertex from j and k.
        a=mep[2][5*i+2];b=mep[2][5*i+3];i=mep[2][5*i+4];
        if (l==nu[j]) {
            ed[j][a]=k;
            ed[k][b]=j;
            ed[j][nu[j]+a]=b;
            ed[k][nu[k]+b]=a;
        } else {
            if (!delete_connection(vc,j,a,false)) return false;
            if (!delete_connection(vc,k,b,true)) return false;
        }

        // Compact the memory: move the last vertex p into slot i.
        --p;
        if (up==i) up=0;
        if (p!=i) {
            if (up==p) up=i;
            pts[3*i]=pts[3*p];
            pts[3*i+1]=pts[3*p+1];
            pts[3*i+2]=pts[3*p+2];
            for (k=0;k<nu[p];k++) ed[ed[p][k]][ed[p][nu[p]+k]]=i;
            vc.n_copy_pointer(i,p);
            ed[i]=ed[p];
            nu[i]=nu[p];
            ed[i][nu[i]<<1]=i;
        }

        // Collapse any order 1 vertices if they were created
        if (!collapse_order1(vc)) return false;
    }
    return true;
}

/** Order one vertices can potentially be created during the order two collapse
 * routine. This routine keeps removing them until there are none left.
 * \return False if the vertex removal was unsuccessful, indicative of the cell
 *         having zero volume and disappearing; true if the vertex removal was
 *         successful.
 */
template<class vc_class>
inline bool voronoicell_base::collapse_order1(vc_class &vc) {
    int i,j,k;
    while (mec[1]>0) {
        up=0;
#if VOROPP_VERBOSE >=1
        fputs("Order one collapse\n",stderr);
#endif
        // Read the order-1 vertex's sole edge and owner, then sever it.
        i=--mec[1];
        j=mep[1][3*i];k=mep[1][3*i+1];
        i=mep[1][3*i+2];
        if (!delete_connection(vc,j,k,false)) return false;

        // Compact the memory: move the last vertex p into slot i.
        --p;
        if (up==i) up=0;
        if (p!=i) {
            if (up==p) up=i;
            pts[3*i]=pts[3*p];
            pts[3*i+1]=pts[3*p+1];
            pts[3*i+2]=pts[3*p+2];
            for (k=0;k<nu[p];k++) ed[ed[p][k]][ed[p][nu[p]+k]]=i;
            vc.n_copy_pointer(i,p);
            ed[i]=ed[p];
            nu[i]=nu[p];
            ed[i][nu[i]<<1]=i;
        }
    }
    return true;
}

/** This routine deletes the kth edge of vertex j and reorganizes the memory.
 * If the neighbor computation is enabled, we also have to supply a handedness
 * flag to decide whether to preserve the plane on the left or right of the
 * connection.
 * \return False if a zero order vertex was formed, indicative of the cell
 *         disappearing; true if the vertex removal was successful. */
template<class vc_class>
inline bool voronoicell_base::delete_connection(vc_class &vc,int j,int k,bool hand) {
    int q=hand?k:cycle_up(k,j);
    // The vertex drops from order nu[j] to order i = nu[j]-1.
    int i=nu[j]-1,l,*edp,*edd,m;
#if VOROPP_VERBOSE >=1
    if (i<1) {
        fputs("Zero order vertex formed\n",stderr);
        return false;
    }
#endif
    if (mec[i]==mem[i]) add_memory(vc,i,ds2);

    // Copy neighbor information, skipping the removed slot.
    vc.n_set_aux1(i);
    for (l=0;l<q;l++) vc.n_copy_aux1(j,l);
    while (l<i) {
        vc.n_copy_aux1_shift(j,l);
        l++;
    }

    // Build the new, smaller edge record, shifting entries past k down by
    // one and fixing up the relational slots of the affected neighbors.
    edp=mep[i]+((i<<1)+1)*mec[i]++;
    edp[i<<1]=j;
    for (l=0;l<k;l++) {
        edp[l]=ed[j][l];
        edp[l+i]=ed[j][l+nu[j]];
    }
    while (l<i) {
        m=ed[j][l+1];
        edp[l]=m;
        k=ed[j][l+nu[j]+1];
        edp[l+i]=k;
        ed[m][nu[m]+k]--;
        l++;
    }

    // Release the old order-nu[j] record, back-filling the hole with the
    // pool's last record.
    edd=mep[nu[j]]+((nu[j]<<1)+1)*--mec[nu[j]];
    for (l=0;l<=(nu[j]<<1);l++) ed[j][l]=edd[l];
    vc.n_set_aux2_copy(j,nu[j]);
    vc.n_set_to_aux2(edd[nu[j]<<1]);
    vc.n_set_to_aux1(j);
    ed[edd[nu[j]<<1]]=edd;
    ed[j]=edp;
    nu[j]=i;
    return true;
}

/** Calculates the areas of each face of the Voronoi cell and stores them in
 * the supplied vector (the original comment said "prints ... to an output
 * stream", which no longer matches the signature).
 * \param[out] v the vector to store the results in.
*/ void voronoicell_base::face_areas(std::vector<double> &v) { double area; v.clear(); int i,j,k,l,m,n; double ux,uy,uz,vx,vy,vz,wx,wy,wz; for (i=1;i<p;i++) for(j=0;j<nu[i];j++) { k=ed[i][j]; if (k>=0) { area=0; ed[i][j]=-1-k; l=cycle_up(ed[i][nu[i]+j],k); m=ed[k][l];ed[k][l]=-1-m; while (m!=i) { n=cycle_up(ed[k][nu[k]+l],m); ux=pts[3*k]-pts[3*i]; uy=pts[3*k+1]-pts[3*i+1]; uz=pts[3*k+2]-pts[3*i+2]; vx=pts[3*m]-pts[3*i]; vy=pts[3*m+1]-pts[3*i+1]; vz=pts[3*m+2]-pts[3*i+2]; wx=uy*vz-uz*vy; wy=uz*vx-ux*vz; wz=ux*vy-uy*vx; area+=sqrt(wx*wx+wy*wy+wz*wz); k=m;l=n; m=ed[k][l];ed[k][l]=-1-m; } v.push_back(0.125*area); } } reset_edges(); } /** Several routines in the class that gather cell-based statistics internally * track their progress by flipping edges to negative so that they know what * parts of the cell have already been tested. This function resets them back * to positive. When it is called, it assumes that every edge in the routine * should have already been flipped to negative, and it bails out with an * internal error if it encounters a positive edge. */ inline void voronoicell_base::reset_edges() { int i,j; for (i=0;i<p;i++) for(j=0;j<nu[i];j++) { if (ed[i][j]>=0) voro_fatal_error("Edge reset routine found a previously untested edge",VOROPP_INTERNAL_ERROR); ed[i][j]=-1-ed[i][j]; } } /** Checks to see if a given vertex is inside, outside or within the test * plane. If the point is far away from the test plane, the routine immediately * returns whether it is inside or outside. If the routine is close the the * plane and within the specified tolerance, then the special check_marginal() * routine is called. * \param[in] n the vertex to test. * \param[out] ans the result of the scalar product used in evaluating the * location of the point. * \return -1 if the point is inside the plane, 1 if the point is outside the * plane, or 0 if the point is within the plane. 
*/ inline int voronoicell_base::m_test(int n,double &ans) { double *pp=pts+n+(n<<1); ans=*(pp++)*px; ans+=*(pp++)*py; ans+=*pp*pz-prsq; if (ans<-tolerance2) { return -1; } else if (ans>tolerance2) { return 1; } return check_marginal(n,ans); } /** Checks to see if a given vertex is inside, outside or within the test * plane, for the case when the point has been detected to be very close to the * plane. The routine ensures that the returned results are always consistent * with previous tests, by keeping a table of any marginal results. The routine * first sees if the vertex is in the table, and if it finds a previously * computed result it uses that. Otherwise, it computes a result for this * vertex and adds it the table. * \param[in] n the vertex to test. * \param[in] ans the result of the scalar product used in evaluating * the location of the point. * \return -1 if the point is inside the plane, 1 if the point is outside the * plane, or 0 if the point is within the plane. */ int voronoicell_base::check_marginal(int n,double &ans) { int i; for (i=0;i<n_marg;i+=2) if (marg[i]==n) return marg[i+1]; if (n_marg==current_marginal) { current_marginal<<=1; if (current_marginal>max_marginal) voro_fatal_error("Marginal case buffer allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR); #if VOROPP_VERBOSE >=2 fprintf(stderr,"Marginal cases buffer scaled up to %d\n",i); #endif int *pmarg=new int[current_marginal]; for (int j=0;j<n_marg;j++) pmarg[j]=marg[j]; delete [] marg; marg=pmarg; } marg[n_marg++]=n; marg[n_marg++]=ans>tolerance?1:(ans<-tolerance?-1:0); return marg[n_marg-1]; } /** This initializes the class to be a rectangular box. It calls the base class * initialization routine to set up the edge and vertex information, and then * sets up the neighbor information, with initial faces being assigned ID * numbers from -1 to -6. * \param[in] (xmin,xmax) the minimum and maximum x coordinates. * \param[in] (ymin,ymax) the minimum and maximum y coordinates. 
* \param[in] (zmin,zmax) the minimum and maximum z coordinates. */ void voronoicell_neighbor::init(double xmin,double xmax,double ymin,double ymax,double zmin,double zmax) { init_base(xmin,xmax,ymin,ymax,zmin,zmax); int *q=mne[3]; *q=-5;q[1]=-3;q[2]=-1; q[3]=-5;q[4]=-2;q[5]=-3; q[6]=-5;q[7]=-1;q[8]=-4; q[9]=-5;q[10]=-4;q[11]=-2; q[12]=-6;q[13]=-1;q[14]=-3; q[15]=-6;q[16]=-3;q[17]=-2; q[18]=-6;q[19]=-4;q[20]=-1; q[21]=-6;q[22]=-2;q[23]=-4; *ne=q;ne[1]=q+3;ne[2]=q+6;ne[3]=q+9; ne[4]=q+12;ne[5]=q+15;ne[6]=q+18;ne[7]=q+21; } /** This routine checks to make sure the neighbor information of each face is * consistent. */ void voronoicell_neighbor::check_facets() { int i,j,k,l,m,q; for (i=1;i<p;i++) for(j=0;j<nu[i];j++) { k=ed[i][j]; if (k>=0) { ed[i][j]=-1-k; q=ne[i][j]; l=cycle_up(ed[i][nu[i]+j],k); do { m=ed[k][l]; ed[k][l]=-1-m; if (ne[k][l]!=q) fprintf(stderr,"Facet error at (%d,%d)=%d, started from (%d,%d)=%d\n",k,l,ne[k][l],i,j,q); l=cycle_up(ed[k][nu[k]+l],m); k=m; } while (k!=i); } } reset_edges(); } /** The class constructor allocates memory for storing neighbor information. */ voronoicell_neighbor::voronoicell_neighbor() { int i; mne=new int*[current_vertex_order]; ne=new int*[current_vertices]; for (i=0;i<3;i++) mne[i]=new int[init_n_vertices*i]; mne[3]=new int[init_3_vertices*3]; for (i=4;i<current_vertex_order;i++) mne[i]=new int[init_n_vertices*i]; } /** The class destructor frees the dynamically allocated memory for storing * neighbor information. */ voronoicell_neighbor::~voronoicell_neighbor() { for (int i=current_vertex_order-1;i>=0;i--) if (mem[i]>0) delete [] mne[i]; delete [] mne; delete [] ne; } /** Computes a vector list of neighbors. 
*/ void voronoicell_neighbor::neighbors(std::vector<int> &v) { v.clear(); int i,j,k,l,m; for (i=1;i<p;i++) for(j=0;j<nu[i];j++) { k=ed[i][j]; if (k>=0) { v.push_back(ne[i][j]); ed[i][j]=-1-k; l=cycle_up(ed[i][nu[i]+j],k); do { m=ed[k][l]; ed[k][l]=-1-m; l=cycle_up(ed[k][nu[k]+l],m); k=m; } while (k!=i); } } reset_edges(); } /** Returns the number of faces of a computed Voronoi cell. * \return The number of faces. */ int voronoicell_base::number_of_faces() { int i,j,k,l,m,s=0; for (i=1;i<p;i++) for(j=0;j<nu[i];j++) { k=ed[i][j]; if (k>=0) { s++; ed[i][j]=-1-k; l=cycle_up(ed[i][nu[i]+j],k); do { m=ed[k][l]; ed[k][l]=-1-m; l=cycle_up(ed[k][nu[k]+l],m); k=m; } while (k!=i); } } reset_edges(); return s; } /** Returns a vector of the vertex vectors in the global coordinate system. * \param[out] v the vector to store the results in. * \param[in] (x,y,z) the position vector of the particle in the global * coordinate system. */ void voronoicell_base::vertices(double x,double y,double z,std::vector<double> &v) { v.resize(3*p); double *ptsp=pts; for (int i=0;i<3*p;i+=3) { v[i]=x+*(ptsp++)*0.5; v[i+1]=y+*(ptsp++)*0.5; v[i+2]=z+*(ptsp++)*0.5; } } /** For each face, this routine outputs a bracketed sequence of numbers * containing a list of all the vertices that make up that face. * \param[out] v the vector to store the results in. */ void voronoicell_base::face_vertices(std::vector<int> &v) { int i,j,k,l,m,vp(0),vn; v.clear(); for (i=1;i<p;i++) for(j=0;j<nu[i];j++) { k=ed[i][j]; if (k>=0) { v.push_back(0); v.push_back(i); ed[i][j]=-1-k; l=cycle_up(ed[i][nu[i]+j],k); do { v.push_back(k); m=ed[k][l]; ed[k][l]=-1-m; l=cycle_up(ed[k][nu[k]+l],m); k=m; } while (k!=i); vn=v.size(); v[vp]=vn-vp-1; vp=vn; } } reset_edges(); } // Explicit instantiation template bool voronoicell_base::nplane(voronoicell_neighbor&,double,double,double,double,int); template void voronoicell_base::check_memory_for_copy(voronoicell_neighbor&,voronoicell_base*); }
agiliopadua/lammps
src/PTM/ptm_voronoi_cell.cpp
C++
gpl-2.0
66,283
<?php /* * This file is part of EC-CUBE * * Copyright(c) 2000-2012 LOCKON CO.,LTD. All Rights Reserved. * * http://www.lockon.co.jp/ * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /** * HttpResponse を扱うクラス. * * @author Ryuichi Tokugami * @version $Id: SC_Response.php 21867 2012-05-30 07:37:01Z nakanishi $ */ class SC_Response{ /** * コンテンツタイプ * Enter description here ... * @var unknown_type */ var $contentType; var $body; var $statusCode; var $header = array(); /** * * Enter description here ... */ var $encoding; /** * レスポンス出力を書き込む. */ function write() { $this->sendHeader(); echo $this->body; } function sendHeader() { // HTTPのヘッダ foreach ($this->header as $name => $head) { header($name.': '.$head); } if (strlen($this->statusCode) >= 1) { $this->sendHttpStatus($this->statusCode); } } function setContentType($contentType) { $this->header['Content-Type'] = $contentType; } function setResposeBody($body) { $this->body = $body; } function addHeader($name, $value) { $this->header[$name] = $value; } function containsHeader($name) { return isset($this->header[$name]); } /** * アプリケーションのexit処理をする。以降の出力は基本的に停止する。 * 各クラス内部で勝手にexitするな! 
*/ function actionExit() { // ローカルフックポイント処理 $objPlugin = SC_Helper_Plugin_Ex::getSingletonInstance($this->plugin_activate_flg); $arrBacktrace = debug_backtrace(); if (is_object($arrBacktrace[0]['object'])) { $parent_class_name = get_parent_class($arrBacktrace[0]['object']); $objPlugin->doAction($parent_class_name . '_action_' . $arrBacktrace[0]['object']->getMode(), array($arrBacktrace[0]['object'])); $class_name = get_class($arrBacktrace[0]['object']); if ($class_name != $parent_class_name) { $objPlugin->doAction($class_name . '_action_' . $arrBacktrace[0]['object']->getMode(), array($arrBacktrace[0]['object'])); } } exit; // exitしてますが、実際は、LC_Page::destroy() が呼ばれるはず } /** * アプリケーション内でリダイレクトする * * 内部で生成する URL の searchpart は、下記の順で上書きしていく。(後勝ち) * 1. 引数 $inheritQueryString が true の場合、$_SERVER['QUERY_STRING'] * 2. $location に含まれる searchpart * 3. 引数 $arrQueryString * @param string $location 「url-path」「現在のURLからのパス」「URL」のいずれか。「../」の解釈は行なわない。 * @param array $arrQueryString URL に付加する searchpart * @param bool $inheritQueryString 現在のリクエストの searchpart を継承するか * @param bool|null $useSsl true:HTTPSを強制, false:HTTPを強制, null:継承 * @return void * @static */ function sendRedirect($location, $arrQueryString = array(), $inheritQueryString = false, $useSsl = null) { // ローカルフックポイント処理 $objPlugin = SC_Helper_Plugin_Ex::getSingletonInstance($this->plugin_activate_flg); $arrBacktrace = debug_backtrace(); if (is_object($arrBacktrace[0]['object']) && method_exists($arrBacktrace[0]['object'], 'getMode')) { $parent_class_name = get_parent_class($arrBacktrace[0]['object']); $objPlugin->doAction($parent_class_name . '_action_' . $arrBacktrace[0]['object']->getMode(), array($arrBacktrace[0]['object'])); $class_name = get_class($arrBacktrace[0]['object']); if ($class_name != $parent_class_name) { $objPlugin->doAction($class_name . '_action_' . 
$arrBacktrace[0]['object']->getMode(), array($this)); } } elseif (is_object($arrBacktrace[0]['object'])) { $pattern = '/^[a-zA-Z0-9_]+$/'; $mode = null; if (isset($_GET['mode']) && preg_match($pattern, $_GET['mode'])) { $mode = $_GET['mode']; } elseif (isset($_POST['mode']) && preg_match($pattern, $_POST['mode'])) { $mode = $_POST['mode']; } $parent_class_name = get_parent_class($arrBacktrace[0]['object']); $objPlugin->doAction($parent_class_name . '_action_' . $mode, array($arrBacktrace[0]['object'])); $class_name = get_class($arrBacktrace[0]['object']); if ($class_name != $parent_class_name) { $objPlugin->doAction($class_name . '_action_' . $mode, array($this)); } } // url-path → URL 変換 if ($location[0] === '/') { $netUrl = new Net_URL($location); $location = $netUrl->getUrl(); } // URL の場合 if (preg_match('/^https?:/', $location)) { $url = $location; if (is_bool($useSsl)) { if ($useSsl) { $pattern = '/^' . preg_quote(HTTP_URL, '/') . '(.*)/'; $replacement = HTTPS_URL . '\1'; $url = preg_replace($pattern, $replacement, $url); } else { $pattern = '/^' . preg_quote(HTTPS_URL, '/') . '(.*)/'; $replacement = HTTP_URL . '\1'; $url = preg_replace($pattern, $replacement, $url); } } } // 現在のURLからのパス else { if (!is_bool($useSsl)) { $useSsl = SC_Utils_Ex::sfIsHTTPS(); } $netUrl = new Net_URL($useSsl ? HTTPS_URL : HTTP_URL); $netUrl->path = dirname($_SERVER['SCRIPT_NAME']) . '/' . $location; $url = $netUrl->getUrl(); } $pattern = '/^(' . preg_quote(HTTP_URL, '/') . '|' . preg_quote(HTTPS_URL, '/') . 
')/'; // アプリケーション外へのリダイレクトは扱わない if (preg_match($pattern, $url) === 0) { trigger_error('', E_USER_ERROR); } $netUrl = new Net_URL($url); if ($inheritQueryString && !empty($_SERVER['QUERY_STRING'])) { $arrQueryStringBackup = $netUrl->querystring; // XXX メソッド名は add で始まるが、実際には置換を行う $netUrl->addRawQueryString($_SERVER['QUERY_STRING']); $netUrl->querystring = array_merge($netUrl->querystring, $arrQueryStringBackup); } $netUrl->querystring = array_merge($netUrl->querystring, $arrQueryString); $session = SC_SessionFactory_Ex::getInstance(); if ((SC_Display_Ex::detectDevice() == DEVICE_TYPE_MOBILE) || ($session->useCookie() == false) ) { $netUrl->addQueryString(session_name(), session_id()); } $netUrl->addQueryString(TRANSACTION_ID_NAME, SC_Helper_Session_Ex::getToken()); $url = $netUrl->getURL(); header("Location: $url"); exit; } /** * /html/ からのパスを指定してリダイレクトする * * FIXME メソッド名を分かりやすくしたい。現状だと、引数が「url-path より後」とも「url-path」とも読み取れる。(前者が意図したいところ) * @param string $location /html/ からのパス。先頭に / を含むかは任意。「../」の解釈は行なわない。 * @return void * @static */ function sendRedirectFromUrlPath($location, $arrQueryString = array(), $inheritQueryString = false, $useSsl = null) { $location = ROOT_URLPATH . 
ltrim($location, '/'); SC_Response_Ex::sendRedirect($location, $arrQueryString, $inheritQueryString, $useSsl); } /** * @static */ function reload($arrQueryString = array(), $removeQueryString = false) { // 現在の URL を取得 $netUrl = new Net_URL($_SERVER['REQUEST_URI']); if (!$removeQueryString) { $arrQueryString = array_merge($netUrl->querystring, $arrQueryString); } $netUrl->querystring = array(); SC_Response_Ex::sendRedirect($netUrl->getURL(), $arrQueryString); } function setHeader($headers) { $this->header = $headers; } function setStatusCode($statusCode = null) { $this->statusCode = $statusCode; } /** * HTTPステータスコードを送出する。 * * @param integer $statusCode HTTPステータスコード * @return void * @author Seasoft (新規作成) * @see Moony_Action::status() (オリジナル) * @link http://moony.googlecode.com/ (オリジナル) * @author YAMAOKA Hiroyuki (オリジナル) * @copyright 2005-2008 YAMAOKA Hiroyuki (オリジナル) * @license http://opensource.org/licenses/bsd-license.php New BSD License (オリジナル) * @link http://ja.wikipedia.org/wiki/HTTP%E3%82%B9%E3%83%86%E3%83%BC%E3%82%BF%E3%82%B9%E3%82%B3%E3%83%BC%E3%83%89 (邦訳) * @license http://www.gnu.org/licenses/fdl.html GFDL (邦訳) * @static */ function sendHttpStatus($statusCode) { $protocol = $_SERVER['SERVER_PROTOCOL']; $httpVersion = (strpos($protocol, '1.1') !== false) ? 
'1.1' : '1.0'; $messages = array( // Informational 1xx // 【情報】 100 => 'Continue', // 継続 101 => 'Switching Protocols', // プロトコル切替え // Success 2xx // 【成功】 200 => 'OK', // OK 201 => 'Created', // 作成 202 => 'Accepted', // 受理 203 => 'Non-Authoritative Information', // 信頼できない情報 204 => 'No Content', // 内容なし 205 => 'Reset Content', // 内容のリセット 206 => 'Partial Content', // 部分的内容 // Redirection 3xx // 【リダイレクション】 300 => 'Multiple Choices', // 複数の選択 301 => 'Moved Permanently', // 恒久的に移動した 302 => 'Found', // 1.1 // 発見した (リクエストしたリソースは一時的に移動されているときに返される) 303 => 'See Other', // 他を参照せよ 304 => 'Not Modified', // 未更新 305 => 'Use Proxy', // プロキシを使用せよ // 306 is no longer used but still reserved // 将来のために予約されている 307 => 'Temporary Redirect', // 一時的リダイレクト // Client Error 4xx // 【クライアントエラー】 400 => 'Bad Request', // リクエストが不正である 401 => 'Unauthorized', // 認証が必要である 402 => 'Payment Required', // 支払いが必要である 403 => 'Forbidden', // 禁止されている 404 => 'Not Found', // 未検出 405 => 'Method Not Allowed', // 許可されていないメソッド 406 => 'Not Acceptable', // 受理できない 407 => 'Proxy Authentication Required', // プロキシ認証が必要である 408 => 'Request Timeout', // リクエストタイムアウト 409 => 'Conflict', // 矛盾 410 => 'Gone', // 消滅した 411 => 'Length Required', // 長さが必要 412 => 'Precondition Failed', // 前提条件で失敗した 413 => 'Request Entity Too Large', // リクエストエンティティが大きすぎる 414 => 'Request-URI Too Long', // リクエストURIが大きすぎる 415 => 'Unsupported Media Type', // サポートしていないメディアタイプ 416 => 'Requested Range Not Satisfiable', // リクエストしたレンジは範囲外にある 417 => 'Expectation Failed', // 期待するヘッダに失敗 // Server Error 5xx // 【サーバーエラー】 500 => 'Internal Server Error', // サーバー内部エラー 501 => 'Not Implemented', // 実装されていない 502 => 'Bad Gateway', // 不正なゲートウェイ 503 => 'Service Unavailable', // サービス利用不可 504 => 'Gateway Timeout', // ゲートウェイタイムアウト 505 => 'HTTP Version Not Supported', // サポートしていないHTTPバージョン 509 => 'Bandwidth Limit Exceeded' // 帯域幅制限超過 ); if (isset($messages[$statusCode])) { if ($httpVersion !== '1.1') { // HTTP/1.0 $messages[302] = 'Moved Temporarily'; } 
header("HTTP/{$httpVersion} {$statusCode} {$messages[$statusCode]}"); header("Status: {$statusCode} {$messages[$statusCode]}", true, $statusCode); } } }
bangvndng/ec-cube
data/class/SC_Response.php
PHP
gpl-2.0
14,670
import unittest from django.core.urlresolvers import resolve, reverse, NoReverseMatch from pulp.server.webservices.urls import handler404 def assert_url_match(expected_url, url_name, *args, **kwargs): """ Generate a url given args and kwargs and pass it through Django's reverse and resolve functions. Example use to match a url /v2/tasks/<task_argument>/: assert_url_match('/v2/tasks/example_arg/', 'tasks', task_argument='example_arg') :param expected_url: the url that should be generated given a url_name and args :type expected_url: str :param url_name : name given to a url as defined in the urls.py :type url_name : str :param args : optional positional arguments to place into a url's parameters as specified by urls.py :type args : tuple :param kwargs : optional named arguments to place into a url's parameters as specified by urls.py :type kwargs : dict """ try: # Invalid arguments will cause a NoReverseMatch. url = reverse(url_name, args=args, kwargs=kwargs) except NoReverseMatch: raise AssertionError( "Name: '{0}' could match a url with args '{1}'" "and kwargs '{2}'".format(url_name, args, kwargs) ) else: # If the url exists but is not the expected url. if url != expected_url: raise AssertionError( 'url {0} not equal to expected url {1}'.format(url, expected_url)) # Run this url back through resolve and ensure that it matches the url_name. matched_view = resolve(url) if matched_view.url_name != url_name: raise AssertionError('Url name {0} not equal to expected url name {1}'.format( matched_view.url_name, url_name) ) class TestNotFoundHandler(unittest.TestCase): def test_not_found_handler(self): """ Test that the handler404 module attribute is set as expected. """ self.assertEqual(handler404, 'pulp.server.webservices.views.util.page_not_found') class TestDjangoContentUrls(unittest.TestCase): """ Test the matching of the content urls """ def test_match_content_catalog_resource(self): """ Test url matching for content_catalog_resource. 
""" url = '/v2/content/catalog/mock-source/' url_name = 'content_catalog_resource' assert_url_match(url, url_name, source_id='mock-source') def test_match_content_orphan_collection(self): """ Test url matching for content_orphan_collection. """ url = '/v2/content/orphans/' url_name = 'content_orphan_collection' assert_url_match(url, url_name) def test_match_content_units_collection(self): """ Test the url matching for content_units_collection. """ url = '/v2/content/units/mock-type/' url_name = 'content_units_collection' assert_url_match(url, url_name, type_id='mock-type') def test_match_content_unit_search(self): """ Test the url matching for content_unit_search. """ url = '/v2/content/units/mock-type/search/' url_name = 'content_unit_search' assert_url_match(url, url_name, type_id='mock-type') def test_match_content_unit_resource(self): """ Test url matching for content_unit_resource. """ url = '/v2/content/units/mock-type/mock-unit/' url_name = 'content_unit_resource' assert_url_match(url, url_name, type_id='mock-type', unit_id='mock-unit') def test_match_content_unit_user_metadata_resource(self): """ Test url matching for content_unit_user_metadata_resource. """ url = '/v2/content/units/mock-type/mock-unit/pulp_user_metadata/' url_name = 'content_unit_user_metadata_resource' assert_url_match(url, url_name, type_id='mock-type', unit_id='mock-unit') def test_match_content_upload_resource(self): """ Test url matching for content_upload_resource. """ url = '/v2/content/uploads/mock-upload/' url_name = 'content_upload_resource' assert_url_match(url, url_name, upload_id='mock-upload') def test_match_content_upload_segment_resource(self): """ Test Url matching for content_upload_segment_resource. 
""" url = '/v2/content/uploads/mock-upload-id/8/' url_name = 'content_upload_segment_resource' assert_url_match(url, url_name, upload_id='mock-upload-id', offset='8') def test_match_content_actions_delete_orphans(self): """ Test url matching for content_actions_delete_orphans. """ url = '/v2/content/actions/delete_orphans/' url_name = 'content_actions_delete_orphans' assert_url_match(url, url_name) def test_match_content_orphan_resource(self): """ Test url matching for content_orphan_resource. """ url = '/v2/content/orphans/mock-type/mock-unit/' url_name = 'content_orphan_resource' assert_url_match(url, url_name, content_type='mock-type', unit_id='mock-unit') def test_match_content_orphan_type_subcollection(self): """ Test url matching for content_orphan_type_subcollection. """ url = '/v2/content/orphans/mock_type/' url_name = 'content_orphan_type_subcollection' assert_url_match(url, url_name, content_type='mock_type') def test_match_content_uploads(self): """ Test url matching for content_uploads. """ url = '/v2/content/uploads/' url_name = 'content_uploads' assert_url_match(url, url_name) class TestDjangoPluginsUrls(unittest.TestCase): """ Test url matching for plugins urls. """ def test_match_distributor_resource_view(self): """ Test the url matching for the distributor resource view. """ url = '/v2/plugins/distributors/mock_distributor/' url_name = 'plugin_distributor_resource' assert_url_match(url, url_name, distributor_id='mock_distributor') def test_match_distributors_view(self): """ Test the url matching for the Distributors view. 
""" url = '/v2/plugins/distributors/' url_name = 'plugin_distributors' assert_url_match(url, url_name) def test_match_importer_resource_view(self): """ Test the url matching for plugin_importer_resource """ url = '/v2/plugins/importers/mock_importer_id/' url_name = 'plugin_importer_resource' assert_url_match(url, url_name, importer_id='mock_importer_id') def test_match_importers_view(self): """ Test the url matching for the Importers view """ url = '/v2/plugins/importers/' url_name = 'plugin_importers' assert_url_match(url, url_name) def test_match_type_resource_view(self): """ Test the url matching for the TypeResourceView. """ url = '/v2/plugins/types/type_id/' url_name = 'plugin_type_resource' assert_url_match(url, url_name, type_id='type_id') def test_match_types_view(self): """ Test url matching for plugin_types. """ url = '/v2/plugins/types/' url_name = 'plugin_types' assert_url_match(url, url_name) class TestDjangoLoginUrls(unittest.TestCase): """ Tests for root_actions urls. """ def test_match_login_view(self): """ Test url match for login. 
""" url = '/v2/actions/login/' url_name = 'login' assert_url_match(url, url_name) class TestDjangoConsumerGroupsUrls(unittest.TestCase): """ Tests for consumer_groups urls """ def test_match_consumer_group_view(self): """ Test url matching for consumer_groups """ url = '/v2/consumer_groups/' url_name = 'consumer_group' assert_url_match(url, url_name) def test_match_consumer_group_search_view(self): """ Test url matching for consumer_group_search """ url = '/v2/consumer_groups/search/' url_name = 'consumer_group_search' assert_url_match(url, url_name) def test_match_consumer_group_resource_view(self): """ Test url matching for single consumer_group """ url = '/v2/consumer_groups/test-group/' url_name = 'consumer_group_resource' assert_url_match(url, url_name, consumer_group_id='test-group') def test_match_consumer_group_associate_action_view(self): """ Test url matching for consumer_groups association """ url = '/v2/consumer_groups/test-group/actions/associate/' url_name = 'consumer_group_associate' assert_url_match(url, url_name, consumer_group_id='test-group') def test_match_consumer_group_unassociate_action_view(self): """ Test url matching for consumer_groups unassociation """ url = '/v2/consumer_groups/test-group/actions/unassociate/' url_name = 'consumer_group_unassociate' assert_url_match(url, url_name, consumer_group_id='test-group') def test_match_consumer_group_content_action_install_view(self): """ Test url matching for consumer_groups content installation """ url = '/v2/consumer_groups/test-group/actions/content/install/' url_name = 'consumer_group_content' assert_url_match(url, url_name, consumer_group_id='test-group', action='install') def test_match_consumer_group_content_action_update_view(self): """ Test url matching for consumer_groups content update """ url = '/v2/consumer_groups/test-group/actions/content/update/' url_name = 'consumer_group_content' assert_url_match(url, url_name, consumer_group_id='test-group', action='update') def 
test_match_consumer_group_content_action_uninstall_view(self): """ Test url matching for consumer_groups content uninstall """ url = '/v2/consumer_groups/test-group/actions/content/uninstall/' url_name = 'consumer_group_content' assert_url_match(url, url_name, consumer_group_id='test-group', action='uninstall') def test_match_consumer_group_bindings_view(self): """ Test url matching for consumer_groups bindings """ url = '/v2/consumer_groups/test-group/bindings/' url_name = 'consumer_group_bind' assert_url_match(url, url_name, consumer_group_id='test-group') def test_match_consumer_group_binding_view(self): """ Test url matching for consumer_groups binding removal """ url = '/v2/consumer_groups/test-group/bindings/repo1/dist1/' url_name = 'consumer_group_unbind' assert_url_match(url, url_name, consumer_group_id='test-group', repo_id='repo1', distributor_id='dist1') class TestDjangoRepositoriesUrls(unittest.TestCase): """ Test url matching for repositories urls. """ def test_match_repos(self): """ Test url matching for repos. """ url = '/v2/repositories/' url_name = 'repos' assert_url_match(url, url_name) def test_match_repo_search(self): """ Test url matching for repo_search. """ url = '/v2/repositories/search/' url_name = 'repo_search' assert_url_match(url, url_name) def test_match_repo_content_app_regen(self): """ Test url matching for repo_content_app_regen. """ url_name = 'repo_content_app_regen' url = '/v2/repositories/actions/content/regenerate_applicability/' assert_url_match(url, url_name) def test_match_repo_resource(self): """ Test url matching for repo_resource. """ url_name = 'repo_resource' url = '/v2/repositories/mock_repo/' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_unit_search(self): """ Test url matching for repo_unit_search. 
""" url_name = 'repo_unit_search' url = '/v2/repositories/mock_repo/search/units/' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_importers(self): """ Test url matching for repo_importers. """ url_name = 'repo_importers' url = '/v2/repositories/mock_repo/importers/' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_importer_resource(self): """ Test url matching for repo_importer_resource. """ url = '/v2/repositories/mock_repo/importers/mock_importer/' url_name = 'repo_importer_resource' assert_url_match(url, url_name, repo_id='mock_repo', importer_id='mock_importer') def test_match_repo_sync_schedule_collection(self): """ Test url matching for repo_sync_schedules. """ url = '/v2/repositories/mock_repo/importers/mock_importer/schedules/sync/' url_name = 'repo_sync_schedules' assert_url_match(url, url_name, repo_id='mock_repo', importer_id='mock_importer') def test_match_repo_sync_schedule_resource(self): """ Test url matching for repo_sync_schedule_resource. """ url = '/v2/repositories/mock_repo/importers/mock_importer/schedules/sync/mock_schedule/' url_name = 'repo_sync_schedule_resource' assert_url_match(url, url_name, repo_id='mock_repo', importer_id='mock_importer', schedule_id='mock_schedule') def test_match_repo_distributors(self): """ Test url matching for repo_distributors. """ url = '/v2/repositories/mock_repo/distributors/' url_name = 'repo_distributors' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_distributor_resource(self): """ Test url matching for repo_distributor_resource. """ url = '/v2/repositories/mock_repo/distributors/mock_distributor/' url_name = 'repo_distributor_resource' assert_url_match(url, url_name, repo_id='mock_repo', distributor_id='mock_distributor') def test_match_repo_publish_schedules(self): """ Test url matching for repo_publish_schedules. 
""" url = '/v2/repositories/mock_repo/distributors/mock_distributor/schedules/publish/' url_name = 'repo_publish_schedules' assert_url_match(url, url_name, repo_id='mock_repo', distributor_id='mock_distributor') def test_match_repo_publish_schedule_resource(self): """ Test url matching for repo_publish_schedule_resource. """ url = '/v2/repositories/mock_repo/distributors/'\ 'mock_distributor/schedules/publish/mock_schedule/' url_name = 'repo_publish_schedule_resource' assert_url_match(url, url_name, repo_id='mock_repo', distributor_id='mock_distributor', schedule_id='mock_schedule') def test_match_repo_sync_history(self): """ Test url matching for repo_sync_history. """ url = '/v2/repositories/mock_repo/history/sync/' url_name = 'repo_sync_history' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_sync(self): """ Test url matching for repo_sync. """ url = '/v2/repositories/mock_repo/actions/sync/' url_name = 'repo_sync' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_download(self): """ Test url matching for repo_download. """ url = '/v2/repositories/mock_repo/actions/download/' url_name = 'repo_download' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_publish_history(self): """ Test url matching for repo_publish_history. """ url = '/v2/repositories/mock_repo/history/publish/mock_dist/' url_name = 'repo_publish_history' assert_url_match(url, url_name, repo_id='mock_repo', distributor_id='mock_dist') def test_match_repo_publish(self): """ Test url matching for repo_publish. """ url = '/v2/repositories/mock_repo/actions/publish/' url_name = 'repo_publish' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_associate(self): """ Test url matching for repo_associate. 
""" url = '/v2/repositories/mock_repo/actions/associate/' url_name = 'repo_associate' assert_url_match(url, url_name, dest_repo_id='mock_repo') def test_match_repo_unassociate(self): """ Test url matching for repo_unassociate. """ url = '/v2/repositories/mock_repo/actions/unassociate/' url_name = 'repo_unassociate' assert_url_match(url, url_name, repo_id='mock_repo') def test_match_repo_import_upload(self): """ Test url matching for repo_import_upload. """ url = '/v2/repositories/mock_repo/actions/import_upload/' url_name = 'repo_import_upload' assert_url_match(url, url_name, repo_id='mock_repo') class TestDjangoRepoGroupsUrls(unittest.TestCase): """ Test url matching for repo_groups urls """ def test_match_repo_groups(self): """Test url matching for repo_groups.""" url = '/v2/repo_groups/' url_name = 'repo_groups' assert_url_match(url, url_name) def test_match_repo_group_search(self): """Test url matching for repo_group_search.""" url = '/v2/repo_groups/search/' url_name = 'repo_group_search' assert_url_match(url, url_name) def test_match_repo_group_resource(self): url = '/v2/repo_groups/test-group-id/' url_name = 'repo_group_resource' assert_url_match(url, url_name, repo_group_id='test-group-id') def test_match_repo_group_associate(self): url = '/v2/repo_groups/test-group-id/actions/associate/' url_name = 'repo_group_associate' assert_url_match(url, url_name, repo_group_id='test-group-id') def test_match_repo_group_unassociate(self): url = '/v2/repo_groups/test-group-id/actions/unassociate/' url_name = 'repo_group_unassociate' assert_url_match(url, url_name, repo_group_id='test-group-id') def test_match_repo_group_distributors(self): url = '/v2/repo_groups/test-group-id/distributors/' url_name = 'repo_group_distributors' assert_url_match(url, url_name, repo_group_id='test-group-id') def test_match_repo_group_distributor_resource(self): url = '/v2/repo_groups/test-group-id/distributors/test-distributor/' url_name = 'repo_group_distributor_resource' 
assert_url_match(url, url_name, repo_group_id='test-group-id', distributor_id='test-distributor') def test_repo_group_publish(self): url = '/v2/repo_groups/test-group-id/actions/publish/' url_name = 'repo_group_publish' assert_url_match(url, url_name, repo_group_id='test-group-id') class TestDjangoTasksUrls(unittest.TestCase): """ Test the matching for tasks urls. """ def test_match_task_collection(self): """ Test the matching for task_collection. """ url = '/v2/tasks/' url_name = 'task_collection' assert_url_match(url, url_name) def test_match_task_resource(self): """ Test the matching for task_resource. """ url = '/v2/tasks/test-task/' url_name = 'task_resource' assert_url_match(url, url_name, task_id='test-task') def test_match_task_search(self): """ Test the matching for task_resource. """ url = '/v2/tasks/search/' url_name = 'task_search' assert_url_match(url, url_name) class TestDjangoRolesUrls(unittest.TestCase): """ Tests for roles urls. """ def test_match_roles_view(self): """ Test url match for roles. """ url = '/v2/roles/' url_name = 'roles' assert_url_match(url, url_name) def test_match_role_resource_view(self): """ Test url matching for single role. """ url = '/v2/roles/test-role/' url_name = 'role_resource' assert_url_match(url, url_name, role_id='test-role') def test_match_role_users_view(self): """ Test url matching for role's users. """ url = '/v2/roles/test-role/users/' url_name = 'role_users' assert_url_match(url, url_name, role_id='test-role') def test_match_role_user_view(self): """ Test url matching for role's user. 
""" url = '/v2/roles/test-role/users/test-login/' url_name = 'role_user' assert_url_match(url, url_name, role_id='test-role', login='test-login') class TestDjangoPermissionsUrls(unittest.TestCase): """ Tests for permissions urls """ def test_match_permissions_view(self): """ Test url matching for permissions """ url = '/v2/permissions/' url_name = 'permissions' assert_url_match(url, url_name) def test_match_permission_grant_to_role_view(self): """ Test url matching for grant permissions to a role """ url = '/v2/permissions/actions/grant_to_role/' url_name = 'grant_to_role' assert_url_match(url, url_name) def test_match_permission_grant_to_user_view(self): """ Test url matching for grant permissions to a user """ url = '/v2/permissions/actions/grant_to_user/' url_name = 'grant_to_user' assert_url_match(url, url_name) def test_match_permission_revoke_from_role_view(self): """ Test url matching for revoke permissions from a role """ url = '/v2/permissions/actions/revoke_from_role/' url_name = 'revoke_from_role' assert_url_match(url, url_name) def test_match_permission_revoke_from_userview(self): """ Test url matching for revoke permissions from a user """ url = '/v2/permissions/actions/revoke_from_user/' url_name = 'revoke_from_user' assert_url_match(url, url_name) class TestDjangoEventListenersUrls(unittest.TestCase): """ Tests for events urls """ def test_match_event_listeners_view(self): """ Test url matching for event_listeners """ url = '/v2/events/' url_name = 'events' assert_url_match(url, url_name) def test_match_event_listeners_resource_view(self): """ Test url matching for single event_listener """ url = '/v2/events/12345/' url_name = 'event_resource' assert_url_match(url, url_name, event_listener_id='12345') class TestDjangoUsersUrls(unittest.TestCase): """ Tests for userss urls """ def test_match_users_view(self): """ Test url matching for users """ url = '/v2/users/' url_name = 'users' assert_url_match(url, url_name) def test_match_user_search_view(self): 
""" Test url matching for user search. """ url = '/v2/users/search/' url_name = 'user_search' assert_url_match(url, url_name) def test_match_user_resource(self): """ Test the matching for user resource. """ url = '/v2/users/user_login/' url_name = 'user_resource' assert_url_match(url, url_name, login='user_login') class TestStatusUrl(unittest.TestCase): """ Tests for server status url """ def test_match_status_view(self): """ Test url matching for status """ url = '/v2/status/' url_name = 'status' assert_url_match(url, url_name) class TestDjangoConsumersUrls(unittest.TestCase): """ Tests for consumers urls """ def test_match_consumers_view(self): """ Test url matching for consumer """ url = '/v2/consumers/' url_name = 'consumers' assert_url_match(url, url_name) def test_match_consumer_search(self): """ Test url matching for consumer_search. """ url = '/v2/consumers/search/' url_name = 'consumer_search' assert_url_match(url, url_name) def test_match_consumer_resource_view(self): """ Test url matching for consumer resource. """ url = '/v2/consumers/test-consumer/' url_name = 'consumer_resource' assert_url_match(url, url_name, consumer_id='test-consumer') def test_match_consumer_search_view(self): """ Test url matching for consumer search. """ url = '/v2/consumers/search/' url_name = 'consumer_search' assert_url_match(url, url_name) def test_match_consumer_binding_search_view(self): """ Test url matching for consumer binding search. """ url = '/v2/consumers/binding/search/' url_name = 'consumer_binding_search' assert_url_match(url, url_name) def test_match_consumer_profile_search_view(self): """ Test url matching for consumer profile search. 
""" url = '/v2/consumers/profile/search/' url_name = 'consumer_profile_search' assert_url_match(url, url_name) def test_match_consumer_profiles_view(self): """ Test url matching for consumer profiles """ url = '/v2/consumers/test-consumer/profiles/' url_name = 'consumer_profiles' assert_url_match(url, url_name, consumer_id='test-consumer') def test_match_consumer_profile_resource_view(self): """ Test url matching for consumer profile resource """ url = '/v2/consumers/test-consumer/profiles/some-profile/' url_name = 'consumer_profile_resource' assert_url_match(url, url_name, consumer_id='test-consumer', content_type='some-profile') def test_match_consumer_bindings_view(self): """ Test url matching for consumer bindings """ url = '/v2/consumers/test-consumer/bindings/' url_name = 'bindings' assert_url_match(url, url_name, consumer_id='test-consumer') def test_match_consumer_binding_resource_view(self): """ Test url matching for consumer binding resource """ url = '/v2/consumers/test-consumer/bindings/some-repo/some-dist/' url_name = 'consumer_binding_resource' assert_url_match(url, url_name, consumer_id='test-consumer', repo_id='some-repo', distributor_id='some-dist') def test_match_consumer_binding_repo_view(self): """ Test url matching for consumer and repo binding """ url = '/v2/consumers/test-consumer/bindings/some-repo/' url_name = 'bindings_repo' assert_url_match(url, url_name, consumer_id='test-consumer', repo_id='some-repo') def test_match_consumer_appicability_regen_view(self): """ Test url matching for consumer applicability renegeration """ url = '/v2/consumers/test-consumer/actions/content/regenerate_applicability/' url_name = 'consumer_appl_regen' assert_url_match(url, url_name, consumer_id='test-consumer') def test_match_consumer_content_action_install_view(self): """ Test url matching for consumer content installation """ url = '/v2/consumers/test-consumer/actions/content/install/' url_name = 'consumer_content' assert_url_match(url, url_name, 
consumer_id='test-consumer', action='install') def test_match_consumer_content_action_update_view(self): """ Test url matching for consumer content update """ url = '/v2/consumers/test-consumer/actions/content/update/' url_name = 'consumer_content' assert_url_match(url, url_name, consumer_id='test-consumer', action='update') def test_match_consumer_content_action_uninstall_view(self): """ Test url matching for consumer content uninstall """ url = '/v2/consumers/test-consumer/actions/content/uninstall/' url_name = 'consumer_content' assert_url_match(url, url_name, consumer_id='test-consumer', action='uninstall') def test_match_consumers_appicability_regen_view(self): """ Test url matching for consumers applicability renegeration """ url = '/v2/consumers/actions/content/regenerate_applicability/' url_name = 'appl_regen' assert_url_match(url, url_name) def test_match_consumer_query_appicability_view(self): """ Test url matching for consumer query applicability """ url = '/v2/consumers/content/applicability/' url_name = 'consumer_query_appl' assert_url_match(url, url_name) def test_match_consumer_schedule_content_action_install_view(self): """ Test url matching for consumer schedule content installation """ url = '/v2/consumers/test-consumer/schedules/content/install/' url_name = 'schedule_content_install' assert_url_match(url, url_name, consumer_id='test-consumer') def test_match_consumer_schedule_content_action_update_view(self): """ Test url matching for consumer schedule content update """ url = '/v2/consumers/test-consumer/schedules/content/update/' url_name = 'schedule_content_update' assert_url_match(url, url_name, consumer_id='test-consumer') def test_match_consumer_schedule_content_action_uninstall_view(self): """ Test url matching for consumer schedule content uninstall """ url = '/v2/consumers/test-consumer/schedules/content/uninstall/' url_name = 'schedule_content_uninstall' assert_url_match(url, url_name, consumer_id='test-consumer') def 
test_match_consumer_schedule_content_action_install_resource_view(self): """ Test url matching for consumer schedule content resource installation """ url = '/v2/consumers/test-consumer/schedules/content/install/12345/' url_name = 'schedule_content_install_resource' assert_url_match(url, url_name, consumer_id='test-consumer', schedule_id='12345') def test_match_consumer_schedule_content_action_update_resource_view(self): """ Test url matching for consumer schedule content resource update """ url = '/v2/consumers/test-consumer/schedules/content/update/12345/' url_name = 'schedule_content_update_resource' assert_url_match(url, url_name, consumer_id='test-consumer', schedule_id='12345') def test_match_consumer_schedule_content_action_uninstall_resource_view(self): """ Test url matching for consumer schedule content resource uninstall """ url = '/v2/consumers/test-consumer/schedules/content/uninstall/12345/' url_name = 'schedule_content_uninstall_resource' assert_url_match(url, url_name, consumer_id='test-consumer', schedule_id='12345') def test_match_consumer_history_view(self): """ Test url matching for consumer history """ url = '/v2/consumers/test-consumer/history/' url_name = 'consumer_history' assert_url_match(url, url_name, consumer_id='test-consumer') class TestDjangoContentSourcesUrls(unittest.TestCase): """ Tests for content sources. """ def test_match_content_sources_view(self): """ Test url matching for content sources. """ url = '/v2/content/sources/' url_name = 'content_sources' assert_url_match(url, url_name) def test_match_content_sources_resource(self): """ Test the matching for content sources resource. """ url = '/v2/content/sources/some-source/' url_name = 'content_sources_resource' assert_url_match(url, url_name, source_id='some-source') def test_match_content_sources_refresh_view(self): """ Test url matching for content sources refresh. 
""" url = '/v2/content/sources/action/refresh/' url_name = 'content_sources_action' assert_url_match(url, url_name, action='refresh') def test_match_content_sources_resource_refresh(self): """ Test the matching for content sources resource refresh. """ url = '/v2/content/sources/some-source/action/refresh/' url_name = 'content_sources_resource_action' assert_url_match(url, url_name, source_id='some-source', action='refresh')
ulif/pulp
server/test/unit/server/webservices/test_urls.py
Python
gpl-2.0
33,452
<?php
/**
 * @version    $Id$
 * @package    WR MegaMenu
 * @author     WooRockets Team <support@woorockets.com>
 * @copyright  Copyright (C) 2014 WooRockets.com All Rights Reserved.
 * @license    GNU/GPL v2 or later http://www.gnu.org/licenses/gpl-2.0.html
 *
 * Websites: http://www.woorockets.com
 * Technical Support: Feedback - http://www.woorockets.com
 */

/**
 * Gadget class for loading editor for WR MegaMenu element.
 *
 * @package  WR_Megamenu
 * @since    2.0.2
 */
class WR_Megamenu_Gadget_Edit_Element extends WR_Megamenu_Gadget_Base {
	/**
	 * Gadget file name without extension.
	 *
	 * @var string
	 */
	protected $gadget = 'edit-element';

	/**
	 * Load form for editing WR MegaMenu element.
	 *
	 * Captures the editor HTML via output buffering, stores it as the
	 * gadget response, then hooks in filter_assets() to strip assets
	 * that the editor does not need.
	 *
	 * @return void
	 */
	public function form_action() {
		global $mega_menu;

		// Use output buffering to capture HTML code for element editor.
		ob_start();

		if ( isset( $_GET['wr_shortcode_preview'] ) && 1 == $_GET['wr_shortcode_preview'] ) {
			$mega_menu->shortcode_iframe_preview();
		} else {
			$mega_menu->get_modal_content();
		}

		// Set response for injecting into template file.
		$this->set_response( 'success', ob_get_clean() );

		// Register actions to remove unnecessary assets (skipped for widget previews).
		global $Wr_Megamenu_Preview_Class;
		if ( $Wr_Megamenu_Preview_Class != 'WR_Megamenu_Widget' ) {
			add_action( 'mm_admin_print_styles', array( &$this, 'filter_assets' ), 0 );
			add_action( 'mm_admin_print_scripts', array( &$this, 'filter_assets' ), 0 );
		}
	}

	/**
	 * Filter required assets.
	 *
	 * First invocation prunes the enqueued style/script queues down to
	 * WR-prefixed handles (or empties them entirely when only the form
	 * is requested) and re-enqueues any on-demand assets whose CSS
	 * id/class signature appears in the response markup. Subsequent
	 * invocations only drop jQuery core in form-only mode.
	 *
	 * @return void
	 */
	public function filter_assets() {
		static $executed;

		global $wp_styles, $wp_scripts;

		// Check if requesting form only.
		$form_only = ( isset( $_GET['form_only'] ) && absint( $_GET['form_only'] ) );

		if ( ! isset( $executed ) ) {
			// Remove unnecessary assets from both queues.
			foreach ( array( &$wp_styles, &$wp_scripts ) as $queue_object ) {
				if ( @count( $queue_object->queue ) ) {
					foreach ( $queue_object->queue as $position => $asset_handle ) {
						if ( $form_only ) {
							// Form-only request: drop everything.
							unset( $queue_object->queue[ $position ] );
						} elseif ( 'wr-' != substr( $asset_handle, 0, 3 ) ) {
							// Keep only WR-prefixed assets.
							unset( $queue_object->queue[ $position ] );
						}
					}
				}
			}

			// Get response data.
			$response = $this->get_response();

			// Allow required assets to be filterable.
			$on_demand_assets = array();

			if ( ! $form_only ) {
				$on_demand_assets['jsn-tabs']    = 'jquery-ui-tabs';
				$on_demand_assets['ui-sortable'] = 'jquery-ui-sortable';
			}

			$on_demand_assets = apply_filters( 'wr-mm-edit-element-required-assets', $on_demand_assets );

			// Detect and load required assets on demand: numeric keys load
			// unconditionally, string keys load only when the signature is
			// present in an id/class attribute of the response markup.
			foreach ( $on_demand_assets as $sign => $handle ) {
				if ( is_numeric( $sign ) ) {
					$this->load_asset( $handle );
				} elseif ( preg_match( '/\s(id|class)\s*=\s*[\'"][^\'"]*' . $sign . '[^\'"]*[\'"]/', $response['data'] ) ) {
					$this->load_asset( $handle );
				}
			}

			// State that this method is already executed.
			$executed = true;
		} else {
			// Never load jQuery core when serving form only.
			if ( $form_only ) {
				foreach ( $wp_scripts->queue as $position => $asset_handle ) {
					if ( 'jquery' == substr( $asset_handle, 0, 6 ) ) {
						unset( $wp_scripts->queue[ $position ] );
					}
				}
			}
		}
	}

	/**
	 * Method to load specified asset.
	 *
	 * Accepts a single handle, a file name (converted to a handle), or
	 * an array of either — arrays are loaded recursively.
	 *
	 * @param   string|array  $handle  Asset handle(s).
	 *
	 * @return  void
	 */
	protected function load_asset( $handle ) {
		if ( is_array( $handle ) ) {
			foreach ( $handle as $single_handle ) {
				$this->load_asset( $single_handle );
			}

			return;
		}

		// Prepare asset handle from file name if necessary.
		if ( preg_match( '/\.(css|js)$/', $handle ) ) {
			$handle = WR_Megamenu_Init_Assets::file_to_handle( $handle );
		}

		// Load asset.
		WR_Megamenu_Init_Assets::load( $handle );
	}
}
zhed/wpatomneto
wp-content/plugins/wr-megamenu/includes/gadget/edit-element.php
PHP
gpl-2.0
3,740
/*
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * Copyright (c) 1999-2008 Apple Inc. All Rights Reserved.
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 *
 */
/*
    File:       QTSSPrefs.cpp

    Contains:   Implements class defined in QTSSPrefs.h.

    Change History (most recent first):
*/

#include "QTSSPrefs.h"
#include "MyAssert.h"
#include "OSMemory.h"
#include "QTSSDataConverter.h"
#include "OSArrayObjectDeleter.h"

QTSSPrefs::QTSSPrefs(XMLPrefsParser* inPrefsSource, StrPtrLen* inModuleName,
                     QTSSDictionaryMap* inMap, Bool16 areInstanceAttrsAllowed,
                     QTSSPrefs* parentDictionary)
 :  QTSSDictionary(inMap, &fPrefsMutex),
    fPrefsSource(inPrefsSource),
    fPrefName(NULL),
    fParentDictionary(parentDictionary)
{
    if (inModuleName != NULL)
        fPrefName = inModuleName->GetAsCString();
}

QTSSDictionary* QTSSPrefs::CreateNewDictionary(QTSSDictionaryMap* inMap, OSMutex* /* inMutex */)
{
    return NEW QTSSPrefs(fPrefsSource, NULL, inMap, true, this);
}

void QTSSPrefs::RereadPreferences()
{
    RereadObjectPreferences(GetContainerRef());
}

// Synchronizes this dictionary's instance attributes with the prefs in the
// given container of the XML prefs file: adds prefs missing from the
// dictionary, replaces prefs whose data type changed, and removes attributes
// that no longer exist in the file.
void QTSSPrefs::RereadObjectPreferences(ContainerRef container)
{
    QTSS_Error theErr = QTSS_NoErr;

    //
    // Keep track of which pref attributes should remain. All others
    // will be removed.
    // This routine uses names because it adds and deletes attributes. This means
    // attribute indexes, positions and counts are constantly changing.
    UInt32 initialNumAttrs = 0;
    if (this->GetInstanceDictMap() != NULL)
    {
        initialNumAttrs = this->GetInstanceDictMap()->GetNumAttrs();
    }

    char** modulePrefInServer;
    if (initialNumAttrs > 0)
    {
        modulePrefInServer = NEW char*[initialNumAttrs];
        ::memset(modulePrefInServer, 0, sizeof(char*) * initialNumAttrs);
    }
    else
    {
        modulePrefInServer = NULL;
    }

    OSMutexLocker locker(&fPrefsMutex);
    UInt32 theNumPrefs = fPrefsSource->GetNumPrefsByContainer(container);

    // Pull out all the attribute names currently registered in the server.
    for (UInt32 i = 0; i < initialNumAttrs; i++)
    {
        QTSSAttrInfoDict* theAttrInfoPtr = NULL;
        theErr = this->GetInstanceDictMap()->GetAttrInfoByIndex(i, &theAttrInfoPtr);
        if (theErr != QTSS_NoErr)
            continue;

        UInt32 nameLen = 0;
        theErr = theAttrInfoPtr->GetValuePtr(qtssAttrName, 0, (void**)&modulePrefInServer[i], &nameLen);
        Assert(theErr == QTSS_NoErr);
    }

    // Use the names of the attributes in the attribute map as the key values for
    // finding preferences in the config file.
    for (UInt32 x = 0; x < theNumPrefs; x++)
    {
        char* thePrefTypeStr = NULL;
        char* thePrefName = NULL;
        (void)fPrefsSource->GetPrefValueByIndex(container, x, 0, &thePrefName, &thePrefTypeStr);

        // What type is this data type?
        QTSS_AttrDataType thePrefType = QTSSDataConverter::TypeStringToType(thePrefTypeStr);

        //
        // Check to see if there is an attribute with this name already in the
        // instance map. If one matches, then we don't need to add this attribute.
        QTSSAttrInfoDict* theAttrInfo = NULL;
        if (this->GetInstanceDictMap() != NULL)
            (void)this->GetInstanceDictMap()->GetAttrInfoByName(thePrefName, &theAttrInfo,
                                                                false); // false=don't return info on deleted attributes

        UInt32 theLen = sizeof(QTSS_AttrDataType);
        QTSS_AttributeID theAttrID = qtssIllegalAttrID;

        // Mark prefs that exist in both the file and the server so they are
        // not deleted in the cleanup pass below.
        for (UInt32 i = 0; i < initialNumAttrs; i++)
        {
            if (modulePrefInServer[i] != NULL && thePrefName != NULL &&
                0 == ::strcmp(modulePrefInServer[i], thePrefName))
            {
                modulePrefInServer[i] = NULL; // in the server so don't delete later
            }
        }

        if (theAttrInfo == NULL)
        {
            theAttrID = this->AddPrefAttribute(thePrefName, thePrefType);  // not present or deleted
            this->SetPrefValuesFromFile(container, x, theAttrID, 0);       // will add another or replace a deleted attribute
        }
        else
        {
            QTSS_AttrDataType theAttrType = qtssAttrDataTypeUnknown;
            theErr = theAttrInfo->GetValue(qtssAttrDataType, 0, &theAttrType, &theLen);
            Assert(theErr == QTSS_NoErr);

            theLen = sizeof(theAttrID);
            theErr = theAttrInfo->GetValue(qtssAttrID, 0, &theAttrID, &theLen);
            Assert(theErr == QTSS_NoErr);

            if (theAttrType != thePrefType)
            {
                //
                // This is not the same pref as before, because the data types
                // are different. Remove the old one from the map, add the new one.
                (void)this->RemoveInstanceAttribute(theAttrID);
                theAttrID = this->AddPrefAttribute(thePrefName, thePrefType);
            }
            // else: this pref already exists with the same type.

            //
            // Set the values
            this->SetPrefValuesFromFile(container, x, theAttrID, 0);

            // Mark this pref as found.
            SInt32 theIndex = this->GetInstanceDictMap()->ConvertAttrIDToArrayIndex(theAttrID);
            Assert(theIndex >= 0);
        }
    }

    // Remove all attributes that no longer apply
    if (this->GetInstanceDictMap() != NULL && initialNumAttrs > 0)
    {
        for (UInt32 a = 0; a < initialNumAttrs; a++)
        {
            if (NULL != modulePrefInServer[a]) // found a pref in the server that wasn't in the file
            {
                QTSSAttrInfoDict* theAttrInfoPtr = NULL;
                theErr = this->GetInstanceDictMap()->GetAttrInfoByName(modulePrefInServer[a], &theAttrInfoPtr);
                Assert(theErr == QTSS_NoErr);
                if (theErr != QTSS_NoErr)
                    continue;

                QTSS_AttributeID theAttrID = qtssIllegalAttrID;
                UInt32 theLen = sizeof(theAttrID);
                theErr = theAttrInfoPtr->GetValue(qtssAttrID, 0, &theAttrID, &theLen);
                Assert(theErr == QTSS_NoErr);
                if (theErr != QTSS_NoErr)
                    continue;

                if (0) // debugging aid, disabled
                {
                    char* theName = NULL;
                    UInt32 nameLen = 0;
                    theAttrInfoPtr->GetValuePtr(qtssAttrName, 0, (void**)&theName, &nameLen);
                    qtss_printf("QTSSPrefs::RereadPreferences about to delete modulePrefInServer=%s attr=%s id=%"_U32BITARG_"\n", modulePrefInServer[a], theName, theAttrID);
                }

                this->GetInstanceDictMap()->RemoveAttribute(theAttrID);
                modulePrefInServer[a] = NULL;
            }
        }
    }

    // BUGFIX: the array was allocated with NEW char*[...], so it must be
    // released with array delete; plain 'delete' on an array is undefined behavior.
    delete [] modulePrefInServer;
}

void QTSSPrefs::SetPrefValuesFromFile(ContainerRef container, UInt32 inPrefIndex,
                                      QTSS_AttributeID inAttrID, UInt32 inNumValues)
{
    ContainerRef pref = fPrefsSource->GetPrefRefByIndex(container, inPrefIndex);
    SetPrefValuesFromFileWithRef(pref, inAttrID, inNumValues);
}

// Copies every value of the given pref from the file into the dictionary
// attribute inAttrID. inNumValues == 0 means "use however many values the
// file has".
void QTSSPrefs::SetPrefValuesFromFileWithRef(ContainerRef pref, QTSS_AttributeID inAttrID,
                                             UInt32 inNumValues)
{
    //
    // We have an attribute ID for this pref, it is in the map and everything.
    // Now, let's add all the values that are in the pref file.
    if (pref == 0)
        return;

    UInt32 numPrefValues = inNumValues;
    if (inNumValues == 0)
        numPrefValues = fPrefsSource->GetNumPrefValues(pref);

    char* thePrefName = NULL;
    char* thePrefValue = NULL;
    char* thePrefTypeStr = NULL;
    QTSS_AttrDataType thePrefType = qtssAttrDataTypeUnknown;

    // find the type. If this is a QTSSObject, then we need to call a different routine
    thePrefValue = fPrefsSource->GetPrefValueByRef(pref, 0, &thePrefName, &thePrefTypeStr);
    thePrefType = QTSSDataConverter::TypeStringToType(thePrefTypeStr);
    if (thePrefType == qtssAttrDataTypeQTSS_Object)
    {
        SetObjectValuesFromFile(pref, inAttrID, numPrefValues, thePrefName);
        return;
    }

    UInt32 maxPrefValueSize = 0;
    QTSS_Error theErr = QTSS_NoErr;

    //
    // We have to loop through all the values associated with this pref twice:
    // first, to figure out the length (in bytes) of the longest value, secondly
    // to actually copy these values into the dictionary.
    for (UInt32 y = 0; y < numPrefValues; y++)
    {
        UInt32 tempMaxPrefValueSize = 0;
        thePrefValue = fPrefsSource->GetPrefValueByRef(pref, y, &thePrefName, &thePrefTypeStr);
        theErr = QTSSDataConverter::StringToValue(thePrefValue, thePrefType,
                                                  NULL, &tempMaxPrefValueSize);
        Assert(theErr == QTSS_NotEnoughSpace);

        if (tempMaxPrefValueSize > maxPrefValueSize)
            maxPrefValueSize = tempMaxPrefValueSize;
    }

    for (UInt32 z = 0; z < numPrefValues; z++)
    {
        thePrefValue = fPrefsSource->GetPrefValueByRef(pref, z, &thePrefName, &thePrefTypeStr);
        this->SetPrefValue(inAttrID, z, thePrefValue, thePrefType, maxPrefValueSize);
    }

    //
    // Make sure the dictionary knows exactly how many values are associated with
    // this pref
    this->SetNumValues(inAttrID, numPrefValues);
}

// Reads nested QTSS_Object pref values: reuses existing child QTSSPrefs
// objects where present, otherwise creates them, then recurses into each.
void QTSSPrefs::SetObjectValuesFromFile(ContainerRef pref, QTSS_AttributeID inAttrID,
                                        UInt32 inNumValues, char* prefName)
{
    for (UInt32 z = 0; z < inNumValues; z++)
    {
        ContainerRef object = fPrefsSource->GetObjectValue(pref, z);
        QTSSPrefs* prefObject;
        UInt32 len = sizeof(QTSSPrefs*);
        QTSS_Error err = this->GetValue(inAttrID, z, &prefObject, &len);
        if (err != QTSS_NoErr)
        {
            UInt32 tempIndex;
            err = CreateObjectValue(inAttrID, &tempIndex, (QTSSDictionary**)&prefObject, NULL,
                                    QTSSDictionary::kDontObeyReadOnly | QTSSDictionary::kDontCallCompletionRoutine);
            Assert(err == QTSS_NoErr);
            Assert(tempIndex == z);
            if (err != QTSS_NoErr)  // this shouldn't happen
                return;
            StrPtrLen temp(prefName);
            prefObject->fPrefName = temp.GetAsCString();
        }
        prefObject->RereadObjectPreferences(object);
    }

    //
    // Make sure the dictionary knows exactly how many values are associated with
    // this pref
    this->SetNumValues(inAttrID, inNumValues);
}

// Converts one textual pref value to its binary representation and stores it
// in the dictionary. inValueSize == 0 means "use the converted size".
void QTSSPrefs::SetPrefValue(QTSS_AttributeID inAttrID, UInt32 inAttrIndex,
                             char* inPrefValue, QTSS_AttrDataType inPrefType, UInt32 inValueSize)
{
    static const UInt32 kMaxPrefValueSize = 1024;
    char convertedPrefValue[kMaxPrefValueSize];
    ::memset(convertedPrefValue, 0, kMaxPrefValueSize);
    Assert(inValueSize < kMaxPrefValueSize);

    UInt32 convertedBufSize = kMaxPrefValueSize;
    QTSS_Error theErr = QTSSDataConverter::StringToValue(inPrefValue, inPrefType,
                                                         convertedPrefValue, &convertedBufSize);
    Assert(theErr == QTSS_NoErr);

    if (inValueSize == 0)
        inValueSize = convertedBufSize;

    this->SetValue(inAttrID, inAttrIndex, convertedPrefValue, inValueSize,
                   QTSSDictionary::kDontObeyReadOnly | QTSSDictionary::kDontCallCompletionRoutine);
}

// Registers a read/write/deletable instance attribute for a pref and returns
// its attribute ID.
QTSS_AttributeID QTSSPrefs::AddPrefAttribute(const char* inAttrName, QTSS_AttrDataType inDataType)
{
    QTSS_Error theErr = this->AddInstanceAttribute(inAttrName, NULL, inDataType,
                                                   qtssAttrModeRead | qtssAttrModeWrite | qtssAttrModeDelete);
    Assert(theErr == QTSS_NoErr);

    QTSS_AttributeID theID = qtssIllegalAttrID;
    theErr = this->GetInstanceDictMap()->GetAttrID(inAttrName, &theID);
    Assert(theErr == QTSS_NoErr);

    return theID;
}

// Completion hook: mirrors removal of a single pref value into the prefs file.
void QTSSPrefs::RemoveValueComplete(UInt32 inAttrIndex, QTSSDictionaryMap* inMap,
                                    UInt32 inValueIndex)
{
    ContainerRef objectRef = GetContainerRef();
    ContainerRef pref = fPrefsSource->GetPrefRefByName(objectRef, inMap->GetAttrName(inAttrIndex));
    Assert(pref != NULL);
    if (pref != NULL)
        fPrefsSource->RemovePrefValue(pref, inValueIndex);

    if (fPrefsSource->WritePrefsFile())
        QTSSModuleUtils::LogError(qtssWarningVerbosity, qtssMsgCantWriteFile, 0);
}

// Completion hook: mirrors removal of a whole pref attribute into the prefs file.
void QTSSPrefs::RemoveInstanceAttrComplete(UInt32 inAttrIndex, QTSSDictionaryMap* inMap)
{
    ContainerRef objectRef = GetContainerRef();
    ContainerRef pref = fPrefsSource->GetPrefRefByName(objectRef, inMap->GetAttrName(inAttrIndex));
    Assert(pref != NULL);
    if (pref != NULL)
    {
        fPrefsSource->RemovePref(pref);
    }

    if (fPrefsSource->WritePrefsFile())
        QTSSModuleUtils::LogError(qtssWarningVerbosity, qtssMsgCantWriteFile, 0);
}

// Completion hook: mirrors a value write into the prefs file, creating the
// pref entry if needed.
void QTSSPrefs::SetValueComplete(UInt32 inAttrIndex, QTSSDictionaryMap* inMap,
                                 UInt32 inValueIndex, void* inNewValue, UInt32 inNewValueLen)
{
    ContainerRef objectRef = GetContainerRef();
    ContainerRef pref = fPrefsSource->AddPref(objectRef, inMap->GetAttrName(inAttrIndex),
                                              QTSSDataConverter::TypeToTypeString(inMap->GetAttrType(inAttrIndex)));
    if (inMap->GetAttrType(inAttrIndex) == qtssAttrDataTypeQTSS_Object)
    {
        QTSSPrefs* object = *(QTSSPrefs**)inNewValue;  // value is a pointer to a QTSSPrefs object
        StrPtrLen temp(inMap->GetAttrName(inAttrIndex));
        object->fPrefName = temp.GetAsCString();
        if (inValueIndex == fPrefsSource->GetNumPrefValues(pref))
            fPrefsSource->AddNewObject(pref);
    }
    else
    {
        OSCharArrayDeleter theValueAsString(QTSSDataConverter::ValueToString(inNewValue, inNewValueLen,
                                                                             inMap->GetAttrType(inAttrIndex)));
        fPrefsSource->SetPrefValue(pref, inValueIndex, theValueAsString.GetObject());
    }

    if (fPrefsSource->WritePrefsFile())
        QTSSModuleUtils::LogError(qtssWarningVerbosity, qtssMsgCantWriteFile, 0);
}

// Finds the file container corresponding to a child QTSSPrefs object by
// locating its position among this pref's values.
ContainerRef QTSSPrefs::GetContainerRefForObject(QTSSPrefs* object)
{
    ContainerRef thisContainer = GetContainerRef();
    ContainerRef pref = fPrefsSource->GetPrefRefByName(thisContainer, object->fPrefName);
    if (pref == NULL)
        return NULL;

    if (fPrefsSource->GetNumPrefValues(pref) <= 1)
        return fPrefsSource->GetObjectValue(pref, 0);

    QTSSAttrInfoDict* theAttrInfoPtr = NULL;
    QTSS_Error theErr = this->GetInstanceDictMap()->GetAttrInfoByName(object->fPrefName, &theAttrInfoPtr);
    Assert(theErr == QTSS_NoErr);
    if (theErr != QTSS_NoErr)
        return NULL;

    QTSS_AttributeID theAttrID = qtssIllegalAttrID;
    UInt32 len = sizeof(theAttrID);
    theErr = theAttrInfoPtr->GetValue(qtssAttrID, 0, &theAttrID, &len);
    Assert(theErr == QTSS_NoErr);
    if (theErr != QTSS_NoErr)
        return NULL;

    UInt32 index = 0;
    QTSSPrefs* prefObject;
    len = sizeof(prefObject);
    while (this->GetValue(theAttrID, index, &prefObject, &len) == QTSS_NoErr)
    {
        if (prefObject == object)
        {
            return fPrefsSource->GetObjectValue(pref, index);
        }
        // BUGFIX: advance to the next value; previously the loop never
        // incremented 'index', so it spun forever (re-reading value 0)
        // whenever the first stored object was not the one sought.
        index++;
        len = sizeof(prefObject);  // GetValue may have modified len; reset for next read
    }

    return NULL;
}

// Returns the file container for this prefs dictionary: a module container
// at top level, otherwise the parent's container for this child object.
ContainerRef QTSSPrefs::GetContainerRef()
{
    if (fParentDictionary == NULL)  // this is a top level Pref, so it must be a module
        return fPrefsSource->GetRefForModule(fPrefName);
    else
        return fParentDictionary->GetContainerRefForObject(this);
}
zkf-qwj/Relay-Wasu-NoBalance
DarwinStreamingSrvr6.0.3-Source-relay-wasu-sx/Server.tproj/QTSSPrefs.cpp
C++
gpl-2.0
16,598
/* Copyright (c) 2003, 2005, 2006 MySQL AB This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA */ #ifndef LINEAR_POOL_HPP #define LINEAR_POOL_HPP #include <Bitmask.hpp> #include "SuperPool.hpp" /* * LinearPool - indexed record pool * * LinearPool implements a pool where each record has a 0-based index. * Any index value (up to 2^32-1) is allowed. Normal efficient usage is * to assign index values in sequence and to re-use any values which * have become free. This is default seize/release behaviour. * * LinearPool has 2 internal RecordPool instances: * * (a) record pool of T (the template argument class) * (b) record pool of "maps" (array of Uint32) * * The maps translate an index into an i-value in (a). Each map has * a level. Level 0 maps point to i-values. Level N+1 maps point to * level N maps. There is a unique "root map" at top. * * This works exactly like numbers in a given base. Each map has base * size entries. For implementation convenience the base must be power * of 2 between 2^1 and 2^15. It is given by its log2 value (1-15). * * A position in a map is also called a "digit". * * There is a doubly linked list of available maps (some free entries) * on each level. There is a doubly linked freelist within each map. * There is also a bitmask of used entries in each map. * * Level 0 free entry has space for one record. Level N free entry * implies space for base^N records. 
The implied levels are created and * removed on demand. Empty maps are usually removed. * * Default base is 256 (log2 = 8) which requires maximum 4 levels or * digits (similar to ip address). * * TODO * * - move most of the inline code to LinearPool.cpp * - optimize for common case * - add optimized 2-level implementation (?) */ #include "SuperPool.hpp" template <class T, Uint32 LogBase = 8> class LinearPool { typedef SuperPool::PtrI PtrI; // Base. STATIC_CONST( Base = 1 << LogBase ); // Digit mask. STATIC_CONST( DigitMask = Base - 1 ); // Max possible levels (0 to max root level). STATIC_CONST( MaxLevels = (32 + LogBase - 1) / LogBase ); // Number of words in map used bit mask. STATIC_CONST( BitmaskSize = (Base + 31) / 32 ); // Map. struct Map { Uint32 m_level; Uint32 m_occup; // number of used entries Uint32 m_firstfree; // position of first free entry PtrI m_parent; // parent map Uint32 m_index; // from root to here PtrI m_nextavail; PtrI m_prevavail; Uint32 m_bitmask[BitmaskSize]; PtrI m_entry[Base]; }; public: // Constructor. LinearPool(GroupPool& gp); // Destructor. ~LinearPool(); // Update pointer ptr.p according to index value ptr.i. void getPtr(Ptr<T>& ptr); // Allocate record from the pool. Reuses free index if possible. bool seize(Ptr<T>& ptr); // Allocate given index. Like seize but returns -1 if in use. int seize_index(Ptr<T>& ptr, Uint32 index); // Return record to the pool. void release(Ptr<T>& ptr); // Return number of used records (may require 1 page scan). Uint32 count(); // Verify (debugging). void verify(); private: // Given index find the bottom map. void get_map(Ptr<Map>& map_ptr, Uint32 index); // Add new root map and increase level bool add_root(); // Add new non-root map. bool add_map(Ptr<Map>& map_ptr, Ptr<Map> parent_ptr, Uint32 digit); // Subroutine to initialize map free lists. void init_free(Ptr<Map> map_ptr); // Add entry at given free position. 
void add_entry(Ptr<Map> map_ptr, Uint32 digit, PtrI ptr_i); // Remove entry and map if it becomes empty. void remove_entry(Ptr<Map> map_ptr, Uint32 digit); // Remove map and all parents which become empty. void remove_map(Ptr<Map> map_ptr); // Add map to available list. void add_avail(Ptr<Map> map_ptr); // Remove map from available list. void remove_avail(Ptr<Map> map_ptr); // Verify available lists void verify_avail(); // Verify map (recursive). void verify_map(Ptr<Map> map_ptr, Uint32 level, Uint32* count); RecordPool<T> m_records; RecordPool<Map> m_maps; Uint32 m_levels; // 0 means empty pool PtrI m_root; PtrI m_avail[MaxLevels]; }; template <class T, Uint32 LogBase> inline LinearPool<T, LogBase>::LinearPool(GroupPool& gp) : m_records(gp), m_maps(gp), m_levels(0), m_root(RNIL) { Uint32 n; for (n = 0; n < MaxLevels; n++) m_avail[n] = RNIL; } template <class T, Uint32 LogBase> inline LinearPool<T, LogBase>::~LinearPool() { } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::getPtr(Ptr<T>& ptr) { Uint32 index = ptr.i; // get level 0 map Ptr<Map> map_ptr; get_map(map_ptr, index); // get record Ptr<T> rec_ptr; Uint32 digit = index & DigitMask; assert(BitmaskImpl::get(BitmaskSize, map_ptr.p->m_bitmask, digit)); rec_ptr.i = map_ptr.p->m_entry[digit]; m_records.getPtr(rec_ptr); ptr.p = rec_ptr.p; } template <class T, Uint32 LogBase> inline bool LinearPool<T, LogBase>::seize(Ptr<T>& ptr) { // look for free list on some level Ptr<Map> map_ptr; map_ptr.i = RNIL; Uint32 n = 0; while (n < m_levels) { if ((map_ptr.i = m_avail[n]) != RNIL) break; n++; } if (map_ptr.i == RNIL) { // add new level with available maps if (! add_root()) return false; assert(n < m_levels); map_ptr.i = m_avail[n]; } m_maps.getPtr(map_ptr); // walk down creating missing levels and using an entry on each Uint32 digit; Ptr<Map> new_ptr; new_ptr.i = RNIL; while (true) { digit = map_ptr.p->m_firstfree; if (n == 0) break; Ptr<Map> child_ptr; if (! 
add_map(child_ptr, map_ptr, digit)) { if (new_ptr.i != RNIL) remove_map(new_ptr); return false; } new_ptr = child_ptr; map_ptr = child_ptr; n--; } // now on level 0 assert(map_ptr.p->m_level == 0); Ptr<T> rec_ptr; if (! m_records.seize(rec_ptr)) { if (new_ptr.i != RNIL) remove_map(new_ptr); return false; } add_entry(map_ptr, digit, rec_ptr.i); ptr.i = digit + (map_ptr.p->m_index << LogBase); ptr.p = rec_ptr.p; return true; } template <class T, Uint32 LogBase> inline int LinearPool<T, LogBase>::seize_index(Ptr<T>& ptr, Uint32 index) { // extract all digits at least up to current root level Uint32 digits[MaxLevels]; Uint32 n = 0; Uint32 tmp = index; do { digits[n] = tmp & DigitMask; tmp >>= LogBase; } while (++n < m_levels || tmp != 0); // add any new root levels while (n > m_levels) { if (! add_root()) return false; } // start from root Ptr<Map> map_ptr; map_ptr.i = m_root; m_maps.getPtr(map_ptr); // walk down creating or re-using existing levels Uint32 digit; bool used; Ptr<Map> new_ptr; new_ptr.i = RNIL; while (true) { digit = digits[--n]; used = BitmaskImpl::get(BitmaskSize, map_ptr.p->m_bitmask, digit); if (n == 0) break; if (used) { map_ptr.i = map_ptr.p->m_entry[digit]; m_maps.getPtr(map_ptr); } else { Ptr<Map> child_ptr; if (! add_map(child_ptr, map_ptr, digit)) { if (new_ptr.i != RNIL) remove_map(new_ptr); } new_ptr = child_ptr; map_ptr = child_ptr; } } // now at level 0 assert(map_ptr.p->m_level == 0); Ptr<T> rec_ptr; if (used || ! m_records.seize(rec_ptr)) { if (new_ptr.i != RNIL) remove_map(new_ptr); return used ? 
-1 : false; } add_entry(map_ptr, digit, rec_ptr.i); assert(index == digit + (map_ptr.p->m_index << LogBase)); ptr.i = index; ptr.p = rec_ptr.p; return true; } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::release(Ptr<T>& ptr) { Uint32 index = ptr.i; // get level 0 map Ptr<Map> map_ptr; get_map(map_ptr, index); // release record Ptr<T> rec_ptr; Uint32 digit = index & DigitMask; rec_ptr.i = map_ptr.p->m_entry[digit]; m_records.release(rec_ptr); // remove entry remove_entry(map_ptr, digit); // null pointer ptr.i = RNIL; ptr.p = 0; } template <class T, Uint32 LogBase> inline Uint32 LinearPool<T, LogBase>::count() { SuperPool& sp = m_records.m_superPool; Uint32 count1 = sp.getRecUseCount(m_records.m_recInfo); return count1; } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::verify() { verify_avail(); if (m_root == RNIL) { assert(m_levels == 0); return; } assert(m_levels != 0); Ptr<Map> map_ptr; map_ptr.i = m_root; m_maps.getPtr(map_ptr); Uint32 count1 = count(); Uint32 count2 = 0; verify_map(map_ptr, m_levels - 1, &count2); assert(count1 == count2); } // private methods template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::get_map(Ptr<Map>& map_ptr, Uint32 index) { // root map must exist Ptr<Map> tmp_ptr; tmp_ptr.i = m_root; m_maps.getPtr(tmp_ptr); assert(tmp_ptr.p->m_level + 1 == m_levels); // extract index digits up to current root level Uint32 digits[MaxLevels]; Uint32 n = 0; do { digits[n] = index & DigitMask; index >>= LogBase; } while (++n < m_levels); assert(index == 0); // walk down indirect levels while (--n > 0) { tmp_ptr.i = tmp_ptr.p->m_entry[digits[n]]; m_maps.getPtr(tmp_ptr); } // level 0 map assert(tmp_ptr.p->m_level == 0); map_ptr = tmp_ptr; } template <class T, Uint32 LogBase> inline bool LinearPool<T, LogBase>::add_root() { // new root Ptr<Map> map_ptr; if (! 
m_maps.seize(map_ptr)) return false; Uint32 n = m_levels++; assert(n < MaxLevels); // set up map_ptr.p->m_level = n; map_ptr.p->m_parent = RNIL; map_ptr.p->m_index = 0; init_free(map_ptr); // on level > 0 digit 0 points to old root if (n > 0) { Ptr<Map> old_ptr; old_ptr.i = m_root; m_maps.getPtr(old_ptr); assert(old_ptr.p->m_parent == RNIL); old_ptr.p->m_parent = map_ptr.i; add_entry(map_ptr, 0, old_ptr.i); } // set new root m_root = map_ptr.i; return true; } template <class T, Uint32 LogBase> inline bool LinearPool<T, LogBase>::add_map(Ptr<Map>& map_ptr, Ptr<Map> parent_ptr, Uint32 digit) { if (! m_maps.seize(map_ptr)) return false; assert(parent_ptr.p->m_level != 0); // set up map_ptr.p->m_level = parent_ptr.p->m_level - 1; map_ptr.p->m_parent = parent_ptr.i; map_ptr.p->m_index = digit + (parent_ptr.p->m_index << LogBase); init_free(map_ptr); add_entry(parent_ptr, digit, map_ptr.i); return true; } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::init_free(Ptr<Map> map_ptr) { map_ptr.p->m_occup = 0; map_ptr.p->m_firstfree = 0; // freelist Uint32 j; Uint16 back = ZNIL; for (j = 0; j < Base - 1; j++) { map_ptr.p->m_entry[j] = back | ((j + 1) << 16); back = j; } map_ptr.p->m_entry[j] = back | (ZNIL << 16); // bitmask BitmaskImpl::clear(BitmaskSize, map_ptr.p->m_bitmask); // add to available add_avail(map_ptr); } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::add_entry(Ptr<Map> map_ptr, Uint32 digit, PtrI ptr_i) { assert(map_ptr.p->m_occup < Base && digit < Base); assert(! 
BitmaskImpl::get(BitmaskSize, map_ptr.p->m_bitmask, digit)); // unlink from freelist Uint32 val = map_ptr.p->m_entry[digit]; Uint16 back = val & ZNIL; Uint16 forw = val >> 16; if (back != ZNIL) { assert(back < Base); map_ptr.p->m_entry[back] &= ZNIL; map_ptr.p->m_entry[back] |= (forw << 16); } if (forw != ZNIL) { assert(forw < Base); map_ptr.p->m_entry[forw] &= (ZNIL << 16); map_ptr.p->m_entry[forw] |= back; } if (back == ZNIL) { map_ptr.p->m_firstfree = forw; } // set new value map_ptr.p->m_entry[digit] = ptr_i; map_ptr.p->m_occup++; BitmaskImpl::set(BitmaskSize, map_ptr.p->m_bitmask, digit); if (map_ptr.p->m_occup == Base) remove_avail(map_ptr); } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::remove_entry(Ptr<Map> map_ptr, Uint32 digit) { assert(map_ptr.p->m_occup != 0 && digit < Base); assert(BitmaskImpl::get(BitmaskSize, map_ptr.p->m_bitmask, digit)); // add to freelist Uint32 firstfree = map_ptr.p->m_firstfree; map_ptr.p->m_entry[digit] = ZNIL | (firstfree << 16); if (firstfree != ZNIL) { assert(firstfree < Base); map_ptr.p->m_entry[firstfree] &= (ZNIL << 16); map_ptr.p->m_entry[firstfree] |= digit; } map_ptr.p->m_firstfree = digit; map_ptr.p->m_occup--; BitmaskImpl::clear(BitmaskSize, map_ptr.p->m_bitmask, digit); if (map_ptr.p->m_occup + 1 == Base) add_avail(map_ptr); else if (map_ptr.p->m_occup == 0) remove_map(map_ptr); } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::remove_map(Ptr<Map> map_ptr) { assert(map_ptr.p->m_occup == 0); remove_avail(map_ptr); Ptr<Map> parent_ptr; parent_ptr.i = map_ptr.p->m_parent; Uint32 digit = map_ptr.p->m_index & DigitMask; PtrI map_ptr_i = map_ptr.i; m_maps.release(map_ptr); if (m_root == map_ptr_i) { assert(parent_ptr.i == RNIL); Uint32 used = count(); assert(used == 0); m_root = RNIL; m_levels = 0; } if (parent_ptr.i != RNIL) { m_maps.getPtr(parent_ptr); // remove child entry (recursive) remove_entry(parent_ptr, digit); } } template <class T, Uint32 LogBase> inline void 
LinearPool<T, LogBase>::add_avail(Ptr<Map> map_ptr) { Uint32 n = map_ptr.p->m_level; assert(n < m_levels); map_ptr.p->m_nextavail = m_avail[n]; if (map_ptr.p->m_nextavail != RNIL) { Ptr<Map> next_ptr; next_ptr.i = map_ptr.p->m_nextavail; m_maps.getPtr(next_ptr); next_ptr.p->m_prevavail = map_ptr.i; } map_ptr.p->m_prevavail = RNIL; m_avail[n] = map_ptr.i; } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::remove_avail(Ptr<Map> map_ptr) { Uint32 n = map_ptr.p->m_level; assert(n < m_levels); if (map_ptr.p->m_nextavail != RNIL) { Ptr<Map> next_ptr; next_ptr.i = map_ptr.p->m_nextavail; m_maps.getPtr(next_ptr); next_ptr.p->m_prevavail = map_ptr.p->m_prevavail; } if (map_ptr.p->m_prevavail != RNIL) { Ptr<Map> prev_ptr; prev_ptr.i = map_ptr.p->m_prevavail; m_maps.getPtr(prev_ptr); prev_ptr.p->m_nextavail = map_ptr.p->m_nextavail; } if (map_ptr.p->m_prevavail == RNIL) { m_avail[n] = map_ptr.p->m_nextavail; } map_ptr.p->m_nextavail = RNIL; map_ptr.p->m_prevavail = RNIL; } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::verify_avail() { // check available lists for (Uint32 n = 0; n < MaxLevels; n++) { Ptr<Map> map_ptr; map_ptr.i = m_avail[n]; Uint32 back = RNIL; while (map_ptr.i != RNIL) { m_maps.getPtr(map_ptr); assert(map_ptr.p->m_occup < Base); assert(back == map_ptr.p->m_prevavail); back = map_ptr.i; map_ptr.i = map_ptr.p->m_nextavail; } } } template <class T, Uint32 LogBase> inline void LinearPool<T, LogBase>::verify_map(Ptr<Map> map_ptr, Uint32 level, Uint32* count) { assert(level < MaxLevels); assert(map_ptr.p->m_level == level); // check freelist { Uint32 nused = BitmaskImpl::count(BitmaskSize, map_ptr.p->m_bitmask); assert(nused <= Base); assert(map_ptr.p->m_occup == nused); Uint32 nfree = 0; Uint32 j = map_ptr.p->m_firstfree; Uint16 back = ZNIL; while (j != ZNIL) { assert(j < Base); assert(! 
BitmaskImpl::get(BitmaskSize, map_ptr.p->m_bitmask, j)); Uint32 val = map_ptr.p->m_entry[j]; assert(back == (val & ZNIL)); back = j; j = (val >> 16); nfree++; } assert(nused + nfree == Base); } // check entries { for (Uint32 j = 0; j < Base; j++) { bool free = ! BitmaskImpl::get(BitmaskSize, map_ptr.p->m_bitmask, j); if (free) continue; if (level != 0) { Ptr<Map> child_ptr; child_ptr.i = map_ptr.p->m_entry[j]; m_maps.getPtr(child_ptr); assert(child_ptr.p->m_parent == map_ptr.i); assert(child_ptr.p->m_index == j + (map_ptr.p->m_index << LogBase)); verify_map(child_ptr, level - 1, count); } else { Ptr<T> rec_ptr; rec_ptr.i = map_ptr.p->m_entry[j]; m_records.getPtr(rec_ptr); (*count)++; } } } // check membership on available list { Ptr<Map> avail_ptr; avail_ptr.i = m_avail[map_ptr.p->m_level]; bool found = false; while (avail_ptr.i != RNIL) { if (avail_ptr.i == map_ptr.i) { found = true; break; } m_maps.getPtr(avail_ptr); avail_ptr.i = avail_ptr.p->m_nextavail; } assert(found == (map_ptr.p->m_occup < Base)); } } #endif
SunguckLee/MariaDB
storage/ndb/src/kernel/vm/LinearPool.hpp
C++
gpl-2.0
17,219
/** * ScriptDev2 is an extension for mangos providing enhanced features for * area triggers, creatures, game objects, instances, items, and spells beyond * the default database scripting in mangos. * * Copyright (C) 2006-2013 ScriptDev2 <http://www.scriptdev2.com/> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * * World of Warcraft, and all World of Warcraft or Warcraft art, images, * and lore are copyrighted by Blizzard Entertainment, Inc. */ /** * ScriptData * SDName: Instance_Scarlet_Monastery * SD%Complete: 50 * SDComment: None * SDCategory: Scarlet Monastery * EndScriptData */ #include "precompiled.h" #include "scarlet_monastery.h" instance_scarlet_monastery::instance_scarlet_monastery(Map* pMap) : ScriptedInstance(pMap) { Initialize(); } void instance_scarlet_monastery::Initialize() { memset(&m_auiEncounter, 0, sizeof(m_auiEncounter)); } void instance_scarlet_monastery::OnCreatureCreate(Creature* pCreature) { switch (pCreature->GetEntry()) { case NPC_MOGRAINE: case NPC_WHITEMANE: case NPC_VORREL: m_mNpcEntryGuidStore[pCreature->GetEntry()] = pCreature->GetObjectGuid(); break; } } void instance_scarlet_monastery::OnCreatureDeath(Creature* pCreature) { if (pCreature->GetEntry() == NPC_INTERROGATOR_VISHAS) { // Any other actions to do with Vorrel? setStandState? 
if (Creature* pVorrel = GetSingleCreatureFromStorage(NPC_VORREL)) { DoScriptText(SAY_TRIGGER_VORREL, pVorrel); } } } void instance_scarlet_monastery::OnObjectCreate(GameObject* pGo) { if (pGo->GetEntry() == GO_WHITEMANE_DOOR) { m_mGoEntryGuidStore[GO_WHITEMANE_DOOR] = pGo->GetObjectGuid(); } } void instance_scarlet_monastery::SetData(uint32 uiType, uint32 uiData) { if (uiType == TYPE_MOGRAINE_AND_WHITE_EVENT) { if (uiData == IN_PROGRESS) { DoUseDoorOrButton(GO_WHITEMANE_DOOR); } if (uiData == FAIL) { DoUseDoorOrButton(GO_WHITEMANE_DOOR); } m_auiEncounter[0] = uiData; } } uint32 instance_scarlet_monastery::GetData(uint32 uiData) const { if (uiData == TYPE_MOGRAINE_AND_WHITE_EVENT) { return m_auiEncounter[0]; } return 0; } InstanceData* GetInstanceData_instance_scarlet_monastery(Map* pMap) { return new instance_scarlet_monastery(pMap); } void AddSC_instance_scarlet_monastery() { Script* pNewScript; pNewScript = new Script; pNewScript->Name = "instance_scarlet_monastery"; pNewScript->GetInstanceData = &GetInstanceData_instance_scarlet_monastery; pNewScript->RegisterSelf(); }
mangoszero/serverZero_Rel19
src/scripts/scripts/eastern_kingdoms/scarlet_monastery/instance_scarlet_monastery.cpp
C++
gpl-2.0
3,368
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2012, 2013 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. from datetime import date, datetime import six from wtforms import DateField from wtforms.validators import optional from ..field_base import WebDepositField __all__ = ['Date'] class Date(WebDepositField, DateField): def __init__(self, **kwargs): defaults = dict( icon='calendar', validators=[optional()], widget_classes="form-control" ) defaults.update(kwargs) super(Date, self).__init__(**defaults) def process_data(self, value): """ Called when loading data from Python (incoming objects can be either datetime objects or strings, depending on if they are loaded from an JSON or Python objects). """ if isinstance(value, six.string_types): self.object_data = datetime.strptime(value, self.format).date() elif isinstance(value, datetime): self.object_data = value.date() elif isinstance(value, date): self.object_data = value # Be sure to set both self.object_data and self.data due to internals # of Field.process() and draft_form_process_and_validate(). self.data = self.object_data @property def json_data(self): """ Serialize data into JSON serializalbe object """ # Just use _value() to format the date into a string. if self.data: return self.data.strftime(self.format) # pylint: disable-msg= return None
quantifiedcode-bot/invenio-deposit
invenio_deposit/fields/date.py
Python
gpl-2.0
2,268
<?php $this->text( 'main_top_padding', __( 'Page Content Top Padding', 'Avada' ), __( 'In pixels ex: 20px. Leave empty for default value.', 'Avada' ) ); $this->text( 'main_bottom_padding', __( 'Page Content Bottom Padding', 'Avada' ), __( 'In pixels ex: 20px. Leave empty for default value.', 'Avada' ) ); $this->text( 'hundredp_padding', __( '100% Width Left/Right Padding', 'Avada' ), __( 'This option controls the left/right padding for page content when using 100% site width or 100% width page template. Enter value in px. ex: 20px.', 'Avada' ) ); $screen = get_current_screen(); if ( 'page' == $screen->post_type ) { $this->select( 'show_first_featured_image', __( 'Disable First Featured Image', 'Avada' ), array( 'no' => __( 'No', 'Avada' ), 'yes' => __( 'Yes', 'Avada' ) ), __( 'Disable the 1st featured image on page.', 'Avada' ) ); } // Omit closing PHP tag to avoid "Headers already sent" issues.
MinmingQian/smartcity
wp-content/themes/Avada/framework/metaboxes/tabs/tab_page.php
PHP
gpl-2.0
944
/* * This file is part of the TrinityCore Project. See AUTHORS file for Copyright information * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "ScriptMgr.h" #include "blackrock_spire.h" #include "ScriptedCreature.h" enum Spells { SPELL_FLAMEBREAK = 16785, SPELL_IMMOLATE = 20294, SPELL_TERRIFYINGROAR = 14100, }; enum Events { EVENT_FLAME_BREAK = 1, EVENT_IMMOLATE = 2, EVENT_TERRIFYING_ROAR = 3, }; class boss_the_beast : public CreatureScript { public: boss_the_beast() : CreatureScript("boss_the_beast") { } CreatureAI* GetAI(Creature* creature) const override { return GetBlackrockSpireAI<boss_thebeastAI>(creature); } struct boss_thebeastAI : public BossAI { boss_thebeastAI(Creature* creature) : BossAI(creature, DATA_THE_BEAST) { } void Reset() override { _Reset(); } void EnterCombat(Unit* /*who*/) override { _EnterCombat(); events.ScheduleEvent(EVENT_FLAME_BREAK, 12 * IN_MILLISECONDS); events.ScheduleEvent(EVENT_IMMOLATE, 3 * IN_MILLISECONDS); events.ScheduleEvent(EVENT_TERRIFYING_ROAR, 23 * IN_MILLISECONDS); } void JustDied(Unit* /*killer*/) override { _JustDied(); } void UpdateAI(uint32 diff) override { if (!UpdateVictim()) return; events.Update(diff); if (me->HasUnitState(UNIT_STATE_CASTING)) return; while (uint32 eventId = events.ExecuteEvent()) { switch (eventId) { case EVENT_FLAME_BREAK: DoCastVictim(SPELL_FLAMEBREAK); events.ScheduleEvent(EVENT_FLAME_BREAK, 10 * 
IN_MILLISECONDS); break; case EVENT_IMMOLATE: if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 100, true)) DoCast(target, SPELL_IMMOLATE); events.ScheduleEvent(EVENT_IMMOLATE, 8 * IN_MILLISECONDS); break; case EVENT_TERRIFYING_ROAR: DoCastVictim(SPELL_TERRIFYINGROAR); events.ScheduleEvent(EVENT_TERRIFYING_ROAR, 20 * IN_MILLISECONDS); break; } if (me->HasUnitState(UNIT_STATE_CASTING)) return; } DoMeleeAttackIfReady(); } }; }; void AddSC_boss_thebeast() { new boss_the_beast(); }
Regigicas/TrinityCore
src/server/scripts/EasternKingdoms/BlackrockMountain/BlackrockSpire/boss_the_beast.cpp
C++
gpl-2.0
3,345
<?php /** * @package Joomla.Administrator * @subpackage com_contenthistory * * @copyright Copyright (C) 2005 - 2016 Open Source Matters, Inc. All rights reserved. * @license GNU General Public License version 2 or later; see LICENSE.txt */ defined('_JEXEC') or die; /** * View class for a list of contenthistory. * * @since 1.5 */ class ContenthistoryViewPreview extends JViewLegacy { protected $items; protected $state; /** * Method to display the view. * * @param string $tpl A template file to load. [optional] * * @return mixed Exception on failure, void on success. * * @since 3.2 */ public function display($tpl = null) { $this->state = $this->get('State'); $this->item = $this->get('Item'); if (false === $this->item) { JFactory::getLanguage()->load('com_content', JPATH_SITE, null, true); JError::raiseError(404, JText::_('COM_CONTENT_ERROR_ARTICLE_NOT_FOUND')); return false; } // Check for errors. if (count($errors = $this->get('Errors'))) { JError::raiseError(500, implode("\n", $errors)); return false; } return parent::display($tpl); } }
puneet0191/gsoc16_browser-automated-tests
administrator/components/com_contenthistory/views/preview/view.html.php
PHP
gpl-2.0
1,153
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <title>{#xhtmlxtras_dlg.title_del_element}</title> <?php $file = dirname(__FILE__); $file = substr($file, 0, stripos($file, "wp-content") ); require( $file . "/wp-load.php"); $url = includes_url(); echo '<script type="text/javascript" src="'.$url.'js/tinymce/tiny_mce_popup.js'.'"></script>'; echo '<script type="text/javascript" src="'.$url.'js/tinymce/utils/mctabs.js'.'"></script>'; echo '<script type="text/javascript" src="'.$url.'js/tinymce/utils/form_utils.js'.'"></script>'; echo '<script type="text/javascript" src="'.$url.'js/tinymce/utils/editable_selects.js'.'"></script>'; ?> <!-- <script type="text/javascript" src="../../tinymce/tiny_mce_popup.js"></script> <script type="text/javascript" src="../../tinymce/mctabs.js"></script> <script type="text/javascript" src="../../tinymce/form_utils.js"></script> <script type="text/javascript" src="../../tinymce/editable_selects.js"></script> --> <script type="text/javascript" src="js/element_common.js"></script> <script type="text/javascript" src="js/del.js"></script> <link rel="stylesheet" type="text/css" href="css/popup.css" /> </head> <body id="xhtmlxtrasins" style="display: none" role="application" aria-labelledby="app_title"> <span style="display:none;" id="app_title">{#xhtmlxtras_dlg.title_del_element}</span> <form onsubmit="insertDel();return false;" action="#"> <div class="tabs"> <ul> <li id="general_tab" class="current" aria-controls="general_panel"><span><a href="javascript:mcTabs.displayTab('general_tab','general_panel');" onmousedown="return false;">{#xhtmlxtras_dlg.general_tab}</a></span></li> <!-- <li id="events_tab"><span><a href="javascript:mcTabs.displayTab('events_tab','events_panel');" onmousedown="return false;">{#xhtmlxtras_dlg.events_tab}</a></span></li> --> </ul> </div> <div class="panel_wrapper"> <div id="general_panel" class="panel 
current"> <fieldset> <legend>{#xhtmlxtras_dlg.fieldset_general_tab}</legend> <table role="presentation" border="0" cellpadding="0" cellspacing="4"> <tr> <td class="label"><label id="datetimelabel" for="datetime">{#xhtmlxtras_dlg.attribute_label_datetime}</label>:</td> <td> <table role="presentation" border="0" cellspacing="0" cellpadding="0"> <tr> <td><input id="datetime" name="datetime" type="text" value="" maxlength="19" class="field mceFocus" /></td> <td><a href="javascript:insertDateTime('datetime');" onmousedown="return false;" class="browse" role="button" aria-labelledby="datetimelabel"><span class="datetime" title="{#xhtmlxtras_dlg.insert_date}"></span></a></td> </tr> </table> </td> </tr> <tr> <td class="label"><label id="citelabel" for="cite">{#xhtmlxtras_dlg.attribute_label_cite}</label>:</td> <td><input id="cite" name="cite" type="text" value="" class="field" /></td> </tr> </table> </fieldset> <fieldset> <legend>{#xhtmlxtras_dlg.fieldset_attrib_tab}</legend> <table role="presentation" border="0" cellpadding="0" cellspacing="4"> <tr> <td class="label"><label id="titlelabel" for="title">{#xhtmlxtras_dlg.attribute_label_title}</label>:</td> <td><input id="title" name="title" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label id="idlabel" for="id">{#xhtmlxtras_dlg.attribute_label_id}</label>:</td> <td><input id="id" name="id" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label id="classlabel" for="class">{#xhtmlxtras_dlg.attribute_label_class}</label>:</td> <td> <select id="class" name="class" class="field mceEditableSelect"> <option value="">{#not_set}</option> </select> </td> </tr> <tr> <td class="label"><label id="stylelabel" for="style">{#xhtmlxtras_dlg.attribute_label_style}</label>:</td> <td><input id="style" name="style" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label id="dirlabel" for="dir">{#xhtmlxtras_dlg.attribute_label_langdir}</label>:</td> <td> <select id="dir" 
name="dir" class="field"> <option value="">{#not_set}</option> <option value="ltr">{#xhtmlxtras_dlg.attribute_option_ltr}</option> <option value="rtl">{#xhtmlxtras_dlg.attribute_option_rtl}</option> </select> </td> </tr> <tr> <td class="label"><label id="langlabel" for="lang">{#xhtmlxtras_dlg.attribute_label_langcode}</label>:</td> <td> <input id="lang" name="lang" type="text" value="" class="field" /> </td> </tr> </table> </fieldset> </div> <div id="events_panel" class="panel"> <fieldset> <legend>{#xhtmlxtras_dlg.fieldset_events_tab}</legend> <table role="presentation" border="0" cellpadding="0" cellspacing="4"> <tr> <td class="label"><label for="onfocus">onfocus</label>:</td> <td><input id="onfocus" name="onfocus" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onblur">onblur</label>:</td> <td><input id="onblur" name="onblur" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onclick">onclick</label>:</td> <td><input id="onclick" name="onclick" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="ondblclick">ondblclick</label>:</td> <td><input id="ondblclick" name="ondblclick" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onmousedown">onmousedown</label>:</td> <td><input id="onmousedown" name="onmousedown" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onmouseup">onmouseup</label>:</td> <td><input id="onmouseup" name="onmouseup" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onmouseover">onmouseover</label>:</td> <td><input id="onmouseover" name="onmouseover" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onmousemove">onmousemove</label>:</td> <td><input id="onmousemove" name="onmousemove" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onmouseout">onmouseout</label>:</td> <td><input 
id="onmouseout" name="onmouseout" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onkeypress">onkeypress</label>:</td> <td><input id="onkeypress" name="onkeypress" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onkeydown">onkeydown</label>:</td> <td><input id="onkeydown" name="onkeydown" type="text" value="" class="field" /></td> </tr> <tr> <td class="label"><label for="onkeyup">onkeyup</label>:</td> <td><input id="onkeyup" name="onkeyup" type="text" value="" class="field" /></td> </tr> </table> </fieldset> </div> </div> <div class="mceActionPanel"> <input type="submit" id="insert" name="insert" value="{#update}" /> <input type="button" id="remove" name="remove" class="button" value="{#xhtmlxtras_dlg.remove}" onclick="removeDel();" style="display: none;" /> <input type="button" id="cancel" name="cancel" value="{#cancel}" onclick="tinyMCEPopup.close();" /> </div> </form> </body> </html>
mod13/roamafrica
wp-content/plugins/ultimate-tinymce/addons/xhtmlxtras/del.php
PHP
gpl-2.0
7,711
/*
 * This file is part of the TYPO3 CMS project.
 *
 * It is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License, either version 2
 * of the License, or any later version.
 *
 * For the full copyright and license information, please read the
 * LICENSE.txt file that was distributed with this source code.
 *
 * The TYPO3 project - inspiring people to share!
 */

/**
 * Module: @exports TYPO3/CMS/Rtehtmlarea/HTMLArea/UserAgent/HtmlArea
 * Initialization script of TYPO3 htmlArea RTE
 */
define(['TYPO3/CMS/Rtehtmlarea/HTMLArea/UserAgent/UserAgent',
	'TYPO3/CMS/Rtehtmlarea/HTMLArea/Util/Util',
	'TYPO3/CMS/Rtehtmlarea/HTMLArea/Configuration/Config',
	'TYPO3/CMS/Rtehtmlarea/HTMLArea/Editor/Editor'],
	function (UserAgent, Util, Config, Editor) {

	/**
	 * Bootstrap object for the htmlArea RTE. Holds compiled regular
	 * expressions shared by the editor, applies the page-level RTEarea
	 * configuration onto the global HTMLArea namespace, and creates one
	 * Editor instance per textarea.
	 *
	 * @exports TYPO3/CMS/Rtehtmlarea/HTMLArea/UserAgent/HtmlArea
	 */
	var HtmlArea = {

		/***************************************************
		 *  COMPILED REGULAR EXPRESSIONS                   *
		 ***************************************************/
		RE_htmlTag: /<.[^<>]*?>/g,
		RE_tagName: /(<\/|<)\s*([^ \t\n>]+)/ig,
		RE_head: /<head>((.|\n)*?)<\/head>/i,
		RE_body: /<body>((.|\n)*?)<\/body>/i,
		// Class names the RTE reserves for itself; user content must not use them
		reservedClassNames: /htmlarea/,
		RE_email: /([0-9a-z]+([a-z0-9_-]*[0-9a-z])*){1}(\.[0-9a-z]+([a-z0-9_-]*[0-9a-z])*)*@([0-9a-z]+([a-z0-9_-]*[0-9a-z])*\.)+[a-z]{2,9}/i,
		RE_url: /(([^:/?#]+):\/\/)?(([a-z0-9_]+:[a-z0-9_]+@)?[a-z0-9_-]{2,}(\.[a-z0-9_-]{2,})+\.[a-z]{2,5}(:[0-9]+)?(\/\S+)*\/?)/i,
		RE_numberOrPunctuation: /[0-9.(),;:!¡?¿%#$'"_+=\\\/-]*/g,

		/***************************************************
		 *  INITIALIZATION                                 *
		 ***************************************************/

		/**
		 * Apply the global configuration (RTEarea[0]) once and mirror this
		 * object onto the global HTMLArea namespace.
		 *
		 * @return {void}
		 */
		init: function () {
			if (!HTMLArea.isReady) {
				// Apply global configuration settings
				Util.apply(HtmlArea, RTEarea[0]);
				HTMLArea.isReady = true;
				HtmlArea.appendToLog('', 'HTMLArea', 'init', 'Editor url set to: ' + HtmlArea.editorUrl, 'info');
				HtmlArea.appendToLog('', 'HTMLArea', 'init', 'Editor content skin CSS set to: ' + HtmlArea.editedContentCSS, 'info');
				Util.apply(HTMLArea, HtmlArea);
			}
		},

		/**
		 * Create an editor when HTMLArea is loaded and when Ext is ready
		 *
		 * @param {String} editorId The id of the editor
		 * @return {Boolean} false if successful
		 */
		initEditor: function (editorId) {
			if (document.getElementById('pleasewait' + editorId)) {
				if (UserAgent.isSupported()) {
					document.getElementById('pleasewait' + editorId).style.display = 'block';
					document.getElementById('editorWrap' + editorId).style.visibility = 'hidden';
					if (!HTMLArea.isReady) {
						// Global init has not completed yet: poll again shortly
						var self = this;
						window.setTimeout(function () {
							return self.initEditor(editorId);
						}, 150);
					} else {
						// Create an editor for the textarea
						var editor = new Editor(Util.apply(new Config(editorId), RTEarea[editorId]));
						editor.generate();
						return false;
					}
				} else {
					// Unsupported browser: leave the plain textarea visible
					document.getElementById('pleasewait' + editorId).style.display = 'none';
					document.getElementById('editorWrap' + editorId).style.visibility = 'visible';
				}
			}
			return true;
		},

		/***************************************************
		 *  LOCALIZATION                                   *
		 ***************************************************/

		/**
		 * Resolve a localized label from the loaded I18N catalogues
		 * (dialogs, then tooltips, then msg).
		 *
		 * @param {String} label The label key to look up
		 * @param {Number} plural Optional plural index into the label's forms
		 * @return {String|Object} The localized string, or '' if not found
		 */
		localize: function (label, plural) {
			var i = plural || 0;
			var localized = HTMLArea.I18N.dialogs[label] || HTMLArea.I18N.tooltips[label] || HTMLArea.I18N.msg[label] || '';
			if (typeof localized === 'object' && localized !== null && typeof localized[i] !== 'undefined') {
				localized = localized[i]['target'];
			}
			return localized;
		},

		/***************************************************
		 *  LOGGING                                        *
		 ***************************************************/

		/**
		 * Write message to JavaScript console
		 *
		 * @param {String} editorId The id of the editor issuing the message
		 * @param {String} objectName The name of the object issuing the message
		 * @param {String} functionName The name of the function issuing the message
		 * @param {String} text The text of the message
		 * @param {String} type The type of message: 'log', 'info', 'warn' or 'error'
		 * @return {void}
		 */
		appendToLog: function (editorId, objectName, functionName, text, type) {
			var str = 'RTE[' + editorId + '][' + objectName + '::' + functionName + ']: ' + text;
			if (typeof type === 'undefined') {
				// Default log level. Fixed: previous code redeclared the
				// parameter with `var type = 'info';`, shadowing-by-hoisting
				// the parameter binding — a plain assignment is the intent.
				type = 'info';
			}
			// IE may not have any console
			if (typeof console === 'object' && console !== null && typeof console[type] !== 'undefined') {
				console[type](str);
			}
		}
	};

	return Util.apply(HTMLArea, HtmlArea);
});
liayn/TYPO3.CMS
typo3/sysext/rtehtmlarea/Resources/Public/JavaScript/HTMLArea/HTMLArea.js
JavaScript
gpl-2.0
5,129
<?php
namespace Drupal\DKANExtension\ServiceContainer;

use Behat\Behat\Context\Context;

/**
 * In-memory store for entities created during a test run.
 *
 * Entities are indexed by entity type, bundle and entity id. An optional
 * unique name can be attached at store time so a scenario can look the
 * entity up again via retrieve_by_name().
 */
class EntityStore implements StoreInterface {

  // Entities keyed by [entity_type][bundle][entity_id].
  // Store entities as EntityMetadataWrappers for easy property inspection.
  protected $entities = array();

  // Named aliases; each value is a reference into $entities so updates made
  // through one are visible through the other.
  protected $names = array();

  /**
   * Stores an entity, optionally registering it under a unique name.
   *
   * @throws \Exception
   *   When $name is already registered.
   */
  function store($entity_type, $bundle, $entity_id, $entity, $name = false) {
    $entities = &$this->entities;
    $names = &$this->names;
    if (!isset($entities[$entity_type])) {
      $entities[$entity_type] = array();
    }
    if (!isset($entities[$entity_type][$bundle])) {
      $entities[$entity_type][$bundle] = array();
    }
    $entities[$entity_type][$bundle][$entity_id] = $entity;
    if ($name !== false) {
      if (!isset($names[$name])) {
        // This should point to the same objects if they get updated.
        $names[$name] = &$entities[$entity_type][$bundle][$entity_id];
      }
      else {
        throw new \Exception("Named Entity with name '$name' already exists.");
      }
    }
  }

  /**
   * Retrieves stored entities at the requested granularity.
   *
   * With no arguments the whole store is returned; each additional argument
   * narrows the result down to one type, bundle or single entity.
   *
   * @return mixed
   *   The matching entity/array, or FALSE if the requested key is unknown.
   */
  function retrieve($entity_type = false, $bundle = false, $entity_id = false) {
    $entities = &$this->entities;
    if ($entity_type !== false && !isset($entities[$entity_type])) {
      return false;
    }
    if ($bundle !== false && !isset($entities[$entity_type][$bundle])) {
      return false;
    }
    if ($entity_id !== false && !isset($entities[$entity_type][$bundle][$entity_id])) {
      return false;
    }
    if ($entity_type === false) {
      return $entities;
    }
    if ($bundle === false) {
      return $entities[$entity_type];
    }
    if ($entity_id === false) {
      return $entities[$entity_type][$bundle];
    }
    return $entities[$entity_type][$bundle][$entity_id];
  }

  /**
   * Deletes stored entities at the requested granularity.
   *
   * Note: named aliases in $names are references and keep their values alive
   * even after the underlying slot is deleted; use names_flush()/flush() to
   * clear them as well.
   *
   * @return bool
   *   TRUE on deletion, FALSE if the requested key is unknown.
   */
  function delete($entity_type = false, $bundle = false, $entity_id = false) {
    $entities = &$this->entities;
    if ($entity_type !== false && !isset($entities[$entity_type])) {
      return false;
    }
    if ($bundle !== false && !isset($entities[$entity_type][$bundle])) {
      return false;
    }
    if ($entity_id !== false && !isset($entities[$entity_type][$bundle][$entity_id])) {
      return false;
    }
    if ($entity_type === false) {
      // Reset the property itself. The previous implementation called
      // unset($entities), which only destroyed the local reference variable
      // and left $this->entities untouched, so "delete everything" was a
      // silent no-op.
      $this->entities = array();
      return true;
    }
    if ($bundle === false) {
      unset($entities[$entity_type]);
      return true;
    }
    if ($entity_id === false) {
      unset($entities[$entity_type][$bundle]);
      return true;
    }
    unset($entities[$entity_type][$bundle][$entity_id]);
    return true;
  }

  /**
   * Looks up an entity previously stored under $name.
   *
   * @return mixed
   *   The entity, or FALSE if no entity was stored under that name.
   */
  function retrieve_by_name($name) {
    if (isset($this->names[$name])) {
      return $this->names[$name];
    }
    return false;
  }

  /**
   * Clears all named aliases, leaving the entity index intact.
   */
  function names_flush() {
    $this->names = array();
  }

  /**
   * Clears the whole store: all entities and all named aliases.
   */
  function flush() {
    $this->entities = array();
    $this->names_flush();
  }
}
marciuz/dkan
test/dkanextension/src/Drupal/DKANExtension/ServiceContainer/EntityStore.php
PHP
gpl-2.0
2,834
/* Copyright (c) 2003, 2005 MySQL AB This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA */ #ifndef KEY_DESCRIPTOR_HPP #define KEY_DESCRIPTOR_HPP #include <ndb_types.h> #include <ndb_limits.h> #include "CArray.hpp" struct KeyDescriptor { KeyDescriptor () { noOfKeyAttr = hasCharAttr = noOfDistrKeys = noOfVarKeys = 0; } Uint8 noOfKeyAttr; Uint8 hasCharAttr; Uint8 noOfDistrKeys; Uint8 noOfVarKeys; struct KeyAttr { Uint32 attributeDescriptor; CHARSET_INFO* charsetInfo; } keyAttr[MAX_ATTRIBUTES_IN_INDEX]; }; extern CArray<KeyDescriptor> g_key_descriptor_pool; #endif
SunguckLee/MariaDB
storage/ndb/src/kernel/vm/KeyDescriptor.hpp
C++
gpl-2.0
1,223
require 'rails_helper' require 'post_destroyer' describe PostDestroyer do before do ActiveRecord::Base.observers.enable :all end let(:moderator) { Fabricate(:moderator) } let(:admin) { Fabricate(:admin) } let(:post) { create_post } describe "destroy_old_hidden_posts" do it "destroys posts that have been hidden for 30 days" do Fabricate(:admin) now = Time.now freeze_time(now - 60.days) topic = post.topic reply1 = create_post(topic: topic) freeze_time(now - 40.days) reply2 = create_post(topic: topic) PostAction.hide_post!(reply2, PostActionType.types[:off_topic]) freeze_time(now - 20.days) reply3 = create_post(topic: topic) PostAction.hide_post!(reply3, PostActionType.types[:off_topic]) freeze_time(now - 10.days) reply4 = create_post(topic: topic) freeze_time(now) PostDestroyer.destroy_old_hidden_posts reply1.reload reply2.reload reply3.reload reply4.reload expect(reply1.deleted_at).to eq(nil) expect(reply2.deleted_at).not_to eq(nil) expect(reply3.deleted_at).to eq(nil) expect(reply4.deleted_at).to eq(nil) end end describe 'destroy_old_stubs' do it 'destroys stubs for deleted by user posts' do SiteSetting.stubs(:delete_removed_posts_after).returns(24) Fabricate(:admin) topic = post.topic reply1 = create_post(topic: topic) reply2 = create_post(topic: topic) reply3 = create_post(topic: topic) PostDestroyer.new(reply1.user, reply1).destroy PostDestroyer.new(reply2.user, reply2).destroy reply2.update_column(:updated_at, 2.days.ago) PostDestroyer.destroy_stubs reply1.reload reply2.reload reply3.reload expect(reply1.deleted_at).to eq(nil) expect(reply2.deleted_at).not_to eq(nil) expect(reply3.deleted_at).to eq(nil) # if topic is deleted we should still be able to destroy stubs topic.trash! reply1.update_column(:updated_at, 2.days.ago) PostDestroyer.destroy_stubs reply1.reload expect(reply1.deleted_at).to eq(nil) # flag the post, it should not nuke the stub anymore topic.recover! 
PostAction.act(Fabricate(:coding_horror), reply1, PostActionType.types[:spam]) PostDestroyer.destroy_stubs reply1.reload expect(reply1.deleted_at).to eq(nil) end it 'uses the delete_removed_posts_after site setting' do Fabricate(:admin) topic = post.topic reply1 = create_post(topic: topic) reply2 = create_post(topic: topic) PostDestroyer.new(reply1.user, reply1).destroy PostDestroyer.new(reply2.user, reply2).destroy SiteSetting.stubs(:delete_removed_posts_after).returns(1) reply2.update_column(:updated_at, 70.minutes.ago) PostDestroyer.destroy_stubs reply1.reload reply2.reload expect(reply1.deleted_at).to eq(nil) expect(reply2.deleted_at).not_to eq(nil) SiteSetting.stubs(:delete_removed_posts_after).returns(72) reply1.update_column(:updated_at, 2.days.ago) PostDestroyer.destroy_stubs expect(reply1.reload.deleted_at).to eq(nil) SiteSetting.stubs(:delete_removed_posts_after).returns(47) PostDestroyer.destroy_stubs expect(reply1.reload.deleted_at).not_to eq(nil) end it "deletes posts immediately if delete_removed_posts_after is 0" do Fabricate(:admin) topic = post.topic reply1 = create_post(topic: topic) SiteSetting.stubs(:delete_removed_posts_after).returns(0) PostDestroyer.new(reply1.user, reply1).destroy expect(reply1.reload.deleted_at).not_to eq(nil) end end describe "recovery and user actions" do it "recreates user actions" do reply = create_post(topic: post.topic) author = reply.user post_action = author.user_actions.where(action_type: UserAction::REPLY, target_post_id: reply.id).first expect(post_action).to be_present PostDestroyer.new(moderator, reply).destroy # User Action is removed post_action = author.user_actions.where(action_type: UserAction::REPLY, target_post_id: reply.id).first expect(post_action).to be_blank PostDestroyer.new(moderator, reply).recover # On recovery, the user action is recreated post_action = author.user_actions.where(action_type: UserAction::REPLY, target_post_id: reply.id).first expect(post_action).to be_present end describe 
"post_count recovery" do before do post @user = post.user expect(@user.user_stat.post_count).to eq(1) end context "recovered by user" do it "should increment the user's post count" do PostDestroyer.new(@user, post).destroy expect(@user.user_stat.post_count).to eq(1) PostDestroyer.new(@user, post.reload).recover expect(@user.reload.user_stat.post_count).to eq(1) end end context "recovered by admin" do it "should increment the user's post count" do PostDestroyer.new(moderator, post).destroy expect(@user.user_stat.post_count).to eq(0) PostDestroyer.new(admin, post).recover expect(@user.reload.user_stat.post_count).to eq(1) end end end end describe 'basic destroying' do it "as the creator of the post, doesn't delete the post" do begin post2 = create_post called = 0 topic_destroyed = -> (topic, user) do expect(topic).to eq(post2.topic) expect(user).to eq(post2.user) called += 1 end DiscourseEvent.on(:topic_destroyed, &topic_destroyed) @orig = post2.cooked PostDestroyer.new(post2.user, post2).destroy post2.reload expect(post2.deleted_at).to be_blank expect(post2.deleted_by).to be_blank expect(post2.user_deleted).to eq(true) expect(post2.raw).to eq(I18n.t('js.post.deleted_by_author', {count: 24})) expect(post2.version).to eq(2) expect(called).to eq(1) called = 0 topic_recovered = -> (topic, user) do expect(topic).to eq(post2.topic) expect(user).to eq(post2.user) called += 1 end DiscourseEvent.on(:topic_recovered, &topic_recovered) # lets try to recover PostDestroyer.new(post2.user, post2).recover post2.reload expect(post2.version).to eq(3) expect(post2.user_deleted).to eq(false) expect(post2.cooked).to eq(@orig) expect(called).to eq(1) ensure DiscourseEvent.off(:topic_destroyed, &topic_destroyed) DiscourseEvent.off(:topic_recovered, &topic_recovered) end end context "as a moderator" do it "deletes the post" do author = post.user post_count = author.post_count history_count = UserHistory.count PostDestroyer.new(moderator, post).destroy expect(post.deleted_at).to be_present 
expect(post.deleted_by).to eq(moderator) author.reload expect(author.post_count).to eq(post_count - 1) expect(UserHistory.count).to eq(history_count + 1) end end context "as an admin" do it "deletes the post" do PostDestroyer.new(admin, post).destroy expect(post.deleted_at).to be_present expect(post.deleted_by).to eq(admin) end it "updates the user's post_count" do author = post.user expect { PostDestroyer.new(admin, post).destroy author.reload }.to change { author.post_count }.by(-1) end end end context 'deleting the second post in a topic' do let(:user) { Fabricate(:user) } let!(:post) { create_post(user: user) } let(:topic) { post.topic.reload } let(:second_user) { Fabricate(:coding_horror) } let!(:second_post) { create_post(topic: topic, user: second_user) } before do PostDestroyer.new(moderator, second_post).destroy end it 'resets the last_poster_id back to the OP' do expect(topic.last_post_user_id).to eq(user.id) end it 'resets the last_posted_at back to the OP' do expect(topic.last_posted_at.to_i).to eq(post.created_at.to_i) end context 'topic_user' do let(:topic_user) { second_user.topic_users.find_by(topic_id: topic.id) } it 'clears the posted flag for the second user' do expect(topic_user.posted?).to eq(false) end it "sets the second user's last_read_post_number back to 1" do expect(topic_user.last_read_post_number).to eq(1) end it "sets the second user's last_read_post_number back to 1" do expect(topic_user.highest_seen_post_number).to eq(1) end end end context "deleting a post belonging to a deleted topic" do let!(:topic) { post.topic } before do topic.trash!(admin) post.reload end context "as a moderator" do before do PostDestroyer.new(moderator, post).destroy end it "deletes the post" do expect(post.deleted_at).to be_present expect(post.deleted_by).to eq(moderator) end end context "as an admin" do before do PostDestroyer.new(admin, post).destroy end it "deletes the post" do expect(post.deleted_at).to be_present expect(post.deleted_by).to eq(admin) end 
it "creates a new user history entry" do expect { PostDestroyer.new(admin, post).destroy }.to change { UserHistory.count}.by(1) end end end describe 'after delete' do let!(:coding_horror) { Fabricate(:coding_horror) } let!(:post) { Fabricate(:post, raw: "Hello @CodingHorror") } it "should feature the users again (in case they've changed)" do Jobs.expects(:enqueue).with(:feature_topic_users, has_entries(topic_id: post.topic_id)) PostDestroyer.new(moderator, post).destroy end describe 'with a reply' do let!(:reply) { Fabricate(:basic_reply, user: coding_horror, topic: post.topic) } let!(:post_reply) { PostReply.create(post_id: post.id, reply_id: reply.id) } it 'changes the post count of the topic' do post.reload expect { PostDestroyer.new(moderator, reply).destroy post.topic.reload }.to change(post.topic, :posts_count).by(-1) end it 'lowers the reply_count when the reply is deleted' do expect { PostDestroyer.new(moderator, reply).destroy }.to change(post.post_replies, :count).by(-1) end it 'should increase the post_number when there are deletion gaps' do PostDestroyer.new(moderator, reply).destroy p = Fabricate(:post, user: post.user, topic: post.topic) expect(p.post_number).to eq(3) end end end context '@mentions' do it 'removes notifications when deleted' do user = Fabricate(:evil_trout) post = create_post(raw: 'Hello @eviltrout') expect { PostDestroyer.new(Fabricate(:moderator), post).destroy }.to change(user.notifications, :count).by(-1) end end describe "post actions" do let(:second_post) { Fabricate(:post, topic_id: post.topic_id) } let!(:bookmark) { PostAction.act(moderator, second_post, PostActionType.types[:bookmark]) } let!(:flag) { PostAction.act(moderator, second_post, PostActionType.types[:off_topic]) } it "should delete public post actions and agree with flags" do second_post.expects(:update_flagged_posts_count) PostDestroyer.new(moderator, second_post).destroy expect(PostAction.find_by(id: bookmark.id)).to eq(nil) off_topic = PostAction.find_by(id: 
flag.id) expect(off_topic).not_to eq(nil) expect(off_topic.agreed_at).not_to eq(nil) second_post.reload expect(second_post.bookmark_count).to eq(0) expect(second_post.off_topic_count).to eq(1) end end describe "user actions" do let(:codinghorror) { Fabricate(:coding_horror) } let(:second_post) { Fabricate(:post, topic_id: post.topic_id) } def create_user_action(action_type) UserAction.log_action!({ action_type: action_type, user_id: codinghorror.id, acting_user_id: codinghorror.id, target_topic_id: second_post.topic_id, target_post_id: second_post.id }) end it "should delete the user actions" do bookmark = create_user_action(UserAction::BOOKMARK) like = create_user_action(UserAction::LIKE) PostDestroyer.new(moderator, second_post).destroy expect(UserAction.find_by(id: bookmark.id)).to be_nil expect(UserAction.find_by(id: like.id)).to be_nil end end describe 'topic links' do let!(:first_post) { Fabricate(:post) } let!(:topic) { first_post.topic } let!(:second_post) { Fabricate(:post_with_external_links, topic: topic) } before { TopicLink.extract_from(second_post) } it 'should destroy the topic links when moderator destroys the post' do PostDestroyer.new(moderator, second_post.reload).destroy expect(topic.topic_links.count).to eq(0) end it 'should destroy the topic links when the user destroys the post' do PostDestroyer.new(second_post.user, second_post.reload).destroy expect(topic.topic_links.count).to eq(0) end end end
shirishgoyal/daemo-forum
spec/components/post_destroyer_spec.rb
Ruby
gpl-2.0
13,531
<?php namespace TYPO3\CMS\Core\Resource\Collection; /* * This file is part of the TYPO3 CMS project. * * It is free software; you can redistribute it and/or modify it under * the terms of the GNU General Public License, either version 2 * of the License, or any later version. * * For the full copyright and license information, please read the * LICENSE.txt file that was distributed with this source code. * * The TYPO3 project - inspiring people to share! */ use TYPO3\CMS\Core\Resource\Folder; use TYPO3\CMS\Core\Resource\StorageRepository; use TYPO3\CMS\Core\Utility\GeneralUtility; /** * A collection containing a set of files to be represented as a (virtual) folder. * This collection is persisted to the database with the accordant folder reference. */ class FolderBasedFileCollection extends AbstractFileCollection { /** * @var string */ protected static $storageTableName = 'sys_file_collection'; /** * @var string */ protected static $type = 'folder'; /** * @var string */ protected static $itemsCriteriaField = 'folder'; /** * The folder * * @var \TYPO3\CMS\Core\Resource\Folder */ protected $folder; /** * @var bool */ protected $recursive; /** * Populates the content-entries of the storage * * Queries the underlying storage for entries of the collection * and adds them to the collection data. * * If the content entries of the storage had not been loaded on creation * ($fillItems = false) this function is to be used for loading the contents * afterwards. */ public function loadContents() { if ($this->folder instanceof Folder) { $entries = $this->folder->getFiles(0, 0, Folder::FILTER_MODE_USE_OWN_AND_STORAGE_FILTERS, $this->recursive); foreach ($entries as $entry) { $this->add($entry); } } } /** * Gets the items criteria. * * @return string */ public function getItemsCriteria() { return $this->folder->getCombinedIdentifier(); } /** * Returns an array of the persistable properties and contents * which are processable by DataHandler. 
* * @return array */ protected function getPersistableDataArray() { return [ 'title' => $this->getTitle(), 'type' => self::$type, 'description' => $this->getDescription(), 'folder' => $this->folder->getIdentifier(), 'storage' => $this->folder->getStorage()->getUid() ]; } /** * Similar to method in \TYPO3\CMS\Core\Collection\AbstractRecordCollection, * but without $this->itemTableName= $array['table_name'], * but with $this->storageItemsFieldContent = $array[self::$storageItemsField]; * * @param array $array */ public function fromArray(array $array) { $this->uid = $array['uid']; $this->title = $array['title']; $this->description = $array['description']; $this->recursive = (bool)$array['recursive']; if (!empty($array['folder']) && !empty($array['storage'])) { /** @var $storageRepository StorageRepository */ $storageRepository = GeneralUtility::makeInstance(StorageRepository::class); /** @var $storage \TYPO3\CMS\Core\Resource\ResourceStorage */ $storage = $storageRepository->findByUid($array['storage']); if ($storage) { $this->folder = $storage->getFolder($array['folder']); } } } }
morinfa/TYPO3.CMS
typo3/sysext/core/Classes/Resource/Collection/FolderBasedFileCollection.php
PHP
gpl-2.0
3,654
"""Utility functions, node construction macros, etc.""" # Author: Collin Winter # Local imports from .pgen2 import token from .pytree import Leaf, Node from .pygram import python_symbols as syms from . import patcomp ########################################################### ### Common node-construction "macros" ########################################################### def KeywordArg(keyword, value): return Node(syms.argument, [keyword, Leaf(token.EQUAL, u'='), value]) def LParen(): return Leaf(token.LPAR, u"(") def RParen(): return Leaf(token.RPAR, u")") def Assign(target, source): """Build an assignment statement""" if not isinstance(target, list): target = [target] if not isinstance(source, list): source.prefix = u" " source = [source] return Node(syms.atom, target + [Leaf(token.EQUAL, u"=", prefix=u" ")] + source) def Name(name, prefix=None): """Return a NAME leaf""" return Leaf(token.NAME, name, prefix=prefix) def Attr(obj, attr): """A node tuple for obj.attr""" return [obj, Node(syms.trailer, [Dot(), attr])] def Comma(): """A comma leaf""" return Leaf(token.COMMA, u",") def Dot(): """A period (.) 
leaf""" return Leaf(token.DOT, u".") def ArgList(args, lparen=LParen(), rparen=RParen()): """A parenthesised argument list, used by Call()""" node = Node(syms.trailer, [lparen.clone(), rparen.clone()]) if args: node.insert_child(1, Node(syms.arglist, args)) return node def Call(func_name, args=None, prefix=None): """A function call""" node = Node(syms.power, [func_name, ArgList(args)]) if prefix is not None: node.prefix = prefix return node def Newline(): """A newline literal""" return Leaf(token.NEWLINE, u"\n") def BlankLine(): """A blank line""" return Leaf(token.NEWLINE, u"") def Number(n, prefix=None): return Leaf(token.NUMBER, n, prefix=prefix) def Subscript(index_node): """A numeric or string subscript""" return Node(syms.trailer, [Leaf(token.LBRACE, u'['), index_node, Leaf(token.RBRACE, u']')]) def String(string, prefix=None): """A string leaf""" return Leaf(token.STRING, string, prefix=prefix) def ListComp(xp, fp, it, test=None): """A list comprehension of the form [xp for fp in it if test]. If test is None, the "if test" part is omitted. """ xp.prefix = u"" fp.prefix = u" " it.prefix = u" " for_leaf = Leaf(token.NAME, u"for") for_leaf.prefix = u" " in_leaf = Leaf(token.NAME, u"in") in_leaf.prefix = u" " inner_args = [for_leaf, fp, in_leaf, it] if test: test.prefix = u" " if_leaf = Leaf(token.NAME, u"if") if_leaf.prefix = u" " inner_args.append(Node(syms.comp_if, [if_leaf, test])) inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) return Node(syms.atom, [Leaf(token.LBRACE, u"["), inner, Leaf(token.RBRACE, u"]")]) def FromImport(package_name, name_leafs): """ Return an import statement in the form: from package import name_leafs""" # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') #assert package_name == '.' or '.' not in package_name, "FromImport has "\ # "not been tested with dotted package names -- use at your own "\ # "peril!" 
for leaf in name_leafs: # Pull the leaves out of their old tree leaf.remove() children = [Leaf(token.NAME, u'from'), Leaf(token.NAME, package_name, prefix=u" "), Leaf(token.NAME, u'import', prefix=u" "), Node(syms.import_as_names, name_leafs)] imp = Node(syms.import_from, children) return imp ########################################################### ### Determine whether a node represents a given literal ########################################################### def is_tuple(node): """Does the node represent a tuple literal?""" if isinstance(node, Node) and node.children == [LParen(), RParen()]: return True return (isinstance(node, Node) and len(node.children) == 3 and isinstance(node.children[0], Leaf) and isinstance(node.children[1], Node) and isinstance(node.children[2], Leaf) and node.children[0].value == u"(" and node.children[2].value == u")") def is_list(node): """Does the node represent a list literal?""" return (isinstance(node, Node) and len(node.children) > 1 and isinstance(node.children[0], Leaf) and isinstance(node.children[-1], Leaf) and node.children[0].value == u"[" and node.children[-1].value == u"]") ########################################################### ### Misc ########################################################### def parenthesize(node): return Node(syms.atom, [LParen(), node, RParen()]) consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum", "min", "max"]) def attr_chain(obj, attr): """Follow an attribute chain. If you have a chain of objects where a.foo -> b, b.foo-> c, etc, use this to iterate over all objects in the chain. Iteration is terminated by getattr(x, attr) is None. Args: obj: the starting object attr: the name of the chaining attribute Yields: Each successive object in the chain. 
""" next = getattr(obj, attr) while next: yield next next = getattr(next, attr) p0 = """for_stmt< 'for' any 'in' node=any ':' any* > | comp_for< 'for' any 'in' node=any any* > """ p1 = """ power< ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | 'any' | 'all' | (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* > """ p2 = """ power< 'sorted' trailer< '(' arglist<node=any any*> ')' > any* > """ pats_built = False def in_special_context(node): """ Returns true if node is in an environment where all that is required of it is being itterable (ie, it doesn't matter if it returns a list or an itterator). See test_map_nochange in test_fixers.py for some examples and tests. """ global p0, p1, p2, pats_built if not pats_built: p1 = patcomp.compile_pattern(p1) p0 = patcomp.compile_pattern(p0) p2 = patcomp.compile_pattern(p2) pats_built = True patterns = [p0, p1, p2] for pattern, parent in zip(patterns, attr_chain(node, "parent")): results = {} if pattern.match(parent, results) and results["node"] is node: return True return False def is_probably_builtin(node): """ Check that something isn't an attribute or function name etc. """ prev = node.prev_sibling if prev is not None and prev.type == token.DOT: # Attribute lookup. return False parent = node.parent if parent.type in (syms.funcdef, syms.classdef): return False if parent.type == syms.expr_stmt and parent.children[0] is node: # Assignment. return False if parent.type == syms.parameters or \ (parent.type == syms.typedargslist and ( (prev is not None and prev.type == token.COMMA) or parent.children[0] is node )): # The name of an argument. 
return False return True ########################################################### ### The following functions are to find bindings in a suite ########################################################### def make_suite(node): if node.type == syms.suite: return node node = node.clone() parent, node.parent = node.parent, None suite = Node(syms.suite, [node]) suite.parent = parent return suite def find_root(node): """Find the top level namespace.""" # Scamper up to the top level namespace while node.type != syms.file_input: assert node.parent, "Tree is insane! root found before "\ "file_input node was found." node = node.parent return node def does_tree_import(package, name, node): """ Returns true if name is imported from package at the top level of the tree which node belongs to. To cover the case of an import like 'import foo', use None for the package and 'foo' for the name. """ binding = find_binding(name, find_root(node), package) return bool(binding) def is_import(node): """Returns true if the node is an import statement.""" return node.type in (syms.import_name, syms.import_from) def touch_import(package, name, node): """ Works like `does_tree_import` but adds an import statement if it was not imported. """ def is_import_stmt(node): return node.type == syms.simple_stmt and node.children and \ is_import(node.children[0]) root = find_root(node) if does_tree_import(package, name, root): return # figure out where to insert the new import. First try to find # the first import and then skip to the last one. insert_pos = offset = 0 for idx, node in enumerate(root.children): if not is_import_stmt(node): continue for offset, node2 in enumerate(root.children[idx:]): if not is_import_stmt(node2): break insert_pos = idx + offset break # if there are no imports where we can insert, find the docstring. 
# if that also fails, we stick to the beginning of the file if insert_pos == 0: for idx, node in enumerate(root.children): if node.type == syms.simple_stmt and node.children and \ node.children[0].type == token.STRING: insert_pos = idx + 1 break if package is None: import_ = Node(syms.import_name, [ Leaf(token.NAME, u'import'), Leaf(token.NAME, name, prefix=u' ') ]) else: import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u' ')]) children = [import_, Newline()] root.insert_child(insert_pos, Node(syms.simple_stmt, children)) _def_syms = set([syms.classdef, syms.funcdef]) def find_binding(name, node, package=None): """ Returns the node which binds variable name, otherwise None. If optional argument package is supplied, only imports will be returned. See test cases for examples.""" for child in node.children: ret = None if child.type == syms.for_stmt: if _find(name, child.children[1]): return child n = find_binding(name, make_suite(child.children[-1]), package) if n: ret = n elif child.type in (syms.if_stmt, syms.while_stmt): n = find_binding(name, make_suite(child.children[-1]), package) if n: ret = n elif child.type == syms.try_stmt: n = find_binding(name, make_suite(child.children[2]), package) if n: ret = n else: for i, kid in enumerate(child.children[3:]): if kid.type == token.COLON and kid.value == ":": # i+3 is the colon, i+4 is the suite n = find_binding(name, make_suite(child.children[i+4]), package) if n: ret = n elif child.type in _def_syms and child.children[1].value == name: ret = child elif _is_import_binding(child, name, package): ret = child elif child.type == syms.simple_stmt: ret = find_binding(name, child, package) elif child.type == syms.expr_stmt: if _find(name, child.children[0]): ret = child if ret: if not package: return ret if is_import(ret): return ret return None _block_syms = set([syms.funcdef, syms.classdef, syms.trailer]) def _find(name, node): nodes = [node] while nodes: node = nodes.pop() if node.type > 256 and node.type not 
in _block_syms: nodes.extend(node.children) elif node.type == token.NAME and node.value == name: return node return None def _is_import_binding(node, name, package=None): """ Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples. """ if node.type == syms.import_name and not package: imp = node.children[1] if imp.type == syms.dotted_as_names: for child in imp.children: if child.type == syms.dotted_as_name: if child.children[2].value == name: return node elif child.type == token.NAME and child.value == name: return node elif imp.type == syms.dotted_as_name: last = imp.children[-1] if last.type == token.NAME and last.value == name: return node elif imp.type == token.NAME and imp.value == name: return node elif node.type == syms.import_from: # unicode(...) is used to make life easier here, because # from a.b import parses to ['import', ['a', '.', 'b'], ...] if package and unicode(node.children[1]).strip() != package: return None n = node.children[3] if package and _find(u'as', n): # See test_from_import_as for explanation return None elif n.type == syms.import_as_names and _find(name, n): return node elif n.type == syms.import_as_name: child = n.children[2] if child.type == token.NAME and child.value == name: return node elif n.type == token.NAME and n.value == name: return node elif package and n.type == token.STAR: return node return None
2ndy/RaspIM
usr/lib/python2.6/lib2to3/fixer_util.py
Python
gpl-2.0
14,225
/* * ByteGetter.java * * PostGIS extension for PostgreSQL JDBC driver - Binary Parser * * (C) 2005 Markus Schaber, markus.schaber@logix-tt.com * * This library is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the Free * Software Foundation, either version 2.1 of the License. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more * details. * * You should have received a copy of the GNU Lesser General Public License * along with this library; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA or visit the web at * http://www.gnu.org. * * $Id$ */ package org.postgis.binary; public abstract class ByteGetter { /** * Get a byte. * * @return The result is returned as Int to eliminate sign problems when * or'ing several values together. */ public abstract int get(int index); public static class BinaryByteGetter extends ByteGetter { private byte[] array; public BinaryByteGetter(byte[] array) { this.array = array; } public int get(int index) { return array[index] & 0xFF; // mask out sign-extended bits. } } public static class StringByteGetter extends ByteGetter { private String rep; public StringByteGetter(String rep) { this.rep = rep; } public int get(int index) { index *= 2; int high = unhex(rep.charAt(index)); int low = unhex(rep.charAt(index + 1)); return (high << 4) + low; } public static byte unhex(char c) { if (c >= '0' && c <= '9') { return (byte) (c - '0'); } else if (c >= 'A' && c <= 'F') { return (byte) (c - 'A' + 10); } else if (c >= 'a' && c <= 'f') { return (byte) (c - 'a' + 10); } else { throw new IllegalArgumentException("No valid Hex char " + c); } } } }
ahinz/postgis
java/jdbc/src/org/postgis/binary/ByteGetter.java
Java
gpl-2.0
2,315
<?php /** * Custom functions that act independently of the theme templates * * Eventually, some of the functionality here could be replaced by core features * * @package sparkling */ /** * Get our wp_nav_menu() fallback, wp_page_menu(), to show a home link. * * @param array $args Configuration arguments. * @return array */ function sparkling_page_menu_args( $args ) { $args['show_home'] = true; return $args; } add_filter( 'wp_page_menu_args', 'sparkling_page_menu_args' ); /** * Adds custom classes to the array of body classes. * * @param array $classes Classes for the body element. * @return array */ function sparkling_body_classes( $classes ) { // Adds a class of group-blog to blogs with more than 1 published author. if ( is_multi_author() ) { $classes[] = 'group-blog'; } return $classes; } add_filter( 'body_class', 'sparkling_body_classes' ); if ( version_compare( $GLOBALS['wp_version'], '4.1', '<' ) ) : /** * Filters wp_title to print a neat <title> tag based on what is being viewed. * * @param string $title Default title text for current view. * @param string $sep Optional separator. * @return string The filtered title. */ function sparkling_wp_title( $title, $sep ) { if ( is_feed() ) { return $title; } global $page, $paged; // Add the blog name $title .= get_bloginfo( 'name', 'display' ); // Add the blog description for the home/front page. $site_description = get_bloginfo( 'description', 'display' ); if ( $site_description && ( is_home() || is_front_page() ) ) { $title .= " $sep $site_description"; } // Add a page number if necessary: if ( ( $paged >= 2 || $page >= 2 ) && ! is_404() ) { $title .= " $sep " . sprintf( __( 'Page %s', 'sparkling' ), max( $paged, $page ) ); } return $title; } add_filter( 'wp_title', 'sparkling_wp_title', 10, 2 ); /** * Title shim for sites older than WordPress 4.1. * * @link https://make.wordpress.org/core/2014/10/29/title-tags-in-4-1/ * @todo Remove this function when WordPress 4.3 is released. 
*/ function sparkling_render_title() { ?> <title><?php wp_title( '|', true, 'right' ); ?></title> <?php } add_action( 'wp_head', 'sparkling_render_title' ); endif; // Mark Posts/Pages as Untiled when no title is used add_filter( 'the_title', 'sparkling_title' ); function sparkling_title( $title ) { if ( $title == '' ) { return 'Untitled'; } else { return $title; } } /** * Sets the authordata global when viewing an author archive. * * This provides backwards compatibility with * http://core.trac.wordpress.org/changeset/25574 * * It removes the need to call the_post() and rewind_posts() in an author * template to print information about the author. * * @global WP_Query $wp_query WordPress Query object. * @return void */ function sparkling_setup_author() { global $wp_query; if ( $wp_query->is_author() && isset( $wp_query->post ) ) { $GLOBALS['authordata'] = get_userdata( $wp_query->post->post_author ); } } add_action( 'wp', 'sparkling_setup_author' ); /************* search form *****************/ // Search Form function sparkling_wpsearch( $form ) { $form = '<form method="get" class="form-search" action="' . home_url( '/' ) . '"> <div class="row"> <div class="col-lg-12"> <div class="input-group"> <input type="text" class="form-control search-query" value="' . get_search_query() . '" name="s" id="s" placeholder="'. esc_attr__('Search...','sparkling') .'"> <span class="input-group-btn"> <button type="submit" class="btn btn-default" name="submit" id="searchsubmit" value="Go"><span class="glyphicon glyphicon-search"></span></button> </span> </div> </div> </div> </form>'; return $form; } // don't remove this bracket! /****************** password protected post form *****/ add_filter( 'the_password_form', 'custom_password_form' ); function custom_password_form() { global $post; $label = 'pwbox-'.( empty( $post->ID ) ? rand() : $post->ID ); $o = '<form class="protected-post-form" action="' . get_option('siteurl') . 
'/wp-login.php?action=postpass" method="post">
	<div class="row">
		<div class="col-lg-10">
		' . __( "<p>This post is password protected. To view it please enter your password below:</p>" ,'sparkling') . '
		<label for="' . $label . '">' . __( "Password:" ,'sparkling') . ' </label>
		<div class="input-group">
			<input class="form-control" value="" name="post_password" id="' . $label . '" type="password">
			<span class="input-group-btn"><button type="submit" class="btn btn-default" name="submit" id="searchsubmit" value="' . esc_attr__( "Submit",'sparkling' ) . '">' . __( "Submit" ,'sparkling') . '</button>
			</span>
		</div>
		</div>
	</div>
</form>';
	// NOTE(review): two fixes above — the submit button had an invalid `vvalue`
	// attribute (typo for `value`), and the password input was prefilled with
	// get_search_query() (copy/paste from the search form, which leaked the
	// current search query into a password field). It is now empty.
	return $o;
}

// Add Bootstrap classes for table
add_filter( 'the_content', 'sparkling_add_custom_table_class' );
/**
 * Filter post content so plain <table> tags pick up Bootstrap styling.
 *
 * @param string $content Post content HTML.
 * @return string Content with Bootstrap table classes applied.
 */
function sparkling_add_custom_table_class( $content ) {
	return str_replace( '<table>', '<table class="table table-hover">', $content );
}

if ( ! function_exists( 'sparkling_social' ) ) :
	/**
	 * Display social links in footer and widgets if enabled
	 *
	 * Each service whose URL is set in theme options (option key
	 * `social_{service}`) is rendered as a Font Awesome icon link.
	 */
	function sparkling_social(){
		$services = array (
			'facebook'   => 'Facebook',
			'twitter'    => 'Twitter',
			'googleplus' => 'Google+',
			'youtube'    => 'Youtube',
			'vimeo'      => 'Vimeo',
			'linkedin'   => 'LinkedIn',
			'pinterest'  => 'Pinterest',
			'rss'        => 'RSS',
			'tumblr'     => 'Tumblr',
			'flickr'     => 'Flickr',
			'instagram'  => 'Instagram',
			'dribbble'   => 'Dribbble',
			'skype'      => 'Skype',
			'foursquare' => 'Foursquare',
			'soundcloud' => 'SoundCloud',
			'github'     => 'GitHub'
		);
		echo '<div class="social-icons">';
		foreach ( $services as $service => $name ) :
			$active[ $service ] = of_get_option ( 'social_'.$service );
			if ( $active[$service] ) {
				echo '<a href="'. esc_url( $active[$service] ) .'" title="'. __('Follow us on ','sparkling').$name.'" class="'. $service .'" target="_blank"><i class="social_icon fa fa-'.$service.'"></i></a>';
			}
		endforeach;
		echo '</div>';
	}
endif;

if ( ! 
function_exists( 'sparkling_header_menu' ) ) : /** * Header menu (should you choose to use one) */ function sparkling_header_menu() { // display the WordPress Custom Menu if available wp_nav_menu(array( 'menu' => 'primary', 'theme_location' => 'primary', 'depth' => 2, 'container' => 'div', 'container_class' => 'collapse navbar-collapse navbar-ex1-collapse', 'menu_class' => 'nav navbar-nav', 'fallback_cb' => 'wp_bootstrap_navwalker::fallback', 'walker' => new wp_bootstrap_navwalker() )); } /* end header menu */ endif; if ( ! function_exists( 'sparkling_footer_links' ) ) : /** * Footer menu (should you choose to use one) */ function sparkling_footer_links() { // display the WordPress Custom Menu if available wp_nav_menu(array( 'container' => '', // remove nav container 'container_class' => 'footer-links clearfix', // class of container (should you choose to use it) 'menu' => __( 'Footer Links', 'sparkling' ), // nav name 'menu_class' => 'nav footer-nav clearfix', // adding custom nav class 'theme_location' => 'footer-links', // where it's located in the theme 'before' => '', // before the menu 'after' => '', // after the menu 'link_before' => '', // before each link 'link_after' => '', // after each link 'depth' => 0, // limit the depth of the nav 'fallback_cb' => 'sparkling_footer_links_fallback' // fallback function )); } /* end sparkling footer link */ endif; if ( ! function_exists( 'sparkling_call_for_action' ) ) : /** * Call for action text and button displayed above content */ function sparkling_call_for_action() { if ( is_front_page() && of_get_option( 'w2f_cfa_text' )!=''){ echo '<div class="cfa">'; echo '<div class="container">'; echo '<div class="col-sm-8">'; echo '<span class="cfa-text">'. of_get_option( 'w2f_cfa_text' ).'</span>'; echo '</div>'; echo '<div class="col-sm-4">'; echo '<a class="btn btn-lg cfa-button" href="'. of_get_option( 'w2f_cfa_link' ). '">'. of_get_option( 'w2f_cfa_button' ). 
'</a>'; echo '</div>'; echo '</div>'; echo '</div>'; } } endif; if ( ! function_exists( 'sparkling_featured_slider' ) ) : /** * Featured image slider, displayed on front page for static page and blog */ function sparkling_featured_slider() { if ( is_front_page() && of_get_option( 'sparkling_slider_checkbox' ) == 1 ) { echo '<div class="flexslider">'; echo '<ul class="slides">'; $count = of_get_option( 'sparkling_slide_number' ); $slidecat =of_get_option( 'sparkling_slide_categories' ); $query = new WP_Query( array( 'cat' =>$slidecat,'posts_per_page' =>$count ) ); if ($query->have_posts()) : while ($query->have_posts()) : $query->the_post(); echo '<li><a href="'. get_permalink() .'">'; if ( (function_exists( 'has_post_thumbnail' )) && ( has_post_thumbnail() ) ) : echo get_the_post_thumbnail(); endif; echo '<div class="flex-caption">'; if ( get_the_title() != '' ) echo '<h2 class="entry-title">'. get_the_title().'</h2>'; if ( get_the_excerpt() != '' ) echo '<div class="excerpt">' . get_the_excerpt() .'</div>'; echo '</div>'; endwhile; endif; echo '</a></li>'; echo '</ul>'; echo ' </div>'; } } endif; /** * function to show the footer info, copyright information */ function sparkling_footer_info() { global $sparkling_footer_info; printf( __( 'Theme by %1$s Powered by %2$s', 'sparkling' ) , '<a href="http://colorlib.com/" target="_blank">Colorlib</a>', '<a href="http://wordpress.org/" target="_blank">WordPress</a>'); } if ( ! function_exists( 'get_sparkling_theme_options' ) ) { /** * Get information from Theme Options and add it into wp_head */ function get_sparkling_theme_options(){ echo '<style type="text/css">'; if ( of_get_option('link_color')) { echo 'a, #infinite-handle span, #secondary .widget .post-content a {color:' . of_get_option('link_color') . 
'}'; } if ( of_get_option('link_hover_color')) { echo 'a:hover, a:active, #secondary .widget .post-content a:hover {color: '.of_get_option('link_hover_color').';}'; } if ( of_get_option('element_color')) { echo '.btn-default, .label-default, .flex-caption h2, .btn.btn-default.read-more, button {background-color: '.of_get_option('element_color').'; border-color: '.of_get_option('element_color').';} .site-main [class*="navigation"] a, .more-link { color: '.of_get_option('element_color').'}'; } if ( of_get_option('element_color_hover')) { echo '.btn-default:hover, .label-default[href]:hover, .tagcloud a:hover, button, .main-content [class*="navigation"] a:hover, .label-default[href]:focus, #infinite-handle span:hover, .btn.btn-default.read-more:hover, .btn-default:hover, .scroll-to-top:hover, .btn-default:focus, .btn-default:active, .btn-default.active, .site-main [class*="navigation"] a:hover, .more-link:hover, #image-navigation .nav-previous a:hover, #image-navigation .nav-next a:hover, .cfa-button:hover { background-color: '.of_get_option('element_color_hover').'; border-color: '.of_get_option('element_color_hover').'; }'; } if ( of_get_option('cfa_bg_color')) { echo '.cfa { background-color: '.of_get_option('cfa_bg_color').'; } .cfa-button:hover a {color: '.of_get_option('cfa_bg_color').';}'; } if ( of_get_option('cfa_color')) { echo '.cfa-text { color: '.of_get_option('cfa_color').';}'; } if ( of_get_option('cfa_btn_color') || of_get_option('cfa_btn_txt_color') ) { echo '.cfa-button {border-color: '.of_get_option('cfa_btn_color').'; color: '.of_get_option('cfa_btn_txt_color').';}'; } if ( of_get_option('heading_color')) { echo 'h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6, .entry-title {color: '.of_get_option('heading_color').';}'; } if ( of_get_option('nav_bg_color')) { echo '.navbar.navbar-default {background-color: '.of_get_option('nav_bg_color').';}'; } if ( of_get_option('nav_link_color')) { echo '.navbar-default .navbar-nav > li > a, .navbar-default 
.navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus, .navbar-default .navbar-nav > .active > a, .navbar-default .navbar-nav > .active > a:hover, .navbar-default .navbar-nav > .active > a:focus { color: '.of_get_option('nav_link_color').';}'; } if ( of_get_option('nav_item_hover_color')) { echo '.navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > .active > a, .navbar-default .navbar-nav > .active > a:hover, .navbar-default .navbar-nav > .active > a:focus, .navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus, .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus, .entry-title a:hover {color: '.of_get_option('nav_item_hover_color').';}'; } if ( of_get_option('nav_dropdown_bg')) { echo '.dropdown-menu {background-color: '.of_get_option('nav_dropdown_bg').';}'; } if ( of_get_option('nav_dropdown_item')) { echo '.navbar-default .navbar-nav .open .dropdown-menu > li > a, .dropdown-menu > li > a { color: '.of_get_option('nav_dropdown_item').';}'; } if ( of_get_option('nav_dropdown_bg_hover') || of_get_option('nav_dropdown_item_hover') ) { echo '.dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus, .navbar-default .navbar-nav .open .dropdown-menu > li > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > li > a:focus, .dropdown-menu > .active > a, .dropdown-menu > .active > a:hover, .dropdown-menu > .active > a:focus {background-color: '.of_get_option('nav_dropdown_bg_hover').'; color:'.of_get_option('nav_dropdown_item_hover').'}'; } if ( of_get_option('footer_bg_color')) { echo '#colophon {background-color: '.of_get_option('footer_bg_color').';}'; } if ( of_get_option('footer_text_color')) { echo '#footer-area, .site-info {color: '.of_get_option('footer_text_color').';}'; } if ( of_get_option('footer_widget_bg_color')) { echo '#footer-area {background-color: 
'.of_get_option('footer_widget_bg_color').';}'; } if ( of_get_option('footer_link_color')) { echo '.site-info a, #footer-area a {color: '.of_get_option('footer_link_color').';}'; } if ( of_get_option('social_color')) { echo '.well .social-icons a {background-color: '.of_get_option('social_color').' !important ;}'; } if ( of_get_option('social_footer_color')) { echo '#footer-area .social-icons a {background-color: '.of_get_option('social_footer_color').' ;}'; } $typography = of_get_option('main_body_typography'); if ( $typography ) { echo '.entry-content {font-family: ' . $typography['face'] . '; font-size:' . $typography['size'] . '; font-weight: ' . $typography['style'] . '; color:'.$typography['color'] . ';}'; } if ( of_get_option('custom_css')) { echo of_get_option( 'custom_css', 'no entry' ); } echo '</style>'; } } add_action( 'wp_head', 'get_sparkling_theme_options', 10 ); // Theme Options sidebar add_action( 'optionsframework_after', 'sparkling_options_display_sidebar' ); function sparkling_options_display_sidebar() { ?> <!-- Twitter --> <script>!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0],p=/^http:/.test(d.location)?'http':'https';if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src=p+'://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js,fjs);}}(document, 'script', 'twitter-wjs');</script> <!-- Facebook --> <div id="fb-root"></div> <div id="fb-root"></div> <script>(function(d, s, id) { var js, fjs = d.getElementsByTagName(s)[0]; if (d.getElementById(id)) return; js = d.createElement(s); js.id = id; js.src = "//connect.facebook.net/en_US/all.js#xfbml=1&appId=328285627269392"; fjs.parentNode.insertBefore(js, fjs); }(document, 'script', 'facebook-jssdk'));</script> <div id="optionsframework-sidebar" class="metabox-holder"> <div id="optionsframework" class="postbox"> <h3><?php _e('Support and Documentation','sparkling') ?></h3> <div class="inside"> <div id="social-share"> <div class="fb-like" data-href="<?php echo esc_url( 
'https://www.facebook.com/colorlib' ); ?>" data-send="false" data-layout="button_count" data-width="90" data-show-faces="true"></div> <div class="tw-follow" ><a href="https://twitter.com/colorlib" class="twitter-follow-button" data-show-count="false">Follow @colorlib</a></div> </div> <p><b><a href="<?php echo esc_url( 'http://colorlib.com/wp/support/sparkling' ); ?>"><?php _e('Sparkling Documentation','sparkling'); ?></a></b></p> <p><?php _e('The best way to contact us with <b>support questions</b> and <b>bug reports</b> is via','sparkling') ?> <a href="<?php echo esc_url( 'http://colorlib.com/wp/forums' ); ?>"><?php _e('Colorlib support forum','sparkling') ?></a>.</p> <p><?php _e('If you like this theme, I\'d appreciate any of the following:','sparkling') ?></p> <ul> <li><a class="button" href="<?php echo esc_url( 'http://wordpress.org/support/view/theme-reviews/sparkling?filter=5' ); ?>" title="<?php esc_attr_e('Rate this Theme', 'sparkling'); ?>" target="_blank"><?php printf(__('Rate this Theme','sparkling')); ?></a></li> <li><a class="button" href="<?php echo esc_url( 'http://www.facebook.com/colorlib' ); ?>" title="Like Colorlib on Facebook" target="_blank"><?php printf(__('Like on Facebook','sparkling')); ?></a></li> <li><a class="button" href="<?php echo esc_url( 'http://twitter.com/colorlib/' ); ?>" title="Follow Colrolib on Twitter" target="_blank"><?php printf(__('Follow on Twitter','sparkling')); ?></a></li> </ul> </div> </div> </div> <?php } /** * Add Bootstrap thumbnail styling to images with captions * Use <figure> and <figcaption> * * @link http://justintadlock.com/archives/2011/07/01/captions-in-wordpress */ function sparkling_caption($output, $attr, $content) { if (is_feed()) { return $output; } $defaults = array( 'id' => '', 'align' => 'alignnone', 'width' => '', 'caption' => '' ); $attr = shortcode_atts($defaults, $attr); // If the width is less than 1 or there is no caption, return the content wrapped between the [caption] tags if 
($attr['width'] < 1 || empty($attr['caption'])) { return $content; } // Set up the attributes for the caption <figure> $attributes = (!empty($attr['id']) ? ' id="' . esc_attr($attr['id']) . '"' : '' ); $attributes .= ' class="thumbnail wp-caption ' . esc_attr($attr['align']) . '"'; $attributes .= ' style="width: ' . (esc_attr($attr['width']) + 10) . 'px"'; $output = '<figure' . $attributes .'>'; $output .= do_shortcode($content); $output .= '<figcaption class="caption wp-caption-text">' . $attr['caption'] . '</figcaption>'; $output .= '</figure>'; return $output; } add_filter('img_caption_shortcode', 'sparkling_caption', 10, 3); /** * Skype URI support for social media icons */ function sparkling_allow_skype_protocol( $protocols ){ $protocols[] = 'skype'; return $protocols; } add_filter( 'kses_allowed_protocols' , 'sparkling_allow_skype_protocol' ); /** * Add custom favicon displayed in WordPress dashboard and frontend */ function sparkling_add_favicon() { if ( of_get_option( 'custom_favicon' ) ) { echo '<link rel="shortcut icon" type="image/x-icon" href="' . of_get_option( 'custom_favicon' ) . '" />'. "\n"; } } add_action( 'wp_head', 'sparkling_add_favicon', 0 ); add_action( 'admin_head', 'sparkling_add_favicon', 0 ); /* * This one shows/hides the an option when a checkbox is clicked. 
*/ add_action( 'optionsframework_custom_scripts', 'optionsframework_custom_scripts' ); function optionsframework_custom_scripts() { ?> <script type="text/javascript"> jQuery(document).ready(function() { jQuery('#sparkling_slider_checkbox').click(function() { jQuery('#section-sparkling_slide_categories').fadeToggle(400); }); if (jQuery('#sparkling_slider_checkbox:checked').val() !== undefined) { jQuery('#section-sparkling_slide_categories').show(); } jQuery('#sparkling_slider_checkbox').click(function() { jQuery('#section-sparkling_slide_number').fadeToggle(400); }); if (jQuery('#sparkling_slider_checkbox:checked').val() !== undefined) { jQuery('#section-sparkling_slide_number').show(); } }); </script> <?php }
bee7er/whetstone_allotments
wp-content/themes/sparkling/inc/extras.php
PHP
gpl-2.0
21,606
/*
 * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.phases.tiers;

import org.graalvm.compiler.lir.phases.AllocationPhase.AllocationContext;
import org.graalvm.compiler.lir.phases.LIRPhaseSuite;
import org.graalvm.compiler.lir.phases.PostAllocationOptimizationPhase.PostAllocationOptimizationContext;
import org.graalvm.compiler.lir.phases.PreAllocationOptimizationPhase.PreAllocationOptimizationContext;
import org.graalvm.compiler.phases.PhaseSuite;

/**
 * Factory for the phase suites that make up a compiler configuration: three
 * graph-level tiers (high, mid, low) and three LIR-level stages (pre-allocation
 * optimization, register allocation, post-allocation optimization).
 */
public interface CompilerConfiguration {

    /** Creates the suite of high-tier graph phases. */
    PhaseSuite<HighTierContext> createHighTier();

    /** Creates the suite of mid-tier graph phases. */
    PhaseSuite<MidTierContext> createMidTier();

    /** Creates the suite of low-tier graph phases. */
    PhaseSuite<LowTierContext> createLowTier();

    /** Creates the LIR phases run before register allocation. */
    LIRPhaseSuite<PreAllocationOptimizationContext> createPreAllocationOptimizationStage();

    /** Creates the LIR register-allocation stage. */
    LIRPhaseSuite<AllocationContext> createAllocationStage();

    /** Creates the LIR phases run after register allocation. */
    LIRPhaseSuite<PostAllocationOptimizationContext> createPostAllocationOptimizationStage();
}
YouDiSN/OpenJDK-Research
jdk9/hotspot/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.phases/src/org/graalvm/compiler/phases/tiers/CompilerConfiguration.java
Java
gpl-2.0
1,924
// Inline style definitions for this component.
const styles = {
  content: {
    width: '100%',
    padding: 10,
    marginRight: 20,
  },
  header: {
    display: 'flex',
    justifyContent: 'space-between',
    alignItems: 'center',
  },
};

export default styles;
mcldev/geonode
geonode/monitoring/frontend/src/components/organisms/geonode-layers-analytics/styles.js
JavaScript
gpl-3.0
197
/* This file is part of cpp-ethereum. cpp-ethereum is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. cpp-ethereum is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>. */ /** @file Common.cpp * @author Gav Wood <i@gavwood.com> * @date 2014 */ #include "Common.h" #include <boost/algorithm/string/case_conv.hpp> #include <libdevcore/Base64.h> #include <libdevcore/Terminal.h> #include <libdevcore/CommonData.h> #include <libdevcore/CommonIO.h> #include <libdevcore/Log.h> #include <libdevcore/SHA3.h> #include "ICAP.h" #include "Exceptions.h" #include "BlockHeader.h" using namespace std; using namespace dev; using namespace dev::eth; namespace dev { namespace eth { const unsigned c_protocolVersion = 63; #if ETH_FATDB const unsigned c_minorProtocolVersion = 3; const unsigned c_databaseBaseVersion = 9; const unsigned c_databaseVersionModifier = 1; #else const unsigned c_minorProtocolVersion = 2; const unsigned c_databaseBaseVersion = 9; const unsigned c_databaseVersionModifier = 0; #endif const unsigned c_databaseVersion = c_databaseBaseVersion + (c_databaseVersionModifier << 8) + (23 << 9); Address toAddress(std::string const& _s) { try { eth::ICAP i = eth::ICAP::decoded(_s); return i.direct(); } catch (eth::InvalidICAP&) {} try { auto b = fromHex(_s.substr(0, 2) == "0x" ? 
_s.substr(2) : _s, WhenError::Throw); if (b.size() == 20) return Address(b); } catch (BadHexCharacter&) {} BOOST_THROW_EXCEPTION(InvalidAddress()); } vector<pair<u256, string>> const& units() { static const vector<pair<u256, string>> s_units = { {exp10<54>(), "Uether"}, {exp10<51>(), "Vether"}, {exp10<48>(), "Dether"}, {exp10<45>(), "Nether"}, {exp10<42>(), "Yether"}, {exp10<39>(), "Zether"}, {exp10<36>(), "Eether"}, {exp10<33>(), "Pether"}, {exp10<30>(), "Tether"}, {exp10<27>(), "Gether"}, {exp10<24>(), "Mether"}, {exp10<21>(), "grand"}, {exp10<18>(), "ether"}, {exp10<15>(), "finney"}, {exp10<12>(), "szabo"}, {exp10<9>(), "Gwei"}, {exp10<6>(), "Mwei"}, {exp10<3>(), "Kwei"}, {exp10<0>(), "wei"} }; return s_units; } std::string formatBalance(bigint const& _b) { ostringstream ret; u256 b; if (_b < 0) { ret << "-"; b = (u256)-_b; } else b = (u256)_b; if (b > units()[0].first * 1000) { ret << (b / units()[0].first) << " " << units()[0].second; return ret.str(); } ret << setprecision(5); for (auto const& i: units()) if (i.first != 1 && b >= i.first) { ret << (double(b / (i.first / 1000)) / 1000.0) << " " << i.second; return ret.str(); } ret << b << " wei"; return ret.str(); } static void badBlockInfo(BlockHeader const& _bi, string const& _err) { string const c_line = EthReset EthOnMaroon + string(80, ' ') + EthReset; string const c_border = EthReset EthOnMaroon + string(2, ' ') + EthReset EthMaroonBold; string const c_space = c_border + string(76, ' ') + c_border + EthReset; stringstream ss; ss << c_line << endl; ss << c_space << endl; ss << c_border + " Import Failure " + _err + string(max<int>(0, 53 - _err.size()), ' ') + " " + c_border << endl; ss << c_space << endl; string bin = toString(_bi.number()); ss << c_border + (" Guru Meditation #" + string(max<int>(0, 8 - bin.size()), '0') + bin + "." 
+ _bi.hash().abridged() + " ") + c_border << endl; ss << c_space << endl; ss << c_line; cwarn << "\n" + ss.str(); } void badBlock(bytesConstRef _block, string const& _err) { BlockHeader bi; DEV_IGNORE_EXCEPTIONS(bi = BlockHeader(_block)); badBlockInfo(bi, _err); } string TransactionSkeleton::userReadable(bool _toProxy, function<pair<bool, string>(TransactionSkeleton const&)> const& _getNatSpec, function<string(Address const&)> const& _formatAddress) const { if (creation) { // show notice concerning the creation code. TODO: this needs entering into natspec. return string("ÐApp is attempting to create a contract; ") + (_toProxy ? "(this transaction is not executed directly, but forwarded to another ÐApp) " : "") + "to be endowed with " + formatBalance(value) + ", with additional network fees of up to " + formatBalance(gas * gasPrice) + ".\n\nMaximum total cost is " + formatBalance(value + gas * gasPrice) + "."; } bool isContract; std::string natSpec; tie(isContract, natSpec) = _getNatSpec(*this); if (!isContract) { // recipient has no code - nothing special about this transaction, show basic value transfer info return "ÐApp is attempting to send " + formatBalance(value) + " to a recipient " + _formatAddress(to) + (_toProxy ? " (this transaction is not executed directly, but forwarded to another ÐApp)" : "") + ", with additional network fees of up to " + formatBalance(gas * gasPrice) + ".\n\nMaximum total cost is " + formatBalance(value + gas * gasPrice) + "."; } if (natSpec.empty()) return "ÐApp is attempting to call into an unknown contract at address " + _formatAddress(to) + ".\n\n" + (_toProxy ? 
"This transaction is not executed directly, but forwarded to another ÐApp.\n\n" : "") +
			"Call involves sending " + formatBalance(value) + " to the recipient, with additional network fees of up to " + formatBalance(gas * gasPrice) +
			// Fix: the original string started "However, ..." with no sentence
			// separator, producing "...fees of up to 1 etherHowever, ...".
			". However, this also does other stuff which we don't understand, and does so in your name.\n\n" +
			"WARNING: This is probably going to cost you at least " + formatBalance(value + gas * gasPrice) + ", however this doesn't include any side-effects, which could be of far greater importance.\n\n" +
			"REJECT UNLESS YOU REALLY KNOW WHAT YOU ARE DOING!";

	// Contract with NatSpec available: show the NatSpec description plus costs.
	return "ÐApp attempting to conduct contract interaction with " + _formatAddress(to) + ": <b>" + natSpec + "</b>.\n\n" +
		(_toProxy ? "This transaction is not executed directly, but forwarded to another ÐApp.\n\n" : "") +
		(value > 0 ?
			"In addition, ÐApp is attempting to send " + formatBalance(value) + " to said recipient, with additional network fees of up to " + formatBalance(gas * gasPrice) + " = " + formatBalance(value + gas * gasPrice) + "." :
			// Fix: missing space after "at most" before the formatted amount.
			"Additional network fees are at most " + formatBalance(gas * gasPrice) + ".");
}

}
}
EarthDollar/farmer
libethereum/libethcore/Common.cpp
C++
gpl-3.0
6,608
import requests

from allauth.socialaccount.providers.oauth2.views import (
    OAuth2Adapter,
    OAuth2LoginView,
    OAuth2CallbackView,
)

from .provider import BasecampProvider


class BasecampOAuth2Adapter(OAuth2Adapter):
    """OAuth2 adapter wiring up Basecamp's (37signals launchpad) endpoints."""

    provider_id = BasecampProvider.id
    access_token_url = 'https://launchpad.37signals.com/authorization/token?type=web_server'  # noqa
    authorize_url = 'https://launchpad.37signals.com/authorization/new'
    profile_url = 'https://launchpad.37signals.com/authorization.json'

    def complete_login(self, request, app, token, **kwargs):
        # Fetch the user's authorization profile using the bearer token,
        # then hand the raw JSON to the provider to build the social login.
        auth_headers = {'Authorization': 'Bearer {0}'.format(token.token)}
        response = requests.get(self.profile_url, headers=auth_headers)
        profile_data = response.json()
        provider = self.get_provider()
        return provider.sociallogin_from_response(request, profile_data)


oauth2_login = OAuth2LoginView.adapter_view(BasecampOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(BasecampOAuth2Adapter)
Alexander-M-Waldman/local_currency_site
lib/python2.7/site-packages/allauth/socialaccount/providers/basecamp/views.py
Python
gpl-3.0
1,123
/*
 * PLUGIN LoginMGR
 *
 * Latvian language file.
 *
 * Author:
 */

(function() {
	// Translation strings for this plugin (values untranslated so far).
	var strings = {
		accLogin: "Login",
		accPassword: "Password",
		accAccounts: "Accounts",
		accAuto: "Autologin",
		acAutoNone: "None",
		acAutoDay: "Every day",
		acAutoWeek: "Every week",
		acAutoMonth: "Every month"
	};
	for (var key in strings) {
		theUILang[key] = strings[key];
	}
	// Tell the plugin framework its language strings are ready.
	thePlugins.get("loginmgr").langLoaded();
})();
Rapiddot/ruTorrent
plugins/loginmgr/lang/lv.js
JavaScript
gpl-3.0
408
<?php

// Adds the requested admin page section to the current user's
// "admin bookmarks" preference (a comma-separated list of section names),
// then redirects to that page.

require('../../config.php');
require_once($CFG->libdir.'/adminlib.php');

require_login();

$adminroot = admin_get_root(false, false); // settings not required - only pages

// The section to bookmark must be supplied and the session key must check out.
if ($section = optional_param('section', '', PARAM_SAFEDIR) and confirm_sesskey()) {
    if (get_user_preferences('admin_bookmarks')) {
        // Preference already exists: reject duplicates.
        $bookmarks = explode(',', get_user_preferences('admin_bookmarks'));
        if (in_array($section, $bookmarks)) {
            print_error('bookmarkalreadyexists','admin');
            die;
        }
    } else {
        $bookmarks = array();
    }
    // Only real admin pages (setting pages or external pages) may be bookmarked.
    $temp = $adminroot->locate($section);
    if ($temp instanceof admin_settingpage || $temp instanceof admin_externalpage) {
        $bookmarks[] = $section;
        $bookmarks = implode(',', $bookmarks);
        set_user_preference('admin_bookmarks', $bookmarks);
    } else {
        print_error('invalidsection','admin');
        die;
    }
    // Send the user on to the page they just bookmarked.
    if ($temp instanceof admin_settingpage) {
        redirect($CFG->wwwroot . '/' . $CFG->admin . '/settings.php?section=' . $section);
    } elseif ($temp instanceof admin_externalpage) {
        redirect($temp->url);
    }
} else {
    print_error('invalidsection','admin');
    die;
}
dhamma-dev/SEA
web/blocks/admin_bookmarks/create.php
PHP
gpl-3.0
1,219
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // This file was generated from the C++ include file: SkColorFilter.h // Any changes made to this file will be discarded by the build. // To change this file, either edit the include, or device/tools/gluemaker/main.cpp, // or one of the auxilary file specifications in device/tools/gluemaker. package android.graphics; public class ColorFilter { protected void finalize() throws Throwable { finalizer(native_instance); } private static native void finalizer(int native_instance); int native_instance; }
mateor/pdroid
android-2.3.4_r1/tags/1.32/frameworks/base/graphics/java/android/graphics/ColorFilter.java
Java
gpl-3.0
1,155
<?php
/*
 * Copyright 2014 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

/**
 * Model for the Cloud IAP `AccessSettings` resource.
 *
 * Plain data holder: each `*Type` property names the model class used to
 * deserialize the matching JSON field, and each `*DataType` is empty because
 * the field is a single object rather than an array. Accessors below simply
 * read/write the dynamic properties managed by Google_Model.
 */
class Google_Service_CloudIAP_AccessSettings extends Google_Model
{
  protected $corsSettingsType = 'Google_Service_CloudIAP_CorsSettings';
  protected $corsSettingsDataType = '';
  protected $gcipSettingsType = 'Google_Service_CloudIAP_GcipSettings';
  protected $gcipSettingsDataType = '';
  protected $oauthSettingsType = 'Google_Service_CloudIAP_OAuthSettings';
  protected $oauthSettingsDataType = '';
  protected $policyDelegationSettingsType = 'Google_Service_CloudIAP_PolicyDelegationSettings';
  protected $policyDelegationSettingsDataType = '';

  /**
   * @param Google_Service_CloudIAP_CorsSettings
   */
  public function setCorsSettings(Google_Service_CloudIAP_CorsSettings $corsSettings)
  {
    $this->corsSettings = $corsSettings;
  }

  /**
   * @return Google_Service_CloudIAP_CorsSettings
   */
  public function getCorsSettings()
  {
    return $this->corsSettings;
  }

  /**
   * @param Google_Service_CloudIAP_GcipSettings
   */
  public function setGcipSettings(Google_Service_CloudIAP_GcipSettings $gcipSettings)
  {
    $this->gcipSettings = $gcipSettings;
  }

  /**
   * @return Google_Service_CloudIAP_GcipSettings
   */
  public function getGcipSettings()
  {
    return $this->gcipSettings;
  }

  /**
   * @param Google_Service_CloudIAP_OAuthSettings
   */
  public function setOauthSettings(Google_Service_CloudIAP_OAuthSettings $oauthSettings)
  {
    $this->oauthSettings = $oauthSettings;
  }

  /**
   * @return Google_Service_CloudIAP_OAuthSettings
   */
  public function getOauthSettings()
  {
    return $this->oauthSettings;
  }

  /**
   * @param Google_Service_CloudIAP_PolicyDelegationSettings
   */
  public function setPolicyDelegationSettings(Google_Service_CloudIAP_PolicyDelegationSettings $policyDelegationSettings)
  {
    $this->policyDelegationSettings = $policyDelegationSettings;
  }

  /**
   * @return Google_Service_CloudIAP_PolicyDelegationSettings
   */
  public function getPolicyDelegationSettings()
  {
    return $this->policyDelegationSettings;
  }
}
ftisunpar/BlueTape
vendor/google/apiclient-services/src/Google/Service/CloudIAP/AccessSettings.php
PHP
gpl-3.0
2,609
/**
 * Created by panos on 4/5/16.
 *
 * Register: a thin fluent wrapper around angular.module() that lets ES6
 * classes be registered as Angular 1.x components. Constructor functions are
 * normalized (array-annotation -> $inject) and wrapped into Angular's
 * ['dep1', ..., factoryFn] array notation where needed.
 */
import angular from 'angular/index'

// Private per-instance storage for the wrapped angular module (avoids
// exposing the module as a public property of Register).
let _app = new WeakMap()

export default class Register {
  // Reuses an existing module named `appName` if one exists; otherwise
  // creates it with the given dependency list (angular.module(name) throws
  // when the module is unknown, hence the try/catch).
  constructor (appName, deps) {
    deps = deps || []
    let module
    try {
      module = angular.module(appName)
    } catch (error) {
      module = angular.module(appName, deps)
    }
    _app.set(this, module)
  }

  // Registers a config block; returns `this` for chaining (as do all
  // registration methods below).
  config (constructorFn) {
    constructorFn = this._normalizeConstructor(constructorFn)
    let factoryArray = this._createFactoryArray(constructorFn)
    _app.get(this).config(factoryArray)
    return this
  }

  constant (name, obj) {
    _app.get(this).constant(name, obj)
    return this
  }

  controller (name, constructorFn) {
    _app.get(this).controller(name, constructorFn)
    return this
  }

  // Registers a directive defined as a class. The class's compile() is
  // decorated so that Angular receives the class's link() bound to the
  // directive-definition object, working around the non-lexical `this`
  // problem described below.
  directive (name, constructorFn) {
    constructorFn = this._normalizeConstructor(constructorFn)
    if (!constructorFn.prototype.compile) {
      // create an empty compile function if none was defined
      constructorFn.prototype.compile = () => {}
    }
    let originalCompileFn = this._cloneFunction(constructorFn.prototype.compile)
    // Decorate the compile method to automatically return the link method (if it exists)
    // and bind it to the context of the constructor (so `this` works correctly).
    // This gets around the problem of a non-lexical "this" which occurs when the directive class itself
    // returns `this.link` from within the compile function.
    this._override(constructorFn.prototype, 'compile', () => {
      return function() {
        originalCompileFn.apply(this, arguments)
        if (constructorFn.prototype.link) {
          return constructorFn.prototype.link.bind(this)
        }
      }
    })
    let factoryArray = this._createFactoryArray(constructorFn)
    _app.get(this).directive(name, factoryArray)
    return this
  }

  factory (name, constructorFn) {
    constructorFn = this._normalizeConstructor(constructorFn)
    let factoryArray = this._createFactoryArray(constructorFn)
    _app.get(this).factory(name, factoryArray)
    return this
  }

  filter (name, constructorFn) {
    constructorFn = this._normalizeConstructor(constructorFn)
    let factoryArray = this._createFactoryArray(constructorFn)
    _app.get(this).filter(name, factoryArray)
    return this
  }

  // Providers and services are handed to Angular as-is: Angular instantiates
  // them with `new`, so no factory wrapping is required.
  provider (name, constructorFn) {
    _app.get(this).provider(name, constructorFn)
    return this
  }

  run (constructorFn) {
    constructorFn = this._normalizeConstructor(constructorFn)
    let factoryArray = this._createFactoryArray(constructorFn)
    _app.get(this).run(factoryArray)
    return this
  }

  service (name, constructorFn) {
    _app.get(this).service(name, constructorFn)
    return this
  }

  value (name, object) {
    _app.get(this).value(name, object)
    return this
  }

  /**
   * If the constructorFn is an array of type ['dep1', 'dep2', ..., constructor() {}]
   * we need to pull out the array of dependencies and add it as an $inject property of the
   * actual constructor function.
   * @param input
   * @returns {*}
   * @private
   */
  _normalizeConstructor (input) {
    let constructorFn
    if (input.constructor == Array) {
      let injected = input.slice(0, input.length - 1)
      constructorFn = input[input.length - 1]
      constructorFn.$inject = injected
    } else {
      constructorFn = input
    }
    return constructorFn
  }

  /**
   * Convert a constructor function into a factory function which returns a new instance of that
   * constructor, with the correct dependencies automatically injected as arguments.
   *
   * In order to inject the dependencies, they must be attached to the constructor function with the
   * `$inject` property annotation.
   *
   * @param constructorFn
   * @returns {Array.<T>}
   * @private
   */
  _createFactoryArray(constructorFn) {
    // get the array of dependencies that are needed by this component (as contained in the `$inject` array)
    let args = constructorFn.$inject || []
    let factoryArray = args.slice()
    // The factoryArray uses Angular's array notation whereby each element of the array is the name of a
    // dependency, and the final item is the factory function itself.
    factoryArray.push((...args) => {
      // return new constructorFn(...args)
      let instance = new constructorFn(...args)
      // NOTE(review): this loop self-assigns every enumerable property —
      // presumably to force accessor/prototype values onto the instance
      // itself before Angular sees it; TODO confirm it is still needed.
      for (let key in instance) {
        instance[key] = instance[key]
      }
      return instance
    })
    return factoryArray
  }

  /**
   * Clone a function
   * @param original
   * @returns {Function}
   * @private
   */
  _cloneFunction (original) {
    return function() {
      return original.apply(this, arguments);
    }
  }

  /**
   * Override an object's method with a new one specified by `callback`
   * @param object
   * @param method
   * @param callback
   * @private
   */
  _override (object, method, callback) {
    object[method] = callback(object[method])
  }
}
ClaroBot/Distribution
plugin/website/Resources/modules/utils/register.js
JavaScript
gpl-3.0
4,922
/*
 *  Copyright (C) 2011 Tuomo Penttinen, all rights reserved.
 *
 *  Author: Tuomo Penttinen <tp@herqq.org>
 *
 *  This file is part of Herqq UPnP Av (HUPnPAv) library.
 *
 *  Herqq UPnP Av is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  Herqq UPnP Av is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with Herqq UPnP Av. If not, see <http://www.gnu.org/licenses/>.
 */

// HAudioBroadcast: CDS object representing a UPnP AV "audioBroadcast" item.
// The private class registers the CDS properties this class supports; the
// public class exposes typed setter/getter pairs that delegate to the generic
// setCdsProperty()/getCdsProperty() machinery of the base class.

#include "haudiobroadcast.h"
#include "haudiobroadcast_p.h"

#include "../model_mgmt/hcdsproperties.h"
#include "../../common/hradioband.h"

namespace Herqq
{

namespace Upnp
{

namespace Av
{

/*******************************************************************************
 * HAudioBroadcastPrivate
 ******************************************************************************/
HAudioBroadcastPrivate::HAudioBroadcastPrivate(
    const QString& clazz, HObject::CdsType cdsType) :
        HAudioItemPrivate(clazz, cdsType)
{
    // Register the audioBroadcast-specific CDS properties on top of those
    // inherited from HAudioItemPrivate. The last three are inserted with an
    // explicit default value of `false`.
    const HCdsProperties& inst = HCdsProperties::instance();
    insert(inst.get(HCdsProperties::upnp_region));
    insert(inst.get(HCdsProperties::upnp_radioCallSign));
    insert(inst.get(HCdsProperties::upnp_radioStationID));
    insert(inst.get(HCdsProperties::upnp_radioBand));
    insert(inst.get(HCdsProperties::upnp_channelNr));
    insert(inst.get(HCdsProperties::upnp_signalStrength));
    insert(inst.get(HCdsProperties::upnp_signalLocked).name(), false);
    insert(inst.get(HCdsProperties::upnp_tuned).name(), false);
    insert(inst.get(HCdsProperties::upnp_recordable).name(), false);
}

/*******************************************************************************
 * HAudioBroadcast
 ******************************************************************************/

// Protected constructor for subclasses providing their own class/type.
HAudioBroadcast::HAudioBroadcast(const QString& clazz, CdsType cdsType) :
    HAudioItem(*new HAudioBroadcastPrivate(clazz, cdsType))
{
}

// Protected constructor taking an already-built private object (d-pointer
// pattern used throughout HUPnPAv).
HAudioBroadcast::HAudioBroadcast(HAudioBroadcastPrivate& dd) :
    HAudioItem(dd)
{
}

// Public constructor: builds a broadcast item with the given title, parent
// container ID and object ID.
HAudioBroadcast::HAudioBroadcast(
    const QString& title, const QString& parentId, const QString& id) :
        HAudioItem(*new HAudioBroadcastPrivate(sClass(), sType()))
{
    init(title, parentId, id);
}

HAudioBroadcast::~HAudioBroadcast()
{
}

// Virtual constructor idiom: returns a fresh default-constructed instance.
HAudioBroadcast* HAudioBroadcast::newInstance() const
{
    return new HAudioBroadcast();
}

// --- Setters: each stores one CDS property value -----------------------------

void HAudioBroadcast::setRegion(const QString& arg)
{
    setCdsProperty(HCdsProperties::upnp_region, arg);
}

void HAudioBroadcast::setRadioCallSign(const QString& arg)
{
    setCdsProperty(HCdsProperties::upnp_radioCallSign, arg);
}

void HAudioBroadcast::setRadioStationId(const QString& arg)
{
    setCdsProperty(HCdsProperties::upnp_radioStationID, arg);
}

void HAudioBroadcast::setRadioBand(const HRadioBand& arg)
{
    // HRadioBand is a custom type, so it travels through QVariant explicitly.
    setCdsProperty(HCdsProperties::upnp_radioBand, QVariant::fromValue(arg));
}

void HAudioBroadcast::setChannelNr(qint32 arg)
{
    setCdsProperty(HCdsProperties::upnp_channelNr, arg);
}

void HAudioBroadcast::setSignalStrength(qint32 arg)
{
    setCdsProperty(HCdsProperties::upnp_signalStrength, arg);
}

void HAudioBroadcast::setSignalLocked(bool arg)
{
    setCdsProperty(HCdsProperties::upnp_signalLocked, arg);
}

void HAudioBroadcast::setTuned(bool arg)
{
    setCdsProperty(HCdsProperties::upnp_tuned, arg);
}

void HAudioBroadcast::setRecordable(bool arg)
{
    setCdsProperty(HCdsProperties::upnp_recordable, arg);
}

// --- Getters: each reads one CDS property back out of its QVariant -----------

QString HAudioBroadcast::region() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_region, &value);
    return value.toString();
}

QString HAudioBroadcast::radioCallSign() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_radioCallSign, &value);
    return value.toString();
}

QString HAudioBroadcast::radioStationId() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_radioStationID, &value);
    return value.toString();
}

HRadioBand HAudioBroadcast::radioBand() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_radioBand, &value);
    return value.value<HRadioBand>();
}

qint32 HAudioBroadcast::channelNr() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_channelNr, &value);
    return value.toInt();
}

qint32 HAudioBroadcast::signalStrength() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_signalStrength, &value);
    return value.toInt();
}

bool HAudioBroadcast::signalLocked() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_signalLocked, &value);
    return value.toBool();
}

bool HAudioBroadcast::tuned() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_tuned, &value);
    return value.toBool();
}

bool HAudioBroadcast::recordable() const
{
    QVariant value;
    getCdsProperty(HCdsProperties::upnp_recordable, &value);
    return value.toBool();
}

}
}
}
0xd34df00d/hupnp-ng
hupnp_av/src/cds_model/cds_objects/haudiobroadcast.cpp
C++
gpl-3.0
5,199
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | foam-extend: Open Source CFD
   \\    /   O peration     |
    \\  /    A nd           | For copyright notice see file Copyright
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
    This file is part of foam-extend.

    foam-extend is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by the
    Free Software Foundation, either version 3 of the License, or (at your
    option) any later version.

    foam-extend is distributed in the hope that it will be useful, but
    WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with foam-extend.  If not, see <http://www.gnu.org/licenses/>.

Description
    Abstract base class for lnGrad schemes.

\*---------------------------------------------------------------------------*/

#include "lnGradScheme.H"
#include "HashTable.H"

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

namespace Foam
{

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

namespace fa
{

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

// Define the constructor function hash tables
// (one run-time selection table per supported field value type, so concrete
// lnGrad schemes can register themselves and be selected by name at run time)
defineTemplateRunTimeSelectionTable(lnGradScheme<scalar>, Mesh);
defineTemplateRunTimeSelectionTable(lnGradScheme<vector>, Mesh);
defineTemplateRunTimeSelectionTable(lnGradScheme<tensor>, Mesh);

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

} // End namespace fa

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

} // End namespace Foam

// ************************************************************************* //
Unofficial-Extend-Project-Mirror/openfoam-extend-foam-extend-3.1
src/finiteArea/finiteArea/lnGradSchemes/lnGradScheme/lnGradSchemes.C
C++
gpl-3.0
2,074
package org.bukkit.command.defaults;

import java.util.List;

import org.apache.commons.lang.Validate;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.player.PlayerTeleportEvent.TeleportCause;

import com.google.common.collect.ImmutableList;

/**
 * Vanilla-style {@code /tp} command: teleports a player (or the sender) to
 * another player or to absolute/relative ({@code ~}) coordinates.
 *
 * Accepted forms (1-4 args):
 *   /tp &lt;target&gt;            — sender to target player
 *   /tp &lt;player&gt; &lt;target&gt;   — player to target player
 *   /tp &lt;x&gt; &lt;y&gt; &lt;z&gt;         — sender to coordinates
 *   /tp &lt;player&gt; &lt;x&gt; &lt;y&gt; &lt;z&gt; — player to coordinates
 *
 * NOTE(review): MIN_COORD, MAX_COORD, MIN_COORD_MINUS_ONE and getDouble()
 * are inherited from VanillaCommand (not visible in this file);
 * MIN_COORD_MINUS_ONE is used throughout as the "invalid" sentinel.
 */
@Deprecated
public class TeleportCommand extends VanillaCommand {

    public TeleportCommand() {
        super("tp");
        this.description = "Teleports the given player (or yourself) to another player or coordinates";
        this.usageMessage = "/tp [player] <target> and/or <x> <y> <z>";
        this.setPermission("bukkit.command.teleport");
    }

    @Override
    public boolean execute(CommandSender sender, String currentAlias, String[] args) {
        if (!testPermission(sender)) return true;
        if (args.length < 1 || args.length > 4) {
            sender.sendMessage(ChatColor.RED + "Usage: " + usageMessage);
            return false;
        }

        Player player;

        // Odd arg count (1 or 3) means no explicit player was named, so the
        // sender itself is the subject — which requires the sender to be a player.
        if (args.length == 1 || args.length == 3) {
            if (sender instanceof Player) {
                player = (Player) sender;
            } else {
                sender.sendMessage("Please provide a player!");
                return true;
            }
        } else {
            player = Bukkit.getPlayerExact(args[0]);
        }

        if (player == null) {
            sender.sendMessage("Player not found: " + args[0]);
            return true;
        }

        if (args.length < 3) {
            // Player-to-player teleport: the last argument names the target.
            Player target = Bukkit.getPlayerExact(args[args.length - 1]);
            if (target == null) {
                sender.sendMessage("Can't find player " + args[args.length - 1] + ". No tp.");
                return true;
            }
            player.teleport(target, TeleportCause.COMMAND);
            Command.broadcastCommandMessage(sender, "Teleported " + player.getDisplayName() + " to " + target.getDisplayName());
        } else if (player.getWorld() != null) {
            // Coordinate teleport: the last three args are x, y, z, each
            // possibly relative ("~dx"). y is parsed with min=max=0, which
            // getCoordinate treats as "no range clamping".
            Location playerLocation = player.getLocation();
            double x = getCoordinate(sender, playerLocation.getX(), args[args.length - 3]);
            double y = getCoordinate(sender, playerLocation.getY(), args[args.length - 2], 0, 0);
            double z = getCoordinate(sender, playerLocation.getZ(), args[args.length - 1]);

            if (x == MIN_COORD_MINUS_ONE || y == MIN_COORD_MINUS_ONE || z == MIN_COORD_MINUS_ONE) {
                sender.sendMessage("Please provide a valid location!");
                return true;
            }

            playerLocation.setX(x);
            playerLocation.setY(y);
            playerLocation.setZ(z);

            player.teleport(playerLocation, TeleportCause.COMMAND);
            Command.broadcastCommandMessage(sender, String.format("Teleported %s to %.2f, %.2f, %.2f", player.getDisplayName(), x, y, z));
        }

        return true;
    }

    // Parses a horizontal coordinate with the default world bounds.
    private double getCoordinate(CommandSender sender, double current, String input) {
        return getCoordinate(sender, current, input, MIN_COORD, MAX_COORD);
    }

    /**
     * Parses one coordinate argument.
     *
     * "~"-prefixed input is relative to {@code current}. Non-exact absolute
     * integers are centered on the block (+0.5, vanilla behavior). When both
     * {@code min} and {@code max} are 0 no range check is applied; otherwise
     * out-of-range values yield the MIN_COORD_MINUS_ONE sentinel.
     */
    private double getCoordinate(CommandSender sender, double current, String input, int min, int max) {
        boolean relative = input.startsWith("~");
        double result = relative ? current : 0;

        if (!relative || input.length() > 1) {
            boolean exact = input.contains(".");
            if (relative) input = input.substring(1);

            double testResult = getDouble(sender, input);
            if (testResult == MIN_COORD_MINUS_ONE) {
                return MIN_COORD_MINUS_ONE;
            }
            result += testResult;

            if (!exact && !relative) result += 0.5f;
        }
        if (min != 0 || max != 0) {
            if (result < min) {
                result = MIN_COORD_MINUS_ONE;
            }

            if (result > max) {
                result = MIN_COORD_MINUS_ONE;
            }
        }

        return result;
    }

    @Override
    public List<String> tabComplete(CommandSender sender, String alias, String[] args) throws IllegalArgumentException {
        Validate.notNull(sender, "Sender cannot be null");
        Validate.notNull(args, "Arguments cannot be null");
        Validate.notNull(alias, "Alias cannot be null");

        // Only the player-name positions (args 1-2) are completable;
        // coordinate positions get no suggestions.
        if (args.length == 1 || args.length == 2) {
            return super.tabComplete(sender, alias, args);
        }
        return ImmutableList.of();
    }
}
dentmaged/Bukkit
src/main/java/org/bukkit/command/defaults/TeleportCommand.java
Java
gpl-3.0
4,538
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle.  If not, see <http://www.gnu.org/licenses/>.

/**
 * Copyright (C) 2007-2011 Catalyst IT (http://www.catalyst.net.nz)
 * Copyright (C) 2011-2013 Totara LMS (http://www.totaralms.com)
 * Copyright (C) 2014 onwards Catalyst IT (http://www.catalyst-eu.net)
 *
 * @package    mod
 * @subpackage facetoface
 * @copyright  2014 onwards Catalyst IT <http://www.catalyst-eu.net>
 * @author     Stacey Walker <stacey@catalyst-eu.net>
 */

namespace mod_facetoface\event;

defined('MOODLE_INTERNAL') || die();

/**
 * The mod_facetoface take attendance event class.
 *
 * Fired when a user records attendance for a face-to-face session; the event
 * objectid is the facetoface_sessions record id.
 *
 * @package    mod_facetoface
 * @since      Moodle 2.7
 * @copyright  2014 onwards Catalyst IT <http://www.catalyst-eu.net>
 * @author     Stacey Walker <stacey@catalyst-eu.net>
 * @license    http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */
class take_attendance extends \core\event\base {

    /**
     * Init method.
     *
     * @return void
     */
    protected function init() {
        // NOTE(review): crud is 'r' (read) although taking attendance records
        // data; confirm this matches the intended event classification.
        $this->data['crud'] = 'r';
        $this->data['edulevel'] = self::LEVEL_PARTICIPATING;
        $this->data['objecttable'] = 'facetoface_sessions';
    }

    /**
     * Returns description of what happened.
     *
     * @return string
     */
    public function get_description() {
        return "The user with id '$this->userid' has taken the attendance for session with id '$this->objectid' in the facetoface instance " .
            "with the course module id '$this->contextinstanceid'.";
    }

    /**
     * Return localised event name.
     *
     * @return string
     */
    public static function get_name() {
        return get_string('eventattendancetaken', 'mod_facetoface');
    }

    /**
     * Get URL related to the action
     *
     * @return \moodle_url
     */
    public function get_url() {
        return new \moodle_url('/mod/facetoface/attendees.php',
            array('s' => $this->objectid, 'takeattendance' => 1));
    }

    /**
     * Return the legacy event log data.
     *
     * @return array|null
     */
    protected function get_legacy_logdata() {
        return array($this->courseid, $this->objecttable, 'take attendance',
            'attendees.php?s=' . $this->objectid . '&takeattendance=1', $this->objectid, $this->contextinstanceid);
    }

    /**
     * Custom validation.
     *
     * @throws \coding_exception
     * @return void
     */
    protected function validate_data() {
        parent::validate_data();

        // This event only makes sense at module (course-module) context.
        if ($this->contextlevel != CONTEXT_MODULE) {
            throw new \coding_exception('Context level must be CONTEXT_MODULE.');
        }
    }
}
saylordotorg/Moodle_Saylor
mod/facetoface/classes/event/take_attendance.php
PHP
gpl-3.0
3,223
/*! * This is a `i18n` language object. * * Spanish * * @author * Jalios (Twitter: @Jalios) * Sascha Greuel (Twitter: @SoftCreatR) * Rafael Miranda (GitHub: @rafa8626) * * @see core/i18n.js */ (function (exports) { if (exports.es === undefined) { exports.es = { 'mejs.plural-form': 1, // core/mediaelement.js 'mejs.download-file': 'Descargar archivo', // renderers/flash.js 'mejs.install-flash': 'Esta usando un navegador que no tiene activado o instalado el reproductor de Flash. Por favor active el plugin del reproductor de Flash o descargue la versión más reciente en https://get.adobe.com/flashplayer/', // features/fullscreen.js 'mejs.fullscreen': 'Pantalla completa', // features/playpause.js 'mejs.play': 'Reproducción', 'mejs.pause': 'Pausa', // features/progress.js 'mejs.time-slider': 'Control deslizante de tiempo', 'mejs.time-help-text': 'Use las flechas Izquierda/Derecha para avanzar un segundo y las flechas Arriba/Abajo para avanzar diez segundos.', 'mejs.live-broadcast': 'Transmisión en Vivo', // features/volume.js 'mejs.volume-help-text': 'Use las flechas Arriba/Abajo para subir o bajar el volumen.', 'mejs.unmute': 'Reactivar silencio', 'mejs.mute': 'Silencio', 'mejs.volume-slider': 'Control deslizante de volumen', // core/player.js 'mejs.video-player': 'Reproductor de video', 'mejs.audio-player': 'Reproductor de audio', // features/tracks.js 'mejs.captions-subtitles': 'Leyendas/Subtítulos', 'mejs.captions-chapters': 'Capítulos', 'mejs.none': 'Ninguno', 'mejs.afrikaans': 'Afrikaans', 'mejs.albanian': 'Albano', 'mejs.arabic': 'Árabe', 'mejs.belarusian': 'Bielorruso', 'mejs.bulgarian': 'Búlgaro', 'mejs.catalan': 'Catalán', 'mejs.chinese': 'Chino', 'mejs.chinese-simplified': 'Chino (Simplificado)', 'mejs.chinese-traditional': 'Chino (Tradicional)', 'mejs.croatian': 'Croata', 'mejs.czech': 'Checo', 'mejs.danish': 'Danés', 'mejs.dutch': 'Holandés', 'mejs.english': 'Inglés', 'mejs.estonian': 'Estoniano', 'mejs.filipino': 'Filipino', 'mejs.finnish': 'Finlandés', 
'mejs.french': 'Francés', 'mejs.galician': 'Gallego', 'mejs.german': 'Alemán', 'mejs.greek': 'Griego', 'mejs.haitian-creole': 'Haitiano Criollo', 'mejs.hebrew': 'Hebreo', 'mejs.hindi': 'Hindi', 'mejs.hungarian': 'Húngaro', 'mejs.icelandic': 'Islandés', 'mejs.indonesian': 'Indonesio', 'mejs.irish': 'Irlandés', 'mejs.italian': 'Italiano', 'mejs.japanese': 'Japonés', 'mejs.korean': 'Coreano', 'mejs.latvian': 'Letón', 'mejs.lithuanian': 'Lituano', 'mejs.macedonian': 'Macedonio', 'mejs.malay': 'Malayo', 'mejs.maltese': 'Maltés', 'mejs.norwegian': 'Noruego', 'mejs.persian': 'Persa', 'mejs.polish': 'Polaco', 'mejs.portuguese': 'Portugués', 'mejs.romanian': 'Rumano', 'mejs.russian': 'Ruso', 'mejs.serbian': 'Serbio', 'mejs.slovak': 'Eslovaco', 'mejs.slovenian': 'Eslovenio', 'mejs.spanish': 'Español', 'mejs.swahili': 'Swahili', 'mejs.swedish': 'Suizo', 'mejs.tagalog': 'Tagalog', 'mejs.thai': 'Tailandés', 'mejs.turkish': 'Turco', 'mejs.ukrainian': 'Ucraniano', 'mejs.vietnamese': 'Vietnamita', 'mejs.welsh': 'Galés', 'mejs.yiddish': 'Yiddish' }; } })(mejs.i18n);
ntja/smartschool
v2/public/js/plugins/mediaelement/src/js/languages/es.js
JavaScript
gpl-3.0
3,360
<?php
// Login form partial: renders the username/password form, optional
// Persona/OpenID sign-in buttons, and (when registration is allowed) the
// "create an account" section. Expects $referer, $selected_tab, $options and
// optionally $loginintro, $registrationintro and $error from the controller.
?>
<?php /* \thebuggenie\core\framework\Event::createNew('core', 'login_form_tab')->trigger(array('selected_tab' => $selected_tab)); */ ?>
<script type="text/javascript">
    require(['domReady', 'prototype'], function (domReady, prototype) {
        domReady(function () {
            if (document.location.href.search('<?php echo make_url('login_page'); ?>') != -1)
                if ($('tbg3_referer'))
                    $('tbg3_referer').setValue('<?php echo make_url('dashboard'); ?>');
                else if ($('return_to'))
                    $('return_to').setValue('<?php echo make_url('dashboard'); ?>');
        });
    });
</script>
<div class="logindiv regular active" id="regular_login_container">
    <?php /* Optional wiki-article intro shown above the form. */ ?>
    <?php if ($loginintro instanceof \thebuggenie\modules\publish\entities\Article): ?>
        <?php include_component('publish/articledisplay', array('article' => $loginintro, 'show_title' => false, 'show_details' => false, 'show_actions' => false, 'embedded' => true)); ?>
    <?php endif; ?>
    <form accept-charset="<?php echo \thebuggenie\core\framework\Context::getI18n()->getCharset(); ?>" action="<?php echo make_url('login'); ?>" method="post" id="login_form" onsubmit="TBG.Main.Login.login('<?php echo make_url('login'); ?>'); return false;">
        <?php /* Post-login destination: regular referer vs. forced redirect target. */ ?>
        <?php if (!\thebuggenie\core\framework\Context::hasMessage('login_force_redirect') || \thebuggenie\core\framework\Context::getMessage('login_force_redirect') !== true): ?>
            <input type="hidden" id="tbg3_referer" name="tbg3_referer" value="<?php echo $referer; ?>" />
        <?php else: ?>
            <input type="hidden" id="return_to" name="return_to" value="<?php echo $referer; ?>" />
        <?php endif; ?>
        <h2 class="login_header"><?php echo __('Log in with your username and password'); ?></h2>
        <ul class="login_formlist">
            <li>
                <label for="tbg3_username"><?php echo __('Username'); ?></label>
                <input type="text" id="tbg3_username" name="tbg3_username">
            </li>
            <li>
                <label for="tbg3_password"><?php echo __('Password'); ?></label>
                <input type="password" id="tbg3_password" name="tbg3_password"><br>
            </li>
            <li>
                <input type="checkbox" name="tbg3_rememberme" value="1" id="tbg3_rememberme"><label class="login_fieldlabel" for="tbg3_rememberme"><?php echo __('Keep me logged in'); ?></label>
            </li>
        </ul>
        <div class="login_button_container">
            <?php \thebuggenie\core\framework\Event::createNew('core', 'login_button_container')->trigger(); ?>
            <?php echo image_tag('spinning_20.gif', array('id' => 'login_indicator', 'style' => 'display: none;')); ?>
            <input type="submit" id="login_button" class="button button-silver" value="<?php echo __('Log in'); ?>">
        </div>
    </form>
    <?php /* Alternative sign-in providers (Persona / OpenID), if enabled. */ ?>
    <?php if (\thebuggenie\core\framework\Settings::isPersonaAvailable() || \thebuggenie\core\framework\Settings::isOpenIDavailable()): ?>
        <div style="text-align: center;">
            <fieldset style="border: 0; border-top: 1px dotted rgba(0, 0, 0, 0.3); padding: 10px 100px; width: 100px; margin: 15px auto 0 auto;">
                <legend style="text-align: center; width: 100%; background-color: transparent;"><?php echo __('%regular_login or %persona_or_openid_login', array('%regular_login' => '', '%persona_or_openid_login' => '')); ?></legend>
            </fieldset>
            <?php if (\thebuggenie\core\framework\Settings::isPersonaAvailable()): ?>
                <a class="persona-button" id="persona-signin-button" href="#"><span><?php echo __('Sign in with Persona'); ?></span></a>
            <?php endif; ?>
            <?php if (\thebuggenie\core\framework\Settings::isOpenIDavailable()): ?>
                <a class="persona-button orange" id="openid-signin-button" href="javascript:void(0);" onclick="$('regular_login_container').toggleClassName('active');$('openid_container').toggleClassName('active');"><span><?php echo __('Sign in with OpenID'); ?></span></a>
            <?php endif; ?>
        </div>
    <?php endif; ?>
</div>
<?php if (\thebuggenie\core\framework\Settings::isOpenIDavailable()): ?>
    <?php include_component('main/openidbuttons'); ?>
<?php endif; ?>
<?php \thebuggenie\core\framework\Event::createNew('core', 'login_form_pane')->trigger(array_merge(array('selected_tab' => $selected_tab), $options)); ?>
<?php /* Registration section, only when self-registration is enabled. */ ?>
<?php if (\thebuggenie\core\framework\Settings::isRegistrationAllowed()): ?>
    <div style="text-align: center;" id="registration-button-container" class="logindiv login_button_container registration_button_container active">
        <fieldset style="border: 0; border-top: 1px dotted rgba(0, 0, 0, 0.3); padding: 5px 100px; width: 100px; margin: 5px auto 0 auto;">
            <legend style="text-align: center; width: 100%; background-color: transparent;"><?php echo __('%login or %signup', array('%login' => '', '%signup' => '')); ?></legend>
        </fieldset>
        <a href="javascript:void(0);" id="create-account-button" onclick="$('register').addClassName('active');$('registration-button-container').removeClassName('active');$('regular_login_container').removeClassName('active');$('openid_container').removeClassName('active');"><?php echo __('Create an account'); ?></a>
    </div>
    <?php include_component('main/loginregister', compact('registrationintro')); ?>
<?php endif; ?>
<?php /* Surface a controller-provided error message as a client-side toast. */ ?>
<?php if (isset($error)): ?>
    <script type="text/javascript">
        require(['domReady', 'thebuggenie/tbg'], function (domReady, TBG) {
            domReady(function () {
                TBG.Main.Helpers.Message.error('<?php echo $error; ?>');
            });
        });
    </script>
<?php endif; ?>
gabor-udvari/thebuggenie
core/modules/main/templates/_login.inc.php
PHP
mpl-2.0
5,720
#include "node_dtrace.h" #include "node_win32_etw_provider.h" #include "node_etw_provider.h" #include "node_win32_etw_provider-inl.h" namespace node { using v8::JitCodeEvent; using v8::V8; HMODULE advapi; REGHANDLE node_provider; EventRegisterFunc event_register; EventUnregisterFunc event_unregister; EventWriteFunc event_write; int events_enabled; static uv_async_t dispatch_etw_events_change_async; struct v8tags { char prefix[32 - sizeof(size_t)]; size_t prelen; }; // The v8 CODE_ADDED event name has a prefix indicating the type of event. // Many of these are internal to v8. // The trace_codes array specifies which types are written. struct v8tags trace_codes[] = { #define MAKE_V8TAG(s) { s, sizeof(s) - 1 } MAKE_V8TAG("LazyCompile:"), MAKE_V8TAG("Script:"), MAKE_V8TAG("Function:"), MAKE_V8TAG("RegExp:"), MAKE_V8TAG("Eval:") #undef MAKE_V8TAG }; /* Below are some code prefixes which are not being written. * "Builtin:" * "Stub:" * "CallIC:" * "LoadIC:" * "KeyedLoadIC:" * "StoreIC:" * "KeyedStoreIC:" * "CallPreMonomorphic:" * "CallInitialize:" * "CallMiss:" * "CallMegamorphic:" */ // v8 sometimes puts a '*' or '~' in front of the name. #define V8_MARKER1 '*' #define V8_MARKER2 '~' // If prefix is not in filtered list return -1, // else return length of prefix and marker. 
int FilterCodeEvents(const char* name, size_t len) { for (size_t i = 0; i < arraysize(trace_codes); i++) { size_t prelen = trace_codes[i].prelen; if (prelen < len) { if (strncmp(name, trace_codes[i].prefix, prelen) == 0) { if (name[prelen] == V8_MARKER1 || name[prelen] == V8_MARKER2) prelen++; return prelen; } } } return -1; } // callback from V8 module passes symbol and address info for stack walk void CodeAddressNotification(const JitCodeEvent* jevent) { int pre_offset = 0; if (NODE_V8SYMBOL_ENABLED()) { switch (jevent->type) { case JitCodeEvent::CODE_ADDED: pre_offset = FilterCodeEvents(jevent->name.str, jevent->name.len); if (pre_offset >= 0) { // skip over prefix and marker NODE_V8SYMBOL_ADD(jevent->name.str + pre_offset, jevent->name.len - pre_offset, jevent->code_start, jevent->code_len); } break; case JitCodeEvent::CODE_REMOVED: NODE_V8SYMBOL_REMOVE(jevent->code_start, 0); break; case JitCodeEvent::CODE_MOVED: NODE_V8SYMBOL_MOVE(jevent->code_start, jevent->new_code_start); break; default: break; } } } // Call v8 to enable or disable code event callbacks. // Must be on default thread to do this. // Note: It is possible to call v8 from ETW thread, but then // event callbacks are received in the same thread. Attempts // to write ETW events in this thread will fail. void etw_events_change_async(uv_async_t* handle) { if (events_enabled > 0) { NODE_V8SYMBOL_RESET(); v8::Isolate::GetCurrent()->SetJitCodeEventHandler( v8::kJitCodeEventEnumExisting, CodeAddressNotification); } else { v8::Isolate::GetCurrent()->SetJitCodeEventHandler( v8::kJitCodeEventDefault, nullptr); } } // This callback is called by ETW when consumers of our provider // are enabled or disabled. // The callback is dispatched on ETW thread. // Before calling into V8 to enable code events, switch to default thread. 
void NTAPI etw_events_enable_callback(
  LPCGUID SourceId,
  ULONG IsEnabled,
  UCHAR Level,
  ULONGLONG MatchAnyKeyword,
  ULONGLONG MatchAllKeywords,
  PEVENT_FILTER_DESCRIPTOR FilterData,
  PVOID CallbackContext) {
  if (IsEnabled) {
    events_enabled++;
    // First consumer enabled: ask the default-loop thread (via the async
    // handle) to install the V8 JIT code-event handler.
    if (events_enabled == 1) {
      uv_async_send(&dispatch_etw_events_change_async);
    }
  } else {
    events_enabled--;
    // Last consumer disabled: ask the default-loop thread to remove it.
    if (events_enabled == 0) {
      uv_async_send(&dispatch_etw_events_change_async);
    }
  }
}


// Loads advapi32.dll, resolves the ETW entry points and registers this
// process as an ETW provider. Silently does nothing if the entry points
// are unavailable.
void init_etw() {
  events_enabled = 0;
  advapi = LoadLibraryW(L"advapi32.dll");
  if (advapi) {
    // Resolved dynamically; any of these may be null on older Windows.
    event_register = (EventRegisterFunc)
      GetProcAddress(advapi, "EventRegister");
    event_unregister = (EventUnregisterFunc)
      GetProcAddress(advapi, "EventUnregister");
    event_write = (EventWriteFunc)GetProcAddress(advapi, "EventWrite");

    // create async object used to invoke main thread from callback
    CHECK_EQ(0, uv_async_init(uv_default_loop(),
                              &dispatch_etw_events_change_async,
                              etw_events_change_async));
    // Don't let this handle keep the event loop alive on its own.
    uv_unref(reinterpret_cast<uv_handle_t*>(&dispatch_etw_events_change_async));

    if (event_register) {
      DWORD status = event_register(&NODE_ETW_PROVIDER,
                                    etw_events_enable_callback,
                                    nullptr,
                                    &node_provider);
      CHECK_EQ(status, ERROR_SUCCESS);
    }
  }
}


// Unregisters the ETW provider, detaches the V8 code-event handler and
// releases advapi32.dll. Safe to call even if init_etw() partially failed.
void shutdown_etw() {
  if (advapi && event_unregister && node_provider) {
    event_unregister(node_provider);
    node_provider = 0;
  }

  events_enabled = 0;
  // Stop V8 from calling back into us after the provider is gone.
  v8::Isolate::GetCurrent()->SetJitCodeEventHandler(
      v8::kJitCodeEventDefault,
      nullptr);

  if (advapi) {
    FreeLibrary(advapi);
    advapi = nullptr;
  }
}

}  // namespace node
Jet-Streaming/framework
src/bes/nodejs/node_win32_etw_provider.cc
C++
mpl-2.0
5,284
odoo.define('website_event.ticket_details', function (require) {

var publicWidget = require('web.public.widget');

/**
 * Widget handling the event ticket-selection box: folding/unfolding the
 * details panel and enabling the registration button only when at least
 * one ticket is selected.
 */
publicWidget.registry.ticketDetailsWidget = publicWidget.Widget.extend({
    selector: '.o_wevent_js_ticket_details',
    events: {
        'click .o_wevent_registration_btn': '_onTicketDetailsClick',
        'change .custom-select': '_onTicketQuantityChange'
    },

    /**
     * Reads the folding behaviour from the root element's data attribute.
     *
     * @override
     */
    start: function (){
        this.foldedByDefault = this.$el.data('foldedByDefault') === 1;
        return this._super.apply(this, arguments);
    },

    //--------------------------------------------------------------------------
    // Private
    //--------------------------------------------------------------------------

    /**
     * Sums the selected quantity over every ticket <select>.
     *
     * @private
     * @returns {integer} total number of tickets currently selected
     */
    _getTotalTicketCount: function (){
        var ticketCount = 0;
        this.$('.custom-select').each(function (){
            // Explicit radix + NaN guard: an empty/placeholder value would
            // otherwise poison the whole sum with NaN.
            ticketCount += parseInt($(this).val(), 10) || 0;
        });
        return ticketCount;
    },

    //--------------------------------------------------------------------------
    // Handlers
    //--------------------------------------------------------------------------

    /**
     * Toggles the folded/unfolded state of the ticket details panel.
     *
     * @private
     * @param {Event} ev
     */
    _onTicketDetailsClick: function (ev){
        ev.preventDefault();
        if (this.foldedByDefault){
            $(ev.currentTarget).toggleClass('btn-primary text-left pl-0');
            $(ev.currentTarget).siblings().toggleClass('d-none');
            this.$('.close').toggleClass('d-none');
        }
    },

    /**
     * Disables the submit button while no ticket is selected.
     *
     * @private
     */
    _onTicketQuantityChange: function (){
        // 'disabled' is a boolean property; .prop() reliably toggles it,
        // whereas .attr('disabled', false) does not re-enable the button.
        this.$('button.btn-primary').prop('disabled', this._getTotalTicketCount() === 0);
    }
});

return publicWidget.registry.ticketDetailsWidget;

});
ddico/odoo
addons/website_event/static/src/js/website_event_ticket_details.js
JavaScript
agpl-3.0
1,926
<?php

// German (de) translation strings for the company-administration screens.
// Values are runtime strings and must stay in German; only keys are code.
return [
    'about_companies_title' => 'Über Unternehmen',
    'about_companies_text'  => 'Firmen können als Bezeichnungsfeld genutzt werden oder um die Sichtbarkeit der Assets, Benutzer, etc. einzuschränken, wenn die volle Mehrmandanten-Unterstützung für Firmen in den Admin-Einstellungen aktiviert ist.',
    'select_company' => 'Firma auswählen',
];
dkmansion/snipe-it
resources/lang/de/admin/companies/general.php
PHP
agpl-3.0
393
/*
    ***** BEGIN LICENSE BLOCK *****

    Copyright © 2009 Center for History and New Media
                     George Mason University, Fairfax, Virginia, USA
                     http://zotero.org

    This file is part of Zotero.

    Zotero is free software: you can redistribute it and/or modify
    it under the terms of the GNU Affero General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    Zotero is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with Zotero.  If not, see <http://www.gnu.org/licenses/>.

    ***** END LICENSE BLOCK *****
*/

// Standalone note-editing window: binds the note editor widget to an item,
// parent item or collection passed in via window.arguments, and keeps the
// window title in sync through the Zotero notifier.

var noteEditor;
var notifierUnregisterID;

// Window load handler: wires up the editor and registers the item observer.
function onLoad() {
	noteEditor = document.getElementById('zotero-note-editor');
	noteEditor.mode = 'edit';
	noteEditor.focus();

	// Set font size from pref
	Zotero.setFontSize(noteEditor);

	// NOTE(review): io is only assigned when window.arguments exists, but it
	// is dereferenced unconditionally below — confirm all openers pass args.
	if (window.arguments) {
		var io = window.arguments[0];
	}

	var itemID = io.itemID;
	var collectionID = io.collectionID;
	var parentItemID = io.parentItemID;

	if (itemID) {
		// Editing an existing note item.
		var ref = Zotero.Items.get(itemID);

		var clearUndo = noteEditor.item ? noteEditor.item.id != ref.id : false;

		noteEditor.item = ref;

		// If loading new or different note, disable undo while we repopulate the text field
		// so Undo doesn't end up clearing the field. This also ensures that Undo doesn't
		// undo content from another note into the current one.
		if (clearUndo) {
			noteEditor.clearUndo();
		}

		document.title = ref.getNoteTitle();
	}
	else {
		// New note: attach it to a parent item or to a collection, if given.
		if (parentItemID) {
			var ref = Zotero.Items.get(parentItemID);
			noteEditor.parent = ref;
		}
		else {
			// collectionID may arrive as the literal string 'undefined'
			// from the opener; treat that like no collection.
			if (collectionID && collectionID != '' && collectionID != 'undefined') {
				noteEditor.collection = Zotero.Collections.get(collectionID);
			}
		}
		noteEditor.refresh();
	}

	// Observe 'item' events so the title/window name track later edits.
	notifierUnregisterID = Zotero.Notifier.registerObserver(NotifyCallback, 'item');
}

// Window unload handler: persist any pending edits and detach the observer.
function onUnload() {
	if(noteEditor && noteEditor.value)
		noteEditor.save();

	Zotero.Notifier.unregisterObserver(notifierUnregisterID);
}

// Notifier observer: refreshes this window when its note item changes.
var NotifyCallback = {
	notify: function(action, type, ids){
		if (noteEditor.item && ids.indexOf(noteEditor.item.id) != -1) {
			// Self-assignment forces the editor to reload the item's content.
			noteEditor.item = noteEditor.item;

			// If the document title hasn't yet been set, reset undo so
			// undoing to empty isn't possible
			// NOTE(review): reads noteEditor.note here but noteEditor.item
			// everywhere else — confirm both refer to the same object.
			var noteTitle = noteEditor.note.getNoteTitle();
			if (!document.title && noteTitle != '') {
				noteEditor.clearUndo();
				document.title = noteTitle;
			}

			// Update the window name (used for focusing) in case this is a new note
			window.name = 'zotero-note-' + noteEditor.item.id;
		}
	}
}

addEventListener("load", function(e) { onLoad(e); }, false);
addEventListener("unload", function(e) { onUnload(e); }, false);
egh/zotero
chrome/content/zotero/note.js
JavaScript
agpl-3.0
3,063
<?php
/**
 * PHPExcel
 *
 * Copyright (c) 2006 - 2008 PHPExcel
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 *
 * @category   PHPExcel
 * @package    PHPExcel_Worksheet
 * @copyright  Copyright (c) 2006 - 2008 PHPExcel (http://www.codeplex.com/PHPExcel)
 * @license    http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt	LGPL
 * @version    1.6.3, 2008-08-25
 */

/** PHPExcel_IComparable */
//require_once 'PHPExcel/IComparable.php';

/** PHPExcel_Worksheet */
//require_once 'PHPExcel/Worksheet.php';

/** PHPExcel_Worksheet_BaseDrawing */
//require_once 'PHPExcel/Worksheet/BaseDrawing.php';

/** PHPExcel_Worksheet_Drawing */
//require_once 'PHPExcel/Worksheet/Drawing.php';

/**
 * PHPExcel_Worksheet_HeaderFooterDrawing
 *
 * An image placed in a worksheet header or footer, with optional
 * proportional resizing when one dimension is changed.
 *
 * @category   PHPExcel
 * @package    PHPExcel_Worksheet
 * @copyright  Copyright (c) 2006 - 2008 PHPExcel (http://www.codeplex.com/PHPExcel)
 */
class PHPExcel_Worksheet_HeaderFooterDrawing extends PHPExcel_Worksheet_Drawing implements PHPExcel_IComparable
{
	/**
	 * Path to the image file
	 *
	 * @var string
	 */
	private $_path;

	/**
	 * Name
	 *
	 * @var string
	 */
	protected $_name;

	/**
	 * Offset X
	 *
	 * @var int
	 */
	protected $_offsetX;

	/**
	 * Offset Y
	 *
	 * @var int
	 */
	protected $_offsetY;

	/**
	 * Width in pixels
	 *
	 * @var int
	 */
	protected $_width;

	/**
	 * Height in pixels
	 *
	 * @var int
	 */
	protected $_height;

	/**
	 * Keep aspect ratio when one dimension is changed?
	 *
	 * @var boolean
	 */
	protected $_resizeProportional;

	/**
	 * Create a new PHPExcel_Worksheet_HeaderFooterDrawing
	 */
	public function __construct()
	{
		// Initialise values
		$this->_path               = '';
		$this->_name               = '';
		$this->_offsetX            = 0;
		$this->_offsetY            = 0;
		$this->_width              = 0;
		$this->_height             = 0;
		$this->_resizeProportional = true;
	}

	/**
	 * Get Name
	 *
	 * @return string
	 */
	public function getName()
	{
		return $this->_name;
	}

	/**
	 * Set Name
	 *
	 * @param string $pValue
	 */
	public function setName($pValue = '')
	{
		$this->_name = $pValue;
	}

	/**
	 * Get OffsetX
	 *
	 * @return int
	 */
	public function getOffsetX()
	{
		return $this->_offsetX;
	}

	/**
	 * Set OffsetX
	 *
	 * @param int $pValue
	 */
	public function setOffsetX($pValue = 0)
	{
		$this->_offsetX = $pValue;
	}

	/**
	 * Get OffsetY
	 *
	 * @return int
	 */
	public function getOffsetY()
	{
		return $this->_offsetY;
	}

	/**
	 * Set OffsetY
	 *
	 * @param int $pValue
	 */
	public function setOffsetY($pValue = 0)
	{
		$this->_offsetY = $pValue;
	}

	/**
	 * Get Width
	 *
	 * @return int
	 */
	public function getWidth()
	{
		return $this->_width;
	}

	/**
	 * Set Width, adjusting height to keep the aspect ratio when proportional
	 * resizing is enabled.
	 *
	 * @param int $pValue
	 */
	public function setWidth($pValue = 0)
	{
		// Resize proportional?
		// Guard against division by zero when no dimensions are known yet.
		// BUGFIX: the ratio must be height/width (was width/height, which
		// inverted the aspect ratio — compare the parallel setHeight()).
		if ($this->_resizeProportional && $pValue != 0 && $this->_width != 0) {
			$ratio = $this->_height / $this->_width;
			$this->_height = round($ratio * $pValue);
		}

		// Set width
		$this->_width = $pValue;
	}

	/**
	 * Get Height
	 *
	 * @return int
	 */
	public function getHeight()
	{
		return $this->_height;
	}

	/**
	 * Set Height, adjusting width to keep the aspect ratio when proportional
	 * resizing is enabled.
	 *
	 * @param int $pValue
	 */
	public function setHeight($pValue = 0)
	{
		// Resize proportional?
		// Guard against division by zero when no dimensions are known yet.
		if ($this->_resizeProportional && $pValue != 0 && $this->_height != 0) {
			$ratio = $this->_width / $this->_height;
			$this->_width = round($ratio * $pValue);
		}

		// Set height
		$this->_height = $pValue;
	}

	/**
	 * Set width and height with proportional resize: the image is scaled to
	 * fit inside the given bounding box while keeping its aspect ratio.
	 *
	 * @author Vincent@luo MSN:kele_100@hotmail.com
	 * @param int $width
	 * @param int $height
	 * @example $objDrawing->setResizeProportional(true);
	 * @example $objDrawing->setWidthAndHeight(160,120);
	 */
	public function setWidthAndHeight($width = 0, $height = 0)
	{
		// Nothing to scale from if current dimensions are unknown (avoids
		// division by zero); ratios are only meaningful when both are set.
		if ($this->_resizeProportional && !($width == 0 || $height == 0)
			&& $this->_width != 0 && $this->_height != 0) {
			$xratio = $width  / $this->_width;
			$yratio = $height / $this->_height;
			if (($xratio * $this->_height) < $height) {
				// Width is the limiting dimension.
				$this->_height = ceil($xratio * $this->_height);
				$this->_width  = $width;
			} else {
				// Height is the limiting dimension.
				$this->_width  = ceil($yratio * $this->_width);
				$this->_height = $height;
			}
		}
	}

	/**
	 * Get ResizeProportional
	 *
	 * @return boolean
	 */
	public function getResizeProportional()
	{
		return $this->_resizeProportional;
	}

	/**
	 * Set ResizeProportional
	 *
	 * @param boolean $pValue
	 */
	public function setResizeProportional($pValue = true)
	{
		$this->_resizeProportional = $pValue;
	}

	/**
	 * Get Filename (basename of the image path)
	 *
	 * @return string
	 */
	public function getFilename()
	{
		return basename($this->_path);
	}

	/**
	 * Get file Extension
	 *
	 * @return string
	 */
	public function getExtension()
	{
		// end() takes its argument by reference, so the explode() result must
		// be stored in a variable first (avoids an E_STRICT notice).
		$parts = explode(".", basename($this->_path));
		return end($parts);
	}

	/**
	 * Get Path
	 *
	 * @return string
	 */
	public function getPath()
	{
		return $this->_path;
	}

	/**
	 * Set Path
	 *
	 * @param 	string 		$pValue			File path
	 * @param 	boolean		$pVerifyFile	Verify file
	 * @throws 	Exception	if $pVerifyFile is true and the file does not exist
	 */
	public function setPath($pValue = '', $pVerifyFile = true)
	{
		if ($pVerifyFile) {
			if (file_exists($pValue)) {
				$this->_path = $pValue;

				if ($this->_width == 0 && $this->_height == 0) {
					// Get width/height from the image file itself
					list($this->_width, $this->_height) = getimagesize($pValue);
				}
			} else {
				throw new Exception("File $pValue not found!");
			}
		} else {
			$this->_path = $pValue;
		}
	}

	/**
	 * Get hash code identifying this drawing's content/position
	 *
	 * @return string	Hash code
	 */
	public function getHashCode()
	{
		return md5(
			  $this->_path
			. $this->_name
			. $this->_offsetX
			. $this->_offsetY
			. $this->_width
			. $this->_height
			. __CLASS__
		);
	}

	/**
	 * Implement PHP __clone to create a deep clone, not just a shallow copy.
	 */
	public function __clone()
	{
		$vars = get_object_vars($this);
		foreach ($vars as $key => $value) {
			if (is_object($value)) {
				$this->$key = clone $value;
			} else {
				$this->$key = $value;
			}
		}
	}
}
maestrano/fengoffice
public/assets/javascript/gelSheet/server/export/PHPExcel/Classes/PHPExcel/Worksheet/HeaderFooterDrawing.php
PHP
agpl-3.0
7,357
/* This file is part of VoltDB. * Copyright (C) 2008-2015 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.jdbc; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URISyntaxException; import java.net.URL; import java.sql.Connection; import java.sql.DriverManager; import java.sql.DriverPropertyInfo; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.logging.Logger; import java.util.regex.Pattern; public class Driver implements java.sql.Driver { public static final String JDBC_PROP_FILE_ENV = "VOLTDB_JDBC_PROPERTIES"; public static final String JDBC_PROP_FILE_PROP = "voltdb.jdbcproperties"; public static final String DEFAULT_PROP_FILENAME = "voltdb.properties"; //Driver URL prefix. private static final String URL_PREFIX = "jdbc:voltdb:"; // Static so it's unit-testable, yes, lazy me static String[] getServersFromURL(String url) { // get everything between the prefix and the ? 
String prefix = URL_PREFIX + "//"; int end = url.length(); if (url.indexOf("?") > 0) { end = url.indexOf("?"); } String servstring = url.substring(prefix.length(), end); return servstring.split(","); } static Map<String, String> getPropsFromURL(String url) { Map<String, String> results = new HashMap<String, String>(); if (url.indexOf("?") > 0) { String propstring = url.substring(url.indexOf("?") + 1); String[] props = propstring.split("&"); for (String prop : props) { if (prop.indexOf("=") > 0) { String[] comps = prop.split("="); results.put(comps[0], comps[1]); } } } return results; } private static final int MAJOR_VERSION = 1; private static final int MINOR_VERSION = 0; static { try { DriverManager.registerDriver(new Driver()); } catch (Exception e) {} } public Driver() throws SQLException { // Required for Class.forName().newInstance() } @Override public Connection connect(String url, Properties props) throws SQLException { if (acceptsURL(url)) { try { // Properties favored order: // 1) property file specified by env variable // 2) property file specified by system property // 3) property file with default name in same path as driver jar // 4) Properties specified in the URL // 5) Properties specified to getConnection() arg // Properties fileprops = tryToFindPropsFile(); // Copy the provided properties so we don't muck with // the object the caller gave us. 
Properties info = (Properties) props.clone(); String prefix = URL_PREFIX + "//"; if (!url.startsWith(prefix)) { throw SQLError.get(SQLError.ILLEGAL_ARGUMENT); } // get the server strings String[] servers = Driver.getServersFromURL(url); // get the props from the URL Map<String, String> urlprops = Driver.getPropsFromURL(url); for (Entry<String, String> e : urlprops.entrySet()) { // Favor the URL over the provided props info.setProperty(e.getKey(), e.getValue()); } // Favor the file-specified properties over the other props for (Enumeration<?> e = fileprops.propertyNames(); e.hasMoreElements();) { String key = (String) e.nextElement(); info.setProperty(key, fileprops.getProperty(key)); } String user = ""; String password = ""; boolean heavyweight = false; int maxoutstandingtxns = 0; for (Enumeration<?> e = info.propertyNames(); e.hasMoreElements();) { String key = (String) e.nextElement(); String value = info.getProperty(key); if (key.toLowerCase().equals("user")) user = value; else if (key.toLowerCase().equals("password")) password = value; else if (key.toLowerCase().equals("heavyweight")) heavyweight = (value.toLowerCase().equals("true") || value.toLowerCase().equals("yes") || value.toLowerCase().equals("1")); else if (key.toLowerCase().equals("maxoutstandingtxns")) maxoutstandingtxns = Integer.parseInt(value); // else - unknown; ignore } // Return JDBC connection wrapper for the client return new JDBC4Connection(JDBC4ClientConnectionPool.get(servers, user, password, heavyweight, maxoutstandingtxns), info); } catch (Exception x) { throw SQLError.get(x, SQLError.CONNECTION_UNSUCCESSFUL); } } return null; } @Override public boolean acceptsURL(String url) throws SQLException { return Pattern.compile("^jdbc:voltdb://.+", Pattern.CASE_INSENSITIVE).matcher(url).matches(); } @Override public int getMajorVersion() { return MAJOR_VERSION; } @Override public int getMinorVersion() { return MINOR_VERSION; } @Override public DriverPropertyInfo[] getPropertyInfo(String url, 
Properties loginProps) throws SQLException { return new DriverPropertyInfo[0]; } @Override public boolean jdbcCompliant() { return false; } public Logger getParentLogger() throws SQLFeatureNotSupportedException { throw new SQLFeatureNotSupportedException(); } private Properties tryToFindPropsFile() { Properties fileprops = new Properties(); String filename = null; // Check the env first filename = System.getenv(Driver.JDBC_PROP_FILE_ENV); if (filename == null) { filename = System.getProperty(Driver.JDBC_PROP_FILE_PROP); } if (filename == null) { // see if we can find a file in the default location URL pathToJar = this.getClass().getProtectionDomain() .getCodeSource().getLocation(); String tmp = null; try { tmp = new File(pathToJar.toURI()).getParent() + File.separator + DEFAULT_PROP_FILENAME; } catch (Exception e) { tmp = null; } filename = tmp; } if (filename != null) { File propfile = new File(filename); if (propfile.exists() && propfile.isFile()) { FileInputStream in = null; try { in = new FileInputStream(propfile); fileprops.load(in); } catch (FileNotFoundException fnfe) {} catch (IOException ioe) {} finally { if (in != null) { try { in.close(); } catch (IOException e) {} } } } } return fileprops; } }
kumarrus/voltdb
src/frontend/org/voltdb/jdbc/Driver.java
Java
agpl-3.0
8,523
/* ===========================================================
 * JFreeChart : a free chart library for the Java(tm) platform
 * ===========================================================
 *
 * (C) Copyright 2000-2009, by Object Refinery Limited and Contributors.
 *
 * Project Info:  http://www.jfree.org/jfreechart/index.html
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 * License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301,
 * USA.
 *
 * [Java is a trademark or registered trademark of Sun Microsystems, Inc.
 * in the United States and other countries.]
 *
 * ----------------------------------
 * CategoryDatasetSelectionState.java
 * ----------------------------------
 * (C) Copyright 2009, by Object Refinery Limited.
 *
 * Original Author:  David Gilbert (for Object Refinery Limited);
 * Contributor(s):   -;
 *
 * Changes
 * -------
 * 30-Jun-2009 : Version 1 (DG);
 *
 */

package org.jfree.data.category;

import java.io.Serializable;

import org.jfree.chart.util.PublicCloneable;
import org.jfree.data.general.DatasetSelectionState;

/**
 * Returns information about the selection state of items in an
 * {@link CategoryDataset}.  Classes that implement this interface must also
 * implement {@link PublicCloneable} to ensure that charts and datasets can be
 * correctly cloned.  Likewise, classes implementing this interface must also
 * implement {@link Serializable}.
 * <br><br>
 * The selection state might be part of a dataset implementation, or it could
 * be maintained in parallel with a dataset implementation that doesn't
 * directly support selection state.
 *
 * @since 1.2.0
 */
public interface CategoryDatasetSelectionState extends DatasetSelectionState {

    /**
     * Returns the number of rows in the dataset.
     *
     * @return The number of rows.
     */
    public int getRowCount();

    /**
     * Returns the number of columns in the dataset.
     *
     * @return The number of columns.
     */
    public int getColumnCount();

    /**
     * Returns <code>true</code> if the specified item is selected, and
     * <code>false</code> otherwise.
     *
     * @param row  the row index.
     * @param column  the column index.
     *
     * @return A boolean.
     */
    public boolean isSelected(int row, int column);

    /**
     * Sets the selection state for an item in the dataset.  No change event
     * is fired by this method.
     *
     * @param row  the row index.
     * @param column  the column index.
     * @param selected  the selection state.
     */
    public void setSelected(int row, int column, boolean selected);

    /**
     * Sets the selection state for the specified item and, if requested,
     * fires a change event.
     *
     * @param row  the row index.
     * @param column  the column index.
     * @param selected  the selection state.
     * @param notify  notify listeners?
     */
    public void setSelected(int row, int column, boolean selected,
            boolean notify);

    /**
     * Clears all selected items.
     */
    public void clearSelection();

    /**
     * Send an event to registered listeners to indicate that the selection
     * has changed.
     */
    public void fireSelectionEvent();

}
ilyessou/jfreechart
source/org/jfree/data/category/CategoryDatasetSelectionState.java
Java
lgpl-2.1
3,804