Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: TypeError
Message: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback: Traceback (most recent call last):
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
writer.write_table(table)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
pa_table = table_cast(pa_table, self._schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
return cast_table_to_schema(table, schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
TypeError: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
builder.download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
for job_id, done, content in self._prepare_split_single(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
text
string | meta
dict |
|---|---|
package org.apache.hadoop.hdfs.server.datanode;
import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.ClosedChannelException;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import javax.management.StandardMBean;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsBlocksMetadata;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.LengthInputStream;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaInputStreams;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaOutputStreams;
import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetricHelper;
import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.util.DataChecksum;
/**
* This class implements a simulated FSDataset.
*
* Blocks that are created are recorded but their data (plus their CRCs) are
* discarded.
* Fixed data is returned when blocks are read; a null CRC meta file is
* created for such data.
*
* This FSDataset does not remember any block information across its
* restarts; it does, however, offer an operation to inject blocks
* (see TestInjectionForSimulatedStorage for a usage example of injection).
*
* Note the synchronization is coarse grained - it is at each method.
*/
public class SimulatedFSDataset implements FsDatasetSpi<FsVolumeSpi> {
public final static int BYTE_MASK = 0xff;
/** Factory that builds {@link SimulatedFSDataset} instances for a DataNode. */
static class Factory extends FsDatasetSpi.Factory<SimulatedFSDataset> {
  @Override
  public SimulatedFSDataset newInstance(DataNode datanode,
      DataStorage storage, Configuration conf) throws IOException {
    return new SimulatedFSDataset(datanode, storage, conf);
  }

  /** @return true: this dataset keeps no real block data on disk. */
  @Override
  public boolean isSimulated() {
    return true;
  }
}
/**
 * Configure {@code conf} so that DataNodes created from it use the
 * simulated dataset factory instead of a real on-disk FsDataset.
 */
public static void setFactory(Configuration conf) {
  conf.set(DFSConfigKeys.DFS_DATANODE_FSDATASET_FACTORY_KEY,
      Factory.class.getName());
}
/**
 * Deterministic content generator: the byte at {@code offsetInBlk} of a
 * simulated block is derived from the block id, so readers can verify the
 * served data without any stored payload.
 */
public static byte simulatedByte(Block b, long offsetInBlk) {
  // Equivalent to the classic two-step form: low byte of the block id
  // plus the offset, reduced mod 256.
  final long base = b.getBlockId() & BYTE_MASK;
  return (byte) ((base + offsetInBlk) & BYTE_MASK);
}
/** Config key overriding the simulated capacity of this dataset, in bytes. */
public static final String CONFIG_PROPERTY_CAPACITY =
    "dfs.datanode.simulateddatastorage.capacity";
/** Default simulated capacity. 2L &lt;&lt; 40 is 2^41 bytes. */
public static final long DEFAULT_CAPACITY = 2L<<40; // 2 terabytes
/** Config key overriding the reported DatanodeStorage.State. */
public static final String CONFIG_PROPERTY_STATE =
    "dfs.datanode.simulateddatastorage.state";
private static final DatanodeStorage.State DEFAULT_STATE =
    DatanodeStorage.State.NORMAL;

// Bytes served as the CRC meta file for every simulated block: the 2-byte
// BlockMetadataHeader version (big-endian) followed by a NULL-checksum header.
static final byte[] nullCrcFileData;
static {
  DataChecksum checksum = DataChecksum.newDataChecksum(
      DataChecksum.Type.NULL, 16*1024 );
  byte[] nullCrcHeader = checksum.getHeader();
  nullCrcFileData = new byte[2 + nullCrcHeader.length];
  nullCrcFileData[0] = (byte) ((BlockMetadataHeader.VERSION >>> 8) & 0xff);
  nullCrcFileData[1] = (byte) (BlockMetadataHeader.VERSION & 0xff);
  for (int i = 0; i < nullCrcHeader.length; i++) {
    nullCrcFileData[i+2] = nullCrcHeader[i];
  }
}
/**
 * Metadata for a single simulated replica. Block contents are never
 * stored: written bytes are counted and discarded, and reads are served
 * from the deterministic pattern produced by simulatedByte().
 */
private class BInfo implements ReplicaInPipelineInterface {
  final Block theBlock;
  private boolean finalized = false; // if not finalized => ongoing creation
  SimulatedOutputStream oStream = null; // non-null only while not finalized
  private long bytesAcked; // bytes acknowledged so far (RBW state only)
  private long bytesRcvd;  // bytes received so far (RBW state only)
  private boolean pinned = false;

  /**
   * Creates replica metadata and reserves its expected length in the
   * simulated storage.
   * @param bpid block pool the replica belongs to
   * @param b identity of the block (defensively copied)
   * @param forWriting true for an ongoing (RBW) replica, false for FINALIZED
   * @throws IOException if there is not enough free simulated space
   */
  BInfo(String bpid, Block b, boolean forWriting) throws IOException {
    theBlock = new Block(b);
    if (theBlock.getNumBytes() < 0) {
      theBlock.setNumBytes(0);
    }
    if (!storage.alloc(bpid, theBlock.getNumBytes())) {
      // expected length - actual length may
      // be more - we find out at finalize
      DataNode.LOG.warn("Lack of free storage on a block alloc");
      throw new IOException("Creating block, no free space available");
    }
    if (forWriting) {
      finalized = false;
      oStream = new SimulatedOutputStream();
    } else {
      finalized = true;
      oStream = null;
    }
  }

  @Override
  public String getStorageUuid() {
    return storage.getStorageUuid();
  }

  @Override
  synchronized public long getGenerationStamp() {
    return theBlock.getGenerationStamp();
  }

  /** @return received length while RBW, final length once finalized. */
  @Override
  synchronized public long getNumBytes() {
    if (!finalized) {
      return bytesRcvd;
    } else {
      return theBlock.getNumBytes();
    }
  }

  @Override
  synchronized public void setNumBytes(long length) {
    if (!finalized) {
      bytesRcvd = length;
    } else {
      theBlock.setNumBytes(length);
    }
  }

  /** @return a stream serving this replica's simulated content. */
  synchronized SimulatedInputStream getIStream() {
    if (!finalized) {
      // throw new IOException("Trying to read an unfinalized block");
      return new SimulatedInputStream(oStream.getLength(), theBlock);
    } else {
      return new SimulatedInputStream(theBlock.getNumBytes(), theBlock);
    }
  }

  /**
   * Marks the replica FINALIZED, adjusting the storage accounting from the
   * length reserved at creation to the actual written length.
   * @throws IOException if already finalized, or if finalSize disagrees
   *         with the number of bytes written
   */
  synchronized void finalizeBlock(String bpid, long finalSize)
      throws IOException {
    if (finalized) {
      throw new IOException(
          "Finalizing a block that has already been finalized" +
          theBlock.getBlockId());
    }
    if (oStream == null) {
      DataNode.LOG.error("Null oStream on unfinalized block - bug");
      throw new IOException("Unexpected error on finalize");
    }
    if (oStream.getLength() != finalSize) {
      DataNode.LOG.warn("Size passed to finalize (" + finalSize +
          ")does not match what was written:" + oStream.getLength());
      throw new IOException(
        "Size passed to finalize does not match the amount of data written");
    }
    // We had allocated the expected length when block was created;
    // adjust if necessary
    long extraLen = finalSize - theBlock.getNumBytes();
    if (extraLen > 0) {
      if (!storage.alloc(bpid,extraLen)) {
        DataNode.LOG.warn("Lack of free storage on a block alloc");
        throw new IOException("Creating block, no free space available");
      }
    } else {
      storage.free(bpid, -extraLen);
    }
    theBlock.setNumBytes(finalSize);

    finalized = true;
    oStream = null;
    return;
  }

  /** Reverts a FINALIZED replica back to RBW, e.g. for append. */
  synchronized void unfinalizeBlock() throws IOException {
    if (!finalized) {
      throw new IOException("Unfinalized a block that's not finalized "
          + theBlock);
    }
    finalized = false;
    oStream = new SimulatedOutputStream();
    long blockLen = theBlock.getNumBytes();
    oStream.setLength(blockLen);
    bytesRcvd = blockLen;
    bytesAcked = blockLen;
  }

  /** @return a stream over the shared NULL-checksum meta file contents. */
  SimulatedInputStream getMetaIStream() {
    return new SimulatedInputStream(nullCrcFileData);
  }

  synchronized boolean isFinalized() {
    return finalized;
  }

  @Override
  synchronized public ReplicaOutputStreams createStreams(boolean isCreate,
      DataChecksum requestedChecksum) throws IOException {
    if (finalized) {
      throw new IOException("Trying to write to a finalized replica "
          + theBlock);
    } else {
      // The CRC stream, like the data stream, merely counts and discards.
      SimulatedOutputStream crcStream = new SimulatedOutputStream();
      return new ReplicaOutputStreams(oStream, crcStream, requestedChecksum,
          volume.isTransientStorage());
    }
  }

  @Override
  synchronized public long getBlockId() {
    return theBlock.getBlockId();
  }

  @Override
  synchronized public long getVisibleLength() {
    return getBytesAcked();
  }

  @Override
  public ReplicaState getState() {
    return finalized ? ReplicaState.FINALIZED : ReplicaState.RBW;
  }

  @Override
  synchronized public long getBytesAcked() {
    if (finalized) {
      return theBlock.getNumBytes();
    } else {
      return bytesAcked;
    }
  }

  @Override
  synchronized public void setBytesAcked(long bytesAcked) {
    if (!finalized) {
      this.bytesAcked = bytesAcked;
    }
  }

  @Override
  public void releaseAllBytesReserved() {
    // No-op: the simulated dataset does not reserve RBW space.
  }

  @Override
  synchronized public long getBytesOnDisk() {
    if (finalized) {
      return theBlock.getNumBytes();
    } else {
      return oStream.getLength();
    }
  }

  @Override
  public void setLastChecksumAndDataLen(long dataLength, byte[] lastChecksum) {
    // Checksum bytes are discarded; only the data length is tracked.
    oStream.setLength(dataLength);
  }

  @Override
  public ChunkChecksum getLastChecksumAndDataLen() {
    return new ChunkChecksum(oStream.getLength(), null);
  }

  @Override
  public boolean isOnTransientStorage() {
    return false;
  }
}
/**
* Class is used for tracking block pool storage utilization similar
* to {@link BlockPoolSlice}
*/
/**
 * Tracks how many bytes a single block pool has consumed, mirroring the
 * bookkeeping that {@link BlockPoolSlice} performs for real storage.
 */
private static class SimulatedBPStorage {
  // Bytes currently charged to this block pool.
  private long used = 0;

  SimulatedBPStorage() {
  }

  long getUsed() {
    return used;
  }

  void alloc(long amount) {
    used += amount;
  }

  void free(long amount) {
    used -= amount;
  }
}
/**
* Class used for tracking datanode level storage utilization similar
* to {@link FSVolumeSet}
*/
private static class SimulatedStorage {
  // Per-block-pool usage accounting.
  private final Map<String, SimulatedBPStorage> map =
      new HashMap<String, SimulatedBPStorage>();

  private final long capacity;  // in bytes
  private final DatanodeStorage dnStorage;

  SimulatedStorage(long cap, DatanodeStorage.State state) {
    capacity = cap;
    dnStorage = new DatanodeStorage(
        "SimulatedStorage-" + DatanodeStorage.generateUuid(),
        state, StorageType.DEFAULT);
  }

  synchronized long getFree() {
    return capacity - getUsed();
  }

  long getCapacity() {
    return capacity;
  }

  /** @return total bytes used across all block pools. */
  synchronized long getUsed() {
    long used = 0;
    for (SimulatedBPStorage bpStorage : map.values()) {
      used += bpStorage.getUsed();
    }
    return used;
  }

  synchronized long getBlockPoolUsed(String bpid) throws IOException {
    return getBPStorage(bpid).getUsed();
  }

  int getNumFailedVolumes() {
    // Simulated volumes never fail.
    return 0;
  }

  /**
   * Try to reserve {@code amount} bytes for the given block pool.
   * @return true if the reservation succeeded, false if out of space
   * @throws IOException if the block pool is unknown
   */
  synchronized boolean alloc(String bpid, long amount) throws IOException {
    if (getFree() >= amount) {
      getBPStorage(bpid).alloc(amount);
      return true;
    }
    return false;
  }

  synchronized void free(String bpid, long amount) throws IOException {
    getBPStorage(bpid).free(amount);
  }

  synchronized void addBlockPool(String bpid) {
    SimulatedBPStorage bpStorage = map.get(bpid);
    if (bpStorage != null) {
      return; // already registered
    }
    map.put(bpid, new SimulatedBPStorage());
  }

  synchronized void removeBlockPool(String bpid) {
    map.remove(bpid);
  }

  private SimulatedBPStorage getBPStorage(String bpid) throws IOException {
    SimulatedBPStorage bpStorage = map.get(bpid);
    if (bpStorage == null) {
      throw new IOException("block pool " + bpid + " not found");
    }
    return bpStorage;
  }

  String getStorageUuid() {
    return dnStorage.getStorageID();
  }

  DatanodeStorage getDnStorage() {
    return dnStorage;
  }

  /**
   * Build a StorageReport for this storage.
   * Fix: previously dereferenced {@code map.get(bpid)} unconditionally and
   * threw NullPointerException for an unknown block pool; now reports zero
   * block-pool usage in that case.
   */
  synchronized StorageReport getStorageReport(String bpid) {
    final SimulatedBPStorage bpStorage = map.get(bpid);
    final long bpUsed = bpStorage != null ? bpStorage.getUsed() : 0;
    return new StorageReport(dnStorage,
        false, getCapacity(), getUsed(), getFree(), bpUsed);
  }
}
/** Minimal FsVolumeSpi view over the single {@link SimulatedStorage}. */
static class SimulatedVolume implements FsVolumeSpi {
  private final SimulatedStorage storage;

  SimulatedVolume(final SimulatedStorage storage) {
    this.storage = storage;
  }

  @Override
  public FsVolumeReference obtainReference() throws ClosedChannelException {
    // No reference counting in the simulation.
    return null;
  }

  @Override
  public String getStorageID() {
    return storage.getStorageUuid();
  }

  @Override
  public String[] getBlockPoolList() {
    return new String[0];
  }

  @Override
  public long getAvailable() throws IOException {
    return storage.getCapacity() - storage.getUsed();
  }

  // The path-based accessors below have no meaning for a simulated volume
  // and return null.
  @Override
  public String getBasePath() {
    return null;
  }

  @Override
  public String getPath(String bpid) throws IOException {
    return null;
  }

  @Override
  public File getFinalizedDir(String bpid) throws IOException {
    return null;
  }

  @Override
  public StorageType getStorageType() {
    return null;
  }

  @Override
  public boolean isTransientStorage() {
    return false;
  }

  @Override
  public void reserveSpaceForRbw(long bytesToReserve) {
    // No-op: space accounting is handled by SimulatedStorage.
  }

  @Override
  public void releaseReservedSpace(long bytesToRelease) {
    // No-op: nothing is ever reserved.
  }

  @Override
  public BlockIterator newBlockIterator(String bpid, String name) {
    throw new UnsupportedOperationException();
  }

  @Override
  public BlockIterator loadBlockIterator(String bpid, String name)
      throws IOException {
    throw new UnsupportedOperationException();
  }

  @Override
  public FsDatasetSpi getDataset() {
    throw new UnsupportedOperationException();
  }
}
// Per block pool: map from block identity to its simulated replica metadata.
private final Map<String, Map<Block, BInfo>> blockMap
    = new HashMap<String, Map<Block,BInfo>>();
// The single simulated storage backing this dataset.
private final SimulatedStorage storage;
// The single simulated volume view over 'storage'.
private final SimulatedVolume volume;
private final String datanodeUuid;
private final DataNode datanode; // may be null (see the two-arg constructor)
public SimulatedFSDataset(DataStorage storage, Configuration conf) {
  // Convenience constructor for callers that have no DataNode instance.
  this(null, storage, conf);
}

/**
 * @param datanode owning DataNode; may be null (then block deletions are
 *        not reported via notifyNamenodeDeletedBlock)
 * @param storage used only to obtain storage/datanode UUIDs; may be null,
 *        in which case a fresh simulated UUID is generated
 * @param conf source of the simulated capacity and state settings
 */
public SimulatedFSDataset(DataNode datanode, DataStorage storage, Configuration conf) {
  this.datanode = datanode;
  if (storage != null) {
    for (int i = 0; i < storage.getNumStorageDirs(); ++i) {
      storage.createStorageID(storage.getStorageDir(i), false);
    }
    this.datanodeUuid = storage.getDatanodeUuid();
  } else {
    this.datanodeUuid = "SimulatedDatanode-" + DataNode.generateUuid();
  }

  registerMBean(datanodeUuid);
  this.storage = new SimulatedStorage(
      conf.getLong(CONFIG_PROPERTY_CAPACITY, DEFAULT_CAPACITY),
      conf.getEnum(CONFIG_PROPERTY_STATE, DEFAULT_STATE));
  this.volume = new SimulatedVolume(this.storage);
}
/**
 * Inject pre-existing finalized blocks into the dataset, e.g. to simulate
 * a DataNode that already holds data. The whole list is validated first:
 * if any entry is null or already present, nothing is injected.
 * @throws IOException if a block already exists or space runs out
 */
public synchronized void injectBlocks(String bpid,
    Iterable<? extends Block> injectBlocks) throws IOException {
  ExtendedBlock blk = new ExtendedBlock();
  if (injectBlocks != null) {
    for (Block b: injectBlocks) { // if any blocks in list is bad, reject list
      if (b == null) {
        throw new NullPointerException("Null blocks in block list");
      }
      blk.set(bpid, b);
      if (isValidBlock(blk)) {
        throw new IOException("Block already exists in block list");
      }
    }
    Map<Block, BInfo> map = blockMap.get(bpid);
    if (map == null) {
      map = new HashMap<Block, BInfo>();
      blockMap.put(bpid, map);
    }

    for (Block b: injectBlocks) {
      // Injected replicas are created already FINALIZED (forWriting=false).
      BInfo binfo = new BInfo(bpid, b, false);
      map.put(binfo.theBlock, binfo);
    }
  }
}
/**
 * Get a map for a given block pool Id.
 * @throws IOException if the block pool does not exist
 */
private Map<Block, BInfo> getMap(String bpid) throws IOException {
  final Map<Block, BInfo> map = blockMap.get(bpid);
  if (map == null) {
    throw new IOException("Non existent blockpool " + bpid);
  }
  return map;
}

/**
 * Finalize the named replica, verifying the expected length.
 * @throws IOException if the replica does not exist
 */
@Override // FsDatasetSpi
public synchronized void finalizeBlock(ExtendedBlock b) throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null) {
    throw new IOException("Finalizing a non existing block " + b);
  }
  binfo.finalizeBlock(b.getBlockPoolId(), b.getNumBytes());
}

/** Discard an RBW replica entirely; finalized replicas are left alone. */
@Override // FsDatasetSpi
public synchronized void unfinalizeBlock(ExtendedBlock b) throws IOException{
  if (isValidRbw(b)) {
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    map.remove(b.getLocalBlock());
  }
}
/** @return a report of all FINALIZED replicas in the given block pool. */
synchronized BlockListAsLongs getBlockReport(String bpid) {
  BlockListAsLongs.Builder report = BlockListAsLongs.builder();
  final Map<Block, BInfo> map = blockMap.get(bpid);
  if (map != null) {
    for (BInfo b : map.values()) {
      if (b.isFinalized()) {
        report.add(b);
      }
    }
  }
  return report.build();
}

/** @return block reports keyed by the single simulated storage. */
@Override
public synchronized Map<DatanodeStorage, BlockListAsLongs> getBlockReports(
    String bpid) {
  return Collections.singletonMap(storage.getDnStorage(), getBlockReport(bpid));
}
@Override // FsDatasetSpi
public List<Long> getCacheReport(String bpid) {
  // Nothing is ever cached, so the report is always empty.
  return new LinkedList<Long>();
}

@Override // FSDatasetMBean
public long getCapacity() {
  return storage.getCapacity();
}

@Override // FSDatasetMBean
public long getDfsUsed() {
  return storage.getUsed();
}

@Override // FSDatasetMBean
public long getBlockPoolUsed(String bpid) throws IOException {
  return storage.getBlockPoolUsed(bpid);
}

@Override // FSDatasetMBean
public long getRemaining() {
  return storage.getFree();
}

@Override // FSDatasetMBean
public int getNumFailedVolumes() {
  // Always 0: simulated volumes never fail.
  return storage.getNumFailedVolumes();
}

@Override // FSDatasetMBean
public String[] getFailedStorageLocations() {
  return null;
}

@Override // FSDatasetMBean
public long getLastVolumeFailureDate() {
  return 0;
}

@Override // FSDatasetMBean
public long getEstimatedCapacityLostTotal() {
  return 0;
}

@Override // FsDatasetSpi
public VolumeFailureSummary getVolumeFailureSummary() {
  return new VolumeFailureSummary(ArrayUtils.EMPTY_STRING_ARRAY, 0, 0);
}

// Cache metrics: the simulation has no cache, so all of these are zero.
@Override // FSDatasetMBean
public long getCacheUsed() {
  return 0l;
}

@Override // FSDatasetMBean
public long getCacheCapacity() {
  return 0l;
}

@Override // FSDatasetMBean
public long getNumBlocksCached() {
  return 0l;
}

@Override
public long getNumBlocksFailedToCache() {
  return 0l;
}

@Override
public long getNumBlocksFailedToUncache() {
  return 0l;
}
/**
 * Get metrics from the metrics source
 *
 * @param collector to contain the resulting metrics snapshot
 * @param all if true, return all metrics even if unchanged.
 */
@Override
public void getMetrics(MetricsCollector collector, boolean all) {
  try {
    DataNodeMetricHelper.getMetrics(collector, this, "SimulatedFSDataset");
  } catch (Exception e){
    // Deliberately best-effort: a metrics failure must not disturb callers.
  }
}
/**
 * @return the length of the named replica (received bytes while RBW,
 *         final length once finalized).
 * @throws IOException if the block pool or block does not exist
 */
@Override // FsDatasetSpi
public synchronized long getLength(ExtendedBlock b) throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null) {
    // Fix: the previous message was copy-pasted from finalizeBlock() and
    // misleadingly said "Finalizing a non existing block".
    throw new IOException("getLength of a non existing block " + b);
  }
  return binfo.getNumBytes();
}
/** @return the replica, or null if the pool or block is unknown. */
@Override
@Deprecated
public Replica getReplica(String bpid, long blockId) {
  // NOTE(review): unlike most accessors here, this one is not
  // synchronized — confirm whether callers rely on that.
  final Map<Block, BInfo> map = blockMap.get(bpid);
  if (map != null) {
    return map.get(new Block(blockId));
  }
  return null;
}

/** @return the replica's toString(), or the literal "null" if absent. */
@Override
public synchronized String getReplicaString(String bpid, long blockId) {
  Replica r = null;
  final Map<Block, BInfo> map = blockMap.get(bpid);
  if (map != null) {
    r = map.get(new Block(blockId));
  }
  return r == null? "null": r.toString();
}
/**
 * @return a fresh Block carrying the stored id, generation stamp and
 *         length, or null if the pool or block is unknown.
 */
@Override // FsDatasetSpi
public Block getStoredBlock(String bpid, long blkid) throws IOException {
  final Map<Block, BInfo> map = blockMap.get(bpid);
  if (map != null) {
    BInfo binfo = map.get(new Block(blkid));
    if (binfo == null) {
      return null;
    }
    return new Block(blkid, binfo.getGenerationStamp(), binfo.getNumBytes());
  }
  return null;
}
/**
 * Remove the given blocks, freeing their simulated space and (when a
 * DataNode is attached) notifying the NameNode of each deletion.
 * Missing blocks are logged and reported in a single IOException at the
 * end, after every removable block has been processed.
 */
@Override // FsDatasetSpi
public synchronized void invalidate(String bpid, Block[] invalidBlks)
    throws IOException {
  boolean error = false;
  if (invalidBlks == null) {
    return;
  }
  final Map<Block, BInfo> map = getMap(bpid);
  for (Block b: invalidBlks) {
    if (b == null) {
      continue;
    }
    BInfo binfo = map.get(b);
    if (binfo == null) {
      error = true;
      DataNode.LOG.warn("Invalidate: Missing block");
      continue;
    }
    storage.free(bpid, binfo.getNumBytes());
    map.remove(b);
    if (datanode != null) {
      datanode.notifyNamenodeDeletedBlock(new ExtendedBlock(bpid, b),
          binfo.getStorageUuid());
    }
  }
  if (error) {
    throw new IOException("Invalidate: Missing blocks.");
  }
}
// Caching is not modeled by the simulation: cache/uncache reject, and
// isCached is always false.
@Override // FSDatasetSpi
public void cache(String bpid, long[] cacheBlks) {
  throw new UnsupportedOperationException(
      "SimulatedFSDataset does not support cache operation!");
}

@Override // FSDatasetSpi
public void uncache(String bpid, long[] uncacheBlks) {
  throw new UnsupportedOperationException(
      "SimulatedFSDataset does not support uncache operation!");
}

@Override // FSDatasetSpi
public boolean isCached(String bpid, long blockId) {
  return false;
}

/** @return replica metadata for the block, or null if pool/block unknown. */
private BInfo getBInfo(final ExtendedBlock b) {
  final Map<Block, BInfo> map = blockMap.get(b.getBlockPoolId());
  return map == null? null: map.get(b.getLocalBlock());
}

@Override // {@link FsDatasetSpi}
public boolean contains(ExtendedBlock block) {
  return getBInfo(block) != null;
}
/**
 * Check if a block is valid.
 *
 * @param b The block to check.
 * @param minLength The minimum length that the block must have. May be 0.
 *        NOTE(review): this simulated implementation accepts but never
 *        enforces minLength — confirm no caller depends on it.
 * @param state If this is null, it is ignored. If it is non-null, we
 *        will check that the replica has this state.
 *        NOTE(review): the code below does not actually special-case null:
 *        a null state falls into the non-FINALIZED branch and throws for a
 *        finalized replica — verify against the interface contract.
 *
 * @throws ReplicaNotFoundException If the replica is not found
 *
 * @throws UnexpectedReplicaStateException If the replica is not in the
 *         expected state.
 */
@Override // {@link FsDatasetSpi}
public void checkBlock(ExtendedBlock b, long minLength, ReplicaState state)
    throws ReplicaNotFoundException, UnexpectedReplicaStateException {
  final BInfo binfo = getBInfo(b);
  if (binfo == null) {
    throw new ReplicaNotFoundException(b);
  }
  // Only FINALIZED vs. not-FINALIZED is distinguished: the simulation has
  // no other replica states.
  if ((state == ReplicaState.FINALIZED && !binfo.isFinalized()) ||
      (state != ReplicaState.FINALIZED && binfo.isFinalized())) {
    throw new UnexpectedReplicaStateException(b,state);
  }
}
/** @return true iff the block exists and is FINALIZED. */
@Override // FsDatasetSpi
public synchronized boolean isValidBlock(ExtendedBlock b) {
  try {
    checkBlock(b, 0, ReplicaState.FINALIZED);
  } catch (IOException e) {
    return false;
  }
  return true;
}

/* check if a block is created but not finalized */
@Override
public synchronized boolean isValidRbw(ExtendedBlock b) {
  try {
    checkBlock(b, 0, ReplicaState.RBW);
  } catch (IOException e) {
    return false;
  }
  return true;
}

@Override
public String toString() {
  return getStorageInfo();
}
/**
 * Open a FINALIZED replica for append: it is reverted to RBW with a new
 * discard-only output stream.
 * NOTE(review): newGS and expectedBlockLen are ignored by the simulation.
 * @throws ReplicaNotFoundException if the block is absent or not FINALIZED
 */
@Override // FsDatasetSpi
public synchronized ReplicaHandler append(
    ExtendedBlock b, long newGS, long expectedBlockLen) throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null || !binfo.isFinalized()) {
    throw new ReplicaNotFoundException("Block " + b
        + " is not valid, and cannot be appended to.");
  }
  binfo.unfinalizeBlock();
  return new ReplicaHandler(binfo, null);
}
/**
 * Recover a replica for append, bumping its generation stamp.
 * Fix: the old map entry is now removed via {@code b.getLocalBlock()} —
 * the map's keys are {@link Block}, so removing by the ExtendedBlock
 * wrapper (as before) does not match and left a stale entry behind
 * (compare recoverClose, which already removed by the local block).
 * @throws ReplicaNotFoundException if the block does not exist
 */
@Override // FsDatasetSpi
public synchronized ReplicaHandler recoverAppend(
    ExtendedBlock b, long newGS, long expectedBlockLen) throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null) {
    throw new ReplicaNotFoundException("Block " + b
        + " is not valid, and cannot be appended to.");
  }
  if (binfo.isFinalized()) {
    binfo.unfinalizeBlock();
  }
  // Re-key the replica under its new generation stamp.
  map.remove(b.getLocalBlock());
  binfo.theBlock.setGenerationStamp(newGS);
  map.put(binfo.theBlock, binfo);
  return new ReplicaHandler(binfo, null);
}
/**
 * Close/finalize a replica during pipeline recovery and bump its
 * generation stamp.
 * Fix: now {@code synchronized}, matching every other method here that
 * mutates the replica map (append, recoverAppend, recoverRbw, ...).
 * @return the storage uuid hosting the replica
 * @throws ReplicaNotFoundException if the block does not exist
 */
@Override // FsDatasetSpi
public synchronized String recoverClose(ExtendedBlock b, long newGS,
    long expectedBlockLen) throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null) {
    throw new ReplicaNotFoundException("Block " + b
        + " is not valid, and cannot be appended to.");
  }
  if (!binfo.isFinalized()) {
    binfo.finalizeBlock(b.getBlockPoolId(), binfo.getNumBytes());
  }
  // Re-key the replica under its new generation stamp.
  map.remove(b.getLocalBlock());
  binfo.theBlock.setGenerationStamp(newGS);
  map.put(binfo.theBlock, binfo);
  return binfo.getStorageUuid();
}
/**
 * Recover an RBW replica for a new write pipeline, bumping its
 * generation stamp.
 * Fix: the old map entry is now removed via {@code b.getLocalBlock()} —
 * the map's keys are {@link Block}, so removing by the ExtendedBlock
 * wrapper (as before) does not match and left a stale entry behind.
 * @throws ReplicaNotFoundException if the block does not exist
 * @throws ReplicaAlreadyExistsException if it is already FINALIZED
 */
@Override // FsDatasetSpi
public synchronized ReplicaHandler recoverRbw(
    ExtendedBlock b, long newGS, long minBytesRcvd, long maxBytesRcvd)
    throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if ( binfo == null) {
    throw new ReplicaNotFoundException("Block " + b
        + " does not exist, and cannot be appended to.");
  }
  if (binfo.isFinalized()) {
    throw new ReplicaAlreadyExistsException("Block " + b
        + " is valid, and cannot be written to.");
  }
  // Re-key the replica under its new generation stamp.
  map.remove(b.getLocalBlock());
  binfo.theBlock.setGenerationStamp(newGS);
  map.put(binfo.theBlock, binfo);
  return new ReplicaHandler(binfo, null);
}
@Override // FsDatasetSpi
public synchronized ReplicaHandler createRbw(
    StorageType storageType, ExtendedBlock b,
    boolean allowLazyPersist) throws IOException {
  // storageType and allowLazyPersist are irrelevant to the simulation.
  return createTemporary(storageType, b);
}

/**
 * Create a brand-new RBW replica for the given block.
 * @throws ReplicaAlreadyExistsException if the block already exists in
 *         either FINALIZED or RBW state
 */
@Override // FsDatasetSpi
public synchronized ReplicaHandler createTemporary(
    StorageType storageType, ExtendedBlock b) throws IOException {
  if (isValidBlock(b)) {
    throw new ReplicaAlreadyExistsException("Block " + b +
        " is valid, and cannot be written to.");
  }
  if (isValidRbw(b)) {
    throw new ReplicaAlreadyExistsException("Block " + b +
        " is being written, and cannot be written to.");
  }
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = new BInfo(b.getBlockPoolId(), b.getLocalBlock(), true);
  map.put(binfo.theBlock, binfo);
  return new ReplicaHandler(binfo, null);
}
/** @return a stream serving the simulated content of the given block. */
synchronized InputStream getBlockInputStream(ExtendedBlock b
    ) throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null) {
    throw new IOException("No such Block " + b );
  }
  return binfo.getIStream();
}

@Override // FsDatasetSpi
public synchronized InputStream getBlockInputStream(ExtendedBlock b,
    long seekOffset) throws IOException {
  InputStream result = getBlockInputStream(b);
  // Simulated streams are not seekable, so skip forward instead.
  IOUtils.skipFully(result, seekOffset);
  return result;
}

/** Not supported */
@Override // FsDatasetSpi
public ReplicaInputStreams getTmpInputStreams(ExtendedBlock b, long blkoff,
    long ckoff) throws IOException {
  throw new IOException("Not supported");
}
/**
 * @return a length-wrapped stream over the shared NULL-checksum meta file
 *         contents for a finalized replica.
 * @throws IOException if the block is unknown or still being written
 */
@Override // FsDatasetSpi
public synchronized LengthInputStream getMetaDataInputStream(ExtendedBlock b
    ) throws IOException {
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null) {
    throw new IOException("No such Block " + b );
  }
  // Fix: use the synchronized accessor instead of reading BInfo's private
  // 'finalized' field directly, matching every other state check here.
  if (!binfo.isFinalized()) {
    throw new IOException("Block " + b +
        " is being written, its meta cannot be read");
  }
  final SimulatedInputStream sin = binfo.getMetaIStream();
  return new LengthInputStream(sin, sin.getLength());
}
@Override
public Set<File> checkDataDir() {
  // nothing to check for simulated data set
  return null;
}

@Override // FsDatasetSpi
public synchronized void adjustCrcChannelPosition(ExtendedBlock b,
    ReplicaOutputStreams stream,
    int checksumSize)
    throws IOException {
  // No-op: there is no real CRC file whose position could be adjusted.
}
/**
* Simulated input and output streams
*
*/
/**
 * An InputStream that serves either a fixed byte[] (the null-CRC meta
 * data) or the deterministic simulated content of a block (see
 * simulatedByte()).
 */
static private class SimulatedInputStream extends java.io.InputStream {
  final long length; // bytes
  int currentPos = 0;
  byte[] data = null;      // non-null => serve these bytes
  Block theBlock = null;   // non-null => serve simulatedByte(theBlock, pos)

  /**
   * An input stream of size l serving the simulated content of block b.
   * @param l size of the stream
   * @param b block whose deterministic content is served
   */
  SimulatedInputStream(long l, Block b) {
    length = l;
    theBlock = b;
  }

  /**
   * An input stream of the supplied data
   * @param iData data to construct the stream
   */
  SimulatedInputStream(byte[] iData) {
    data = iData;
    length = data.length;
  }

  /**
   * @return the length of the input stream
   */
  long getLength() {
    return length;
  }

  @Override
  public int read() throws IOException {
    if (currentPos >= length) { // EOF
      return -1;
    }
    if (data != null) {
      // Fix: mask to honor the InputStream contract of returning 0..255.
      // Returning the raw (signed) byte made values >= 0x80 appear as
      // negative results, indistinguishable from EOF for 0xFF.
      return data[currentPos++] & BYTE_MASK;
    } else {
      return simulatedByte(theBlock, currentPos++) & BYTE_MASK;
    }
  }

  @Override
  public int read(byte[] b) throws IOException {
    if (b == null) {
      throw new NullPointerException();
    }
    if (b.length == 0) {
      return 0;
    }
    if (currentPos >= length) { // EOF
      return -1;
    }
    int bytesRead = (int) Math.min(b.length, length - currentPos);
    if (data != null) {
      System.arraycopy(data, currentPos, b, 0, bytesRead);
    } else { // fill with the deterministic simulated pattern
      for (int i = 0; i < bytesRead; i++) {
        b[i] = simulatedByte(theBlock, currentPos + i);
      }
    }
    currentPos += bytesRead;
    return bytesRead;
  }
}
/**
* This class implements an output stream that merely throws its data away, but records its
* length.
*
*/
/**
 * An output stream that discards everything written to it while keeping a
 * running count of how many bytes have gone by.
 */
static private class SimulatedOutputStream extends OutputStream {
  long length = 0;

  /** Creates a sink whose recorded length starts at zero. */
  SimulatedOutputStream() {
  }

  /**
   * @return the length of the data created so far.
   */
  long getLength() {
    return length;
  }

  /** Overrides the recorded length. */
  void setLength(long length) {
    this.length = length;
  }

  @Override
  public void write(int arg0) throws IOException {
    length += 1;
  }

  @Override
  public void write(byte[] b) throws IOException {
    length += b.length;
  }

  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    length += len;
  }
}
/** JMX handle of the registered MBean; null until registerMBean() succeeds. */
private ObjectName mbeanName;

/**
 * Register the FSDataset MBean using the name
 * "hadoop:service=DataNode,name=FSDatasetState-<storageid>"
 * We use storage id for MBean name since a minicluster within a single
 * Java VM may have multiple Simulated Datanodes.
 */
void registerMBean(final String storageId) {
  // We wrap to bypass standard mbean naming convention.
  // This wrapping can be removed in java 6 as it is more flexible in
  // package naming for mbeans and their impl.
  StandardMBean bean;
  try {
    bean = new StandardMBean(this,FSDatasetMBean.class);
    mbeanName = MBeans.register("DataNode", "FSDatasetState-"+
        storageId, bean);
  } catch (NotCompliantMBeanException e) {
    // Non-fatal: the dataset still works without JMX visibility.
    DataNode.LOG.warn("Error registering FSDatasetState MBean", e);
  }
  // NOTE(review): this success message is logged even when registration
  // failed in the catch block above.
  DataNode.LOG.info("Registered FSDatasetState MBean");
}
@Override
public void shutdown() {
  // Unregister JMX only if registerMBean() succeeded earlier.
  if (mbeanName != null) MBeans.unregister(mbeanName);
}

@Override
public String getStorageInfo() {
  // Human-readable identity string for this simulated dataset.
  return "Simulated FSDataset-" + datanodeUuid;
}

@Override
public boolean hasEnoughResource() {
  // The simulation never runs out of "disk", so always report healthy.
  return true;
}
/**
 * Builds recovery info for the given block from the simulated replica map.
 * The replica state is reported as FINALIZED when finalized, otherwise RBW.
 * @throws IOException if the block is not in this dataset
 */
@Override
public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock)
    throws IOException {
  ExtendedBlock b = rBlock.getBlock();
  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
  BInfo binfo = map.get(b.getLocalBlock());
  if (binfo == null) {
    throw new IOException("No such Block " + b );
  }
  return new ReplicaRecoveryInfo(binfo.getBlockId(), binfo.getBytesOnDisk(),
      binfo.getGenerationStamp(),
      binfo.isFinalized()?ReplicaState.FINALIZED : ReplicaState.RBW);
}
@Override // FsDatasetSpi
public String updateReplicaUnderRecovery(ExtendedBlock oldBlock,
                                         long recoveryId,
                                         long newBlockId,
                                         long newlength) {
  // Caller does not care about the exact Storage UUID returned.
  return datanodeUuid;
}

@Override // FsDatasetSpi
public long getReplicaVisibleLength(ExtendedBlock block) {
  // Simulation: every byte of the block is considered visible to readers.
  return block.getNumBytes();
}
@Override // FsDatasetSpi
public void addBlockPool(String bpid, Configuration conf) {
  // Start the pool with an empty replica map and mirror it in storage.
  Map<Block, BInfo> map = new HashMap<Block, BInfo>();
  blockMap.put(bpid, map);
  storage.addBlockPool(bpid);
}

@Override // FsDatasetSpi
public void shutdownBlockPool(String bpid) {
  // Drop the pool's replica map and its storage bookkeeping.
  blockMap.remove(bpid);
  storage.removeBlockPool(bpid);
}

@Override // FsDatasetSpi
public void deleteBlockPool(String bpid, boolean force) {
  // Nothing on disk to delete in the simulation; intentionally a no-op.
  return;
}
/**
 * Promotes a temporary replica to RBW (replica-being-written).
 *
 * In the simulation the same BInfo object simply continues to serve as the
 * RBW replica, so this method only validates that the replica exists and
 * has not already been finalized.
 *
 * @throws IOException if the block pool or replica is unknown, or the
 *         replica is already finalized
 */
@Override
public ReplicaInPipelineInterface convertTemporaryToRbw(ExtendedBlock temporary)
    throws IOException {
  final Map<Block, BInfo> pool = blockMap.get(temporary.getBlockPoolId());
  if (pool == null) {
    throw new IOException("Block pool not found, temporary=" + temporary);
  }
  final BInfo replica = pool.get(temporary.getLocalBlock());
  if (replica == null) {
    throw new IOException("Block not found, temporary=" + temporary);
  }
  if (replica.isFinalized()) {
    throw new IOException("Replica already finalized, temporary="
        + temporary + ", r=" + replica);
  }
  return replica;
}
@Override
public BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock b) {
  // No local files back simulated replicas, so short-circuit reads are out.
  throw new UnsupportedOperationException();
}

@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(String bpid, long[] blockIds)
    throws IOException {
  throw new UnsupportedOperationException();
}

@Override
public void enableTrash(String bpid) {
  throw new UnsupportedOperationException();
}

@Override
public void clearTrash(String bpid) {
  // No trash directory exists in the simulation; nothing to clear.
}

@Override
public boolean trashEnabled(String bpid) {
  // Trash is never enabled for the simulated dataset.
  return false;
}

@Override
public void setRollingUpgradeMarker(String bpid) {
  // No-op: rolling-upgrade markers are not modeled.
}

@Override
public void clearRollingUpgradeMarker(String bpid) {
  // No-op: rolling-upgrade markers are not modeled.
}

@Override
public void checkAndUpdate(String bpid, long blockId, File diskFile,
    File diskMetaFile, FsVolumeSpi vol) throws IOException {
  // Directory-scan reconciliation has no meaning without real files.
  throw new UnsupportedOperationException();
}
@Override
public FsVolumeReferences getFsVolumeReferences() {
  throw new UnsupportedOperationException();
}

@Override
public void addVolume(
    final StorageLocation location,
    final List<NamespaceInfo> nsInfos) throws IOException {
  // The simulated dataset has a single fixed volume; hot-add is unsupported.
  throw new UnsupportedOperationException();
}
/**
 * Returns this dataset's single DatanodeStorage when the UUID matches,
 * otherwise null (the simulation models exactly one storage).
 */
@Override
public DatanodeStorage getStorage(final String storageUuid) {
  if (storageUuid.equals(storage.getStorageUuid())) {
    return storage.dnStorage;
  }
  return null;
}
@Override
public StorageReport[] getStorageReports(String bpid) {
  // Single simulated storage => single-element report array.
  return new StorageReport[] {storage.getStorageReport(bpid)};
}

@Override
public List<FinalizedReplica> getFinalizedBlocks(String bpid) {
  throw new UnsupportedOperationException();
}

@Override
public List<FinalizedReplica> getFinalizedBlocksOnPersistentStorage(String bpid) {
  throw new UnsupportedOperationException();
}

@Override
public Map<String, Object> getVolumeInfoMap() {
  throw new UnsupportedOperationException();
}

@Override
public FsVolumeSpi getVolume(ExtendedBlock b) {
  // Every block lives on the one simulated volume.
  return volume;
}

@Override
public synchronized void removeVolumes(Set<File> volumes, boolean clearFailure) {
  throw new UnsupportedOperationException();
}
@Override
public void submitBackgroundSyncFileRangeRequest(ExtendedBlock block,
    FileDescriptor fd, long offset, long nbytes, int flags) {
  // No real file descriptors exist, so sync_file_range has no meaning here.
  throw new UnsupportedOperationException();
}

@Override
public void onCompleteLazyPersist(String bpId, long blockId,
    long creationTime, File[] savedFiles, FsVolumeSpi targetVolume) {
  throw new UnsupportedOperationException();
}

@Override
public void onFailLazyPersist(String bpId, long blockId) {
  throw new UnsupportedOperationException();
}

@Override
public ReplicaInfo moveBlockAcrossStorage(ExtendedBlock block,
    StorageType targetStorageType) throws IOException {
  // TODO Auto-generated method stub
  // NOTE(review): returns null instead of throwing like the other
  // unsupported operations here — callers must tolerate a null result.
  return null;
}
@Override
public void setPinning(ExtendedBlock b) throws IOException {
  // NOTE(review): throws NullPointerException (not IOException) if the
  // block pool or block is unknown — confirm callers only pass known blocks.
  blockMap.get(b.getBlockPoolId()).get(b.getLocalBlock()).pinned = true;
}

@Override
public boolean getPinning(ExtendedBlock b) throws IOException {
  // Same caveat as setPinning: assumes the block exists in the map.
  return blockMap.get(b.getBlockPoolId()).get(b.getLocalBlock()).pinned;
}

@Override
public boolean isDeletingBlock(String bpid, long blockId) {
  throw new UnsupportedOperationException();
}
}
|
{
"content_hash": "b58df29a0b115de72393ac8d271345f7",
"timestamp": "",
"source": "github",
"line_count": 1344,
"max_line_length": 93,
"avg_line_length": 28.895089285714285,
"alnum_prop": 0.6604609244238445,
"repo_name": "simbadzina/hadoop-fcfs",
"id": "2ac94165abdcd1d7d936b9907bfdd46f0f397961",
"size": "39641",
"binary": false,
"copies": "2",
"ref": "refs/heads/fcfs",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "29602"
},
{
"name": "Batchfile",
"bytes": "67213"
},
{
"name": "C",
"bytes": "1411756"
},
{
"name": "C++",
"bytes": "1741547"
},
{
"name": "CMake",
"bytes": "56862"
},
{
"name": "CSS",
"bytes": "50573"
},
{
"name": "HTML",
"bytes": "2355777"
},
{
"name": "Java",
"bytes": "51618846"
},
{
"name": "JavaScript",
"bytes": "26964"
},
{
"name": "Perl",
"bytes": "19540"
},
{
"name": "Protocol Buffer",
"bytes": "237642"
},
{
"name": "Python",
"bytes": "34012"
},
{
"name": "Shell",
"bytes": "316360"
},
{
"name": "TeX",
"bytes": "19322"
},
{
"name": "XSLT",
"bytes": "15460"
}
]
}
|
layout: default
title: "arcgis-egdb cookbook"
category: cookbooks
item: arcgis-egdb
version: 1.1.0
latest: true
---
# arcgis-egdb cookbook
The arcgis-egdb cookbook creates enterprise geodatabases in SQL Server or PostgreSQL databases and registers them with ArcGIS Server as the site's managed database.
## Platforms
* Windows 8 (8.1)
* Windows 10
* Windows Server 2008 (R2)
* Windows Server 2012 (R2)
* Windows Server 2016
* Windows Server 2019
* Windows Server 2022
* Ubuntu Server 18.04 and 20.04 LTS
* Red Hat Enterprise Linux Server 8
* SUSE Linux Enterprise Server 15
* Oracle Linux 8
## Database Servers
The cookbook was tested with:
* Amazon RDS for SQL Server
* Amazon RDS for PostgreSQL
* Amazon Aurora PostgreSQL-compatible
## Dependencies
The following cookbooks are required:
* arcgis-enterprise
The cookbook uses ArcPy to create and enable geodatabases. ArcPy is installed by the ArcGIS Server setup.
ArcPy does not support creating databases in Amazon RDS database servers. The cookbook uses the sqlcmd and the psql utility for SQL Server and PostgreSQL database servers respectively to create the databases. The 'sqlcmd' and 'psql' recipes could be used to install these utilities. ArcGIS Data Store and Portal for ArcGIS include an embedded PostgreSQL client with psql utility that can be used by the arcgis-egdb cookbook.
## Attributes
* `node['arcgis']['egdb']['engine']` = DB engine `<nil|postgres|sqlserver-se>`. Default DB engine is `nil`.
* `node['arcgis']['egdb']['endpoint']` = DB instance endpoint domain name. Default endpoint is `nil`.
* `node['arcgis']['egdb']['keycodes']` = ArcGIS Server license file path. Default path is `node['arcgis']['server']['keycodes']`.
* `node['arcgis']['egdb']['master_username']` = RDS DB instance master username. Default username is `EsriRDSAdmin`.
* `node['arcgis']['egdb']['master_password']` = RDS DB instance master user password. Default password is `nil`.
* `node['arcgis']['egdb']['db_username']` = Geodatabase username. Default username is `sde`.
* `node['arcgis']['egdb']['db_password']` = Geodatabase user password. Default password is `node['arcgis']['egdb']['master_password']`.
* `node['arcgis']['egdb']['postgresbin']` = Path to PostgreSQL client bin directory. Default path is `C:\Program Files\ArcGIS\DataStore\framework\runtime\pgsql\bin` on Windows and `/arcgis/datastore/framework/runtime/pgsql/bin` on Linux.
* `node['arcgis']['egdb']['sqlcmdbin']` = Path to Microsoft SQL Server Client SDK ODBC Tools 17 Binn directory. Default path is `C:\Program Files\Microsoft SQL Server\Client SDK\ODBC\170\Tools\Binn` on Windows.
* `node['arcgis']['egdb']['connection_files_dir']` = Directory path for geodatabase connection files (.sde) created by the recipes. Default directory is `node['arcgis']['misc']['scripts_dir']/connection_files`.
* `node['arcgis']['egdb']['data_items']` = Array with properties of geodatabases. Default value is as follows:
```JSON
[{
"database" : "egdb",
"data_item_path" : "/enterpriseDatabases/registeredDatabase",
"connection_file": "C:\\chef\\misc_scripts\\connection_files\\RDS_egdb.sde",
"is_managed" : true,
"connection_type" : "shared"
}]
```
## Recipes
### default
Creates an enterprise geodatabase and registers it with ArcGIS Server.
Attributes used by the recipe:
```JSON
{
"arcgis": {
"version": "11.0",
"server": {
"install_dir": "C:\\Program Files\\ArcGIS\\Server",
"private_url": "https://domain.com:6443/arcgis",
"admin_username": "admin",
"admin_password": "changeit"
},
"misc": {
"scripts_dir": "C:\\chef\\misc_scripts"
},
"egdb": {
"engine": "postgres",
"endpoint": "xxx.cluster-yyy.us-east-2.rds.amazonaws.com",
"keycodes": "C:\\Program Files\\ESRI\\License11.0\\sysgen\\keycodes",
"postgresbin" : "C:\\Program Files\\ArcGIS\\DataStore\\framework\\runtime\\pgsql\\bin",
"master_username": "EsriRDSAdmin",
"master_password": "changeit",
"db_password": "changeit",
"connection_files_dir": "C:\\chef\\misc_scripts\\connection_files",
"data_items": [{
"database": "egdb",
"data_item_path": "/enterpriseDatabases/registeredDatabase",
"connection_file": "C:\\chef\\msic_scripts\\connection_files\\RDS_egdb.sde",
"is_managed": true,
"connection_type": "shared"
}]
}
},
"run_list": [
"recipe[arcgis-egdb]"
]
}
```
### egdb_postgres
Creates an enterprise geodatabase in PostgreSQL.
Attributes used by the recipe:
```JSON
{
"arcgis": {
"version": "11.0",
"run_as_user": "arcgis",
"server": {
"install_dir": "C:\\Program Files\\ArcGIS\\Server"
},
"misc": {
"scripts_dir": "C:\\chef\\misc_scripts"
},
"egdb": {
"engine": "postgres",
"endpoint": "xxx.cluster-yyy.us-east-2.rds.amazonaws.com",
"keycodes": "C:\\Program Files\\ESRI\\License11.0\\sysgen\\keycodes",
"postgresbin" : "C:\\Program Files\\ArcGIS\\DataStore\\framework\\runtime\\pgsql\\bin",
"master_username": "EsriRDSAdmin",
"master_password": "changeit",
"db_password": "changeit",
"connection_files_dir": "C:\\chef\\misc_scripts\\connection_files",
"postgresbin": "C:\\Program Files\\ArcGIS\\DataStore\\framework\\runtime\\pgsql\\bin",
"data_items": [{
"database": "egdb",
"data_item_path": "/enterpriseDatabases/registeredDatabase",
"connection_file": "C:\\chef\\msic_scripts\\connection_files\\RDS_egdb.sde",
"is_managed": true,
"connection_type": "shared"
}]
}
},
"run_list": [
"recipe[arcgis-egdb::egdb_postgres]"
]
}
```
### egdb_sqlserver
Creates an enterprise geodatabase in SQL Server.
Attributes used by the recipe:
```JSON
{
"arcgis": {
"version": "11.0",
"server": {
"install_dir": "C:\\Program Files\\ArcGIS\\Server"
},
"misc": {
"scripts_dir": "C:\\chef\\misc_scripts"
},
"egdb": {
"endpoint": "xxx.cluster-yyy.us-east-2.rds.amazonaws.com",
"keycodes": "C:\\Program Files\\ESRI\\License11.0\\sysgen\\keycodes",
"master_username": "EsriRDSAdmin",
"master_password": "changeit",
"db_password": "changeit",
"data_items": [{
"database": "egdb",
"data_item_path": "/enterpriseDatabases/registeredDatabase",
"connection_file": "C:\\chef\\msic_scripts\\connection_files\\RDS_egdb.sde",
"is_managed": true,
"connection_type": "shared"
}]
}
},
"run_list": [
"recipe[arcgis-egdb::egdb_sqlserver]"
]
}
```
### register_egdb
Registers the database connection file with the specified ArcGIS Server site as the site's managed database.
Attributes used by the recipe:
```JSON
{
"arcgis": {
"server": {
"private_url": "https://domain.com:6443/arcgis",
"admin_username": "admin",
"admin_password": "changeit"
},
"egdb": {
"data_items": [{
"database": "egdb",
"data_item_path": "/enterpriseDatabases/registeredDatabase",
"connection_file": "C:\\chef\\msic_scripts\\connection_files\\RDS_egdb.sde",
"is_managed": true,
"connection_type": "shared"
}]
}
},
"run_list": [
"recipe[arcgis-egdb::register_egdb]"
]
}
```
### sql_alias
Creates EGDBHOST alias for SQL Server endpoint domain.
Attributes used by the recipe:
```JSON
{
"arcgis": {
"egdb": {
"engine": "sqlserver-se",
"endpoint": "xxx.us-east-2.rds.amazonaws.com"
}
},
"run_list": [
"recipe[arcgis-egdb::sql_alias]"
]
}
```
### sqlcmd
Installs the SQL Server ODBC driver and sqlcmd utility used by SQL Server enterprise geodatabase configuration scripts.
Attributes used by the recipe:
```JSON
{
"run_list": [
"recipe[arcgis-egdb::sqlcmd]"
]
}
```
|
{
"content_hash": "fc26e15cb9a660a8997b01711cfae56e",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 424,
"avg_line_length": 30.6328125,
"alnum_prop": 0.6483040040805916,
"repo_name": "Esri/arcgis-cookbook",
"id": "3ea45662a3b47ddd00b5805f579a86887e109870",
"size": "7846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cookbooks/arcgis-egdb/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "41242"
},
{
"name": "Python",
"bytes": "1743"
},
{
"name": "Ruby",
"bytes": "772479"
},
{
"name": "Shell",
"bytes": "46214"
}
]
}
|
package com.github.tonivade.claudb.command.set;
import static com.github.tonivade.claudb.data.DatabaseKey.safeKey;
import com.github.tonivade.resp.annotation.Command;
import com.github.tonivade.resp.annotation.ParamLength;
import com.github.tonivade.resp.command.Request;
import com.github.tonivade.resp.protocol.RedisToken;
import com.github.tonivade.claudb.command.DBCommand;
import com.github.tonivade.claudb.command.annotation.ParamType;
import com.github.tonivade.claudb.command.annotation.ReadOnly;
import com.github.tonivade.claudb.data.DataType;
import com.github.tonivade.claudb.data.DatabaseValue;
import com.github.tonivade.claudb.data.Database;
@ReadOnly
@Command("smembers")
@ParamLength(1)
@ParamType(DataType.SET)
public class SetMembersCommand implements DBCommand {

  /**
   * Executes SMEMBERS: returns every member of the set stored at the key
   * given as the first parameter. A missing key is treated as the empty set.
   */
  @Override
  public RedisToken execute(Database db, Request request) {
    return convert(db.getOrDefault(safeKey(request.getParam(0)), DatabaseValue.EMPTY_SET));
  }
}
|
{
"content_hash": "cdf6438d1eb856bfa487063c8a644790",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 97,
"avg_line_length": 35.392857142857146,
"alnum_prop": 0.8153380423814329,
"repo_name": "tonivade/claudb",
"id": "f619ca6b8281129af8ff5773768709a2c9a7f888",
"size": "1135",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/src/main/java/com/github/tonivade/claudb/command/set/SetMembersCommand.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "321"
},
{
"name": "Java",
"bytes": "442249"
}
]
}
|
;(function ($, Formstone, undefined) {
"use strict";
/**
 * @method private
 * @name resize
 * @description Handles window resize by re-measuring every cached instance.
 * @param windowWidth [int] "New window width (unused here)"
 */
function resize(windowWidth) {
    Functions.iterate.call($Instances, resizeInstance);
}
/**
 * @method private
 * @name cacheInstances
 * @description Caches active instances by re-querying the DOM for all
 * current carousel roots.
 */
function cacheInstances() {
    $Instances = $(Classes.base);
}
/**
 * @method private
 * @name construct
 * @description Builds instance: normalizes options, renders controls and
 * pagination markup, wraps the target's children into the carousel DOM,
 * binds media-query enable/disable, and starts the auto-advance timer.
 * @param data [object] "Instance data"
 */
function construct(data) {
    var i;

    // Media queries cannot express Infinity; substitute a very large width.
    data.maxWidth = (data.maxWidth === Infinity ? "100000px" : data.maxWidth);
    data.mq = "(min-width:" + data.minWidth + ") and (max-width:" + data.maxWidth + ")";

    // Legacy browser support
    if (!Formstone.support.transform) {
        data.useMargin = true;
    }

    // Build controls and pagination
    var controlsHtml = '',
        paginationHtml = '';

    if (data.controls) {
        controlsHtml += '<div class="' + RawClasses.controls + '">';
        controlsHtml += '<button type="button" class="' + [RawClasses.control, RawClasses.control_previous].join(" ")+ '">' + data.labels.previous + '</button>';
        controlsHtml += '<button type="button" class="' + [RawClasses.control, RawClasses.control_next].join(" ")+ '">' + data.labels.next + '</button>';
        controlsHtml += '</div>';
    }

    if (data.pagination) {
        paginationHtml += '<div class="' + RawClasses.pagination + '">';
        paginationHtml += '</div>';
    }

    // Modify dom
    this.addClass( [RawClasses.base, data.customClass, (data.rtl ? RawClasses.rtl : RawClasses.ltr)].join(" ") )
        .wrapInner('<div class="' + RawClasses.wrapper + '"><div class="' + RawClasses.container + '"><div class="' + RawClasses.canister + '"></div></div></div>')
        .append(controlsHtml)
        .wrapInner('<div class="' + RawClasses.viewport + '"></div>')
        .append(paginationHtml);

    // Cache the pieces just built.
    data.$viewport = this.find(Classes.viewport).eq(0);
    data.$container = this.find(Classes.container).eq(0);
    data.$canister = this.find(Classes.canister).eq(0);
    data.$controls = this.find(Classes.controls).eq(0);
    data.$pagination = this.find(Classes.pagination).eq(0);

    data.$items = data.$canister.children().addClass(RawClasses.item);
    data.$controlItems = data.$controls.find(Classes.control);
    data.$paginationItems = data.$pagination.find(Classes.page);
    data.$images = data.$canister.find("img");

    // Initial state.
    data.index = 0;
    data.enabled = false;
    data.leftPosition = 0;
    data.totalImages = data.$images.length;
    data.autoTimer = null;
    data.resizeTimer = null;

    // Normalize a responsive `show` option ({minWidth: count, ...}) into
    // {key: {width, count}} with keys sorted ascending for calculateVisible.
    if ($.type(data.show) === "object") {
        var show = data.show,
            keys = [];

        for (i in show) {
            if (show.hasOwnProperty(i)) {
                keys.push(i);
            }
        }

        keys.sort(Functions.sortAsc);

        data.show = {};

        for (i in keys) {
            if (keys.hasOwnProperty(i)) {
                data.show[ keys[i] ] = {
                    width: parseInt( keys[i] ),
                    count: show[ keys[i] ]
                };
            }
        }
    }

    // Media Query support
    $.mediaquery("bind", data.rawGuid, data.mq, {
        enter: function() {
            enable.call(data.$el, data);
        },
        leave: function() {
            disable.call(data.$el, data);
        }
    });

    // Watch Images
    data.$images.on(Events.load, data, onImageLoad);

    // Auto timer
    if (data.autoAdvance) {
        data.autoTimer = Functions.startTimer(data.autoTimer, data.autoTime, function() {
            autoAdvance(data);
        }, true);
    }

    cacheInstances();
}
/**
 * @method private
 * @name destruct
 * @description Tears down instance: stops timers, unbinds media queries and
 * image handlers, unwraps the markup added by construct, removes classes.
 * @param data [object] "Instance data"
 */
function destruct(data) {
    Functions.clearTimer(data.autoTimer);
    // NOTE(review): startTimer with a single argument looks like it should
    // be clearTimer for the resize debounce — confirm against the Functions
    // helper's API.
    Functions.startTimer(data.resizeTimer);

    disable.call(this, data);

    $.mediaquery("unbind", data.rawGuid);

    data.$images.off(Events.namespace);

    // Remove item classes and the two wrappers added in construct.
    data.$items.removeClass( [RawClasses.item, RawClasses.visible].join(" ") )
               .unwrap().unwrap();

    if (data.pagination) {
        data.$pagination.remove();
    }
    if (data.controls) {
        data.$controls.remove();
    }

    this.removeClass( [RawClasses.base, RawClasses.ltr, RawClasses.rtl, RawClasses.enabled, RawClasses.animated, data.customClass].join(" ") );

    cacheInstances();
}
/**
 * @method
 * @name disable
 * @description Disables instance of plugin: stops the auto timer, unbinds
 * touch and namespaced events, and clears all inline sizing/positioning.
 * @example $(".target").carousel("disable");
 */
function disable(data) {
    if (data.enabled) {
        Functions.clearTimer(data.autoTimer);

        data.enabled = false;

        this.removeClass( [RawClasses.enabled, RawClasses.animated].join(" ") )
            .off(Events.namespace);

        data.$canister.touch("destroy")
                      .off(Events.namespace)
                      .attr("style", "")
                      .css(TransitionProperty, "none");

        data.$items.css({
            width: "",
            height: ""
        });

        data.$controls.removeClass(RawClasses.visible);
        data.$pagination.removeClass(RawClasses.visible)
                        .html("");

        // Undo whichever positioning mechanism was in use.
        if (data.useMargin) {
            data.$canister.css({
                marginLeft: ""
            });
        } else {
            data.$canister.css(TransformProperty, "");
        }

        data.index = 0;
    }
}
/**
 * @method
 * @name enable
 * @description Enables instance of plugin: binds control/pagination clicks
 * and touch pan/swipe handlers, then performs an initial resize.
 * @example $(".target").carousel("enable");
 */
function enable(data) {
    if (!data.enabled) {
        data.enabled = true;

        this.addClass(RawClasses.enabled)
            .on(Events.clickTouchStart, Classes.control, data, onAdvance)
            .on(Events.clickTouchStart, Classes.page, data, onSelect);

        data.$canister.touch({
            axis: "x",
            pan: true,
            swipe: true
        }).on(Events.panStart, data, onPanStart)
          .on(Events.pan, data, onPan)
          .on(Events.panEnd, data, onPanEnd)
          .on(Events.swipe, data, onSwipe)
          .css(TransitionProperty, "");

        resizeInstance.call(this, data);
    }
}
/**
* @method
* @name resize
* @description Resizes instance
* @example $(".target").carousel("resize");
*/
/**
 * @method private
 * @name resizeInstance
 * @description Re-measures one instance: recomputes visible/page counts and
 * widths, rebuilds the page list and pagination markup, and re-positions the
 * canister at the current index. (Also removes a previously-dead counter
 * variable that was incremented but never read.)
 * @param data [object] "Instance data"
 */
function resizeInstance(data) {
    if (data.enabled) {
        var i,
            $items,
            $first,
            height,
            left;

        data.count = data.$items.length;

        if (data.count < 1) { // avoid empty carousels
            return;
        }

        // Disable transitions while measuring to avoid visible jumps.
        this.removeClass(RawClasses.animated);

        // data.viewportWidth = data.$viewport.outerWidth(false);
        data.containerWidth = data.$container.outerWidth(false);

        data.visible = calculateVisible(data);
        // Paged mode advances one item at a time; otherwise a full page.
        data.perPage = data.paged ? 1 : data.visible;

        data.itemMargin = parseInt(data.$items.eq(0).css("marginRight")) + parseInt(data.$items.eq(0).css("marginLeft"));
        data.itemWidth = (data.containerWidth - (data.itemMargin * (data.visible - 1))) / data.visible;
        data.itemHeight = 0;

        data.pageWidth = data.paged ? data.itemWidth : data.containerWidth;
        data.pageCount = Math.ceil(data.count / data.perPage);

        data.canisterWidth = ((data.pageWidth + data.itemMargin) * data.pageCount);

        data.$canister.css({
            width: data.canisterWidth
        });

        data.$items.css({
            width: data.itemWidth,
            height: ""
        }).removeClass(RawClasses.visible);

        // Build the page list: each page records its items, left offset and
        // its first item's height (tallest overall is tracked separately).
        data.pages = [];

        for (i = 0; i < data.count; i += data.perPage) {
            $items = data.$items.slice(i, i + data.perPage);

            // Pad a short trailing page with the preceding items.
            if ($items.length < data.perPage) {
                if (i === 0) {
                    $items = data.$items;
                } else {
                    $items = data.$items.slice(data.$items.length - data.perPage);
                }
            }

            $first = data.rtl ? $items.eq( $items.length - 1 ) : $items.eq(0);
            height = $first.outerHeight();
            left = $first.position().left;

            data.pages.push({
                left   : data.rtl ? left - (data.canisterWidth - data.pageWidth - data.itemMargin) : left,
                height : height,
                $items : $items
            });

            if (height > data.itemHeight) {
                data.itemHeight = height;
            }
        }

        if (data.paged) {
            data.pageCount -= (data.count % data.visible);
        }

        data.maxMove = -data.pages[ data.pageCount - 1 ].left;

        // auto height
        if (data.autoHeight) {
            data.$items.css({
                height: data.itemHeight
            });
        }

        // Rebuild pagination buttons to match the new page count.
        var html = '';
        for (i = 0; i < data.pageCount; i++) {
            html += '<button type="button" class="' + RawClasses.page + '">' + (i + 1) + '</button>';
        }

        data.$pagination.html(html);

        // Hide controls/pagination when everything fits on one page.
        if (data.pageCount <= 1) {
            data.$controls.removeClass(RawClasses.visible);
            data.$pagination.removeClass(RawClasses.visible);
        } else {
            data.$controls.addClass(RawClasses.visible);
            data.$pagination.addClass(RawClasses.visible);
        }
        data.$paginationItems = data.$el.find(Classes.page);

        positionCanister(data, data.index, false);

        // Restore transitions on the next tick, after layout settles.
        setTimeout(function() {
            data.$el.addClass(RawClasses.animated);
        }, 5);
    }
}
/**
* @method
* @name reset
* @description Resets instance after item change
* @example $(".target").carousel("reset");
*/
/**
 * @method private
 * @name resetInstance
 * @description Resets instance after item change: re-collects the canister's
 * children as items and re-measures.
 * @param data [object] "Instance data"
 */
function resetInstance(data) {
    if (data.enabled) {
        data.$items = data.$canister.children().addClass(RawClasses.item);
        resizeInstance.call(this, data);
    }
}
/**
* @method
* @name jump
* @description Jump instance of plugin to specific page
* @example $(".target").carousel("jump", 1);
*/
/**
 * @method private
 * @name jumpToItem
 * @description Jump instance of plugin to specific page
 * @param data [object] "Instance data"
 * @param index [int] "New one-based page number"
 */
function jumpToItem(data, index) {
    if (data.enabled) {
        // Manual navigation cancels auto-advance.
        Functions.clearTimer(data.autoTimer);

        // Public API is one-based; positionCanister expects zero-based.
        positionCanister(data, index - 1);
    }
}
/**
* @method
* @name previous
* @description Move to the previous item
* @example $(".target").carousel("previous");
*/
/**
 * @method private
 * @name previousItem
 * @description Moves one page back, wrapping to the last page when infinite
 * looping is enabled.
 * @param data [object] "Instance data"
 */
function previousItem(data) {
    var target = data.index - 1;

    if (data.infinite && target < 0) {
        target = data.pageCount - 1;
    }

    positionCanister(data, target);
}
/**
* @method
* @name next
* @description Move to next item
* @param data [object] "Instance data"
*/
/**
 * @method private
 * @name nextItem
 * @description Moves one page forward, wrapping to the first page when
 * infinite looping is enabled.
 * @param data [object] "Instance data"
 */
function nextItem(data) {
    var target = data.index + 1;

    if (data.infinite && target >= data.pageCount) {
        target = 0;
    }

    positionCanister(data, target);
}
/**
 * @method private
 * @name onImageLoad
 * @description Handles child image load: debounces a re-measure (20ms),
 * since loaded image sizes affect item and page heights.
 * @param e [object] "Event data"
 */
function onImageLoad(e) {
    var data = e.data;

    data.resizeTimer = Functions.startTimer(data.resizeTimer, 20, function() {
        resizeInstance.call(data.$el, data);
    });
}
/**
 * @method private
 * @name autoAdvance
 * @description Handles auto advancement: moves one page forward, always
 * wrapping to the first page at the end (regardless of the infinite option).
 * @param data [object] "Instance data"
 */
function autoAdvance(data) {
    var target = data.index + 1;

    if (target >= data.pageCount) {
        target = 0;
    }

    positionCanister(data, target);
}
/**
 * @method private
 * @name onAdvance
 * @description Handles previous/next control clicks.
 * @param e [object] "Event data"
 */
function onAdvance(e) {
    Functions.killEvent(e);

    var data = e.data,
        // The "next" control moves forward one page, "previous" moves back.
        index = data.index + ($(e.currentTarget).hasClass(RawClasses.control_next) ? 1 : -1);

    // Manual interaction cancels auto-advance.
    Functions.clearTimer(data.autoTimer);
    positionCanister(data, index);
}
/**
 * @method private
 * @name onSelect
 * @description Handles pagination dot clicks: jumps to the clicked page.
 * @param e [object] "Event data"
 */
function onSelect(e) {
    Functions.killEvent(e);

    var data = e.data,
        // The dot's position among the pagination items is the page index.
        index = data.$paginationItems.index($(e.currentTarget));

    // Manual interaction cancels auto-advance.
    Functions.clearTimer(data.autoTimer);
    positionCanister(data, index);
}
/**
 * @method private
 * @name positionCanister
 * @description Handles updating instance position: clamps/wraps the index,
 * moves the canister (margin or transform), updates visible classes, fires
 * the update event on real page changes, and refreshes controls.
 * @param data [object] "Instance data"
 * @param index [int] "Target page index (zero-based)"
 * @param animate [boolean] "Pass false to reposition without a transition"
 */
function positionCanister(data, index, animate) {
    // Wrap out-of-range indexes when infinite, otherwise clamp to the ends.
    if (index < 0) {
        index = (data.infinite) ? data.pageCount-1 : 0;
    }
    if (index >= data.pageCount) {
        index = (data.infinite) ? 0 : data.pageCount-1;
    }

    if (data.pages[index]) {
        data.leftPosition = -data.pages[index].left;
    }
    data.leftPosition = checkPosition(data, data.leftPosition);

    if (data.useMargin) {
        // Legacy fallback for browsers without transform support.
        data.$canister.css({
            marginLeft: data.leftPosition
        });
    } else {
        if (animate === false) {
            data.$canister.css(TransitionProperty, "none")
                          .css(TransformProperty, "translateX(" + data.leftPosition + "px)");

            // Slight delay before adding transitions back
            setTimeout(function() {
                data.$canister.css(TransitionProperty, "");
            }, 5);
        } else {
            data.$canister.css(TransformProperty, "translateX(" + data.leftPosition + "px)");
        }
    }

    // Update classes
    data.$items.removeClass(RawClasses.visible);
    data.pages[index].$items.addClass(RawClasses.visible);

    // Only fire the update event for real, animated page changes.
    if (animate !== false && index !== data.index && (data.infinite || (index > -1 && index < data.pageCount)) ) {
        data.$el.trigger(Events.update, [ index ]);
    }

    data.index = index;

    updateControls(data);
}
/**
 * @method private
 * @name updateControls
 * @description Handles updating instance controls: highlights the active
 * pagination dot and hides prev/next at the ends (unless infinite).
 * @param data [object] "Instance data"
 */
function updateControls(data) {
    data.$paginationItems.removeClass(RawClasses.active)
                         .eq(data.index)
                         .addClass(RawClasses.active);

    if (data.infinite) {
        // Infinite carousels can always move both ways.
        data.$controlItems.addClass(RawClasses.visible);
    } else if (data.pageCount < 1) {
        data.$controlItems.removeClass(RawClasses.visible);
    } else {
        data.$controlItems.addClass(RawClasses.visible);

        if (data.index <= 0) {
            data.$controlItems.filter(Classes.control_previous).removeClass(RawClasses.visible);
        } else if (data.index >= data.pageCount || data.leftPosition === data.maxMove) {
            data.$controlItems.filter(Classes.control_next).removeClass(RawClasses.visible);
        }
    }
}
/**
 * @method private
 * @name calculateVisible
 * @description Determines how many items should show at screen width. When
 * `show` is an object it maps min-widths to counts: the first entry (keys
 * were sorted ascending in construct) whose width is at or below the window
 * width wins; 1 is the fallback when nothing matches.
 * @param data [object] "Instance data"
 * @return [int] "New visible count"
 */
function calculateVisible(data) {
    if ($.type(data.show) === "object") {
        for (var i in data.show) {
            if (data.show.hasOwnProperty(i) && Formstone.windowWidth >= data.show[i].width) {
                // `fill` caps the count at the actual number of items.
                return (data.fill && data.count < data.show[i].count) ? data.count : data.show[i].count;
            }
        }

        return 1;
    }

    return (data.fill && data.count < data.show) ? data.count : data.show;
}
/**
 * @method private
 * @name onPanStart
 * @description Handles pan start event: captures the canister's current
 * left offset and switches transitions off so the drag tracks the finger.
 * @param e [object] "Event data"
 */
function onPanStart(e) {
    var data = e.data;

    if (data.useMargin) {
        data.leftPosition = parseInt(data.$canister.css("marginLeft"));
    } else {
        // Read translateX back out of the computed transform: in
        // matrix(a, b, c, d, tx, ty), index 4 is the x translation.
        var matrix = data.$canister.css(TransformProperty).split(",");
        data.leftPosition = parseInt(matrix[4]); // ?
    }

    data.$canister.css(TransitionProperty, "none");

    onPan(e);

    data.isTouching = true;
}
/**
 * @method private
 * @name onPan
 * @description Handles pan event: drags the canister by the touch delta,
 * clamped to the valid scroll range.
 * @param e [object] "Event data"
 */
function onPan(e) {
    var data = e.data;

    data.touchLeft = checkPosition(data, data.leftPosition + e.deltaX);

    if (data.useMargin) {
        data.$canister.css({
            marginLeft: data.touchLeft
        });
    } else {
        data.$canister.css(TransformProperty, "translateX(" + data.touchLeft + "px)");
    }
}
/**
 * @method private
 * @name onPanEnd
 * @description Handles pan end event: advances a page only when the drag
 * moved at least 50px; otherwise snaps back to the current page.
 * @param e [object] "Event data"
 */
function onPanEnd(e) {
    var data = e.data;
    var moved = (e.deltaX <= -50 || e.deltaX >= 50);
    var index = moved ? data.index + getIncrement(data, e) : data.index;

    endTouch(data, index);
}
/**
 * @method private
 * @name onSwipe
 * @description Handles swipe event: always advances one page in the swiped
 * direction, with no distance threshold.
 * @param e [object] "Event data"
 */
function onSwipe(e) {
    var data = e.data;

    endTouch(data, data.index + getIncrement(data, e));
}
/**
 * @method private
 * @name endTouch
 * @description Cleans up touch interactions: restores transitions and snaps
 * to the target page.
 * @param data [object] "Instance data"
 * @param index [int] "New index"
 */
function endTouch(data, index) {
    data.$canister.css(TransitionProperty, "");

    positionCanister(data, index);

    data.isTouching = false;
}
/**
 * @method private
 * @name checkPosition
 * @description Clamps a left position to the valid scroll range
 * @param data [object] "Instance data"
 * @param pos [int] "Proposed left position"
 * @return [int] "Corrected left position"
 */
function checkPosition(data, pos) {
    if (isNaN(pos)) {
        return 0;
    }

    // RTL scrolls through [0, maxMove]; LTR through [maxMove, 0].
    // Clamp order mirrors the original checks: range bound first, then zero.
    return data.rtl
        ? Math.max(Math.min(pos, data.maxMove), 0)
        : Math.min(Math.max(pos, data.maxMove), 0);
}
/**
 * @method private
 * @name getIncrement
 * @description Returns touch increment
 * @param data [object] "Instance data"
 * @param e [object] "Event data"
 * @return [int] "Target direction"
 */
function getIncrement(data, e) {
    // Swiping toward the "next" side (left in LTR, right in RTL) advances;
    // any other direction steps back.
    var advanceDirection = data.rtl ? "right" : "left";

    return (e.directionX === advanceDirection) ? 1 : -1;
}
/**
* @plugin
* @name Carousel
* @description A jQuery plugin for simple content carousels.
* @type widget
* @dependency core.js
* @dependency mediaquery.js
* @dependency touch.js
*/
var Plugin = Formstone.Plugin("carousel", {
widget: true,
/**
* @options
* @param autoAdvance [boolean] <false> "Flag to auto advance items"
* @param autoHeight [boolean] <false> "Flag to auto-size items"
* @param autoTime [int] <8000> "Auto advance time"
* @param controls [boolean] <true> "Flag to draw controls"
* @param customClass [string] <''> "Class applied to instance"
* @param fill [boolean] <false> "Flag to fill viewport if item count is less then show count"
* @param infinite [boolean] <false> "Flag for looping items"
* @param labels.next [string] <'Next'> "Control text"
* @param labels.previous [string] <'Previous'> "Control text"
* @param maxWidth [string] <'Infinity'> "Width at which to auto-disable plugin"
* @param minWidth [string] <'0'> "Width at which to auto-disable plugin"
* @param paged [boolean] <false> "Flag for paged items"
* @param pagination [boolean] <true> "Flag to draw pagination"
* @param show [int / object] <1> "Items visible per page; Object for responsive counts"
* @param rtl [boolean] <false> "Right to Left display"
* @param useMargin [boolean] <false> "Use margins instead of css transitions (legacy browser support)"
*/
defaults: {
autoAdvance : false,
autoHeight : false,
autoTime : 8000,
controls : true,
customClass : "",
fill : false,
infinite : false,
labels: {
next : "Next",
previous : "Previous"
},
maxWidth : Infinity,
// NOTE(review): documented above as <'0'> but set to '0px' here; presumably
// the media-query dependency expects a unit string — confirm against mediaquery.js
minWidth : '0px',
paged : false,
pagination : true,
show : 1,
rtl : false,
useMargin : false
},
// Class name suffixes; presumably namespaced by the Formstone core — confirm
classes: [
"ltr",
"rtl",
"viewport",
"wrapper",
"container",
"canister",
"item",
"controls",
"control",
"pagination",
"page",
"animated",
"enabled",
"visible",
"active",
"control_previous",
"control_next"
],
/**
* @events
* @event update.carousel "Carousel position updated"
*/
events: {
update : "update",
panStart : "panstart",
pan : "pan",
panEnd : "panend",
swipe : "swipe"
},
// Public API; underscore-prefixed entries are lifecycle hooks called by the core
methods: {
_construct : construct,
_destruct : destruct,
_resize : resize,
disable : disable,
enable : enable,
jump : jumpToItem,
previous : previousItem,
next : nextItem,
reset : resetInstance,
resize : resizeInstance
}
}),
// Localize References
Classes = Plugin.classes,
RawClasses = Classes.raw,
Events = Plugin.events,
Functions = Plugin.functions,
// presumably holds active instance references; verify against usage elsewhere in the file
$Instances = [],
TransformProperty = Formstone.transform,
TransitionProperty = Formstone.transition;
})(jQuery, Formstone);
|
{
"content_hash": "df9a8481cce71060d84afaaa0d59e01e",
"timestamp": "",
"source": "github",
"line_count": 866,
"max_line_length": 158,
"avg_line_length": 24.68013856812933,
"alnum_prop": 0.5907921209002012,
"repo_name": "ufhy/Formstone",
"id": "aa652980569ebfd3268e7847b72e6f9e175cd307",
"size": "21373",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/js/carousel.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "159"
},
{
"name": "CSS",
"bytes": "333455"
},
{
"name": "HTML",
"bytes": "993599"
},
{
"name": "JavaScript",
"bytes": "291676"
},
{
"name": "PHP",
"bytes": "17630"
}
]
}
|
#include "config.h"
#include "SliderThumbElement.h"
#include "CSSValueKeywords.h"
#include "ElementShadow.h"
#include "Event.h"
#include "Frame.h"
#include "HTMLInputElement.h"
#include "HTMLParserIdioms.h"
#include "MouseEvent.h"
#include "RenderDeprecatedFlexibleBox.h"
#include "RenderSlider.h"
#include "RenderTheme.h"
#include "ShadowRoot.h"
#include "StepRange.h"
#include <wtf/MathExtras.h>
using namespace std;
namespace WebCore {
using namespace HTMLNames;
// Returns the input's current value as a proportion in [0, 1] of its step range.
inline static Decimal sliderPosition(HTMLInputElement* element)
{
const StepRange stepRange(element->createStepRange(RejectAny));
// Fall back to the range's default value when the current value does not parse.
const Decimal oldValue = parseToDecimalForNumberType(element->value(), stepRange.defaultValue());
return stepRange.proportionFromValue(stepRange.clampValue(oldValue));
}
// Returns true if the slider renders vertically: either an explicit vertical
// appearance or (with VIDEO enabled) a volume slider on a theme that uses
// vertical volume sliders. Requires the input to have a renderer.
inline static bool hasVerticalAppearance(HTMLInputElement* input)
{
ASSERT(input->renderer());
RenderStyle* sliderStyle = input->renderer()->style();
#if ENABLE(VIDEO)
if (sliderStyle->appearance() == MediaVolumeSliderPart && input->renderer()->theme()->usesVerticalVolumeSlider())
return true;
#endif
return sliderStyle->appearance() == SliderVerticalPart;
}
// Walks the input's user-agent shadow tree to the thumb element.
// Tree shape assumed here: container -> track -> thumb (compare
// sliderTrackElementOf() below, which stops one level earlier).
SliderThumbElement* sliderThumbElementOf(Node* node)
{
ASSERT(node);
ShadowRoot* shadow = node->toInputElement()->userAgentShadowRoot();
ASSERT(shadow);
Node* thumb = shadow->firstChild()->firstChild()->firstChild();
ASSERT(thumb);
return toSliderThumbElement(thumb);
}
// Walks the input's user-agent shadow tree to the track element
// (container -> track; the thumb is the track's first child).
HTMLElement* sliderTrackElementOf(Node* node)
{
ASSERT(node);
ShadowRoot* shadow = node->toInputElement()->userAgentShadowRoot();
ASSERT(shadow);
Node* track = shadow->firstChild()->firstChild();
ASSERT(track);
return toHTMLElement(track);
}
// --------------------------------
// Renderer for the slider thumb (and, per layout() below, the track limiter).
RenderSliderThumb::RenderSliderThumb(Node* node)
: RenderBlock(node)
{
}
// Derives the thumb's native appearance from the parent track's appearance,
// then lets the theme size the thumb when a native appearance applies.
void RenderSliderThumb::updateAppearance(RenderStyle* parentStyle)
{
    switch (parentStyle->appearance()) {
    case SliderVerticalPart:
        style()->setAppearance(SliderThumbVerticalPart);
        break;
    case SliderHorizontalPart:
        style()->setAppearance(SliderThumbHorizontalPart);
        break;
    case MediaSliderPart:
        style()->setAppearance(MediaSliderThumbPart);
        break;
    case MediaVolumeSliderPart:
        style()->setAppearance(MediaVolumeSliderThumbPart);
        break;
    case MediaFullScreenVolumeSliderPart:
        style()->setAppearance(MediaFullScreenVolumeSliderThumbPart);
        break;
    default:
        break;
    }

    if (style()->hasAppearance())
        theme()->adjustSliderThumbSize(style(), toElement(node()));
}
// Type predicate used by render-tree casts.
bool RenderSliderThumb::isSliderThumb() const
{
return true;
}
// Positions the thumb as a percentage along the track based on the host
// input's current value (see sliderPosition()), then performs block layout.
void RenderSliderThumb::layout()
{
// Do not cast node() to SliderThumbElement. This renderer is used for
// TrackLimitElement too.
HTMLInputElement* input = node()->shadowHost()->toInputElement();
bool isVertical = hasVerticalAppearance(input);
double fraction = (sliderPosition(input) * 100).toDouble();
// Vertical sliders grow upward, so the top offset is the complement.
if (isVertical)
style()->setTop(Length(100 - fraction, Percent));
else if (style()->isLeftToRightDirection())
style()->setLeft(Length(fraction, Percent));
else
style()->setRight(Length(fraction, Percent));
RenderBlock::layout();
}
// --------------------------------
// FIXME: Find a way to cascade appearance and adjust heights, and get rid of this class.
// http://webkit.org/b/62535
// Flexbox container for the slider's shadow parts; its only job is the
// height/orientation fix-up performed in layout() below.
class RenderSliderContainer : public RenderDeprecatedFlexibleBox {
public:
RenderSliderContainer(Node* node)
: RenderDeprecatedFlexibleBox(node) { }
private:
virtual void layout();
};
// Orients the flexbox to match the slider direction and gives the container a
// concrete height when the <input> has none, so the thumb's percentage offset
// (set in RenderSliderThumb::layout) has something to resolve against.
void RenderSliderContainer::layout()
{
HTMLInputElement* input = node()->shadowHost()->toInputElement();
bool isVertical = hasVerticalAppearance(input);
style()->setBoxOrient(isVertical ? VERTICAL : HORIZONTAL);
// Sets the concrete height if the height of the <input> is not fixed or a
// percentage value because a percentage height value of this box won't be
// based on the <input> height in such case.
if (input->renderer()->isSlider()) {
if (!isVertical) {
RenderObject* trackRenderer = node()->firstChild()->renderer();
Length inputHeight = input->renderer()->style()->height();
if (!inputHeight.isSpecified()) {
// No specified height on the input: borrow the thumb's height.
RenderObject* thumbRenderer = input->sliderThumbElement()->renderer();
if (thumbRenderer) {
Length height = thumbRenderer->style()->height();
#if ENABLE(DATALIST_ELEMENT)
// With a datalist, make room for tick marks on either side of the
// track center; a negative theme offset means ticks overlap the track.
if (input && input->list()) {
int offsetFromCenter = theme()->sliderTickOffsetFromTrackCenter();
int trackHeight = 0;
if (offsetFromCenter < 0)
trackHeight = -2 * offsetFromCenter;
else {
int tickLength = theme()->sliderTickSize().height();
trackHeight = 2 * (offsetFromCenter + tickLength);
}
float zoomFactor = style()->effectiveZoom();
if (zoomFactor != 1.0)
trackHeight *= zoomFactor;
height = Length(trackHeight, Fixed);
}
#endif
style()->setHeight(height);
}
} else {
// Specified input height: fill it and let the track size itself.
style()->setHeight(Length(100, Percent));
if (trackRenderer)
trackRenderer->style()->setHeight(Length());
}
}
}
RenderDeprecatedFlexibleBox::layout();
// Percentage 'top' for the thumb doesn't work if the parent style has no
// concrete height.
Node* track = node()->firstChild();
if (track && track->renderer()->isBox()) {
RenderBox* trackBox = track->renderBox();
trackBox->style()->setHeight(Length(trackBox->height() - trackBox->borderAndPaddingHeight(), Fixed));
}
}
// --------------------------------
// Requests a re-layout so the thumb moves to reflect the input's value.
void SliderThumbElement::setPositionFromValue()
{
// Since the code to calculate position is in the RenderSliderThumb layout
// path, we don't actually update the value here. Instead, we poke at the
// renderer directly to trigger layout.
if (renderer())
renderer()->setNeedsLayout(true);
}
// Creates the thumb's renderer in the given arena; the style argument is unused.
RenderObject* SliderThumbElement::createRenderer(RenderArena* arena, RenderStyle*)
{
return new (arena) RenderSliderThumb(this);
}
// The thumb forwards form-control state queries to its host <input> so that
// selector matching and focus behave as if the input itself were targeted.
bool SliderThumbElement::isEnabledFormControl() const
{
return hostInput()->isEnabledFormControl();
}
bool SliderThumbElement::shouldMatchReadOnlySelector() const
{
return hostInput()->shouldMatchReadOnlySelector();
}
bool SliderThumbElement::shouldMatchReadWriteSelector() const
{
return hostInput()->shouldMatchReadWriteSelector();
}
// Focus is delegated to the host input; the thumb itself never takes focus.
Node* SliderThumbElement::focusDelegate()
{
return hostInput();
}
// Begins a drag initiated elsewhere (e.g. a click on the track): jump the
// thumb to the point, then capture subsequent mouse events.
void SliderThumbElement::dragFrom(const LayoutPoint& point)
{
setPositionFromPoint(point);
startDragging();
}
// Maps an absolute pointer position to a slider value: clamp the thumb center
// onto the track, convert to a proportion of the step range, optionally snap
// to a nearby datalist tick mark, then push the new value into the host input.
void SliderThumbElement::setPositionFromPoint(const LayoutPoint& point)
{
HTMLInputElement* input = hostInput();
HTMLElement* trackElement = sliderTrackElementOf(input);
if (!input->renderer() || !renderer() || !trackElement->renderer())
return;
// Remember the pre-drag value so the change event reflects the right baseline.
input->setTextAsOfLastFormControlChangeEvent(input->value());
LayoutPoint offset = roundedLayoutPoint(input->renderer()->absoluteToLocal(point, UseTransforms | SnapOffsetForTransforms));
bool isVertical = hasVerticalAppearance(input);
bool isLeftToRightDirection = renderBox()->style()->isLeftToRightDirection();
LayoutUnit trackSize;
LayoutUnit position;
LayoutUnit currentPosition;
// We need to calculate currentPosition from absolute points because the
// renderer for this node is usually on a layer and renderBox()->x() and
// y() are unusable.
// FIXME: This should probably respect transforms.
LayoutPoint absoluteThumbOrigin = renderBox()->absoluteBoundingBoxRectIgnoringTransforms().location();
LayoutPoint absoluteSliderContentOrigin = roundedLayoutPoint(input->renderer()->localToAbsolute());
IntRect trackBoundingBox = trackElement->renderer()->absoluteBoundingBoxRectIgnoringTransforms();
IntRect inputBoundingBox = input->renderer()->absoluteBoundingBoxRectIgnoringTransforms();
if (isVertical) {
trackSize = trackElement->renderBox()->contentHeight();
position = offset.y() - renderBox()->height() / 2 - trackBoundingBox.y() + inputBoundingBox.y() - renderBox()->marginBottom();
currentPosition = absoluteThumbOrigin.y() - absoluteSliderContentOrigin.y();
} else {
trackSize = trackElement->renderBox()->contentWidth();
position = offset.x() - renderBox()->width() / 2 - trackBoundingBox.x() + inputBoundingBox.x();
if (isLeftToRightDirection)
position -= renderBox()->marginLeft();
else
position += renderBox()->width() - renderBox()->marginRight();
currentPosition = absoluteThumbOrigin.x() - absoluteSliderContentOrigin.x();
}
// Clamp into the track, and bail out early if the thumb would not move.
position = max<LayoutUnit>(0, min(position, trackSize));
if (position == currentPosition)
return;
// Vertical and RTL sliders run opposite to the coordinate axis, so invert.
const Decimal ratio = Decimal::fromDouble(static_cast<double>(position) / trackSize);
const Decimal fraction = isVertical || !isLeftToRightDirection ? Decimal(1) - ratio : ratio;
StepRange stepRange(input->createStepRange(RejectAny));
Decimal value = stepRange.clampValue(stepRange.valueFromProportion(fraction));
#if ENABLE(DATALIST_ELEMENT)
// Snap to the closest datalist tick mark when within the theme's threshold.
const LayoutUnit snappingThreshold = renderer()->theme()->sliderTickSnappingThreshold();
if (snappingThreshold > 0) {
Decimal closest = input->findClosestTickMarkValue(value);
if (closest.isFinite()) {
double closestFraction = stepRange.proportionFromValue(closest).toDouble();
double closestRatio = isVertical || !isLeftToRightDirection ? 1.0 - closestFraction : closestFraction;
LayoutUnit closestPosition = trackSize * closestRatio;
if ((closestPosition - position).abs() <= snappingThreshold)
value = closest;
}
}
#endif
// FIXME: This is no longer being set from renderer. Consider updating the method name.
input->setValueFromRenderer(serializeForNumberType(value));
renderer()->setNeedsLayout(true);
input->dispatchFormControlChangeEvent();
}
// Enters drag mode and captures all mouse events on this element until
// stopDragging() releases them.
void SliderThumbElement::startDragging()
{
if (Frame* frame = document()->frame()) {
frame->eventHandler()->setCapturingMouseEventsNode(this);
m_inDragMode = true;
}
}
// Leaves drag mode, releases mouse capture, and triggers a re-layout so the
// thumb settles at its final position. No-op when not dragging.
void SliderThumbElement::stopDragging()
{
if (!m_inDragMode)
return;
if (Frame* frame = document()->frame())
frame->eventHandler()->setCapturingMouseEventsNode(0);
m_inDragMode = false;
if (renderer())
renderer()->setNeedsLayout(true);
}
// Drives dragging from raw mouse events: left-button down starts a drag,
// left-button up ends it, and moves reposition the thumb while dragging.
// Everything else falls through to the div's default handling.
void SliderThumbElement::defaultEventHandler(Event* event)
{
    if (!event->isMouseEvent()) {
        HTMLDivElement::defaultEventHandler(event);
        return;
    }

    // FIXME: Should handle this readonly/disabled check in more general way.
    // Missing this kind of check is likely to occur elsewhere if adding it in each shadow element.
    HTMLInputElement* input = hostInput();
    if (!input || input->readOnly() || !input->isEnabledFormControl()) {
        stopDragging();
        HTMLDivElement::defaultEventHandler(event);
        return;
    }

    MouseEvent* mouseEvent = static_cast<MouseEvent*>(event);
    bool leftButtonPressed = mouseEvent->button() == LeftButton;
    const AtomicString& type = event->type();

    // We intentionally do not call event->setDefaultHandled() here because
    // MediaControlTimelineElement::defaultEventHandler() wants to handle these
    // mouse events.
    if (type == eventNames().mousedownEvent && leftButtonPressed) {
        startDragging();
        return;
    }
    if (type == eventNames().mouseupEvent && leftButtonPressed) {
        stopDragging();
        return;
    }
    if (type == eventNames().mousemoveEvent) {
        if (m_inDragMode)
            setPositionFromPoint(mouseEvent->absoluteLocation());
        return;
    }

    HTMLDivElement::defaultEventHandler(event);
}
// Mouse moves matter while dragging an editable, enabled slider; otherwise
// defer to the base class.
bool SliderThumbElement::willRespondToMouseMoveEvents()
{
    const HTMLInputElement* input = hostInput();
    bool dragging = m_inDragMode && input && !input->readOnly() && input->isEnabledFormControl();
    return dragging || HTMLDivElement::willRespondToMouseMoveEvents();
}
// Clicks matter whenever the host input is editable and enabled; otherwise
// defer to the base class.
bool SliderThumbElement::willRespondToMouseClickEvents()
{
    const HTMLInputElement* input = hostInput();
    bool editable = input && !input->readOnly() && input->isEnabledFormControl();
    return editable || HTMLDivElement::willRespondToMouseClickEvents();
}
// Releases mouse capture if the thumb is detached mid-drag, then detaches.
// Note m_inDragMode is intentionally left set; stopDragging() handles the
// full reset when a drag ends normally.
void SliderThumbElement::detach()
{
if (m_inDragMode) {
if (Frame* frame = document()->frame())
frame->eventHandler()->setCapturingMouseEventsNode(0);
}
HTMLDivElement::detach();
}
// Returns the <input> hosting this shadow element.
HTMLInputElement* SliderThumbElement::hostInput() const
{
// Only HTMLInputElement creates SliderThumbElement instances as its shadow nodes.
// So, shadowHost() must be an HTMLInputElement.
return shadowHost()->toInputElement();
}
// Pseudo element id for styling standard slider thumbs.
static const AtomicString& sliderThumbShadowPseudoId()
{
DEFINE_STATIC_LOCAL(const AtomicString, sliderThumb, ("-webkit-slider-thumb"));
return sliderThumb;
}
// Pseudo element id for styling media-control slider thumbs.
static const AtomicString& mediaSliderThumbShadowPseudoId()
{
DEFINE_STATIC_LOCAL(const AtomicString, mediaSliderThumb, ("-webkit-media-slider-thumb"));
return mediaSliderThumb;
}
// Returns the pseudo element id used to style this thumb: media sliders get
// the media-specific id, everything else the standard slider-thumb id.
const AtomicString& SliderThumbElement::shadowPseudoId() const
{
    HTMLInputElement* input = hostInput();
    // Also guard against a host without a renderer (e.g. display:none input);
    // dereferencing input->renderer() unchecked would crash, mirroring the
    // renderer checks done in setPositionFromPoint().
    if (!input || !input->renderer())
        return sliderThumbShadowPseudoId();

    RenderStyle* sliderStyle = input->renderer()->style();
    switch (sliderStyle->appearance()) {
    case MediaSliderPart:
    case MediaSliderThumbPart:
    case MediaVolumeSliderPart:
    case MediaVolumeSliderThumbPart:
    case MediaFullScreenVolumeSliderPart:
    case MediaFullScreenVolumeSliderThumbPart:
        return mediaSliderThumbShadowPseudoId();
    default:
        return sliderThumbShadowPseudoId();
    }
}
// --------------------------------
// The track limiter is a plain div in the slider's shadow tree.
inline TrackLimiterElement::TrackLimiterElement(Document* document)
: HTMLDivElement(HTMLNames::divTag, document)
{
}
// Creates the limiter: never painted (visibility:hidden) but still laid out
// in flow (position:static), so it reserves space without being visible.
PassRefPtr<TrackLimiterElement> TrackLimiterElement::create(Document* document)
{
RefPtr<TrackLimiterElement> element = adoptRef(new TrackLimiterElement(document));
element->setInlineStyleProperty(CSSPropertyVisibility, CSSValueHidden);
element->setInlineStyleProperty(CSSPropertyPosition, CSSValueStatic);
return element.release();
}
// The limiter shares RenderSliderThumb so it is sized exactly like the thumb.
RenderObject* TrackLimiterElement::createRenderer(RenderArena* arena, RenderStyle*)
{
return new (arena) RenderSliderThumb(this);
}
// Returns the same pseudo element id as the thumb, so the limiter picks up
// identical thumb styling (it exists only to reserve the thumb's space).
const AtomicString& TrackLimiterElement::shadowPseudoId() const
{
    HTMLInputElement* input = shadowHost()->toInputElement();
    // Also guard against a host without a renderer (e.g. display:none input);
    // dereferencing input->renderer() unchecked would crash.
    if (!input || !input->renderer())
        return sliderThumbShadowPseudoId();

    RenderStyle* sliderStyle = input->renderer()->style();
    switch (sliderStyle->appearance()) {
    case MediaSliderPart:
    case MediaSliderThumbPart:
    case MediaVolumeSliderPart:
    case MediaVolumeSliderThumbPart:
    case MediaFullScreenVolumeSliderPart:
    case MediaFullScreenVolumeSliderThumbPart:
        return mediaSliderThumbShadowPseudoId();
    default:
        return sliderThumbShadowPseudoId();
    }
}
// Walks the input's user-agent shadow tree to the track limiter: the last
// child of the container (the track is the first child).
TrackLimiterElement* trackLimiterElementOf(Node* node)
{
ASSERT(node);
ShadowRoot* shadow = node->toInputElement()->userAgentShadowRoot();
ASSERT(shadow);
Node* limiter = shadow->firstChild()->lastChild();
ASSERT(limiter);
return static_cast<TrackLimiterElement*>(limiter);
}
// --------------------------------
// Shadow container div that holds the slider's track and limiter.
inline SliderContainerElement::SliderContainerElement(Document* document)
: HTMLDivElement(HTMLNames::divTag, document)
{
}
// Factory returning a newly adopted container element.
PassRefPtr<SliderContainerElement> SliderContainerElement::create(Document* document)
{
return adoptRef(new SliderContainerElement(document));
}
// Uses the flexbox-based RenderSliderContainer defined above.
RenderObject* SliderContainerElement::createRenderer(RenderArena* arena, RenderStyle*)
{
return new (arena) RenderSliderContainer(this);
}
// Returns the pseudo element id used to style the slider container; media
// sliders get the media-specific id, everything else the standard one.
const AtomicString& SliderContainerElement::shadowPseudoId() const
{
    DEFINE_STATIC_LOCAL(const AtomicString, mediaSliderContainer, ("-webkit-media-slider-container"));
    DEFINE_STATIC_LOCAL(const AtomicString, sliderContainer, ("-webkit-slider-container"));

    HTMLInputElement* input = shadowHost()->toInputElement();
    // Also guard against a host without a renderer (e.g. display:none input);
    // dereferencing input->renderer() unchecked would crash.
    if (!input || !input->renderer())
        return sliderContainer;

    RenderStyle* sliderStyle = input->renderer()->style();
    switch (sliderStyle->appearance()) {
    case MediaSliderPart:
    case MediaSliderThumbPart:
    case MediaVolumeSliderPart:
    case MediaVolumeSliderThumbPart:
    case MediaFullScreenVolumeSliderPart:
    case MediaFullScreenVolumeSliderThumbPart:
        return mediaSliderContainer;
    default:
        return sliderContainer;
    }
}
}
|
{
"content_hash": "a54dbf462f955080e03dbb032e7e269c",
"timestamp": "",
"source": "github",
"line_count": 502,
"max_line_length": 134,
"avg_line_length": 34.04780876494024,
"alnum_prop": 0.6857009127076995,
"repo_name": "yoavweiss/RespImg-WebCore",
"id": "d6920a913f1dd744f7c4f8caa2f35bf7c0e99a5f",
"size": "18726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "html/shadow/SliderThumbElement.cpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "1301"
},
{
"name": "C",
"bytes": "2369715"
},
{
"name": "C++",
"bytes": "39064862"
},
{
"name": "JavaScript",
"bytes": "3763760"
},
{
"name": "Objective-C",
"bytes": "2038598"
},
{
"name": "Perl",
"bytes": "768866"
},
{
"name": "Prolog",
"bytes": "519"
},
{
"name": "Python",
"bytes": "210630"
},
{
"name": "Ruby",
"bytes": "1927"
},
{
"name": "Shell",
"bytes": "8214"
}
]
}
|
/* Forward declarations for the reentrant flex scanner API.
   yyscan_t is the opaque per-scanner state handle. */
typedef void* yyscan_t;
/* Create a scanner instance (with or without user-supplied extra data). */
int yylex_init(yyscan_t*);
/* NOTE(review): YYSTYPE must already be defined by the including parser
   header — confirm include order at call sites. */
YYSTYPE yyget_extra(yyscan_t scanner);
int yylex_init_extra(YYSTYPE, yyscan_t*);
/* Run and tear down the scanner. */
int yylex(yyscan_t);
int yylex_destroy(yyscan_t);
/* In-memory scan buffers for scanning strings instead of files. */
struct yy_buffer_state;
typedef struct yy_buffer_state* YY_BUFFER_STATE;
YY_BUFFER_STATE yy_scan_string(const char*, yyscan_t);
void yy_delete_buffer(YY_BUFFER_STATE, yyscan_t);
#endif
|
{
"content_hash": "492daa8bdc5982c143d403eaf7acf605",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 54,
"avg_line_length": 17.952380952380953,
"alnum_prop": 0.7374005305039788,
"repo_name": "y4n9squared/HEtest",
"id": "9389f42caea052ffc4d069c32f4d4a014f2b8a56",
"size": "2053",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hetest/cpp/baseline/common/flex-def.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "7645"
},
{
"name": "C++",
"bytes": "226646"
},
{
"name": "Lex",
"bytes": "5631"
},
{
"name": "Python",
"bytes": "329232"
},
{
"name": "Shell",
"bytes": "2428"
},
{
"name": "Yacc",
"bytes": "11612"
}
]
}
|
import { setOverlayHandlerOverride } from '../../../../resources/overlay_plugin_api';
import {
OverlayHandlerRequests,
OverlayHandlerResponses,
PluginCombatantState,
} from '../../../../types/event';
import AnalyzedEncounter from '../data/AnalyzedEncounter';
import LineEvent from '../data/network_log_converter/LineEvent';
import RaidEmulator from '../data/RaidEmulator';
/**
 * Overrides the overlay plugin's `getCombatants` handler so emulated
 * encounters answer combatant queries from recorded log data, keyed to the
 * emulator's current log time.
 */
export default class RaidEmulatorOverlayApiHook {
  currentLogTime = 0;
  connected = false;

  constructor(private emulator: RaidEmulator) {
    setOverlayHandlerOverride('getCombatants', this._getCombatantsOverride.bind(this));

    // Track the emulator's playback position; reset it on seeks and when the
    // active encounter changes, then follow each analyzed line's timestamp.
    emulator.on('tick', (currentLogTime: number) => {
      this.currentLogTime = currentLogTime;
    });
    emulator.on('preSeek', () => {
      this.currentLogTime = 0;
    });
    emulator.on('preCurrentEncounterChanged', (encounter: AnalyzedEncounter) => {
      this.currentLogTime = 0;
      encounter.on('analyzeLine', (log: LineEvent) => {
        this.currentLogTime = log.timestamp;
      });
    });
  }

  _getCombatantsOverride(
    msg: OverlayHandlerRequests['getCombatants'],
  ): OverlayHandlerResponses['getCombatants'] {
    return new Promise<{ combatants: PluginCombatantState[] }>((resolve) => {
      const encounter = this.emulator.currentEncounter;
      const tracker = encounter?.encounter.combatantTracker;
      if (!encounter || !tracker) {
        resolve({ combatants: [] });
        return;
      }

      const timestamp = this.currentLogTime;
      const wantedIds = msg.ids ?? [];
      const wantedNames = msg.names ?? [];
      const filterByIds = wantedIds.length > 0;
      const filterByNames = wantedNames.length > 0;
      const matches: PluginCombatantState[] = [];

      for (const [id, combatant] of Object.entries(tracker.combatants)) {
        // If this combatant didn't exist at this point, skip them
        const firstSeen = combatant.significantStates[0];
        const lastSeen = combatant.significantStates.slice(-1)[0];
        if (!firstSeen || !lastSeen)
          continue;
        if (firstSeen > timestamp || lastSeen < timestamp)
          continue;

        const idNum = parseInt(id, 16);
        // nextSignificantState is a bit inefficient but given that this isn't run every tick
        // we can afford to be a bit inefficient for readability's sake
        const state = combatant.nextSignificantState(timestamp).toPluginState(combatant);
        const unfiltered = !filterByIds && !filterByNames;
        if (
          unfiltered ||
          (filterByIds && wantedIds.includes(idNum)) ||
          (filterByNames && wantedNames.includes(combatant.name))
        )
          matches.push(state);
      }

      resolve({ combatants: matches });
    });
  }
}
|
{
"content_hash": "df78b4a6aaa0229fa5e69fa4b5423310",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 98,
"avg_line_length": 37.37837837837838,
"alnum_prop": 0.6565437454808387,
"repo_name": "quisquous/cactbot",
"id": "ee967aa97331de8be2a48bd9f2161785c18484dd",
"size": "2766",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "ui/raidboss/emulator/overrides/RaidEmulatorOverlayApiHook.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "158496"
},
{
"name": "CSS",
"bytes": "75185"
},
{
"name": "HTML",
"bytes": "34557"
},
{
"name": "JavaScript",
"bytes": "112053"
},
{
"name": "Python",
"bytes": "46029"
},
{
"name": "Shell",
"bytes": "490"
},
{
"name": "TypeScript",
"bytes": "7641667"
}
]
}
|
package test
import org.scalatest.matchers.{MatchResult, Matcher}
import scala.util.Try
/**
 * Extends the ScalaTest matchers so that some tests read more intuitively.
 * Matchers inspired by specs2.
 * @author Giovanni Silva
 * 22/09/15.
 */
trait CustomMatchers {
  /** Matches a `Try` that completed successfully. */
  object beSuccessfulTry extends Matcher[Try[Any]] {
    def apply(left: Try[Any]) = {
      // Fixed typo in the failure message: "succesful" -> "successful".
      MatchResult(left.isSuccess, s"$left is not a successful try", "The Try object is not a success")
    }
  }
  /** Matches a `Try` that failed. */
  object beFailedTry extends Matcher[Try[Any]] {
    def apply(left: Try[Any]) = {
      MatchResult(left.isFailure, s"$left is not a failure try", "The Try object must be a failure")
    }
  }
  /** Matches any defined `Option`. */
  object beSome extends Matcher[Option[Any]] {
    def apply(left: Option[Any]) = {
      MatchResult(left.isDefined, s"$left is not a Some", "The Option object is not a Some")
    }
  }
  /** Matches a defined `Option` holding exactly `value`; companion to the bare `beSome` above. */
  case class beSome(val value: Any) extends Matcher[Option[Any]] {
    def apply(left: Option[Any]) = {
      MatchResult(left.isDefined && left.get.equals(value), s"$left is not a Some($value)", "The Option object is not a Some")
    }
  }
  /** Matches `None`. */
  object beNone extends Matcher[Option[Any]] {
    def apply(left: Option[Any]) = {
      MatchResult(left.isEmpty, s"$left is not a None", "The Option object is not a None")
    }
  }
  /** Matches a `true` boolean. */
  object beTrue extends Matcher[Boolean] {
    def apply(left: Boolean) = {
      MatchResult(left, s"$left is not true", "The object should be true")
    }
  }
  /** Matches a `false` boolean. */
  object beFalse extends Matcher[Boolean] {
    def apply(left: Boolean) = {
      MatchResult(!left, s"$left is not false", "The object should be false")
    }
  }
  //  case class allPasses (test: Any => Boolean) extends Matcher[Seq[Any]] {
  //    override def apply(left: Seq[Any]): MatchResult = {
  //      MatchResult(left.forall(test), "The collection Does not pass the test", "At least one element of the collection " +
  //        "does not have pass the test")
  //    }
  //  }
}
|
{
"content_hash": "83795cd74df310aa60eb435f47255b0c",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 126,
"avg_line_length": 32.83050847457627,
"alnum_prop": 0.6566855962829117,
"repo_name": "giovannicandido/slush-spring-aurelia",
"id": "5782a8cfeb127d1e0f5638d5e0a6d31a4d7eb3f4",
"size": "1937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "templates/server/src/test/scala/test/CustomMatchers.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "227147"
},
{
"name": "HTML",
"bytes": "4315"
},
{
"name": "Java",
"bytes": "17405"
},
{
"name": "JavaScript",
"bytes": "35056"
},
{
"name": "Scala",
"bytes": "13942"
},
{
"name": "Shell",
"bytes": "450"
},
{
"name": "TypeScript",
"bytes": "12145"
}
]
}
|
.class final Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;
.super Landroid/support/v4/app/SharedElementCallback;
.source "DetailsOverviewSharedElementHelper.java"
# annotations
.annotation system Ldalvik/annotation/MemberClasses;
value = {
Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper$TransitionTimeOutRunnable;
}
.end annotation
# instance fields
.field mActivityToRunTransition:Landroid/app/Activity;
.field mRightPanelHeight:I
.field mRightPanelWidth:I
.field private mSavedMatrix:Landroid/graphics/Matrix;
.field private mSavedScaleType:Landroid/widget/ImageView$ScaleType;
.field mSharedElementName:Ljava/lang/String;
.field mStartedPostpone:Z
.field mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
# direct methods
# Default constructor: simply chains to SharedElementCallback's constructor.
.method constructor <init>()V
.locals 0
invoke-direct {p0}, Landroid/support/v4/app/SharedElementCallback;-><init>()V
return-void
.end method
# Copies the scale type (and, when the type is MATRIX, the image matrix too) from
# the shared-element snapshot ImageView (p1) onto the ViewHolder's mImageView,
# then re-measures/re-lays-out that ImageView via updateImageViewAfterScaleTypeChange.
# Callers guard this with hasImageViewScaleChange(), so the cast below is safe.
.method private changeImageViewScale(Landroid/view/View;)V
.locals 4
move-object v1, p1
check-cast v1, Landroid/widget/ImageView;
iget-object v2, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v0, v2, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mImageView:Landroid/widget/ImageView;
invoke-virtual {v1}, Landroid/widget/ImageView;->getScaleType()Landroid/widget/ImageView$ScaleType;
move-result-object v2
invoke-virtual {v0, v2}, Landroid/widget/ImageView;->setScaleType(Landroid/widget/ImageView$ScaleType;)V
invoke-virtual {v1}, Landroid/widget/ImageView;->getScaleType()Landroid/widget/ImageView$ScaleType;
move-result-object v2
sget-object v3, Landroid/widget/ImageView$ScaleType;->MATRIX:Landroid/widget/ImageView$ScaleType;
# Only a MATRIX scale type needs the explicit matrix copied as well.
if-ne v2, v3, :cond_0
invoke-virtual {v1}, Landroid/widget/ImageView;->getImageMatrix()Landroid/graphics/Matrix;
move-result-object v2
invoke-virtual {v0, v2}, Landroid/widget/ImageView;->setImageMatrix(Landroid/graphics/Matrix;)V
:cond_0
invoke-static {v0}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->updateImageViewAfterScaleTypeChange(Landroid/widget/ImageView;)V
return-void
.end method
# Returns true when the shared-element snapshot view is an ImageView,
# i.e. when a scale-type save/copy is applicable.
.method private hasImageViewScaleChange(Landroid/view/View;)Z
.locals 1
instance-of v0, p1, Landroid/widget/ImageView;
return v0
.end method
# Restores the scale type (and, for MATRIX, the matrix) that saveImageViewScale()
# stashed on the ViewHolder's mImageView, clears the saved state (mSavedScaleType
# is reset to null so the save/restore pair can run again), and re-lays-out the view.
# No-op when nothing was saved (mSavedScaleType == null).
.method private restoreImageViewScale()V
.locals 4
const/4 v3, 0x0
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedScaleType:Landroid/widget/ImageView$ScaleType;
if-eqz v1, :cond_1
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v0, v1, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mImageView:Landroid/widget/ImageView;
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedScaleType:Landroid/widget/ImageView$ScaleType;
invoke-virtual {v0, v1}, Landroid/widget/ImageView;->setScaleType(Landroid/widget/ImageView$ScaleType;)V
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedScaleType:Landroid/widget/ImageView$ScaleType;
sget-object v2, Landroid/widget/ImageView$ScaleType;->MATRIX:Landroid/widget/ImageView$ScaleType;
if-ne v1, v2, :cond_0
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedMatrix:Landroid/graphics/Matrix;
invoke-virtual {v0, v1}, Landroid/widget/ImageView;->setImageMatrix(Landroid/graphics/Matrix;)V
:cond_0
iput-object v3, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedScaleType:Landroid/widget/ImageView$ScaleType;
invoke-static {v0}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->updateImageViewAfterScaleTypeChange(Landroid/widget/ImageView;)V
:cond_1
return-void
.end method
# Saves mImageView's current scale type into mSavedScaleType and, when that type is
# MATRIX, also saves a matrix into mSavedMatrix (otherwise mSavedMatrix is set null).
# No-op if a save is already pending (mSavedScaleType != null).
# NOTE(review): this saves View;->getMatrix() (the view transform), while
# restoreImageViewScale() feeds the saved value to setImageMatrix() and
# changeImageViewScale() reads getImageMatrix() — looks asymmetric; verify
# against the upstream leanback source before relying on the MATRIX path.
.method private saveImageViewScale()V
.locals 4
const/4 v1, 0x0
iget-object v2, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedScaleType:Landroid/widget/ImageView$ScaleType;
if-nez v2, :cond_1
iget-object v2, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v0, v2, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mImageView:Landroid/widget/ImageView;
invoke-virtual {v0}, Landroid/widget/ImageView;->getScaleType()Landroid/widget/ImageView$ScaleType;
move-result-object v2
iput-object v2, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedScaleType:Landroid/widget/ImageView$ScaleType;
iget-object v2, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedScaleType:Landroid/widget/ImageView$ScaleType;
sget-object v3, Landroid/widget/ImageView$ScaleType;->MATRIX:Landroid/widget/ImageView$ScaleType;
if-ne v2, v3, :cond_0
invoke-virtual {v0}, Landroid/widget/ImageView;->getMatrix()Landroid/graphics/Matrix;
move-result-object v1
:cond_0
iput-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mSavedMatrix:Landroid/graphics/Matrix;
:cond_1
return-void
.end method
# Forces a re-measure (with EXACTLY specs at the current measured size) and a
# re-layout (at the current bounds) so a scale-type/matrix change takes effect
# immediately, outside a normal layout pass.
.method private static updateImageViewAfterScaleTypeChange(Landroid/widget/ImageView;)V
.locals 4
# 0x40000000 is the MeasureSpec.EXACTLY mode bit (also 2.0f when read as float).
const/high16 v2, 0x40000000    # 2.0f
invoke-virtual {p0}, Landroid/widget/ImageView;->getMeasuredWidth()I
move-result v0
invoke-static {v0, v2}, Landroid/view/View$MeasureSpec;->makeMeasureSpec(II)I
move-result v0
invoke-virtual {p0}, Landroid/widget/ImageView;->getMeasuredHeight()I
move-result v1
invoke-static {v1, v2}, Landroid/view/View$MeasureSpec;->makeMeasureSpec(II)I
move-result v1
invoke-virtual {p0, v0, v1}, Landroid/widget/ImageView;->measure(II)V
# Re-layout in place: same left/top/right/bottom as before.
invoke-virtual {p0}, Landroid/widget/ImageView;->getLeft()I
move-result v0
invoke-virtual {p0}, Landroid/widget/ImageView;->getTop()I
move-result v1
invoke-virtual {p0}, Landroid/widget/ImageView;->getRight()I
move-result v2
invoke-virtual {p0}, Landroid/widget/ImageView;->getBottom()I
move-result v3
invoke-virtual {p0, v0, v1, v2, v3}, Landroid/widget/ImageView;->layout(IIII)V
return-void
.end method
# virtual methods
# Binds this helper to a new ViewHolder: clears the transition name on the previous
# holder's overview frame (if any), stores the new holder, then attaches an
# OnLayoutChangeListener ($1) and posts a Runnable ($2) on the right panel.
# (The behavior of the $1/$2 inner classes is defined elsewhere — not visible here.)
.method onBindToDrawable(Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;)V
.locals 2
const/4 v1, 0x0
iget-object v0, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
if-eqz v0, :cond_0
iget-object v0, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v0, v0, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mOverviewFrame:Landroid/widget/FrameLayout;
# v1 is null here: resets the previous frame's transition name.
invoke-static {v0, v1}, Landroid/support/v4/view/ViewCompat;->setTransitionName(Landroid/view/View;Ljava/lang/String;)V
:cond_0
iput-object p1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v0, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v0, v0, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mRightPanel:Landroid/view/ViewGroup;
new-instance v1, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper$1;
invoke-direct {v1, p0}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper$1;-><init>(Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;)V
invoke-virtual {v0, v1}, Landroid/view/ViewGroup;->addOnLayoutChangeListener(Landroid/view/View$OnLayoutChangeListener;)V
iget-object v0, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v0, v0, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mRightPanel:Landroid/view/ViewGroup;
new-instance v1, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper$2;
invoke-direct {v1, p0}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper$2;-><init>(Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;)V
invoke-virtual {v0, v1}, Landroid/view/ViewGroup;->postOnAnimation(Ljava/lang/Runnable;)V
return-void
.end method
# SharedElementCallback hook, signature (List<String>, List<View>, List<View>) -> void.
# Runs when the shared-element transition ends. Bails out unless the first shared
# element is this helper's overview frame; otherwise restores the saved image scale
# and makes the actions row and description frame visible again (setVisibility(0)
# == View.VISIBLE). The focusability toggle 0x20000 -> 0x40000 corresponds to
# ViewGroup FOCUS_BEFORE_DESCENDANTS then FOCUS_AFTER_DESCENDANTS — presumably to
# force focus re-evaluation; verify against the upstream leanback source.
.method public onSharedElementEnd(Ljava/util/List;Ljava/util/List;Ljava/util/List;)V
.locals 4
.annotation system Ldalvik/annotation/Signature;
value = {
"(",
"Ljava/util/List",
"<",
"Ljava/lang/String;",
">;",
"Ljava/util/List",
"<",
"Landroid/view/View;",
">;",
"Ljava/util/List",
"<",
"Landroid/view/View;",
">;)V"
}
.end annotation
const/4 v3, 0x0
# Nothing to do when there are no shared elements.
invoke-interface {p2}, Ljava/util/List;->size()I
move-result v1
const/4 v2, 0x1
if-ge v1, v2, :cond_0
return-void
:cond_0
invoke-interface {p2, v3}, Ljava/util/List;->get(I)Ljava/lang/Object;
move-result-object v0
check-cast v0, Landroid/view/View;
# Only react when the shared element is our own overview frame.
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
if-eqz v1, :cond_1
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v1, v1, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mOverviewFrame:Landroid/widget/FrameLayout;
if-eq v1, v0, :cond_2
:cond_1
return-void
:cond_2
invoke-direct {p0}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->restoreImageViewScale()V
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v1, v1, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mActionsRow:Landroid/support/v17/leanback/widget/HorizontalGridView;
const/high16 v2, 0x20000
invoke-virtual {v1, v2}, Landroid/support/v17/leanback/widget/HorizontalGridView;->setDescendantFocusability(I)V
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v1, v1, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mActionsRow:Landroid/support/v17/leanback/widget/HorizontalGridView;
invoke-virtual {v1, v3}, Landroid/support/v17/leanback/widget/HorizontalGridView;->setVisibility(I)V
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v1, v1, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mActionsRow:Landroid/support/v17/leanback/widget/HorizontalGridView;
const/high16 v2, 0x40000
invoke-virtual {v1, v2}, Landroid/support/v17/leanback/widget/HorizontalGridView;->setDescendantFocusability(I)V
iget-object v1, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v1, v1, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mDetailsDescriptionFrame:Landroid/widget/FrameLayout;
invoke-virtual {v1, v3}, Landroid/widget/FrameLayout;->setVisibility(I)V
return-void
.end method
# SharedElementCallback hook, signature (List<String>, List<View>, List<View>) -> void.
# Runs when the shared-element transition starts. Bails out unless the first shared
# element (p2[0]) is this helper's overview frame; otherwise:
#   1. if the snapshot view (p3[0]) is an ImageView, saves the current image scale
#      and copies the snapshot's scale onto mImageView;
#   2. measures/lays out mImageView to the shared element's width/height;
#   3. positions mRightPanel next to the image — at the saved mRightPanelWidth/
#      mRightPanelHeight when both are nonzero, else just shifted horizontally;
#   4. hides the actions row and description frame (setVisibility(4) == View.INVISIBLE).
.method public onSharedElementStart(Ljava/util/List;Ljava/util/List;Ljava/util/List;)V
.locals 11
.annotation system Ldalvik/annotation/Signature;
value = {
"(",
"Ljava/util/List",
"<",
"Ljava/lang/String;",
">;",
"Ljava/util/List",
"<",
"Landroid/view/View;",
">;",
"Ljava/util/List",
"<",
"Landroid/view/View;",
">;)V"
}
.end annotation
# v10 = View.INVISIBLE, v9 = MeasureSpec.EXACTLY, v8 = 0.
const/4 v10, 0x4
const/high16 v9, 0x40000000    # 2.0f
const/4 v8, 0x0
invoke-interface {p2}, Ljava/util/List;->size()I
move-result v6
const/4 v7, 0x1
if-ge v6, v7, :cond_0
return-void
:cond_0
invoke-interface {p2, v8}, Ljava/util/List;->get(I)Ljava/lang/Object;
move-result-object v2
check-cast v2, Landroid/view/View;
# Only react when the shared element is our own overview frame.
iget-object v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
if-eqz v6, :cond_1
iget-object v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v6, v6, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mOverviewFrame:Landroid/widget/FrameLayout;
if-eq v6, v2, :cond_2
:cond_1
return-void
:cond_2
# v4 = snapshot view from the third list; copy its image scale if it's an ImageView.
invoke-interface {p3, v8}, Ljava/util/List;->get(I)Ljava/lang/Object;
move-result-object v4
check-cast v4, Landroid/view/View;
invoke-direct {p0, v4}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->hasImageViewScaleChange(Landroid/view/View;)Z
move-result v6
if-eqz v6, :cond_3
invoke-direct {p0}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->saveImageViewScale()V
invoke-direct {p0, v4}, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->changeImageViewScale(Landroid/view/View;)V
:cond_3
# Measure/layout mImageView to exactly the shared element's size (v5 x v0) at (0,0).
iget-object v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v1, v6, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mImageView:Landroid/widget/ImageView;
invoke-virtual {v2}, Landroid/view/View;->getWidth()I
move-result v5
invoke-virtual {v2}, Landroid/view/View;->getHeight()I
move-result v0
invoke-static {v5, v9}, Landroid/view/View$MeasureSpec;->makeMeasureSpec(II)I
move-result v6
invoke-static {v0, v9}, Landroid/view/View$MeasureSpec;->makeMeasureSpec(II)I
move-result v7
invoke-virtual {v1, v6, v7}, Landroid/view/View;->measure(II)V
invoke-virtual {v1, v8, v8, v5, v0}, Landroid/view/View;->layout(IIII)V
# Position the right panel: explicit size path when both saved dimensions are set.
iget-object v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v3, v6, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mRightPanel:Landroid/view/ViewGroup;
iget v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mRightPanelWidth:I
if-eqz v6, :cond_4
iget v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mRightPanelHeight:I
if-eqz v6, :cond_4
iget v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mRightPanelWidth:I
invoke-static {v6, v9}, Landroid/view/View$MeasureSpec;->makeMeasureSpec(II)I
move-result v6
iget v7, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mRightPanelHeight:I
invoke-static {v7, v9}, Landroid/view/View$MeasureSpec;->makeMeasureSpec(II)I
move-result v7
invoke-virtual {v3, v6, v7}, Landroid/view/View;->measure(II)V
# layout(left=imageWidth, top=getTop(), right=imageWidth+panelWidth, bottom=getTop()+panelHeight)
invoke-virtual {v3}, Landroid/view/View;->getTop()I
move-result v6
iget v7, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mRightPanelWidth:I
add-int/2addr v7, v5
invoke-virtual {v3}, Landroid/view/View;->getTop()I
move-result v8
iget v9, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mRightPanelHeight:I
add-int/2addr v8, v9
invoke-virtual {v3, v5, v6, v7, v8}, Landroid/view/View;->layout(IIII)V
:goto_0
# Common tail: hide actions row and description frame during the transition.
iget-object v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v6, v6, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mActionsRow:Landroid/support/v17/leanback/widget/HorizontalGridView;
invoke-virtual {v6, v10}, Landroid/support/v17/leanback/widget/HorizontalGridView;->setVisibility(I)V
iget-object v6, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mViewHolder:Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;
iget-object v6, v6, Landroid/support/v17/leanback/widget/DetailsOverviewRowPresenter$ViewHolder;->mDetailsDescriptionFrame:Landroid/widget/FrameLayout;
invoke-virtual {v6, v10}, Landroid/widget/FrameLayout;->setVisibility(I)V
return-void
:cond_4
# No saved panel size: just shift the panel so its left edge sits at the image width.
invoke-virtual {v3}, Landroid/view/View;->getLeft()I
move-result v6
sub-int v6, v5, v6
invoke-virtual {v3, v6}, Landroid/view/View;->offsetLeftAndRight(I)V
goto :goto_0
.end method
# Starts the Activity's postponed enter transition exactly once; mStartedPostpone
# guards against duplicate calls.
.method startPostponedEnterTransition()V
.locals 1
iget-boolean v0, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mStartedPostpone:Z
if-nez v0, :cond_0
iget-object v0, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mActivityToRunTransition:Landroid/app/Activity;
invoke-static {v0}, Landroid/support/v4/app/ActivityCompat;->startPostponedEnterTransition(Landroid/app/Activity;)V
const/4 v0, 0x1
iput-boolean v0, p0, Landroid/support/v17/leanback/widget/DetailsOverviewSharedElementHelper;->mStartedPostpone:Z
:cond_0
return-void
.end method
|
{
"content_hash": "3b9007c9905d0ca510519db41e34ac8a",
"timestamp": "",
"source": "github",
"line_count": 517,
"max_line_length": 186,
"avg_line_length": 37.129593810444874,
"alnum_prop": 0.7603667430714732,
"repo_name": "BatMan-Rom/ModdedFiles",
"id": "ae97770364741d6a272034a76b8552b56da492b9",
"size": "19196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SystemUI/smali/android/support/v17/leanback/widget/DetailsOverviewSharedElementHelper.smali",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "GLSL",
"bytes": "15069"
},
{
"name": "HTML",
"bytes": "139176"
},
{
"name": "Smali",
"bytes": "541934400"
}
]
}
|
layout: story
title: SPIKE Move Story Across Kanban
date: 2016-03-16 13:04
acs:
- determine a technology on which to base Kanban board dragging
- determine a mechanism that facilitates the user moving Stories across the board
assigned:
- Luther Baker
- John Doe
scenarios:
- As a developer, I would like a simple way to move Stories across the Kanban board.
usecases:
---
Kanban boards help track Story progress by moving stories across the different columns of the board. Such functionality might be implemented as drag-and-drop, or it might more literally be _advance_ or _retreat_ a story. Take some time to explore a few different options and determine which method is less risky — from both a user standpoint and a technical one.
|
{
"content_hash": "e388203ce8c04bc00756b9e6bcc72c7c",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 360,
"avg_line_length": 37.25,
"alnum_prop": 0.7812080536912752,
"repo_name": "FuzzyBearings/Kanban-Tool",
"id": "f40d239f716037d8fa54fe284fa3e21549523a6d",
"size": "749",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "kanban/doing/_posts/2016-03-16-spike-move-story-across-kanban.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12969"
},
{
"name": "HTML",
"bytes": "10121"
},
{
"name": "Shell",
"bytes": "37"
}
]
}
|
using System;
using System.ComponentModel;
using System.Linq;
using Nuke.Common.Tooling;
[TypeConverter(typeof(TypeConverter<Configuration>))]
public class Configuration : Enumeration
{
    // The two well-known build configurations; each one's Value is its own name.
    public static Configuration Debug = new Configuration { Value = nameof(Debug) };
    public static Configuration Release = new Configuration { Value = nameof(Release) };

    // Lets a Configuration be passed anywhere a plain string is expected
    // (e.g. as an MSBuild /p:Configuration= property value).
    public static implicit operator string(Configuration configuration) => configuration.Value;
}
|
{
"content_hash": "b0a4e6fccb5682968b51018540476798",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 88,
"avg_line_length": 29.529411764705884,
"alnum_prop": 0.7270916334661355,
"repo_name": "martin211/aimp_dotnet",
"id": "2a9faba62b816c8febe24c8eec96eb820adbc972",
"size": "775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/Configuration.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "214"
},
{
"name": "C",
"bytes": "21484"
},
{
"name": "C#",
"bytes": "904968"
},
{
"name": "C++",
"bytes": "1245187"
},
{
"name": "CSS",
"bytes": "18676"
},
{
"name": "JavaScript",
"bytes": "42990"
},
{
"name": "Liquid",
"bytes": "2760"
},
{
"name": "PowerShell",
"bytes": "5649"
},
{
"name": "Shell",
"bytes": "2342"
},
{
"name": "XSLT",
"bytes": "2166"
}
]
}
|
class HighLightsController < ApplicationController
  #----> Controller is used by Ajax and does not totally respond to the Rails 4.2 convention.
  before_action :auth
  before_action :set_high_light, only: [:show, :edit, :update, :destroy]

  # GET /high_lights
  # GET /high_lights.json
  # JSON responses bundle every event together with the current user's highlights.
  #-----> Can't touch this! Too many things involved. //Niklas
  def index
    @high_lights = HighLight.all
    highlights = HighLight.where('user_id' => session[:user_id])
    events = Event.all
    json = {:events => events, :highlights => highlights }.to_json
    respond_to do |format|
      format.html { }
      format.json { render :json => json }
    end
  end

  # GET /high_lights/1
  # GET /high_lights/1.json
  def show
  end

  # GET /high_lights/new
  def new
    @high_light = HighLight.new
  end

  # GET /high_lights/1/edit
  def edit
  end

  # POST /high_lights
  # POST /high_lights.json
  # On success also prepares @week/@color/@high_lights, presumably for the
  # create.js view — verify before removing.
  def create
    @high_light = HighLight.new(high_light_params)
    respond_to do |format|
      if @high_light.save
        @week = @high_light.week
        # Index 0 is a placeholder; color is assumed to be 1..3 — TODO confirm against the model.
        colors = ['', 'red', 'yellow', 'green']
        @color = colors[@high_light.color]
        @high_lights = HighLight.where('id' => @high_light.id)
        format.html { redirect_to @high_light, notice: 'Veckobelastning har skapats!' }
        format.json { render action: 'show', status: :created, location: @high_light }
        format.js
      else
        format.html { render action: 'new' }
        format.json { render json: @high_light.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /high_lights/1
  # PATCH/PUT /high_lights/1.json
  def update
    # @high_light itself is loaded by the before_action; this one-element relation
    # is presumably consumed by the update.js view — verify before removing.
    @high_lights = HighLight.where('id' => params[:id])
    respond_to do |format|
      if @high_light.update(high_light_params)
        format.html { redirect_to @high_light, notice: 'Veckobelastning har uppdaterats!' }
        format.json { head :no_content }
        format.js
      else
        format.html { render action: 'edit' }
        format.json { render json: @high_light.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /high_lights/1
  # DELETE /high_lights/1.json
  def destroy
    # Serialize before destroying so the JSON response can still describe the record.
    output = @high_light.to_json
    @high_light.destroy
    respond_to do |format|
      format.html { redirect_to high_lights_url, notice: 'Veckobelastning har tagits bort!' }
      format.json { render :json => output }
      format.js
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_high_light
    @high_light = HighLight.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def high_light_params
    params.require(:high_light).permit(:week, :year, :color, :comment, :user_id)
  end

  # Checks your permissions: redirects to the start page when no user is logged in.
  # (Replaces the original empty then-branch `if session[:user_id] != nil ... else redirect`
  # with an equivalent guard on nil.)
  def auth
    if session[:user_id].nil?
      redirect_to root_url, notice: "Du måste logga in med ditt google konto för att använda applikationen!"
    end
  end
end
#--> Review by Niklas 10:30 27/3-14
|
{
"content_hash": "e25175bf63064f8564cd657630b15e3f",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 108,
"avg_line_length": 29.27102803738318,
"alnum_prop": 0.617816091954023,
"repo_name": "Ice-A-Slice/kvalitetshjulet",
"id": "398413a62741b3ffbf9c198825d218e1d277d786",
"size": "3135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/controllers/high_lights_controller.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "33126"
},
{
"name": "CSS",
"bytes": "731438"
},
{
"name": "CoffeeScript",
"bytes": "1997"
},
{
"name": "HTML",
"bytes": "96550"
},
{
"name": "JavaScript",
"bytes": "3839223"
},
{
"name": "Ruby",
"bytes": "114601"
}
]
}
|
using System;
using System.Collections.Generic;
using System.Threading;
namespace Apache.Geode.Client.UnitTests
{
using NUnit.Framework;
using Apache.Geode.DUnitFramework;
using Apache.Geode.Client.Tests;
using Apache.Geode.Client;
using QueryStatics = Apache.Geode.Client.Tests.QueryStatics;
using QueryCategory = Apache.Geode.Client.Tests.QueryCategory;
using QueryStrings = Apache.Geode.Client.Tests.QueryStrings;
// Integration tests: remote OQL queries must keep returning correct results while
// the cache server they started against is killed and a second server takes over.
[TestFixture]
[Category("group1")]
[Category("unicast_only")]
[Category("generics")]
public class ThinClientRemoteQueryFailoverTests : ThinClientRegionSteps
{
#region Private members
// Two client processes driven by the harness; only m_client1 issues the queries,
// m_client2 is initialized/closed alongside it by the shared scaffolding.
private UnitProcess m_client1;
private UnitProcess m_client2;
// Only QueryRegionNames[0] ("Portfolios") is used by these failover steps.
private static string[] QueryRegionNames = { "Portfolios", "Positions", "Portfolios2",
"Portfolios3" };
#endregion
// Supplies the client processes to the base-class test harness.
protected override ClientBase[] GetClients()
{
m_client1 = new UnitProcess();
m_client2 = new UnitProcess();
return new ClientBase[] { m_client1, m_client2 };
}
[TestFixtureSetUp]
public override void InitTests()
{
base.InitTests();
}
// Per-test teardown: close both clients, then stop any remaining Java cache servers.
[TearDown]
public override void EndTest()
{
m_client1.Call(Close);
m_client2.Call(Close);
CacheHelper.StopJavaServers();
base.EndTest();
}
[SetUp]
public override void InitTest()
{
m_client1.Call(InitClient);
m_client2.Call(InitClient);
}
#region Functions invoked by the tests
// Initializes the cache and registers both the classic serializable and the PDX
// variants of the Portfolio/Position test domain types.
public void InitClient()
{
CacheHelper.Init();
Serializable.RegisterTypeGeneric(Portfolio.CreateDeserializable, CacheHelper.DCache);
Serializable.RegisterTypeGeneric(Position.CreateDeserializable, CacheHelper.DCache);
Serializable.RegisterPdxType(Apache.Geode.Client.Tests.PortfolioPdx.CreateDeserializable);
Serializable.RegisterPdxType(Apache.Geode.Client.Tests.PositionPdx.CreateDeserializable);
}
// Stops cache server 1 — invoked asynchronously mid-query-loop to force failover.
public void KillServer()
{
CacheHelper.StopJavaServer(1);
Util.Log("Cacheserver 1 stopped.");
}
public delegate void KillServerDelegate();
// Step 1: start locator + server 1, create the Portfolios region and seed four
// entries (plain Portfolio or PortfolioPdx depending on isPdx).
public void StepOneFailover(bool isPdx)
{
m_isPdx = isPdx;
// This is here so that Client1 registers information of the cacheserver
// that has been already started
CacheHelper.SetupJavaServers(true,
"cacheserver_remoteoqlN.xml",
"cacheserver_remoteoql2N.xml");
CacheHelper.StartJavaLocator(1, "GFELOC");
Util.Log("Locator started");
CacheHelper.StartJavaServerWithLocators(1, "GFECS1", 1);
Util.Log("Cacheserver 1 started.");
CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[0], true, true, null,
CacheHelper.Locators, "__TESTPOOL1_", true);
IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(QueryRegionNames[0]);
if (!m_isPdx)
{
Portfolio p1 = new Portfolio(1, 100);
Portfolio p2 = new Portfolio(2, 200);
Portfolio p3 = new Portfolio(3, 300);
Portfolio p4 = new Portfolio(4, 400);
region["1"] = p1;
region["2"] = p2;
region["3"] = p3;
region["4"] = p4;
}
else
{
PortfolioPdx p1 = new PortfolioPdx(1, 100);
PortfolioPdx p2 = new PortfolioPdx(2, 200);
PortfolioPdx p3 = new PortfolioPdx(3, 300);
PortfolioPdx p4 = new PortfolioPdx(4, 400);
region["1"] = p1;
region["2"] = p2;
region["3"] = p3;
region["4"] = p4;
}
}
// Step 2: start server 2, then run 10000 queries; at iteration 10 kill server 1 on a
// background delegate thread so later iterations exercise failover. Every query must
// keep returning exactly the 4 seeded entries.
public void StepTwoFailover()
{
CacheHelper.StartJavaServerWithLocators(2, "GFECS2", 1);
Util.Log("Cacheserver 2 started.");
IAsyncResult killRes = null;
KillServerDelegate ksd = new KillServerDelegate(KillServer);
var qs = CacheHelper.DCache.GetPoolManager().Find("__TESTPOOL1_").GetQueryService();
for (int i = 0; i < 10000; i++)
{
Query<object> qry = qs.NewQuery<object>("select distinct * from /" + QueryRegionNames[0]);
ISelectResults<object> results = qry.Execute();
if (i == 10)
{
// BeginInvoke kills the server asynchronously so queries keep flowing during the outage.
killRes = ksd.BeginInvoke(null, null);
}
Int32 resultSize = results.Size;
if (i % 100 == 0)
{
Util.Log("Iteration upto {0} done, result size is {1}", i, resultSize);
}
Assert.AreEqual(4, resultSize, "Result size is not 4!");
}
// Wait for the asynchronous kill to finish before returning.
killRes.AsyncWaitHandle.WaitOne();
ksd.EndInvoke(killRes);
}
#endregion
// Shared driver for both [Test] entry points; the finally block guarantees the
// servers and the locator are stopped even when a step fails.
void runRemoteQueryFailover()
{
try
{
m_client1.Call(StepOneFailover, m_isPdx);
Util.Log("StepOneFailover complete.");
m_client1.Call(StepTwoFailover);
Util.Log("StepTwoFailover complete.");
m_client1.Call(Close);
Util.Log("Client closed");
}
finally
{
m_client1.Call(CacheHelper.StopJavaServers);
m_client1.Call(CacheHelper.StopJavaLocator, 1);
}
}
// Selects between PDX and classic serialization for the seeded data.
static bool m_isPdx = false;
[Test]
public void RemoteQueryFailoverWithPdx()
{
m_isPdx = true;
runRemoteQueryFailover();
}
[Test]
public void RemoteQueryFailoverWithoutPdx()
{
m_isPdx = false;
runRemoteQueryFailover();
}
}
}
|
{
"content_hash": "71019dbb7df04960baffba3ed59dc1bc",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 104,
"avg_line_length": 26.48989898989899,
"alnum_prop": 0.6326024785510009,
"repo_name": "mhansonp/geode-native",
"id": "0e56440fe878772403c36fe1f5cd015fbbce61f1",
"size": "6047",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "clicache/integration-test/ThinClientRemoteQueryFailoverTests.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1899"
},
{
"name": "C#",
"bytes": "3515617"
},
{
"name": "C++",
"bytes": "10771399"
},
{
"name": "CMake",
"bytes": "107196"
},
{
"name": "GAP",
"bytes": "73860"
},
{
"name": "Java",
"bytes": "408387"
},
{
"name": "Perl",
"bytes": "2704"
},
{
"name": "PowerShell",
"bytes": "20450"
},
{
"name": "Shell",
"bytes": "35505"
}
]
}
|
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Hello
  # Top-level application definition for the Hello app.
  class Application < Rails::Application
    # Opt in to the configuration defaults that shipped with Rails 5.2.
    config.load_defaults 5.2

    # Per-environment files under config/environments/* take precedence over
    # anything set here; application-level configuration can also live in
    # config/initializers — every .rb file there is loaded automatically after
    # the framework and the application's gems.
  end
end
|
{
"content_hash": "a778dd84dab245f3ec01123a1793f6de",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 82,
"avg_line_length": 34.526315789473685,
"alnum_prop": 0.760670731707317,
"repo_name": "seansu4you87/kupo",
"id": "e699b90ff53d6d68614e37a51cf8478e64f2d319",
"size": "656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sandbox/rust/legate/docker/hello/config/application.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "17860"
},
{
"name": "C++",
"bytes": "30440"
},
{
"name": "CMake",
"bytes": "17065"
},
{
"name": "CSS",
"bytes": "3434"
},
{
"name": "CoffeeScript",
"bytes": "211"
},
{
"name": "DIGITAL Command Language",
"bytes": "456078"
},
{
"name": "Dockerfile",
"bytes": "5182"
},
{
"name": "Elixir",
"bytes": "89781"
},
{
"name": "Erlang",
"bytes": "261"
},
{
"name": "HTML",
"bytes": "7438591"
},
{
"name": "Java",
"bytes": "17152"
},
{
"name": "JavaScript",
"bytes": "14345"
},
{
"name": "Jupyter Notebook",
"bytes": "29349148"
},
{
"name": "Kotlin",
"bytes": "4055"
},
{
"name": "Makefile",
"bytes": "14878"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Objective-C",
"bytes": "50622"
},
{
"name": "PLpgSQL",
"bytes": "1145"
},
{
"name": "Python",
"bytes": "1480193"
},
{
"name": "RenderScript",
"bytes": "1386"
},
{
"name": "Ruby",
"bytes": "160524"
},
{
"name": "Rust",
"bytes": "377469"
},
{
"name": "Shell",
"bytes": "6618"
},
{
"name": "Swift",
"bytes": "24524"
},
{
"name": "TSQL",
"bytes": "328"
}
]
}
|
[T4Scaffolding.Scaffolder(Description = "SpongeBob.Interfaces - Adds the generic interfaces for data and services")][CmdletBinding()]
param(
    [string]$Project,
    [string]$CodeLanguage,
    [string[]]$TemplateFolders,
    [switch]$Force = $false
)

##############################################################
# NAMESPACE
##############################################################
# Derive the root namespace by stripping the last dotted segment from the
# project's default namespace (e.g. "Foo.Web" -> "Foo"). When there is no
# dot, the default namespace is used as-is.
$namespace = (Get-Project $Project).Properties.Item("DefaultNamespace").Value
$rootNamespace = $namespace
$dotIX = $namespace.LastIndexOf('.')
if ($dotIX -gt 0) {
    $rootNamespace = $namespace.Substring(0, $dotIX)
}

##############################################################
# Project Name
##############################################################
$coreProjectName = $rootNamespace + ".Core"

# Runs Visual Studio's document formatter over a freshly generated file and
# saves it. Generation already succeeded by the time this runs, so any
# failure here (e.g. focus stolen away from the IDE) is only cosmetic and
# is reported rather than rethrown.
function Format-GeneratedItem([string]$ItemPath) {
    try {
        $file = Get-ProjectItem "$($ItemPath).cs" -Project $coreProjectName
        $file.Open()
        $file.Document.Activate()
        $DTE.ExecuteCommand("Edit.FormatDocument", "")
        $DTE.ActiveDocument.Save()
    } catch {
        Write-Host "Hey, you better not be clicking around in VS while we generate code" -ForegroundColor DarkRed
    }
}

# Generates one interface from a T4 template into the Core project, then
# formats the result. ExtraUsings is only added to the template model when
# supplied, matching the original per-interface model shapes.
function Add-Interface([string]$OutputPath, [string]$Template, [string]$InterfaceNamespace, [string]$ExtraUsings) {
    $model = @{ Namespace = $InterfaceNamespace }
    if ($ExtraUsings) {
        $model.ExtraUsings = $ExtraUsings
    }
    Add-ProjectItemViaTemplate $OutputPath -Template $Template `
        -Model $model `
        -SuccessMessage "Added $Template at {0}" `
        -TemplateFolders $TemplateFolders -Project $coreProjectName -CodeLanguage $CodeLanguage -Force:$Force
    Format-GeneratedItem $OutputPath
}

##############################################################
# Data interfaces
##############################################################
Add-Interface "Interfaces\Data\IRepository" "IRepository" ($coreProjectName + ".Interfaces.Data") ($coreProjectName + ".Interfaces.Paging")
Add-Interface "Interfaces\Data\IUnitOfWork" "IUnitOfWork" ($coreProjectName + ".Interfaces.Data") $null
Add-Interface "Interfaces\Data\IDatabaseFactory" "IDatabaseFactory" ($coreProjectName + ".Interfaces.Data") $null
Add-Interface "Interfaces\Data\IDataContext" "IDataContext" ($coreProjectName + ".Interfaces.Data") ($coreProjectName + ".Model")

##############################################################
# Service interfaces
##############################################################
Add-Interface "Interfaces\Service\IService" "IService" ($coreProjectName + ".Interfaces.Service") ($coreProjectName + ".Interfaces.Validation," + $coreProjectName + ".Interfaces.Paging")
|
{
"content_hash": "5d8cefc7e9bd3c6d73d3af491ba0b944",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 133,
"avg_line_length": 39.99230769230769,
"alnum_prop": 0.6245431813810348,
"repo_name": "rmanoto/XVA",
"id": "8b442d0b70c749e4f8ace7ca5acf19ecc79696ee",
"size": "6489",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "XVA-02-07-PollingDbForUpdates/Any OS/PollingDbForUpdates/PollingDbForUpdates/CodeTemplates/Scaffolders/Architecture/Core/Bob.Core.Interfaces/Bob.Core.Interfaces.ps1",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "209"
},
{
"name": "Arduino",
"bytes": "1193"
},
{
"name": "C#",
"bytes": "599531"
},
{
"name": "CSS",
"bytes": "513"
},
{
"name": "HTML",
"bytes": "8747"
},
{
"name": "JavaScript",
"bytes": "3910838"
},
{
"name": "Pascal",
"bytes": "2032"
},
{
"name": "PowerShell",
"bytes": "1227623"
},
{
"name": "R",
"bytes": "11206"
}
]
}
|
'use strict';
import React, { Component } from 'react';
import {
View,
Animated,
Alert,
} from 'react-native';
import LinearGradient from 'react-native-linear-gradient';
import { FBLogin } from '../actions/Login';
import LoginOverlay from './LoginOverlay';
const styles = require('./style/Login');
const companyLogo = require('./img/company.jpg');
export default class Login extends Component {
  // Flow annotation; guards against calling setState after unmount from
  // async login callbacks.
  _isMounted: boolean;

  constructor(props) {
    super(props);
    this.state = {
      email: null,
      password: null,
      logInProcess: false,
      error: null,
      // Single animated value driving the staggered entrance animation;
      // fadeMe() maps sub-ranges of [0, animToValue] to opacity/translate.
      fadeAnim: new Animated.Value(0),
      animToValue: 3000,
      animduration: 3000
    };
  }

  componentDidMount() {
    this._isMounted = true;
    // Start the entrance animation as soon as the screen mounts.
    Animated.timing(this.state.fadeAnim, { toValue: this.state.animToValue, duration: this.state.animduration }).start();
  }

  componentWillUnmount() {
    this._isMounted = false;
  }

  // Dispatches the Facebook login action, racing it against a 20s timeout.
  // Timeout and user-cancelled errors are swallowed; anything else is
  // surfaced in an Alert.
  async onPressLogin() {
    this.setState({ logInProcess: true });
    try {
      await Promise.race([
        this.props.dispatch(FBLogin()),
        this.timeout(20000)
      ]);
    } catch (error) {
      const message = error.message || error;
      // NOTE(review): unlike the setState in finally, this one is not
      // guarded by _isMounted -- confirm the component cannot unmount here.
      this.setState({ logInProcess: false, error: message });
      if (message !== 'Timed out' && message !== 'Login cancelled') {
        Alert.alert('Ads Manager Login', message);
      }
    } finally {
      // NOTE(review): this unconditionally clears `error`, overwriting the
      // message stored in the catch branch just above; errors only reach the
      // user via the Alert -- confirm that is intentional.
      this._isMounted && this.setState({ logInProcess: false, error: null });
    }
  }

  // Promise that rejects with 'Timed out' after ms. The timer is never
  // cancelled when the login wins the race, so a late rejection of the
  // losing promise is possible (harmless to the already-settled race).
  async timeout(ms) {
    return new Promise((resolve, reject) => {
      setTimeout(() => reject(new Error('Timed out')), ms);
    });
  }

  // Builds an animated style for one element: it fades in and slides up
  // from `from` px over a 500-unit window of the master animation value
  // beginning at `delay`, with the window clamped inside [0, animToValue].
  fadeMe(delay, from = 0) {
    const delayOffset = 500;
    const minInputRange = (delay <= this.state.animToValue) ? delay : this.state.animToValue - delayOffset;
    const maxInputRange = (delay <= this.state.animToValue) ? Math.min(delay + delayOffset, this.state.animToValue) : this.state.animToValue;
    return {
      opacity: this.state.fadeAnim.interpolate({
        inputRange: [minInputRange, maxInputRange],
        outputRange: [0, 1],
        extrapolate: 'clamp',
      }),
      transform: [{
        translateY: this.state.fadeAnim.interpolate({
          inputRange: [minInputRange, maxInputRange],
          outputRange: [from, 0],
          extrapolate: 'clamp',
        }),
      }],
    };
  }

  render() {
    return (
      <View style={styles.LoginContainer}>
        <Animated.Image style={[styles.imageContainer, this.fadeMe(1, 10)]} source={companyLogo} resizeMode="contain">
          <Animated.View style={[styles.LoginContainerMiddleBtnFBSK, this.fadeMe(1500, 20)]}>
            <LinearGradient
              start={{ x: 0.0, y: 0.2 }} end={{ x: 0.5, y: 1.0 }}
              colors={['#303F9F', '#3F51B5']}
              style={styles.linearGradient}
            >
              <Animated.Text onPress={this.onPressLogin.bind(this)} style={[styles.loginbtntxt, this.fadeMe(2000, 20)]}>
                Login with Facebook
              </Animated.Text>
            </LinearGradient>
          </Animated.View>
        </Animated.Image>
        <LoginOverlay logInProcess={this.state.logInProcess} />
      </View>
    );
  }
}
|
{
"content_hash": "4b0d01b4232a52607151242d049f4eb6",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 141,
"avg_line_length": 26.572727272727274,
"alnum_prop": 0.6520697913102976,
"repo_name": "httpdeveloper/AdsManager",
"id": "51e856df5d4057ed93f15284d9b7bb1e4fef10cb",
"size": "4079",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "js/components/Login.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "2627"
},
{
"name": "JavaScript",
"bytes": "220547"
},
{
"name": "Objective-C",
"bytes": "5137"
}
]
}
|
# Force the test environment before the dummy Rails app boots.
ENV["RAILS_ENV"] = "test"

# Start coverage reporting before any application code is loaded so all
# subsequently required files are instrumented.
require "codeclimate-test-reporter"
CodeClimate::TestReporter.start

# Boot the dummy Rails application that hosts the code under test.
require File.expand_path("../../spec/dummy/config/environment.rb", __FILE__)
require "action_controller"
require "rspec/rails"
require 'ostruct'

RSpec.configure do |config|
  # Infer spec type (:controller, :model, ...) from each file's directory.
  config.infer_spec_type_from_file_location!
  # Randomize example order to surface order dependencies.
  config.order = :random
  # Wrap each example in a database transaction that is rolled back.
  config.use_transactional_fixtures = true
  # Use only the modern expect/allow syntax for expectations and mocks.
  config.expect_with :rspec do |expectations|
    expectations.syntax = :expect
  end
  config.mock_with :rspec do |mocks|
    mocks.syntax = :expect
  end
end
|
{
"content_hash": "a133fe9afb38c47fff118e4b0b37390f",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 77,
"avg_line_length": 23.73913043478261,
"alnum_prop": 0.7307692307692307,
"repo_name": "oswaldoferreira/action_bouncer",
"id": "6db32d26813d46ae4d47b6c95024be2b0cc10568",
"size": "576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/spec_helper.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "686"
},
{
"name": "HTML",
"bytes": "4883"
},
{
"name": "JavaScript",
"bytes": "596"
},
{
"name": "Ruby",
"bytes": "22588"
}
]
}
|
using System;
using HtmlAgilityPack;
namespace Xyperico.Base.Testing
{
  /// <summary>
  /// Fluent helper for running assertions against a parsed HTML document.
  /// </summary>
  public class HtmlDocumentAssertion
  {
    private readonly HtmlDocument _document;

    /// <summary>
    /// Wraps the document that subsequent assertions will inspect.
    /// </summary>
    public HtmlDocumentAssertion(HtmlDocument html)
    {
      _document = html;
    }

    /// <summary>
    /// Applies <paramref name="a"/> to the wrapped document and returns
    /// this instance so calls can be chained.
    /// </summary>
    public HtmlDocumentAssertion Where(Action<HtmlDocument> a)
    {
      a(_document);
      return this;
    }
  }
}
|
{
"content_hash": "d885d71968faf909a0b96e1a28aaf4c3",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 62,
"avg_line_length": 16.727272727272727,
"alnum_prop": 0.6277173913043478,
"repo_name": "JornWildt/Xyperico",
"id": "f35744fb77bc9e7195489716ae0ed0a69887bde5",
"size": "370",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Xyperico.Base/Xyperico.Base.Testing/HtmlDocumentAssertion.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "114"
},
{
"name": "C#",
"bytes": "459795"
},
{
"name": "CSS",
"bytes": "1005"
}
]
}
|
<?php require_once('Connections/ECOM.php'); ?>
<?php
// *** Validate request to login to this site.
if (!isset($_SESSION)) {
  session_start();
}

// Escape the user-supplied values before embedding them in SQL. The raw
// $_POST values were previously concatenated straight into the queries,
// which allowed SQL injection through any of the three form fields.
// NOTE(review): the mysql_* API is deprecated; migrating to mysqli/PDO
// with prepared statements would remove the need for manual escaping.
mysql_select_db($database_ECOM, $ECOM);
$user     = mysql_real_escape_string($_POST['textfield'], $ECOM);
$question = mysql_real_escape_string($_POST['question'], $ECOM);
$answer   = mysql_real_escape_string($_POST['textfield2'], $ECOM);

// Look up the user by the security question/answer pair.
$query_Recordset1 = "SELECT * FROM userdetails WHERE UserName='".$user."' AND Question='".$question."' AND Answer='".$answer."'";
//echo $query_Recordset1;
$Recordset1 = mysql_query($query_Recordset1, $ECOM) or die(mysql_error());
$row_Recordset1 = mysql_fetch_assoc($Recordset1);
$FoundUser = mysql_num_rows($Recordset1);

if ($FoundUser == 1)
{
  // Generate a temporary 8-character password as four pairs of a digit
  // (1-9) followed by an uppercase letter.
  $pass = '';
  for ($i = 0; $i < 4; $i++)
  {
    $pass = $pass.rand(1, 9);
    $pass = $pass.chr(rand(65, 90));
  }
  //$enc = base64_encode($pass);
  // NOTE(review): md5 is a weak password hash; prefer password_hash()
  // once the schema and login path can be migrated.
  $enc = md5($pass);

  // Store the hash and report the cleartext temporary password back to
  // the user via the session.
  $updateSQL = "UPDATE userdetails SET Password='".$enc."' WHERE UserName='".$user."' AND Question='".$question."' AND Answer='".$answer."'";
  $Result1 = mysql_query($updateSQL, $ECOM) or die(mysql_error());
  $_SESSION['Found'] = "Your Password is Cleared. Your New Password is: ".$pass;
  header("Location: ForgotPassword.php" );
}
else
{
  $_SESSION['Found'] = "Sorry...., <br>System can not find your detail. <br>Please type correct informations";
  header("Location: ForgotPassword.php" );
}
?>
|
{
"content_hash": "fcc3e2f5e25e05a1dee7a75b925c50a3",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 178,
"avg_line_length": 25.962264150943398,
"alnum_prop": 0.6104651162790697,
"repo_name": "yumsuresht/Ecom",
"id": "d1aa47cb7199d15d6f33e311e71827baeea3499b",
"size": "1376",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "getpassword.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13423"
},
{
"name": "PHP",
"bytes": "370614"
}
]
}
|
# Migration creating the queries table: an English source text, its
# translation, and references to the target language and owning user.
class CreateQueries < ActiveRecord::Migration
  def change
    create_table :queries do |t|
      t.text :english
      t.text :other

      # t.references is an alias of t.belongs_to; both add the *_id columns.
      t.references :language
      t.references :user

      t.timestamps
    end
  end
end
|
{
"content_hash": "1b9beaea4207209ac889f070f9190697",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 45,
"avg_line_length": 17.46153846153846,
"alnum_prop": 0.6299559471365639,
"repo_name": "stellajkimm/translateYo",
"id": "92abd9edc7df9aaff148abf136b8c6bb19d33cdb",
"size": "227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20140808035809_create_queries.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "680"
},
{
"name": "JavaScript",
"bytes": "624289"
},
{
"name": "Ruby",
"bytes": "31861"
}
]
}
|
require "spec_helper"
module Scanny::Checks
  describe SkipBeforeFiltersCheck do
    before :each do
      @runner = Scanny::Runner.new(SkipBeforeFiltersCheck.new)
    end

    # Every security-sensitive filter gets the exact same set of
    # expectations, so generate one example per filter instead of
    # copy-pasting the block four times. String interpolation reproduces
    # the original example names, issue messages and checked snippets
    # byte-for-byte.
    [:login_required, :admin_required,
     :verify_authenticity_token, :authenticate].each do |filter|
      it "reports \"skip_before_filter\" with :#{filter} filter correctly" do
        expected_issue = issue(:info,
          "The \"skip_before_filter\" method with :#{filter} filter is used.",
          [285, 288, 425])

        # Bare and self-qualified receivers are reported...
        @runner.should check(
          "skip_before_filter :#{filter}"
        ).with_issue(expected_issue)
        @runner.should check(
          "self.skip_before_filter :#{filter}"
        ).with_issue(expected_issue)
        # ...but other receivers and skip_after_filter are not.
        @runner.should check("foo.skip_before_filter :#{filter}").without_issues
        @runner.should check("skip_after_filter :#{filter}").without_issues
        # The filter is detected anywhere in the argument list.
        @runner.should check(
          "skip_before_filter :some_filter, :#{filter}, :another_filter"
        ).with_issue(expected_issue)
        # Unrelated filters alone do not trigger the check.
        @runner.should check('skip_before_filter :some_filter').without_issues
      end
    end
  end
end
|
{
"content_hash": "ef86f87dde827a1b2f8fb29c33e18cfe",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 94,
"avg_line_length": 46,
"alnum_prop": 0.678743961352657,
"repo_name": "openSUSE/scanny",
"id": "090662fa48d65958a5be48331195408d8d4a0fb1",
"size": "3726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/scanny/checks/skip_before_filters_check_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "136621"
}
]
}
|
from googledrive import GoogleDrive
from onedrive import OneDrive
from scpUpload import ScpUpload
from logs import *
SERVICE_GOOGLE_DRIVE = 'googledrive'
SERVICE_ONEDRIVE = 'onedrive'
# NOTE(review): uppercase value, unlike the other lowercase identifiers --
# confirm callers really pass 'DROPBOX'.
SERVICE_DROPBOX = 'DROPBOX'
SERVICE_SCP = 'scp'


class Upload(object):
    """Facade that dispatches file uploads to a configured backend service.

    TODO: extract an interface or abstract base class for upload services.
    """

    def __init__(self, config, service_type):
        """Select the backend implementation for ``service_type``.

        :param config: configuration object passed through to the backend
        :param service_type: one of the ``SERVICE_*`` module constants
        :raises NotImplementedError: for ``SERVICE_DROPBOX`` (not supported yet)
        :raises ValueError: for an unrecognized ``service_type``.  Previously
            an unknown type silently left the instance without ``self.service``
            and only failed later with AttributeError inside ``run()``; now
            the error is raised eagerly at construction time.
        """
        self.__config = config
        # Aggregated per-upload details, populated by run().
        self.info = {
            'details': []
        }
        if service_type == SERVICE_GOOGLE_DRIVE:
            self.service = GoogleDrive(config)
        elif service_type == SERVICE_ONEDRIVE:
            self.service = OneDrive(config)
        elif service_type == SERVICE_DROPBOX:
            raise NotImplementedError('not implemented yet!')
        elif service_type == SERVICE_SCP:
            self.service = ScpUpload(config)
        else:
            raise ValueError('unknown upload service type: %r' % (service_type,))

    def run(self, paths):
        """Upload each path with the selected backend service.

        Each backend's ``info`` dict is appended to ``self.info['details']``
        and logged via ``log_dict``.
        """
        for path in paths:
            self.service.upload(path)
            self.info['details'].append(self.service.info)
            log_dict(self.service.info)
|
{
"content_hash": "d7c6f40432158b69b5828bac7fd79d19",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 61,
"avg_line_length": 29.5,
"alnum_prop": 0.6120527306967984,
"repo_name": "niqdev/packtpub-crawler",
"id": "1d0fc899405c85dfbfe5f6284477e9d8331bc9e3",
"size": "1062",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "script/upload.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "16104"
},
{
"name": "JavaScript",
"bytes": "413"
},
{
"name": "Python",
"bytes": "40562"
}
]
}
|
package main
import (
"bytes"
"io"
"os/exec"
"strings"
"time"
"github.com/go-check/check"
"github.com/kr/pty"
"gotest.tools/assert"
)
// regression test for #12546
// regression test for #12546
// Starts an interactive container running cat, execs a short-lived
// interactive command against it over a pty, and verifies both that the
// exec's output arrives and that the command exits within 5 seconds.
func (s *DockerSuite) TestExecInteractiveStdinClose(c *check.C) {
	testRequires(c, DaemonIsLinux)
	out, _ := dockerCmd(c, "run", "-itd", "busybox", "/bin/cat")
	contID := strings.TrimSpace(out)

	cmd := exec.Command(dockerBinary, "exec", "-i", contID, "echo", "-n", "hello")
	p, err := pty.Start(cmd)
	assert.NilError(c, err)

	b := bytes.NewBuffer(nil)

	// Wait for the exec in a goroutine so the select below can race it
	// against the 5-second deadline.
	ch := make(chan error)
	go func() { ch <- cmd.Wait() }()

	select {
	case err := <-ch:
		assert.NilError(c, err)
		// Drain whatever the exec wrote to the pty; the copy error is
		// deliberately ignored since the process already exited cleanly.
		io.Copy(b, p)
		p.Close()
		// Strip NUL padding from the pty buffer before comparing.
		bs := b.Bytes()
		bs = bytes.Trim(bs, "\x00")
		output := string(bs[:])
		assert.Equal(c, strings.TrimSpace(output), "hello")
	case <-time.After(5 * time.Second):
		p.Close()
		c.Fatal("timed out running docker exec")
	}
}
// Verifies that an interactive exec gets a working TTY: a shell exec'd
// with -it can read a file created by the container's main process and
// its output is visible on the pty.
func (s *DockerSuite) TestExecTTY(c *check.C) {
	testRequires(c, DaemonIsLinux, testEnv.IsLocalDaemon)
	dockerCmd(c, "run", "-d", "--name=test", "busybox", "sh", "-c", "echo hello > /foo && top")

	cmd := exec.Command(dockerBinary, "exec", "-it", "test", "sh")
	p, err := pty.Start(cmd)
	assert.NilError(c, err)
	defer p.Close()

	// Drive the shell through the pty and have it exit on its own.
	_, err = p.Write([]byte("cat /foo && exit\n"))
	assert.NilError(c, err)

	chErr := make(chan error)
	go func() {
		chErr <- cmd.Wait()
	}()
	select {
	case err := <-chErr:
		assert.NilError(c, err)
	case <-time.After(3 * time.Second):
		c.Fatal("timeout waiting for exec to exit")
	}

	// The pty echoes the typed command as well, so only assert that the
	// file's contents appear somewhere in the captured bytes.
	buf := make([]byte, 256)
	read, err := p.Read(buf)
	assert.NilError(c, err)
	assert.Assert(c, bytes.Contains(buf, []byte("hello")), string(buf[:read]))
}
// Test the TERM env var is set when -t is provided on exec
func (s *DockerSuite) TestExecWithTERM(c *check.C) {
	testRequires(c, DaemonIsLinux, testEnv.IsLocalDaemon)
	out, _ := dockerCmd(c, "run", "-id", "busybox", "/bin/cat")
	contID := strings.TrimSpace(out)
	// The exec'd shell exits 0 only when $TERM is non-empty, so a nil
	// error from Run means TERM was set. The previous
	// `if err := cmd.Run(); err != nil { assert.NilError(c, err) }`
	// wrapper was redundant: assert.NilError is a no-op for nil.
	cmd := exec.Command(dockerBinary, "exec", "-t", contID, "sh", "-c", "if [ -z $TERM ]; then exit 1; else exit 0; fi")
	assert.NilError(c, cmd.Run())
}
// Test that the TERM env var is not set on exec when -t is not provided, even if it was set
// on run
func (s *DockerSuite) TestExecWithNoTERM(c *check.C) {
	testRequires(c, DaemonIsLinux, testEnv.IsLocalDaemon)
	out, _ := dockerCmd(c, "run", "-itd", "busybox", "/bin/cat")
	contID := strings.TrimSpace(out)
	// Inverse of TestExecWithTERM: the shell exits 0 only when $TERM is
	// empty. The redundant `if err != nil` wrapper around assert.NilError
	// has been dropped -- the assertion already ignores nil errors.
	cmd := exec.Command(dockerBinary, "exec", contID, "sh", "-c", "if [ -z $TERM ]; then exit 0; else exit 1; fi")
	assert.NilError(c, cmd.Run())
}
|
{
"content_hash": "4654f9e264a790fcb94ffcfc39347780",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 117,
"avg_line_length": 27.42105263157895,
"alnum_prop": 0.6310940499040307,
"repo_name": "unclejack/docker",
"id": "21abcbad8c0b9030d11c72e35902248bf31c4582",
"size": "2625",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integration-cli/docker_cli_exec_unix_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "81"
},
{
"name": "C",
"bytes": "5027"
},
{
"name": "Go",
"bytes": "7117513"
},
{
"name": "Makefile",
"bytes": "12708"
},
{
"name": "PowerShell",
"bytes": "23242"
},
{
"name": "Shell",
"bytes": "506389"
},
{
"name": "Vim script",
"bytes": "1350"
}
]
}
|
package com.alx.etx.model;
import com.alx.etx.data.ParticipantState;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.fasterxml.jackson.databind.annotation.JsonNaming;
import lombok.Data;
import java.time.OffsetDateTime;
/**
 * Plain data holder for a transaction participant.
 *
 * <p>Lombok's {@code @Data} generates getters, setters, {@code equals},
 * {@code hashCode} and {@code toString}; {@code @JsonNaming} maps every
 * property to snake_case in JSON (e.g. {@code callbackUrl} becomes
 * {@code callback_url}).
 */
@JsonNaming(PropertyNamingStrategy.SnakeCaseStrategy.class)
public @Data class ParticipantData {
  private String id;
  private String name;
  // Free-form payload carried with the participant; presumably opaque to
  // the coordinator -- TODO confirm against callers.
  private String payload;
  // Callback endpoint and token used to reach the participant -- TODO
  // confirm how/when these are invoked.
  private String callbackUrl;
  private String callbackToken;
  // Timestamps whose names suggest the join/execute/confirm/cancel phases
  // of the participant's lifecycle.
  private OffsetDateTime joinTime;
  private OffsetDateTime executeTime;
  private OffsetDateTime confirmTime;
  private OffsetDateTime cancelTime;
  private ParticipantState state;
}
|
{
"content_hash": "6d59f6359ab7078fab7013c6e6030c2e",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 61,
"avg_line_length": 30.954545454545453,
"alnum_prop": 0.801762114537445,
"repo_name": "alemser/etx",
"id": "7db80c6afbeb5f175bea9b959d6c05499b74e6b7",
"size": "681",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "etx-app/src/main/java/com/alx/etx/model/ParticipantData.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "11179"
},
{
"name": "Java",
"bytes": "53955"
}
]
}
|
package com.tle.web.sections.events;
import com.tle.web.sections.MutableSectionInfo;
import com.tle.web.sections.SectionId;
import com.tle.web.sections.SectionInfo;
import java.util.EventListener;
/**
 * Section event that notifies {@link InfoEventListener}s about a
 * {@link SectionInfo}, carrying two flags: whether the info was removed
 * and whether request parameters should be processed.
 */
public class InfoEvent extends AbstractSectionEvent<InfoEventListener> {
  private final boolean removed;
  private final boolean processParameters;

  public InfoEvent(boolean removed, boolean processParameters) {
    this.removed = removed;
    this.processParameters = processParameters;
  }

  /**
   * Dispatches this event to one listener, passing the mutable view of
   * the info (looked up as a {@link MutableSectionInfo} attribute) along
   * with both flags.
   */
  @Override
  public void fire(SectionId sectionId, SectionInfo info, InfoEventListener listener)
      throws Exception {
    listener.handleInfoEvent(
        info.getAttributeForClass(MutableSectionInfo.class), removed, processParameters);
  }

  @Override
  public Class<? extends EventListener> getListenerClass() {
    return InfoEventListener.class;
  }

  // Note: "removed" is only exposed to listeners via fire(); unlike
  // processParameters it has no getter.
  public boolean isProcessParameters() {
    return processParameters;
  }
}
|
{
"content_hash": "9552928d049b037427aabd520de3bf55",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 89,
"avg_line_length": 27.529411764705884,
"alnum_prop": 0.7756410256410257,
"repo_name": "equella/Equella",
"id": "1df8d58d2841ce22d02b9c75be19967468a9f3fd",
"size": "1739",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Platform/Plugins/com.tle.web.sections/src/com/tle/web/sections/events/InfoEvent.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "402"
},
{
"name": "Batchfile",
"bytes": "38432"
},
{
"name": "CSS",
"bytes": "648823"
},
{
"name": "Dockerfile",
"bytes": "2055"
},
{
"name": "FreeMarker",
"bytes": "370046"
},
{
"name": "HTML",
"bytes": "865667"
},
{
"name": "Java",
"bytes": "27081020"
},
{
"name": "JavaScript",
"bytes": "1673995"
},
{
"name": "PHP",
"bytes": "821"
},
{
"name": "PLpgSQL",
"bytes": "1363"
},
{
"name": "PureScript",
"bytes": "307610"
},
{
"name": "Python",
"bytes": "79871"
},
{
"name": "Scala",
"bytes": "765981"
},
{
"name": "Shell",
"bytes": "64170"
},
{
"name": "TypeScript",
"bytes": "146564"
},
{
"name": "XSLT",
"bytes": "510113"
}
]
}
|
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Pretty JSON</title>
<!-- Inlined, minified styles: the textarea fills the viewport and the
     Pretty button floats at the bottom-right corner. -->
<style>body{font-family:sans-serif;padding:0}textarea{font:14px Menlo,Monaco,monospace;box-sizing:border-box;display:block;border:0;outline:0;padding:10px;position:fixed;top:0;left:0;right:0;bottom:0}button{position:fixed;right:10px;bottom:10px}footer{color:#999;text-align:center;font-size:.9em}footer a{color:#999;text-decoration:none;border-bottom:1px solid transparent}footer a:hover{border-bottom:1px solid}</style>
</head>
<!-- Focus the input as soon as the page loads so the user can paste
     immediately. -->
<body onload="document.getElementsByTagName('textarea')[0].focus()">
<!-- Submitting the form re-formats the textarea contents in place. -->
<form action="javascript:pretty()">
<textarea placeholder='{"your":"json","goes":"here"}'></textarea>
<button type="submit">Pretty</button>
</form>
<pre></pre>
<footer>
&copy; 2013&ndash;2019 <a href="http://soff.es" rel="external nofollow">Sam Soffes</a> &mdash;
<a href="https://github.com/soffes/prettyjson.com" rel="external nofollow">Source on GitHub</a>
</footer>
<script>
(function() {
  // Parses the textarea's JSON and rewrites it pretty-printed with
  // two-space indentation. Invalid JSON makes JSON.parse throw, leaving
  // the textarea unchanged.
  window.pretty = function() {
    var jsonString, textArea;
    textArea = document.getElementsByTagName('textarea')[0];
    jsonString = textArea.value;
    return textArea.value = JSON.stringify(JSON.parse(jsonString), null, 2);
  };
}).call(this);
</script>
</body>
</html>
|
{
"content_hash": "b1e2f63ff93bf9f5d0c4bd91d65bcf29",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 422,
"avg_line_length": 43.733333333333334,
"alnum_prop": 0.6897865853658537,
"repo_name": "soffes/prettyjson.com",
"id": "deeb3075998c42630657a1687e95eb1f87baecaf",
"size": "1314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1705"
},
{
"name": "CoffeeScript",
"bytes": "174"
},
{
"name": "HTML",
"bytes": "658"
},
{
"name": "Ruby",
"bytes": "1593"
}
]
}
|
package kf
import (
"encoding/json"
"fmt"
"github.com/silenceper/wechat/v2/util"
)
const (
	// customerBatchGetAddr is the endpoint for batch-fetching customer
	// profiles; the access token is interpolated via fmt.Sprintf.
	customerBatchGetAddr = "https://qyapi.weixin.qq.com/cgi-bin/kf/customer/batchget?access_token=%s"
)
// CustomerBatchGetOptions is the request payload for batch-fetching
// basic customer information.
type CustomerBatchGetOptions struct {
	ExternalUserIDList []string `json:"external_userid_list"` // list of external_userid values to look up
}
// CustomerSchema is the basic profile of a WeChat customer.
type CustomerSchema struct {
	ExternalUserID string `json:"external_userid"` // the customer's external_userid
	NickName       string `json:"nickname"`        // WeChat nickname
	Avatar         string `json:"avatar"`          // WeChat avatar; not available to third-party apps
	Gender         int    `json:"gender"`          // gender
	UnionID        string `json:"unionid"`         // unionid; requires a bound WeChat developer account, see https://open.work.weixin.qq.com/kf/doc/92512/93143/94769#%E5%A6%82%E4%BD%95%E8%8E%B7%E5%8F%96%E5%BE%AE%E4%BF%A1%E5%AE%A2%E6%88%B7%E7%9A%84unionid
}
// CustomerBatchGetSchema is the response payload for batch-fetching
// basic customer information.
type CustomerBatchGetSchema struct {
	util.CommonError
	CustomerList          []CustomerSchema `json:"customer_list"`           // profiles of the resolved customers
	InvalidExternalUserID []string         `json:"invalid_external_userid"` // external_userids that could not be resolved
}
// CustomerBatchGet fetches basic profiles for the customers listed in
// options, returning the parsed response. An in-band API failure
// (non-zero errcode) is converted to an SDK error.
func (r *Client) CustomerBatchGet(options CustomerBatchGetOptions) (info CustomerBatchGetSchema, err error) {
	var (
		accessToken string
		data        []byte
	)
	// Resolve a valid access token before calling the API.
	if accessToken, err = r.ctx.GetAccessToken(); err != nil {
		return
	}
	if data, err = util.PostJSON(fmt.Sprintf(customerBatchGetAddr, accessToken), options); err != nil {
		return
	}
	if err = json.Unmarshal(data, &info); err != nil {
		return
	}
	// The API reports failures in-band via errcode/errmsg.
	if info.ErrCode != 0 {
		return info, NewSDKErr(info.ErrCode, info.ErrMsg)
	}
	return info, nil
}
|
{
"content_hash": "d33c06e0499b7e1bd1963bfde1e86bbb",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 229,
"avg_line_length": 31.51851851851852,
"alnum_prop": 0.6962397179788484,
"repo_name": "silenceper/wechat",
"id": "1ded143f9367f434bf2be2186374d0fb36bada03",
"size": "1910",
"binary": false,
"copies": "1",
"ref": "refs/heads/v2",
"path": "work/kf/customer.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5412"
},
{
"name": "C++",
"bytes": "7508"
},
{
"name": "Go",
"bytes": "542418"
}
]
}
|
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>UI Input Module — Leap Motion JavaScript SDK v3.1 documentation</title>
<link rel="stylesheet" href="../../cpp/_static/bootstrap-3.0.0/css/documentation-bundle.1471552333.css" type="text/css" />
<script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: '../',
VERSION: '3.1',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: false
};
</script>
<script type="text/javascript" src="../../cpp/_static/bootstrap-3.0.0/js/documentation-bundle.1471552333.js"></script>
<link rel="top" title="Leap Motion JavaScript SDK v3.1 documentation" href="../index.html" />
<script type="text/javascript" src="/assets/standalone-header.js?r9"></script>
<link rel="stylesheet" href="/assets/standalone-header.css?r9" type="text/css" />
<meta charset='utf-8'>
<meta http-equiv='X-UA-Compatible' content='IE=edge,chrome=1'>
<meta name='viewport' content='width=device-width, initial-scale=1.0, maximum-scale=1'>
<meta name="apple-mobile-web-app-capable" content="yes">
<script type="text/javascript">
// Google Analytics async-loader boilerplate: queue the tracker
// configuration in _gaq, then inject ga.js from the protocol-matching
// Google CDN before the first <script> on the page.
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-31536531-1']);
_gaq.push(['_setDomainName', 'leapmotion.com']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
<script>
// Look up a single parameter in the page's query string.
// Returns the raw (still URL-encoded) value, or false when absent.
function getQueryValue(variable)
{
  var pairs = window.location.search.substring(1).split("&");
  for (var i = 0; i < pairs.length; i++) {
    var parts = pairs[i].split("=");
    if (parts[0] == variable) {
      return parts[1];
    }
  }
  return false;
}
// Language-switcher redirect: when the page is requested with a
// ?proglang= query (or the visitor's stored preference via "current"),
// jump to the equivalent page in that language's doc tree. If that
// language has no equivalent page, fall back to its index, and finally
// to the site index.
var relPath = "../../";
var requestedAPI = getQueryValue("proglang");
// "current" means: reuse whichever API the visitor last picked; the
// navigation links persist that choice in localStorage.
if(requestedAPI == "current") requestedAPI = localStorage["currentAPI"];
var pageAPI = 'javascript';
// hasAPI flags which languages have a page equivalent to this one;
// only Unity does for the UI Input module.
var hasAPI = {};
hasAPI.unity = true;
if(requestedAPI && (requestedAPI != pageAPI))
{
if(pageAPI != 'none'){
var redirectedLocation = relPath + 'javascript/unity/Unity_UIInput.html';
// First chain: the requested language has this very page — go to it.
if( requestedAPI == 'cpp' && hasAPI.cpp){
redirectedLocation = relPath + "cpp/unity/Unity_UIInput.html";
}
else if( requestedAPI == 'csharp' && hasAPI.csharp){
redirectedLocation = relPath + "csharp/unity/Unity_UIInput.html";
}
else if( requestedAPI == 'unity' && hasAPI.unity){
redirectedLocation = relPath + "unity/unity/Unity_UIInput.html";
}
else if( requestedAPI == 'objc' && hasAPI.objc){
redirectedLocation = relPath + "objc/unity/Unity_UIInput.html";
}
else if( requestedAPI == 'java' && hasAPI.java) {
redirectedLocation = relPath + "java/unity/Unity_UIInput.html";
}
else if( requestedAPI == 'javascript' && hasAPI.javascript){
redirectedLocation = relPath + "javascript/unity/Unity_UIInput.html";
}
else if( requestedAPI == 'python' && hasAPI.python){
redirectedLocation = relPath + "python/unity/Unity_UIInput.html";
}
else if( requestedAPI == 'unreal' && hasAPI.unreal) {
redirectedLocation = relPath + "unreal/unity/Unity_UIInput.html";
} else {
// Fallback chain: no equivalent page — send the visitor to the
// requested language's index instead, preserving the proglang query.
if( requestedAPI == 'cpp'){
redirectedLocation = relPath + "cpp/index.html?proglang=cpp";
}
else if( requestedAPI == 'csharp'){
redirectedLocation = relPath + "csharp/index.html?proglang=csharp";
}
else if( requestedAPI == 'unity'){
redirectedLocation = relPath + "unity/index.html?proglang=unity";
}
else if( requestedAPI == 'objc'){
redirectedLocation = relPath + "objc/index.html?proglang=objc";
}
else if( requestedAPI == 'java') {
redirectedLocation = relPath + "java/index.html?proglang=java";
}
else if( requestedAPI == 'javascript'){
redirectedLocation = relPath + "javascript/index.html?proglang=javascript";
}
else if( requestedAPI == 'python'){
redirectedLocation = relPath + "python/index.html?proglang=python";
}
else if( requestedAPI == 'unreal') {
redirectedLocation = relPath + "unreal/index.html?proglang=unreal";
} else {
redirectedLocation = relPath + "index.html";
}
}
//Guard against redirecting to the same page (infinitely)
if(relPath + 'javascript/unity/Unity_UIInput.html' != redirectedLocation) window.location.replace(redirectedLocation);
}
}
</script>
<script>
// Alt+J / Alt+K keyboard hooks; both branches are intentionally empty
// placeholders in this build.
window.addEventListener('keyup', handleKeyInput);
function handleKeyInput(e)
{
  e = e || window.event; // old-IE fallback when no event argument is passed
  var code;
  if (e.keyCode) {
    code = e.keyCode;
  } else if (e.which) {
    code = e.which;
  }
  var character = String.fromCharCode(code);
  if (character == "J" && e.altKey) { }
  else if (character == "K" && e.altKey) { }
}
</script>
</head>
<body role="document">
<div class="developer-portal-styles">
<header class="navbar navbar-static-top developer-navbar header beta-header">
<nav class="container pr">
<a class="logo-link pull-left" href="/">
<img alt="Leap Motion Developers" class="media-object pull-left white-background" src="../_static/logo.png" />
</a>
<span class="inline-block hidden-phone developer-logo-text">
<div class="text">
<a href="/">
<span class="more-than-1199">Developer Portal</span>
</a>
</div>
</span>
<button type="button" class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<!-- Everything within here will be hidden at 940px or less, accessible via a button. -->
<div class="nav-collapse">
<ul class="nav header-navigation developer-links">
<li class="external-link"><a href="https://developer.leapmotion.com/features">What's new</a> </li>
<li class="external-link"><a href="https://developer.leapmotion.com/downloads/skeletal-beta" class="">Getting Started</a></li>
<li><a class="active" href="#" class="">Documentation</a></li>
<li class="external-link"> <a href="https://developer.leapmotion.com/gallery" class="">Examples</a> </li>
<li class="external-link"> <a href="https://www.leapmotion.com/blog/category/labs/" class="" target="_blank">Blog <i class='fa fa-external-link'></i></a> </li>
<li class="external-link"> <a href="https://community.leapmotion.com/category/beta" class="" target="_blank">Community <i class='fa fa-external-link'></i></a> </li>
</ul>
</div>
</nav>
</header>
</div>
<section class="main-wrap">
<div data-swiftype-index="true">
<div class="second_navigation">
<div class="container">
<div class="row">
<div class="col-md-8">
<ul>
<li>
JavaScript
</li>
<li>
<a href="../../unity/unity/Unity_UIInput.html?proglang=unity" onclick="localStorage['currentAPI'] = 'unity'">Unity</a>
</li>
<li>
<a href="../../csharp/index.html?proglang=csharp" onclick="localStorage['currentAPI'] = 'csharp'">C#</a>
</li>
<li>
<a href="../../cpp/index.html?proglang=cpp" onclick="localStorage['currentAPI'] = 'cpp'">C++</a>
</li>
<li>
<a href="../../java/index.html?proglang=java" onclick="localStorage['currentAPI'] = 'java'">Java</a>
</li>
<li>
<a href="../../python/index.html?proglang=python" onclick="localStorage['currentAPI'] = 'python'">Python</a>
</li>
<li>
<a href="../../objc/index.html?proglang=objc" onclick="localStorage['currentAPI'] = 'objc'">Objective-C</a>
</li>
<li>
<a href="../../unreal/index.html?proglang=unreal" onclick="localStorage['currentAPI'] = 'unreal'">Unreal</a>
</li>
</ul>
</div>
<div class="col-md-4 search">
<script>
// Remember the page the visitor searched from, so doneWithSearch can
// navigate back to it. Returns true so the form submit proceeds.
function storeThisPage(){
  sessionStorage.pageBeforeSearch = window.location;
  return true;
}
// Navigate back to the page recorded by storeThisPage, falling back to
// the docs index when nothing was stored. Returns false to cancel the
// default link/form action.
function doneWithSearch(){
  var previous = sessionStorage.pageBeforeSearch;
  window.location = previous ? previous : "index.html";
  return false;
}
</script>
<div style="margin-top:-4px">
<ul style="display:inline; white-space:nowrap"><li>
<form class="navbar-form" action="../search.html" method="get" onsubmit="storeThisPage()">
<div class="form-group">
<input type="search" results="5" name="q" class="form-control" placeholder="Search" />
</div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form></li>
</ul>
</div>
</div>
</div>
</div>
</div>
<script>
// Hide the developer-portal navigation when the docs are viewed straight
// from the local file system — those links point at the live site.
if (window.location.protocol == 'file:') {
  var links = document.querySelector(".developer-links");
  links.parentNode.removeChild(links);
}
</script>
<div id="wrap" data-spy="scroll" data-target="#sidebar">
<div class="container">
<div class="row">
<div class="col-md-9 pull-right">
<!--
<span id="breadcrumbs">
<a href="../index.html">Home</a>»
UI Input Module
</span> -->
<div class="section" id="ui-input-module">
<h1>UI Input Module<a class="headerlink" href="#ui-input-module" title="Permalink to this headline">¶</a></h1>
<p>The UI Input module allows you to use hands to control standard Unity UI widgets.</p>
<img alt="unity/../../../images/unity/UIInput_Example2.jpg" src="unity/../../../images/unity/UIInput_Example2.jpg" />
<p>The primary component of the UIInput module is the LeapEventSystem prefab. The LeapInputModule script component of this prefab implements a Unity InputModule that uses tracking data to allow the user to manipulate standatd UI controls with their hands. The UIInput module also provides prefabs for the main Unity UI widget types. These prefabs use the CompressibleUI script, which animates the controls when the user manipulates them, as well as drop shadows. You can use the standard Unity widgets as is, however.</p>
<p>The UIInput module uses the tracking data from the scene’s LeapServiceProvider directly and doesn’t rely on graphical or physics hands in the scene. The system projects a cursor through the user’s first knuckle and generates pointer events in the Unity event system.</p>
<p>The exception to this is the PhysicsButton prefab and its PhysicsUI script. The PhysicsButton uses rigidbody collisions to depress the button and so requires physics hands. The PhysicsButton does not require the LeapInputModule.</p>
<p>See also: <a class="reference external" href="http://blog.leapmotion.com/ui-input-module/4/">New Unity Module for User Interface Input</a>.</p>
<blockquote>
<div><div class="toctree-wrapper compound">
</div>
</div></blockquote>
</div>
<!-- get_disqus_sso -->
</div>
<div id="sidebar" class="col-md-3">
<div class="well-sidebar" data-offset-top="188">
<ul>
<li><a href="../index.html" title="Home">JavaScript Docs (v3.1)</a></li>
</ul><ul>
<li class="toctree-l1"><a class="reference internal" href="../devguide/Leap_Overview.html">API Overview</a></li>
<li class="toctree-l1"><a class="reference internal" href="../practices/Leap_Practices.html">Guidelines</a></li>
<li class="toctree-l1"><a class="reference internal" href="../devguide/Leap_Guides.html">Application Development</a></li>
<li class="toctree-l1"><a class="reference internal" href="../devguide/Leap_Guides2.html">Using the Tracking API</a></li>
<li class="toctree-l1"><a class="reference external" href="https://developer.leapmotion.com/downloads/javascript#plugins">LeapJS Plug-ins</a></li>
<li class="toctree-l1"><a class="reference internal" href="../api/Leap_Classes.html">API Reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="../supplements/Leap_Supplements.html">Appendices</a></li>
</ul>
</div>
</div>
</div>
</div>
</div>
<!--
<div class="ribbon">
<p>JavaScript</p>
</div>
-->
<footer>
<div id="footer" class="container">
<div class="container">
<div class="copyright">
<span>Copyright © 2012 - 2016, Leap Motion, Inc.</span>
</div>
</div>
</div>
</footer>
</body>
</html>
|
{
"content_hash": "75d14e8d756c3b8f37c10c65109fac01",
"timestamp": "",
"source": "github",
"line_count": 350,
"max_line_length": 521,
"avg_line_length": 39.04571428571428,
"alnum_prop": 0.6018586272501097,
"repo_name": "MTASZTAKI/ApertusVR",
"id": "52ac448043b9b6e05829d81f7883b06c2e6e26d4",
"size": "13670",
"binary": false,
"copies": "2",
"ref": "refs/heads/0.9",
"path": "plugins/track/hand/leapMotion/3rdParty/leapMotion/docs/javascript/unity/Unity_UIInput.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7599"
},
{
"name": "C++",
"bytes": "1207412"
},
{
"name": "CMake",
"bytes": "165066"
},
{
"name": "CSS",
"bytes": "1816"
},
{
"name": "GLSL",
"bytes": "223507"
},
{
"name": "HLSL",
"bytes": "141879"
},
{
"name": "HTML",
"bytes": "34827"
},
{
"name": "JavaScript",
"bytes": "140550"
},
{
"name": "Python",
"bytes": "1370"
}
]
}
|
---
layout: page
title: Curtis International Party
date: 2016-05-24
author: Marilyn Foley
tags: weekly links, java
status: published
summary: Praesent elit ante, viverra a pellentesque ut, pulvinar nec.
banner: images/banner/wedding.jpg
booking:
startDate: 02/25/2017
endDate: 03/01/2017
ctyhocn: NCLHXHX
groupCode: CIP
published: true
---
Duis eget efficitur ante, et ultrices justo. Donec ut commodo ligula. Ut est urna, bibendum at tempus eget, hendrerit et orci. Donec venenatis consequat leo, ut egestas magna rhoncus eget. Mauris sed urna id nunc auctor viverra nec et nisi. Sed bibendum varius tellus nec aliquet. Donec vel nibh non risus vulputate eleifend.
Vivamus ac efficitur ante, varius varius nisl. Phasellus metus sapien, hendrerit et fringilla ac, pellentesque in turpis. Etiam sit amet molestie purus. Mauris vel rhoncus ante. Sed ultricies lacinia justo, at pellentesque massa aliquet fringilla. Nunc quis magna ultrices, dapibus turpis at, sagittis risus. Curabitur facilisis varius urna eu tempor. Sed mi orci, viverra quis rhoncus quis, ornare sed nulla. Fusce condimentum enim non enim pellentesque vestibulum. Cras porttitor erat eget erat imperdiet cursus. Integer vel ornare metus.
1. Mauris ultricies quam quis eros vestibulum lobortis
1. Sed venenatis ante a egestas mattis
1. Donec at odio vulputate, consequat quam viverra, rutrum nisi
1. Ut dictum leo vel nisl aliquet hendrerit
1. Duis non urna a dui rutrum interdum.
Suspendisse at luctus massa. Quisque enim ipsum, finibus ut ligula tincidunt, egestas egestas libero. Vestibulum et lacus interdum, varius justo ultrices, tempus mi. Fusce at auctor erat. Mauris porttitor lorem varius mi scelerisque, sed tincidunt dolor ultrices. Morbi ac lectus lectus. Donec sed consectetur eros. Mauris consequat ultricies placerat. Aliquam ac magna dignissim, hendrerit lectus id, fringilla sapien. Sed ultricies lorem et ligula dignissim feugiat. Aliquam id libero a nisl blandit scelerisque et varius neque. In sit amet ex id ante iaculis blandit. Nulla semper lacinia magna, et tincidunt nisl ultrices ultricies. Aliquam lacinia nulla at lorem suscipit, nec pharetra eros volutpat. Fusce eu dictum est, ut accumsan mauris. Sed at nisi et mi lobortis gravida.
|
{
"content_hash": "7502b1481cc0474515ff5f05667e4708",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 782,
"avg_line_length": 89.32,
"alnum_prop": 0.8083296014330497,
"repo_name": "KlishGroup/prose-pogs",
"id": "9cd49a7058346bdde0ca06ae66a9fd1264a1e9f9",
"size": "2237",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "pogs/N/NCLHXHX/CIP/index.md",
"mode": "33188",
"license": "mit",
"language": []
}
|
var svnMonitor = require("svnmonitor");
var sqlite3 = require("sqlite3").verbose();
// Two separate databases: Paths holds the repository URLs to watch,
// Monitor accumulates the commits fetched from them.
var paths = new sqlite3.Database("paths.db");
var monitor = new sqlite3.Database("monitor.db");
var moment = require("moment");
//var async = require('async');
// Rebuild the Monitor table on every start. `revision` is UNIQUE, so
// re-inserting an already-seen commit with INSERT OR IGNORE is a no-op.
monitor.run("DROP TABLE IF EXISTS Monitor");
monitor.run("CREATE TABLE IF NOT EXISTS Monitor (rowid INTEGER PRIMARY KEY, dir TEXT, revision INTEGER unique, author TEXT, date TEXT, unixdate INTEGER, message TEXT)");
// Shared SVN client; `url` is reassigned per request before each fetch.
var svnMon = new svnMonitor(
"http://www.ineedthisurlherefornow.com", //URL
"", //User Name
"" //Password
);
//Get latest commits
exports.getcommits = function (req, res) {
svnMon.url = req.query.url;
svnMon.getLatestCommits(req.query.limit, function(err, log){
if(err){
console.log(err);
return;
}
res.send(log);
});
};
//Grab DB URLs and insert changes into Monitored
exports.getrev = function (req, res) {
var query = "SELECT dir FROM Paths ORDER BY rowid ASC";
paths.all(query, function (err, rows) {
for(var i in rows) {
getCommitList(req.query.limit, rows[i].dir, function(list, url) {
for (var j = 0; j < list.length; j++)
{
unixdate = moment(list[j].date).unix();
author = list[j].author.replace(/^\s+|\s+$/g,'')
monitor.prepare("INSERT OR IGNORE INTO Monitor (dir, revision, author, date, unixdate, message) VALUES (?, ?, ?, ?, ?, ?)", url, list[j].revision, author, list[j].date, unixdate, list[j].message).run();
}
});
res.send(rows);
};
});
};
//Display ALL Monitored changes
//monitorjson : display all
//monitorjson?url= : displays that url
//monitorjson?url= &author=0 : display author list ranked by most contribution
//monitorjson?url= &author=author : display all by that author from that url
exports.monitorjson = function (req, res) {
var query = "SELECT * FROM Monitor ORDER BY rowid ASC";
if (req.query.url) {
query = "SELECT * FROM Monitor WHERE dir = '" + req.query.url + "' ORDER BY rowid ASC";
if (req.query.author == 0) {
query = "SELECT author, COUNT(*) AS Changes FROM Monitor WHERE dir = '" + req.query.url + "' GROUP BY author ORDER BY Changes DESC ";
}
if (req.query.author && req.query.author != 0) {
query = "SELECT * FROM Monitor WHERE dir = '" + req.query.url + "' AND author = '" + req.query.author + "' ORDER BY revision DESC";
}
}
monitor.all(query, function (err, rows) {
res.send({ Monitor: rows });
});
};
//Retrieves all commits from the last day, NOT TAKING INTO CONSIDERATION DAYLIGHT SAVING (NOT ALL DAYS ARE EQUAL)
//activity : display all commits ordered by unixdate
//activity?url= : display all commits from last day
//activity?option=1 : display all commits from last day
//acitivty?option=2 : display all dir order by commits from last day
exports.activity = function (req, res) {
var lastday = moment().unix() - (24*60*60);
var query = "SELECT * FROM Monitor ORDER BY unixdate DESC";
if (req.query.url) {
query = "SELECT * FROM Monitor WHERE dir = '" + req.query.url + "' AND unixdate > " + lastday + " ORDER BY unixdate DESC";
}
if (req.query.option == 1) {
query = "SELECT dir, COUNT(*) AS COUNT FROM Monitor WHERE unixdate > " + lastday + " GROUP BY dir ORDER BY COUNT DESC";
}
if (req.query.option == 2) {
query = "SELECT * FROM Monitor WHERE unixdate > " + lastday + " ORDER BY unixdate DESC"
}
monitor.all(query, function (err, rows) {
res.send({ Monitor: rows });
});
};
// Fetch up to `limit` commits for `url` and pass (log, url) to `callback`.
// On failure the error is logged and the callback is never invoked.
function getCommitList(limit, url, callback) {
    svnMon.url = url;
    svnMon.getLatestCommits(limit, function (err, log) {
        if (err) {
            console.log(err);
        } else {
            callback(log, url);
        }
    });
}
|
{
"content_hash": "79e8c9b8cb669c01dc5d94f62be58d3a",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 207,
"avg_line_length": 32.95614035087719,
"alnum_prop": 0.639073729039127,
"repo_name": "joshjung/TeamRepoMonitor",
"id": "a6a77547f211e7cde6505045d0378688ce3acea1",
"size": "3757",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "routes/monitor.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6437"
},
{
"name": "JavaScript",
"bytes": "318857"
},
{
"name": "Shell",
"bytes": "20077"
}
]
}
|
End of preview.
No dataset card yet
- Downloads last month
- 4